diff --git a/.yamato/upm-ci-renderstreaming-packages.yml b/.yamato/upm-ci-renderstreaming-packages.yml index bc8b43619..5e7581827 100644 --- a/.yamato/upm-ci-renderstreaming-packages.yml +++ b/.yamato/upm-ci-renderstreaming-packages.yml @@ -104,4 +104,4 @@ publish_{{ package.name }}: - .yamato/upm-ci-renderstreaming-packages.yml#test_{{ package.name }}_{{ platform.name }}_{{ editor.version }} {% endfor %} {% endfor %} -{% endfor %} \ No newline at end of file +{% endfor %} diff --git a/Assets/Scenes/samplescene.unity b/Assets/Scenes/samplescene.unity index 79c066b85..ff3368841 100644 --- a/Assets/Scenes/samplescene.unity +++ b/Assets/Scenes/samplescene.unity @@ -38,7 +38,7 @@ RenderSettings: m_ReflectionIntensity: 1 m_CustomReflection: {fileID: 0} m_Sun: {fileID: 2035111234} - m_IndirectSpecularColor: {r: 5.6792383, g: 8.17355, b: 13.152499, a: 1} + m_IndirectSpecularColor: {r: 6.066754, g: 8.58853, b: 13.636173, a: 1} m_UseRadianceAmbientProbe: 0 --- !u!157 &3 LightmapSettings: @@ -151,7 +151,7 @@ Transform: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 168218571} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.019999983, y: 1.196, z: 1.8100982} + m_LocalPosition: {x: 0.019999983, y: 1.196, z: 1.8101001} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 405591765} @@ -639,7 +639,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 10 + m_RootOrder: 11 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &292819704 GameObject: @@ -918,7 +918,7 @@ RectTransform: - {fileID: 1079513099} - {fileID: 292819707} m_Father: {fileID: 0} - m_RootOrder: 12 + m_RootOrder: 13 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} m_AnchorMin: {x: 0, y: 0} m_AnchorMax: {x: 0, y: 0} @@ -1020,7 +1020,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 4 + m_RootOrder: 5 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &790650199 GameObject: @@ -1062,7 +1062,7 @@ MonoBehaviour: Version=1.0.0.0, Culture=neutral, PublicKeyToken=null m_text: A m_isRightToLeft: 0 - m_fontAsset: {fileID: 11400000, guid: 8f586378b4e144a9851e7b34d9b748ee, type: 2} + m_fontAsset: {fileID: 0} m_sharedMaterial: {fileID: 2100000, guid: e73a58f6e2794ae7b1b7e50b7fb811b0, type: 2} m_fontSharedMaterials: [] m_fontMaterial: {fileID: 0} @@ -1225,7 +1225,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 3 + m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &1020492026 GameObject: @@ -1278,7 +1278,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 8 + m_RootOrder: 9 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &1037846703 GameObject: @@ -1310,7 +1310,7 @@ Transform: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1037846703} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.019999983, y: 1.196, z: -0.6870241} + m_LocalPosition: {x: 0.019999983, y: 1.196, z: -0.6870246} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 405591765} @@ -1860,7 +1860,9 @@ MonoBehaviour: username: streamingSize: {x: 1280, y: 720} interval: 5 - captureCamera: {fileID: 1299133684} + captureCameras: + - {fileID: 1299133684} + - {fileID: 1742567395} arrayButtonClickEvent: - elementId: 1 click: @@ -1925,7 +1927,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 9 + m_RootOrder: 10 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- 
!u!1001 &1275443619 PrefabInstance: @@ -2170,7 +2172,7 @@ GameObject: - component: {fileID: 1299133682} - component: {fileID: 1299133681} m_Layer: 0 - m_Name: Render Streaming Camera + m_Name: Render Streaming Camera 1 m_TagString: MainCamera m_Icon: {fileID: 0} m_NavMeshLayer: 0 @@ -2409,7 +2411,7 @@ Transform: - {fileID: 405591765} - {fileID: 1547029146} m_Father: {fileID: 0} - m_RootOrder: 6 + m_RootOrder: 7 m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} --- !u!1 &1547029145 GameObject: @@ -2690,8 +2692,227 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 11 + m_RootOrder: 12 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1742567391 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1742567396} + - component: {fileID: 1742567395} + - component: {fileID: 1742567394} + - component: {fileID: 1742567393} + - component: {fileID: 1742567392} + m_Layer: 0 + m_Name: Render Streaming Camera 2 + m_TagString: MainCamera + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!114 &1742567392 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742567391} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 23c1ce4fb46143f46bc5cb5224c934f6, type: 3} + m_Name: + m_EditorClassIdentifier: + clearColorMode: 0 + backgroundColorHDR: {r: 0.025, g: 0.07, b: 0.19, a: 0} + clearDepth: 1 + volumeLayerMask: + serializedVersion: 2 + m_Bits: 4294967295 + volumeAnchorOverride: {fileID: 0} + antialiasing: 2 + dithering: 1 + stopNaNs: 0 + physicalParameters: + m_Iso: 200 + m_ShutterSpeed: 0.005 + m_Aperture: 16 + m_BladeCount: 5 + m_Curvature: {x: 2, y: 11} + m_BarrelClipping: 0.25 + m_Anamorphism: 0 + flipYMode: 0 + fullscreenPassthrough: 0 + customRenderingSettings: 0 + invertFaceCulling: 0 + probeLayerMask: + serializedVersion: 2 + m_Bits: 4294967295 + m_RenderingPathCustomFrameSettings: + bitDatas: + data1: 68184748261149 + data2: 4539628424389459968 + renderingPathCustomFrameSettingsOverrideMask: + mask: + data1: 0 + data2: 0 + defaultFrameSettings: 0 + m_Version: 5 + m_ObsoleteRenderingPath: 0 + m_ObsoleteFrameSettings: + overrides: 0 + enableShadow: 0 + enableContactShadows: 0 + enableShadowMask: 0 + enableSSR: 0 + enableSSAO: 0 + enableSubsurfaceScattering: 0 + enableTransmission: 0 + enableAtmosphericScattering: 0 + enableVolumetrics: 0 + enableReprojectionForVolumetrics: 0 + enableLightLayers: 0 + enableExposureControl: 1 + diffuseGlobalDimmer: 0 + specularGlobalDimmer: 0 + shaderLitMode: 0 + enableDepthPrepassWithDeferredRendering: 0 + enableTransparentPrepass: 0 + enableMotionVectors: 0 + enableObjectMotionVectors: 0 + enableDecals: 0 + enableRoughRefraction: 0 + enableTransparentPostpass: 0 + enableDistortion: 0 + enablePostprocess: 0 + enableOpaqueObjects: 0 + enableTransparentObjects: 0 + enableRealtimePlanarReflection: 0 + enableMSAA: 0 + enableAsyncCompute: 0 + runLightListAsync: 0 + runSSRAsync: 0 + runSSAOAsync: 0 + runContactShadowsAsync: 0 + runVolumeVoxelizationAsync: 0 + lightLoopSettings: + overrides: 0 + enableDeferredTileAndCluster: 0 + enableComputeLightEvaluation: 0 + enableComputeLightVariants: 0 + enableComputeMaterialVariants: 0 + enableFptlForForwardOpaque: 0 + enableBigTilePrepass: 0 + isFptlEnabled: 0 +--- 
!u!114 &1742567393 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742567391} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: e8a636f62116c0a40bbfefdf876d4608, type: 3} + m_Name: + m_EditorClassIdentifier: + m_movementSensitivityFactor: 0.1 + boost: 3.5 + positionLerpTime: 0.2 + mouseSensitivityCurve: + serializedVersion: 2 + m_Curve: + - serializedVersion: 3 + time: 0 + value: 0.5 + inSlope: 4.6264863 + outSlope: 4.6264863 + tangentMode: 0 + weightedMode: 0 + inWeight: 0 + outWeight: 0.11557238 + - serializedVersion: 3 + time: 1 + value: 2.5 + inSlope: 0 + outSlope: 0 + tangentMode: 0 + weightedMode: 0 + inWeight: 0 + outWeight: 0 + m_PreInfinity: 2 + m_PostInfinity: 2 + m_RotationOrder: 4 + rotationLerpTime: 0.01 + invertY: 0 +--- !u!81 &1742567394 +AudioListener: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742567391} + m_Enabled: 1 +--- !u!20 &1742567395 +Camera: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742567391} + m_Enabled: 1 + serializedVersion: 2 + m_ClearFlags: 1 + m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} + m_projectionMatrixMode: 2 + m_GateFitMode: 2 + m_FOVAxisMode: 0 + m_SensorSize: {x: 36, y: 24} + m_LensShift: {x: 0, y: 0} + m_FocalLength: 18 + m_NormalizedViewPortRect: + serializedVersion: 2 + x: 0 + y: 0 + width: 1 + height: 1 + near clip plane: 0.3 + far clip plane: 1000 + field of view: 67.380135 + orthographic: 0 + orthographic size: 5 + m_Depth: -1 + m_CullingMask: + serializedVersion: 2 + m_Bits: 4294967295 + m_RenderingPath: -1 + m_TargetTexture: {fileID: 0} + m_TargetDisplay: 0 + m_TargetEye: 3 + m_HDR: 0 + m_AllowMSAA: 0 + m_AllowDynamicResolution: 0 + m_ForceIntoRT: 1 + m_OcclusionCulling: 1 + m_StereoConvergence: 10 + m_StereoSeparation: 0.022 +--- !u!4 &1742567396 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742567391} + m_LocalRotation: {x: -0.07861943, y: 0.8794968, z: -0.15624021, w: -0.44259855} + m_LocalPosition: {x: 5.122, y: 2.016, z: 1.538} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 0} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 20.146002, y: 233.427, z: 0.001} --- !u!1 &1847340766 GameObject: m_ObjectHideFlags: 19 @@ -2743,7 +2964,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 7 + m_RootOrder: 8 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &2011204166 GameObject: @@ -2806,7 +3027,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 5 + m_RootOrder: 6 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &2035111233 GameObject: @@ -2899,7 +3120,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 2 + m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 45, y: 130, z: 90} --- !u!114 &2035111236 MonoBehaviour: @@ -3038,7 +3259,7 @@ Transform: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2059789766} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 2.510098, y: 1.196, z: 1.1760491} + m_LocalPosition: {x: 2.5101, y: 1.196, z: 1.1760501} m_LocalScale: {x: 
1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 405591765} diff --git a/Assets/Scripts/Editor/RenderStreamingEditor.cs b/Assets/Scripts/Editor/RenderStreamingEditor.cs index 47c861469..6abf409e4 100644 --- a/Assets/Scripts/Editor/RenderStreamingEditor.cs +++ b/Assets/Scripts/Editor/RenderStreamingEditor.cs @@ -13,7 +13,7 @@ public override void OnInspectorGUI() ShowIceServerList(serializedObject.FindProperty("iceServers")); EditorGUILayout.PropertyField(serializedObject.FindProperty("interval")); EditorGUILayout.PropertyField(serializedObject.FindProperty("streamingSize")); - EditorGUILayout.PropertyField(serializedObject.FindProperty("captureCamera")); + EditorGUILayout.PropertyField(serializedObject.FindProperty("captureCameras"), true); EditorGUILayout.PropertyField(serializedObject.FindProperty("arrayButtonClickEvent"), true); serializedObject.ApplyModifiedProperties(); } diff --git a/Assets/Scripts/RenderStreaming.cs b/Assets/Scripts/RenderStreaming.cs index 2d8878507..588efa4cb 100644 --- a/Assets/Scripts/RenderStreaming.cs +++ b/Assets/Scripts/RenderStreaming.cs @@ -18,6 +18,12 @@ public class ButtonClickElement public ButtonClickEvent click; } + class CameraMediaStream + { + public Camera camera; + public MediaStream[] mediaStreams = new MediaStream[2]; + } + public class RenderStreaming : MonoBehaviour { #pragma warning disable 0649 @@ -40,10 +46,11 @@ public class RenderStreaming : MonoBehaviour private float interval = 5.0f; [SerializeField, Tooltip("Camera to capture video stream")] - private Camera captureCamera; + private Camera[] captureCameras; [SerializeField, Tooltip("Array to set your own click event")] private ButtonClickElement[] arrayButtonClickEvent; + #pragma warning restore 0649 private Signaling signaling; @@ -51,8 +58,7 @@ public class RenderStreaming : MonoBehaviour private Dictionary<RTCPeerConnection, Dictionary<int, RTCDataChannel>> mapChannels = new Dictionary<RTCPeerConnection, Dictionary<int, RTCDataChannel>>(); private RTCConfiguration conf; private string sessionId; - private MediaStream videoStream; - private MediaStream audioStream; + private Dictionary<Camera, CameraMediaStream> cameraMediaStreamDict = new Dictionary<Camera, CameraMediaStream>(); public void Awake() { @@ -63,6 +69,7 @@ public void Awake() public void OnDestroy() { + Unity.WebRTC.Audio.Stop(); WebRTC.WebRTC.Finalize(); RemoteInput.Destroy(); } @@ -72,8 +79,24 @@ public IEnumerator Start() { yield break; } - videoStream = captureCamera.CaptureStream(streamingSize.x, streamingSize.y); - audioStream = Unity.WebRTC.Audio.CaptureStream(); + foreach (var camera in captureCameras) + { + var cameraMediaStream = new CameraMediaStream(); + cameraMediaStreamDict.Add(camera, cameraMediaStream); + camera.CreateRenderStreamTexture(1280, 720); + int mediaCount = cameraMediaStream.mediaStreams.Length; + for (int i = 0; i < mediaCount; ++i) + { + cameraMediaStream.mediaStreams[i] = new MediaStream(); + var rt = camera.GetStreamTexture(0); int temp = i==0 ?
1 : (int)Mathf.Pow(i + 1, 10); + var videoTrack = new VideoStreamTrack("videoTrack" + i, rt, 1000000/temp); + cameraMediaStream.mediaStreams[i].AddTrack(videoTrack); + cameraMediaStream.mediaStreams[i].AddTrack(new AudioStreamTrack("audioTrack")); + } + } + Unity.WebRTC.Audio.Start(); + signaling = new Signaling(urlSignaling); var opCreate = signaling.Create(); yield return opCreate; @@ -87,6 +110,7 @@ public IEnumerator Start() conf = default; conf.iceServers = iceServers; + conf.bundle_policy = RTCBundlePolicy.kBundlePolicyMaxBundle; StartCoroutine(WebRTC.WebRTC.Update()); StartCoroutine(LoopPolling()); } @@ -135,30 +159,33 @@ IEnumerator GetOffer() { continue; } - var pc = new RTCPeerConnection(); - pcs.Add(offer.connectionId, pc); + var pc = new RTCPeerConnection(ref conf); + pcs.Add(offer.connectionId, pc); pc.OnDataChannel = new DelegateOnDataChannel(channel => { OnDataChannel(pc, channel); }); - pc.SetConfiguration(ref conf); pc.OnIceCandidate = new DelegateOnIceCandidate(candidate => { StartCoroutine(OnIceCandidate(offer.connectionId, candidate)); }); pc.OnIceConnectionChange = new DelegateOnIceConnectionChange(state => { if(state == RTCIceConnectionState.Disconnected) { - pc.Close(); + pc.Close(); + pcs.Remove(offer.connectionId); } }); + //make the video bit rate start at 16000 kbit/s and cap it at 160000 kbit/s. string pattern = @"(a=fmtp:\d+ .*level-asymmetry-allowed=.*)\r\n"; _desc.sdp = Regex.Replace(_desc.sdp, pattern, "$1;x-google-start-bitrate=16000;x-google-max-bitrate=160000\r\n"); pc.SetRemoteDescription(ref _desc); - foreach (var track in videoStream.GetTracks()) - { - pc.AddTrack(track); - } - foreach(var track in audioStream.GetTracks()) + foreach (var k in cameraMediaStreamDict.Keys) { - pc.AddTrack(track); + foreach (var mediaStream in cameraMediaStreamDict[k].mediaStreams) + { + foreach (var track in mediaStream.GetTracks()) + { + pc.AddTrack(track, mediaStream.Id); + } + } } StartCoroutine(Answer(connectionId)); } @@ -182,7 +209,7 @@ IEnumerator Answer(string connectionId) Debug.LogError($"Network Error: {opLocalDesc.error}"); yield break; } - var op3 = signaling.PostAnswer(this.sessionId, connectionId, op.desc.sdp); + var op3 = signaling.PostAnswer(this.sessionId, connectionId, op.desc.sdp); yield return op3; if (op3.webRequest.isNetworkError) { @@ -216,6 +243,7 @@ IEnumerator GetCandidate() { continue; } + foreach (var candidate in candidateContainer.candidates) { RTCIceCandidate _candidate = default; diff --git a/Packages/com.unity.renderstreaming/Samples~/HDRP/Editor/RenderStreamingHDRPSimple.unitypackage b/Packages/com.unity.renderstreaming/Samples~/HDRP/Editor/RenderStreamingHDRPSimple.unitypackage index e1b316f45..931488981 100644 Binary files a/Packages/com.unity.renderstreaming/Samples~/HDRP/Editor/RenderStreamingHDRPSimple.unitypackage and b/Packages/com.unity.renderstreaming/Samples~/HDRP/Editor/RenderStreamingHDRPSimple.unitypackage differ diff --git a/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStream.cs b/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStream.cs index 51967429c..e9fa3bf44 100644 --- a/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStream.cs +++ b/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStream.cs @@ -6,122 +6,46 @@ namespace Unity.WebRTC { - public class MediaStream + public class MediaStream { - private IntPtr self; - private string id; + internal IntPtr nativePtr; + internal string id; + protected List<MediaStreamTrack> mediaStreamTrackList = new List<MediaStreamTrack>(); + private static int sMediaStreamCount = 0; + public string Id { get => id; private set { } } -
private Dictionary VideoTrackToRts; - private List AudioTracks; + public MediaStream() : base() + { + sMediaStreamCount++; + id = "MediaStream" + sMediaStreamCount; + nativePtr = WebRTC.Context.CreateMediaStream(id); + } - private void StopTrack(MediaStreamTrack track) + public MediaStream(MediaStreamTrack[] tracks) : base() { + sMediaStreamCount++; + id = "MediaStream" + sMediaStreamCount; + nativePtr = WebRTC.Context.CreateMediaStream(id); - if (track.Kind == TrackKind.Video) - { - WebRTC.Context.StopMediaStreamTrack(track.self); - RenderTexture[] rts = VideoTrackToRts[track]; - if (rts != null) - { - CameraExtension.RemoveRt(rts); - rts[0].Release(); - rts[1].Release(); - UnityEngine.Object.Destroy(rts[0]); - UnityEngine.Object.Destroy(rts[1]); - } - } - else + foreach (var t in tracks) { - Audio.Stop(); + AddTrack(t); } - - } - private RenderTexture[] GetRts(MediaStreamTrack track) - { - return VideoTrackToRts[track]; - } - public MediaStreamTrack[] GetTracks() - { - MediaStreamTrack[] tracks = new MediaStreamTrack[VideoTrackToRts.Keys.Count + AudioTracks.Count]; - AudioTracks.CopyTo(tracks, 0); - VideoTrackToRts.Keys.CopyTo(tracks, AudioTracks.Count); - return tracks; - } - public MediaStreamTrack[] GetAudioTracks() - { - return AudioTracks.ToArray(); - } - public MediaStreamTrack[] GetVideoTracks() - { - MediaStreamTrack[] tracks = new MediaStreamTrack[VideoTrackToRts.Keys.Count]; - VideoTrackToRts.Keys.CopyTo(tracks, 0); - return tracks; } public void AddTrack(MediaStreamTrack track) { - if(track.Kind == TrackKind.Video) - { - VideoTrackToRts[track] = track.getRts(track); - } - else - { - AudioTracks.Add(track); - } - NativeMethods.MediaStreamAddTrack(self, track.self); - } - public void RemoveTrack(MediaStreamTrack track) - { - NativeMethods.MediaStreamRemoveTrack(self, track.self); - } - //for camera CaptureStream - internal MediaStream(RenderTexture[] rts, IntPtr ptr) - { - self = ptr; - id = Marshal.PtrToStringAnsi(NativeMethods.MediaStreamGetID(self)); - VideoTrackToRts = new Dictionary(); - AudioTracks = new List(); - //get initial tracks - int trackSize = 0; - IntPtr tracksNativePtr = NativeMethods.MediaStreamGetVideoTracks(self, ref trackSize); - IntPtr[] tracksPtr = new IntPtr[trackSize]; - Marshal.Copy(tracksNativePtr, tracksPtr, 0, trackSize); - //TODO: Linux compatibility - Marshal.FreeCoTaskMem(tracksNativePtr); - for (int i = 0; i < trackSize; i++) - { - MediaStreamTrack track = new MediaStreamTrack(tracksPtr[i]); - track.stopTrack += StopTrack; - track.getRts += GetRts; - VideoTrackToRts[track] = rts; - } + NativeMethods.MediaStreamAddTrack(nativePtr, track.nativePtr); + mediaStreamTrackList.Add(track); } - //for audio CaptureStream - internal MediaStream(IntPtr ptr) - { - self = ptr; - id = Marshal.PtrToStringAnsi(NativeMethods.MediaStreamGetID(self)); - VideoTrackToRts = new Dictionary(); - AudioTracks = new List(); - //get initial tracks - int trackSize = 0; - IntPtr trackNativePtr = NativeMethods.MediaStreamGetAudioTracks(self, ref trackSize); - IntPtr[] tracksPtr = new IntPtr[trackSize]; - Marshal.Copy(trackNativePtr, tracksPtr, 0, trackSize); - //TODO: Linux compatibility - Marshal.FreeCoTaskMem(trackNativePtr); - for (int i = 0; i < trackSize; i++) - { - MediaStreamTrack track = new MediaStreamTrack(tracksPtr[i]); - track.stopTrack += StopTrack; - track.getRts += GetRts; - AudioTracks.Add(track); - } + public MediaStreamTrack[] GetTracks() + { + return mediaStreamTrackList.ToArray(); } - } + internal class Cleaner : MonoBehaviour { private Action onDestroy; @@ 
-150,46 +74,68 @@ public static void AddCleanerCallback(this GameObject obj, Action callback) Cleaner.AddCleanerCallback(obj, callback); } } + + internal class CameraCapturerTextures + { + internal RenderTexture camRenderTexture; + internal List webRTCTextures = new List(); + } + public static class CameraExtension { - internal static List camCopyRts = new List(); - internal static bool started = false; - public static MediaStream CaptureStream(this Camera cam, int width, int height) + internal static Dictionary camCapturerTexturesDict = new Dictionary(); + + public static int GetStreamTextureCount(this Camera cam) { - if (camCopyRts.Count > 0) + CameraCapturerTextures textures; + if (camCapturerTexturesDict.TryGetValue(cam, out textures)) { - throw new NotImplementedException("Currently not allowed multiple MediaStream"); + return textures.webRTCTextures.Count; } + return 0; + } - RenderTexture[] rts = new RenderTexture[2]; - //rts[0] for render target, rts[1] for flip and WebRTC source - rts[0] = new RenderTexture(width, height, 0, RenderTextureFormat.BGRA32); - rts[1] = new RenderTexture(width, height, 0, RenderTextureFormat.BGRA32); - rts[0].Create(); - rts[1].Create(); - camCopyRts.Add(rts); - cam.targetTexture = rts[0]; - cam.gameObject.AddCleanerCallback(() => + public static RenderTexture GetStreamTexture(this Camera cam, int index) { + CameraCapturerTextures textures; + if (camCapturerTexturesDict.TryGetValue(cam, out textures)) { - if (rts != null) + if (index >= 0 && index < textures.webRTCTextures.Count) { - CameraExtension.RemoveRt(rts); - rts[0].Release(); - rts[1].Release(); - UnityEngine.Object.Destroy(rts[0]); - UnityEngine.Object.Destroy(rts[1]); + return textures.webRTCTextures[index]; } - }); - started = true; - return new MediaStream(rts, WebRTC.Context.CaptureVideoStream(rts[1].GetNativeTexturePtr(), width, height)); + } + return null; } - public static void RemoveRt(RenderTexture[] rts) + + public static void CreateRenderStreamTexture(this Camera cam, int width, int height, int count = 1) { - camCopyRts.Remove(rts); - if (camCopyRts.Count == 0) + CameraCapturerTextures cameraCapturerTextures = new CameraCapturerTextures(); + camCapturerTexturesDict.Add(cam, cameraCapturerTextures); + + cameraCapturerTextures.camRenderTexture = new RenderTexture(width, height, 0, RenderTextureFormat.BGRA32); + cameraCapturerTextures.camRenderTexture.Create(); + + int mipCount = count; + for (int i = 1, mipLevel = 1; i <= mipCount; ++i, mipLevel *= 4) { - started = false; + RenderTexture webRtcTex = new RenderTexture(width / mipLevel, height / mipLevel, 0, RenderTextureFormat.BGRA32); + webRtcTex.Create(); + cameraCapturerTextures.webRTCTextures.Add(webRtcTex); } + + cam.targetTexture = cameraCapturerTextures.camRenderTexture; + cam.gameObject.AddCleanerCallback(() => + { + cameraCapturerTextures.camRenderTexture.Release(); + UnityEngine.Object.Destroy(cameraCapturerTextures.camRenderTexture); + + foreach (var v in cameraCapturerTextures.webRTCTextures) + { + v.Release(); + UnityEngine.Object.Destroy(v); + } + cameraCapturerTextures.webRTCTextures.Clear(); + }); } } @@ -198,12 +144,7 @@ public static class Audio { private static bool started = false; private static AudioInput audioInput = new AudioInput(); - public static MediaStream CaptureStream() - { - audioInput.BeginRecording(); - started = true; - return new MediaStream(WebRTC.Context.CaptureAudioStream()); - } + public static void Update() { if (started) @@ -211,6 +152,13 @@ public static void Update() audioInput.UpdateAudio(); 
} } + + public static void Start() + { + audioInput.BeginRecording(); + started = true; + } + public static void Stop() { if (started) diff --git a/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStreamTrack.cs b/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStreamTrack.cs index 30df48c7d..5d30853d7 100644 --- a/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStreamTrack.cs +++ b/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStreamTrack.cs @@ -6,50 +6,49 @@ namespace Unity.WebRTC { public class MediaStreamTrack { - internal IntPtr self; - private TrackKind kind; - private string id; - private bool enabled; - private TrackState readyState; - internal Action stopTrack; - internal Func getRts; + internal IntPtr nativePtr; + protected string id; + protected TrackKind kind; + + internal MediaStreamTrack(IntPtr ptr) + { + nativePtr = ptr; + kind = NativeMethods.MediaStreamTrackGetKind(nativePtr); + id = Marshal.PtrToStringAnsi(NativeMethods.MediaStreamTrackGetID(nativePtr)); + } public bool Enabled { - get - { - return NativeMethods.MediaStreamTrackGetEnabled(self); - } - set - { - NativeMethods.MediaStreamTrackSetEnabled(self, value); - } + get { return NativeMethods.MediaStreamTrackGetEnabled(nativePtr); } + set { NativeMethods.MediaStreamTrackSetEnabled(nativePtr, value); } } + public TrackState ReadyState { get - { - return NativeMethods.MediaStreamTrackGetReadyState(self); - } + { return NativeMethods.MediaStreamTrackGetReadyState(nativePtr); } private set { } } public TrackKind Kind { get => kind; private set { } } public string Id { get => id; private set { } } + } - internal MediaStreamTrack(IntPtr ptr) + public class VideoStreamTrack : MediaStreamTrack + { + public VideoStreamTrack(string label, RenderTexture rt, int bitRateMbps=10000000) : base(WebRTC.Context.CreateVideoTrack(label, rt.GetNativeTexturePtr(), rt.width, rt.height, bitRateMbps)) { - self = ptr; - kind = NativeMethods.MediaStreamTrackGetKind(self); - id = Marshal.PtrToStringAnsi(NativeMethods.MediaStreamTrackGetID(self)); } - //Disassociate track from its source(video or audio), not for destroying the track - public void Stop() + } + + public class AudioStreamTrack : MediaStreamTrack + { + public AudioStreamTrack(string label) : base(WebRTC.Context.CreateAudioTrack(label)) { - stopTrack(this); } } + public enum TrackKind { Audio, diff --git a/Packages/com.unity.webrtc/Runtime/Srcipts/RTCPeerConnection.cs b/Packages/com.unity.webrtc/Runtime/Srcipts/RTCPeerConnection.cs index 518bb07ec..2a97317ba 100644 --- a/Packages/com.unity.webrtc/Runtime/Srcipts/RTCPeerConnection.cs +++ b/Packages/com.unity.webrtc/Runtime/Srcipts/RTCPeerConnection.cs @@ -212,10 +212,11 @@ public void Close() } } - public RTCRtpSender AddTrack(MediaStreamTrack track) + public RTCRtpSender AddTrack(MediaStreamTrack track, string mediaStreamId="unity") { - return new RTCRtpSender(NativeMethods.PeerConnectionAddTrack(self, track.self)); + return new RTCRtpSender(NativeMethods.PeerConnectionAddTrack(self, track.nativePtr, mediaStreamId)); } + public void RemoveTrack(RTCRtpSender sender) { NativeMethods.PeerConnectionRemoveTrack(self, sender.self); diff --git a/Packages/com.unity.webrtc/Runtime/Srcipts/WebRTC.cs b/Packages/com.unity.webrtc/Runtime/Srcipts/WebRTC.cs index 58a30059e..986da31bd 100644 --- a/Packages/com.unity.webrtc/Runtime/Srcipts/WebRTC.cs +++ b/Packages/com.unity.webrtc/Runtime/Srcipts/WebRTC.cs @@ -176,11 +176,19 @@ public enum RTCIceTransportPolicy All } + public enum RTCBundlePolicy + { + kBundlePolicyBalanced, + kBundlePolicyMaxBundle, + 
kBundlePolicyMaxCompat + }; + [Serializable] public struct RTCConfiguration { public RTCIceServer[] iceServers; public RTCIceTransportPolicy iceTransportPolicy; + public RTCBundlePolicy bundle_policy; } public enum CodecInitializationResult @@ -259,15 +267,16 @@ public static IEnumerator Update() { // Wait until all frame rendering is done yield return new WaitForEndOfFrame(); - if (CameraExtension.started) + //Blit is for DirectX Rendering API Only + + foreach (var k in CameraExtension.camCapturerTexturesDict.Keys) { - //Blit is for DirectX Rendering API Only - foreach(var rts in CameraExtension.camCopyRts) + foreach (var rt in CameraExtension.camCapturerTexturesDict[k].webRTCTextures) { - Graphics.Blit(rts[0], rts[1], flipMat); - } - GL.IssuePluginEvent(s_renderCallback, 0); + Graphics.Blit(CameraExtension.camCapturerTexturesDict[k].camRenderTexture, rt, flipMat); + } } + GL.IssuePluginEvent(s_renderCallback, 0); Audio.Update(); } } @@ -379,7 +388,7 @@ internal static class NativeMethods [DllImport(WebRTC.Lib)] public static extern void PeerConnectionSetRemoteDescription(IntPtr ptr, ref RTCSessionDescription desc); [DllImport(WebRTC.Lib)] - public static extern IntPtr PeerConnectionAddTrack(IntPtr pc, IntPtr track); + public static extern IntPtr PeerConnectionAddTrack(IntPtr pc, IntPtr track, [MarshalAs(UnmanagedType.LPStr, SizeConst = 256)] string mediaStreamId); [DllImport(WebRTC.Lib)] public static extern void PeerConnectionRemoveTrack(IntPtr pc, IntPtr sender); [DllImport(WebRTC.Lib)] @@ -413,9 +422,11 @@ internal static class NativeMethods [DllImport(WebRTC.Lib)] public static extern void DataChannelRegisterOnClose(IntPtr ptr, DelegateOnClose callback); [DllImport(WebRTC.Lib)] - public static extern IntPtr CaptureVideoStream(IntPtr context, IntPtr rt, int width, int height); + public static extern IntPtr CreateMediaStream(IntPtr context, [MarshalAs(UnmanagedType.LPStr, SizeConst = 256)] string label); + [DllImport(WebRTC.Lib)] + public static extern IntPtr CreateVideoTrack(IntPtr context, [MarshalAs(UnmanagedType.LPStr, SizeConst = 256)] string label, IntPtr rt, int width, int height, int bitRate); [DllImport(WebRTC.Lib)] - public static extern IntPtr CaptureAudioStream(IntPtr context); + public static extern IntPtr CreateAudioTrack(IntPtr context, [MarshalAs(UnmanagedType.LPStr, SizeConst = 256)] string label); [DllImport(WebRTC.Lib)] public static extern void MediaStreamAddTrack(IntPtr stream, IntPtr track); [DllImport(WebRTC.Lib)] @@ -453,8 +464,9 @@ internal struct Context public static Context Create(int uid = 0) { return NativeMethods.ContextCreate(uid); } public static CodecInitializationResult GetCodecInitializationResult() { return NativeMethods.GetCodecInitializationResult(); } public void Destroy(int uid = 0) { NativeMethods.ContextDestroy(uid); self = IntPtr.Zero; } - public IntPtr CaptureVideoStream(IntPtr rt, int width, int height) { return NativeMethods.CaptureVideoStream(self, rt, width, height); } - public IntPtr CaptureAudioStream() { return NativeMethods.CaptureAudioStream(self); } + public IntPtr CreateMediaStream(string label) { return NativeMethods.CreateMediaStream(self, label); } + public IntPtr CreateVideoTrack(string label, IntPtr rt, int width, int height, int bitRate) { return NativeMethods.CreateVideoTrack(self, label, rt, width, height, bitRate); } + public IntPtr CreateAudioTrack(string label) {return NativeMethods.CreateAudioTrack(self, label);} public IntPtr GetRenderEventFunc() { return NativeMethods.GetRenderEventFunc(self); } public void 
StopMediaStreamTrack(IntPtr track) { NativeMethods.StopMediaStreamTrack(self, track); } } diff --git a/Packages/com.unity.webrtc/Samples~/MediaStreamSample.cs b/Packages/com.unity.webrtc/Samples~/MediaStreamSample.cs index cb39b04bb..51b296ea0 100644 --- a/Packages/com.unity.webrtc/Samples~/MediaStreamSample.cs +++ b/Packages/com.unity.webrtc/Samples~/MediaStreamSample.cs @@ -19,7 +19,7 @@ public class MediaStreamSample : MonoBehaviour private RTCPeerConnection pc1, pc2; private List<RTCRtpSender> pc1Senders, pc2Senders; - private Unity.WebRTC.MediaStream audioStream, videoStream; + private Unity.WebRTC.MediaStream mediaStream; private RTCDataChannel dataChannel, remoteDataChannel; private Coroutine sdpCheck; private string msg; @@ -159,14 +159,11 @@ void Pc2OnIceCandidate(RTCIceCandidate candidate) } public void AddTracks() { - foreach (var track in audioStream.GetTracks()) + foreach (var track in mediaStream.GetTracks()) { pc1Senders.Add (pc1.AddTrack(track)); } - foreach(var track in videoStream.GetTracks()) - { - pc1Senders.Add(pc1.AddTrack(track)); - } + if(!videoUpdateStarted) { StartCoroutine(WebRTC.Update()); @@ -212,8 +209,19 @@ void Call() RTCDataChannelInit conf = new RTCDataChannelInit(true); dataChannel = pc1.CreateDataChannel("data", ref conf); - audioStream = Audio.CaptureStream(); - videoStream = cam.CaptureStream(1280, 720); + + cam.CreateRenderStreamTexture(1280, 720); + mediaStream = new MediaStream(); + int texCount = cam.GetStreamTextureCount(); + for (int i = 0; i < texCount; ++i) + { + RenderTexture rt = cam.GetStreamTexture(i); + mediaStream.AddTrack(new VideoStreamTrack("videoTrack"+i, rt)); + } + + mediaStream.AddTrack(new AudioStreamTrack("audioTrack")); + Audio.Start(); + RtImage.texture = cam.targetTexture; } diff --git a/Packages/com.unity.webrtc/Tests/Runtime/MediaStreamTest.cs b/Packages/com.unity.webrtc/Tests/Runtime/MediaStreamTest.cs index 1260d58bb..097461467 100644 --- a/Packages/com.unity.webrtc/Tests/Runtime/MediaStreamTest.cs +++ b/Packages/com.unity.webrtc/Tests/Runtime/MediaStreamTest.cs @@ -49,9 +49,16 @@ public IEnumerator MediaStreamTest_AddAndRemoveMediaStream() { pc2Senders.Add(peer2.AddTrack(e.Track)); }); - foreach (var track in cam.CaptureStream(1280, 720).GetTracks()) + + cam.CreateRenderStreamTexture(1280, 720, 2); + var mediaStream = new MediaStream(); + int texCount = cam.GetStreamTextureCount(); + for (int i = 0; i < texCount; ++i) { - pc1Senders.Add(peer1.AddTrack(track)); + var rt = cam.GetStreamTexture(i); + var videoStreamTrack = new VideoStreamTrack("videoTrack"+i, rt); + mediaStream.AddTrack(videoStreamTrack); + pc1Senders.Add(peer1.AddTrack(videoStreamTrack)); } var conf = new RTCDataChannelInit(true); diff --git a/Plugin/WebRTCPlugin/Callback.cpp b/Plugin/WebRTCPlugin/Callback.cpp index 1b1dad402..8c9750b3f 100644 --- a/Plugin/WebRTCPlugin/Callback.cpp +++ b/Plugin/WebRTCPlugin/Callback.cpp @@ -14,9 +14,9 @@ namespace WebRTC ID3D11Device* g_D3D11Device = nullptr; //natively created ID3D11Texture2D ptrs UnityFrameBuffer* renderTextures[bufferedFrameNum]; - Context* s_context; } + using namespace WebRTC; //get d3d11 device static void UNITY_INTERFACE_API OnGraphicsDeviceEvent(UnityGfxDeviceEventType eventType) @@ -35,14 +35,6 @@ static void UNITY_INTERFACE_API OnGraphicsDeviceEvent(UnityGfxDeviceEventType ev } case kUnityGfxDeviceEventShutdown: { - for (auto rt : renderTextures) - { - if (rt) - { - rt->Release(); - rt = nullptr; - } - } //UnityPluginUnload not called normally s_Graphics->UnregisterDeviceEventCallback(OnGraphicsDeviceEvent);
break; diff --git a/Plugin/WebRTCPlugin/Context.cpp b/Plugin/WebRTCPlugin/Context.cpp index 74e9d3be0..b172fc0b4 100644 --- a/Plugin/WebRTCPlugin/Context.cpp +++ b/Plugin/WebRTCPlugin/Context.cpp @@ -1,5 +1,6 @@ #include "pch.h" #include "WebRTCPlugin.h" +#include "UnityEncoder.h" #include "Context.h" namespace WebRTC @@ -242,6 +243,9 @@ namespace WebRTC config.servers.push_back(iceServer); } config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; + + Json::Value bundle_policy = configJson["bundle_policy"]; + config.bundle_policy = (webrtc::PeerConnectionInterface::BundlePolicy)bundle_policy.asInt(); } #pragma warning(push) #pragma warning(disable: 4715) @@ -283,9 +287,9 @@ namespace WebRTC rtc::InitializeSSL(); audioDevice = new rtc::RefCountedObject<DummyAudioDevice>(); - nvVideoCapturerUnique = std::make_unique<NvVideoCapturer>(); - nvVideoCapturer = nvVideoCapturerUnique.get(); - auto dummyVideoEncoderFactory = std::make_unique<DummyVideoEncoderFactory>(nvVideoCapturer); + + auto dummyVideoEncoderFactory = std::make_unique<DummyVideoEncoderFactory>(); + pDummyVideoEncoderFactory = dummyVideoEncoderFactory.get(); peerConnectionFactory = webrtc::CreatePeerConnectionFactory( workerThread.get(), @@ -298,16 +302,18 @@ namespace WebRTC webrtc::CreateBuiltinVideoDecoderFactory(), nullptr, nullptr); + } Context::~Context() { + pDummyVideoEncoderFactory->Destroy(); clients.clear(); peerConnectionFactory = nullptr; - audioTrack = nullptr; - videoTracks.clear(); - audioStream = nullptr; - videoStreams.clear(); + + mediaSteamTrackList.clear(); + mediaStreamMap.clear(); + nvVideoCapturerList.clear(); workerThread->Quit(); workerThread.reset(); @@ -315,24 +321,49 @@ namespace WebRTC signalingThread.reset(); } - webrtc::MediaStreamInterface* Context::CreateVideoStream(UnityFrameBuffer* frameBuffer) + void Context::EncodeFrame() { - //TODO: label and stream id should be maintained in some way for multi-stream - auto videoTrack = peerConnectionFactory->CreateVideoTrack( - "video", peerConnectionFactory->CreateVideoSource(std::move(nvVideoCapturerUnique))); - if (!videoTracks.count(frameBuffer)) + for (std::list<UnityVideoCapturer*>::iterator it= nvVideoCapturerList.begin(); it!= nvVideoCapturerList.end(); ++it) { - videoTracks[frameBuffer] = videoTrack; + (*it)->EncodeVideoData(); } - auto videoStream = peerConnectionFactory->CreateLocalMediaStream("video"); - videoStream->AddTrack(videoTrack); - videoStreams.push_back(videoStream); - nvVideoCapturer->unityRT = frameBuffer; - nvVideoCapturer->StartEncoder(); - return videoStream.get(); } - webrtc::MediaStreamInterface* Context::CreateAudioStream() + void Context::StopCapturer() + { + for (std::list<UnityVideoCapturer*>::iterator it = nvVideoCapturerList.begin(); it != nvVideoCapturerList.end(); ++it) + { + (*it)->Stop(); + } + } + + webrtc::MediaStreamInterface* Context::CreateMediaStream(const std::string& stream_id) + { + if (mediaStreamMap.count(stream_id) == 0) + { + mediaStreamMap[stream_id] = peerConnectionFactory->CreateLocalMediaStream(stream_id); + } + + return mediaStreamMap[stream_id]; + } + + webrtc::MediaStreamTrackInterface* Context::CreateVideoTrack(const std::string& label, UnityFrameBuffer* frameBuffer, int32 width, int32 height, int32 bitRate) + { + UnityEncoder* pUnityEncoder = pDummyVideoEncoderFactory->CreatePlatformEncoder(WebRTC::Nvidia, width, height, bitRate); + UnityVideoCapturer* pUnityVideoCapturer = new UnityVideoCapturer(pUnityEncoder, width, height); + pUnityVideoCapturer->InitializeEncoder(); + pDummyVideoEncoderFactory->AddCapturer(pUnityVideoCapturer); + + auto videoTrack = peerConnectionFactory->CreateVideoTrack(label,
peerConnectionFactory->CreateVideoSource(pUnityVideoCapturer)); + pUnityVideoCapturer->unityRT = frameBuffer; + pUnityVideoCapturer->StartEncoder(); + + nvVideoCapturerList.push_back(pUnityVideoCapturer); + mediaSteamTrackList.push_back(videoTrack); + return videoTrack; + } + + webrtc::MediaStreamTrackInterface* Context::CreateAudioTrack(const std::string& label) { //avoid optimization specially for voice cricket::AudioOptions audioOptions; @@ -340,10 +371,9 @@ namespace WebRTC audioOptions.noise_suppression = false; audioOptions.highpass_filter = false; //TODO: label and stream id should be maintained in some way for multi-stream - audioTrack = peerConnectionFactory->CreateAudioTrack("audio", peerConnectionFactory->CreateAudioSource(audioOptions)); - audioStream = peerConnectionFactory->CreateLocalMediaStream("audio"); - audioStream->AddTrack(audioTrack); - return audioStream.get(); + auto audioTrack = peerConnectionFactory->CreateAudioTrack(label, peerConnectionFactory->CreateAudioSource(audioOptions)); + mediaSteamTrackList.push_back(audioTrack); + return audioTrack; } PeerSDPObserver* PeerSDPObserver::Create(DelegateSetSDSuccess onSuccess, DelegateSetSDFailure onFailure) diff --git a/Plugin/WebRTCPlugin/Context.h b/Plugin/WebRTCPlugin/Context.h index 9bd487a87..2123d2eeb 100644 --- a/Plugin/WebRTCPlugin/Context.h +++ b/Plugin/WebRTCPlugin/Context.h @@ -1,9 +1,10 @@ #pragma once +#include "UnityEncoder.h" #include "DummyAudioDevice.h" #include "DummyVideoEncoder.h" #include "PeerConnectionObject.h" -#include "NvVideoCapturer.h" - +#include "UnityVideoCapturer.h" +#include "NvEncoder.h" namespace WebRTC { @@ -39,16 +40,17 @@ namespace WebRTC { public: explicit Context(int uid = -1); - webrtc::MediaStreamInterface* CreateVideoStream(UnityFrameBuffer* frameBuffer); - webrtc::MediaStreamInterface* CreateAudioStream(); + webrtc::MediaStreamInterface* CreateMediaStream(const std::string& stream_id); + webrtc::MediaStreamTrackInterface* CreateVideoTrack(const std::string& label, UnityFrameBuffer* frameBuffer, int32 width, int32 height, int32 bitRate); + webrtc::MediaStreamTrackInterface* CreateAudioTrack(const std::string& label); ~Context(); PeerConnectionObject* CreatePeerConnection(int id); PeerConnectionObject* CreatePeerConnection(int id, const std::string& conf); + + void EncodeFrame(); + void StopCapturer(); void DeletePeerConnection(int id) { clients.erase(id); } - void InitializeEncoder(int32 width, int32 height) { nvVideoCapturer->InitializeEncoder(width, height); } - void EncodeFrame() { nvVideoCapturer->EncodeVideoData(); } - void StopCapturer() { nvVideoCapturer->Stop(); } void ProcessAudioData(const float* data, int32 size) { audioDevice->ProcessAudioData(data, size); } private: int m_uid; @@ -56,14 +58,12 @@ namespace WebRTC std::unique_ptr<rtc::Thread> signalingThread; std::map<int, rtc::scoped_refptr<PeerConnectionObject>> clients; rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> peerConnectionFactory; - NvVideoCapturer* nvVideoCapturer; - std::unique_ptr<NvVideoCapturer> nvVideoCapturerUnique; + DummyVideoEncoderFactory* pDummyVideoEncoderFactory; + std::map<std::string, rtc::scoped_refptr<webrtc::MediaStreamInterface>> mediaStreamMap; + std::list<rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>> mediaSteamTrackList; + + std::list<UnityVideoCapturer*> nvVideoCapturerList; rtc::scoped_refptr<DummyAudioDevice> audioDevice; - rtc::scoped_refptr<webrtc::AudioTrackInterface> audioTrack; - rtc::scoped_refptr<webrtc::MediaStreamInterface> audioStream; - //TODO: move videoTrack to NvVideoCapturer and maintain multiple NvVideoCapturer here - std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>> videoStreams; - std::map<UnityFrameBuffer*, rtc::scoped_refptr<webrtc::VideoTrackInterface>> videoTracks; }; class PeerSDPObserver : public webrtc::SetSessionDescriptionObserver diff --git a/Plugin/WebRTCPlugin/DummyVideoEncoder.cpp b/Plugin/WebRTCPlugin/DummyVideoEncoder.cpp index 1a5dff007..7cba0071d 100644
--- a/Plugin/WebRTCPlugin/DummyVideoEncoder.cpp +++ b/Plugin/WebRTCPlugin/DummyVideoEncoder.cpp @@ -1,7 +1,9 @@ #include "pch.h" +#include "UnityEncoder.h" #include "DummyVideoEncoder.h" -#include "NvVideoCapturer.h" +#include "UnityVideoCapturer.h" #include +#include "NvEncoder.h" namespace WebRTC { @@ -67,7 +69,22 @@ namespace WebRTC SetRate(allocation.get_sum_kbps() * 1000); return 0; } - DummyVideoEncoderFactory::DummyVideoEncoderFactory(NvVideoCapturer* videoCapturer):capturer(videoCapturer){} + + DummyVideoEncoderFactory::DummyVideoEncoderFactory() + { + + } + + void DummyVideoEncoderFactory::Destroy() + { + for (std::list::iterator it = unityEncoders.begin(); it!= unityEncoders.end(); ++it) + { + delete *it; + } + unityEncoders.clear(); + NvEncoder::DestroyEncoderTexture(); + } + std::vector DummyVideoEncoderFactory::GetSupportedFormats() const { const absl::optional profileLevelId = @@ -87,8 +104,48 @@ namespace WebRTC const webrtc::SdpVideoFormat& format) { auto dummyVideoEncoder = std::make_unique(); - dummyVideoEncoder->SetKeyFrame.connect(capturer, &NvVideoCapturer::SetKeyFrame); - dummyVideoEncoder->SetRate.connect(capturer, &NvVideoCapturer::SetRate); + + { + //todo: According to condition of format choose different capturer. + //UnityVideoCapturer* pCapturer = *(++capturers.begin()); + + //dummyVideoEncoder->SetKeyFrame.connect(pCapturer, &UnityVideoCapturer::SetKeyFrame); + //dummyVideoEncoder->SetRate.connect(pCapturer, &UnityVideoCapturer::SetRate); + } + return dummyVideoEncoder; } + + UnityEncoder* DummyVideoEncoderFactory::CreatePlatformEncoder(EncoderPlatform platform, int width, int height, int bitRate) + { + UnityEncoder* pEncoder = NULL; + switch (platform) + { + case WebRTC::Nvidia: + pEncoder = new NvEncoder(); + break; + case WebRTC::Amd: + break; + case WebRTC::Soft: + break; + default: + break; + } + pEncoder->InitEncoder(width, height, bitRate); + unityEncoders.push_back(pEncoder); + return pEncoder; + } + + UnityEncoder* DummyVideoEncoderFactory::GetPlatformEncoder(EncoderPlatform platform, int width, int height, int bitRate) + { + for (std::list::iterator it = unityEncoders.begin(); it != unityEncoders.end(); ++it) + { + if ((*it)->getEncodeWidth() == width && (*it)->getEncodeHeight() == height && (*it)->getBitRate() == bitRate) { + return (*it); + } + } + + return CreatePlatformEncoder(platform, width, height, bitRate); + } + } diff --git a/Plugin/WebRTCPlugin/DummyVideoEncoder.h b/Plugin/WebRTCPlugin/DummyVideoEncoder.h index 43437e2ad..fe20463ec 100644 --- a/Plugin/WebRTCPlugin/DummyVideoEncoder.h +++ b/Plugin/WebRTCPlugin/DummyVideoEncoder.h @@ -2,7 +2,7 @@ namespace WebRTC { - class NvVideoCapturer; + class UnityVideoCapturer; class DummyVideoEncoder : public webrtc::VideoEncoder { public: @@ -38,6 +38,13 @@ namespace WebRTC webrtc::VideoBitrateAllocation lastBitrate; }; + enum EncoderPlatform + { + Nvidia, + Amd, + Soft, + }; + class DummyVideoEncoderFactory : public webrtc::VideoEncoderFactory { public: @@ -51,8 +58,14 @@ namespace WebRTC // Creates a VideoEncoder for the specified format. 
virtual std::unique_ptr CreateVideoEncoder( const webrtc::SdpVideoFormat& format) override; - DummyVideoEncoderFactory(NvVideoCapturer* videoCapturer); + DummyVideoEncoderFactory(); + void Destroy(); + + void AddCapturer(UnityVideoCapturer* _capturer) { capturers.push_back(_capturer); } + UnityEncoder* CreatePlatformEncoder(EncoderPlatform platform, int width, int height, int bitRate); + UnityEncoder* GetPlatformEncoder(EncoderPlatform platform, int width, int height, int bitRate); private: - NvVideoCapturer* capturer; + std::list capturers; + std::list unityEncoders; }; } diff --git a/Plugin/WebRTCPlugin/NvEncoder.cpp b/Plugin/WebRTCPlugin/NvEncoder.cpp index c12e17d78..bc805c406 100644 --- a/Plugin/WebRTCPlugin/NvEncoder.cpp +++ b/Plugin/WebRTCPlugin/NvEncoder.cpp @@ -6,29 +6,56 @@ namespace WebRTC { - NvEncoder::NvEncoder(int width, int height) :width(width), height(height) + std::list NvEncoder::nvEncoderInputTextureList; + NvEncoder::NvEncoder() { - LogPrint(StringFormat("width is %d, height is %d", width, height).c_str()); + if (pEncoderInterface==nullptr) + { + bool result = true; +#pragma region open an encode session + //open an encode session + NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS openEncdoeSessionExParams = { 0 }; + openEncdoeSessionExParams.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER; + openEncdoeSessionExParams.device = g_D3D11Device; + openEncdoeSessionExParams.deviceType = NV_ENC_DEVICE_TYPE_DIRECTX; + openEncdoeSessionExParams.apiVersion = NVENCAPI_VERSION; + result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncOpenEncodeSessionEx(&openEncdoeSessionExParams, &pEncoderInterface))); + checkf(result, "Unable to open NvEnc encode session"); + LogPrint(StringFormat("OpenEncodeSession Error is %d", errorCode).c_str()); +#pragma endregion + } + + } + + NvEncoder::~NvEncoder() + { + ReleaseEncoderResources(); + if (pEncoderInterface) + { + bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyEncoder(pEncoderInterface)); + checkf(result, "Failed to destroy NV encoder interface"); + pEncoderInterface = nullptr; + } + + } + + void NvEncoder::InitEncoder(int width, int height, int _bitRate) + { + encodeWidth = width; + encodeHeight = height; + bitRate = _bitRate; + + LogPrint(StringFormat("width is %d, height is %d", encodeWidth, encodeHeight).c_str()); checkf(g_D3D11Device != nullptr, "D3D11Device is invalid"); - checkf(width > 0 && height > 0, "Invalid width or height!"); + checkf(encodeWidth > 0 && encodeHeight > 0, "Invalid width or height!"); + bool result = true; -#pragma region open an encode session - //open an encode session - NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS openEncdoeSessionExParams = { 0 }; - openEncdoeSessionExParams.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER; - openEncdoeSessionExParams.device = g_D3D11Device; - openEncdoeSessionExParams.deviceType = NV_ENC_DEVICE_TYPE_DIRECTX; - openEncdoeSessionExParams.apiVersion = NVENCAPI_VERSION; - result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncOpenEncodeSessionEx(&openEncdoeSessionExParams, &pEncoderInterface))); - checkf(result, "Unable to open NvEnc encode session"); - LogPrint(StringFormat("OpenEncodeSession Error is %d", errorCode).c_str()); -#pragma endregion #pragma region set initialization parameters nvEncInitializeParams.version = NV_ENC_INITIALIZE_PARAMS_VER; - nvEncInitializeParams.encodeWidth = width; - nvEncInitializeParams.encodeHeight = height; - nvEncInitializeParams.darWidth = width; - 
nvEncInitializeParams.darHeight = height; + nvEncInitializeParams.encodeWidth = encodeWidth; + nvEncInitializeParams.encodeHeight = encodeHeight; + nvEncInitializeParams.darWidth = encodeWidth; + nvEncInitializeParams.darHeight = encodeHeight; nvEncInitializeParams.encodeGUID = NV_ENC_CODEC_H264_GUID; nvEncInitializeParams.presetGUID = NV_ENC_PRESET_LOW_LATENCY_HQ_GUID; nvEncInitializeParams.frameRateNum = frameRate; @@ -37,8 +64,8 @@ namespace WebRTC nvEncInitializeParams.reportSliceOffsets = 0; nvEncInitializeParams.enableSubFrameWrite = 0; nvEncInitializeParams.encodeConfig = &nvEncConfig; - nvEncInitializeParams.maxEncodeWidth = 3840; - nvEncInitializeParams.maxEncodeHeight = 2160; + nvEncInitializeParams.maxEncodeWidth = encodeWidth;//3840; + nvEncInitializeParams.maxEncodeHeight = encodeHeight;//2160; #pragma endregion #pragma region get preset ocnfig and set it NV_ENC_PRESET_CONFIG presetConfig = { 0 }; @@ -74,20 +101,10 @@ namespace WebRTC #pragma endregion InitEncoderResources(); isNvEncoderSupported = true; - } - NvEncoder::~NvEncoder() - { - ReleaseEncoderResources(); - if (pEncoderInterface) - { - bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyEncoder(pEncoderInterface)); - checkf(result, "Failed to destroy NV encoder interface"); - pEncoderInterface = nullptr; - } - + isInitialize = true; } - void NvEncoder::UpdateSettings() + void NvEncoder::UpdateSettings(int width, int height) { bool settingChanged = false; if (nvEncConfig.rcParams.averageBitRate != bitRate) @@ -120,11 +137,13 @@ namespace WebRTC lastBitRate = bitRate; } } + //entry for encoding a frame - void NvEncoder::EncodeFrame() + void NvEncoder::EncodeFrame(int width, int height) { - UpdateSettings(); + UpdateSettings(width, height); uint32 bufferIndexToWrite = frameCount % bufferedFrameNum; + Frame& frame = bufferedFrames[bufferIndexToWrite]; #pragma region set frame params //no free buffer, skip this frame @@ -140,8 +159,8 @@ namespace WebRTC picParams.pictureStruct = NV_ENC_PIC_STRUCT_FRAME; picParams.inputBuffer = frame.inputFrame.mappedResource; picParams.bufferFmt = frame.inputFrame.bufferFormat; - picParams.inputWidth = nvEncInitializeParams.encodeWidth; - picParams.inputHeight = nvEncInitializeParams.encodeHeight; + picParams.inputWidth = width; + picParams.inputHeight = height; picParams.outputBitstream = frame.outputFrame; picParams.inputTimeStamp = frameCount; #pragma endregion @@ -151,13 +170,15 @@ namespace WebRTC picParams.encodePicFlags |= NV_ENC_PIC_FLAG_FORCEIDR; } isIdrFrame = false; + bool result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncEncodePicture(pEncoderInterface, &picParams))); checkf(result, StringFormat("Failed to encode frame, error is %d", errorCode).c_str()); + #pragma endregion ProcessEncodedFrame(frame); frameCount++; } - + //get encoded frame void NvEncoder::ProcessEncodedFrame(Frame& frame) { @@ -166,12 +187,15 @@ namespace WebRTC { return; } + frame.isEncoding = false; + #pragma region retrieve encoded frame from output buffer NV_ENC_LOCK_BITSTREAM lockBitStream = { 0 }; lockBitStream.version = NV_ENC_LOCK_BITSTREAM_VER; lockBitStream.outputBitstream = frame.outputFrame; lockBitStream.doNotWait = nvEncInitializeParams.enableEncodeAsync; + bool result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncLockBitstream(pEncoderInterface, &lockBitStream))); checkf(result, StringFormat("Failed to lock bit stream, error is %d", errorCode).c_str()); if (lockBitStream.bitstreamSizeInBytes) @@ -179,20 
+203,19 @@ namespace WebRTC frame.encodedFrame.resize(lockBitStream.bitstreamSizeInBytes); std::memcpy(frame.encodedFrame.data(), lockBitStream.bitstreamBufferPtr, lockBitStream.bitstreamSizeInBytes); } - result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncUnlockBitstream(pEncoderInterface, frame.outputFrame))); checkf(result, StringFormat("Failed to unlock bit stream, error is %d", errorCode).c_str()); frame.isIdrFrame = lockBitStream.pictureType == NV_ENC_PIC_TYPE_IDR; #pragma endregion - CaptureFrame(frame.encodedFrame); + captureFrame(frame.encodedFrame); } ID3D11Texture2D* NvEncoder::AllocateInputBuffers() { ID3D11Texture2D* inputTextures = nullptr; D3D11_TEXTURE2D_DESC desc = { 0 }; - desc.Width = width; - desc.Height = height; + desc.Width = encodeWidth; + desc.Height = encodeHeight; desc.MipLevels = 1; desc.ArraySize = 1; desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; @@ -200,7 +223,7 @@ namespace WebRTC desc.Usage = D3D11_USAGE_DEFAULT; desc.BindFlags = D3D11_BIND_RENDER_TARGET; desc.CPUAccessFlags = 0; - g_D3D11Device->CreateTexture2D(&desc, NULL, &inputTextures); + HRESULT r = g_D3D11Device->CreateTexture2D(&desc, NULL, &inputTextures); return inputTextures; } NV_ENC_REGISTERED_PTR NvEncoder::RegisterResource(void *buffer) @@ -212,8 +235,8 @@ namespace WebRTC if (!registerResource.resourceToRegister) LogPrint("resource is not initialized"); - registerResource.width = width; - registerResource.height = height; + registerResource.width = encodeWidth; + registerResource.height = encodeHeight; LogPrint(StringFormat("nvEncRegisterResource: width is %d, height is %d", registerResource.width, registerResource.height).c_str()); registerResource.bufferFormat = NV_ENC_BUFFER_FORMAT_ARGB; checkf(NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncRegisterResource(pEncoderInterface, ®isterResource))), @@ -237,13 +260,38 @@ namespace WebRTC StringFormat("nvEncCreateBitstreamBuffer error is %d", errorCode).c_str()); return createBitstreamBuffer.bitstreamBuffer; } + + void NvEncoder::DestroyEncoderTexture() + { + for (std::list::iterator it = nvEncoderInputTextureList.begin(); it != nvEncoderInputTextureList.end(); ++it) + { + delete (*it); + } + nvEncoderInputTextureList.clear(); + } + + UnityFrameBuffer* NvEncoder::getEncoderTexture(int width, int height) + { + for (std::list::iterator it = nvEncoderInputTextureList.begin(); it!= nvEncoderInputTextureList.end(); ++it) + { + if ( (*it)->width==width && (*it)->height==height ) + { + return (*it)->texture; + } + } + + EncoderInputTexture* pEncoderInputTexture = new EncoderInputTexture(width, height); + nvEncoderInputTextureList.push_back(pEncoderInputTexture); + return pEncoderInputTexture->texture; + } + void NvEncoder::InitEncoderResources() { - for (uint32 i = 0; i < bufferedFrameNum; i++) + nvEncoderTexture = getEncoderTexture(encodeWidth, encodeHeight); + for (int i = 0; i < bufferedFrameNum; i++) { - renderTextures[i] = AllocateInputBuffers(); Frame& frame = bufferedFrames[i]; - frame.inputFrame.registeredResource = RegisterResource(renderTextures[i]); + frame.inputFrame.registeredResource = RegisterResource(nvEncoderTexture); frame.inputFrame.bufferFormat = NV_ENC_BUFFER_FORMAT_ARGB; MapResources(frame.inputFrame); frame.outputFrame = InitializeBitstreamBuffer(); @@ -263,10 +311,13 @@ namespace WebRTC { for (Frame& frame : bufferedFrames) { - ReleaseFrameInputBuffer(frame); - bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyBitstreamBuffer(pEncoderInterface, 
frame.outputFrame)); - checkf(result, "Failed to destroy output buffer bit stream"); - frame.outputFrame = nullptr; + if (frame.outputFrame!=nullptr) + { + ReleaseFrameInputBuffer(frame); + bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyBitstreamBuffer(pEncoderInterface, frame.outputFrame)); + checkf(result, "Failed to destroy output buffer bit stream"); + frame.outputFrame = nullptr; + } } } } diff --git a/Plugin/WebRTCPlugin/NvEncoder.h b/Plugin/WebRTCPlugin/NvEncoder.h index 490b92f8e..6a119765b 100644 --- a/Plugin/WebRTCPlugin/NvEncoder.h +++ b/Plugin/WebRTCPlugin/NvEncoder.h @@ -4,11 +4,12 @@ #include "nvEncodeAPI.h" #include #include +#include "UnityEncoder.h" namespace WebRTC { using OutputFrame = NV_ENC_OUTPUT_PTR; - class NvEncoder + class NvEncoder : public UnityEncoder { private: struct InputFrame @@ -28,21 +29,55 @@ namespace WebRTC std::atomic isEncoding = false; }; + struct EncoderInputTexture + { + UnityFrameBuffer* texture; + int width; + int height; + EncoderInputTexture(int w, int h) + { + width = w; + height = h; + D3D11_TEXTURE2D_DESC desc = { 0 }; + desc.Width = width; + desc.Height = height; + desc.MipLevels = 1; + desc.ArraySize = 1; + desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; + desc.SampleDesc.Count = 1; + desc.Usage = D3D11_USAGE_DEFAULT; + desc.BindFlags = D3D11_BIND_RENDER_TARGET; + desc.CPUAccessFlags = 0; + HRESULT r = g_D3D11Device->CreateTexture2D(&desc, NULL, &texture); + } + + ~EncoderInputTexture() + { + texture->Release(); + texture = nullptr; + } + }; + public: - NvEncoder(int width, int height); + NvEncoder(); ~NvEncoder(); void SetRate(uint32 rate); - void UpdateSettings(); - void EncodeFrame(); + void UpdateSettings(int width, int height); + void EncodeFrame(int width, int height); bool IsSupported() const { return isNvEncoderSupported; } void SetIdrFrame() { isIdrFrame = true; } uint64 GetCurrentFrameCount() { return frameCount; } - sigslot::signal1&> CaptureFrame; + void InitEncoder(int width, int height, int _bitRate); void InitEncoderResources(); - + void* getRenderTexture() { return nvEncoderTexture; } + int getEncodeWidth() { return encodeWidth; } + int getEncodeHeight() { return encodeHeight; } + int getBitRate() { return bitRate; } + static void DestroyEncoderTexture(); + private: + static UnityFrameBuffer* getEncoderTexture(int width, int height); private: - void LoadNvEncApi(); void ReleaseFrameInputBuffer(Frame& frame); void ReleaseEncoderResources(); void ProcessEncodedFrame(Frame& frame); @@ -54,12 +89,15 @@ namespace WebRTC NV_ENC_CONFIG nvEncConfig = {}; _NVENCSTATUS errorCode; Frame bufferedFrames[bufferedFrameNum]; + static std::list nvEncoderInputTextureList; + UnityFrameBuffer* nvEncoderTexture; uint64 frameCount = 0; void* pEncoderInterface = nullptr; bool isNvEncoderSupported = false; + bool isInitialize = false; bool isIdrFrame = false; - int width = 1920; - int height = 1080; + int encodeWidth; + int encodeHeight; //10Mbps int bitRate = 10000000; //100Mbps diff --git a/Plugin/WebRTCPlugin/UnityEncoder.cpp b/Plugin/WebRTCPlugin/UnityEncoder.cpp new file mode 100644 index 000000000..52c58150e --- /dev/null +++ b/Plugin/WebRTCPlugin/UnityEncoder.cpp @@ -0,0 +1,13 @@ +#include "pch.h" +#include "UnityEncoder.h" + +namespace WebRTC +{ + UnityEncoder::UnityEncoder() + { + } + + UnityEncoder::~UnityEncoder() + { + } +} diff --git a/Plugin/WebRTCPlugin/UnityEncoder.h b/Plugin/WebRTCPlugin/UnityEncoder.h new file mode 100644 index 000000000..ecc80a058 --- /dev/null +++ 
+++ b/Plugin/WebRTCPlugin/UnityEncoder.h
@@ -0,0 +1,25 @@
+#pragma once
+
+namespace WebRTC
+{
+    class UnityEncoder
+    {
+    public:
+        UnityEncoder();
+        virtual ~UnityEncoder();
+        sigslot::signal1<std::vector<uint8>&> captureFrame;
+        virtual void SetRate(uint32 rate) = 0;
+        virtual void UpdateSettings(int width, int height) = 0;
+        virtual void EncodeFrame(int width, int height) = 0;
+        virtual bool IsSupported() const = 0;
+        virtual void SetIdrFrame() = 0;
+        virtual uint64 GetCurrentFrameCount() = 0;
+        virtual void InitEncoder(int width, int height, int _bitRate) = 0;
+        virtual void InitEncoderResources() = 0;
+        virtual void* getRenderTexture() = 0;
+        virtual int getEncodeWidth() = 0;
+        virtual int getEncodeHeight() = 0;
+        virtual int getBitRate() = 0;
+    };
+}
+
diff --git a/Plugin/WebRTCPlugin/NvVideoCapturer.cpp b/Plugin/WebRTCPlugin/UnityVideoCapturer.cpp
similarity index 55%
rename from Plugin/WebRTCPlugin/NvVideoCapturer.cpp
rename to Plugin/WebRTCPlugin/UnityVideoCapturer.cpp
index 9e9a43012..2383a7897 100644
--- a/Plugin/WebRTCPlugin/NvVideoCapturer.cpp
+++ b/Plugin/WebRTCPlugin/UnityVideoCapturer.cpp
@@ -1,23 +1,22 @@
 #include "pch.h"
-#include "NvVideoCapturer.h"
+#include "UnityVideoCapturer.h"
 namespace WebRTC
 {
-    NvVideoCapturer::NvVideoCapturer()
+    UnityVideoCapturer::UnityVideoCapturer(UnityEncoder* pEncoder, int _width, int _height) : nvEncoder(pEncoder), width(_width), height(_height)
     {
         set_enable_video_adapter(false);
         SetSupportedFormats(std::vector<cricket::VideoFormat>(1, cricket::VideoFormat(width, height, cricket::VideoFormat::FpsToInterval(framerate), cricket::FOURCC_H264)));
     }
-    void NvVideoCapturer::EncodeVideoData()
+    void UnityVideoCapturer::EncodeVideoData()
     {
         if (captureStarted && !captureStopped)
         {
-            int curFrameNum = nvEncoder->GetCurrentFrameCount() % bufferedFrameNum;
-            context->CopyResource(renderTextures[curFrameNum], unityRT);
-            nvEncoder->EncodeFrame();
+            context->CopyResource((ID3D11Resource*)nvEncoder->getRenderTexture(), unityRT);
+            nvEncoder->EncodeFrame(width, height);
         }
     }
-    void NvVideoCapturer::CaptureFrame(std::vector<uint8>& data)
+    void UnityVideoCapturer::CaptureFrame(std::vector<uint8>& data)
     {
         rtc::scoped_refptr<FrameBuffer> buffer = new rtc::RefCountedObject<FrameBuffer>(width, height, data);
         int64 timestamp = rtc::TimeMillis();
@@ -25,23 +24,22 @@ namespace WebRTC
         videoFrame.set_ntp_time_ms(timestamp);
         OnFrame(videoFrame, width, height);
     }
-    void NvVideoCapturer::StartEncoder()
+    void UnityVideoCapturer::StartEncoder()
     {
         captureStarted = true;
         SetKeyFrame();
     }
-    void NvVideoCapturer::SetKeyFrame()
+    void UnityVideoCapturer::SetKeyFrame()
     {
         nvEncoder->SetIdrFrame();
     }
-    void NvVideoCapturer::SetRate(uint32 rate)
+    void UnityVideoCapturer::SetRate(uint32 rate)
     {
         nvEncoder->SetRate(rate);
     }
-    void NvVideoCapturer::InitializeEncoder(int32 width, int32 height)
+    void UnityVideoCapturer::InitializeEncoder()
     {
-        nvEncoder = std::make_unique<NvEncoder>(width, height);
-        nvEncoder->CaptureFrame.connect(this, &NvVideoCapturer::CaptureFrame);
+        nvEncoder->captureFrame.connect(this, &UnityVideoCapturer::CaptureFrame);
     }
 }
diff --git a/Plugin/WebRTCPlugin/NvVideoCapturer.h b/Plugin/WebRTCPlugin/UnityVideoCapturer.h
similarity index 90%
rename from Plugin/WebRTCPlugin/NvVideoCapturer.h
rename to Plugin/WebRTCPlugin/UnityVideoCapturer.h
index 5686f2216..11777dcce 100644
--- a/Plugin/WebRTCPlugin/NvVideoCapturer.h
+++ b/Plugin/WebRTCPlugin/UnityVideoCapturer.h
@@ -1,12 +1,12 @@
 #pragma once
-#include "NvEncoder.h"
+#include "UnityEncoder.h"
 namespace WebRTC
 {
-    class NvVideoCapturer : public cricket::VideoCapturer
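Review note (not part of the patch): with UnityEncoder in place, UnityVideoCapturer talks only to the base class and its captureFrame signal (the member is still named nvEncoder, which may be worth renaming). A hedged sketch of what a second, hypothetical backend would have to provide, assuming pch.h supplies the uint8/uint32/uint64 aliases and sigslot:

    #include "pch.h"            // uint8/uint32/uint64, sigslot (assumption)
    #include "UnityEncoder.h"   // the interface added by this PR
    #include <vector>

    namespace WebRTC
    {
        class NullEncoder : public UnityEncoder   // hypothetical example backend
        {
        public:
            void InitEncoder(int width, int height, int _bitRate) override
            {
                encodeWidth = width;
                encodeHeight = height;
                bitRate = _bitRate;
            }
            void InitEncoderResources() override {}
            void EncodeFrame(int width, int height) override
            {
                std::vector<uint8> encoded;   // a real backend would encode here
                captureFrame(encoded);        // same signal NvEncoder now fires
                ++frameCount;
            }
            void UpdateSettings(int width, int height) override {}
            void SetRate(uint32 rate) override {}
            void SetIdrFrame() override {}
            bool IsSupported() const override { return true; }
            uint64 GetCurrentFrameCount() override { return frameCount; }
            void* getRenderTexture() override { return nullptr; }
            int getEncodeWidth() override { return encodeWidth; }
            int getEncodeHeight() override { return encodeHeight; }
            int getBitRate() override { return bitRate; }

        private:
            int encodeWidth = 0;
            int encodeHeight = 0;
            int bitRate = 0;
            uint64 frameCount = 0;
        };
    }

The capturer wires itself up in InitializeEncoder() via nvEncoder->captureFrame.connect(this, &UnityVideoCapturer::CaptureFrame), so a subclass only has to fire captureFrame with each encoded buffer.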
+    class UnityVideoCapturer : public cricket::VideoCapturer
     {
     public:
-        NvVideoCapturer();
+        UnityVideoCapturer(UnityEncoder* pEncoder, int _width, int _height);
         void EncodeVideoData();
         // Start the video capturer with the specified capture format.
         virtual cricket::CaptureState Start(const cricket::VideoFormat& Format) override
@@ -17,7 +17,6 @@ namespace WebRTC
         virtual void Stop() override
         {
             captureStopped = true;
-            nvEncoder.reset();
         }
         // Check if the video capturer is running.
         virtual bool IsRunning() override
@@ -31,7 +30,7 @@ namespace WebRTC
             return false;
         }
         void StartEncoder();
-        void InitializeEncoder(int32 width, int32 height);
+        void InitializeEncoder();
         void SetKeyFrame();
         void SetRate(uint32 rate);
         void CaptureFrame(std::vector<uint8>& data);
@@ -46,11 +45,11 @@ namespace WebRTC
             fourccs->push_back(cricket::FOURCC_H264);
             return true;
         }
-        std::unique_ptr<NvEncoder> nvEncoder;
+        UnityEncoder* nvEncoder;
         //just fake info
-        const int32 width = 1280;
-        const int32 height = 720;
+        int32 width;
+        int32 height;
         const int32 framerate = 60;
         bool captureStarted = false;
diff --git a/Plugin/WebRTCPlugin/WebRTCPlugin.cpp b/Plugin/WebRTCPlugin/WebRTCPlugin.cpp
index b525243ed..1cf4ae2c5 100644
--- a/Plugin/WebRTCPlugin/WebRTCPlugin.cpp
+++ b/Plugin/WebRTCPlugin/WebRTCPlugin.cpp
@@ -34,20 +34,25 @@ extern "C"
         return ContextManager::GetInstance()->GetCodecInitializationResult();
     }
-    UNITY_INTERFACE_EXPORT webrtc::MediaStreamInterface* CaptureVideoStream(Context* context, UnityFrameBuffer* rt, int32 width, int32 height)
+    UNITY_INTERFACE_EXPORT webrtc::MediaStreamInterface* CreateMediaStream(Context* context, const char* label)
     {
-        context->InitializeEncoder(width, height);
-        return context->CreateVideoStream(rt);
+        return context->CreateMediaStream(label);
     }
-    //TODO: Multi-track support
-    UNITY_INTERFACE_EXPORT void StopMediaStreamTrack(Context* context, webrtc::MediaStreamTrackInterface* track)
+
+    UNITY_INTERFACE_EXPORT webrtc::MediaStreamTrackInterface* CreateVideoTrack(Context* context, const char* label, UnityFrameBuffer* frameBuffer, int32 width, int32 height, int32 bitRate)
     {
-        context->StopCapturer();
+        return context->CreateVideoTrack(label, frameBuffer, width, height, bitRate);
     }
-    UNITY_INTERFACE_EXPORT webrtc::MediaStreamInterface* CaptureAudioStream(Context* context)
+    UNITY_INTERFACE_EXPORT webrtc::MediaStreamTrackInterface* CreateAudioTrack(Context* context, const char* label)
+    {
+        return context->CreateAudioTrack(label);
+    }
+
+    //TODO: Multi-track support
+    UNITY_INTERFACE_EXPORT void StopMediaStreamTrack(Context* context, webrtc::MediaStreamTrackInterface* track)
     {
-        return context->CreateAudioStream();
+        context->StopCapturer();
     }
     UNITY_INTERFACE_EXPORT void MediaStreamAddTrack(webrtc::MediaStreamInterface* stream, webrtc::MediaStreamTrackInterface* track)
@@ -61,6 +66,7 @@ extern "C"
             stream->AddTrack((webrtc::VideoTrackInterface*)track);
         }
     }
+
     UNITY_INTERFACE_EXPORT void MediaStreamRemoveTrack(webrtc::MediaStreamInterface* stream, webrtc::MediaStreamTrackInterface* track)
     {
         if (track->kind() == "audio")
@@ -187,9 +193,9 @@ extern "C"
     {
         obj->Close();
     }
-    UNITY_INTERFACE_EXPORT webrtc::RtpSenderInterface* PeerConnectionAddTrack(PeerConnectionObject* obj, webrtc::MediaStreamTrackInterface* track)
+    UNITY_INTERFACE_EXPORT webrtc::RtpSenderInterface* PeerConnectionAddTrack(PeerConnectionObject* obj, webrtc::MediaStreamTrackInterface* track, const char* mediaStreamId)
     {
-        return obj->connection->AddTrack(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>(track), { "unity" }).value().get();
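Review note (not part of the patch): the exported C API changes shape here: the old CaptureVideoStream/CaptureAudioStream pair is replaced by explicit stream and track factories, and PeerConnectionAddTrack now carries the owning stream's id. A hedged sketch of the call order this implies; SetupStream, the "unity_stream"/"video"/"audio" labels, and the pre-existing context/pc/frameBuffer handles are illustrative assumptions, while the exported functions are the ones visible in this hunk:

    // Assumes the UNITY_INTERFACE_EXPORT declarations from WebRTCPlugin.cpp are visible.
    void SetupStream(Context* context, PeerConnectionObject* pc, UnityFrameBuffer* frameBuffer)
    {
        webrtc::MediaStreamInterface* stream = CreateMediaStream(context, "unity_stream");

        webrtc::MediaStreamTrackInterface* video =
            CreateVideoTrack(context, "video", frameBuffer, 1280, 720, 10000000);
        webrtc::MediaStreamTrackInterface* audio = CreateAudioTrack(context, "audio");

        MediaStreamAddTrack(stream, video);
        MediaStreamAddTrack(stream, audio);

        // The stream id keeps senders grouped per stream, which is what makes the
        // two captureCameras in samplescene.unity addressable as separate streams.
        PeerConnectionAddTrack(pc, video, "unity_stream");
        PeerConnectionAddTrack(pc, audio, "unity_stream");
    }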
+        return obj->connection->AddTrack(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>(track), { mediaStreamId }).value().get();
     }
     UNITY_INTERFACE_EXPORT void PeerConnectionRemoveTrack(PeerConnectionObject* obj, webrtc::RtpSenderInterface* sender)
diff --git a/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj b/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj
index 6f22abd39..6125b879b 100644
--- a/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj
+++ b/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj
@@ -180,7 +180,8 @@
-    <ClInclude Include="NvVideoCapturer.h" />
+    <ClInclude Include="UnityEncoder.h" />
+    <ClInclude Include="UnityVideoCapturer.h" />
@@ -193,7 +194,8 @@
-    <ClCompile Include="NvVideoCapturer.cpp" />
+    <ClCompile Include="UnityEncoder.cpp" />
+    <ClCompile Include="UnityVideoCapturer.cpp" />
     <ClCompile Include="pch.cpp">
       <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">Create</PrecompiledHeader>
       <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">Create</PrecompiledHeader>
diff --git a/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj.filters b/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj.filters
index 0a0d2a3a4..d3885ee91 100644
--- a/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj.filters
+++ b/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj.filters
@@ -42,9 +42,6 @@
       <Filter>Header Files</Filter>
-    <ClInclude Include="NvVideoCapturer.h">
-      <Filter>Header Files</Filter>
-    </ClInclude>
       <Filter>Header Files</Filter>
@@ -60,6 +57,12 @@
       <Filter>Header Files\Unity</Filter>
+    <ClInclude Include="UnityEncoder.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="UnityVideoCapturer.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
@@ -83,9 +86,6 @@
       <Filter>Source Files</Filter>
-    <ClCompile Include="NvVideoCapturer.cpp">
-      <Filter>Source Files</Filter>
-    </ClCompile>
       <Filter>Source Files</Filter>
@@ -95,5 +95,11 @@
       <Filter>Source Files</Filter>
+    <ClCompile Include="UnityEncoder.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="UnityVideoCapturer.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
 </Project>
\ No newline at end of file
diff --git a/Plugin/WebRTCPlugin/pch.h b/Plugin/WebRTCPlugin/pch.h
index 0be103dff..231ba92ba 100644
--- a/Plugin/WebRTCPlugin/pch.h
+++ b/Plugin/WebRTCPlugin/pch.h
@@ -91,8 +91,7 @@ namespace WebRTC
     using int32 = signed int;
     using int64 = signed long long;
-    const uint32 bufferedFrameNum = 3;
-    extern UnityFrameBuffer* renderTextures[bufferedFrameNum];
+    const uint32 bufferedFrameNum = 1;
     extern ID3D11DeviceContext* context;
     extern ID3D11Device* g_D3D11Device;
 }
diff --git a/RenderStreamingSampleSrc~/RS_HDRPSampleSrc_1.x/ProjectSettings/ProjectVersion.txt b/RenderStreamingSampleSrc~/RS_HDRPSampleSrc_1.x/ProjectSettings/ProjectVersion.txt
index e14b94af0..00a3cf8f0 100644
--- a/RenderStreamingSampleSrc~/RS_HDRPSampleSrc_1.x/ProjectSettings/ProjectVersion.txt
+++ b/RenderStreamingSampleSrc~/RS_HDRPSampleSrc_1.x/ProjectSettings/ProjectVersion.txt
@@ -1,2 +1,2 @@
-m_EditorVersion: 2019.1.9f1
-m_EditorVersionWithRevision: 2019.1.9f1 (d5f1b37da199)
+m_EditorVersion: 2019.1.14f1
+m_EditorVersionWithRevision: 2019.1.14f1 (148b5891095a)
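Review note (not part of the patch): the pch.h hunk above drops bufferedFrameNum from 3 to 1 and deletes the renderTextures array; with every frame now copied into the single pooled input texture, a buffer depth greater than 1 would let an in-flight encode read a texture the next CopyResource is already overwriting. If that invariant is intended to be permanent, a compile-time guard (my suggestion, not in the patch) would document it:

    #include "pch.h"   // defines: const uint32 bufferedFrameNum = 1;

    // With one shared input texture, single-buffering is the only safe depth
    // unless an explicit GPU fence/wait is added around the copy.
    static_assert(WebRTC::bufferedFrameNum == 1,
                  "shared encoder input texture assumes single-buffered frames");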
diff --git a/WebApp/public/scripts/app.js b/WebApp/public/scripts/app.js
index 13f2328d9..3c14a8bec 100644
--- a/WebApp/public/scripts/app.js
+++ b/WebApp/public/scripts/app.js
@@ -33,11 +33,20 @@ function onClickPlayButton() {
   const playerDiv = document.getElementById('player');
   // add video player
-  const elementVideo = document.createElement('video');
-  elementVideo.id = 'Video';
-  elementVideo.style.touchAction = 'none';
-  playerDiv.appendChild(elementVideo);
-  setupVideoPlayer(elementVideo).then(value => videoPlayer = value);
+  let elementVideos = [];
+  for (let i = 0; i < 2; i++)
+  {
+    const elementVideo = document.createElement('video');
+    elementVideo.id = "Video" + i;
+    elementVideo.style.touchAction = 'none';
+    playerDiv.appendChild(elementVideo);
+
+    elementVideos.push(elementVideo);
+  }
+
+
+  setupVideoPlayer(elementVideos).then(value => videoPlayer = value);
+
   // add green button
   const elementBlueButton = document.createElement('button');
@@ -92,15 +101,117 @@ function onClickPlayButton() {
       elementFullscreenButton.style.display = 'block';
     }
   }
+
 }

-async function setupVideoPlayer(element, config) {
-  const videoPlayer = new VideoPlayer(element, config);
+function setupMediaSelector(options)
+{
+  const playerDiv = document.getElementById('player');
+  let mediaSelectDiv = document.createElement("div");
+  mediaSelectDiv.id = "mediaSelect";
+  mediaSelectDiv.setAttribute("style", "width:200px;");
+  mediaSelectDiv.className = "custom-select";
+  playerDiv.appendChild(mediaSelectDiv);
+  const mediaSelect = document.createElement("select");
+  mediaSelectDiv.appendChild(mediaSelect);
+  let index = 0;
+  options.forEach(option => {
+    let optionItem = document.createElement("Option");
+    optionItem.value = index++;
+    optionItem.innerHTML = option;
+    mediaSelect.appendChild(optionItem);
+  })
+
+  let customSelects, selElmnt;
+  /*look for any elements with the class "custom-select":*/
+  customSelects = document.getElementsByClassName("custom-select");
+  for (let i = 0; i < customSelects.length; i++) {
+    selElmnt = customSelects[i].getElementsByTagName("select")[0];
+    /*for each element, create a new DIV that will act as the selected item:*/
+    let a = document.createElement("DIV");
+    a.setAttribute("class", "select-selected");
+    a.innerHTML = selElmnt.options[selElmnt.selectedIndex].innerHTML;
+    customSelects[i].appendChild(a);
+    /*for each element, create a new DIV that will contain the option list:*/
+    let b = document.createElement("DIV");
+    b.setAttribute("class", "select-items select-hide");
+    for (let j = 1; j < selElmnt.length; j++) {
+      /*for each option in the original select element,
+      create a new DIV that will act as an option item:*/
+      let c = document.createElement("DIV");
+      c.innerHTML = selElmnt.options[j].innerHTML;
+      c.addEventListener("click", function(e) {
+        /*when an item is clicked, update the original select box,
+        and the selected item:*/
+        let y, i, k, s, h;
+        s = this.parentNode.parentNode.getElementsByTagName("select")[0];
+
+        videoPlayer.selectMediaStream(this.innerHTML);
+        console.log(this.innerHTML);
+
+        h = this.parentNode.previousSibling;
+        for (i = 0; i < s.length; i++) {
+          if (s.options[i].innerHTML == this.innerHTML) {
+            s.selectedIndex = i;
+            h.innerHTML = this.innerHTML;
+            y = this.parentNode.getElementsByClassName("same-as-selected");
+            for (k = 0; k < y.length; k++) {
+              y[k].removeAttribute("class");
+            }
+            this.setAttribute("class", "same-as-selected");
+            break;
+          }
+        }
+        h.click();
+      });
+      b.appendChild(c);
+    }
+    customSelects[i].appendChild(b);
+    a.addEventListener("click", function(e) {
+      /*when the select box is clicked, close any other select boxes,
+      and open/close the current select box:*/
+      e.stopPropagation();
+      closeAllSelect(this);
+      this.nextSibling.classList.toggle("select-hide");
+      this.classList.toggle("select-arrow-active");
+    });
+  }
+  function closeAllSelect(elmnt) {
+    /*a function that will close all select boxes in the document,
+    except the current select box:*/
+    var x, y, i, arrNo = [];
+    x = document.getElementsByClassName("select-items");
+    y = document.getElementsByClassName("select-selected");
+    for (i = 0; i < y.length; i++) {
+      if (elmnt == y[i]) {
+        arrNo.push(i)
+      } else {
+        y[i].classList.remove("select-arrow-active");
+      }
+    }
+    for (i = 0; i < x.length; i++) {
+      if (arrNo.indexOf(i)) {
+        x[i].classList.add("select-hide");
+      }
+    }
+  }
+  /*if the user clicks anywhere outside the select box,
+  then close all select boxes:*/
+  document.addEventListener("click", closeAllSelect);
+}
+
+async function setupVideoPlayer(elements, config) {
+  const videoPlayer = new VideoPlayer(elements, config);
   await videoPlayer.setupConnection();
   videoPlayer.ondisconnect = onDisconnect;
+  videoPlayer.onaddtrackfinish = onAddTrackFinish;
   registerKeyboardEvents(videoPlayer);
-  registerMouseEvents(videoPlayer, element);
+
+  elements.forEach(element => {
+    registerMouseEvents(videoPlayer, element);
+  });
   return videoPlayer;
 }
@@ -112,6 +223,17 @@ function onDisconnect() {
   showPlayButton();
 }
+
+function onAddTrackFinish(mediaStreams) {
+
+  let options = ["Select a media"];
+  for (let i = 0; i < mediaStreams.length; i++) {
+    options.push(mediaStreams[i].id);
+  }
+  setupMediaSelector(options);
+}
options = ["Select a media"]; + for (let i=0; i{ + v.playsInline = true; + v.addEventListener('loadedmetadata', function () { + v.play(); + _this.resizeVideo(); + }, true); + }) + +// TODO:: Fix "resizeVideo()" function to be able to use multiple videos +// this.video = element; +// this.video.playsInline = true; +// this.video.addEventListener('loadedmetadata', function () { +// _this.video.play(); +// _this.resizeVideo(); +// }, true); + this.interval = 3000; this.signaling = new Signaling(); this.ondisconnect = function(){}; + this.onaddtrackfinish = function (mediaStreams) {}; this.sleep = msec => new Promise(resolve => setTimeout(resolve, msec)); } @@ -28,6 +44,7 @@ export class VideoPlayer { } config.sdpSemantics = 'unified-plan'; config.iceServers = [{urls: ['stun:stun.l.google.com:19302']}]; + config.bundlePolicy = "max-bundle"; return config; } @@ -53,6 +70,13 @@ export class VideoPlayer { // Create peerConnection with proxy server and set up handlers this.pc = new RTCPeerConnection(this.cfg); + + this.pc.addTransceiver("video"); + this.pc.addTransceiver("audio"); + this.pc.addTransceiver("video"); + this.pc.addTransceiver("audio"); + + this.pc.onsignalingstatechange = function (e) { console.log('signalingState changed:', e); }; @@ -66,10 +90,32 @@ export class VideoPlayer { this.pc.onicegatheringstatechange = function (e) { console.log('iceGatheringState changed:', e); }; + let tempCount = 0; this.pc.ontrack = function (e) { + console.log('New track added: ', e.streams); - _this.video.srcObject = e.streams[0]; + console.log(e.track); + + if (_this.UnityStreams.indexOf(e.streams[0])==-1) + { + _this.UnityStreams.push(e.streams[0]); + if ( _this.UnityStreamCount==_this.UnityStreams.length ) + { + _this.onaddtrackfinish(_this.UnityStreams); + } + } }; + + _this.videos[0].onresize = function () { + console.log("video 0 width:=" + _this.videos[0].videoWidth); + console.log("video 0 height:=" + _this.videos[0].videoHeight); + } + + _this.videos[1].onresize = function () { + console.log("video 1 width:=" + _this.videos[1].videoWidth); + console.log("video 1 height:=" + _this.videos[1].videoHeight); + } + this.pc.onicecandidate = function (e) { if(e.candidate != null) { _this.signaling.sendCandidate(_this.sessionId, _this.connectionId, e.candidate.candidate, e.candidate.sdpMid, e.candidate.sdpMLineIndex); @@ -97,6 +143,7 @@ export class VideoPlayer { await this.createConnection(); // set local sdp offer.sdp = offer.sdp.replace(/useinbandfec=1/, 'useinbandfec=1;stereo=1;maxaveragebitrate=1048576'); + const desc = new RTCSessionDescription({sdp:offer.sdp, type:"offer"}); await this.pc.setLocalDescription(desc); await this.sendOffer(offer); @@ -129,6 +176,7 @@ export class VideoPlayer { if(answers.length > 0) { const answer = answers[0]; await this.setAnswer(sessionId, answer.sdp); + } await this.sleep(interval); } @@ -199,6 +247,18 @@ export class VideoPlayer { } }; + selectMediaStream(streamId){ + + for (let i=0; i