diff --git a/.yamato/upm-ci-webrtc.yml b/.yamato/upm-ci-webrtc.yml
index 987be2ceb..82b1b0b01 100644
--- a/.yamato/upm-ci-webrtc.yml
+++ b/.yamato/upm-ci-webrtc.yml
@@ -4,7 +4,7 @@ editors:
   - version: trunk
 platforms:
   - name: win
-    type: Unity::VM
+    type: Unity::VM::GPU
     # currently the project depends on MSBuild; we should replace it with CMake
     # image: package-ci/win10:stable
     image: renderstreaming/win10:latest
@@ -101,4 +101,4 @@ publish:
     - .yamato/upm-ci-webrtc.yml#test_{{ platform.name }}_{{ editor.version }}
   {% endfor %}
 {% endfor %}
-{% endfor %}
\ No newline at end of file
+{% endfor %}
diff --git a/Assets/Scripts/RenderStreaming.cs b/Assets/Scripts/RenderStreaming.cs
index 929188157..fae0a69c2 100644
--- a/Assets/Scripts/RenderStreaming.cs
+++ b/Assets/Scripts/RenderStreaming.cs
@@ -17,6 +17,12 @@ public class ButtonClickElement
     public ButtonClickEvent click;
 }
 
+public class CameraMediaStream
+{
+    public Camera camera;
+    public MediaStream[] mediaStreams = new MediaStream[2];
+}
+
 public class RenderStreaming : MonoBehaviour
 {
 #pragma warning disable 0649
@@ -29,11 +35,11 @@ public class RenderStreaming : MonoBehaviour
     [SerializeField, Tooltip("Time interval for polling from signaling server")]
     private float interval = 5.0f;
 
-    [SerializeField, Tooltip("Camera to capture video stream")]
-    private Camera captureCamera;
-
     [SerializeField]
     private ButtonClickElement[] arrayButtonClickEvent;
+
+    [SerializeField]
+    private bool isUseMinimalTextures = true;
 #pragma warning restore 0649
 
     private Signaling signaling;
@@ -41,7 +47,7 @@ public class RenderStreaming : MonoBehaviour
     private Dictionary> mapChannels = new Dictionary>();
     private RTCConfiguration conf;
     private string sessionId;
-    private MediaStream videoStream;
+    private Dictionary<Camera, CameraMediaStream> cameraMediaStreamDict = new Dictionary<Camera, CameraMediaStream>();
 
     public void Awake()
     {
@@ -52,6 +58,7 @@ public void Awake()
     public void OnDestroy()
     {
+        Audio.Stop();
         WebRTC.WebRTC.Finalize();
     }
     public IEnumerator Start()
@@ -60,7 +67,47 @@ public IEnumerator Start()
         {
             yield break;
         }
-        videoStream = captureCamera.CaptureStream(1280, 720);
+
+
+        if (isUseMinimalTextures)
+        {
+            foreach (var camera in Camera.allCameras)
+            {
+                CameraMediaStream cameraMediaStream = new CameraMediaStream();
+                cameraMediaStreamDict.Add(camera, cameraMediaStream);
+                camera.CreateRenderStreamTexture(1280, 720);
+                int mediaCount = cameraMediaStream.mediaStreams.Length;
+                for (int i = 0; i < mediaCount; ++i)
+                {
+                    cameraMediaStream.mediaStreams[i] = new MediaStream();
+                    RenderTexture rt = camera.GetStreamTexture(0);
+                    // Full bitrate for the first stream; each additional stream is scaled
+                    // down by (i + 1)^10, e.g. 1/1024 of the base rate for i == 1.
+                    int temp = i == 0 ? 1 : (int)Mathf.Pow(i + 1, 10);
+                    VideoStreamTrack videoTrack = new VideoStreamTrack("videoTrack" + i, rt, 1000000 / temp);
+                    cameraMediaStream.mediaStreams[i].AddTrack(videoTrack);
+                    cameraMediaStream.mediaStreams[i].AddTrack(new AudioStreamTrack("audioTrack"));
+                }
+            }
+        }
+        else
+        {
+            foreach (var camera in Camera.allCameras)
+            {
+                CameraMediaStream cameraMediaStream = new CameraMediaStream();
+                cameraMediaStreamDict.Add(camera, cameraMediaStream);
+                camera.CreateRenderStreamTexture(1280, 720, cameraMediaStream.mediaStreams.Length);
+                int texCount = camera.GetStreamTextureCount();
+                for (int i = 0; i < texCount; ++i)
+                {
+                    int index = i;
+                    cameraMediaStream.mediaStreams[i] = new MediaStream();
+                    RenderTexture rt = camera.GetStreamTexture(index);
+                    VideoStreamTrack videoTrack = new VideoStreamTrack("videoTrack" + i, rt);
+                    cameraMediaStream.mediaStreams[i].AddTrack(videoTrack);
+                    cameraMediaStream.mediaStreams[i].AddTrack(new AudioStreamTrack("audioTrack"));
+                }
+            }
+        }
+
+        Audio.Start();
+
         signaling = new Signaling(urlSignaling);
         var opCreate = signaling.Create();
         yield return opCreate;
@@ -125,27 +172,45 @@ IEnumerator GetOffer()
             {
                 continue;
             }
-            var pc = new RTCPeerConnection();
-            pcs.Add(offer.connectionId, pc);
+            RTCConfiguration config = default;
+            config.iceServers = new RTCIceServer[]
+            {
+                new RTCIceServer { urls = urlsIceServer },
+            };
+            config.bundle_policy = RTCBundlePolicy.kBundlePolicyMaxBundle;
+            var pc = new RTCPeerConnection(ref config);
+            pcs.Add(offer.connectionId, pc);
             pc.OnDataChannel = new DelegateOnDataChannel(channel => { OnDataChannel(pc, channel); });
-            pc.SetConfiguration(ref conf);
             pc.OnIceCandidate = new DelegateOnIceCandidate(candidate => { StartCoroutine(OnIceCandidate(offer.connectionId, candidate)); });
             pc.OnIceConnectionChange = new DelegateOnIceConnectionChange(state =>
             {
                 if (state == RTCIceConnectionState.Disconnected)
                 {
-                    pc.Close();
+                    pc.Close();
+                    pcs.Remove(offer.connectionId);
                 }
             });
+            // Make the video bitrate start at 16000 kbps and cap it at 160000 kbps.
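+            // Example of the rewrite below on a hypothetical H.264 fmtp line:
+            //   a=fmtp:102 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f
+            // becomes
+            //   a=fmtp:102 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f;x-google-start-bitrate=16000;x-google-max-bitrate=160000
+            // (x-google-* are Chromium-specific fmtp attributes, not standard SDP.)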
+            string pattern = @"(a=fmtp:\d+ .*level-asymmetry-allowed=.*)\r\n";
+            _desc.sdp = Regex.Replace(_desc.sdp, pattern, "$1;x-google-start-bitrate=16000;x-google-max-bitrate=160000\r\n");
+            Debug.Log("remote sdp---------------------------------------------------------");
+            Debug.Log(_desc.sdp);
+
             pc.SetRemoteDescription(ref _desc);
-            foreach (var track in videoStream.GetTracks())
+
+            foreach (var k in cameraMediaStreamDict.Keys)
             {
-                pc.AddTrack(track);
+                foreach (var mediaStream in cameraMediaStreamDict[k].mediaStreams)
+                {
+                    foreach (var track in mediaStream.GetTracks())
+                    {
+                        pc.AddTrack(track, mediaStream.Id);
+                    }
+                }
             }
+
             StartCoroutine(Answer(connectionId));
         }
     }
@@ -163,12 +228,14 @@ IEnumerator Answer(string connectionId)
         }
         var opLocalDesc = pc.SetLocalDescription(ref op.desc);
         yield return opLocalDesc;
+        Debug.Log("local sdp---------------------------------------------------------");
+        Debug.Log(op.desc.sdp);
         if (opLocalDesc.isError)
         {
             Debug.LogError($"Network Error: {opLocalDesc.error}");
             yield break;
         }
-        var op3 = signaling.PostAnswer(this.sessionId, connectionId, op.desc.sdp);
+        var op3 = signaling.PostAnswer(this.sessionId, connectionId, op.desc.sdp);
         yield return op3;
         if (op3.webRequest.isNetworkError)
         {
@@ -202,6 +269,7 @@ IEnumerator GetCandidate()
         {
             continue;
         }
+
         foreach (var candidate in candidateContainer.candidates)
         {
             RTCIceCandidate _candidate = default;
diff --git a/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStream.cs b/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStream.cs
index f96d4feac..e9fa3bf44 100644
--- a/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStream.cs
+++ b/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStream.cs
@@ -6,122 +6,46 @@ namespace Unity.WebRTC
 {
-    public class MediaStream
+    public class MediaStream
     {
-        private IntPtr self;
-        private string id;
+        internal IntPtr nativePtr;
+        internal string id;
+        protected List<MediaStreamTrack> mediaStreamTrackList = new List<MediaStreamTrack>();
+        private static int sMediaStreamCount = 0;
+
         public string Id { get => id; private set { } }
-        private Dictionary<MediaStreamTrack, RenderTexture[]> VideoTrackToRts;
-        private List<MediaStreamTrack> AudioTracks;
+        // Stream ids are assigned as "MediaStream1", "MediaStream2", ... in creation order.
+        public MediaStream() : base()
+        {
+            sMediaStreamCount++;
+            id = "MediaStream" + sMediaStreamCount;
+            nativePtr = WebRTC.Context.CreateMediaStream(id);
+        }
 
-        private void StopTrack(MediaStreamTrack track)
+        public MediaStream(MediaStreamTrack[] tracks) : base()
         {
+            sMediaStreamCount++;
+            id = "MediaStream" + sMediaStreamCount;
+            nativePtr = WebRTC.Context.CreateMediaStream(id);
 
-            if (track.Kind == TrackKind.Video)
-            {
-                NativeMethods.StopMediaStreamTrack(track.self);
-                RenderTexture[] rts = VideoTrackToRts[track];
-                if (rts != null)
-                {
-                    CameraExtension.RemoveRt(rts);
-                    rts[0].Release();
-                    rts[1].Release();
-                    UnityEngine.Object.Destroy(rts[0]);
-                    UnityEngine.Object.Destroy(rts[1]);
-                }
-            }
-            else
+            foreach (var t in tracks)
             {
-                Audio.Stop();
+                AddTrack(t);
             }
-
-        }
-        private RenderTexture[] GetRts(MediaStreamTrack track)
-        {
-            return VideoTrackToRts[track];
-        }
-        public MediaStreamTrack[] GetTracks()
-        {
-            MediaStreamTrack[] tracks = new MediaStreamTrack[VideoTrackToRts.Keys.Count + AudioTracks.Count];
-            AudioTracks.CopyTo(tracks, 0);
-            VideoTrackToRts.Keys.CopyTo(tracks, AudioTracks.Count);
-            return tracks;
-        }
-        public MediaStreamTrack[] GetAudioTracks()
-        {
-            return AudioTracks.ToArray();
-        }
-        public MediaStreamTrack[] GetVideoTracks()
-        {
-            MediaStreamTrack[] tracks = new MediaStreamTrack[VideoTrackToRts.Keys.Count];
-            VideoTrackToRts.Keys.CopyTo(tracks, 0);
-            return tracks;
-        }
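+        // AddTrack below registers the track with the native stream and mirrors it in
+        // mediaStreamTrackList, so GetTracks() is answered on the managed side without
+        // another native query.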
         public void AddTrack(MediaStreamTrack track)
         {
-            if (track.Kind == TrackKind.Video)
-            {
-                VideoTrackToRts[track] = track.getRts(track);
-            }
-            else
-            {
-                AudioTracks.Add(track);
-            }
-            NativeMethods.MediaStreamAddTrack(self, track.self);
-        }
-        public void RemoveTrack(MediaStreamTrack track)
-        {
-            NativeMethods.MediaStreamRemoveTrack(self, track.self);
-        }
-        //for camera CaptureStream
-        internal MediaStream(RenderTexture[] rts, IntPtr ptr)
-        {
-            self = ptr;
-            id = Marshal.PtrToStringAnsi(NativeMethods.MediaStreamGetID(self));
-            VideoTrackToRts = new Dictionary<MediaStreamTrack, RenderTexture[]>();
-            AudioTracks = new List<MediaStreamTrack>();
-            //get initial tracks
-            int trackSize = 0;
-            IntPtr tracksNativePtr = NativeMethods.MediaStreamGetVideoTracks(self, ref trackSize);
-            IntPtr[] tracksPtr = new IntPtr[trackSize];
-            Marshal.Copy(tracksNativePtr, tracksPtr, 0, trackSize);
-            //TODO: Linux compatibility
-            Marshal.FreeCoTaskMem(tracksNativePtr);
-            for (int i = 0; i < trackSize; i++)
-            {
-                MediaStreamTrack track = new MediaStreamTrack(tracksPtr[i]);
-                track.stopTrack += StopTrack;
-                track.getRts += GetRts;
-                VideoTrackToRts[track] = rts;
-            }
+            NativeMethods.MediaStreamAddTrack(nativePtr, track.nativePtr);
+            mediaStreamTrackList.Add(track);
         }
-        //for audio CaptureStream
-        internal MediaStream(IntPtr ptr)
-        {
-            self = ptr;
-            id = Marshal.PtrToStringAnsi(NativeMethods.MediaStreamGetID(self));
-            VideoTrackToRts = new Dictionary<MediaStreamTrack, RenderTexture[]>();
-            AudioTracks = new List<MediaStreamTrack>();
-            //get initial tracks
-            int trackSize = 0;
-            IntPtr trackNativePtr = NativeMethods.MediaStreamGetAudioTracks(self, ref trackSize);
-            IntPtr[] tracksPtr = new IntPtr[trackSize];
-            Marshal.Copy(trackNativePtr, tracksPtr, 0, trackSize);
-            //TODO: Linux compatibility
-            Marshal.FreeCoTaskMem(trackNativePtr);
-            for (int i = 0; i < trackSize; i++)
-            {
-                MediaStreamTrack track = new MediaStreamTrack(tracksPtr[i]);
-                track.stopTrack += StopTrack;
-                track.getRts += GetRts;
-                AudioTracks.Add(track);
-            }
+        public MediaStreamTrack[] GetTracks()
+        {
+            return mediaStreamTrackList.ToArray();
         }
-    }
+
     internal class Cleaner : MonoBehaviour
     {
         private Action onDestroy;
@@ -150,46 +74,68 @@ public static void AddCleanerCallback(this GameObject obj, Action callback)
             Cleaner.AddCleanerCallback(obj, callback);
         }
     }
+
+    internal class CameraCapturerTextures
+    {
+        internal RenderTexture camRenderTexture;
+        internal List<RenderTexture> webRTCTextures = new List<RenderTexture>();
+    }
+
     public static class CameraExtension
     {
-        internal static List<RenderTexture[]> camCopyRts = new List<RenderTexture[]>();
-        internal static bool started = false;
-        public static MediaStream CaptureStream(this Camera cam, int width, int height)
+        internal static Dictionary<Camera, CameraCapturerTextures> camCapturerTexturesDict = new Dictionary<Camera, CameraCapturerTextures>();
+
+        public static int GetStreamTextureCount(this Camera cam)
         {
-            if (camCopyRts.Count > 0)
+            CameraCapturerTextures textures;
+            if (camCapturerTexturesDict.TryGetValue(cam, out textures))
             {
-                throw new NotImplementedException("Currently not allowed multiple MediaStream");
+                return textures.webRTCTextures.Count;
             }
+            return 0;
+        }
 
-            RenderTexture[] rts = new RenderTexture[2];
-            //rts[0] for render target, rts[1] for flip and WebRTC source
-            rts[0] = new RenderTexture(width, height, 0, RenderTextureFormat.BGRA32);
-            rts[1] = new RenderTexture(width, height, 0, RenderTextureFormat.BGRA32);
-            rts[0].Create();
-            rts[1].Create();
-            camCopyRts.Add(rts);
-            cam.targetTexture = rts[0];
-            cam.gameObject.AddCleanerCallback(() =>
+        public static RenderTexture GetStreamTexture(this Camera cam, int index)
         {
+            CameraCapturerTextures textures;
+            if (camCapturerTexturesDict.TryGetValue(cam, out textures))
             {
-                if (rts != null)
+                if (index >= 0 && index < textures.webRTCTextures.Count)
                 {
-                    CameraExtension.RemoveRt(rts);
-                    rts[0].Release();
-                    rts[1].Release();
-                    UnityEngine.Object.Destroy(rts[0]);
-                    UnityEngine.Object.Destroy(rts[1]);
+                    return textures.webRTCTextures[index];
                 }
-            });
-            started = true;
-            return new MediaStream(rts, WebRTC.Context.CaptureVideoStream(rts[1].GetNativeTexturePtr(), width, height));
+            }
+            return null;
         }
-        public static void RemoveRt(RenderTexture[] rts)
+
+        public static void CreateRenderStreamTexture(this Camera cam, int width, int height, int count = 1)
         {
-            camCopyRts.Remove(rts);
-            if (camCopyRts.Count == 0)
+            CameraCapturerTextures cameraCapturerTextures = new CameraCapturerTextures();
+            camCapturerTexturesDict.Add(cam, cameraCapturerTextures);
+
+            cameraCapturerTextures.camRenderTexture = new RenderTexture(width, height, 0, RenderTextureFormat.BGRA32);
+            cameraCapturerTextures.camRenderTexture.Create();
+
+            // Each extra texture shrinks both dimensions by 4x: count = 2 gives 1280x720
+            // and 320x180; "mip" here is only loosely analogous to real mip levels.
+            int mipCount = count;
+            for (int i = 1, mipLevel = 1; i <= mipCount; ++i, mipLevel *= 4)
             {
-                started = false;
+                RenderTexture webRtcTex = new RenderTexture(width / mipLevel, height / mipLevel, 0, RenderTextureFormat.BGRA32);
+                webRtcTex.Create();
+                cameraCapturerTextures.webRTCTextures.Add(webRtcTex);
             }
+
+            cam.targetTexture = cameraCapturerTextures.camRenderTexture;
+            cam.gameObject.AddCleanerCallback(() =>
+            {
+                cameraCapturerTextures.camRenderTexture.Release();
+                UnityEngine.Object.Destroy(cameraCapturerTextures.camRenderTexture);
+
+                foreach (var v in cameraCapturerTextures.webRTCTextures)
+                {
+                    v.Release();
+                    UnityEngine.Object.Destroy(v);
+                }
+                cameraCapturerTextures.webRTCTextures.Clear();
+            });
         }
     }
 
@@ -198,12 +144,7 @@ public static class Audio
     {
         private static bool started = false;
         private static AudioInput audioInput = new AudioInput();
-        public static MediaStream CaptureStream()
-        {
-            audioInput.BeginRecording();
-            started = true;
-            return new MediaStream(WebRTC.Context.CaptureAudioStream());
-        }
+
         public static void Update()
         {
             if (started)
@@ -211,6 +152,13 @@ public static void Update()
                 audioInput.UpdateAudio();
             }
         }
+
+        public static void Start()
+        {
+            audioInput.BeginRecording();
+            started = true;
+        }
+
         public static void Stop()
         {
             if (started)
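Taken together, CreateRenderStreamTexture / GetStreamTexture and the new track types replace the old CaptureStream entry points. A minimal usage sketch, mirroring the MediaStreamSample.cs changes later in this diff (cam is any Unity Camera; names are illustrative, not part of the patch):

    cam.CreateRenderStreamTexture(1280, 720);   // camera RT plus one WebRTC-facing texture
    var stream = new MediaStream();             // wraps Context.CreateMediaStream
    int texCount = cam.GetStreamTextureCount();
    for (int i = 0; i < texCount; ++i)
    {
        stream.AddTrack(new VideoStreamTrack("videoTrack" + i, cam.GetStreamTexture(i)));
    }
    stream.AddTrack(new AudioStreamTrack("audioTrack"));
    Audio.Start();                              // begin capturing audio frames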
diff --git a/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStreamTrack.cs b/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStreamTrack.cs
index 30df48c7d..5d30853d7 100644
--- a/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStreamTrack.cs
+++ b/Packages/com.unity.webrtc/Runtime/Srcipts/MediaStreamTrack.cs
@@ -6,50 +6,49 @@ namespace Unity.WebRTC
 {
     public class MediaStreamTrack
     {
-        internal IntPtr self;
-        private TrackKind kind;
-        private string id;
-        private bool enabled;
-        private TrackState readyState;
-        internal Action stopTrack;
-        internal Func getRts;
+        internal IntPtr nativePtr;
+        protected string id;
+        protected TrackKind kind;
+
+        internal MediaStreamTrack(IntPtr ptr)
+        {
+            nativePtr = ptr;
+            kind = NativeMethods.MediaStreamTrackGetKind(nativePtr);
+            id = Marshal.PtrToStringAnsi(NativeMethods.MediaStreamTrackGetID(nativePtr));
+        }
 
         public bool Enabled
         {
-            get
-            {
-                return NativeMethods.MediaStreamTrackGetEnabled(self);
-            }
-            set
-            {
-                NativeMethods.MediaStreamTrackSetEnabled(self, value);
-            }
+            get { return NativeMethods.MediaStreamTrackGetEnabled(nativePtr); }
+            set { NativeMethods.MediaStreamTrackSetEnabled(nativePtr, value); }
         }
+
         public TrackState ReadyState
         {
             get
-            {
-                return NativeMethods.MediaStreamTrackGetReadyState(self);
-            }
+            { return NativeMethods.MediaStreamTrackGetReadyState(nativePtr); }
             private set { }
         }
 
         public TrackKind Kind { get => kind; private set { } }
         public string Id { get => id; private set { } }
+    }
 
-        internal MediaStreamTrack(IntPtr ptr)
+    public class VideoStreamTrack : MediaStreamTrack
+    {
+        // bitRate is in bits per second; the default is 10 Mbps.
+        public VideoStreamTrack(string label, RenderTexture rt, int bitRate = 10000000)
+            : base(WebRTC.Context.CreateVideoTrack(label, rt.GetNativeTexturePtr(), rt.width, rt.height, bitRate))
         {
-            self = ptr;
-            kind = NativeMethods.MediaStreamTrackGetKind(self);
-            id = Marshal.PtrToStringAnsi(NativeMethods.MediaStreamTrackGetID(self));
         }
-        //Disassociate track from its source(video or audio), not for destroying the track
-        public void Stop()
+    }
+
+    public class AudioStreamTrack : MediaStreamTrack
+    {
+        public AudioStreamTrack(string label) : base(WebRTC.Context.CreateAudioTrack(label))
         {
-            stopTrack(this);
         }
     }
+
     public enum TrackKind
     {
         Audio,
diff --git a/Packages/com.unity.webrtc/Runtime/Srcipts/RTCPeerConnection.cs b/Packages/com.unity.webrtc/Runtime/Srcipts/RTCPeerConnection.cs
index 9c9e74469..53392be65 100644
--- a/Packages/com.unity.webrtc/Runtime/Srcipts/RTCPeerConnection.cs
+++ b/Packages/com.unity.webrtc/Runtime/Srcipts/RTCPeerConnection.cs
@@ -188,10 +188,11 @@ public void Close()
         NativeMethods.PeerConnectionClose(self, m_id);
     }
 
-    public RTCRtpSender AddTrack(MediaStreamTrack track)
+    public RTCRtpSender AddTrack(MediaStreamTrack track, string mediaStreamId = "unity")
     {
-        return new RTCRtpSender(NativeMethods.PeerConnectionAddTrack(self, track.self));
+        return new RTCRtpSender(NativeMethods.PeerConnectionAddTrack(self, track.nativePtr, mediaStreamId));
     }
+
     public void RemoveTrack(RTCRtpSender sender)
     {
         NativeMethods.PeerConnectionRemoveTrack(self, sender.self);
diff --git a/Packages/com.unity.webrtc/Runtime/Srcipts/WebRTC.cs b/Packages/com.unity.webrtc/Runtime/Srcipts/WebRTC.cs
index 1c919b57a..809d702af 100644
--- a/Packages/com.unity.webrtc/Runtime/Srcipts/WebRTC.cs
+++ b/Packages/com.unity.webrtc/Runtime/Srcipts/WebRTC.cs
@@ -178,11 +178,19 @@ public enum RTCIceTransportPolicy
     All
 }
 
+public enum RTCBundlePolicy
+{
+    kBundlePolicyBalanced,
+    kBundlePolicyMaxBundle,
+    kBundlePolicyMaxCompat
+};
+
 [Serializable]
 public struct RTCConfiguration
 {
     public RTCIceServer[] iceServers;
     public RTCIceTransportPolicy iceTransportPolicy;
+    public RTCBundlePolicy bundle_policy;
 }
 
 public static class WebRTC
@@ -219,15 +227,17 @@ public static IEnumerator Update()
     {
         // Wait until all frame rendering is done
         yield return new WaitForEndOfFrame();
-        if (CameraExtension.started)
+        //Blit is for DirectX Rendering API Only
+
+        foreach (var k in CameraExtension.camCapturerTexturesDict.Keys)
         {
-            //Blit is for DirectX Rendering API Only
-            foreach (var rts in CameraExtension.camCopyRts)
+            foreach (var rt in CameraExtension.camCapturerTexturesDict[k].webRTCTextures)
             {
-                Graphics.Blit(rts[0], rts[1], flipMat);
-            }
-            GL.IssuePluginEvent(NativeMethods.GetRenderEventFunc(), 0);
+                Graphics.Blit(CameraExtension.camCapturerTexturesDict[k].camRenderTexture, rt, flipMat);
+            }
         }
+
+        GL.IssuePluginEvent(NativeMethods.GetRenderEventFunc(), 0);
         Audio.Update();
     }
 }
@@ -326,7 +336,7 @@ internal static class NativeMethods
     [DllImport(WebRTC.Lib)]
     public static extern void PeerConnectionSetRemoteDescription(IntPtr ptr, ref RTCSessionDescription desc);
     [DllImport(WebRTC.Lib)]
-    public static extern IntPtr PeerConnectionAddTrack(IntPtr pc, IntPtr track);
+    // Note: SizeConst has no effect with LPStr; the string marshals as a null-terminated ANSI pointer.
+    public static extern IntPtr PeerConnectionAddTrack(IntPtr pc, IntPtr track, [MarshalAs(UnmanagedType.LPStr, SizeConst = 256)] string mediaStreamId);
     [DllImport(WebRTC.Lib)]
    public static extern void
PeerConnectionRemoveTrack(IntPtr pc, IntPtr sender); [DllImport(WebRTC.Lib)] @@ -362,7 +372,11 @@ internal static class NativeMethods [DllImport(WebRTC.Lib)] public static extern IntPtr CaptureVideoStream(IntPtr context, IntPtr rt, int width, int height); [DllImport(WebRTC.Lib)] - public static extern IntPtr CaptureAudioStream(IntPtr context); + public static extern IntPtr CreateMediaStream(IntPtr context, [MarshalAs(UnmanagedType.LPStr, SizeConst = 256)] string label); + [DllImport(WebRTC.Lib)] + public static extern IntPtr CreateVideoTrack(IntPtr context, [MarshalAs(UnmanagedType.LPStr, SizeConst = 256)] string label, IntPtr rt, int width, int height, int bitRate); + [DllImport(WebRTC.Lib)] + public static extern IntPtr CreateAudioTrack(IntPtr context, [MarshalAs(UnmanagedType.LPStr, SizeConst = 256)] string label); [DllImport(WebRTC.Lib)] public static extern void MediaStreamAddTrack(IntPtr stream, IntPtr track); [DllImport(WebRTC.Lib)] @@ -399,9 +413,9 @@ internal struct Context public static bool ToBool(Context v) { return v; } public static Context Create(int uid = 0) { return NativeMethods.ContextCreate(uid); } public void Destroy(int uid = 0) { NativeMethods.ContextDestroy(uid); self = IntPtr.Zero; } - public IntPtr CaptureVideoStream(IntPtr rt, int width, int height) { return NativeMethods.CaptureVideoStream(self, rt, width, height); } - public IntPtr CaptureAudioStream() { return NativeMethods.CaptureAudioStream(self); } - + public IntPtr CreateMediaStream(string label) { return NativeMethods.CreateMediaStream(self, label); } + public IntPtr CreateVideoTrack(string label, IntPtr rt, int width, int height, int bitRate) { return NativeMethods.CreateVideoTrack(self, label, rt, width, height, bitRate); } + public IntPtr CreateAudioTrack(string label) {return NativeMethods.CreateAudioTrack(self, label);} } } diff --git a/Packages/com.unity.webrtc/Samples/Example/MediaStreamSample.cs b/Packages/com.unity.webrtc/Samples/Example/MediaStreamSample.cs index cb39b04bb..51b296ea0 100644 --- a/Packages/com.unity.webrtc/Samples/Example/MediaStreamSample.cs +++ b/Packages/com.unity.webrtc/Samples/Example/MediaStreamSample.cs @@ -19,7 +19,7 @@ public class MediaStreamSample : MonoBehaviour private RTCPeerConnection pc1, pc2; private List pc1Senders, pc2Senders; - private Unity.WebRTC.MediaStream audioStream, videoStream; + private Unity.WebRTC.MediaStream mediaStream; private RTCDataChannel dataChannel, remoteDataChannel; private Coroutine sdpCheck; private string msg; @@ -159,14 +159,11 @@ void Pc2OnIceCandidate(RTCIceCandidate candidate) } public void AddTracks() { - foreach (var track in audioStream.GetTracks()) + foreach (var track in mediaStream.GetTracks()) { pc1Senders.Add (pc1.AddTrack(track)); } - foreach(var track in videoStream.GetTracks()) - { - pc1Senders.Add(pc1.AddTrack(track)); - } + if(!videoUpdateStarted) { StartCoroutine(WebRTC.Update()); @@ -212,8 +209,19 @@ void Call() RTCDataChannelInit conf = new RTCDataChannelInit(true); dataChannel = pc1.CreateDataChannel("data", ref conf); - audioStream = Audio.CaptureStream(); - videoStream = cam.CaptureStream(1280, 720); + + cam.CreateRenderStreamTexture(1280, 720); + mediaStream = new MediaStream(); + int texCount = cam.GetStreamTextureCount(); + for (int i = 0; i < texCount; ++i) + { + RenderTexture rt = cam.GetStreamTexture(i); + mediaStream.AddTrack(new VideoStreamTrack("videoTrack"+1, rt)); + } + + mediaStream.AddTrack(new AudioStreamTrack("audioTrack")); + Audio.Start(); + RtImage.texture = cam.targetTexture; } diff --git 
a/Packages/com.unity.webrtc/Tests/Runtime/MediaStreamTest.cs b/Packages/com.unity.webrtc/Tests/Runtime/MediaStreamTest.cs index 3e9247499..dd391660b 100644 --- a/Packages/com.unity.webrtc/Tests/Runtime/MediaStreamTest.cs +++ b/Packages/com.unity.webrtc/Tests/Runtime/MediaStreamTest.cs @@ -46,10 +46,19 @@ public IEnumerator MediaStreamTest_AddAndRemoveMediaStream() { pc2Senders.Add(peer2.AddTrack(e.Track)); }); - foreach (var track in cam.CaptureStream(1280, 720).GetTracks()) + + + cam.CreateRenderStreamTexture(1280, 720, 2); + MediaStream mediaStream = new MediaStream(); + int texCount = cam.GetStreamTextureCount(); + for (int i = 0; i < texCount; ++i) { - pc1Senders.Add(peer1.AddTrack(track)); + RenderTexture rt = cam.GetStreamTexture(i); + VideoStreamTrack videoStreamTrack = new VideoStreamTrack("videoTrack"+i, rt); + mediaStream.AddTrack(videoStreamTrack); + pc1Senders.Add(peer1.AddTrack(videoStreamTrack)); } + var conf = new RTCDataChannelInit(true); RTCOfferOptions options1 = default; diff --git a/Plugin/WebRTCPlugin/Callback.cpp b/Plugin/WebRTCPlugin/Callback.cpp index 5682d1eaf..c22b13745 100644 --- a/Plugin/WebRTCPlugin/Callback.cpp +++ b/Plugin/WebRTCPlugin/Callback.cpp @@ -12,8 +12,6 @@ namespace WebRTC ID3D11DeviceContext* context; //d3d11 device ID3D11Device* g_D3D11Device = nullptr; - //natively created ID3D11Texture2D ptrs - UnityFrameBuffer* renderTextures[bufferedFrameNum]; } using namespace WebRTC; //get d3d11 device @@ -33,14 +31,6 @@ static void UNITY_INTERFACE_API OnGraphicsDeviceEvent(UnityGfxDeviceEventType ev } case kUnityGfxDeviceEventShutdown: { - for (auto rt : renderTextures) - { - if (rt) - { - rt->Release(); - rt = nullptr; - } - } //UnityPluginUnload not called normally s_Graphics->UnregisterDeviceEventCallback(OnGraphicsDeviceEvent); break; diff --git a/Plugin/WebRTCPlugin/Context.cpp b/Plugin/WebRTCPlugin/Context.cpp index 59c5262dd..f1f770669 100644 --- a/Plugin/WebRTCPlugin/Context.cpp +++ b/Plugin/WebRTCPlugin/Context.cpp @@ -1,5 +1,6 @@ #include "pch.h" #include "WebRTCPlugin.h" +#include "UnityEncoder.h" #include "Context.h" namespace WebRTC @@ -240,6 +241,9 @@ namespace WebRTC } config.servers.push_back(stunServer); config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; + + Json::Value bundle_policy = configJson["bundle_policy"]; + config.bundle_policy = (webrtc::PeerConnectionInterface::BundlePolicy)bundle_policy.asInt(); } #pragma warning(push) #pragma warning(disable: 4715) @@ -292,9 +296,9 @@ namespace WebRTC rtc::InitializeSSL(); audioDevice = new rtc::RefCountedObject(); - nvVideoCapturerUnique = std::make_unique(); - nvVideoCapturer = nvVideoCapturerUnique.get(); - auto dummyVideoEncoderFactory = std::make_unique(nvVideoCapturer); + + auto dummyVideoEncoderFactory = std::make_unique(); + pDummyVideoEncoderFactory = dummyVideoEncoderFactory.get(); peerConnectionFactory = webrtc::CreatePeerConnectionFactory( workerThread.get(), @@ -307,16 +311,18 @@ namespace WebRTC webrtc::CreateBuiltinVideoDecoderFactory(), nullptr, nullptr); + } Context::~Context() { + pDummyVideoEncoderFactory->Destroy(); clients.clear(); peerConnectionFactory = nullptr; - audioTrack = nullptr; - videoTracks.clear(); - audioStream = nullptr; - videoStreams.clear(); + + mediaSteamTrackList.clear(); + mediaStreamMap.clear(); + nvVideoCapturerList.clear(); workerThread->Quit(); workerThread.reset(); @@ -324,24 +330,49 @@ namespace WebRTC signalingThread.reset(); } - webrtc::MediaStreamInterface* Context::CreateVideoStream(UnityFrameBuffer* frameBuffer) + void 
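+    // Presumably invoked once per rendered frame from the render-event callback that
+    // GL.IssuePluginEvent triggers on the C# side; it drains every registered capturer
+    // so all video tracks encode in the same pass.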
Context::EncodeFrame() { - //TODO: label and stream id should be maintained in some way for multi-stream - auto videoTrack = peerConnectionFactory->CreateVideoTrack( - "video", peerConnectionFactory->CreateVideoSource(std::move(nvVideoCapturerUnique))); - if (!videoTracks.count(frameBuffer)) + for (std::list::iterator it= nvVideoCapturerList.begin(); it!= nvVideoCapturerList.end(); ++it) { - videoTracks[frameBuffer] = videoTrack; + (*it)->EncodeVideoData(); } - auto videoStream = peerConnectionFactory->CreateLocalMediaStream("video"); - videoStream->AddTrack(videoTrack); - videoStreams.push_back(videoStream); - nvVideoCapturer->unityRT = frameBuffer; - nvVideoCapturer->StartEncoder(); - return videoStream.get(); } - webrtc::MediaStreamInterface* Context::CreateAudioStream() + void Context::StopCapturer() + { + for (std::list::iterator it = nvVideoCapturerList.begin(); it != nvVideoCapturerList.end(); ++it) + { + (*it)->Stop(); + } + } + + webrtc::MediaStreamInterface* Context::CreateMediaStream(const std::string& stream_id) + { + if (mediaStreamMap.count(stream_id) == 0) + { + mediaStreamMap[stream_id] = peerConnectionFactory->CreateLocalMediaStream(stream_id); + } + + return mediaStreamMap[stream_id]; + } + + webrtc::MediaStreamTrackInterface* Context::CreateVideoTrack(const std::string& label, UnityFrameBuffer* frameBuffer, int32 width, int32 height, int32 bitRate) + { + UnityEncoder* pUnityEncoder = pDummyVideoEncoderFactory->CreatePlatformEncoder(WebRTC::Nvidia, width, height, bitRate); + UnityVideoCapturer* pUnityVideoCapturer = new UnityVideoCapturer(pUnityEncoder, width, height); + pUnityVideoCapturer->InitializeEncoder(); + pDummyVideoEncoderFactory->AddCapturer(pUnityVideoCapturer); + + auto videoTrack = peerConnectionFactory->CreateVideoTrack(label, peerConnectionFactory->CreateVideoSource(pUnityVideoCapturer)); + pUnityVideoCapturer->unityRT = frameBuffer; + pUnityVideoCapturer->StartEncoder(); + + nvVideoCapturerList.push_back(pUnityVideoCapturer); + mediaSteamTrackList.push_back(videoTrack); + return videoTrack; + } + + webrtc::MediaStreamTrackInterface* Context::CreateAudioTrack(const std::string& label) { //avoid optimization specially for voice cricket::AudioOptions audioOptions; @@ -349,10 +380,9 @@ namespace WebRTC audioOptions.noise_suppression = false; audioOptions.highpass_filter = false; //TODO: label and stream id should be maintained in some way for multi-stream - audioTrack = peerConnectionFactory->CreateAudioTrack("audio", peerConnectionFactory->CreateAudioSource(audioOptions)); - audioStream = peerConnectionFactory->CreateLocalMediaStream("audio"); - audioStream->AddTrack(audioTrack); - return audioStream.get(); + auto audioTrack = peerConnectionFactory->CreateAudioTrack(label, peerConnectionFactory->CreateAudioSource(audioOptions)); + mediaSteamTrackList.push_back(audioTrack); + return audioTrack; } PeerSDPObserver* PeerSDPObserver::Create(DelegateSetSDSuccess onSuccess, DelegateSetSDFailure onFailure) diff --git a/Plugin/WebRTCPlugin/Context.h b/Plugin/WebRTCPlugin/Context.h index 939e09574..c1c5d7dff 100644 --- a/Plugin/WebRTCPlugin/Context.h +++ b/Plugin/WebRTCPlugin/Context.h @@ -1,9 +1,10 @@ #pragma once +#include "UnityEncoder.h" #include "DummyAudioDevice.h" #include "DummyVideoEncoder.h" #include "PeerConnectionObject.h" -#include "NvVideoCapturer.h" - +#include "UnityVideoCapturer.h" +#include "NvEncoder.h" namespace WebRTC { @@ -42,15 +43,15 @@ namespace WebRTC { public: explicit Context(int uid = -1); - webrtc::MediaStreamInterface* 
CreateVideoStream(UnityFrameBuffer* frameBuffer); - webrtc::MediaStreamInterface* CreateAudioStream(); + webrtc::MediaStreamInterface* CreateMediaStream(const std::string& stream_id); + webrtc::MediaStreamTrackInterface* CreateVideoTrack(const std::string& label, UnityFrameBuffer* frameBuffer, int32 width, int32 height, int32 bitRate); + webrtc::MediaStreamTrackInterface* CreateAudioTrack(const std::string& label); ~Context(); PeerConnectionObject* CreatePeerConnection(int id); PeerConnectionObject* CreatePeerConnection(int id, const std::string& conf); - void InitializeEncoder(int32 width, int32 height) { nvVideoCapturer->InitializeEncoder(width, height); } - void EncodeFrame() { nvVideoCapturer->EncodeVideoData(); } - void StopCapturer() { nvVideoCapturer->Stop(); } + void EncodeFrame(); + void StopCapturer(); void ProcessAudioData(const float* data, int32 size) { audioDevice->ProcessAudioData(data, size); } void DeleteClient(int id) { clients.erase(id); } private: @@ -59,14 +60,12 @@ namespace WebRTC std::unique_ptr signalingThread; std::map> clients; rtc::scoped_refptr peerConnectionFactory; - NvVideoCapturer* nvVideoCapturer; - std::unique_ptr nvVideoCapturerUnique; + DummyVideoEncoderFactory* pDummyVideoEncoderFactory; + std::map> mediaStreamMap; + std::list> mediaSteamTrackList; + + std::list nvVideoCapturerList; rtc::scoped_refptr audioDevice; - rtc::scoped_refptr audioTrack; - rtc::scoped_refptr audioStream; - //TODO: move videoTrack to NvVideoCapturer and maintain multiple NvVideoCapturer here - std::vector> videoStreams; - std::map> videoTracks; }; class PeerSDPObserver : public webrtc::SetSessionDescriptionObserver diff --git a/Plugin/WebRTCPlugin/DummyVideoEncoder.cpp b/Plugin/WebRTCPlugin/DummyVideoEncoder.cpp index 1a5dff007..7cba0071d 100644 --- a/Plugin/WebRTCPlugin/DummyVideoEncoder.cpp +++ b/Plugin/WebRTCPlugin/DummyVideoEncoder.cpp @@ -1,7 +1,9 @@ #include "pch.h" +#include "UnityEncoder.h" #include "DummyVideoEncoder.h" -#include "NvVideoCapturer.h" +#include "UnityVideoCapturer.h" #include +#include "NvEncoder.h" namespace WebRTC { @@ -67,7 +69,22 @@ namespace WebRTC SetRate(allocation.get_sum_kbps() * 1000); return 0; } - DummyVideoEncoderFactory::DummyVideoEncoderFactory(NvVideoCapturer* videoCapturer):capturer(videoCapturer){} + + DummyVideoEncoderFactory::DummyVideoEncoderFactory() + { + + } + + void DummyVideoEncoderFactory::Destroy() + { + for (std::list::iterator it = unityEncoders.begin(); it!= unityEncoders.end(); ++it) + { + delete *it; + } + unityEncoders.clear(); + NvEncoder::DestroyEncoderTexture(); + } + std::vector DummyVideoEncoderFactory::GetSupportedFormats() const { const absl::optional profileLevelId = @@ -87,8 +104,48 @@ namespace WebRTC const webrtc::SdpVideoFormat& format) { auto dummyVideoEncoder = std::make_unique(); - dummyVideoEncoder->SetKeyFrame.connect(capturer, &NvVideoCapturer::SetKeyFrame); - dummyVideoEncoder->SetRate.connect(capturer, &NvVideoCapturer::SetRate); + + { + //todo: According to condition of format choose different capturer. 
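+                //One possible shape for that selection, sketched as a comment (names are
+                //illustrative, not part of this change): find the capturer registered for
+                //the requested format, then hook up the signals:
+                //
+                //for (UnityVideoCapturer* c : capturers) {
+                //    if (FormatMatches(c, format)) { // hypothetical predicate
+                //        dummyVideoEncoder->SetKeyFrame.connect(c, &UnityVideoCapturer::SetKeyFrame);
+                //        dummyVideoEncoder->SetRate.connect(c, &UnityVideoCapturer::SetRate);
+                //        break;
+                //    }
+                //}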
+ //UnityVideoCapturer* pCapturer = *(++capturers.begin()); + + //dummyVideoEncoder->SetKeyFrame.connect(pCapturer, &UnityVideoCapturer::SetKeyFrame); + //dummyVideoEncoder->SetRate.connect(pCapturer, &UnityVideoCapturer::SetRate); + } + return dummyVideoEncoder; } + + UnityEncoder* DummyVideoEncoderFactory::CreatePlatformEncoder(EncoderPlatform platform, int width, int height, int bitRate) + { + UnityEncoder* pEncoder = NULL; + switch (platform) + { + case WebRTC::Nvidia: + pEncoder = new NvEncoder(); + break; + case WebRTC::Amd: + break; + case WebRTC::Soft: + break; + default: + break; + } + pEncoder->InitEncoder(width, height, bitRate); + unityEncoders.push_back(pEncoder); + return pEncoder; + } + + UnityEncoder* DummyVideoEncoderFactory::GetPlatformEncoder(EncoderPlatform platform, int width, int height, int bitRate) + { + for (std::list::iterator it = unityEncoders.begin(); it != unityEncoders.end(); ++it) + { + if ((*it)->getEncodeWidth() == width && (*it)->getEncodeHeight() == height && (*it)->getBitRate() == bitRate) { + return (*it); + } + } + + return CreatePlatformEncoder(platform, width, height, bitRate); + } + } diff --git a/Plugin/WebRTCPlugin/DummyVideoEncoder.h b/Plugin/WebRTCPlugin/DummyVideoEncoder.h index 43437e2ad..fe20463ec 100644 --- a/Plugin/WebRTCPlugin/DummyVideoEncoder.h +++ b/Plugin/WebRTCPlugin/DummyVideoEncoder.h @@ -2,7 +2,7 @@ namespace WebRTC { - class NvVideoCapturer; + class UnityVideoCapturer; class DummyVideoEncoder : public webrtc::VideoEncoder { public: @@ -38,6 +38,13 @@ namespace WebRTC webrtc::VideoBitrateAllocation lastBitrate; }; + enum EncoderPlatform + { + Nvidia, + Amd, + Soft, + }; + class DummyVideoEncoderFactory : public webrtc::VideoEncoderFactory { public: @@ -51,8 +58,14 @@ namespace WebRTC // Creates a VideoEncoder for the specified format. 
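        // Note: as implemented in DummyVideoEncoder.cpp, the encoder returned here no
        // longer connects SetKeyFrame/SetRate to any capturer (the selection is still a
        // todo), so key-frame and bitrate requests from webrtc currently go nowhere.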
virtual std::unique_ptr CreateVideoEncoder( const webrtc::SdpVideoFormat& format) override; - DummyVideoEncoderFactory(NvVideoCapturer* videoCapturer); + DummyVideoEncoderFactory(); + void Destroy(); + + void AddCapturer(UnityVideoCapturer* _capturer) { capturers.push_back(_capturer); } + UnityEncoder* CreatePlatformEncoder(EncoderPlatform platform, int width, int height, int bitRate); + UnityEncoder* GetPlatformEncoder(EncoderPlatform platform, int width, int height, int bitRate); private: - NvVideoCapturer* capturer; + std::list capturers; + std::list unityEncoders; }; } diff --git a/Plugin/WebRTCPlugin/NvEncoder.cpp b/Plugin/WebRTCPlugin/NvEncoder.cpp index c12e17d78..bc805c406 100644 --- a/Plugin/WebRTCPlugin/NvEncoder.cpp +++ b/Plugin/WebRTCPlugin/NvEncoder.cpp @@ -6,29 +6,56 @@ namespace WebRTC { - NvEncoder::NvEncoder(int width, int height) :width(width), height(height) + std::list NvEncoder::nvEncoderInputTextureList; + NvEncoder::NvEncoder() { - LogPrint(StringFormat("width is %d, height is %d", width, height).c_str()); + if (pEncoderInterface==nullptr) + { + bool result = true; +#pragma region open an encode session + //open an encode session + NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS openEncdoeSessionExParams = { 0 }; + openEncdoeSessionExParams.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER; + openEncdoeSessionExParams.device = g_D3D11Device; + openEncdoeSessionExParams.deviceType = NV_ENC_DEVICE_TYPE_DIRECTX; + openEncdoeSessionExParams.apiVersion = NVENCAPI_VERSION; + result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncOpenEncodeSessionEx(&openEncdoeSessionExParams, &pEncoderInterface))); + checkf(result, "Unable to open NvEnc encode session"); + LogPrint(StringFormat("OpenEncodeSession Error is %d", errorCode).c_str()); +#pragma endregion + } + + } + + NvEncoder::~NvEncoder() + { + ReleaseEncoderResources(); + if (pEncoderInterface) + { + bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyEncoder(pEncoderInterface)); + checkf(result, "Failed to destroy NV encoder interface"); + pEncoderInterface = nullptr; + } + + } + + void NvEncoder::InitEncoder(int width, int height, int _bitRate) + { + encodeWidth = width; + encodeHeight = height; + bitRate = _bitRate; + + LogPrint(StringFormat("width is %d, height is %d", encodeWidth, encodeHeight).c_str()); checkf(g_D3D11Device != nullptr, "D3D11Device is invalid"); - checkf(width > 0 && height > 0, "Invalid width or height!"); + checkf(encodeWidth > 0 && encodeHeight > 0, "Invalid width or height!"); + bool result = true; -#pragma region open an encode session - //open an encode session - NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS openEncdoeSessionExParams = { 0 }; - openEncdoeSessionExParams.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER; - openEncdoeSessionExParams.device = g_D3D11Device; - openEncdoeSessionExParams.deviceType = NV_ENC_DEVICE_TYPE_DIRECTX; - openEncdoeSessionExParams.apiVersion = NVENCAPI_VERSION; - result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncOpenEncodeSessionEx(&openEncdoeSessionExParams, &pEncoderInterface))); - checkf(result, "Unable to open NvEnc encode session"); - LogPrint(StringFormat("OpenEncodeSession Error is %d", errorCode).c_str()); -#pragma endregion #pragma region set initialization parameters nvEncInitializeParams.version = NV_ENC_INITIALIZE_PARAMS_VER; - nvEncInitializeParams.encodeWidth = width; - nvEncInitializeParams.encodeHeight = height; - nvEncInitializeParams.darWidth = width; - 
nvEncInitializeParams.darHeight = height; + nvEncInitializeParams.encodeWidth = encodeWidth; + nvEncInitializeParams.encodeHeight = encodeHeight; + nvEncInitializeParams.darWidth = encodeWidth; + nvEncInitializeParams.darHeight = encodeHeight; nvEncInitializeParams.encodeGUID = NV_ENC_CODEC_H264_GUID; nvEncInitializeParams.presetGUID = NV_ENC_PRESET_LOW_LATENCY_HQ_GUID; nvEncInitializeParams.frameRateNum = frameRate; @@ -37,8 +64,8 @@ namespace WebRTC nvEncInitializeParams.reportSliceOffsets = 0; nvEncInitializeParams.enableSubFrameWrite = 0; nvEncInitializeParams.encodeConfig = &nvEncConfig; - nvEncInitializeParams.maxEncodeWidth = 3840; - nvEncInitializeParams.maxEncodeHeight = 2160; + nvEncInitializeParams.maxEncodeWidth = encodeWidth;//3840; + nvEncInitializeParams.maxEncodeHeight = encodeHeight;//2160; #pragma endregion #pragma region get preset ocnfig and set it NV_ENC_PRESET_CONFIG presetConfig = { 0 }; @@ -74,20 +101,10 @@ namespace WebRTC #pragma endregion InitEncoderResources(); isNvEncoderSupported = true; - } - NvEncoder::~NvEncoder() - { - ReleaseEncoderResources(); - if (pEncoderInterface) - { - bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyEncoder(pEncoderInterface)); - checkf(result, "Failed to destroy NV encoder interface"); - pEncoderInterface = nullptr; - } - + isInitialize = true; } - void NvEncoder::UpdateSettings() + void NvEncoder::UpdateSettings(int width, int height) { bool settingChanged = false; if (nvEncConfig.rcParams.averageBitRate != bitRate) @@ -120,11 +137,13 @@ namespace WebRTC lastBitRate = bitRate; } } + //entry for encoding a frame - void NvEncoder::EncodeFrame() + void NvEncoder::EncodeFrame(int width, int height) { - UpdateSettings(); + UpdateSettings(width, height); uint32 bufferIndexToWrite = frameCount % bufferedFrameNum; + Frame& frame = bufferedFrames[bufferIndexToWrite]; #pragma region set frame params //no free buffer, skip this frame @@ -140,8 +159,8 @@ namespace WebRTC picParams.pictureStruct = NV_ENC_PIC_STRUCT_FRAME; picParams.inputBuffer = frame.inputFrame.mappedResource; picParams.bufferFmt = frame.inputFrame.bufferFormat; - picParams.inputWidth = nvEncInitializeParams.encodeWidth; - picParams.inputHeight = nvEncInitializeParams.encodeHeight; + picParams.inputWidth = width; + picParams.inputHeight = height; picParams.outputBitstream = frame.outputFrame; picParams.inputTimeStamp = frameCount; #pragma endregion @@ -151,13 +170,15 @@ namespace WebRTC picParams.encodePicFlags |= NV_ENC_PIC_FLAG_FORCEIDR; } isIdrFrame = false; + bool result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncEncodePicture(pEncoderInterface, &picParams))); checkf(result, StringFormat("Failed to encode frame, error is %d", errorCode).c_str()); + #pragma endregion ProcessEncodedFrame(frame); frameCount++; } - + //get encoded frame void NvEncoder::ProcessEncodedFrame(Frame& frame) { @@ -166,12 +187,15 @@ namespace WebRTC { return; } + frame.isEncoding = false; + #pragma region retrieve encoded frame from output buffer NV_ENC_LOCK_BITSTREAM lockBitStream = { 0 }; lockBitStream.version = NV_ENC_LOCK_BITSTREAM_VER; lockBitStream.outputBitstream = frame.outputFrame; lockBitStream.doNotWait = nvEncInitializeParams.enableEncodeAsync; + bool result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncLockBitstream(pEncoderInterface, &lockBitStream))); checkf(result, StringFormat("Failed to lock bit stream, error is %d", errorCode).c_str()); if (lockBitStream.bitstreamSizeInBytes) @@ -179,20 
+203,19 @@ namespace WebRTC frame.encodedFrame.resize(lockBitStream.bitstreamSizeInBytes); std::memcpy(frame.encodedFrame.data(), lockBitStream.bitstreamBufferPtr, lockBitStream.bitstreamSizeInBytes); } - result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncUnlockBitstream(pEncoderInterface, frame.outputFrame))); checkf(result, StringFormat("Failed to unlock bit stream, error is %d", errorCode).c_str()); frame.isIdrFrame = lockBitStream.pictureType == NV_ENC_PIC_TYPE_IDR; #pragma endregion - CaptureFrame(frame.encodedFrame); + captureFrame(frame.encodedFrame); } ID3D11Texture2D* NvEncoder::AllocateInputBuffers() { ID3D11Texture2D* inputTextures = nullptr; D3D11_TEXTURE2D_DESC desc = { 0 }; - desc.Width = width; - desc.Height = height; + desc.Width = encodeWidth; + desc.Height = encodeHeight; desc.MipLevels = 1; desc.ArraySize = 1; desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; @@ -200,7 +223,7 @@ namespace WebRTC desc.Usage = D3D11_USAGE_DEFAULT; desc.BindFlags = D3D11_BIND_RENDER_TARGET; desc.CPUAccessFlags = 0; - g_D3D11Device->CreateTexture2D(&desc, NULL, &inputTextures); + HRESULT r = g_D3D11Device->CreateTexture2D(&desc, NULL, &inputTextures); return inputTextures; } NV_ENC_REGISTERED_PTR NvEncoder::RegisterResource(void *buffer) @@ -212,8 +235,8 @@ namespace WebRTC if (!registerResource.resourceToRegister) LogPrint("resource is not initialized"); - registerResource.width = width; - registerResource.height = height; + registerResource.width = encodeWidth; + registerResource.height = encodeHeight; LogPrint(StringFormat("nvEncRegisterResource: width is %d, height is %d", registerResource.width, registerResource.height).c_str()); registerResource.bufferFormat = NV_ENC_BUFFER_FORMAT_ARGB; checkf(NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncRegisterResource(pEncoderInterface, ®isterResource))), @@ -237,13 +260,38 @@ namespace WebRTC StringFormat("nvEncCreateBitstreamBuffer error is %d", errorCode).c_str()); return createBitstreamBuffer.bitstreamBuffer; } + + void NvEncoder::DestroyEncoderTexture() + { + for (std::list::iterator it = nvEncoderInputTextureList.begin(); it != nvEncoderInputTextureList.end(); ++it) + { + delete (*it); + } + nvEncoderInputTextureList.clear(); + } + + UnityFrameBuffer* NvEncoder::getEncoderTexture(int width, int height) + { + for (std::list::iterator it = nvEncoderInputTextureList.begin(); it!= nvEncoderInputTextureList.end(); ++it) + { + if ( (*it)->width==width && (*it)->height==height ) + { + return (*it)->texture; + } + } + + EncoderInputTexture* pEncoderInputTexture = new EncoderInputTexture(width, height); + nvEncoderInputTextureList.push_back(pEncoderInputTexture); + return pEncoderInputTexture->texture; + } + void NvEncoder::InitEncoderResources() { - for (uint32 i = 0; i < bufferedFrameNum; i++) + nvEncoderTexture = getEncoderTexture(encodeWidth, encodeHeight); + for (int i = 0; i < bufferedFrameNum; i++) { - renderTextures[i] = AllocateInputBuffers(); Frame& frame = bufferedFrames[i]; - frame.inputFrame.registeredResource = RegisterResource(renderTextures[i]); + frame.inputFrame.registeredResource = RegisterResource(nvEncoderTexture); frame.inputFrame.bufferFormat = NV_ENC_BUFFER_FORMAT_ARGB; MapResources(frame.inputFrame); frame.outputFrame = InitializeBitstreamBuffer(); @@ -263,10 +311,13 @@ namespace WebRTC { for (Frame& frame : bufferedFrames) { - ReleaseFrameInputBuffer(frame); - bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyBitstreamBuffer(pEncoderInterface, 
frame.outputFrame)); - checkf(result, "Failed to destroy output buffer bit stream"); - frame.outputFrame = nullptr; + if (frame.outputFrame!=nullptr) + { + ReleaseFrameInputBuffer(frame); + bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyBitstreamBuffer(pEncoderInterface, frame.outputFrame)); + checkf(result, "Failed to destroy output buffer bit stream"); + frame.outputFrame = nullptr; + } } } } diff --git a/Plugin/WebRTCPlugin/NvEncoder.h b/Plugin/WebRTCPlugin/NvEncoder.h index 490b92f8e..6a119765b 100644 --- a/Plugin/WebRTCPlugin/NvEncoder.h +++ b/Plugin/WebRTCPlugin/NvEncoder.h @@ -4,11 +4,12 @@ #include "nvEncodeAPI.h" #include #include +#include "UnityEncoder.h" namespace WebRTC { using OutputFrame = NV_ENC_OUTPUT_PTR; - class NvEncoder + class NvEncoder : public UnityEncoder { private: struct InputFrame @@ -28,21 +29,55 @@ namespace WebRTC std::atomic isEncoding = false; }; + struct EncoderInputTexture + { + UnityFrameBuffer* texture; + int width; + int height; + EncoderInputTexture(int w, int h) + { + width = w; + height = h; + D3D11_TEXTURE2D_DESC desc = { 0 }; + desc.Width = width; + desc.Height = height; + desc.MipLevels = 1; + desc.ArraySize = 1; + desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; + desc.SampleDesc.Count = 1; + desc.Usage = D3D11_USAGE_DEFAULT; + desc.BindFlags = D3D11_BIND_RENDER_TARGET; + desc.CPUAccessFlags = 0; + HRESULT r = g_D3D11Device->CreateTexture2D(&desc, NULL, &texture); + } + + ~EncoderInputTexture() + { + texture->Release(); + texture = nullptr; + } + }; + public: - NvEncoder(int width, int height); + NvEncoder(); ~NvEncoder(); void SetRate(uint32 rate); - void UpdateSettings(); - void EncodeFrame(); + void UpdateSettings(int width, int height); + void EncodeFrame(int width, int height); bool IsSupported() const { return isNvEncoderSupported; } void SetIdrFrame() { isIdrFrame = true; } uint64 GetCurrentFrameCount() { return frameCount; } - sigslot::signal1&> CaptureFrame; + void InitEncoder(int width, int height, int _bitRate); void InitEncoderResources(); - + void* getRenderTexture() { return nvEncoderTexture; } + int getEncodeWidth() { return encodeWidth; } + int getEncodeHeight() { return encodeHeight; } + int getBitRate() { return bitRate; } + static void DestroyEncoderTexture(); + private: + static UnityFrameBuffer* getEncoderTexture(int width, int height); private: - void LoadNvEncApi(); void ReleaseFrameInputBuffer(Frame& frame); void ReleaseEncoderResources(); void ProcessEncodedFrame(Frame& frame); @@ -54,12 +89,15 @@ namespace WebRTC NV_ENC_CONFIG nvEncConfig = {}; _NVENCSTATUS errorCode; Frame bufferedFrames[bufferedFrameNum]; + static std::list nvEncoderInputTextureList; + UnityFrameBuffer* nvEncoderTexture; uint64 frameCount = 0; void* pEncoderInterface = nullptr; bool isNvEncoderSupported = false; + bool isInitialize = false; bool isIdrFrame = false; - int width = 1920; - int height = 1080; + int encodeWidth; + int encodeHeight; //10Mbps int bitRate = 10000000; //100Mbps diff --git a/Plugin/WebRTCPlugin/UnityEncoder.cpp b/Plugin/WebRTCPlugin/UnityEncoder.cpp new file mode 100644 index 000000000..52c58150e --- /dev/null +++ b/Plugin/WebRTCPlugin/UnityEncoder.cpp @@ -0,0 +1,13 @@ +#include "pch.h" +#include "UnityEncoder.h" + +namespace WebRTC +{ + UnityEncoder::UnityEncoder() + { + } + + UnityEncoder::~UnityEncoder() + { + } +} diff --git a/Plugin/WebRTCPlugin/UnityEncoder.h b/Plugin/WebRTCPlugin/UnityEncoder.h new file mode 100644 index 000000000..ecc80a058 --- /dev/null +++ 
b/Plugin/WebRTCPlugin/UnityEncoder.h @@ -0,0 +1,25 @@ +#pragma once + +namespace WebRTC +{ + class UnityEncoder + { + public: + UnityEncoder(); + virtual ~UnityEncoder(); + sigslot::signal1&> captureFrame; + virtual void SetRate(uint32 rate) = 0; + virtual void UpdateSettings(int width, int height) = 0; + virtual void EncodeFrame(int width, int height) = 0; + virtual bool IsSupported() const = 0; + virtual void SetIdrFrame() = 0; + virtual uint64 GetCurrentFrameCount() = 0; + virtual void InitEncoder(int width, int height, int _bitRate) = 0; + virtual void InitEncoderResources() = 0; + virtual void* getRenderTexture() = 0; + virtual int getEncodeWidth() = 0; + virtual int getEncodeHeight() = 0; + virtual int getBitRate() = 0; + }; +} + diff --git a/Plugin/WebRTCPlugin/NvVideoCapturer.cpp b/Plugin/WebRTCPlugin/UnityVideoCapturer.cpp similarity index 55% rename from Plugin/WebRTCPlugin/NvVideoCapturer.cpp rename to Plugin/WebRTCPlugin/UnityVideoCapturer.cpp index 9e9a43012..2383a7897 100644 --- a/Plugin/WebRTCPlugin/NvVideoCapturer.cpp +++ b/Plugin/WebRTCPlugin/UnityVideoCapturer.cpp @@ -1,23 +1,22 @@ #include "pch.h" -#include "NvVideoCapturer.h" +#include "UnityVideoCapturer.h" namespace WebRTC { - NvVideoCapturer::NvVideoCapturer() + UnityVideoCapturer::UnityVideoCapturer(UnityEncoder* pEncoder, int _width, int _height) : nvEncoder(pEncoder), width(_width), height(_height) { set_enable_video_adapter(false); SetSupportedFormats(std::vector(1, cricket::VideoFormat(width, height, cricket::VideoFormat::FpsToInterval(framerate), cricket::FOURCC_H264))); } - void NvVideoCapturer::EncodeVideoData() + void UnityVideoCapturer::EncodeVideoData() { if (captureStarted && !captureStopped) { - int curFrameNum = nvEncoder->GetCurrentFrameCount() % bufferedFrameNum; - context->CopyResource(renderTextures[curFrameNum], unityRT); - nvEncoder->EncodeFrame(); + context->CopyResource((ID3D11Resource*)nvEncoder->getRenderTexture(), unityRT); + nvEncoder->EncodeFrame(width, height); } } - void NvVideoCapturer::CaptureFrame(std::vector& data) + void UnityVideoCapturer::CaptureFrame(std::vector& data) { rtc::scoped_refptr buffer = new rtc::RefCountedObject(width, height, data); int64 timestamp = rtc::TimeMillis(); @@ -25,23 +24,22 @@ namespace WebRTC videoFrame.set_ntp_time_ms(timestamp); OnFrame(videoFrame, width, height); } - void NvVideoCapturer::StartEncoder() + void UnityVideoCapturer::StartEncoder() { captureStarted = true; SetKeyFrame(); } - void NvVideoCapturer::SetKeyFrame() + void UnityVideoCapturer::SetKeyFrame() { nvEncoder->SetIdrFrame(); } - void NvVideoCapturer::SetRate(uint32 rate) + void UnityVideoCapturer::SetRate(uint32 rate) { nvEncoder->SetRate(rate); } - void NvVideoCapturer::InitializeEncoder(int32 width, int32 height) + void UnityVideoCapturer::InitializeEncoder() { - nvEncoder = std::make_unique(width, height); - nvEncoder->CaptureFrame.connect(this, &NvVideoCapturer::CaptureFrame); + nvEncoder->captureFrame.connect(this, &UnityVideoCapturer::CaptureFrame); } } diff --git a/Plugin/WebRTCPlugin/NvVideoCapturer.h b/Plugin/WebRTCPlugin/UnityVideoCapturer.h similarity index 90% rename from Plugin/WebRTCPlugin/NvVideoCapturer.h rename to Plugin/WebRTCPlugin/UnityVideoCapturer.h index 5686f2216..11777dcce 100644 --- a/Plugin/WebRTCPlugin/NvVideoCapturer.h +++ b/Plugin/WebRTCPlugin/UnityVideoCapturer.h @@ -1,12 +1,12 @@ #pragma once -#include "NvEncoder.h" +#include "UnityEncoder.h" namespace WebRTC { - class NvVideoCapturer : public cricket::VideoCapturer + class UnityVideoCapturer : public 
cricket::VideoCapturer { public: - NvVideoCapturer(); + UnityVideoCapturer(UnityEncoder* pEncoder, int _width, int _height); void EncodeVideoData(); // Start the video capturer with the specified capture format. virtual cricket::CaptureState Start(const cricket::VideoFormat& Format) override @@ -17,7 +17,6 @@ namespace WebRTC virtual void Stop() override { captureStopped = true; - nvEncoder.reset(); } // Check if the video capturer is running. virtual bool IsRunning() override @@ -31,7 +30,7 @@ namespace WebRTC return false; } void StartEncoder(); - void InitializeEncoder(int32 width, int32 height); + void InitializeEncoder(); void SetKeyFrame(); void SetRate(uint32 rate); void CaptureFrame(std::vector& data); @@ -46,11 +45,11 @@ namespace WebRTC fourccs->push_back(cricket::FOURCC_H264); return true; } - std::unique_ptr nvEncoder; + UnityEncoder* nvEncoder; //just fake info - const int32 width = 1280; - const int32 height = 720; + int32 width; + int32 height; const int32 framerate = 60; bool captureStarted = false; diff --git a/Plugin/WebRTCPlugin/WebRTCPlugin.cpp b/Plugin/WebRTCPlugin/WebRTCPlugin.cpp index 9ce258472..71c141e3f 100644 --- a/Plugin/WebRTCPlugin/WebRTCPlugin.cpp +++ b/Plugin/WebRTCPlugin/WebRTCPlugin.cpp @@ -29,11 +29,21 @@ namespace WebRTC extern "C" { - UNITY_INTERFACE_EXPORT webrtc::MediaStreamInterface* CaptureVideoStream(Context* context, UnityFrameBuffer* rt, int32 width, int32 height) + UNITY_INTERFACE_EXPORT webrtc::MediaStreamInterface* CreateMediaStream(Context* context, const char* label) { - context->InitializeEncoder(width, height); - return context->CreateVideoStream(rt); + return context->CreateMediaStream(label); } + + UNITY_INTERFACE_EXPORT webrtc::MediaStreamTrackInterface* CreateVideoTrack(Context* context, const char* label, UnityFrameBuffer* frameBuffer, int32 width, int32 height, int32 bitRate) + { + return context->CreateVideoTrack(label, frameBuffer, width, height, bitRate); + } + + UNITY_INTERFACE_EXPORT webrtc::MediaStreamTrackInterface* CreateAudioTrack(Context* context, const char* label) + { + return context->CreateAudioTrack(label); + } + //TODO: Multi-track support UNITY_INTERFACE_EXPORT void StopMediaStreamTrack(webrtc::MediaStreamTrackInterface* track) { @@ -45,11 +55,6 @@ extern "C" return ContextManager::GetNvEncSupported(); } - UNITY_INTERFACE_EXPORT webrtc::MediaStreamInterface* CaptureAudioStream(Context* context) - { - return context->CreateAudioStream(); - } - UNITY_INTERFACE_EXPORT void MediaStreamAddTrack(webrtc::MediaStreamInterface* stream, webrtc::MediaStreamTrackInterface* track) { if (track->kind() == "audio") @@ -61,6 +66,7 @@ extern "C" stream->AddTrack((webrtc::VideoTrackInterface*)track); } } + UNITY_INTERFACE_EXPORT void MediaStreamRemoveTrack(webrtc::MediaStreamInterface* stream, webrtc::MediaStreamTrackInterface* track) { if (track->kind() == "audio") @@ -184,9 +190,9 @@ extern "C" obj->Close(); ContextManager::GetInstance()->curContext->DeleteClient(id); } - UNITY_INTERFACE_EXPORT webrtc::RtpSenderInterface* PeerConnectionAddTrack(PeerConnectionObject* obj, webrtc::MediaStreamTrackInterface* track) + UNITY_INTERFACE_EXPORT webrtc::RtpSenderInterface* PeerConnectionAddTrack(PeerConnectionObject* obj, webrtc::MediaStreamTrackInterface* track, const char* mediaStreamId) { - return obj->connection->AddTrack(rtc::scoped_refptr (track), { "unity" }).value().get(); + return obj->connection->AddTrack(rtc::scoped_refptr (track), { mediaStreamId }).value().get(); } UNITY_INTERFACE_EXPORT void 
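    // Note on PeerConnectionAddTrack above: AddTrack(...).value().get() assumes the add
    // succeeds; if libwebrtc returns an error, the RTCErrorOr holds no sender and
    // value() is invalid, so checking ok() first would be safer.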
PeerConnectionRemoveTrack(PeerConnectionObject* obj, webrtc::RtpSenderInterface* sender) diff --git a/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj b/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj index 54636a0a8..e295f9077 100644 --- a/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj +++ b/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj @@ -177,7 +177,8 @@ - + + @@ -190,7 +191,8 @@ - + + Create Create diff --git a/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj.filters b/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj.filters index 0a0d2a3a4..d3885ee91 100644 --- a/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj.filters +++ b/Plugin/WebRTCPlugin/WebRTCPlugin.vcxproj.filters @@ -42,9 +42,6 @@ Header Files - - Header Files - Header Files @@ -60,6 +57,12 @@ Header Files\Unity + + Header Files + + + Header Files + @@ -83,9 +86,6 @@ Source Files - - Source Files - Source Files @@ -95,5 +95,11 @@ Source Files + + Source Files + + + Source Files + \ No newline at end of file diff --git a/Plugin/WebRTCPlugin/pch.h b/Plugin/WebRTCPlugin/pch.h index 0be103dff..231ba92ba 100644 --- a/Plugin/WebRTCPlugin/pch.h +++ b/Plugin/WebRTCPlugin/pch.h @@ -91,8 +91,7 @@ namespace WebRTC using int32 = signed int; using int64 = signed long long; - const uint32 bufferedFrameNum = 3; - extern UnityFrameBuffer* renderTextures[bufferedFrameNum]; + const uint32 bufferedFrameNum = 1; extern ID3D11DeviceContext* context; extern ID3D11Device* g_D3D11Device; } diff --git a/WebApp/public/scripts/app.js b/WebApp/public/scripts/app.js index 200b276b1..ff55c0bd3 100644 --- a/WebApp/public/scripts/app.js +++ b/WebApp/public/scripts/app.js @@ -24,11 +24,20 @@ function onClickPlayButton() { const playerDiv = document.getElementById('player'); // add video player - const elementVideo = document.createElement('video'); - elementVideo.id = 'Video'; - elementVideo.style.touchAction = 'none'; - playerDiv.appendChild(elementVideo); - setupVideoPlayer(elementVideo).then(value => videoPlayer = value); + let elementVideos = []; + for (let i=0; i<2; i++) + { + const elementVideo = document.createElement('video'); + elementVideo.id = "Video"+i; + elementVideo.style.touchAction = 'none'; + playerDiv.appendChild(elementVideo); + + elementVideos.push(elementVideo); + } + + + setupVideoPlayer(elementVideos).then(value => videoPlayer = value); + // add green button const elementBlueButton = document.createElement('button'); @@ -74,15 +83,117 @@ function onClickPlayButton() { elementFullscreenButton.style.display = 'block'; } } + } -async function setupVideoPlayer(element, config) { - const videoPlayer = new VideoPlayer(element, config); +function setupMediaSelector(options) +{ + const playerDiv = document.getElementById('player'); + let mediaSelectDiv = document.createElement("div"); + mediaSelectDiv.id = "mediaSelect"; + mediaSelectDiv.setAttribute("style", "width:200px;"); + mediaSelectDiv.className = "custom-select"; + playerDiv.appendChild(mediaSelectDiv); + const mediaSelect = document.createElement("select"); + mediaSelectDiv.appendChild(mediaSelect); + let index = 0; + options.forEach(option=>{ + let optionItem = document.createElement("Option"); + optionItem.value = index++; + optionItem.innerHTML = option; + mediaSelect.appendChild(optionItem); + }) + + + let customSelects, selElmnt; + /*look for any elements with the class "custom-select":*/ + customSelects = document.getElementsByClassName("custom-select"); + for (let i = 0; i < customSelects.length; i++) { + selElmnt = customSelects[i].getElementsByTagName("select")[0]; + /*for each element, create a new DIV that will act as the 
diff --git a/WebApp/public/scripts/app.js b/WebApp/public/scripts/app.js
index 200b276b1..ff55c0bd3 100644
--- a/WebApp/public/scripts/app.js
+++ b/WebApp/public/scripts/app.js
@@ -24,11 +24,20 @@ function onClickPlayButton() {
   const playerDiv = document.getElementById('player');

   // add video player
-  const elementVideo = document.createElement('video');
-  elementVideo.id = 'Video';
-  elementVideo.style.touchAction = 'none';
-  playerDiv.appendChild(elementVideo);
-  setupVideoPlayer(elementVideo).then(value => videoPlayer = value);
+  let elementVideos = [];
+  for (let i=0; i<2; i++)
+  {
+    const elementVideo = document.createElement('video');
+    elementVideo.id = "Video"+i;
+    elementVideo.style.touchAction = 'none';
+    playerDiv.appendChild(elementVideo);
+
+    elementVideos.push(elementVideo);
+  }
+
+
+  setupVideoPlayer(elementVideos).then(value => videoPlayer = value);
+
   // add green button
   const elementBlueButton = document.createElement('button');
@@ -74,15 +83,117 @@ function onClickPlayButton() {
       elementFullscreenButton.style.display = 'block';
     }
   }
+
 }

-async function setupVideoPlayer(element, config) {
-  const videoPlayer = new VideoPlayer(element, config);
+function setupMediaSelector(options)
+{
+  const playerDiv = document.getElementById('player');
+  let mediaSelectDiv = document.createElement("div");
+  mediaSelectDiv.id = "mediaSelect";
+  mediaSelectDiv.setAttribute("style", "width:200px;");
+  mediaSelectDiv.className = "custom-select";
+  playerDiv.appendChild(mediaSelectDiv);
+  const mediaSelect = document.createElement("select");
+  mediaSelectDiv.appendChild(mediaSelect);
+  let index = 0;
+  options.forEach(option=>{
+    let optionItem = document.createElement("Option");
+    optionItem.value = index++;
+    optionItem.innerHTML = option;
+    mediaSelect.appendChild(optionItem);
+  })
+
+  let customSelects, selElmnt;
+  /*look for any elements with the class "custom-select":*/
+  customSelects = document.getElementsByClassName("custom-select");
+  for (let i = 0; i < customSelects.length; i++) {
+    selElmnt = customSelects[i].getElementsByTagName("select")[0];
+    /*for each element, create a new DIV that will act as the selected item:*/
+    let a = document.createElement("DIV");
+    a.setAttribute("class", "select-selected");
+    a.innerHTML = selElmnt.options[selElmnt.selectedIndex].innerHTML;
+    customSelects[i].appendChild(a);
+    /*for each element, create a new DIV that will contain the option list:*/
+    let b = document.createElement("DIV");
+    b.setAttribute("class", "select-items select-hide");
+    for (let j = 1; j < selElmnt.length; j++) {
+      /*for each option in the original select element,
+      create a new DIV that will act as an option item:*/
+      let c = document.createElement("DIV");
+      c.innerHTML = selElmnt.options[j].innerHTML;
+      c.addEventListener("click", function(e) {
+        /*when an item is clicked, update the original select box,
+        and the selected item:*/
+        let y, i, k, s, h;
+        s = this.parentNode.parentNode.getElementsByTagName("select")[0];
+
+        videoPlayer.selectMediaStream(this.innerHTML);
+        console.log(this.innerHTML);
+
+        h = this.parentNode.previousSibling;
+        for (i = 0; i < s.length; i++) {
+          if (s.options[i].innerHTML == this.innerHTML) {
+            s.selectedIndex = i;
+            h.innerHTML = this.innerHTML;
+            y = this.parentNode.getElementsByClassName("same-as-selected");
+            for (k = 0; k < y.length; k++) {
+              y[k].removeAttribute("class");
+            }
+            this.setAttribute("class", "same-as-selected");
+            break;
+          }
+        }
+        h.click();
+      });
+      b.appendChild(c);
+    }
+    customSelects[i].appendChild(b);
+    a.addEventListener("click", function(e) {
+      /*when the select box is clicked, close any other select boxes,
+      and open/close the current select box:*/
+      e.stopPropagation();
+      closeAllSelect(this);
+      this.nextSibling.classList.toggle("select-hide");
+      this.classList.toggle("select-arrow-active");
+    });
+  }
+  function closeAllSelect(elmnt) {
+    /*a function that will close all select boxes in the document,
+    except the current select box:*/
+    var x, y, i, arrNo = [];
+    x = document.getElementsByClassName("select-items");
+    y = document.getElementsByClassName("select-selected");
+    for (i = 0; i < y.length; i++) {
+      if (elmnt == y[i]) {
+        arrNo.push(i)
+      } else {
+        y[i].classList.remove("select-arrow-active");
+      }
+    }
+    for (i = 0; i < x.length; i++) {
+      if (arrNo.indexOf(i)) {
+        x[i].classList.add("select-hide");
+      }
+    }
+  }
+  /*if the user clicks anywhere outside the select box,
+  then close all select boxes:*/
+  document.addEventListener("click", closeAllSelect);
+}
+
+async function setupVideoPlayer(elements, config) {
+  const videoPlayer = new VideoPlayer(elements, config);
   await videoPlayer.setupConnection();

   videoPlayer.ondisconnect = onDisconnect;
+  videoPlayer.onaddtrackfinish = onAddTrackFinish;
   registerKeyboardEvents(videoPlayer);
-  registerMouseEvents(videoPlayer, element);
+
+  elements.forEach(element=>{
+    registerMouseEvents(videoPlayer, element);
+  });

   return videoPlayer;
 }
@@ -94,6 +205,17 @@ function onDisconnect() {
   showPlayButton();
 }

+function onAddTrackFinish(mediaStreams) {
+
+  let options = ["Select a media"];
+  for (let i=0; i<mediaStreams.length; i++) {
+    options.push(mediaStreams[i].id);
+  }
+  setupMediaSelector(options);
+}
+
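In video-player.js below, the receive side pre-allocates a video/audio transceiver pair per expected stream so the browser's SDP offer reserves an m-line for every track the sender will publish, and the ontrack handler collects distinct incoming MediaStreams until the expected count has arrived before notifying the UI. A condensed sketch of that pattern; EXPECTED_STREAMS and onAllStreamsReady are illustrative names (the PR hard-codes two streams and uses its own onaddtrackfinish callback):

// Assumption: the sender publishes exactly two audio+video streams.
const EXPECTED_STREAMS = 2;

const pc = new RTCPeerConnection({ bundlePolicy: 'max-bundle' });
for (let i = 0; i < EXPECTED_STREAMS; i++) {
  pc.addTransceiver('video'); // reserve one video m-line per stream
  pc.addTransceiver('audio'); // and one audio m-line per stream
}

const streams = [];
pc.ontrack = (e) => {
  const stream = e.streams[0];
  // ontrack fires once per track (audio and video), so dedupe by stream.
  if (!streams.includes(stream)) {
    streams.push(stream);
    if (streams.length === EXPECTED_STREAMS) {
      onAllStreamsReady(streams); // all expected streams have arrived
    }
  }
};

// Illustrative callback: hand the collected streams to the UI layer.
function onAllStreamsReady(list) {
  list.forEach((s, i) => console.log('stream', i, 'ready:', s.id));
}

Reserving transceivers up front avoids renegotiation when the remote tracks arrive, and max-bundle keeps all m-lines on a single transport, matching the bundle policy set on the sender.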
diff --git a/WebApp/public/scripts/video-player.js b/WebApp/public/scripts/video-player.js
--- a/WebApp/public/scripts/video-player.js
+++ b/WebApp/public/scripts/video-player.js
@@ ... @@ export class VideoPlayer {
-  constructor(element, config) {
-    this.video = element;
+  constructor(elements, config) {
+    this.videos = elements;
+    this.UnityStreams = [];
+    this.UnityStreamCount = 2;
+    this.videos.forEach(v=>{
+      v.playsInline = true;
+      v.addEventListener('loadedmetadata', function () {
+        v.play();
+      }, true);
+    })
+
     this.interval = 3000;
     this.signaling = new Signaling();
     this.ondisconnect = function(){};
+    this.onaddtrackfinish = function (mediaStreams) {};
     this.sleep = msec => new Promise(resolve => setTimeout(resolve, msec));
   }
@@ -27,6 +34,7 @@
     }
     config.sdpSemantics = 'unified-plan';
     config.iceServers = [{urls: ['stun:stun.l.google.com:19302']}];
+    config.bundlePolicy = "max-bundle";
     return config;
   }
@@ -48,6 +56,13 @@
     // Create peerConnection with proxy server and set up handlers
     this.pc = new RTCPeerConnection(this.cfg);
+
+    this.pc.addTransceiver("video");
+    this.pc.addTransceiver("audio");
+    this.pc.addTransceiver("video");
+    this.pc.addTransceiver("audio");
+
+
     this.pc.onsignalingstatechange = function (e) {
       console.log('signalingState changed:', e);
     };
@@ -61,10 +76,32 @@
     this.pc.onicegatheringstatechange = function (e) {
       console.log('iceGatheringState changed:', e);
     };
+    let tempCount = 0;
     this.pc.ontrack = function (e) {
+      console.log('New track added: ', e.streams);
-      _this.video.srcObject = e.streams[0];
+      console.log(e.track);
+
+      if (_this.UnityStreams.indexOf(e.streams[0])==-1)
+      {
+        _this.UnityStreams.push(e.streams[0]);
+        if ( _this.UnityStreamCount==_this.UnityStreams.length )
+        {
+          _this.onaddtrackfinish(_this.UnityStreams);
+        }
+      }
     };
+
+    _this.videos[0].onresize = function () {
+      console.log("video 0 width:=" + _this.videos[0].videoWidth);
+      console.log("video 0 height:=" + _this.videos[0].videoHeight);
+    }
+
+    _this.videos[1].onresize = function () {
+      console.log("video 1 width:=" + _this.videos[1].videoWidth);
+      console.log("video 1 height:=" + _this.videos[1].videoHeight);
+    }
+
     this.pc.onicecandidate = function (e) {
       if(e.candidate != null) {
         _this.signaling.sendCandidate(_this.sessionId, _this.connectionId, e.candidate.candidate, e.candidate.sdpMid, e.candidate.sdpMLineIndex);
@@ -92,6 +129,7 @@
     await this.createConnection();
     // set local sdp
     offer.sdp = offer.sdp.replace(/useinbandfec=1/, 'useinbandfec=1;stereo=1;maxaveragebitrate=1048576');
+
     const desc = new RTCSessionDescription({sdp:offer.sdp, type:"offer"});
     await this.pc.setLocalDescription(desc);
     await this.sendOffer(offer);
@@ -124,6 +162,7 @@
     if(answers.length > 0) {
       const answer = answers[0];
       await this.setAnswer(sessionId, answer.sdp);
+
     }
     await this.sleep(interval);
   }
@@ -162,6 +201,18 @@
     }
   };
+
+  selectMediaStream(streamId){
+
+    for (let i=0; i