Esempio n. 1
0
        public IEnumerator MediaStreamTrackEnabled()
        {
            // Back the video track with a small render texture.
            const int size = 256;
            var textureFormat = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            var renderTexture = new RenderTexture(size, size, 0, textureFormat);
            renderTexture.Create();

            var videoTrack = new VideoStreamTrack("video", renderTexture);
            Assert.NotNull(videoTrack);

            // Give the encoder time to initialize on the render thread.
            yield return new WaitForSeconds(0.1f);

            // A new track starts enabled and can be toggled off.
            Assert.True(videoTrack.Enabled);
            videoTrack.Enabled = false;
            Assert.False(videoTrack.Enabled);

            // A freshly created (not ended) track reports the Live state.
            Assert.AreEqual(videoTrack.ReadyState, TrackState.Live);

            videoTrack.Dispose();
            // Give the native side time to tear the track down.
            yield return new WaitForSeconds(0.1f);

            Object.DestroyImmediate(renderTexture);
        }
Esempio n. 2
0
        public IEnumerator MediaStreamAddTrack()
        {
            // Texture that backs the local video track.
            const int width = 256, height = 256;
            var fmt = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var texture = new UnityEngine.RenderTexture(width, height, 0, fmt);
            texture.Create();

            var mediaStream = new MediaStream();
            var videoTrack = new VideoStreamTrack("video", texture);

            // Wait for encoder initialization on the render thread.
            yield return new WaitForSeconds(0.1f);

            Assert.AreEqual(TrackKind.Video, videoTrack.Kind);

            // Adding the track makes it visible through GetVideoTracks().
            Assert.AreEqual(0, mediaStream.GetVideoTracks().Count());
            Assert.True(mediaStream.AddTrack(videoTrack));
            Assert.AreEqual(1, mediaStream.GetVideoTracks().Count());
            Assert.NotNull(mediaStream.GetVideoTracks().First());

            // Removing it empties the collection again.
            Assert.True(mediaStream.RemoveTrack(videoTrack));
            Assert.AreEqual(0, mediaStream.GetVideoTracks().Count());

            videoTrack.Dispose();
            // Wait for the native track to be disposed.
            yield return new WaitForSeconds(0.1f);

            mediaStream.Dispose();
            Object.DestroyImmediate(texture);
        }
Esempio n. 3
0
        public void AddTransceiverWithInit()
        {
            var peer      = new RTCPeerConnection();
            var stream    = new MediaStream();
            var direction = RTCRtpTransceiverDirection.SendOnly;
            var width     = 256;
            var height    = 256;
            var format    = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt        = new RenderTexture(width, height, 0, format);

            rt.Create();
            var track = new VideoStreamTrack(rt);
            // Request a send-only transceiver capped at 30 fps, associated with `stream`.
            var init  = new RTCRtpTransceiverInit()
            {
                direction     = direction,
                sendEncodings = new RTCRtpEncodingParameters[] {
                    new RTCRtpEncodingParameters {
                        maxFramerate = 30
                    }
                },
                streams = new MediaStream[] { stream }
            };
            var transceiver = peer.AddTransceiver(track, init);

            Assert.That(transceiver, Is.Not.Null);
            // CurrentDirection stays null until negotiation completes.
            Assert.That(transceiver.CurrentDirection, Is.Null);
            Assert.That(transceiver.Direction, Is.EqualTo(RTCRtpTransceiverDirection.SendOnly));
            Assert.That(transceiver.Sender, Is.Not.Null);

            var parameters = transceiver.Sender.GetParameters();

            Assert.That(parameters, Is.Not.Null);
            Assert.That(parameters.codecs, Is.Not.Null.And.Empty);

            // Fix: dispose the track/stream and destroy the texture; they were
            // leaked before (other tests in this fixture clean these up).
            track.Dispose();
            stream.Dispose();
            peer.Dispose();
            Object.DestroyImmediate(rt);
        }
Esempio n. 4
0
        IEnumerator Start()
        {
            // Bail out early when no camera hardware is available.
            if (WebCamTexture.devices.Length == 0)
            {
                Debug.LogFormat("WebCam device not found");
                yield break;
            }

            // Ask the user for camera permission and wait for the dialog to finish.
            yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

            if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
            {
                Debug.LogFormat("authorization for using the device is denied");
                yield break;
            }

            // deviceIndex and streamingSize are component fields (declared outside this view).
            WebCamDevice userCameraDevice = WebCamTexture.devices[deviceIndex];

            m_webCamTexture = new WebCamTexture(userCameraDevice.name, streamingSize.x, streamingSize.y);
            m_webCamTexture.Play();
            // Wait for the first camera frame so the texture has valid content.
            yield return(new WaitUntil(() => m_webCamTexture.didUpdateThisFrame));

            m_track = new VideoStreamTrack(gameObject.name, m_webCamTexture);
            RenderStreaming.Instance?.AddVideoStreamTrack(m_track);
        }
Esempio n. 5
0
        public void AddTrack()
        {
            // Peer connection plus a render-texture-backed video track.
            var connection = new RTCPeerConnection();
            const int width = 256, height = 256;
            var fmt = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var texture = new UnityEngine.RenderTexture(width, height, 0, fmt);
            texture.Create();

            var videoTrack = new VideoStreamTrack(texture);
            var sender = connection.AddTrack(videoTrack);

            // AddTrack yields a sender referring back to the same track.
            Assert.That(sender, Is.Not.Null);
            Assert.That(videoTrack, Is.EqualTo(sender.Track));

            // Before negotiation the sender has no encodings configured.
            RTCRtpSendParameters sendParameters = sender.GetParameters();
            Assert.That(sendParameters, Is.Not.Null);
            Assert.That(sendParameters.encodings, Is.Empty);

            videoTrack.Dispose();
            connection.Dispose();
            Object.DestroyImmediate(texture);
        }
Esempio n. 6
0
        public IEnumerator VideoStreamAddTrackAndRemoveTrack()
        {
            const int width = 256, height = 256;
            var fmt = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var texture = new UnityEngine.RenderTexture(width, height, 0, fmt);
            texture.Create();

            var mediaStream = new MediaStream();
            var videoTrack = new VideoStreamTrack("video", texture);

            // wait for the end of the initialization for encoder on the render thread.
            yield return 0;

            Assert.That(videoTrack.Kind, Is.EqualTo(TrackKind.Video));

            // The stream starts empty, reflects the added track, then empties again.
            Assert.That(mediaStream.GetVideoTracks(), Has.Count.EqualTo(0));
            Assert.That(mediaStream.AddTrack(videoTrack), Is.True);
            Assert.That(mediaStream.GetVideoTracks(), Has.Count.EqualTo(1));
            Assert.That(mediaStream.GetVideoTracks(), Has.All.Not.Null);
            Assert.That(mediaStream.RemoveTrack(videoTrack), Is.True);
            Assert.That(mediaStream.GetVideoTracks(), Has.Count.EqualTo(0));

            videoTrack.Dispose();
            // wait for disposing video track.
            yield return 0;

            mediaStream.Dispose();
            Object.DestroyImmediate(texture);
        }
    IEnumerator CaptureVideoStart()
    {
        // Camera path: stream the scene camera instead of a webcam.
        if (!useWebCamToggle.isOn)
        {
            videoStreamTrack    = cam.CaptureStreamTrack(1280, 720, 1000000);
            sourceImage.texture = cam.targetTexture;
            yield break;
        }

        if (WebCamTexture.devices.Length == 0)
        {
            Debug.LogFormat("WebCam device not found");
            yield break;
        }

        // Ask for camera permission and wait for the user's answer.
        yield return(Application.RequestUserAuthorization(UserAuthorization.WebCam));

        if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            Debug.LogFormat("authorization for using the device is denied");
            yield break;
        }

        // Use the device the user picked in the dropdown UI.
        WebCamDevice userCameraDevice = WebCamTexture.devices[webCamLListDropdown.value];

        webCamTexture = new WebCamTexture(userCameraDevice.name, 1280, 720, 30);
        webCamTexture.Play();
        // Wait for the first camera frame before wrapping the texture in a track.
        yield return(new WaitUntil(() => webCamTexture.didUpdateThisFrame));

        videoStreamTrack    = new VideoStreamTrack("video", webCamTexture);
        sourceImage.texture = webCamTexture;
    }
    private void HangUp()
    {
        // Stop the webcam first so it releases the hardware device.
        if (webCamTexture != null)
        {
            webCamTexture.Stop();
            webCamTexture = null;
        }

        // Fix: null-conditional guards. HangUp can run before the track or the
        // peers exist, and the unconditional calls threw NullReferenceException.
        // (The sibling HangUp handler in this project already uses `?.`.)
        videoStreamTrack?.Dispose();
        videoStreamTrack = null;

        _pc1?.Close();
        _pc2?.Close();
        Debug.Log("Close local/remote peer connection");
        _pc1?.Dispose();
        _pc2?.Dispose();
        _pc1 = null;
        _pc2 = null;

        // Restore the UI to its idle (pre-call) state.
        sourceImage.texture              = null;
        receiveImage.texture             = null;
        useWebCamToggle.interactable     = true;
        webCamLListDropdown.interactable = useWebCamToggle.isOn;
        callButton.interactable          = true;
        hangUpButton.interactable        = false;
        addTracksButton.interactable     = false;
        removeTracksButton.interactable  = false;
    }
Esempio n. 9
0
        public IEnumerator VideoStreamTrackEnabled()
        {
            const int size = 256;
            var textureFormat = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            var renderTexture = new RenderTexture(size, size, 0, textureFormat);
            renderTexture.Create();

            var videoTrack = new VideoStreamTrack("video", renderTexture);
            Assert.NotNull(videoTrack);

            // wait for the end of the initialization for encoder on the render thread.
            yield return 0;

            // todo:: returns always false.
            // Assert.True(track.IsInitialized);

            // A new track is enabled by default and can be switched off.
            Assert.True(videoTrack.Enabled);
            videoTrack.Enabled = false;
            Assert.False(videoTrack.Enabled);

            // A live (not yet ended) track reports TrackState.Live.
            Assert.AreEqual(videoTrack.ReadyState, TrackState.Live);

            videoTrack.Dispose();

            // wait for disposing video track.
            yield return 0;

            Object.DestroyImmediate(renderTexture);
        }
Esempio n. 10
0
        public IEnumerator OnAddTrackDelegatesWithEvent()
        {
            // Capture a stream from a scene camera to drive the signaling peers.
            var camObj      = new GameObject("Camera");
            var cam         = camObj.AddComponent <Camera>();
            var videoStream = cam.CaptureStream(1280, 720, 1000000);

            yield return(new WaitForSeconds(0.1f));

            var test = new MonoBehaviourTest <SignalingPeers>();

            test.component.SetStream(videoStream);
            yield return(test);

            test.component.CoroutineUpdate();
            yield return(new WaitForSeconds(0.1f));

            // Flags flipped by the OnAddTrack/OnRemoveTrack delegates below.
            bool isCalledOnAddTrack    = false;
            bool isCalledOnRemoveTrack = false;

            videoStream.OnAddTrack = e =>
            {
                Assert.That(e.Track, Is.Not.Null);
                isCalledOnAddTrack = true;
            };
            videoStream.OnRemoveTrack = e =>
            {
                Assert.That(e.Track, Is.Not.Null);
                isCalledOnRemoveTrack = true;
            };

            // A second track, backed by its own render texture.
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var track2 = new VideoStreamTrack("video2", rt);

            // Adding/removing the track must fire each delegate within the timeout.
            videoStream.AddTrack(track2);
            var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);

            yield return(op1);

            videoStream.RemoveTrack(track2);
            var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);

            yield return(op2);

            test.component.Dispose();
            track2.Dispose();
            // wait for disposing video track.
            yield return(0);

            videoStream.Dispose();
            Object.DestroyImmediate(camObj);
            Object.DestroyImmediate(rt);
        }
Esempio n. 11
0
        public IEnumerator VideoReceive()
        {
            var config = new RTCConfiguration
            {
                iceServers = new[] { new RTCIceServer {
                                         urls = new[] { "stun:stun.l.google.com:19302" }
                                     } }
            };
            var pc1           = new RTCPeerConnection(ref config);
            var pc2           = new RTCPeerConnection(ref config);
            var sendStream    = new MediaStream();
            var receiveStream = new MediaStream();
            VideoStreamTrack receiveVideoTrack = null;
            RenderTexture    receiveImage      = null;

            // When the remote video track arrives, create a texture to decode into.
            receiveStream.OnAddTrack = e =>
            {
                if (e.Track is VideoStreamTrack track)
                {
                    receiveVideoTrack = track;
                    receiveImage      = receiveVideoTrack.InitializeReceiver(640, 320);
                }
            };
            pc2.OnTrack = e => receiveStream.AddTrack(e.Track);

            var camObj = new GameObject("Camera");
            var cam    = camObj.AddComponent <Camera>();

            cam.backgroundColor = Color.red;
            var sendVideoTrack = cam.CaptureStreamTrack(1280, 720, 1000000);

            yield return(new WaitForSeconds(0.1f));

            pc1.AddTrack(sendVideoTrack, sendStream);
            pc2.AddTransceiver(TrackKind.Video);

            yield return(SignalingPeers(pc1, pc2));

            yield return(new WaitUntil(() => receiveVideoTrack != null && receiveVideoTrack.IsDecoderInitialized));

            Assert.NotNull(receiveImage);

            // Push one frame through the encoder and pull one through the decoder.
            sendVideoTrack.Update();
            yield return(new WaitForSeconds(0.1f));

            receiveVideoTrack.UpdateReceiveTexture();
            yield return(new WaitForSeconds(0.1f));

            receiveVideoTrack.Dispose();
            receiveStream.Dispose();
            sendVideoTrack.Dispose();
            sendStream.Dispose();
            pc2.Dispose();
            pc1.Dispose();
            Object.DestroyImmediate(receiveImage);
            // Fix: destroy the camera GameObject too — it was leaked before.
            // (The other VideoReceive overload already destroys it.)
            Object.DestroyImmediate(camObj);
        }
        public IEnumerator VideoReceive()
        {
            const int width  = 256;
            const int height = 256;

            var config = new RTCConfiguration
            {
                iceServers = new[] { new RTCIceServer {
                                         urls = new[] { "stun:stun.l.google.com:19302" }
                                     } }
            };
            var pc1 = new RTCPeerConnection(ref config);
            var pc2 = new RTCPeerConnection(ref config);

            VideoStreamTrack receiveVideoTrack = null;
            Texture          receiveImage      = null;

            // When the remote video track arrives, initialize its receive texture
            // (only once — guarded by IsDecoderInitialized).
            pc2.OnTrack = e =>
            {
                if (e.Track is VideoStreamTrack track && !track.IsDecoderInitialized)
                {
                    receiveVideoTrack = track;
                    receiveImage      = track.InitializeReceiver(width, height);
                }
            };

            var camObj = new GameObject("Camera");
            var cam    = camObj.AddComponent <Camera>();

            cam.backgroundColor = Color.red;
            var sendVideoTrack = cam.CaptureStreamTrack(width, height, 1000000);

            yield return(new WaitForSeconds(0.1f));

            pc1.AddTrack(sendVideoTrack);

            // Run offer/answer + ICE between the two peers.
            yield return(SignalingPeers(pc1, pc2));

            yield return(new WaitUntil(() => receiveVideoTrack != null && receiveVideoTrack.IsDecoderInitialized));

            Assert.That(receiveImage, Is.Not.Null);

            // Push one frame through the encoder and pull one through the decoder.
            sendVideoTrack.Update();
            yield return(new WaitForSeconds(0.1f));

            receiveVideoTrack.UpdateReceiveTexture();
            yield return(new WaitForSeconds(0.1f));

            receiveVideoTrack.Dispose();
            sendVideoTrack.Dispose();
            // wait a frame for the native tracks to be released.
            yield return(0);

            pc2.Dispose();
            pc1.Dispose();
            Object.DestroyImmediate(camObj);
        }
        public void EqualIdWithVideoTrack()
        {
            // A track created from a native handle must report the id it was given.
            var expectedId = Guid.NewGuid().ToString();
            var videoSource = new VideoTrackSource();
            var videoTrack = new VideoStreamTrack(WebRTC.Context.CreateVideoTrack(expectedId, videoSource.self));

            Assert.That(videoTrack, Is.Not.Null);
            Assert.That(videoTrack.Id, Is.EqualTo(expectedId));

            videoTrack.Dispose();
            videoSource.Dispose();
        }
    private void createPeer()
    {
        log(LogLevel.Log, "Create RTCPeerConnection");
        var peerConfig = new RTCConfiguration {
            iceServers = iceServers
        };

        peer = new RTCPeerConnection(ref peerConfig);
        // Wire up all peer-connection callbacks before any negotiation starts.
        peer.OnConnectionStateChange = connectionState =>
        {
            log(LogLevel.Log, $"[OnConnectionStateChange] connectionState: {connectionState}");
        };
        peer.OnDataChannel = channel =>
        {
            dataChannel = channel;
            setupDataChannelEventHandler();
            log(LogLevel.Log, $"[OnDataChannel] label: {channel.Label}");
        };
        peer.OnIceCandidate = candidate =>
        {
            // Forward each local ICE candidate to the remote side via signaling.
            log(LogLevel.Log, $"[OnIceCandidate]");
            log(LogLevel.Log, $">>> Send \"takeCandidate\" Command (iceCandidate: '{candidate.Candidate.Substring(0, 10)} ...')");
            signaling.SendIceCandidate(streamId, candidate.Candidate, candidate.SdpMLineIndex.Value, candidate.SdpMid);
        };
        peer.OnIceGatheringStateChange = state =>
        {
            log(LogLevel.Log, $"[OnIceGatheringStateChange] iceGatheringState: {state}");
        };
        peer.OnNegotiationNeeded = () =>
        {
            log(LogLevel.Log, $"[OnNegotiationNeeded]");
        };
        peer.OnTrack = evt =>
        {
            // Incoming video: decode into a texture and show it on the player display.
            log(LogLevel.Log, $"[OnTrack] kind: {evt.Track.Kind}");
            if (evt.Track is VideoStreamTrack track)
            {
                var texture = track.InitializeReceiver(videoWidth, videoHeight);
                playerDisplay.GetComponent <Renderer>().material.mainTexture = texture;
            }
        };

        var dcOptions = new RTCDataChannelInit();

        log(LogLevel.Log, $"CreateDataChannel label: {dataChannelLabel}");
        dataChannel = peer.CreateDataChannel(dataChannelLabel, dcOptions);
        setupDataChannelEventHandler();
        // Publishers attach their local video track and kick off the offer.
        if (clientType == ClientType.Publisher)
        {
            var videoTrack = new VideoStreamTrack("VideoTrack", videoPlayer.targetTexture);
            peer.AddTrack(videoTrack);
            StartCoroutine(createDesc(RTCSdpType.Offer));
        }
    }
 public Peer(
     ReplaceTrackSample parent,
     bool polite,
     Camera source1,
     Vector2Int size1,
     Camera source2,
     Vector2Int size2)
     : this(parent, polite)
 {
     // Capture one video track per source camera; null cameras are skipped
     // via the null-conditional call, leaving the track field null.
     sourceVideoTrack1 = source1?.CaptureStreamTrack(size1.x, size1.y);
     sourceVideoTrack2 = source2?.CaptureStreamTrack(size2.x, size2.y);
 }
Esempio n. 16
0
        public IEnumerator AddTrackMultiple(TestMode mode)
        {
            MockSignaling.Reset(mode == TestMode.PrivateMode);

            var dependencies = CreateDependencies();
            var target       = new RenderStreamingInternal(ref dependencies);

            bool isStarted = false;

            target.onStart += () => { isStarted = true; };
            yield return(new WaitUntil(() => isStarted));

            Assert.That(isStarted, Is.True);

            var connectionId = "12345";

            // Fix: subscribe BEFORE triggering the action so a synchronously
            // fired callback cannot be missed (same order as the AddTrack test).
            bool isCreatedConnection = false;

            target.onCreatedConnection += _ => { isCreatedConnection = true; };
            target.CreateConnection(connectionId);
            yield return(new WaitUntil(() => isCreatedConnection));

            Assert.That(isCreatedConnection, Is.True);

            var camObj             = new GameObject("Camera");
            var camera             = camObj.AddComponent <Camera>();
            VideoStreamTrack track = camera.CaptureStreamTrack(1280, 720, 0);
            var transceiver1       = target.AddTrack(connectionId, track);

            Assert.That(transceiver1.Direction, Is.EqualTo(RTCRtpTransceiverDirection.SendOnly));

            var camObj2             = new GameObject("Camera2");
            var camera2             = camObj2.AddComponent <Camera>();
            VideoStreamTrack track2 = camera2.CaptureStreamTrack(1280, 720, 0);
            var transceiver2        = target.AddTrack(connectionId, track2);

            Assert.That(transceiver2.Direction, Is.EqualTo(RTCRtpTransceiverDirection.SendOnly));

            // Same subscribe-then-act ordering for the delete notification.
            bool isDeletedConnection = false;

            target.onDeletedConnection += _ => { isDeletedConnection = true; };
            target.DeleteConnection(connectionId);
            yield return(new WaitUntil(() => isDeletedConnection));

            Assert.That(isDeletedConnection, Is.True);

            target.Dispose();
            track.Dispose();
            track2.Dispose();
            UnityEngine.Object.Destroy(camObj);
            UnityEngine.Object.Destroy(camObj2);
        }
Esempio n. 17
0
        public void VideoStreamTrackDisposeImmediately()
        {
            // Creating and immediately disposing a track must not crash or leak.
            const int width = 256, height = 256;
            var fmt = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var texture = new UnityEngine.RenderTexture(width, height, 0, fmt);
            texture.Create();

            var videoTrack = new VideoStreamTrack("video", texture);
            videoTrack.Dispose();

            Object.DestroyImmediate(texture);
        }
Esempio n. 18
0
        private void connectPeer()
        {
            OnLogEvent.Invoke("new RTCPeerConnection", "");
            peer = new RTCPeerConnection(ref peerConfig);
            // Wire up all peer-connection callbacks before any negotiation starts.
            peer.OnConnectionStateChange = connectionState =>
            {
                OnLogEvent.Invoke("OnConnectionStateChange", connectionState.ToString());
            };
            peer.OnDataChannel = channel =>
            {
                dataChannel = channel;
                setupDataChannelEventHandler();
                OnLogEvent.Invoke("OnDataChannel", channel.Label);
            };
            peer.OnIceCandidate = candidate =>
            {
                // Forward each local ICE candidate to the remote side via signaling.
                OnLogEvent.Invoke("OnIceCandidate", "");
                OnLogEvent.Invoke("Send IceCandidate", "");
                signaling.SendIceCandidate(streamId, candidate.Candidate, candidate.SdpMLineIndex.Value, candidate.SdpMid);
            };
            peer.OnIceGatheringStateChange = state =>
            {
                OnLogEvent.Invoke("OnIceGatheringStateChange", state.ToString());
            };
            peer.OnNegotiationNeeded = () =>
            {
                OnLogEvent.Invoke("OnNegotiationNeeded", "");
            };
            peer.OnTrack = evt =>
            {
                // Incoming video: decode into a texture and hand it to listeners.
                OnLogEvent.Invoke("OnTrack", evt.Track.Kind.ToString());
                if (evt.Track is VideoStreamTrack track)
                {
                    var texture = track.InitializeReceiver(videoWidth, videoHeight);
                    OnVideoTrack.Invoke(texture);
                }
            };

            var dcOptions = new RTCDataChannelInit();

            OnLogEvent.Invoke("CreateDataChannel", "testDC");
            dataChannel = peer.CreateDataChannel("testDC", dcOptions);
            setupDataChannelEventHandler();
            // Publishers attach their local video track and kick off the offer.
            if (clientType == ClientType.Publisher)
            {
                var videoTrack = new VideoStreamTrack("VideoTrack", renderTexture);
                peer.AddTrack(videoTrack);
                CoroutineHandler.StartStaticCoroutine(sendDesc(RTCSdpType.Offer));
            }
        }
Esempio n. 19
0
        public void ConstructorThrowsException()
        {
            // R8 is not a supported capture format, so construction must throw.
            const int width = 256, height = 256;
            var texture = new RenderTexture(width, height, 0, RenderTextureFormat.R8);
            texture.Create();

            Assert.Throws <ArgumentException>(() =>
            {
                var unused = new VideoStreamTrack("video", texture);
            });

            Object.DestroyImmediate(texture);
        }
Esempio n. 20
0
        public void Construct()
        {
            // A track built over a supported render texture is created successfully.
            const int size = 256;
            var textureFormat = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            var renderTexture = new RenderTexture(size, size, 0, textureFormat);
            renderTexture.Create();

            var videoTrack = new VideoStreamTrack("video", renderTexture);
            Assert.That(videoTrack, Is.Not.Null);

            videoTrack.Dispose();
            Object.DestroyImmediate(renderTexture);
        }
Esempio n. 21
0
        public IEnumerator VideoStreamAddTrackAndRemoveTrack()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var stream = new MediaStream();
            var track  = new VideoStreamTrack(rt);

            // Flags flipped by the OnAddTrack/OnRemoveTrack delegates below.
            bool isCalledOnAddTrack    = false;
            bool isCalledOnRemoveTrack = false;

            stream.OnAddTrack = e =>
            {
                Assert.That(e.Track, Is.EqualTo(track));
                isCalledOnAddTrack = true;
            };
            stream.OnRemoveTrack = e =>
            {
                Assert.That(e.Track, Is.EqualTo(track));
                isCalledOnRemoveTrack = true;
            };

            // wait for the end of the initialization for encoder on the render thread.
            yield return(0);

            Assert.That(track.Kind, Is.EqualTo(TrackKind.Video));
            // The stream starts empty, reflects the added track, then empties again.
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
            Assert.That(stream.AddTrack(track), Is.True);
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
            Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);
            Assert.That(stream.RemoveTrack(track), Is.True);
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));

            // Both delegates must fire within the 5-second timeout.
            var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);

            yield return(op1);

            var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);

            yield return(op2);

            track.Dispose();

            stream.Dispose();
            Object.DestroyImmediate(rt);
        }
Esempio n. 22
0
        public void AccessAfterDisposed()
        {
            const int size = 256;
            var textureFormat = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            var renderTexture = new RenderTexture(size, size, 0, textureFormat);
            renderTexture.Create();

            var videoTrack = new VideoStreamTrack("video", renderTexture);
            Assert.That(videoTrack, Is.Not.Null);

            // Touching a property after Dispose must raise InvalidOperationException.
            videoTrack.Dispose();
            Assert.That(() => { var id = videoTrack.Id; }, Throws.TypeOf <InvalidOperationException>());

            Object.DestroyImmediate(renderTexture);
        }
Esempio n. 23
0
    public Peer(
        PerfectNegotiationSample parent,
        bool polite,
        Camera source1,
        Camera source2,
        RawImage receive)
    {
        this.parent = parent;
        this.polite = polite;


        var config = GetSelectedSdpSemantics();

        pc = new RTCPeerConnection(ref config);

        // Incoming video: reuse the already-initialized texture, or initialize
        // the receiver on first arrival.
        pc.OnTrack = e =>
        {
            Debug.Log($"{this} OnTrack");
            if (e.Track is VideoStreamTrack video)
            {
                if (video.IsDecoderInitialized)
                {
                    receive.texture = video.Texture;
                    return;
                }

                receive.texture = video.InitializeReceiver(width, height);
            }
        };

        // Route local ICE candidates to the other peer through the parent.
        pc.OnIceCandidate = candidate =>
        {
            var message = new Message {
                candidate = candidate
            };
            this.parent.PostMessage(this, message);
        };

        pc.OnNegotiationNeeded = () =>
        {
            this.parent.StartCoroutine(NegotiationProcess());
        };

        // Pre-capture both source cameras so tracks can be swapped later.
        sourceVideoTrack1 = source1.CaptureStreamTrack(width, height, 0);
        sourceVideoTrack2 = source2.CaptureStreamTrack(width, height, 0);
    }
Esempio n. 24
0
        public void AddTransceiver()
        {
            var connection = new RTCPeerConnection();
            const int width = 256, height = 256;
            var fmt = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var texture = new UnityEngine.RenderTexture(width, height, 0, fmt);
            texture.Create();

            var videoTrack = new VideoStreamTrack(texture);

            // No transceivers exist until one is added explicitly.
            Assert.That(connection.GetTransceivers(), Is.Empty);
            var transceiver = connection.AddTransceiver(videoTrack);

            Assert.That(transceiver, Is.Not.Null);
            // Mid and CurrentDirection stay null before negotiation.
            Assert.That(transceiver.Mid, Is.Null);
            Assert.That(transceiver.CurrentDirection, Is.Null);

            RTCRtpSender sender = transceiver.Sender;
            Assert.That(sender, Is.Not.Null);
            Assert.That(videoTrack, Is.EqualTo(sender.Track));

            RTCRtpSendParameters parameters = sender.GetParameters();
            Assert.That(parameters, Is.Not.Null);
            Assert.That(parameters.encodings, Is.Empty);
            Assert.That(parameters.transactionId, Is.Not.Empty);
            Assert.That(connection.GetTransceivers(), Has.Count.EqualTo(1));
            Assert.That(connection.GetTransceivers().First(), Is.Not.Null);
            Assert.That(parameters.codecs, Is.Empty);
            Assert.That(parameters.rtcp, Is.Not.Null);

            // Some platforms return an empty list
            Assert.That(parameters.headerExtensions, Is.Not.Null);
            foreach (var extension in parameters.headerExtensions)
            {
                Assert.That(extension, Is.Not.Null);
                Assert.That(extension.uri, Is.Not.Empty);
            }

            videoTrack.Dispose();
            connection.Dispose();
            Object.DestroyImmediate(texture);
        }
Esempio n. 25
0
    private void InitializeWebRTC()
    {
        // Software encoder: works everywhere, no hardware codec required.
        WebRTC.Initialize(EncoderType.Software);
        var configuration = GetSelectedSdpSemantics();

        pc = new RTCPeerConnection(ref configuration);
        pc.OnIceCandidate        = OnIceCandidate;
        pc.OnIceConnectionChange = OnIceConnectionChange;

        // cam, width, height are fields on this component (not visible here).
        videoStreamTrack = cam.CaptureStreamTrack(width, height, 1000000);
        pc.AddTrack(videoStreamTrack);

        // Start the WebRTC frame-update coroutine exactly once.
        if (!videoUpdateStarted)
        {
            StartCoroutine(WebRTC.Update());
            videoUpdateStarted = true;
        }
    }
Esempio n. 26
0
        public void GetTransceiversReturnsNotEmptyAfterCallingRemoveTrack()
        {
            // Also, `RTCPeerConnection.AddTrack` and `RTCPeerConnection.RemoveTrack` method is not intuitive.
            var peer   = new RTCPeerConnection();
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var track  = new VideoStreamTrack(rt);
            var sender = peer.AddTrack(track);

            // RemoveTrack detaches the sender's track, but the transceiver that
            // AddTrack created remains on the connection.
            Assert.That(peer.GetTransceivers(), Has.Count.EqualTo(1));
            Assert.That(peer.RemoveTrack(sender), Is.EqualTo(RTCErrorType.None));
            Assert.That(peer.GetTransceivers(), Has.Count.EqualTo(1));

            // Fix: dispose the track and destroy the texture; they were leaked
            // before (other tests in this fixture clean these up).
            track.Dispose();
            peer.Dispose();
            Object.DestroyImmediate(rt);
        }
Esempio n. 27
0
 public void ChangeVideoParameters(VideoStreamTrack track, ulong?bitrate, uint?framerate)
 {
     // Apply the requested caps to every sender currently carrying this track.
     foreach (var sender in m_mapTrackAndSenderList[track])
     {
         RTCRtpSendParameters parameters = sender.GetParameters();
         foreach (var encoding in parameters.Encodings)
         {
             // Only overwrite values the caller actually supplied.
             if (bitrate.HasValue)
             {
                 encoding.maxBitrate = bitrate;
             }
             if (framerate.HasValue)
             {
                 encoding.maxFramerate = framerate;
             }
         }
         sender.SetParameters(parameters);
     }
 }
        private void HangUp()
        {
            // Stop the webcam first so it releases the hardware device.
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                webCamTexture = null;
            }

            // Tear down streams and tracks (null-safe: HangUp may run mid-setup).
            receiveAudioStream?.Dispose();
            receiveAudioStream = null;
            receiveVideoStream?.Dispose();
            receiveVideoStream = null;
            videoStreamTrack?.Dispose();
            videoStreamTrack = null;
            audioStreamTrack?.Dispose();
            audioStreamTrack = null;

            Debug.Log("Close local/remote peer connection");
            _pc1?.Dispose();
            _pc2?.Dispose();
            _pc1 = null;
            _pc2 = null;

            // Clear the preview surfaces and stop audio playback.
            sourceImage.texture = null;
            sourceAudio.Stop();
            sourceAudio.clip = null;
            receiveImage.texture = null;
            receiveAudio.Stop();
            receiveAudio.clip = null;

            // Restore the UI to its idle (pre-call) state.
            useWebCamToggle.interactable = true;
            webCamListDropdown.interactable = useWebCamToggle.isOn;
            useMicToggle.interactable = true;
            micListDropdown.interactable = useMicToggle.isOn;
            callButton.interactable = true;
            hangUpButton.interactable = false;
            addTracksButton.interactable = false;
            removeTracksButton.interactable = false;
        }
Esempio n. 29
0
        public IEnumerator VideoStreamTrackInstantiateMultiple()
        {
            // Two tracks backed by two independent render textures can coexist.
            const int width = 256, height = 256;
            var fmt = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);

            var textureA = new UnityEngine.RenderTexture(width, height, 0, fmt);
            textureA.Create();
            var trackA = new VideoStreamTrack("video1", textureA);

            var textureB = new UnityEngine.RenderTexture(width, height, 0, fmt);
            textureB.Create();
            var trackB = new VideoStreamTrack("video2", textureB);

            // wait for initialization encoder on render thread.
            yield return new WaitForSeconds(0.1f);

            trackA.Dispose();
            trackB.Dispose();
            Object.DestroyImmediate(textureA);
            Object.DestroyImmediate(textureB);
        }
Esempio n. 30
0
        public IEnumerator AddTrack(TestMode mode)
        {
            MockSignaling.Reset(mode == TestMode.PrivateMode);

            var dependencies = CreateDependencies();
            var target       = new RenderStreamingInternal(ref dependencies);

            bool isStarted = false;

            target.onStart += () => { isStarted = true; };
            yield return(new WaitUntil(() => isStarted));

            var  connectionId        = "12345";
            bool isCreatedConnection = false;

            // Subscribe before triggering so a synchronous callback is not missed.
            target.onCreatedConnection += _ => { isCreatedConnection = true; };
            target.CreateConnection(connectionId);
            yield return(new WaitUntil(() => isCreatedConnection));

            // Add then immediately remove a camera-captured track on the connection.
            var camObj             = new GameObject("Camera");
            var camera             = camObj.AddComponent <Camera>();
            VideoStreamTrack track = camera.CaptureStreamTrack(1280, 720, 0);

            target.AddTrack(connectionId, track);
            target.RemoveTrack(connectionId, track);

            bool isDeletedConnection = false;

            // Same subscribe-then-act ordering for the delete notification.
            target.onDeletedConnection += _ => { isDeletedConnection = true; };
            target.DeleteConnection(connectionId);
            yield return(new WaitUntil(() => isDeletedConnection));

            target.Dispose();
            track.Dispose();
            UnityEngine.Object.Destroy(camObj);
        }