Example #1
        /// <summary>
        /// Accesses the local video track, specified by
        /// this.selectedDevice and this.selectedProfile.
        /// MUST NOT BE CALLED FROM THE UI THREAD.
        /// </summary>
        /// <param name="factory"></param>
        /// <returns></returns>
        private IMediaStreamTrack getLocalVideo(IWebRtcFactory factory)
        {
            IReadOnlyList <IConstraint> mandatoryConstraints = new List <IConstraint>()
            {
                new Constraint("maxWidth", this.selectedProfile.Width.ToString()),
                new Constraint("minWidth", this.selectedProfile.Width.ToString()),
                new Constraint("maxHeight", this.selectedProfile.Height.ToString()),
                new Constraint("minHeight", this.selectedProfile.Height.ToString()),
                new Constraint("maxFrameRate", this.selectedProfile.FrameRate.ToString()),
                new Constraint("minFrameRate", this.selectedProfile.FrameRate.ToString())
            };
            IReadOnlyList <IConstraint> optionalConstraints = new List <IConstraint>();
            var mediaConstraints = new MediaConstraints(mandatoryConstraints, optionalConstraints);

            // this will throw a very unhelpful exception if called from the UI thread
            var videoCapturer = VideoCapturer.Create(this.selectedDevice.Name, this.selectedDevice.Id, false);

            var options = new VideoOptions()
            {
                Factory     = factory,
                Capturer    = videoCapturer,
                Constraints = mediaConstraints
            };
            var videoTrackSource = VideoTrackSource.Create(options);

            return(MediaStreamTrack.CreateVideoTrack("LocalVideo", videoTrackSource));
        }
Example #2
        public void CaptureStartsOnActivate()
        {
            // Create the peer connection
            var pc_go = new GameObject("pc1");

            pc_go.SetActive(false); // prevent auto-activation of components
            var pc = pc_go.AddComponent <PeerConnection>();

            pc.AutoInitializeOnStart = false;

            // Batch changes manually
            pc.AutoCreateOfferOnRenegotiationNeeded = false;

            // Create the video track source
            VideoTrackSource source = pc_go.AddComponent <MockVideoSource>();

            // Create the media line
            MediaLine ml = pc.AddMediaLine(MediaKind.Video);

            ml.SenderTrackName = "track_name";

            // Assign the video source to the media line
            ml.Source = source;
            Assert.IsTrue(source.MediaLines.Contains(ml));

            // Add event handlers to check IsStreaming state
            source.VideoStreamStarted.AddListener((IVideoSource self) =>
            {
                // Becomes true *before* this handler by design
                Assert.IsTrue(self.IsStreaming);
            });
            source.VideoStreamStopped.AddListener((IVideoSource self) =>
            {
                // Still true until *after* this handler by design
                Assert.IsTrue(self.IsStreaming);
            });

            // Confirm the source is not capturing yet because the component is inactive
            Assert.IsFalse(source.IsStreaming);

            // Confirm the sender has no track because the component is inactive
            Assert.IsNull(ml.SenderTrack);

            // Activate the game object and the video track source component on it
            pc_go.SetActive(true);

            // Confirm the sender is capturing because the component is now active
            Assert.IsTrue(source.IsStreaming);

            // Confirm the sender still has no track because there's no connection
            Assert.IsNull(ml.SenderTrack);

            // Deactivate the game object and the video track source component on it
            pc_go.SetActive(false);

            // Confirm the source stops streaming
            Assert.IsFalse(source.IsStreaming);
        }
Example #3
 public async Task CreateFromDevice()
 {
     using (VideoTrackSource source = await DeviceVideoTrackSource.CreateAsync())
     {
         Assert.IsNotNull(source);
         Assert.AreEqual(string.Empty, source.Name);
         Assert.AreEqual(0, source.Tracks.Count);
     }
 }
Example #4
        public void EqualIdWithVideoTrack()
        {
            var guid   = Guid.NewGuid().ToString();
            var source = new VideoTrackSource();
            var track  = new VideoStreamTrack(WebRTC.Context.CreateVideoTrack(guid, source.self));

            Assert.That(track, Is.Not.Null);
            Assert.That(track.Id, Is.EqualTo(guid));
            track.Dispose();
            source.Dispose();
        }
Example #5
        public async Task Name()
        {
            using (VideoTrackSource source = await DeviceVideoTrackSource.CreateAsync())
            {
                Assert.IsNotNull(source);

                const string kTestName = "test_video_track_source_name";
                source.Name = kTestName;
                Assert.AreEqual(kTestName, source.Name);
            }
        }
Example #6
        public static PeerConnection AddVideoTransceiver(PeerConnection connection, VideoTrackSource source)
        {
            Console.WriteLine("Added video transceiver to peer connection in thread {0}", Thread.CurrentThread.ManagedThreadId);
            LocalVideoTrack localVideoTrack = LocalVideoTrack.CreateFromSource(source, new LocalVideoTrackInitConfig {
                trackName = "webcam_track"
            });

            Console.WriteLine("Create video transceiver and add webcam track...");
            TransceiverInitSettings option = new TransceiverInitSettings();

            option.Name      = "webcam_track";
            option.StreamIDs = new List <string> {
                "webcam_name"
            };

            Transceiver videoTransceiver = connection.AddTransceiver(MediaKind.Video, option);

            videoTransceiver.DesiredDirection = Transceiver.Direction.SendOnly;
            videoTransceiver.LocalVideoTrack  = localVideoTrack;
            return(connection);
        }
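
A minimal usage sketch for the helper above (hypothetical; assumes signaling is handled elsewhere): open the default webcam as a VideoTrackSource via DeviceVideoTrackSource.CreateAsync, as in example #9, and hand it to AddVideoTransceiver together with an initialized PeerConnection.

        // Hypothetical caller for AddVideoTransceiver.
        public static async Task AttachWebcamAsync()
        {
            var connection = new PeerConnection();
            await connection.InitializeAsync(new PeerConnectionConfiguration());

            // Open the default webcam as a video track source.
            VideoTrackSource videoTrackSource = await DeviceVideoTrackSource.CreateAsync();

            AddVideoTransceiver(connection, videoTrackSource);
        }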
Example #7
        public void TestCapturingVideoDevice()
        {
            try
            {
                VideoSource = WebRTC_Remote_FPGA_stand.Camera.CreateAsync().GetAwaiter().GetResult();

                // Count captured video frames for a short period
                VideoSource.I420AVideoFrameReady += (frame) => { Captured_Frames++; };
                Thread.Sleep(450);
            }
            catch (Exception ex)
            {
                throw new AssertionException($"Low-level problem with the video device: {ex.Message}", ex);
            }
            finally
            {
                // Dispose exactly once, on both the success and failure paths
                VideoSource?.Dispose();
            }
            Assert.IsTrue(Captured_Frames > 0, "Video source did not capture any frames");
            Assert.Pass();
        }
Example #8
        private void GetUserMedia()
        {
            Debug.WriteLine("Getting user media.");

            MediaDevice _selectedVideoDevice = (MediaDevice)Devices.Instance.VideoMediaDevicesList[0];

            for (int i = 0; i < Devices.Instance.VideoMediaDevicesList.Count; i++)
            {
                if (Devices.Instance.VideoMediaDevicesList[i].DisplayName == (string)_localSettings.Values["SelectedCameraName"])
                {
                    _selectedVideoDevice = (MediaDevice)Devices.Instance.VideoMediaDevicesList[i];
                }
            }

            List <int> widths     = new List <int>();
            List <int> heights    = new List <int>();
            List <int> frameRates = new List <int>();

            foreach (var videoFormat in _selectedVideoDevice.VideoFormats)
            {
                widths.Add(videoFormat.Dimension.Width);
                heights.Add(videoFormat.Dimension.Height);

                foreach (var frameRate in videoFormat.FrameRates)
                {
                    frameRates.Add(frameRate);
                }
            }

            // Maximum and minimum values for the selected camera
            IReadOnlyList <IConstraint> mandatoryConstraints = new List <IConstraint>()
            {
                new Constraint("maxWidth", widths.Max().ToString()),
                new Constraint("minWidth", widths.Min().ToString()),
                new Constraint("maxHeight", heights.Max().ToString()),
                new Constraint("minHeight", heights.Min().ToString()),
                new Constraint("maxFrameRate", frameRates.Max().ToString()),
                new Constraint("minFrameRate", frameRates.Min().ToString())
            };

            // Add optional constraints
            IReadOnlyList <IConstraint> optionalConstraints = new List <IConstraint>();

            IMediaConstraints mediaConstraints = new MediaConstraints(mandatoryConstraints, optionalConstraints);

            var videoCapturer = VideoCapturer.Create(_selectedVideoDevice.DisplayName, _selectedVideoDevice.Id, false);

            var videoOptions = new VideoOptions();

            videoOptions.Factory     = _factory;
            videoOptions.Capturer    = videoCapturer;
            videoOptions.Constraints = mediaConstraints;

            var videoTrackSource = VideoTrackSource.Create(videoOptions);

            _selfVideoTrack = MediaStreamTrack.CreateVideoTrack("SELF_VIDEO", videoTrackSource);

            var audioOptions = new AudioOptions();

            audioOptions.Factory = _factory;

            var audioTrackSource = AudioTrackSource.Create(audioOptions);

            _selfAudioTrack = MediaStreamTrack.CreateAudioTrack("SELF_AUDIO", audioTrackSource);
        }
Example #9
        static async Task Main(string[] args)
        {
            Transceiver      audioTransceiver = null;
            Transceiver      videoTransceiver = null;
            AudioTrackSource audioTrackSource = null;
            VideoTrackSource videoTrackSource = null;
            LocalAudioTrack  localAudioTrack  = null;
            LocalVideoTrack  localVideoTrack  = null;

            try
            {
                bool needVideo = Array.Exists(args, arg => (arg == "-v") || (arg == "--video"));
                bool needAudio = Array.Exists(args, arg => (arg == "-a") || (arg == "--audio"));

                // Asynchronously retrieve a list of available video capture devices (webcams).
                var deviceList = await PeerConnection.GetVideoCaptureDevicesAsync();

                // For example, print them to the standard output
                foreach (var device in deviceList)
                {
                    Console.WriteLine($"Found webcam {device.name} (id: {device.id})");
                }

                // Create a new peer connection automatically disposed at the end of the program
                using var pc = new PeerConnection();

                // Initialize the connection with a STUN server to allow remote access
                var config = new PeerConnectionConfiguration
                {
                    IceServers = new List <IceServer> {
                        new IceServer {
                            Urls = { "stun:stun.l.google.com:19302" }
                        }
                    }
                };
                await pc.InitializeAsync(config);

                Console.WriteLine("Peer connection initialized.");

                // Record video from local webcam, and send to remote peer
                if (needVideo)
                {
                    Console.WriteLine("Opening local webcam...");
                    videoTrackSource = await DeviceVideoTrackSource.CreateAsync();

                    Console.WriteLine("Create local video track...");
                    var trackSettings = new LocalVideoTrackInitConfig {
                        trackName = "webcam_track"
                    };
                    localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

                    Console.WriteLine("Create video transceiver and add webcam track...");
                    videoTransceiver = pc.AddTransceiver(MediaKind.Video);
                    videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    videoTransceiver.LocalVideoTrack  = localVideoTrack;
                }

                // Record audio from local microphone, and send to remote peer
                if (needAudio)
                {
                    Console.WriteLine("Opening local microphone...");
                    audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

                    Console.WriteLine("Create local audio track...");
                    var trackSettings = new LocalAudioTrackInitConfig {
                        trackName = "mic_track"
                    };
                    localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

                    Console.WriteLine("Create audio transceiver and add mic track...");
                    audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
                    audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    audioTransceiver.LocalAudioTrack  = localAudioTrack;
                }

                // Setup signaling
                Console.WriteLine("Starting signaling...");
                var signaler = new NamedPipeSignaler.NamedPipeSignaler(pc, "testpipe");
                signaler.SdpMessageReceived += async(SdpMessage message) =>
                {
                    await pc.SetRemoteDescriptionAsync(message);

                    if (message.Type == SdpMessageType.Offer)
                    {
                        pc.CreateAnswer();
                    }
                };
                signaler.IceCandidateReceived += (IceCandidate candidate) =>
                {
                    pc.AddIceCandidate(candidate);
                };
                await signaler.StartAsync();

                // Start peer connection
                pc.Connected       += () => { Console.WriteLine("PeerConnection: connected."); };
                pc.IceStateChanged += (IceConnectionState newState) => { Console.WriteLine($"ICE state: {newState}"); };
                int numFrames = 0;
                pc.VideoTrackAdded += (RemoteVideoTrack track) =>
                {
                    track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
                    {
                        ++numFrames;
                        if (numFrames % 60 == 0)
                        {
                            Console.WriteLine($"Received video frames: {numFrames}");
                        }
                    };
                };
                if (signaler.IsClient)
                {
                    Console.WriteLine("Connecting to remote peer...");
                    pc.CreateOffer();
                }
                else
                {
                    Console.WriteLine("Waiting for offer from remote peer...");
                }

                Console.WriteLine("Press a key to stop recording...");
                Console.ReadKey(true);

                signaler.Stop();
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            localAudioTrack?.Dispose();
            localVideoTrack?.Dispose();
            audioTrackSource?.Dispose();
            videoTrackSource?.Dispose();

            Console.WriteLine("Program terminated.");
        }
Example #10
        static private async Task StartStend()
        {
            var             autoEvent        = new AutoResetEvent(false);
            bool            video_translator = true;
            bool            file_created     = false;
            FileStream      file             = null;
            Quartus         quartus          = Quartus.GetInstance();
            Microcontroller arduino          = Microcontroller.Create();

            if (video_translator)
            {
                // Asynchronously retrieve a list of available video capture devices (webcams).
                var deviceList = await DeviceVideoTrackSource.GetCaptureDevicesAsync();


                // For example, print them to the standard output
                foreach (var device in deviceList)
                {
                    Console.WriteLine($"Found webcam {device.name} (id: {device.id})");
                }
            }

            // Create a new peer connection automatically disposed at the end of the program
            var pc = new PeerConnection();
            // Initialize the connection with a STUN server to allow remote access
            var config = SystemConfiguration.PeerConnectionSettings;


            await pc.InitializeAsync(config);

            Console.WriteLine("Peer connection initialized.");
            //var chen = await pc.AddDataChannelAsync("sendDataChannel", true, true, cancellationToken: default);
            Console.WriteLine("Opening local webcam...");


            // pc - PeerConnection object
            Transceiver                videoTransceiver = null;
            VideoTrackSource           videoTrackSource = null;
            LocalVideoTrack            localVideoTrack  = null;
            LocalVideoDeviceInitConfig c = new LocalVideoDeviceInitConfig();

            await VideoDeviceSelection();

            videoTrackSource = await Camera.CreateAsync(SystemConfiguration.VideoDeviceSettings);


            WebSocketSharp.WebSocket signaling = new WebSocketSharp.WebSocket(CreateSignalingServerUrl(), "id_token", "alpine");
            pc.LocalSdpReadytoSend += (SdpMessage message) =>
            {
                //Console.WriteLine(SdpMessage.TypeToString(message.Type));
                Console.WriteLine(message.Content);
                //Console.WriteLine(HttpUtility.JavaScriptStringEncode(message.Content));
                Console.WriteLine("Sdp offer to send: {\"data\":{\"description\":{\"type\":\"" + SdpMessage.TypeToString(message.Type) + "\",\"sdp\":\"" + HttpUtility.JavaScriptStringEncode(message.Content) + "\"}}}");
                signaling.Send(message.ToABJson());
            };

            pc.RenegotiationNeeded += () =>
            {
                Console.WriteLine("Regotiation needed");
                bool OfferCreated = pc.CreateOffer();
                Console.WriteLine("OfferCreated? {0}", OfferCreated);
            };
            pc.DataChannelAdded += (DataChannel channel) =>
            {
                Console.WriteLine("Added data channel ID: {0}, Label: {1}; Reliable: {2}, Ordered: {3}", channel.ID, channel.Label, channel.Reliable, channel.Ordered);

                if (channel.Label == "sendDataChannel")
                {
                    channel.MessageReceived += (byte[] mess) => {
                        try
                        {
                            CTP_packet command = JsonSerializer.Deserialize <CTP_packet>(mess);
                            Console.WriteLine(arduino.SendCTP_Command(command));
                        }
                        catch (Exception e)
                        {
                            Console.WriteLine(e.Message);
                        }
                    };
                }
                else
                {
                    if (file_created == false)
                    {
                        file         = new FileStream(channel.Label, FileMode.Append);
                        file_created = true;
                    }
                    channel.MessageReceived += async(byte[] mess) =>
                    {
                        // Console.WriteLine(System.Text.Encoding.Default.GetString(mess));
                        if (mess.Length == 3 && System.Text.Encoding.Default.GetString(mess) == "EOF")
                        {
                            string file_name = file.Name;
                            file.Close();
                            string t = await quartus.RunQuartusCommandAsync($"quartus_pgm -m jtag -o \"p;{file_name}@1\"");

                            File.Delete(file_name);
                            file_created = false;
                        }
                        else
                        {
                            WriteFileSegment(mess, file);
                        }
                    };
                }

                channel.StateChanged += () =>
                {
                    Console.WriteLine("State change: {0}", channel.State);
                };
            };

            pc.IceCandidateReadytoSend += (IceCandidate candidate) =>
            {
                //Console.WriteLine("Content: {0}, SdpMid: {1}, SdpMlineIndex: {2}", candidate.Content, candidate.SdpMid, candidate.SdpMlineIndex);
                try
                {
                    Console.WriteLine("Candidate to send: Content: {0}, SdpMid: {1}, SdpMlineIndex: {2}", candidate.Content, candidate.SdpMid, candidate.SdpMlineIndex);
                    signaling.Send(candidate.ToABJson());
                }
                catch (Exception e)
                {
                    Console.WriteLine("Error to send local ice candidate");
                }
            };
            //videoTrackSource.I420AVideoFrameReady += (frame) =>
            //{
            //    Console.WriteLine("Argb32 frame ready. {0} : {1}", frame.width, frame.height);
            //    Console.WriteLine("DataA: {0}, DataU: {1}, DataV: {2}, DataY: {3}", Marshal.SizeOf(frame.dataA),
            //                        Marshal.SizeOf(frame.dataU),
            //                        Marshal.SizeOf(frame.dataV),
            //                        Marshal.SizeOf(frame.dataY));
            //};

            signaling.OnMessage += async(sender, message) =>
            {
                (string header, string correct_message) = message.Data.DivideHeaderAndOriginalJSON();
                Console.WriteLine("Correct message: {0}", correct_message);
                Console.WriteLine("Header: {0}", header);
                if (header == "{\"data\":{\"getRemoteMedia\":" && correct_message == "true")
                {
                    Console.WriteLine("Create local video track...");
                    var trackSettings = new LocalVideoTrackInitConfig {
                        trackName = "webcam_track"
                    };
                    localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);
                    Console.WriteLine("Create video transceiver and add webcam track...");
                    TransceiverInitSettings option = new TransceiverInitSettings();
                    option.Name      = "webcam_track";
                    option.StreamIDs = new List <string> {
                        "webcam_name"
                    };
                    videoTransceiver = pc.AddTransceiver(MediaKind.Video, option);
                    videoTransceiver.DesiredDirection = Transceiver.Direction.SendOnly;
                    videoTransceiver.LocalVideoTrack  = localVideoTrack;

                    bool OfferCreated = pc.CreateOffer();
                    Console.WriteLine("OfferCreated? {0}", OfferCreated);
                }
                //Console.WriteLine(message.Data);
                if (header.IndexOf("candidate") != -1 && correct_message != "null")
                {
                    try
                    {
                        var candidate = JsonSerializer.Deserialize <ICEJavaScriptNotation>(correct_message);
                        Console.WriteLine("Content of ice: {0}, SdpMid: {1}, SdpMLineIndex: {2}", candidate.candidate, candidate.sdpMid, candidate.sdpMLineIndex);
                        pc.AddIceCandidate(candidate.ToMRNetCoreNotation());
                        Console.WriteLine("Deserialized by ice_candidate");
                        //return;
                    }
                    catch (Exception)
                    {
                        Console.WriteLine("Could not deserialize as ice candidate");
                    }
                }

                if (header.IndexOf("description") != -1)
                {
                    try
                    {
                        SdpMessage received_description = JsonSerializer.Deserialize <SDPJavaScriptNotation>(correct_message).ToMRNetCoreNotation();
                        await pc.SetRemoteDescriptionAsync(received_description);

                        if (received_description.Type == SdpMessageType.Offer)
                        {
                            bool res = pc.CreateAnswer();
                            Console.WriteLine("Answer created? {0}", res);
                        }
                        Console.WriteLine("Deserialized by sdp_message");
                    }
                    catch (Exception)
                    {
                        Console.WriteLine("Could not deserialize as sdp message");
                    }
                }
            };


            pc.Connected += () =>
            {
                Console.WriteLine("Connected");
            };
            pc.IceStateChanged += (IceConnectionState newState) =>
            {
                if (newState == IceConnectionState.Disconnected)
                {
                    Console.WriteLine("Disconected");
                }
            };


            signaling.Connect();
            if (!video_translator)
            {
                signaling.Send("{\"data\":{\"getRemoteMedia\":true}}");
            }

            //Console.WriteLine("Press a key to terminate the application...");
            Console.ReadKey(true);
            Console.WriteLine("Program termined.");
            file?.Close();
            pc?.Close();
            signaling?.Close();
            //arduino?.Close();
            //(var a, var b) = ConvertString("{\"data\":{\"candidate\":null}}");
            //Console.WriteLine("{0}, {1}", a, b);
        }
Example #11
        private IEnumerator SingleTwoWaysImpl(bool withSender1, bool withReceiver1, bool withSender2, bool withReceiver2)
        {
            // Create the peer connections
            var pc1_go = new GameObject("pc1");

            pc1_go.SetActive(false); // prevent auto-activation of components
            var pc1 = pc1_go.AddComponent <PeerConnection>();

            pc1.AutoInitializeOnStart = false;
            var pc2_go = new GameObject("pc2");

            pc2_go.SetActive(false); // prevent auto-activation of components
            var pc2 = pc2_go.AddComponent <PeerConnection>();

            pc2.AutoInitializeOnStart = false;

            // Batch changes manually
            pc1.AutoCreateOfferOnRenegotiationNeeded = false;
            pc2.AutoCreateOfferOnRenegotiationNeeded = false;

            // Create the signaler
            var sig_go = new GameObject("signaler");
            var sig    = sig_go.AddComponent <LocalOnlySignaler>();

            sig.Peer1 = pc1;
            sig.Peer2 = pc2;

            // Create the video sources on peer #1
            VideoTrackSource source1   = null;
            VideoReceiver    receiver1 = null;

            if (withSender1)
            {
                source1 = pc1_go.AddComponent <UniformColorVideoSource>();
            }
            if (withReceiver1)
            {
                receiver1 = pc1_go.AddComponent <VideoReceiver>();
            }
            MediaLine ml1 = pc1.AddMediaLine(MediaKind.Video);

            ml1.SenderTrackName = "video_track_1";
            ml1.Source          = source1;
            ml1.Receiver        = receiver1;

            // Create the video sources on peer #2
            VideoTrackSource source2   = null;
            VideoReceiver    receiver2 = null;

            if (withSender2)
            {
                source2 = pc2_go.AddComponent <UniformColorVideoSource>();
            }
            if (withReceiver2)
            {
                receiver2 = pc2_go.AddComponent <VideoReceiver>();
            }
            MediaLine ml2 = pc2.AddMediaLine(MediaKind.Video);

            ml2.SenderTrackName = "video_track_2";
            ml2.Source          = source2;
            ml2.Receiver        = receiver2;

            // Activate
            pc1_go.SetActive(true);
            pc2_go.SetActive(true);

            // Initialize
            var initializedEvent1 = new ManualResetEventSlim(initialState: false);

            pc1.OnInitialized.AddListener(() => initializedEvent1.Set());
            Assert.IsNull(pc1.Peer);
            pc1.InitializeAsync().Wait(millisecondsTimeout: 50000);
            var initializedEvent2 = new ManualResetEventSlim(initialState: false);

            pc2.OnInitialized.AddListener(() => initializedEvent2.Set());
            Assert.IsNull(pc2.Peer);
            pc2.InitializeAsync().Wait(millisecondsTimeout: 50000);

            // Wait a frame so that the Unity event OnInitialized can propagate
            yield return(null);

            // Check the event was raised
            Assert.IsTrue(initializedEvent1.Wait(millisecondsTimeout: 50000));
            Assert.IsNotNull(pc1.Peer);
            Assert.IsTrue(initializedEvent2.Wait(millisecondsTimeout: 50000));
            Assert.IsNotNull(pc2.Peer);

            // Confirm the sources are ready
            if (withSender1)
            {
                Assert.IsTrue(source1.IsStreaming);
            }
            if (withSender2)
            {
                Assert.IsTrue(source2.IsStreaming);
            }

            // Confirm the sender track is not created yet; it will be when the connection starts
            Assert.IsNull(ml1.SenderTrack);
            Assert.IsNull(ml2.SenderTrack);

            // Confirm the receiver track is not added yet, since remote tracks are only instantiated
            // as the result of a session negotiation.
            if (withReceiver1)
            {
                Assert.IsNull(receiver1.Track);
            }
            if (withReceiver2)
            {
                Assert.IsNull(receiver2.Track);
            }

            // Connect
            Assert.IsTrue(sig.StartConnection());
            yield return(sig.WaitForConnection(millisecondsTimeout: 10000));

            Assert.IsTrue(sig.IsConnected);

            // Wait a frame so that the Unity events for streams started can propagate
            yield return(null);

            // Check pairing
            {
                bool hasSend1 = false;
                bool hasSend2 = false;
                bool hasRecv1 = false;
                bool hasRecv2 = false;

                // Local tracks exist if manually added (independently of negotiation)
                Assert.AreEqual(withSender1 ? 1 : 0, pc1.Peer.LocalVideoTracks.Count());
                Assert.AreEqual(withSender2 ? 1 : 0, pc2.Peer.LocalVideoTracks.Count());

                // Remote tracks exist if paired with a sender on the remote peer
                if (withReceiver1 && withSender2) // R <= S
                {
                    Assert.IsNotNull(receiver1.Track);
                    Assert.IsNotNull(ml2.SenderTrack);
                    hasRecv1 = true;
                    hasSend2 = true;
                }
                if (withSender1 && withReceiver2) // S => R
                {
                    Assert.IsNotNull(ml1.SenderTrack);
                    Assert.IsNotNull(receiver2.Track);
                    hasSend1 = true;
                    hasRecv2 = true;
                }
                Assert.AreEqual(hasRecv1 ? 1 : 0, pc1.Peer.RemoteVideoTracks.Count());
                Assert.AreEqual(hasRecv2 ? 1 : 0, pc2.Peer.RemoteVideoTracks.Count());

                // Transceivers are consistent with pairing
                Assert.IsTrue(ml1.Transceiver.NegotiatedDirection.HasValue);
                Assert.AreEqual(hasSend1, Transceiver.HasSend(ml1.Transceiver.NegotiatedDirection.Value));
                Assert.AreEqual(hasRecv1, Transceiver.HasRecv(ml1.Transceiver.NegotiatedDirection.Value));
                Assert.IsTrue(ml2.Transceiver.NegotiatedDirection.HasValue);
                Assert.AreEqual(hasSend2, Transceiver.HasSend(ml2.Transceiver.NegotiatedDirection.Value));
                Assert.AreEqual(hasRecv2, Transceiver.HasRecv(ml2.Transceiver.NegotiatedDirection.Value));
            }
        }
Example #12
        public IEnumerator CaptureStartsOnActivate()
        {
            // Create the peer connection
            var pc_go = new GameObject("pc1");

            pc_go.SetActive(false); // prevent auto-activation of components
            var pc = pc_go.AddComponent <PeerConnection>();

            // Batch changes manually
            pc.AutoCreateOfferOnRenegotiationNeeded = false;

            // Create the video track source
            VideoTrackSource source = pc_go.AddComponent <MockVideoSource>();

            // Create the media line
            MediaLine ml = pc.AddMediaLine(MediaKind.Video);

            ml.SenderTrackName = "track_name";

            // Assign the video source to the media line
            ml.Source = source;

            // MediaLine has not been connected yet.
            Assert.IsEmpty(source.MediaLines);

            // Add event handlers to check IsStreaming state
            source.VideoStreamStarted.AddListener((IVideoSource self) =>
            {
                // Becomes true *before* this handler by design
                Assert.IsTrue(source.IsLive);
            });
            source.VideoStreamStopped.AddListener((IVideoSource self) =>
            {
                // Still true until *after* this handler by design
                Assert.IsTrue(source.IsLive);
            });

            // Confirm the source is not capturing yet because the component is inactive
            Assert.IsFalse(source.IsLive);

            // Confirm the sender has no track because the component is inactive
            Assert.IsNull(ml.LocalTrack);

            // Activate the game object and the video track source component on it
            pc_go.SetActive(true);

            // MediaLine is connected.
            Assert.AreEqual(source.MediaLines.Single(), ml);

            // Confirm the sender is capturing because the component is now active
            Assert.IsTrue(source.IsLive);

            // Confirm the sender still has no track because there's no connection
            Assert.IsNull(ml.LocalTrack);

            // Deactivate the game object and the video track source component on it
            pc_go.SetActive(false);

            // Confirm the source stops streaming
            Assert.IsFalse(source.IsLive);

            Object.Destroy(pc_go);

            // Terminate the coroutine.
            yield return(null);
        }
Example #13
    async void OnClientConnected()
    {
        var pc = signaler.PeerConnection;

        // Record video from local webcam, and send to remote peer
        if (NeedVideo)
        {
            // Configure the local webcam capture (resolution, frame rate, optional video profile)

            var deviceSettings = new LocalVideoDeviceInitConfig
            {
                width  = VideoWidth,
                height = VideoHeight,
            };
            if (VideoFps > 0)
            {
                deviceSettings.framerate = VideoFps;
            }
            if (VideoProfileId.Length > 0)
            {
                deviceSettings.videoProfileId = VideoProfileId;
            }

            Debug.Log($"Attempt to grab Camera - {deviceSettings.videoProfileId}: {deviceSettings.width}x{deviceSettings.height}@{deviceSettings.framerate}fps");
            videoTrackSource = await DeviceVideoTrackSource.CreateAsync(deviceSettings);

            Debug.Log($"Create local video track... {videoTrackSource}");
            var trackSettings = new LocalVideoTrackInitConfig
            {
                trackName = "webcam_track"
            };
            localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

            Debug.Log("Create video transceiver and add webcam track...");
            videoTransceiver = pc.AddTransceiver(MediaKind.Video);
            videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            videoTransceiver.LocalVideoTrack  = localVideoTrack;
        }

        // Record audio from local microphone, and send to remote peer
        if (NeedAudio)
        {
            Debug.Log("Opening local microphone...");
            audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

            Debug.Log("Create local audio track...");
            var trackSettings = new LocalAudioTrackInitConfig {
                trackName = "mic_track"
            };
            localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

            Debug.Log("Create audio transceiver and add mic track...");
            audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
            audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            audioTransceiver.LocalAudioTrack  = localAudioTrack;
        }

        // Start peer connection
        int numFrames = 0;

        pc.VideoTrackAdded += (RemoteVideoTrack track) =>
        {
            Debug.Log($"Attach Frame Listener...");
            track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
            {
                ++numFrames;
                if (numFrames % 60 == 0)
                {
                    Debug.Log($"Received video frames: {numFrames}");
                }
            };
        };
        // we need a short delay here for the video stream to settle...
        // I assume my Logitech webcam is sending some garbage frames in the beginning.
        await Task.Delay(200);

        pc.CreateOffer();
        Debug.Log("Send offer to remote peer");
    }
Example #14
        public IEnumerator EnableAndDisableWithTracks()
        {
            var pc1_go = new GameObject("pc1");

            pc1_go.SetActive(false); // prevent auto-activation of components
            var pc1    = pc1_go.AddComponent <PeerConnection>();
            var pc2_go = new GameObject("pc2");

            pc2_go.SetActive(false); // prevent auto-activation of components
            var pc2 = pc2_go.AddComponent <PeerConnection>();

            // Create the signaler
            var sig_go = new GameObject("signaler");
            var sig    = sig_go.AddComponent <LocalOnlySignaler>();

            sig.Peer1 = pc1;
            sig.Peer2 = pc2;

            // Create the video source on peer #1
            VideoTrackSource source1   = pc1_go.AddComponent <UniformColorVideoSource>();
            VideoReceiver    receiver1 = pc1_go.AddComponent <VideoReceiver>();
            MediaLine        ml1       = pc1.AddMediaLine(MediaKind.Video);

            ml1.SenderTrackName = "video_track_1";
            ml1.Source          = source1;
            ml1.Receiver        = receiver1;

            // Create the video source on peer #2
            VideoTrackSource source2   = pc2_go.AddComponent <UniformColorVideoSource>();
            VideoReceiver    receiver2 = pc2_go.AddComponent <VideoReceiver>();
            MediaLine        ml2       = pc2.AddMediaLine(MediaKind.Video);

            ml2.SenderTrackName = "video_track_2";
            ml2.Source          = source2;
            ml2.Receiver        = receiver2;

            // Init/quit twice.
            for (int i = 0; i < 2; ++i)
            {
                // Initialize
                yield return(InitializeAndWait(pc1));

                yield return(InitializeAndWait(pc2));

                // Confirm the sources are ready.
                Assert.IsTrue(source1.IsLive);
                Assert.IsTrue(source2.IsLive);

                // Sender tracks will be created on connection.
                Assert.IsNull(ml1.LocalTrack);
                Assert.IsNull(ml2.LocalTrack);

                // Connect
                Assert.IsTrue(sig.StartConnection());
                yield return(sig.WaitForConnection(millisecondsTimeout: 10000));

                Assert.IsTrue(sig.IsConnected);

                // Wait a frame so that the Unity events for streams started can propagate
                yield return(null);

                // Check pairing
                Assert.IsNotNull(receiver1.Transceiver);
                Assert.IsTrue(receiver1.IsLive);
                Assert.AreEqual(1, pc1.Peer.RemoteVideoTracks.Count());
                Assert.IsNotNull(receiver2.Transceiver);
                Assert.IsTrue(receiver2.IsLive);
                Assert.AreEqual(1, pc2.Peer.RemoteVideoTracks.Count());

                // Shutdown peer #1
                pc1.enabled = false;
                Assert.IsNull(pc1.Peer);

                // We cannot reliably detect remote shutdown, so only check local peer.
                VerifyLocalShutdown(ml1);

                // Shutdown peer #2
                pc2.enabled = false;
                Assert.IsNull(pc2.Peer);

                VerifyLocalShutdown(ml2);
            }
            UnityEngine.Object.Destroy(pc1_go);
            UnityEngine.Object.Destroy(pc2_go);
            UnityEngine.Object.Destroy(sig_go);
        }
Example #15
        public IEnumerator SwapReceiver()
        {
            // Create the peer connections
            var pc1_go = new GameObject("pc1");

            pc1_go.SetActive(false); // prevent auto-activation of components
            var pc1    = pc1_go.AddComponent <PeerConnection>();
            var pc2_go = new GameObject("pc2");

            pc2_go.SetActive(false); // prevent auto-activation of components
            var pc2 = pc2_go.AddComponent <PeerConnection>();

            // Batch changes manually
            pc1.AutoCreateOfferOnRenegotiationNeeded = false;
            pc2.AutoCreateOfferOnRenegotiationNeeded = false;

            // Create the signaler
            var sig_go = new GameObject("signaler");
            var sig    = sig_go.AddComponent <LocalOnlySignaler>();

            sig.Peer1 = pc1;
            sig.Peer2 = pc2;

            // Create the video source on peer #1
            {
                VideoTrackSource source   = pc1_go.AddComponent <UniformColorVideoSource>();
                MediaLine        senderMl = pc1.AddMediaLine(MediaKind.Video);
                senderMl.SenderTrackName = "video_track_1";
                senderMl.Source          = source;
            }

            // Create the receivers on peer #2
            VideoReceiver receiver1 = pc2_go.AddComponent <VideoReceiver>();
            VideoReceiver receiver2 = pc2_go.AddComponent <VideoReceiver>();
            MediaLine     ml        = pc2.AddMediaLine(MediaKind.Video);

            ml.Receiver = receiver1;

            // Initialize
            yield return(PeerConnectionTests.InitializeAndWait(pc1));

            yield return(PeerConnectionTests.InitializeAndWait(pc2));

            // Connect
            Assert.IsTrue(sig.StartConnection());
            yield return(sig.WaitForConnection(millisecondsTimeout: 10000));

            // Wait a frame so that the Unity events for streams started can propagate
            yield return(null);

            // receiver1 is correctly wired.
            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.ReceiveOnly);
            Assert.AreEqual(pc2.Peer.RemoteVideoTracks.Count(), 1);
            Assert.IsTrue(receiver1.IsLive);
            Assert.AreEqual(receiver1.Track, ml.Transceiver.RemoteTrack);
            Assert.AreEqual(receiver1.MediaLine, ml);

            // Reset receiver
            ml.Receiver = null;

            // receiver1 has been detached.
            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.Inactive);
            Assert.IsFalse(receiver1.IsLive);
            Assert.IsNull(receiver1.MediaLine);

            // Set receiver2.
            ml.Receiver = receiver2;

            // receiver2 is correctly wired.
            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.ReceiveOnly);
            Assert.AreEqual(pc2.Peer.RemoteVideoTracks.Count(), 1);
            Assert.IsTrue(receiver2.IsLive);
            Assert.AreEqual(receiver2.Track, ml.Transceiver.RemoteTrack);
            Assert.AreEqual(receiver2.MediaLine, ml);

            // Swap receiver2 with receiver1.
            ml.Receiver = receiver1;

            // receiver1 is correctly wired.
            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.ReceiveOnly);
            Assert.AreEqual(pc2.Peer.RemoteVideoTracks.Count(), 1);
            Assert.IsTrue(receiver1.IsLive);
            Assert.AreEqual(receiver1.Track, ml.Transceiver.RemoteTrack);
            Assert.AreEqual(receiver1.MediaLine, ml);

            // receiver2 has been detached.
            Assert.IsFalse(receiver2.IsLive);
            Assert.IsNull(receiver2.MediaLine);

            Object.Destroy(pc1_go);
            Object.Destroy(pc2_go);
        }