Example 1
        public async Task RefreshVideoCaptureFormatsAsync(VideoCaptureDeviceInfo item)
        {
            var formats = new CollectionViewModel<VideoCaptureFormatViewModel>();

            if (item != null)
            {
                IReadOnlyList<VideoCaptureFormat> formatsList;
                string profileId = VideoProfiles.SelectedItem?.uniqueId;
                if (string.IsNullOrEmpty(profileId))
                {
                    // Device doesn't support video profiles; fall back on flat list of capture formats.
                    formatsList = await DeviceVideoTrackSource.GetCaptureFormatsAsync(item.Id);
                }
                else
                {
                    // Enumerate formats for the specified profile only
                    formatsList = await DeviceVideoTrackSource.GetCaptureFormatsAsync(item.Id, profileId);
                }
                foreach (var format in formatsList)
                {
                    formats.Add(new VideoCaptureFormatViewModel
                    {
                        Format = format,
                        FormatEncodingDisplayName = FourCCToString(format.fourcc)
                    });
                }
            }
            VideoCaptureFormats = formats;

            // Select first item for convenience
            VideoCaptureFormats.SelectFirstItemIfAny();
        }
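
Note: the FourCCToString() helper used above is not shown in this snippet. A minimal sketch of such a helper, assuming the FourCC code is packed as four ASCII characters in a uint (little-endian, as is conventional for FourCC codes):

        // Hypothetical helper: decodes a FourCC value such as 0x3231564E
        // into its printable four-character form ("NV12", "MJPG", ...).
        private static string FourCCToString(uint fourcc)
        {
            var chars = new char[4];
            for (int i = 0; i < 4; ++i)
            {
                chars[i] = (char)((fourcc >> (8 * i)) & 0xFF);
            }
            return new string(chars);
        }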
Example 2
    async void Start()
    {
        deviceList = await DeviceVideoTrackSource.GetCaptureDevicesAsync();

        Debug.Log($"Found {deviceList.Count} devices.");
        foreach (var device in deviceList)
        {
            Debug.Log($"Found webcam {device.name} (id: {device.id}):");
            var profiles = await DeviceVideoTrackSource.GetCaptureProfilesAsync(device.id);

            if (profiles.Count > 0)
            {
                foreach (var profile in profiles)
                {
                    Debug.Log($"+ Profile '{profile.uniqueId}'");
                    var configs = await DeviceVideoTrackSource.GetCaptureFormatsAsync(device.id, profile.uniqueId);

                    foreach (var config in configs)
                    {
                        Debug.Log($"  - {config.width}x{config.height}@{config.framerate}");
                    }
                }
            }
            else
            {
                var configs = await DeviceVideoTrackSource.GetCaptureFormatsAsync(device.id);

                foreach (var config in configs)
                {
                    Debug.Log($"- {config.width}x{config.height}@{config.framerate}");
                }
            }
        }

        // Setup signaling
        Debug.Log("Starting signaling...");
        switch (ConnectionType)
        {
        case SignalerType.TCP:
            signaler = new TCPSignaler(Port);
            break;

        case SignalerType.WebSocket:
            signaler = new WebSocketSignaler(Port);
            break;

        default:
            throw new System.Exception($"Signaler connection type {ConnectionType} is not valid!");
        }
        signaler.ClientConnected    += OnClientConnected;
        signaler.ClientDisconnected += OnClientDisconnected;
        if (UseRemoteStun)
        {
            signaler.IceServers.Add(new IceServer {
                Urls = { "stun:stun.l.google.com:19302" }
            });
        }
        signaler.Start();
    }
Example 3
        public async Task AddVideoTrackFromDeviceAsync(string trackName)
        {
            await RequestMediaAccessAsync(StreamingCaptureMode.Video);

            // Create the source
            VideoCaptureDeviceInfo deviceInfo = VideoCaptureDevices.SelectedItem;

            if (deviceInfo == null)
            {
                throw new InvalidOperationException("No video capture device selected");
            }
            var deviceConfig = new LocalVideoDeviceInitConfig
            {
                videoDevice = new VideoCaptureDevice {
                    id = deviceInfo.Id
                },
            };
            VideoCaptureFormatViewModel formatInfo = VideoCaptureFormats.SelectedItem;

            if (formatInfo != null)
            {
                deviceConfig.width     = formatInfo.Format.width;
                deviceConfig.height    = formatInfo.Format.height;
                deviceConfig.framerate = formatInfo.Format.framerate;
            }
            if (deviceInfo.SupportsVideoProfiles)
            {
                MediaCaptureVideoProfile profile = VideoProfiles.SelectedItem;
                deviceConfig.videoProfileId   = profile?.Id;
                deviceConfig.videoProfileKind = SelectedVideoProfileKind;
            }
            var source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);

            // FIXME - this leaks the source, never disposed

            // Create the track
            var trackConfig = new LocalVideoTrackInitConfig
            {
                trackName = trackName,
            };
            var track = LocalVideoTrack.CreateFromSource(source, trackConfig);

            // FIXME - this probably leaks the track, never disposed

            SessionModel.Current.VideoTracks.Add(new VideoTrackViewModel
            {
                Source     = source,
                Track      = track,
                TrackImpl  = track,
                IsRemote   = false,
                DeviceName = deviceInfo.DisplayName
            });
            SessionModel.Current.LocalTracks.Add(new TrackViewModel(Symbol.Video)
            {
                DisplayName = deviceInfo.DisplayName
            });
        }
Example 4
 public async Task CreateFromDevice()
 {
     using (VideoTrackSource source = await DeviceVideoTrackSource.CreateAsync())
     {
         Assert.IsNotNull(source);
         Assert.AreEqual(string.Empty, source.Name);
         Assert.AreEqual(0, source.Tracks.Count);
     }
 }
Example 5
        public async Task EnumVideoDevices()
        {
            List<VideoCaptureDevice> devices = await DeviceVideoTrackSource.GetCaptureDevicesAsync();

            foreach (var device in devices)
            {
                Assert.That(device.id.Length, Is.GreaterThan(0));
                Assert.That(device.name.Length, Is.GreaterThan(0));
            }
        }
Example 6
        public async Task Name()
        {
            using (VideoTrackSource source = await DeviceVideoTrackSource.CreateAsync())
            {
                Assert.IsNotNull(source);

                const string kTestName = "test_video_track_source_name";
                source.Name = kTestName;
                Assert.AreEqual(kTestName, source.Name);
            }
        }
Example 7
        public static async Task<VideoTrackSource> CreateAsync(LocalVideoDeviceInitConfig config = null)
        {
            Console.WriteLine("Calling Create Camera Method in thread {0}", Thread.CurrentThread.ManagedThreadId);

            // Hold the gate until the source is actually created, so that two
            // concurrent callers cannot both pass the null check; release it in
            // a finally block so a throw does not leave the gate locked.
            await Gate.WaitAsync();
            try
            {
                if (VideoSource != null)
                {
                    throw new SystemException("Camera instance already created and in use");
                }
                VideoSource = await DeviceVideoTrackSource.CreateAsync(config);
                return VideoSource;
            }
            finally
            {
                Gate.Release();
            }
        }
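
The snippet assumes a one-slot semaphore guarding a single shared camera source. A sketch of the fields it relies on (names taken from the code above; the declarations themselves are assumptions, since they are not shown in the original):

        // Assumed declarations backing the snippet above (hypothetical).
        private static readonly SemaphoreSlim Gate = new SemaphoreSlim(1, 1);
        private static VideoTrackSource VideoSource;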
Example 8
        public async Task VideoDeviceSelection()
        {
            Console.WriteLine("Select availiable video device:");
            var devices_list = await DeviceVideoTrackSource.GetCaptureDevicesAsync();

            var VideoDevice = ApplicationInterface.VideoDeviceSelector(devices_list);

            Console.WriteLine("Select video format:");
            var formats_list = await DeviceVideoTrackSource.GetCaptureFormatsAsync(VideoDevice.id);

            var SelectedFormat = ApplicationInterface.CapturingFormatSelector(formats_list);

            SystemConfiguration.VideoDeviceSettings.framerate = SelectedFormat.framerate;
            SystemConfiguration.VideoDeviceSettings.height    = SelectedFormat.height;
            SystemConfiguration.VideoDeviceSettings.width     = SelectedFormat.width;
            Source = await Camera.CreateAsync(SystemConfiguration.VideoDeviceSettings);
        }
Example 9
        public async Task RefreshVideoCaptureDevicesAsync()
        {
            Logger.Log($"Refreshing list of video capture devices");

            ErrorMessage = null;
            try
            {
                await Utils.RequestMediaAccessAsync(StreamingCaptureMode.Video);
            }
            catch (UnauthorizedAccessException)
            {
                ErrorMessage = "This application is not authorized to access the local camera device. Change permission settings and restart the application.";
                throw; // rethrow without resetting the stack trace
            }
            catch (Exception ex)
            {
                ErrorMessage = ex.Message;
                throw;
            }

            // Populate the list of video capture devices (webcams).
            // On UWP this internally uses the API:
            //   Devices.Enumeration.DeviceInformation.FindAllAsync(VideoCapture)
            // Note that there is no API to pass a given device to WebRTC, so
            // there is also no way to monitor the list and update it when a
            // device gets plugged or unplugged. Even using
            // DeviceInformation.CreateWatcher() would yield some devices that
            // might become unavailable by the time WebRTC internally opens the
            // video capture device, so this is shown mainly for demo purposes.
            var devices = await DeviceVideoTrackSource.GetCaptureDevicesAsync();

            var deviceList = new CollectionViewModel<VideoCaptureDeviceInfo>();

            foreach (var device in devices)
            {
                Logger.Log($"Found video capture device: id={device.id} name={device.name}");
                deviceList.Add(new VideoCaptureDeviceInfo(id: device.id, displayName: device.name));
            }
            VideoCaptureDevices = deviceList;

            // Auto-select first device for convenience
            VideoCaptureDevices.SelectFirstItemIfAny();
        }
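
For reference, Utils.RequestMediaAccessAsync() is not shown here. On UWP, a minimal sketch of such a helper could initialize a MediaCapture instance, which triggers the OS permission prompt on first use and throws UnauthorizedAccessException if the user denies access. This is an assumption about the helper, not the original implementation:

        public static async Task RequestMediaAccessAsync(StreamingCaptureMode mode)
        {
            // Initializing MediaCapture prompts for camera/microphone access
            // and throws UnauthorizedAccessException on denial.
            var settings = new Windows.Media.Capture.MediaCaptureInitializationSettings
            {
                StreamingCaptureMode = mode
            };
            using (var mediaCapture = new Windows.Media.Capture.MediaCapture())
            {
                await mediaCapture.InitializeAsync(settings);
            }
        }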
Example 10
        public async Task EnumVideoFormats()
        {
            List<VideoCaptureDevice> devices = await DeviceVideoTrackSource.GetCaptureDevicesAsync();

            if (devices.Count == 0)
            {
                Assert.Inconclusive("Host device has no available video capture device.");
            }
            foreach (var device in devices)
            {
                List<VideoCaptureFormat> formats = await DeviceVideoTrackSource.GetCaptureFormatsAsync(device.id);

                foreach (var format in formats)
                {
                    Assert.That(format.width, Is.GreaterThan(0));
                    Assert.That(format.height, Is.GreaterThan(0));
                    Assert.That(format.framerate, Is.GreaterThan(0.0));
                }
            }
        }
Example 11
        public async Task RefreshVideoCaptureFormatsAsync(VideoCaptureDeviceInfo item)
        {
            var formats = new CollectionViewModel<VideoCaptureFormatViewModel>();

            if (item != null)
            {
                var selectedProfile = VideoProfiles.SelectedItem;
                if (MediaCapture.IsVideoProfileSupported(item.Id) && (selectedProfile != null))
                {
                    foreach (var desc in selectedProfile.SupportedRecordMediaDescription)
                    {
                        var formatVM = new VideoCaptureFormatViewModel();
                        formatVM.Format.width     = desc.Width;
                        formatVM.Format.height    = desc.Height;
                        formatVM.Format.framerate = desc.FrameRate;
                        //formatVM.Format.fourcc = desc.Subtype; // TODO: string => FOURCC
                        formatVM.FormatEncodingDisplayName = desc.Subtype;
                        formats.Add(formatVM);
                    }
                }
                else
                {
                    // Device doesn't support video profiles (or no profile is
                    // selected); fall back to the flat list of capture formats.
                    List<VideoCaptureFormat> formatsList = await DeviceVideoTrackSource.GetCaptureFormatsAsync(item.Id);

                    foreach (var format in formatsList)
                    {
                        formats.Add(new VideoCaptureFormatViewModel
                        {
                            Format = format,
                            FormatEncodingDisplayName = FourCCToString(format.fourcc)
                        });
                    }
                }
            }
            VideoCaptureFormats = formats;

            // Select first item for convenience
            VideoCaptureFormats.SelectFirstItemIfAny();
        }
Example 12
        public async void RefreshVideoProfiles(VideoCaptureDeviceInfo item, VideoProfileKind kind)
        {
            // Clear formats, which are profile-dependent. This ensures the former list doesn't
            // stay visible if the current profile kind is not supported (does not return any profile).
            VideoCaptureFormats.Clear();

            var videoProfiles = new CollectionViewModel<VideoProfile>();

            if (item != null)
            {
                IReadOnlyList<VideoProfile> profiles = await DeviceVideoTrackSource.GetCaptureProfilesAsync(item.Id, kind);

                foreach (var profile in profiles)
                {
                    videoProfiles.Add(profile);
                }
            }
            VideoProfiles = videoProfiles;

            // Select first item for convenience
            VideoProfiles.SelectFirstItemIfAny();
        }
Example 13
        private static async Task VideoDeviceSelection()
        {
            Console.WriteLine("Select available video device:");
            SystemConfiguration.VideoDeviceSettings = new LocalVideoDeviceInitConfig();
            int i           = 0;
            var device_list = (await DeviceVideoTrackSource.GetCaptureDevicesAsync()).ToList();

            foreach (var device in device_list)
            {
                Console.WriteLine("{0}: Name: {1} ID: {2}", i, device.name, device.id);
                i++;
            }
            int Selected = 0;

            // Keep asking until the user enters a valid device index.
            while ((Selected = Convert.ToInt32(Console.ReadLine())) >= device_list.Count || Selected < 0)
            {
                Console.WriteLine("Unknown device, try again.");
            }
            SystemConfiguration.VideoDeviceSettings.videoDevice = device_list[Selected];

            Console.WriteLine("Select video format:");
            var formats_list = (await DeviceVideoTrackSource.GetCaptureFormatsAsync(device_list[Selected].id)).ToList();

            i        = 0;
            Selected = 0;
            foreach (var format in formats_list)
            {
                Console.WriteLine("{0}: Framerate: {1} Width: {2}, Height: {3}", i, format.framerate, format.width, format.height);
                i++;
            }
            // Keep asking until the user enters a valid format index.
            while ((Selected = Convert.ToInt32(Console.ReadLine())) >= formats_list.Count || Selected < 0)
            {
                Console.WriteLine("Unknown format, try again.");
            }
            var SelectedFormat = formats_list[Selected];

            SystemConfiguration.VideoDeviceSettings.framerate = SelectedFormat.framerate;
            SystemConfiguration.VideoDeviceSettings.height    = SelectedFormat.height;
            SystemConfiguration.VideoDeviceSettings.width     = SelectedFormat.width;
        }
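
Note that Convert.ToInt32(Console.ReadLine()) throws FormatException on non-numeric input; a more defensive read loop could use int.TryParse instead (a sketch, reusing the variable names above):

            // Robust selection loop: rejects non-numeric and out-of-range input.
            int Selected;
            while (!int.TryParse(Console.ReadLine(), out Selected)
                   || Selected < 0 || Selected >= device_list.Count)
            {
                Console.WriteLine("Unknown device, try again.");
            }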
Example 14
        public MediaPlayerViewModel()
        {
            _videoPlayer.CurrentStateChanged += OnMediaStateChanged;
            _videoPlayer.MediaOpened         += OnMediaOpened;
            _videoPlayer.MediaFailed         += OnMediaFailed;
            _videoPlayer.MediaEnded          += OnMediaEnded;
            _videoPlayer.RealTimePlayback     = true;
            _videoPlayer.AutoPlay             = false;

            AudioTrackTypeList.Add(new AudioTrackTypeViewModel
            {
                DisplayName = "Local microphone (default device)",
                Factory     = async () =>
                {
                    // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
                    var source   = await DeviceAudioTrackSource.CreateAsync();
                    var settings = new LocalAudioTrackInitConfig();
                    return LocalAudioTrack.CreateFromSource(source, settings);
                }
            });

            VideoTrackTypeList.Add(new VideoTrackTypeViewModel
            {
                DisplayName = "Local webcam (default device)",
                Factory     = async () =>
                {
                    // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
                    var source   = await DeviceVideoTrackSource.CreateAsync();
                    var settings = new LocalVideoTrackInitConfig();
                    return LocalVideoTrack.CreateFromSource(source, settings);
                }
            });

            _videoStatsTimer.Interval = TimeSpan.FromMilliseconds(300);
            _videoStatsTimer.Tick    += (_1, _2) => UpdateVideoStats();
        }
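
One way to address the FIXME leaks above is to pair each track with the source feeding it, so both can be disposed together when the player is torn down. A minimal sketch (a hypothetical helper, not part of the original code; assumes the track and source types implement IDisposable, as they do in MixedReality-WebRTC):

        // Hypothetical wrapper owning a track and the source feeding it;
        // disposing track-then-source mirrors the creation order.
        sealed class OwnedTrack<TSource, TTrack> : IDisposable
            where TSource : IDisposable
            where TTrack : IDisposable
        {
            public TSource Source { get; }
            public TTrack Track { get; }

            public OwnedTrack(TSource source, TTrack track)
            {
                Source = source;
                Track = track;
            }

            public void Dispose()
            {
                Track.Dispose();
                Source.Dispose();
            }
        }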
Example 15
        public async Task AfterConnect()
        {
            // Connect
            StartOfferWith(pc1_);
            WaitForTransportsWritable();
            Assert.True(pc1_.IsConnected);
            Assert.True(pc2_.IsConnected);

            // Wait for all transceivers to be updated on both peers
            WaitForSdpExchangeCompleted();
            Assert.True(remoteDescAppliedEvent1_.Wait(TimeSpan.FromSeconds(20.0)));
            Assert.True(remoteDescAppliedEvent2_.Wait(TimeSpan.FromSeconds(20.0)));
            remoteDescAppliedEvent1_.Reset();
            remoteDescAppliedEvent2_.Reset();

            // No track yet
            Assert.AreEqual(0, pc1_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc2_.RemoteVideoTracks.Count());

            // Create video transceiver on #1 -- this generates a renegotiation
            renegotiationEvent1_.Reset();
            var transceiver_settings = new TransceiverInitSettings
            {
                Name = "transceiver1",
                InitialDesiredDirection = Transceiver.Direction.SendReceive
            };
            var transceiver1 = pc1_.AddTransceiver(MediaKind.Video, transceiver_settings);

            Assert.NotNull(transceiver1);
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1));

            // Confirm (inactive) remote track was added on #2 due to transceiver being added
            Assert.True(videoTrackAddedEvent2_.Wait(TimeSpan.FromSeconds(60.0)));

            // Wait until SDP renegotiation finished
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));
            WaitForSdpExchangeCompleted();

            // Now remote peer #2 has 1 remote track (which is inactive).
            // Note that tracks are updated before transceivers here. This might be unintuitive, so we might
            // want to revisit this later.
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(1, pc2_.RemoteVideoTracks.Count());

            // Transceiver has been updated to Send+Receive (desired direction when added), but since peer #2
            // doesn't intend to send, the actually negotiated direction on #1 is Send only.
            Assert.AreEqual(Transceiver.Direction.SendReceive, transceiver1.DesiredDirection);
            Assert.AreEqual(Transceiver.Direction.SendOnly, transceiver1.NegotiatedDirection);

            // Create video track source
            var source1 = await DeviceVideoTrackSource.CreateAsync();

            Assert.IsNotNull(source1);

            // Create local video track
            renegotiationEvent1_.Reset();
            var             settings = new LocalVideoTrackInitConfig();
            LocalVideoTrack track1   = LocalVideoTrack.CreateFromSource(source1, settings);

            Assert.IsNotNull(track1);
            Assert.IsNull(track1.PeerConnection);
            Assert.IsNull(track1.Transceiver);
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed

            // Add local video track to #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = track1;
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed
            Assert.AreEqual(pc1_, track1.PeerConnection);
            Assert.AreEqual(track1, transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1));
            Assert.IsTrue(pc1_.LocalVideoTracks.Contains(track1));
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());

            // SetLocalTrack() does not change the transceiver directions
            Assert.AreEqual(Transceiver.Direction.SendReceive, transceiver1.DesiredDirection);
            Assert.AreEqual(Transceiver.Direction.SendOnly, transceiver1.NegotiatedDirection);

            // Remove the track from #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = null;
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed
            Assert.IsNull(track1.PeerConnection);
            Assert.IsNull(track1.Transceiver);
            Assert.AreEqual(0, pc1_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1)); // never removed
            Assert.IsNull(transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            track1.Dispose();

            // Destroy the video source
            source1.Dispose();

            // SetLocalTrack() does not change the transceiver directions, even when the local
            // sending track is disposed of.
            Assert.AreEqual(Transceiver.Direction.SendReceive, transceiver1.DesiredDirection);
            Assert.AreEqual(Transceiver.Direction.SendOnly, transceiver1.NegotiatedDirection);

            // Remote peer #2 still has a track, because the transceiver is still receiving,
            // even if there is no track on the sending side (so effectively it receives only
            // black frames).
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(1, pc2_.RemoteVideoTracks.Count());

            // Change the transceiver direction to stop receiving. This requires a renegotiation
            // to take effect, so nothing changes for now.
            // Note: In Plan B, a renegotiation needed event is manually forced for parity with
            // Unified Plan. However setting the transceiver to inactive removes the remote peer's
            // remote track, which causes another renegotiation needed event. So we suspend the
            // automatic offer to trigger it manually.
            suspendOffer1_ = true;
            remoteDescAppliedEvent1_.Reset();
            remoteDescAppliedEvent2_.Reset();
            transceiver1.DesiredDirection = Transceiver.Direction.Inactive;
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));

            // Renegotiate
            await DoNegotiationStartFrom(pc1_);

            Assert.True(remoteDescAppliedEvent1_.Wait(TimeSpan.FromSeconds(60.0)));
            Assert.True(remoteDescAppliedEvent2_.Wait(TimeSpan.FromSeconds(60.0)));

            // Now the remote track got removed from #2
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc2_.RemoteVideoTracks.Count());
        }
Example 16
        static private async Task StartStend()
        {
            var             autoEvent        = new AutoResetEvent(false);
            bool            video_translator = true;
            bool            file_created     = false;
            FileStream      file             = null;
            Quartus         quartus          = Quartus.GetInstance();
            Microcontroller arduino          = Microcontroller.Create();

            if (video_translator)
            {
                // Asynchronously retrieve a list of available video capture devices (webcams).
                var deviceList = await DeviceVideoTrackSource.GetCaptureDevicesAsync();


                // For example, print them to the standard output
                foreach (var device in deviceList)
                {
                    Console.WriteLine($"Found webcam {device.name} (id: {device.id})");
                }
            }

            // Create a new peer connection automatically disposed at the end of the program
            var pc = new PeerConnection();
            // Initialize the connection with a STUN server to allow remote access
            var config = SystemConfiguration.PeerConnectionSettings;


            await pc.InitializeAsync(config);

            Console.WriteLine("Peer connection initialized.");
            //var chen = await pc.AddDataChannelAsync("sendDataChannel", true, true, cancellationToken: default);
            Console.WriteLine("Opening local webcam...");


            // pc - PeerConnection object
            Transceiver      videoTransceiver = null;
            VideoTrackSource videoTrackSource = null;
            LocalVideoTrack  localVideoTrack  = null;

            await VideoDeviceSelection();

            videoTrackSource = await Camera.CreateAsync(SystemConfiguration.VideoDeviceSettings);


            WebSocketSharp.WebSocket signaling = new WebSocketSharp.WebSocket(CreateSignalingServerUrl(), "id_token", "alpine");
            pc.LocalSdpReadytoSend += (SdpMessage message) =>
            {
                //Console.WriteLine(SdpMessage.TypeToString(message.Type));
                Console.WriteLine(message.Content);
                //Console.WriteLine(HttpUtility.JavaScriptStringEncode(message.Content));
                Console.WriteLine("Sdp offer to send: {\"data\":{\"description\":{\"type\":\"" + SdpMessage.TypeToString(message.Type) + "\",\"sdp\":\"" + HttpUtility.JavaScriptStringEncode(message.Content) + "\"}}}");
                signaling.Send(message.ToABJson());
            };

            pc.RenegotiationNeeded += () =>
            {
                Console.WriteLine("Regotiation needed");
                bool OfferCreated = pc.CreateOffer();
                Console.WriteLine("OfferCreated? {0}", OfferCreated);
            };
            pc.DataChannelAdded += (DataChannel channel) =>
            {
                Console.WriteLine("Added data channel ID: {0}, Label: {1}; Reliable: {2}, Ordered: {3}", channel.ID, channel.Label, channel.Reliable, channel.Ordered);

                if (channel.Label == "sendDataChannel")
                {
                    channel.MessageReceived += (byte[] mess) => {
                        try
                        {
                            CTP_packet command = JsonSerializer.Deserialize<CTP_packet>(mess);
                            Console.WriteLine(arduino.SendCTP_Command(command));
                        }
                        catch (Exception e)
                        {
                            Console.WriteLine(e.Message);
                        }
                    };
                }
                else
                {
                    if (file_created == false)
                    {
                        file         = new FileStream(channel.Label, FileMode.Append);
                        file_created = true;
                    }
                    channel.MessageReceived += async (byte[] mess) =>
                    {
                        // Console.WriteLine(System.Text.Encoding.Default.GetString(mess));
                        if (mess.Length == 3 && System.Text.Encoding.Default.GetString(mess) == "EOF")
                        {
                            string file_name = file.Name;
                            file.Close();
                            string t = await quartus.RunQuartusCommandAsync($"quartus_pgm -m jtag -o \"p;{file_name}@1\"");

                            File.Delete(file_name);
                            file_created = false;
                        }
                        else
                        {
                            WriteFileSegment(mess, file);
                        }
                    };
                }

                channel.StateChanged += () =>
                {
                    Console.WriteLine("State change: {0}", channel.State);
                };
            };

            pc.IceCandidateReadytoSend += (IceCandidate candidate) =>
            {
                //Console.WriteLine("Content: {0}, SdpMid: {1}, SdpMlineIndex: {2}", candidate.Content, candidate.SdpMid, candidate.SdpMlineIndex);
                try
                {
                    Console.WriteLine("Candidate to send: Content: {0}, SdpMid: {1}, SdpMlineIndex: {2}", candidate.Content, candidate.SdpMid, candidate.SdpMlineIndex);
                    signaling.Send(candidate.ToABJson());
                }
                catch (Exception e)
                {
                    Console.WriteLine("Error to send local ice candidate");
                }
            };
            //videoTrackSource.I420AVideoFrameReady += (frame) =>
            //{
            //    Console.WriteLine("Argb32 frame ready. {0} : {1}", frame.width, frame.height);
            //    Console.WriteLine("DataA: {0}, DataU: {1}, DataV: {2}, DataY: {3}", Marshal.SizeOf(frame.dataA),
            //                        Marshal.SizeOf(frame.dataU),
            //                        Marshal.SizeOf(frame.dataV),
            //                        Marshal.SizeOf(frame.dataY));
            //};

            signaling.OnMessage += async (sender, message) =>
            {
                (string header, string correct_message) = message.Data.DivideHeaderAndOriginalJSON();
                Console.WriteLine("Correct message: {0}", correct_message);
                Console.WriteLine("Header: {0}", header);
                if (header == "{\"data\":{\"getRemoteMedia\":" && correct_message == "true")
                {
                    Console.WriteLine("Create local video track...");
                    var trackSettings = new LocalVideoTrackInitConfig {
                        trackName = "webcam_track"
                    };
                    localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);
                    Console.WriteLine("Create video transceiver and add webcam track...");
                    TransceiverInitSettings option = new TransceiverInitSettings();
                    option.Name      = "webcam_track";
                    option.StreamIDs = new List<string> {
                        "webcam_name"
                    };
                    videoTransceiver = pc.AddTransceiver(MediaKind.Video, option);
                    videoTransceiver.DesiredDirection = Transceiver.Direction.SendOnly;
                    videoTransceiver.LocalVideoTrack  = localVideoTrack;

                    bool OfferCreated = pc.CreateOffer();
                    Console.WriteLine("OfferCreated? {0}", OfferCreated);
                }
                //Console.WriteLine(message.Data);
                if (header.IndexOf("candidate") != -1 && correct_message != "null")
                {
                    try
                    {
                        var candidate = JsonSerializer.Deserialize<ICEJavaScriptNotation>(correct_message);
                        Console.WriteLine("Content of ice: {0}, SdpMid: {1}, SdpMLineIndex: {2}", candidate.candidate, candidate.sdpMid, candidate.sdpMLineIndex);
                        pc.AddIceCandidate(candidate.ToMRNetCoreNotation());
                        Console.WriteLine("Deserialized by ice_candidate");
                        //return;
                    }
                    catch (Exception)
                    {
                        Console.WriteLine("Could not deserialize as ice candidate");
                    }
                }

                if (header.IndexOf("description") != -1)
                {
                    try
                    {
                        SdpMessage received_description = JsonSerializer.Deserialize<SDPJavaScriptNotation>(correct_message).ToMRNetCoreNotation();
                        await pc.SetRemoteDescriptionAsync(received_description);

                        if (received_description.Type == SdpMessageType.Offer)
                        {
                            bool res = pc.CreateAnswer();
                            Console.WriteLine("Answer created? {0}", res);
                        }
                        Console.WriteLine("Deserialized by sdp_message");
                    }
                    catch (Exception)
                    {
                        Console.WriteLine("Could not deserialize as sdp message");
                    }
                }
            };


            pc.Connected += () =>
            {
                Console.WriteLine("Connected");
            };
            pc.IceStateChanged += (IceConnectionState newState) =>
            {
                if (newState == IceConnectionState.Disconnected)
                {
                    Console.WriteLine("Disconected");
                }
            };


            signaling.Connect();
            if (!video_translator)
            {
                signaling.Send("{\"data\":{\"getRemoteMedia\":true}}");
            }

            //Console.WriteLine("Press a key to terminate the application...");
            Console.ReadKey(true);
            Console.WriteLine("Program termined.");
            file?.Close();
            pc?.Close();
            signaling?.Close();
            //arduino?.Close();
            //(var a, var b) = ConvertString("{\"data\":{\"candidate\":null}}");
            //Console.WriteLine("{0}, {1}", a, b);
        }
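
WriteFileSegment() used above is not shown in this snippet. A minimal sketch of such a helper, assuming it simply appends the received bytes to the open stream:

        // Hypothetical helper: appends a received data-channel message to the file.
        private static void WriteFileSegment(byte[] segment, FileStream file)
        {
            file.Write(segment, 0, segment.Length);
        }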
Example 17
        public async Task BeforeConnect()
        {
            // Create video transceiver on #1
            var transceiver_settings = new TransceiverInitSettings
            {
                Name = "transceiver1",
                InitialDesiredDirection = Transceiver.Direction.SendReceive
            };
            var transceiver1 = pc1_.AddTransceiver(MediaKind.Video, transceiver_settings);

            Assert.NotNull(transceiver1);

            // Wait for local SDP re-negotiation event on #1.
            // This will not create an offer, since we're not connected yet.
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));

            // Create video track source
            var source1 = await DeviceVideoTrackSource.CreateAsync();

            Assert.IsNotNull(source1);

            // Create local video track
            var             settings = new LocalVideoTrackInitConfig();
            LocalVideoTrack track1   = LocalVideoTrack.CreateFromSource(source1, settings);

            Assert.IsNotNull(track1);

            // Add local video track to #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = track1;
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed
            Assert.AreEqual(pc1_, track1.PeerConnection);
            Assert.AreEqual(track1, transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1));
            Assert.IsTrue(pc1_.LocalVideoTracks.Contains(track1));
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());

            // Connect
            StartOfferWith(pc1_);
            WaitForTransportsWritable();
            Assert.True(pc1_.IsConnected);
            Assert.True(pc2_.IsConnected);

            // Now remote peer #2 has 1 remote track
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(1, pc2_.RemoteVideoTracks.Count());

            // Wait until the SDP exchange is completed
            WaitForSdpExchangeCompleted();

            // Remove the track from #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = null;
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed
            Assert.IsNull(track1.PeerConnection);
            Assert.IsNull(track1.Transceiver);
            Assert.AreEqual(0, pc1_.LocalVideoTracks.Count());
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1)); // never removed
            Assert.IsNull(transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            track1.Dispose();

            // Destroy the video source
            source1.Dispose();

            // SetLocalTrack() does not change the transceiver directions, even when the local
            // sending track is disposed of.
            Assert.AreEqual(Transceiver.Direction.SendReceive, transceiver1.DesiredDirection);
            Assert.AreEqual(Transceiver.Direction.SendOnly, transceiver1.NegotiatedDirection);

            // Remote peer #2 still has a track, because the transceiver is still receiving,
            // even if there is no track on the sending side (so effectively it receives only
            // black frames).
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(1, pc2_.RemoteVideoTracks.Count());

            // Change the transceiver direction to stop receiving. This requires a renegotiation
            // to take effect, so nothing changes for now.
            // Note: In Plan B, a renegotiation needed event is manually forced for parity with
            // Unified Plan. However setting the transceiver to inactive removes the remote peer's
            // remote track, which causes another renegotiation needed event. So we suspend the
            // automatic offer to trigger it manually.
            suspendOffer1_ = true;
            remoteDescAppliedEvent1_.Reset();
            remoteDescAppliedEvent2_.Reset();
            transceiver1.DesiredDirection = Transceiver.Direction.Inactive;
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));

            // Renegotiate
            await DoNegotiationStartFrom(pc1_);

            Assert.True(remoteDescAppliedEvent1_.Wait(TimeSpan.FromSeconds(60.0)));
            Assert.True(remoteDescAppliedEvent2_.Wait(TimeSpan.FromSeconds(60.0)));

            // Now the remote track got removed from #2
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc2_.RemoteVideoTracks.Count());
        }
Example 18
        static async Task Main(string[] args)
        {
            Transceiver      audioTransceiver = null;
            Transceiver      videoTransceiver = null;
            AudioTrackSource audioTrackSource = null;
            VideoTrackSource videoTrackSource = null;
            LocalAudioTrack  localAudioTrack  = null;
            LocalVideoTrack  localVideoTrack  = null;

            try
            {
                bool needVideo = Array.Exists(args, arg => (arg == "-v") || (arg == "--video"));
                bool needAudio = Array.Exists(args, arg => (arg == "-a") || (arg == "--audio"));

                // Asynchronously retrieve a list of available video capture devices (webcams).
                var deviceList = await PeerConnection.GetVideoCaptureDevicesAsync();

                // For example, print them to the standard output
                foreach (var device in deviceList)
                {
                    Console.WriteLine($"Found webcam {device.name} (id: {device.id})");
                }

                // Create a new peer connection automatically disposed at the end of the program
                using var pc = new PeerConnection();

                // Initialize the connection with a STUN server to allow remote access
                var config = new PeerConnectionConfiguration
                {
                    IceServers = new List<IceServer> {
                        new IceServer {
                            Urls = { "stun:stun.l.google.com:19302" }
                        }
                    }
                };
                await pc.InitializeAsync(config);

                Console.WriteLine("Peer connection initialized.");

                // Record video from local webcam, and send to remote peer
                if (needVideo)
                {
                    Console.WriteLine("Opening local webcam...");
                    videoTrackSource = await DeviceVideoTrackSource.CreateAsync();

                    Console.WriteLine("Create local video track...");
                    var trackSettings = new LocalVideoTrackInitConfig {
                        trackName = "webcam_track"
                    };
                    localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

                    Console.WriteLine("Create video transceiver and add webcam track...");
                    videoTransceiver = pc.AddTransceiver(MediaKind.Video);
                    videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    videoTransceiver.LocalVideoTrack  = localVideoTrack;
                }

                // Record audio from local microphone, and send to remote peer
                if (needAudio)
                {
                    Console.WriteLine("Opening local microphone...");
                    audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

                    Console.WriteLine("Create local audio track...");
                    var trackSettings = new LocalAudioTrackInitConfig {
                        trackName = "mic_track"
                    };
                    localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

                    Console.WriteLine("Create audio transceiver and add mic track...");
                    audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
                    audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    audioTransceiver.LocalAudioTrack  = localAudioTrack;
                }

                // Setup signaling
                Console.WriteLine("Starting signaling...");
                var signaler = new NamedPipeSignaler.NamedPipeSignaler(pc, "testpipe");
                signaler.SdpMessageReceived += async (SdpMessage message) =>
                {
                    await pc.SetRemoteDescriptionAsync(message);

                    if (message.Type == SdpMessageType.Offer)
                    {
                        pc.CreateAnswer();
                    }
                };
                signaler.IceCandidateReceived += (IceCandidate candidate) =>
                {
                    pc.AddIceCandidate(candidate);
                };
                await signaler.StartAsync();

                // Start peer connection
                pc.Connected       += () => { Console.WriteLine("PeerConnection: connected."); };
                pc.IceStateChanged += (IceConnectionState newState) => { Console.WriteLine($"ICE state: {newState}"); };
                int numFrames = 0;
                pc.VideoTrackAdded += (RemoteVideoTrack track) =>
                {
                    track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
                    {
                        ++numFrames;
                        if (numFrames % 60 == 0)
                        {
                            Console.WriteLine($"Received video frames: {numFrames}");
                        }
                    };
                };
                if (signaler.IsClient)
                {
                    Console.WriteLine("Connecting to remote peer...");
                    pc.CreateOffer();
                }
                else
                {
                    Console.WriteLine("Waiting for offer from remote peer...");
                }

                Console.WriteLine("Press a key to stop recording...");
                Console.ReadKey(true);

                signaler.Stop();
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            // Clean up: dispose tracks before the sources feeding them, and
            // use null-conditional calls since audio/video may not have been enabled.
            localAudioTrack?.Dispose();
            localVideoTrack?.Dispose();
            audioTrackSource?.Dispose();
            videoTrackSource?.Dispose();

            Console.WriteLine("Program terminated.");
        }
Example 19
    async void OnClientConnected()
    {
        var pc = signaler.PeerConnection;

        // Record video from local webcam, and send to remote peer
        if (NeedVideo)
        {
            var deviceSettings = new LocalVideoDeviceInitConfig
            {
                width  = VideoWidth,
                height = VideoHeight,
            };
            if (VideoFps > 0)
            {
                deviceSettings.framerate = VideoFps;
            }
            if (VideoProfileId.Length > 0)
            {
                deviceSettings.videoProfileId = VideoProfileId;
            }

            Debug.Log($"Attempt to grab Camera - {deviceSettings.videoProfileId}: {deviceSettings.width}x{deviceSettings.height}@{deviceSettings.framerate}fps");
            videoTrackSource = await DeviceVideoTrackSource.CreateAsync(deviceSettings);

            Debug.Log($"Create local video track... {videoTrackSource}");
            var trackSettings = new LocalVideoTrackInitConfig
            {
                trackName = "webcam_track"
            };
            localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

            Debug.Log("Create video transceiver and add webcam track...");
            videoTransceiver = pc.AddTransceiver(MediaKind.Video);
            videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            videoTransceiver.LocalVideoTrack  = localVideoTrack;
        }

        // Record audio from local microphone, and send to remote peer
        if (NeedAudio)
        {
            Debug.Log("Opening local microphone...");
            audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

            Debug.Log("Create local audio track...");
            var trackSettings = new LocalAudioTrackInitConfig {
                trackName = "mic_track"
            };
            localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

            Debug.Log("Create audio transceiver and add mic track...");
            audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
            audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            audioTransceiver.LocalAudioTrack  = localAudioTrack;
        }

        // Start peer connection
        int numFrames = 0;

        pc.VideoTrackAdded += (RemoteVideoTrack track) =>
        {
            Debug.Log($"Attach Frame Listener...");
            track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
            {
                ++numFrames;
                if (numFrames % 60 == 0)
                {
                    Debug.Log($"Received video frames: {numFrames}");
                }
            };
        };
        // we need a short delay here for the video stream to settle...
        // I assume my Logitech webcam is sending some garbage frames in the beginning.
        await Task.Delay(200);

        pc.CreateOffer();
        Debug.Log("Send offer to remote peer");
    }
Example 20
        protected async void OnEnable()
        {
            if (Source != null)
            {
                return;
            }

#if UNITY_WSA && !UNITY_EDITOR
            // Request UWP access to video capture. The OS may show some popup dialog to the
            // user to request permission. This will succeed only if the user grants permission.
            try
            {
                // Note that the UWP UI thread and the main Unity app thread are always different.
                // https://docs.unity3d.com/Manual/windowsstore-appcallbacks.html
                // We leave the code below as an example of generic handling in case this would be used in
                // some other place, and in case a future version of Unity decided to change that assumption,
                // but currently OnEnable() is always invoked from the main Unity app thread so here the first
                // branch is never taken.
                if (UnityEngine.WSA.Application.RunningOnUIThread())
                {
                    await RequestAccessAsync();
                }
                else
                {
                    UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
                }
            }
            catch (Exception ex)
            {
                // Log an error and prevent activation
                Debug.LogError($"Video access failure: {ex.Message}.");
                this.enabled = false;
                return;
            }
#endif

            string videoProfileId   = VideoProfileId;
            var    videoProfileKind = VideoProfileKind;
            int    width            = Constraints.width;
            int    height           = Constraints.height;
            double framerate        = Constraints.framerate;
#if ENABLE_WINMD_SUPPORT
            if (FormatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                width  = 0; // auto
                height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            width            = 896;                                    // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                            width            = 960; // Target 960 x 540
                        }
                    }
                }
            }
#endif

            // TODO - Fix codec selection (was as below before change)

            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            //< FIXME
            //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

            // Check H.264 requests on Desktop (not supported)
            //#if !ENABLE_WINMD_SUPPORT
            //            if (PreferredVideoCodec == "H264")
            //            {
            //                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
            //                PreferredVideoCodec = "VP8";
            //            }
            //#endif

            //// Ensure the track has a valid name
            //string trackName = TrackName;
            //if (trackName.Length == 0)
            //{
            //    trackName = Guid.NewGuid().ToString();
            //    // Re-assign the generated track name for consistency
            //    TrackName = trackName;
            //}
            //SdpTokenAttribute.Validate(trackName, allowEmpty: false);

            // Create the track
            var deviceConfig = new LocalVideoDeviceInitConfig
            {
                videoDevice                 = WebcamDevice,
                videoProfileId              = videoProfileId,
                videoProfileKind            = videoProfileKind,
                width                       = (width > 0 ? (uint?)width : null),
                height                      = (height > 0 ? (uint?)height : null),
                framerate                   = (framerate > 0 ? (double?)framerate : null),
                enableMrc                   = EnableMixedRealityCapture,
                enableMrcRecordingIndicator = EnableMRCRecordingIndicator
            };
            Source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);

            if (Source == null)
            {
                throw new Exception("Failed ot create webcam video source.");
            }

            IsStreaming = true;
            VideoStreamStarted.Invoke(this);
        }
Example 21
        protected async void OnEnable()
        {
            if (Source != null)
            {
                return;
            }

#if PLATFORM_ANDROID
            // Ensure Android binding is initialized before accessing the native implementation
            Android.Initialize();

            // Check for permission to access the camera
            if (!Permission.HasUserAuthorizedPermission(Permission.Camera))
            {
                if (!_androidCameraRequestPending)
                {
                    // Monitor the OnApplicationFocus(true) event during the next 5 minutes,
                    // and check for permission again each time (see below why).
                    _androidCameraRequestPending        = true;
                    _androidCameraRequestRetryUntilTime = Time.time + 300;

                    // Display dialog requesting user permission. This will return immediately,
                    // and unfortunately there's no good way to tell when this completes. As a rule
                    // of thumb, application should lose focus, so check when focus resumes should
                    // be sufficient without having to poll every frame.
                    Permission.RequestUserPermission(Permission.Camera);
                }
                return;
            }
#elif UNITY_WSA && !UNITY_EDITOR
            // Request UWP access to video capture. The OS may show some popup dialog to the
            // user to request permission. This will succeed only if the user grants permission.
            try
            {
                // Note that the UWP UI thread and the main Unity app thread are always different.
                // https://docs.unity3d.com/Manual/windowsstore-appcallbacks.html
                // We leave the code below as an example of generic handling in case this would be used in
                // some other place, and in case a future version of Unity decided to change that assumption,
                // but currently OnEnable() is always invoked from the main Unity app thread so here the first
                // branch is never taken.
                if (UnityEngine.WSA.Application.RunningOnUIThread())
                {
                    await RequestAccessAsync();
                }
                else
                {
                    UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
                }
            }
            catch (Exception ex)
            {
                // Log an error and prevent activation
                Debug.LogError($"Video access failure: {ex.Message}.");
                this.enabled = false;
                return;
            }
#endif

            // Handle automatic capture format constraints
            string videoProfileId   = VideoProfileId;
            var    videoProfileKind = VideoProfileKind;
            int    width            = Constraints.width;
            int    height           = Constraints.height;
            double framerate        = Constraints.framerate;
#if ENABLE_WINMD_SUPPORT
            if (FormatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                width  = 0; // auto
                height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            width            = 896;                                    // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                            width            = 960; // Target 960 x 540
                        }
                    }
                }
            }
#elif PLATFORM_ANDROID
            if (FormatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                string deviceId = WebcamDevice.id;
                if (string.IsNullOrEmpty(deviceId))
                {
                    List<VideoCaptureDevice> listedDevices = await PeerConnection.GetVideoCaptureDevicesAsync();

                    if (listedDevices.Count > 0)
                    {
                        deviceId = listedDevices[0].id;
                    }
                }
                if (!string.IsNullOrEmpty(deviceId))
                {
                    // Find the closest format to 720x480, independent of framerate
                    List<VideoCaptureFormat> formats = await DeviceVideoTrackSource.GetCaptureFormatsAsync(deviceId);

                    double smallestDiff = double.MaxValue;
                    bool   hasFormat    = false;
                    foreach (var fmt in formats)
                    {
                        double diff = Math.Abs(fmt.width - 720) + Math.Abs(fmt.height - 480);
                        if ((diff < smallestDiff) || !hasFormat)
                        {
                            hasFormat    = true;
                            smallestDiff = diff;
                            width        = (int)fmt.width;
                            height       = (int)fmt.height;
                        }
                    }
                    if (hasFormat)
                    {
                        Debug.Log($"WebcamSource automated mode selected resolution {width}x{height} for Android video capture device #{deviceId}.");
                    }
                }
            }
#endif

            // TODO - Fix codec selection (was as below before change)

            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            //< FIXME
            //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

            // Check H.264 requests on Desktop (not supported)
            //#if !ENABLE_WINMD_SUPPORT
            //            if (PreferredVideoCodec == "H264")
            //            {
            //                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
            //                PreferredVideoCodec = "VP8";
            //            }
            //#endif

            // Create the track
            var deviceConfig = new LocalVideoDeviceInitConfig
            {
                videoDevice                 = WebcamDevice,
                videoProfileId              = videoProfileId,
                videoProfileKind            = videoProfileKind,
                width                       = (width > 0 ? (uint?)width : null),
                height                      = (height > 0 ? (uint?)height : null),
                framerate                   = (framerate > 0 ? (double?)framerate : null),
                enableMrc                   = EnableMixedRealityCapture,
                enableMrcRecordingIndicator = EnableMRCRecordingIndicator
            };
            try
            {
                var source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);

                AttachSource(source);
            }
            catch (Exception ex)
            {
                Debug.LogError($"Failed to create device track source for {nameof(WebcamSource)} component '{name}'.");
                Debug.LogException(ex, this);
                return;
            }
        }
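
The Android branch above only raises `_androidCameraRequestPending` and arms a 5-minute retry deadline; the matching focus handler is not part of this snippet. A hedged sketch of what such a handler could look like, under the same `PLATFORM_ANDROID` guard and using only the fields and calls already visible above (calling `OnEnable()` directly to retry is an assumption of this sketch, not confirmed library behavior):

    protected void OnApplicationFocus(bool focused)
    {
        // Only react when focus comes back while a permission request is pending.
        if (!focused || !_androidCameraRequestPending)
        {
            return;
        }

        if (Permission.HasUserAuthorizedPermission(Permission.Camera))
        {
            // Permission granted; retry the capture initialization.
            _androidCameraRequestPending = false;
            OnEnable();
        }
        else if (Time.time > _androidCameraRequestRetryUntilTime)
        {
            // 5-minute window elapsed without the user granting access; give up.
            _androidCameraRequestPending = false;
            Debug.LogError("User denied camera permission; cannot start video capture.");
        }
    }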
Example #22
0
 /// <summary>
 /// Enumerate the video capture devices available as a WebRTC local video feed source.
 /// </summary>
 /// <returns>The list of local video capture devices available to WebRTC.</returns>
 public static Task<List<VideoCaptureDevice>> GetVideoCaptureDevicesAsync()
 {
     return DeviceVideoTrackSource.GetCaptureDevicesAsync();
 }
        // This method might be run outside the app thread and should not access the Unity API.
        private static async Task<WebRTC.VideoTrackSource> CreateSourceAsync(
            LocalVideoSourceFormatMode formatMode, LocalVideoDeviceInitConfig deviceConfig)
        {
            // Handle automatic capture format constraints
#if ENABLE_WINMD_SUPPORT
            if (formatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                deviceConfig.width  = 0; // auto
                deviceConfig.height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                deviceConfig.framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            deviceConfig.videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            deviceConfig.width            = 896;                                    // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            deviceConfig.videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                            deviceConfig.width            = 960; // Target 960 x 540
                        }
                    }
                }
            }
#elif !UNITY_EDITOR && UNITY_ANDROID
            if (formatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                deviceConfig.framerate = 0; // auto

                string deviceId = deviceConfig.videoDevice.id;
                if (string.IsNullOrEmpty(deviceId))
                {
                    // Continue the task outside the Unity app context, in order to avoid deadlock
                    // if OnDisable waits on this task.
                    IReadOnlyList<VideoCaptureDevice> listedDevices =
                        await PeerConnection.GetVideoCaptureDevicesAsync()
                        .ConfigureAwait(continueOnCapturedContext: false);

                    if (listedDevices.Count > 0)
                    {
                        deviceId = listedDevices[0].id;
                    }
                }
                if (!string.IsNullOrEmpty(deviceId))
                {
                    // Find the closest format to 720x480, independent of framerate
                    // Continue the task outside the Unity app context, in order to avoid deadlock
                    // if OnDisable waits on this task.
                    IReadOnlyList<VideoCaptureFormat> formats =
                        await DeviceVideoTrackSource.GetCaptureFormatsAsync(deviceId)
                        .ConfigureAwait(continueOnCapturedContext: false);

                    double smallestDiff = double.MaxValue;
                    bool   hasFormat    = false;
                    foreach (var fmt in formats)
                    {
                        double diff = Math.Abs(fmt.width - 720) + Math.Abs(fmt.height - 480);
                        if ((diff < smallestDiff) || !hasFormat)
                        {
                            hasFormat           = true;
                            smallestDiff        = diff;
                            deviceConfig.width  = fmt.width;
                            deviceConfig.height = fmt.height;
                        }
                    }
                    if (hasFormat)
                    {
                        Debug.Log($"WebcamSource automated mode selected resolution {deviceConfig.width}x{deviceConfig.height} for Android video capture device #{deviceId}.");
                    }
                }
            }
#endif

            // TODO - Fix codec selection (was as below before change)

            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            //< FIXME
            //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

            // Check H.264 requests on Desktop (not supported)
            //#if !ENABLE_WINMD_SUPPORT
            //            if (PreferredVideoCodec == "H264")
            //            {
            //                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
            //                PreferredVideoCodec = "VP8";
            //            }
            //#endif

            // Create the track
            var createTask = DeviceVideoTrackSource.CreateAsync(deviceConfig);

            // Continue the task outside the Unity app context, in order to avoid deadlock
            // if OnDisable waits on this task.
            return await createTask.ConfigureAwait(continueOnCapturedContext: false);
        }
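
The "closest format to 720x480" search appears verbatim in both the `OnEnable()` snippet and `CreateSourceAsync()` above; it scores each format by the Manhattan distance of its resolution to the target and keeps the minimum. A sketch of that heuristic factored into a reusable helper, assuming only the `width`/`height` fields of `VideoCaptureFormat` used above (the helper name is hypothetical):

    // Score each capture format by |w - targetW| + |h - targetH| and return the
    // best match, if any. Casting to double avoids unsigned underflow when the
    // format is smaller than the target.
    private static bool TryFindClosestFormat(
        IReadOnlyList<VideoCaptureFormat> formats,
        uint targetWidth, uint targetHeight,
        out VideoCaptureFormat closest)
    {
        closest = default;
        double smallestDiff = double.MaxValue;
        bool hasFormat = false;
        foreach (var fmt in formats)
        {
            double diff = Math.Abs((double)fmt.width - targetWidth)
                        + Math.Abs((double)fmt.height - targetHeight);
            if (diff < smallestDiff || !hasFormat)
            {
                hasFormat = true;
                smallestDiff = diff;
                closest = fmt;
            }
        }
        return hasFormat;
    }

With such a helper, both call sites would reduce to a single `TryFindClosestFormat(formats, 720, 480, out var fmt)` followed by assigning `fmt.width`/`fmt.height` to the config.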