        public async Task AddVideoTrackFromDeviceAsync(string trackName)
        {
            await RequestMediaAccessAsync(StreamingCaptureMode.Video);

            // Create the source
            VideoCaptureDeviceInfo deviceInfo = VideoCaptureDevices.SelectedItem;

            if (deviceInfo == null)
            {
                throw new InvalidOperationException("No video capture device selected");
            }
            var deviceConfig = new LocalVideoDeviceInitConfig
            {
                videoDevice = new VideoCaptureDevice {
                    id = deviceInfo.Id
                },
            };
            VideoCaptureFormatViewModel formatInfo = VideoCaptureFormats.SelectedItem;

            if (formatInfo != null)
            {
                deviceConfig.width     = formatInfo.Format.width;
                deviceConfig.height    = formatInfo.Format.height;
                deviceConfig.framerate = formatInfo.Format.framerate;
            }
            if (deviceInfo.SupportsVideoProfiles)
            {
                MediaCaptureVideoProfile profile = VideoProfiles.SelectedItem;
                deviceConfig.videoProfileId   = profile?.Id;
                deviceConfig.videoProfileKind = SelectedVideoProfileKind;
            }
            var source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);

            // FIXME - this leaks the source, never disposed

            // Create the track
            var trackConfig = new LocalVideoTrackInitConfig
            {
                trackName = trackName,
            };
            var track = LocalVideoTrack.CreateFromSource(source, trackConfig);

            // FIXME - this probably leaks the track, never disposed

            SessionModel.Current.VideoTracks.Add(new VideoTrackViewModel
            {
                Source     = source,
                Track      = track,
                TrackImpl  = track,
                IsRemote   = false,
                DeviceName = deviceInfo.DisplayName
            });
            SessionModel.Current.LocalTracks.Add(new TrackViewModel(Symbol.Video)
            {
                DisplayName = deviceInfo.DisplayName
            });
        }
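
The two FIXME notes above flag real leaks: neither the capture source nor the track is ever disposed. Below is a minimal cleanup sketch, assuming a hypothetical RemoveVideoTrack helper on the same class (it is not part of the original sample); the track is disposed before the source feeding it.

        public void RemoveVideoTrack(VideoTrackViewModel trackViewModel)
        {
            SessionModel.Current.VideoTracks.Remove(trackViewModel);

            // Dispose the track first, then the source feeding it. The casts
            // sidestep the view model's static typing, which is not shown here.
            (trackViewModel.TrackImpl as IDisposable)?.Dispose();
            (trackViewModel.Source as IDisposable)?.Dispose();
        }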
Example #2
 public async Task CreateFromDevice()
 {
     using (VideoTrackSource source = await DeviceVideoTrackSource.CreateAsync())
     {
         Assert.IsNotNull(source);
         Assert.AreEqual(string.Empty, source.Name);
         Assert.AreEqual(0, source.Tracks.Count);
     }
 }
Example #3
        public async Task Name()
        {
            using (VideoTrackSource source = await DeviceVideoTrackSource.CreateAsync())
            {
                Assert.IsNotNull(source);

                const string kTestName = "test_video_track_source_name";
                source.Name = kTestName;
                Assert.AreEqual(kTestName, source.Name);
            }
        }
Example #4
        public static async Task <VideoTrackSource> CreateAsync(LocalVideoDeviceInitConfig config = null)
        {
            Console.WriteLine("Calling Create Camera Method in thread {0}", Thread.CurrentThread.ManagedThreadId);

            // Hold the gate across the check, the creation, and the assignment;
            // otherwise two concurrent callers could both observe a null VideoSource
            // and create two capture instances. The finally block guarantees the
            // gate is released even when the check throws.
            await Gate.WaitAsync();
            try
            {
                if (VideoSource != null)
                {
                    throw new InvalidOperationException("Camera instance already created and in use");
                }
                VideoSource = await DeviceVideoTrackSource.CreateAsync(config);
                return(VideoSource);
            }
            finally
            {
                Gate.Release();
            }
        }
Example #5
        public MediaPlayerViewModel()
        {
            _videoPlayer.CurrentStateChanged += OnMediaStateChanged;
            _videoPlayer.MediaOpened         += OnMediaOpened;
            _videoPlayer.MediaFailed         += OnMediaFailed;
            _videoPlayer.MediaEnded          += OnMediaEnded;
            _videoPlayer.RealTimePlayback     = true;
            _videoPlayer.AutoPlay             = false;

            AudioTrackTypeList.Add(new AudioTrackTypeViewModel
            {
                DisplayName = "Local microphone (default device)",
                Factory     = async() =>
                {
                    // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
                    var source   = await DeviceAudioTrackSource.CreateAsync();
                    var settings = new LocalAudioTrackInitConfig();
                    return(LocalAudioTrack.CreateFromSource(source, settings));
                }
            });

            VideoTrackTypeList.Add(new VideoTrackTypeViewModel
            {
                DisplayName = "Local webcam (default device)",
                Factory     = async() =>
                {
                    // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
                    var source   = await DeviceVideoTrackSource.CreateAsync();
                    var settings = new LocalVideoTrackInitConfig();
                    return(LocalVideoTrack.CreateFromSource(source, settings));
                }
            });

            _videoStatsTimer.Interval = TimeSpan.FromMilliseconds(300);
            _videoStatsTimer.Tick    += (_1, _2) => UpdateVideoStats();
        }
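
The factory lambdas above leak their sources, as the FIXMEs note: only the track is returned, so nothing ever disposes the source it was created from. A sketch of a leak-free alternative shape (an assumption, not the sample's actual Factory delegate type), returning both objects so the caller can dispose them in order:

            // Sketch only: a tuple-returning factory; the sample's Factory
            // delegate returns just the track.
            Func<Task<(AudioTrackSource, LocalAudioTrack)>> factory = async () =>
            {
                var source = await DeviceAudioTrackSource.CreateAsync();
                var track  = LocalAudioTrack.CreateFromSource(source, new LocalAudioTrackInitConfig());
                return (source, track); // caller disposes the track first, then the source
            };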
Example #6
        static async Task Main(string[] args)
        {
            Transceiver      audioTransceiver = null;
            Transceiver      videoTransceiver = null;
            AudioTrackSource audioTrackSource = null;
            VideoTrackSource videoTrackSource = null;
            LocalAudioTrack  localAudioTrack  = null;
            LocalVideoTrack  localVideoTrack  = null;

            try
            {
                bool needVideo = Array.Exists(args, arg => (arg == "-v") || (arg == "--video"));
                bool needAudio = Array.Exists(args, arg => (arg == "-a") || (arg == "--audio"));

                // Asynchronously retrieve a list of available video capture devices (webcams).
                var deviceList = await PeerConnection.GetVideoCaptureDevicesAsync();

                // For example, print them to the standard output
                foreach (var device in deviceList)
                {
                    Console.WriteLine($"Found webcam {device.name} (id: {device.id})");
                }

                // Create a new peer connection automatically disposed at the end of the program
                using var pc = new PeerConnection();

                // Initialize the connection with a STUN server to allow remote access
                var config = new PeerConnectionConfiguration
                {
                    IceServers = new List <IceServer> {
                        new IceServer {
                            Urls = { "stun:stun.l.google.com:19302" }
                        }
                    }
                };
                await pc.InitializeAsync(config);

                Console.WriteLine("Peer connection initialized.");

                // Record video from local webcam, and send to remote peer
                if (needVideo)
                {
                    Console.WriteLine("Opening local webcam...");
                    videoTrackSource = await DeviceVideoTrackSource.CreateAsync();

                    Console.WriteLine("Create local video track...");
                    var trackSettings = new LocalVideoTrackInitConfig {
                        trackName = "webcam_track"
                    };
                    localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

                    Console.WriteLine("Create video transceiver and add webcam track...");
                    videoTransceiver = pc.AddTransceiver(MediaKind.Video);
                    videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    videoTransceiver.LocalVideoTrack  = localVideoTrack;
                }

                // Record audio from local microphone, and send to remote peer
                if (needAudio)
                {
                    Console.WriteLine("Opening local microphone...");
                    audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

                    Console.WriteLine("Create local audio track...");
                    var trackSettings = new LocalAudioTrackInitConfig {
                        trackName = "mic_track"
                    };
                    localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

                    Console.WriteLine("Create audio transceiver and add mic track...");
                    audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
                    audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    audioTransceiver.LocalAudioTrack  = localAudioTrack;
                }

                // Setup signaling
                Console.WriteLine("Starting signaling...");
                var signaler = new NamedPipeSignaler.NamedPipeSignaler(pc, "testpipe");
                signaler.SdpMessageReceived += async(SdpMessage message) =>
                {
                    await pc.SetRemoteDescriptionAsync(message);

                    if (message.Type == SdpMessageType.Offer)
                    {
                        pc.CreateAnswer();
                    }
                };
                signaler.IceCandidateReceived += (IceCandidate candidate) =>
                {
                    pc.AddIceCandidate(candidate);
                };
                await signaler.StartAsync();

                // Start peer connection
                pc.Connected       += () => { Console.WriteLine("PeerConnection: connected."); };
                pc.IceStateChanged += (IceConnectionState newState) => { Console.WriteLine($"ICE state: {newState}"); };
                int numFrames = 0;
                pc.VideoTrackAdded += (RemoteVideoTrack track) =>
                {
                    track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
                    {
                        ++numFrames;
                        if (numFrames % 60 == 0)
                        {
                            Console.WriteLine($"Received video frames: {numFrames}");
                        }
                    };
                };
                if (signaler.IsClient)
                {
                    Console.WriteLine("Connecting to remote peer...");
                    pc.CreateOffer();
                }
                else
                {
                    Console.WriteLine("Waiting for offer from remote peer...");
                }

                Console.WriteLine("Press a key to stop recording...");
                Console.ReadKey(true);

                signaler.Stop();
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            // Clean up: dispose the tracks first, then the sources feeding them.
            // Use null-conditional calls since creation may have failed or been skipped.
            localAudioTrack?.Dispose();
            localVideoTrack?.Dispose();
            audioTrackSource?.Dispose();
            videoTrackSource?.Dispose();

            Console.WriteLine("Program terminated.");
        }
Example #7
        public async Task BeforeConnect()
        {
            // Create video transceiver on #1
            var transceiver_settings = new TransceiverInitSettings
            {
                Name = "transceiver1",
                InitialDesiredDirection = Transceiver.Direction.SendReceive
            };
            var transceiver1 = pc1_.AddTransceiver(MediaKind.Video, transceiver_settings);

            Assert.NotNull(transceiver1);

            // Wait for local SDP re-negotiation event on #1.
            // This will not create an offer, since we're not connected yet.
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));

            // Create video track source
            var source1 = await DeviceVideoTrackSource.CreateAsync();

            Assert.IsNotNull(source1);

            // Create local video track
            var             settings = new LocalVideoTrackInitConfig();
            LocalVideoTrack track1   = LocalVideoTrack.CreateFromSource(source1, settings);

            Assert.IsNotNull(track1);

            // Add local video track to #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = track1;
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed
            Assert.AreEqual(pc1_, track1.PeerConnection);
            Assert.AreEqual(track1, transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1));
            Assert.IsTrue(pc1_.LocalVideoTracks.Contains(track1));
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());

            // Connect
            StartOfferWith(pc1_);
            WaitForTransportsWritable();
            Assert.True(pc1_.IsConnected);
            Assert.True(pc2_.IsConnected);

            // Now remote peer #2 has 1 remote track
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(1, pc2_.RemoteVideoTracks.Count());

            // Wait until the SDP exchange is completed
            WaitForSdpExchangeCompleted();

            // Remove the track from #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = null;
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed
            Assert.IsNull(track1.PeerConnection);
            Assert.IsNull(track1.Transceiver);
            Assert.AreEqual(0, pc1_.LocalVideoTracks.Count());
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1)); // never removed
            Assert.IsNull(transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            track1.Dispose();

            // Destroy the video source
            source1.Dispose();

            // SetLocalTrack() does not change the transceiver directions, even when the local
            // sending track is disposed of.
            Assert.AreEqual(Transceiver.Direction.SendReceive, transceiver1.DesiredDirection);
            Assert.AreEqual(Transceiver.Direction.SendOnly, transceiver1.NegotiatedDirection);

            // Remote peer #2 still has a track, because the transceiver is still receiving,
            // even if there is no track on the sending side (so effectively it receives only
            // black frames).
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(1, pc2_.RemoteVideoTracks.Count());

            // Change the transceiver direction to stop receiving. This requires a renegotiation
            // to take effect, so nothing changes for now.
            // Note: In Plan B, a renegotiation needed event is manually forced for parity with
            // Unified Plan. However setting the transceiver to inactive removes the remote peer's
            // remote track, which causes another renegotiation needed event. So we suspend the
            // automatic offer to trigger it manually.
            suspendOffer1_ = true;
            remoteDescAppliedEvent1_.Reset();
            remoteDescAppliedEvent2_.Reset();
            transceiver1.DesiredDirection = Transceiver.Direction.Inactive;
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));

            // Renegotiate
            await DoNegotiationStartFrom(pc1_);

            Assert.True(remoteDescAppliedEvent1_.Wait(TimeSpan.FromSeconds(60.0)));
            Assert.True(remoteDescAppliedEvent2_.Wait(TimeSpan.FromSeconds(60.0)));

            // Now the remote track got removed from #2
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc2_.RemoteVideoTracks.Count());
        }
Example #8
        public async Task AfterConnect()
        {
            // Connect
            StartOfferWith(pc1_);
            WaitForTransportsWritable();
            Assert.True(pc1_.IsConnected);
            Assert.True(pc2_.IsConnected);

            // Wait for all transceivers to be updated on both peers
            WaitForSdpExchangeCompleted();
            Assert.True(remoteDescAppliedEvent1_.Wait(TimeSpan.FromSeconds(20.0)));
            Assert.True(remoteDescAppliedEvent2_.Wait(TimeSpan.FromSeconds(20.0)));
            remoteDescAppliedEvent1_.Reset();
            remoteDescAppliedEvent2_.Reset();

            // No track yet
            Assert.AreEqual(0, pc1_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc2_.RemoteVideoTracks.Count());

            // Create video transceiver on #1 -- this generates a renegotiation
            renegotiationEvent1_.Reset();
            var transceiver_settings = new TransceiverInitSettings
            {
                Name = "transceiver1",
                InitialDesiredDirection = Transceiver.Direction.SendReceive
            };
            var transceiver1 = pc1_.AddTransceiver(MediaKind.Video, transceiver_settings);

            Assert.NotNull(transceiver1);
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1));

            // Confirm (inactive) remote track was added on #2 due to transceiver being added
            Assert.True(videoTrackAddedEvent2_.Wait(TimeSpan.FromSeconds(60.0)));

            // Wait until SDP renegotiation finished
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));
            WaitForSdpExchangeCompleted();

            // Now remote peer #2 has 1 remote track (which is inactive).
            // Note that tracks are updated before transceivers here. This might be unintuitive, so we might
            // want to revisit this later.
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(1, pc2_.RemoteVideoTracks.Count());

            // Transceiver has been updated to Send+Receive (desired direction when added), but since peer #2
            // doesn't intend to send, the actually negotiated direction on #1 is Send only.
            Assert.AreEqual(Transceiver.Direction.SendReceive, transceiver1.DesiredDirection);
            Assert.AreEqual(Transceiver.Direction.SendOnly, transceiver1.NegotiatedDirection);

            // Create video track source
            var source1 = await DeviceVideoTrackSource.CreateAsync();

            Assert.IsNotNull(source1);

            // Create local video track
            renegotiationEvent1_.Reset();
            var             settings = new LocalVideoTrackInitConfig();
            LocalVideoTrack track1   = LocalVideoTrack.CreateFromSource(source1, settings);

            Assert.IsNotNull(track1);
            Assert.IsNull(track1.PeerConnection);
            Assert.IsNull(track1.Transceiver);
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed

            // Add local video track to #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = track1;
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed
            Assert.AreEqual(pc1_, track1.PeerConnection);
            Assert.AreEqual(track1, transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1));
            Assert.IsTrue(pc1_.LocalVideoTracks.Contains(track1));
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());

            // SetLocalTrack() does not change the transceiver directions
            Assert.AreEqual(Transceiver.Direction.SendReceive, transceiver1.DesiredDirection);
            Assert.AreEqual(Transceiver.Direction.SendOnly, transceiver1.NegotiatedDirection);

            // Remove the track from #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = null;
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed
            Assert.IsNull(track1.PeerConnection);
            Assert.IsNull(track1.Transceiver);
            Assert.AreEqual(0, pc1_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());
            Assert.IsTrue(pc1_.Transceivers.Contains(transceiver1)); // never removed
            Assert.IsNull(transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            track1.Dispose();

            // Destroy the video source
            source1.Dispose();

            // SetLocalTrack() does not change the transceiver directions, even when the local
            // sending track is disposed of.
            Assert.AreEqual(Transceiver.Direction.SendReceive, transceiver1.DesiredDirection);
            Assert.AreEqual(Transceiver.Direction.SendOnly, transceiver1.NegotiatedDirection);

            // Remote peer #2 still has a track, because the transceiver is still receiving,
            // even if there is no track on the sending side (so effectively it receives only
            // black frames).
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(1, pc2_.RemoteVideoTracks.Count());

            // Change the transceiver direction to stop receiving. This requires a renegotiation
            // to take effect, so nothing changes for now.
            // Note: In Plan B, a renegotiation needed event is manually forced for parity with
            // Unified Plan. However setting the transceiver to inactive removes the remote peer's
            // remote track, which causes another renegotiation needed event. So we suspend the
            // automatic offer to trigger it manually.
            suspendOffer1_ = true;
            remoteDescAppliedEvent1_.Reset();
            remoteDescAppliedEvent2_.Reset();
            transceiver1.DesiredDirection = Transceiver.Direction.Inactive;
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));

            // Renegotiate
            await DoNegotiationStartFrom(pc1_);

            Assert.True(remoteDescAppliedEvent1_.Wait(TimeSpan.FromSeconds(60.0)));
            Assert.True(remoteDescAppliedEvent2_.Wait(TimeSpan.FromSeconds(60.0)));

            // Now the remote track got removed from #2
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc2_.RemoteVideoTracks.Count());
        }
Example #9
        protected async void OnEnable()
        {
            if (Source != null)
            {
                return;
            }

#if PLATFORM_ANDROID
            // Ensure Android binding is initialized before accessing the native implementation
            Android.Initialize();

            // Check for permission to access the camera
            if (!Permission.HasUserAuthorizedPermission(Permission.Camera))
            {
                if (!_androidCameraRequestPending)
                {
                    // Monitor the OnApplicationFocus(true) event during the next 5 minutes,
                    // and check for permission again each time (see below for why).
                    _androidCameraRequestPending        = true;
                    _androidCameraRequestRetryUntilTime = Time.time + 300;

                    // Display a dialog requesting user permission. This returns immediately,
                    // and unfortunately there's no good way to tell when it completes. As a
                    // rule of thumb, the application should lose focus while the dialog is
                    // shown, so checking again when focus resumes should be sufficient
                    // without having to poll every frame.
                    Permission.RequestUserPermission(Permission.Camera);
                }
                return;
            }
#elif UNITY_WSA && !UNITY_EDITOR
            // Request UWP access to video capture. The OS may show some popup dialog to the
            // user to request permission. This will succeed only if the user grants permission.
            try
            {
                // Note that the UWP UI thread and the main Unity app thread are always different.
                // https://docs.unity3d.com/Manual/windowsstore-appcallbacks.html
                // We leave the code below as an example of generic handling, in case it is
                // reused elsewhere or a future version of Unity changes that assumption;
                // currently OnEnable() is always invoked from the main Unity app thread,
                // so the first branch here is never taken.
                if (UnityEngine.WSA.Application.RunningOnUIThread())
                {
                    await RequestAccessAsync();
                }
                else
                {
                    UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
                }
            }
            catch (Exception ex)
            {
                // Log an error and prevent activation
                Debug.LogError($"Video access failure: {ex.Message}.");
                this.enabled = false;
                return;
            }
#endif

            // Handle automatic capture format constraints
            string videoProfileId   = VideoProfileId;
            var    videoProfileKind = VideoProfileKind;
            int    width            = Constraints.width;
            int    height           = Constraints.height;
            double framerate        = Constraints.framerate;
#if ENABLE_WINMD_SUPPORT
            if (FormatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                width  = 0; // auto
                height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            width            = 896;                                    // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                            width            = 960; // Target 960 x 540
                        }
                    }
                }
            }
#elif PLATFORM_ANDROID
            if (FormatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                string deviceId = WebcamDevice.id;
                if (string.IsNullOrEmpty(deviceId))
                {
                    List <VideoCaptureDevice> listedDevices = await PeerConnection.GetVideoCaptureDevicesAsync();

                    if (listedDevices.Count > 0)
                    {
                        deviceId = listedDevices[0].id;
                    }
                }
                if (!string.IsNullOrEmpty(deviceId))
                {
                    // Find the closest format to 720x480, independent of framerate
                    List <VideoCaptureFormat> formats = await DeviceVideoTrackSource.GetCaptureFormatsAsync(deviceId);

                    double smallestDiff = double.MaxValue;
                    bool   hasFormat    = false;
                    foreach (var fmt in formats)
                    {
                        double diff = Math.Abs(fmt.width - 720) + Math.Abs(fmt.height - 480);
                        if ((diff < smallestDiff) || !hasFormat)
                        {
                            hasFormat    = true;
                            smallestDiff = diff;
                            width        = (int)fmt.width;
                            height       = (int)fmt.height;
                        }
                    }
                    if (hasFormat)
                    {
                        Debug.Log($"WebcamSource automated mode selected resolution {width}x{height} for Android video capture device #{deviceId}.");
                    }
                }
            }
#endif

            // TODO - Fix codec selection (was as below before change)

            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            //< FIXME
            //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

            // Check H.264 requests on Desktop (not supported)
            //#if !ENABLE_WINMD_SUPPORT
            //            if (PreferredVideoCodec == "H264")
            //            {
            //                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
            //                PreferredVideoCodec = "VP8";
            //            }
            //#endif

            // Create the track
            var deviceConfig = new LocalVideoDeviceInitConfig
            {
                videoDevice                 = WebcamDevice,
                videoProfileId              = videoProfileId,
                videoProfileKind            = videoProfileKind,
                width                       = (width > 0 ? (uint?)width : null),
                height                      = (height > 0 ? (uint?)height : null),
                framerate                   = (framerate > 0 ? (double?)framerate : null),
                enableMrc                   = EnableMixedRealityCapture,
                enableMrcRecordingIndicator = EnableMRCRecordingIndicator
            };
            try
            {
                var source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);

                AttachSource(source);
            }
            catch (Exception ex)
            {
                Debug.LogError($"Failed to create device track source for {nameof(WebcamSource)} component '{name}'.");
                Debug.LogException(ex, this);
                return;
            }
        }
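
The Android permission branch above defers to a focus callback that the sample does not show. A possible shape for it, assuming the standard Unity OnApplicationFocus callback and the two fields set above; it retries OnEnable once the user returns from the permission dialog:

        protected void OnApplicationFocus(bool focus)
        {
#if PLATFORM_ANDROID
            // Sketch only: re-check the camera permission when focus returns,
            // within the 5-minute retry window armed in OnEnable().
            if (focus && _androidCameraRequestPending
                && (Time.time <= _androidCameraRequestRetryUntilTime)
                && Permission.HasUserAuthorizedPermission(Permission.Camera))
            {
                _androidCameraRequestPending = false;
                OnEnable(); // retry source creation now that permission is granted
            }
#endif
        }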
Example #10
        protected async void OnEnable()
        {
            if (Source != null)
            {
                return;
            }

#if UNITY_WSA && !UNITY_EDITOR
            // Request UWP access to video capture. The OS may show some popup dialog to the
            // user to request permission. This will succeed only if the user grants permission.
            try
            {
                // Note that the UWP UI thread and the main Unity app thread are always different.
                // https://docs.unity3d.com/Manual/windowsstore-appcallbacks.html
                // We leave the code below as an example of generic handling, in case it is
                // reused elsewhere or a future version of Unity changes that assumption;
                // currently OnEnable() is always invoked from the main Unity app thread,
                // so the first branch here is never taken.
                if (UnityEngine.WSA.Application.RunningOnUIThread())
                {
                    await RequestAccessAsync();
                }
                else
                {
                    UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
                }
            }
            catch (Exception ex)
            {
                // Log an error and prevent activation
                Debug.LogError($"Video access failure: {ex.Message}.");
                this.enabled = false;
                return;
            }
#endif

            string videoProfileId   = VideoProfileId;
            var    videoProfileKind = VideoProfileKind;
            int    width            = Constraints.width;
            int    height           = Constraints.height;
            double framerate        = Constraints.framerate;
#if ENABLE_WINMD_SUPPORT
            if (FormatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                width  = 0; // auto
                height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            width            = 896;                                    // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                            width            = 960; // Target 960 x 540
                        }
                    }
                }
            }
#endif

            // TODO - Fix codec selection (was as below before change)

            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            //< FIXME
            //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

            // Check H.264 requests on Desktop (not supported)
            //#if !ENABLE_WINMD_SUPPORT
            //            if (PreferredVideoCodec == "H264")
            //            {
            //                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
            //                PreferredVideoCodec = "VP8";
            //            }
            //#endif

            //// Ensure the track has a valid name
            //string trackName = TrackName;
            //if (trackName.Length == 0)
            //{
            //    trackName = Guid.NewGuid().ToString();
            //    // Re-assign the generated track name for consistency
            //    TrackName = trackName;
            //}
            //SdpTokenAttribute.Validate(trackName, allowEmpty: false);

            // Create the track
            var deviceConfig = new LocalVideoDeviceInitConfig
            {
                videoDevice                 = WebcamDevice,
                videoProfileId              = videoProfileId,
                videoProfileKind            = videoProfileKind,
                width                       = (width > 0 ? (uint?)width : null),
                height                      = (height > 0 ? (uint?)height : null),
                framerate                   = (framerate > 0 ? (double?)framerate : null),
                enableMrc                   = EnableMixedRealityCapture,
                enableMrcRecordingIndicator = EnableMRCRecordingIndicator
            };
            Source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);

            if (Source == null)
            {
                throw new Exception("Failed ot create webcam video source.");
            }

            IsStreaming = true;
            VideoStreamStarted.Invoke(this);
        }
Example #11
    async void OnClientConnected()
    {
        var pc = signaler.PeerConnection;

        // Record video from local webcam, and send to remote peer
        if (NeedVideo)
        {
            var deviceSettings = new LocalVideoDeviceInitConfig
            {
                width  = VideoWidth,
                height = VideoHeight,
            };
            if (VideoFps > 0)
            {
                deviceSettings.framerate = VideoFps;
            }
            if (VideoProfileId.Length > 0)
            {
                deviceSettings.videoProfileId = VideoProfileId;
            }

            Debug.Log($"Attempt to grab Camera - {deviceSettings.videoProfileId}: {deviceSettings.width}x{deviceSettings.height}@{deviceSettings.framerate}fps");
            videoTrackSource = await DeviceVideoTrackSource.CreateAsync(deviceSettings);

            Debug.Log($"Create local video track... {videoTrackSource}");
            var trackSettings = new LocalVideoTrackInitConfig
            {
                trackName = "webcam_track"
            };
            localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

            Debug.Log("Create video transceiver and add webcam track...");
            videoTransceiver = pc.AddTransceiver(MediaKind.Video);
            videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            videoTransceiver.LocalVideoTrack  = localVideoTrack;
        }

        // Record audio from local microphone, and send to remote peer
        if (NeedAudio)
        {
            Debug.Log("Opening local microphone...");
            audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

            Debug.Log("Create local audio track...");
            var trackSettings = new LocalAudioTrackInitConfig {
                trackName = "mic_track"
            };
            localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

            Debug.Log("Create audio transceiver and add mic track...");
            audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
            audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            audioTransceiver.LocalAudioTrack  = localAudioTrack;
        }

        // Start peer connection
        int numFrames = 0;

        pc.VideoTrackAdded += (RemoteVideoTrack track) =>
        {
            Debug.Log($"Attach Frame Listener...");
            track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
            {
                ++numFrames;
                if (numFrames % 60 == 0)
                {
                    Debug.Log($"Received video frames: {numFrames}");
                }
            };
        };
        // A short delay lets the video stream settle before creating the offer;
        // some webcams (this Logitech one, at least) emit a few garbage frames at startup.
        await Task.Delay(200);

        pc.CreateOffer();
        Debug.Log("Send offer to remote peer");
    }
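
Instead of the fixed 200 ms delay above, the stream could be considered settled once the first local frame arrives. A sketch, assuming LocalVideoTrack raises the same I420AVideoFrameReady event used on the remote track above:

        // Sketch only: wait for the first captured frame, capped at 2 seconds.
        var firstFrame = new TaskCompletionSource<bool>();
        localVideoTrack.I420AVideoFrameReady += (I420AVideoFrame _) => firstFrame.TrySetResult(true);
        await Task.WhenAny(firstFrame.Task, Task.Delay(2000));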
        // This method might be run outside the app thread and should not access the Unity API.
        private static async Task <WebRTC.VideoTrackSource> CreateSourceAsync(
            LocalVideoSourceFormatMode formatMode, LocalVideoDeviceInitConfig deviceConfig)
        {
            // Handle automatic capture format constraints
#if ENABLE_WINMD_SUPPORT
            if (formatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                deviceConfig.width  = 0; // auto
                deviceConfig.height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                deviceConfig.framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            deviceConfig.videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            deviceConfig.width            = 896;                                    // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            deviceConfig.videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                            deviceConfig.width            = 960; // Target 960 x 540
                        }
                    }
                }
            }
#elif !UNITY_EDITOR && UNITY_ANDROID
            if (formatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                deviceConfig.framerate = 0; // auto

                string deviceId = deviceConfig.videoDevice.id;
                if (string.IsNullOrEmpty(deviceId))
                {
                    // Continue the task outside the Unity app context, in order to avoid deadlock
                    // if OnDisable waits on this task.
                    IReadOnlyList <VideoCaptureDevice> listedDevices =
                        await PeerConnection.GetVideoCaptureDevicesAsync()
                        .ConfigureAwait(continueOnCapturedContext: false);

                    if (listedDevices.Count > 0)
                    {
                        deviceId = listedDevices[0].id;
                    }
                }
                if (!string.IsNullOrEmpty(deviceId))
                {
                    // Find the closest format to 720x480, independent of framerate
                    // Continue the task outside the Unity app context, in order to avoid deadlock
                    // if OnDisable waits on this task.
                    IReadOnlyList <VideoCaptureFormat> formats =
                        await DeviceVideoTrackSource.GetCaptureFormatsAsync(deviceId)
                        .ConfigureAwait(continueOnCapturedContext: false);

                    double smallestDiff = double.MaxValue;
                    bool   hasFormat    = false;
                    foreach (var fmt in formats)
                    {
                        double diff = Math.Abs(fmt.width - 720) + Math.Abs(fmt.height - 480);
                        if ((diff < smallestDiff) || !hasFormat)
                        {
                            hasFormat           = true;
                            smallestDiff        = diff;
                            deviceConfig.width  = fmt.width;
                            deviceConfig.height = fmt.height;
                        }
                    }
                    if (hasFormat)
                    {
                        Debug.Log($"WebcamSource automated mode selected resolution {deviceConfig.width}x{deviceConfig.height} for Android video capture device #{deviceId}.");
                    }
                }
            }
#endif

            // TODO - Fix codec selection (was as below before change)

            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            //< FIXME
            //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

            // Check H.264 requests on Desktop (not supported)
            //#if !ENABLE_WINMD_SUPPORT
            //            if (PreferredVideoCodec == "H264")
            //            {
            //                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
            //                PreferredVideoCodec = "VP8";
            //            }
            //#endif

            // Create the track
            var createTask = DeviceVideoTrackSource.CreateAsync(deviceConfig);

            // Continue the task outside the Unity app context, in order to avoid deadlock
            // if OnDisable waits on this task.
            return(await createTask.ConfigureAwait(continueOnCapturedContext: false));
        }
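
For context on the ConfigureAwait(false) calls above, here is a sketch of the call site they guard against (hypothetical; the component's actual OnDisable is not shown): blocking on the pending creation task from the Unity main thread would deadlock if any continuation were captured on that thread.

        // Hypothetical call-site sketch matching the comments above.
        private Task<WebRTC.VideoTrackSource> _createSourceTask;

        protected void OnDisable()
        {
            // Blocking here is only safe because CreateSourceAsync never resumes
            // on the Unity app thread (ConfigureAwait(false) throughout).
            _createSourceTask?.Wait();
        }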