// Example 1
        /// <summary>
        /// Unity callback invoked when the component is disabled. This is the proper way
        /// to stop local video capture: it raises <see cref="VideoStreamStopped"/>,
        /// unregisters the local I420 frame callback, removes the local video track from
        /// the native peer connection, and discards any frames still queued for rendering.
        /// </summary>
        protected void OnDisable()
        {
            var peer = PeerConnection.Peer;

            // Nothing to tear down if the native peer is absent or was never initialized.
            if ((peer == null) || !peer.Initialized)
            {
                return;
            }

            VideoStreamStopped.Invoke();
            peer.I420LocalVideoFrameReady -= I420LocalVideoFrameReady;
            peer.RemoveLocalVideoTrack();
            FrameQueue.Clear();
        }
        /// <summary>
        /// Open the local video capture device and add a local video track to the given
        /// native peer connection, using the component's configured constraints
        /// (<see cref="VideoProfileId"/>, <see cref="VideoProfileKind"/>, resolution and
        /// framerate from <c>Constraints</c>), with device-specific overrides on HoloLens.
        /// </summary>
        /// <param name="nativePeer">Initialized native peer connection to add the track to.</param>
        /// <remarks>
        /// NOTE(review): this excerpt is truncated — the <c>trackSettings</c> initializer
        /// and the rest of the method continue past the end of this snippet.
        /// </remarks>
        private void AddLocalVideoTrackImpl(WebRTC.PeerConnection nativePeer)
        {
            // Snapshot the configured capture constraints into locals so the
            // platform-specific block below can override them without touching
            // the serialized component fields.
            string videoProfileId   = VideoProfileId;
            var    videoProfileKind = VideoProfileKind;
            int    width            = Constraints.width;
            int    height           = Constraints.height;
            double framerate        = Constraints.framerate;

#if ENABLE_WINMD_SUPPORT
            // UWP-only: auto-detect sensible defaults instead of honoring the raw constraints.
            if (Mode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                width  = 0; // auto
                height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    // A non-opaque holographic display means a see-through AR headset (HoloLens).
                    if (!Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        // Distinguish HoloLens 1 (x86) from HoloLens 2 (ARM) by package architecture.
                        if (Windows.ApplicationModel.Package.Current.Id.Architecture == Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            videoProfileKind = WebRTC.PeerConnection.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            width            = 896;                                                   // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            videoProfileKind = WebRTC.PeerConnection.VideoProfileKind.VideoConferencing;
                            width            = 1280; // Target 1280 x 720
                        }
                    }
                }
            }
#endif
            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            nativePeer.PreferredVideoCodec = PreferredVideoCodec;

            // Drop any frames queued from a previous capture session before starting a new one.
            FrameQueue.Clear();
            var trackSettings = new WebRTC.PeerConnection.LocalVideoTrackSettings
            {
                videoDevice                 = default,
// Example 3
        //private Rect BoundingBoxAroundEyes(EyePoints leftEye, EyePoints rightEye, int buffer = 0)
        //{
        //    // get upper left corner
        //    int upperLeftX = leftEye.Points[0].X - buffer;
        //    List<OpenCvSharp.Point> lst = new List<OpenCvSharp.Point>();
        //    lst.Add(leftEye.Points[1]);
        //    lst.Add(leftEye.Points[2]);
        //    lst.Add(rightEye.Points[1]);
        //    lst.Add(rightEye.Points[2]);
        //    int upperLeftY = lst.Min(n => n.Y) - buffer;

        //    int lowerRightX = rightEye.Points[3].X + buffer;
        //    lst.Clear();
        //    lst.Add(leftEye.Points[4]);
        //    lst.Add(leftEye.Points[5]);
        //    lst.Add(rightEye.Points[4]);
        //    lst.Add(rightEye.Points[5]);

        //    int lowerRightY = lst.Max(n => n.Y) + buffer;

        //    Rect boundingBox = new Rect(upperLeftX, upperLeftY, lowerRightX - upperLeftX, lowerRightY - upperLeftY);
        //    return boundingBox;
        //}

        /// <summary>
        /// Completion handler for the webcam capture <c>BackgroundWorker</c>.
        /// If the worker was cancelled or faulted, discards any queued frames and
        /// clears the running flag; on normal completion no cleanup is performed
        /// (the capture release code is intentionally disabled below).
        /// </summary>
        /// <param name="sender">The worker that completed.</param>
        /// <param name="e">Completion data carrying the cancelled/error state.</param>
        private void WebCamBackgroundWorker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
        {
            bool stoppedAbnormally = e.Cancelled || (e.Error != null);
            if (stoppedAbnormally)
            {
                FrameQueue.Clear();
                //cap.Release();
                IsRunning = false;
            }
            // else: successful completion — nothing to do.
            //if (cap != null)
            //{
            //    cap.Release();
            //}
        }
// Example 4
        /// <summary>
        /// Apply the component's auto-start settings against an initialized native peer:
        /// when <c>AutoStartCapture</c> is set, subscribe to local I420 frame callbacks;
        /// when <c>AutoAddTrack</c> is set, re-apply the preferred video codec, flush the
        /// frame queue, and open the capture device / add a local video track.
        /// </summary>
        /// <param name="nativePeer">Initialized native peer connection to act on.</param>
        /// <remarks>
        /// NOTE(review): <c>async void</c> makes exceptions from
        /// <c>AddLocalVideoTrackAsync()</c> unobservable by the caller — acceptable only
        /// if this is invoked fire-and-forget from an event/callback; confirm call sites.
        /// NOTE(review): this excerpt is truncated; the method continues past the awaited
        /// call below.
        /// </remarks>
        private async void DoAutoStartActions(WebRTC.PeerConnection nativePeer)
        {
            if (AutoStartCapture)
            {
                nativePeer.I420LocalVideoFrameReady += I420LocalVideoFrameReady;

                // TODO - Currently AddLocalVideoTrackAsync() both open the capture device AND add a video track
            }

            if (AutoAddTrack)
            {
                // Force again PreferredVideoCodec right before starting the local capture,
                // so that modifications to the property done after OnPeerInitialized() are
                // accounted for.
                nativePeer.PreferredVideoCodec = PreferredVideoCodec;

                FrameQueue.Clear();
                await nativePeer.AddLocalVideoTrackAsync(default, EnableMixedRealityCapture);