Example #1
        public WriteableBitmap ConvertToWritableBitmap(RawImage frame)
        {
            if (bitmap == null)
            {
                bitmap = frame.CreateWriteableBitmap();
            }

            frame.UpdateWriteableBitmap(bitmap);
            return bitmap;
        }
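
A minimal usage sketch for the caching pattern above: because the method stores the WriteableBitmap in a field, it allocates once and only copies pixels on later frames. The OnFrameArrived handler and PreviewImage control here are hypothetical.

        // Hypothetical per-frame handler that reuses the cached bitmap
        private void OnFrameArrived(RawImage frame)
        {
            // WriteableBitmap is a UI resource, so touch it on the UI thread
            Dispatcher.Invoke(() =>
            {
                PreviewImage.Source = ConvertToWritableBitmap(frame);
            });
        }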
Example #2
        /// <summary>
        /// Convert a RawImage instance to a bitmap.
        /// </summary>
        /// <param name="raw">The RawImage instance to convert.</param>
        /// <returns>The raw image converted to a Bitmap instance.</returns>
        public static Bitmap ToBitmap(this RawImage raw)
        {
            // first create a writeable bitmap
            var wbm = raw.CreateWriteableBitmap();

            raw.UpdateWriteableBitmap(wbm);

            // write the writeablebitmap into a memory stream
            using (var stream = new MemoryStream())
            {
                var encoder = new BmpBitmapEncoder();
                encoder.Frames.Add(BitmapFrame.Create((BitmapSource)wbm));
                encoder.Save(stream);

                // construct a new bitmap from the memory stream data;
                // GDI+ requires the source stream to stay open for the
                // bitmap's lifetime, so clone into a standalone copy first
                using (var temp = new Bitmap(stream))
                {
                    return new Bitmap(temp);
                }
            }
        }
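
A short usage sketch for the extension method; the reader variable and file name are illustrative. Since Bitmap is IDisposable, callers should dispose the result once they are done with it.

        // Hypothetical caller: convert a captured frame and save it to disk
        RawImage raw = reader.GetNextImage();
        using (Bitmap snapshot = raw.ToBitmap())
        {
            snapshot.Save("frame_0001.png", System.Drawing.Imaging.ImageFormat.Png);
        }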
Example #3
        private void VisualizeFeatures(RawImage frame, Visualizer visualizer, List <Tuple <float, float> > landmarks, List <bool> visibilities, bool detection_succeeding,
                                       bool new_image, bool multi_face, float fx, float fy, float cx, float cy, double progress)
        {
            List <Tuple <Point, Point> > lines         = null;
            List <Tuple <float, float> > eye_landmarks = null;
            List <Tuple <Point, Point> > gaze_lines    = null;
            Tuple <float, float>         gaze_angle    = new Tuple <float, float>(0, 0);

            List <float> pose = new List <float>();

            landmark_detector.GetPose(pose, fx, fy, cx, cy);
            List <float> non_rigid_params = landmark_detector.GetNonRigidParams();

            double confidence = landmark_detector.GetConfidence();

            if (confidence < 0)
            {
                confidence = 0;
            }
            else if (confidence > 1)
            {
                confidence = 1;
            }

            double scale = landmark_detector.GetRigidParams()[0];

            // Helps with recording and showing the visualizations
            if (new_image)
            {
                visualizer.SetImage(frame, fx, fy, cx, cy);
            }
            visualizer.SetObservationHOG(face_analyser.GetLatestHOGFeature(), face_analyser.GetHOGRows(), face_analyser.GetHOGCols());
            visualizer.SetObservationLandmarks(landmarks, confidence, visibilities);
            visualizer.SetObservationPose(pose, confidence);
            visualizer.SetObservationGaze(gaze_analyser.GetGazeCamera().Item1, gaze_analyser.GetGazeCamera().Item2, landmark_detector.CalculateAllEyeLandmarks(), landmark_detector.CalculateAllEyeLandmarks3D(fx, fy, cx, cy), confidence);

            eye_landmarks = landmark_detector.CalculateVisibleEyeLandmarks();
            lines         = landmark_detector.CalculateBox(fx, fy, cx, cy);

            gaze_lines = gaze_analyser.CalculateGazeLines(fx, fy, cx, cy);
            gaze_angle = gaze_analyser.GetGazeAngle();

            // Visualisation (marshalled onto the UI thread via the Dispatcher)
            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                if (ShowAUs)
                {
                    var au_classes = face_analyser.GetCurrentAUsClass();
                    var au_regs = face_analyser.GetCurrentAUsReg();

                    auClassGraph.Update(au_classes);

                    var au_regs_scaled = new Dictionary <String, double>();
                    foreach (var au_reg in au_regs)
                    {
                        au_regs_scaled[au_reg.Key] = au_reg.Value / 5.0;
                        if (au_regs_scaled[au_reg.Key] < 0)
                        {
                            au_regs_scaled[au_reg.Key] = 0;
                        }

                        if (au_regs_scaled[au_reg.Key] > 1)
                        {
                            au_regs_scaled[au_reg.Key] = 1;
                        }
                    }
                    auRegGraph.Update(au_regs_scaled);
                }

                if (ShowGeometry)
                {
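                    // pose[0..2] hold head translation in mm; pose[3..5] hold
                    // rotation in radians (pitch = pose[3], yaw = pose[4], roll = pose[5])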
                    int yaw = (int)(pose[4] * 180 / Math.PI + 0.5);
                    int roll = (int)(pose[5] * 180 / Math.PI + 0.5);
                    int pitch = (int)(pose[3] * 180 / Math.PI + 0.5);

                    YawLabel.Content = yaw + "°";
                    RollLabel.Content = roll + "°";
                    PitchLabel.Content = pitch + "°";

                    XPoseLabel.Content = (int)pose[0] + " mm";
                    YPoseLabel.Content = (int)pose[1] + " mm";
                    ZPoseLabel.Content = (int)pose[2] + " mm";

                    nonRigidGraph.Update(non_rigid_params);

                    // Update eye gaze
                    String x_angle = String.Format("{0:F0}°", gaze_angle.Item1 * (180.0 / Math.PI));
                    String y_angle = String.Format("{0:F0}°", gaze_angle.Item2 * (180.0 / Math.PI));
                    GazeXLabel.Content = x_angle;
                    GazeYLabel.Content = y_angle;
                }

                if (ShowTrackedVideo)
                {
                    if (new_image)
                    {
                        latest_img = frame.CreateWriteableBitmap();
                        overlay_image.Clear();
                    }

                    frame.UpdateWriteableBitmap(latest_img);

                    // Show the updated frame and per-frame statistics
                    overlay_image.Source = latest_img;
                    overlay_image.Confidence.Add(confidence);
                    overlay_image.FPS = processing_fps.GetFPS();
                    overlay_image.Progress = progress;
                    overlay_image.FaceScale.Add(scale);

                    // Update the overlays even when detection is failing, as long as multi-face mode is on
                    if (detection_succeeding || multi_face)
                    {
                        List <Point> landmark_points = new List <Point>();
                        foreach (var p in landmarks)
                        {
                            landmark_points.Add(new Point(p.Item1, p.Item2));
                        }

                        List <Point> eye_landmark_points = new List <Point>();
                        foreach (var p in eye_landmarks)
                        {
                            eye_landmark_points.Add(new Point(p.Item1, p.Item2));
                        }

                        overlay_image.OverlayLines.Add(lines);
                        overlay_image.OverlayPoints.Add(landmark_points);
                        overlay_image.OverlayPointsVisibility.Add(visibilities);
                        overlay_image.OverlayEyePoints.Add(eye_landmark_points);
                        overlay_image.GazeLines.Add(gaze_lines);
                    }
                }

                if (ShowAppearance)
                {
                    RawImage aligned_face = face_analyser.GetLatestAlignedFace();
                    RawImage hog_face = visualizer.GetHOGVis();

                    if (latest_aligned_face == null)
                    {
                        latest_aligned_face = aligned_face.CreateWriteableBitmap();
                        latest_HOG_descriptor = hog_face.CreateWriteableBitmap();
                    }

                    aligned_face.UpdateWriteableBitmap(latest_aligned_face);
                    hog_face.UpdateWriteableBitmap(latest_HOG_descriptor);

                    AlignedFace.Source = latest_aligned_face;
                    AlignedHOG.Source = latest_HOG_descriptor;
                }
            }));
        }
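
One caveat in the angle conversions above: the (int)(radians * 180 / Math.PI + 0.5) idiom rounds correctly only for non-negative values; for negative angles the cast truncates toward zero, so -10.7 becomes -10 rather than -11. A hedged alternative helper:

        // Sketch: radians to whole degrees, rounding symmetrically for
        // both positive and negative angles
        private static int ToDegrees(double radians)
        {
            return (int)Math.Round(radians * 180.0 / Math.PI, MidpointRounding.AwayFromZero);
        }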
Example #4
        // The main loop for processing the webcam feed
        private void ProcessingLoop(SequenceReader reader)
        {
            thread_running = true;

            Thread.CurrentThread.IsBackground = true;

            DateTime? startTime = CurrentTime;

            var lastFrameTime = CurrentTime;

            landmark_detector.Reset();
            face_analyser.Reset();

            int frame_id = 0;

            double old_gaze_x = 0;
            double old_gaze_y = 0;

            double smile_cumm     = 0;
            double frown_cumm     = 0;
            double brow_up_cumm   = 0;
            double brow_down_cumm = 0;
            double widen_cumm     = 0;
            double wrinkle_cumm   = 0;

            while (thread_running)
            {
                // Grab the next frame from the sequence reader (colour and grayscale)
                RawImage frame      = new RawImage(reader.GetNextImage());
                RawImage gray_frame = new RawImage(reader.GetCurrentFrameGray());

                lastFrameTime = CurrentTime;
                processing_fps.AddFrame();

                bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(gray_frame, face_model_params);

                // The face analysis step (only done if recording AUs, HOGs or video)
                face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, true);
                gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                double confidence = landmark_detector.GetConfidence();

                if (confidence < 0)
                {
                    confidence = 0;
                }
                else if (confidence > 1)
                {
                    confidence = 1;
                }

                List <double> pose = new List <double>();

                landmark_detector.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                List <double> non_rigid_params = landmark_detector.GetNonRigidParams();
                double        scale            = landmark_detector.GetRigidParams()[0];

                double time_stamp = (DateTime.Now - (DateTime)startTime).TotalMilliseconds;


                List <Tuple <Point, Point> >   lines         = null;
                List <Tuple <double, double> > landmarks     = null;
                List <Tuple <double, double> > eye_landmarks = null;
                List <Tuple <Point, Point> >   gaze_lines    = null;
                Tuple <double, double>         gaze_angle    = gaze_analyser.GetGazeAngle();

                if (detection_succeeding)
                {
                    landmarks     = landmark_detector.CalculateVisibleLandmarks();
                    eye_landmarks = landmark_detector.CalculateVisibleEyeLandmarks();
                    lines         = landmark_detector.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                    gaze_lines    = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                }

                // Visualisation
                Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
                {
                    var au_regs = face_analyser.GetCurrentAUsReg();
                    if (au_regs.Count > 0)
                    {
                        double smile = (au_regs["AU12"] + au_regs["AU06"] + au_regs["AU25"]) / 13.0;
                        double frown = (au_regs["AU15"] + au_regs["AU17"]) / 12.0;

                        double brow_up = (au_regs["AU01"] + au_regs["AU02"]) / 10.0;
                        double brow_down = au_regs["AU04"] / 5.0;

                        double eye_widen = au_regs["AU05"] / 3.0;
                        double nose_wrinkle = au_regs["AU09"] / 4.0;

                        Dictionary <int, double> smileDict = new Dictionary <int, double>();
                        smileDict[0] = 0.7 * smile_cumm + 0.3 * smile;
                        smileDict[1] = 0.7 * frown_cumm + 0.3 * frown;
                        smilePlot.AddDataPoint(new DataPointGraph()
                        {
                            Time = CurrentTime, values = smileDict, Confidence = confidence
                        });

                        Dictionary <int, double> browDict = new Dictionary <int, double>();
                        browDict[0] = 0.7 * brow_up_cumm + 0.3 * brow_up;
                        browDict[1] = 0.7 * brow_down_cumm + 0.3 * brow_down;
                        browPlot.AddDataPoint(new DataPointGraph()
                        {
                            Time = CurrentTime, values = browDict, Confidence = confidence
                        });

                        Dictionary <int, double> eyeDict = new Dictionary <int, double>();
                        eyeDict[0] = 0.7 * widen_cumm + 0.3 * eye_widen;
                        eyeDict[1] = 0.7 * wrinkle_cumm + 0.3 * nose_wrinkle;
                        eyePlot.AddDataPoint(new DataPointGraph()
                        {
                            Time = CurrentTime, values = eyeDict, Confidence = confidence
                        });

                        smile_cumm = smileDict[0];
                        frown_cumm = smileDict[1];
                        brow_up_cumm = browDict[0];
                        brow_down_cumm = browDict[1];
                        widen_cumm = eyeDict[0];
                        wrinkle_cumm = eyeDict[1];
                    }
                    else
                    {
                        // If no AUs are present, disable the AU visualization
                        MainGrid.ColumnDefinitions[2].Width = new GridLength(0);
                        eyePlot.Visibility = Visibility.Collapsed;
                        browPlot.Visibility = Visibility.Collapsed;
                        smilePlot.Visibility = Visibility.Collapsed;
                    }

                    Dictionary <int, double> poseDict = new Dictionary <int, double>();
                    poseDict[0] = -pose[3];
                    poseDict[1] = pose[4];
                    poseDict[2] = pose[5];
                    headPosePlot.AddDataPoint(new DataPointGraph()
                    {
                        Time = CurrentTime, values = poseDict, Confidence = confidence
                    });

                    Dictionary <int, double> gazeDict = new Dictionary <int, double>();
                    gazeDict[0] = gaze_angle.Item1 * (180.0 / Math.PI);
                    gazeDict[0] = 0.5 * old_gaze_x + 0.5 * gazeDict[0];
                    gazeDict[1] = -gaze_angle.Item2 * (180.0 / Math.PI);
                    gazeDict[1] = 0.5 * old_gaze_y + 0.5 * gazeDict[1];
                    gazePlot.AddDataPoint(new DataPointGraph()
                    {
                        Time = CurrentTime, values = gazeDict, Confidence = confidence
                    });

                    old_gaze_x = gazeDict[0];
                    old_gaze_y = gazeDict[1];

                    if (latest_img == null)
                    {
                        latest_img = frame.CreateWriteableBitmap();
                    }

                    frame.UpdateWriteableBitmap(latest_img);

                    video.Source = latest_img;
                    video.Confidence = confidence;
                    video.FPS = processing_fps.GetFPS();

                    if (!detection_succeeding)
                    {
                        video.OverlayLines.Clear();
                        video.OverlayPoints.Clear();
                        video.OverlayEyePoints.Clear();
                        video.GazeLines.Clear();
                    }
                    else
                    {
                        video.OverlayLines = lines;

                        List <Point> landmark_points = new List <Point>();
                        foreach (var p in landmarks)
                        {
                            landmark_points.Add(new Point(p.Item1, p.Item2));
                        }

                        List <Point> eye_landmark_points = new List <Point>();
                        foreach (var p in eye_landmarks)
                        {
                            eye_landmark_points.Add(new Point(p.Item1, p.Item2));
                        }


                        video.OverlayPoints = landmark_points;
                        video.OverlayEyePoints = eye_landmark_points;
                        video.GazeLines = gaze_lines;
                    }
                }));

                if (reset)
                {
                    if (resetPoint.HasValue)
                    {
                        landmark_detector.Reset(resetPoint.Value.X, resetPoint.Value.Y);
                        resetPoint = null;
                    }
                    else
                    {
                        landmark_detector.Reset();
                    }

                    face_analyser.Reset();
                    reset = false;

                    Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
                    {
                        headPosePlot.ClearDataPoints();
                        gazePlot.ClearDataPoints();
                        smilePlot.ClearDataPoints();
                        browPlot.ClearDataPoints();
                        eyePlot.ClearDataPoints();
                    }));
                }

                frame_id++;
            }
            reader.Close();
            latest_img = null;
        }
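
The repeated 0.7 * previous + 0.3 * current updates above are an exponential moving average that damps frame-to-frame jitter in the AU signals. A minimal sketch of the same smoothing as a reusable helper (the name is illustrative):

        // Exponentially smooth a noisy per-frame value; alpha = 0.3 matches
        // the 0.7 / 0.3 weighting used in the loop above
        private static double Smooth(double previous, double current, double alpha = 0.3)
        {
            return (1.0 - alpha) * previous + alpha * current;
        }

        // e.g. smile_cumm would become Smooth(smile_cumm, smile)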
Example #5
        // Capturing and processing the video frame by frame
        private void VideoLoop(UtilitiesOF.SequenceReader reader)
        {
            Thread.CurrentThread.IsBackground = true;

            String root = AppDomain.CurrentDomain.BaseDirectory;
            FaceModelParameters model_params = new FaceModelParameters(root, true, false, false);

            // Initialize the face detector
            FaceDetector face_detector = new FaceDetector(model_params.GetHaarLocation(), model_params.GetMTCNNLocation());

            // If the MTCNN model is not available, fall back to the HOG detector
            if (!face_detector.IsMTCNNLoaded())
            {
                model_params.SetFaceDetector(false, true, false);
            }

            CLNF face_model = new CLNF(model_params);
            GazeAnalyserManaged gaze_analyser = new GazeAnalyserManaged();

            DateTime? startTime = CurrentTime;

            var lastFrameTime = CurrentTime;

            while (running)
            {
                //////////////////////////////////////////////
                // CAPTURE FRAME AND DETECT LANDMARKS FOLLOWED BY THE REQUIRED IMAGE PROCESSING
                //////////////////////////////////////////////

                RawImage frame = reader.GetNextImage();

                lastFrameTime = CurrentTime;
                processing_fps.AddFrame();

                var grayFrame = reader.GetCurrentFrameGray();

                if (mirror_image)
                {
                    frame.Mirror();
                    grayFrame.Mirror();
                }

                bool detectionSucceeding = ProcessFrame(face_model, gaze_analyser, model_params, frame, grayFrame, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                lock (recording_lock)
                {
                    if (recording)
                    {
                        // Add objects to recording queues
                        List <float> pose = new List <float>();
                        face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                        RawImage image = new RawImage(frame);
                        recording_objects.Enqueue(new Tuple <RawImage, bool, List <float> >(image, detectionSucceeding, pose));
                    }
                }

                List <Tuple <System.Windows.Point, System.Windows.Point> > lines = null;
                List <Tuple <float, float> > eye_landmarks = null;
                List <System.Windows.Point>  landmarks     = new List <System.Windows.Point>();
                List <Tuple <System.Windows.Point, System.Windows.Point> > gaze_lines = null;
                Tuple <float, float> gaze_angle = new Tuple <float, float>(0, 0);
                var    visibilities             = face_model.GetVisibilities();
                double scale = face_model.GetRigidParams()[0];

                if (detectionSucceeding)
                {
                    // CalculateAllLandmarks returns (x, y) pairs as floats
                    List <Tuple <float, float> > landmark_pairs = face_model.CalculateAllLandmarks();

                    foreach (var p in landmark_pairs)
                    {
                        landmarks.Add(new System.Windows.Point(p.Item1, p.Item2));
                    }

                    eye_landmarks = face_model.CalculateVisibleEyeLandmarks();

                    gaze_lines = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                    gaze_angle = gaze_analyser.GetGazeAngle();

                    lines = face_model.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                }

                if (reset)
                {
                    face_model.Reset();
                    reset = false;
                }

                // Visualisation updating
                try
                {
                    Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
                    {
                        if (latest_img == null)
                        {
                            latest_img = frame.CreateWriteableBitmap();
                        }

                        List <float> pose = new List <float>();
                        face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                        int yaw = (int)(pose[4] * 180 / Math.PI + 0.5);
                        int yaw_abs = Math.Abs(yaw);

                        int roll = (int)(pose[5] * 180 / Math.PI + 0.5);
                        int roll_abs = Math.Abs(roll);

                        int pitch = (int)(pose[3] * 180 / Math.PI + 0.5);
                        int pitch_abs = Math.Abs(pitch);

                        YawLabel.Content = yaw_abs + "°";
                        RollLabel.Content = roll_abs + "°";
                        PitchLabel.Content = pitch_abs + "°";

                        if (yaw > 0)
                        {
                            YawLabelDir.Content = "Right";
                        }
                        else if (yaw < 0)
                        {
                            YawLabelDir.Content = "Left";
                        }
                        else
                        {
                            YawLabelDir.Content = "Straight";
                        }

                        if (pitch > 0)
                        {
                            PitchLabelDir.Content = "Down";
                        }
                        else if (pitch < 0)
                        {
                            PitchLabelDir.Content = "Up";
                        }
                        else
                        {
                            PitchLabelDir.Content = "Straight";
                        }

                        if (roll > 0)
                        {
                            RollLabelDir.Content = "Left";
                        }
                        else if (roll < 0)
                        {
                            RollLabelDir.Content = "Right";
                        }
                        else
                        {
                            RollLabelDir.Content = "Straight";
                        }

                        XPoseLabel.Content = (int)pose[0] + " mm";
                        YPoseLabel.Content = (int)pose[1] + " mm";
                        ZPoseLabel.Content = (int)pose[2] + " mm";

                        String x_angle = String.Format("{0:F0}°", gaze_angle.Item1 * (180.0 / Math.PI));
                        String y_angle = String.Format("{0:F0}°", gaze_angle.Item2 * (180.0 / Math.PI));
                        YawLabelGaze.Content = x_angle;
                        PitchLabelGaze.Content = y_angle;

                        if (gaze_angle.Item1 > 0)
                        {
                            YawLabelGazeDir.Content = "Right";
                        }
                        else if (gaze_angle.Item1 < 0)
                        {
                            YawLabelGazeDir.Content = "Left";
                        }
                        else
                        {
                            YawLabelGazeDir.Content = "Straight";
                        }

                        if (gaze_angle.Item2 > 0)
                        {
                            PitchLabelGazeDir.Content = "Down";
                        }
                        else if (gaze_angle.Item2 < 0)
                        {
                            PitchLabelGazeDir.Content = "Up";
                        }
                        else
                        {
                            PitchLabelGazeDir.Content = "Straight";
                        }

                        double confidence = face_model.GetConfidence();

                        if (confidence < 0)
                        {
                            confidence = 0;
                        }
                        else if (confidence > 1)
                        {
                            confidence = 1;
                        }

                        frame.UpdateWriteableBitmap(latest_img);
                        webcam_img.Clear();

                        webcam_img.Source = latest_img;
                        webcam_img.Confidence.Add(confidence);
                        webcam_img.FPS = processing_fps.GetFPS();
                        if (detectionSucceeding)
                        {
                            webcam_img.OverlayLines.Add(lines);
                            webcam_img.OverlayPoints.Add(landmarks);
                            webcam_img.OverlayPointsVisibility.Add(visibilities);
                            webcam_img.FaceScale.Add(scale);

                            List <System.Windows.Point> eye_landmark_points = new List <System.Windows.Point>();
                            foreach (var p in eye_landmarks)
                            {
                                eye_landmark_points.Add(new System.Windows.Point(p.Item1, p.Item2));
                            }


                            webcam_img.OverlayEyePoints.Add(eye_landmark_points);
                            webcam_img.GazeLines.Add(gaze_lines);

                            // Publish the information for other applications
                            String str_head_pose = String.Format("{0}:{1:F2}, {2:F2}, {3:F2}, {4:F2}, {5:F2}, {6:F2}", "HeadPose", pose[0], pose[1], pose[2],
                                                                 pose[3] * 180 / Math.PI, pose[4] * 180 / Math.PI, pose[5] * 180 / Math.PI);

                            zero_mq_socket.Send(new ZFrame(str_head_pose, Encoding.UTF8));

                            String str_gaze = String.Format("{0}:{1:F2}, {2:F2}", "GazeAngle", gaze_angle.Item1 * (180.0 / Math.PI), gaze_angle.Item2 * (180.0 / Math.PI));

                            zero_mq_socket.Send(new ZFrame(str_gaze, Encoding.UTF8));
                        }
                    }));

                    while (running && pause)
                    {
                        Thread.Sleep(10);
                    }
                }
                catch (TaskCanceledException)
                {
                    // Quitting
                    break;
                }
            }
            reader.Close();
            System.Console.Out.WriteLine("Thread finished");
        }
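
The frames published over ZeroMQ above are plain "Name:v1, v2, ..." strings, so a subscriber can decode them with basic string handling. A sketch of the receiving side, assuming the frame has already been read into a string and that the values were formatted with '.' as the decimal separator:

        // Sketch: parse a published "HeadPose:x, y, z, pitch, yaw, roll" frame
        private static double[] ParsePose(string message)
        {
            int colon = message.IndexOf(':');
            string[] parts = message.Substring(colon + 1).Split(',');

            double[] values = new double[parts.Length];
            for (int i = 0; i < parts.Length; ++i)
            {
                values[i] = double.Parse(parts[i].Trim(), System.Globalization.CultureInfo.InvariantCulture);
            }
            return values;
        }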
Example #6
        private void CaptureLoop()
        {
            Thread.CurrentThread.IsBackground = true;

            var lastFrameTime = CurrentTime;

            while (true)
            {
                //////////////////////////////////////////////
                // CAPTURE FRAME AND DETECT LANDMARKS
                //////////////////////////////////////////////

                if (fpsLimit > 0)
                {
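                    // Busy-wait in 1 ms steps until the inter-frame interval has elapsed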
                    while (CurrentTime < lastFrameTime + TimeSpan.FromSeconds(1.0 / fpsLimit))
                    {
                        Thread.Sleep(1);
                    }
                }


                RawImage frame = null;
                try
                {
                    frame = capture.GetNextFrame();
                }
                catch (PsycheInterop.CaptureFailedException)
                {
                    if (videoFile != null)
                    {
                        capture  = new Capture(videoFile);
                        fpsLimit = capture.GetFPS();

                        new Thread(CaptureLoop).Start();
                    }
                    break;
                }
                lastFrameTime = CurrentTime;
                videoFps.AddFrame();

                var grayFrame = capture.GetCurrentFrameGray();

                if (grayFrame == null)
                {
                    continue;
                }

                frameQueue.TryAdd(new Tuple <RawImage, RawImage>(frame, grayFrame));

                try
                {
                    Dispatcher.Invoke(() =>
                    {
                        if (latestImg == null)
                        {
                            latestImg = frame.CreateWriteableBitmap();
                        }

                        fpsLabel.Content = "Video: " + videoFps.GetFPS().ToString("0") + " FPS | Tracking: " + trackingFps.GetFPS().ToString("0") + " FPS";

                        if (!detectionSucceeding)
                        {
                            frame.UpdateWriteableBitmap(latestImg);

                            video.OverlayLines.Clear();
                            video.OverlayPoints.Clear();

                            video.Source = latestImg;
                        }
                    });
                }
                catch (TaskCanceledException)
                {
                    // Quitting
                    break;
                }
            }
        }
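
The frameQueue.TryAdd call above implies a producer/consumer split, with a separate tracking thread draining the queue. A minimal consumer sketch, assuming frameQueue is a BlockingCollection<Tuple<RawImage, RawImage>>, running is a shared flag, and TrackFrame is a hypothetical landmark-detection step:

        // Hypothetical consumer: drain colour/gray frame pairs off the queue
        private void TrackingLoop()
        {
            Thread.CurrentThread.IsBackground = true;

            while (running)
            {
                Tuple <RawImage, RawImage> pair;

                // Wait up to 100 ms for the capture thread to publish a frame
                if (frameQueue.TryTake(out pair, 100))
                {
                    detectionSucceeding = TrackFrame(pair.Item1, pair.Item2);
                    trackingFps.AddFrame();
                }
            }
        }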