// Capturing and processing the video frame by frame
private void VideoLoop(UtilitiesOF.SequenceReader reader)
{
    Thread.CurrentThread.IsBackground = true;

    String root = AppDomain.CurrentDomain.BaseDirectory;
    FaceModelParameters model_params = new FaceModelParameters(root, true, false, false);

    // Initialize the face detector
    FaceDetector face_detector = new FaceDetector(model_params.GetHaarLocation(), model_params.GetMTCNNLocation());

    // If MTCNN model not available, use HOG
    if (!face_detector.IsMTCNNLoaded())
    {
        model_params.SetFaceDetector(false, true, false);
    }

    CLNF face_model = new CLNF(model_params);
    GazeAnalyserManaged gaze_analyser = new GazeAnalyserManaged();

    DateTime? startTime = CurrentTime;
    var lastFrameTime = CurrentTime;

    while (running)
    {
        //////////////////////////////////////////////
        // CAPTURE FRAME AND DETECT LANDMARKS FOLLOWED BY THE REQUIRED IMAGE PROCESSING
        //////////////////////////////////////////////

        RawImage frame = reader.GetNextImage();

        lastFrameTime = CurrentTime;
        processing_fps.AddFrame();

        var grayFrame = reader.GetCurrentFrameGray();

        if (mirror_image)
        {
            frame.Mirror();
            grayFrame.Mirror();
        }

        bool detectionSucceeding = ProcessFrame(face_model, gaze_analyser, model_params, frame, grayFrame,
            reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

        lock (recording_lock)
        {
            if (recording)
            {
                // Add objects to recording queues
                List<float> pose = new List<float>();
                face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                RawImage image = new RawImage(frame);
                recording_objects.Enqueue(new Tuple<RawImage, bool, List<float>>(image, detectionSucceeding, pose));
            }
        }

        List<Tuple<System.Windows.Point, System.Windows.Point>> lines = null;
        List<Tuple<float, float>> eye_landmarks = null;
        List<System.Windows.Point> landmarks = new List<System.Windows.Point>();
        List<Tuple<System.Windows.Point, System.Windows.Point>> gaze_lines = null;
        Tuple<float, float> gaze_angle = new Tuple<float, float>(0, 0);
        var visibilities = face_model.GetVisibilities();
        double scale = face_model.GetRigidParams()[0];

        if (detectionSucceeding)
        {
            List<Tuple<float, float>> landmarks_doubles = face_model.CalculateAllLandmarks();

            foreach (var p in landmarks_doubles)
            {
                landmarks.Add(new System.Windows.Point(p.Item1, p.Item2));
            }

            eye_landmarks = face_model.CalculateVisibleEyeLandmarks();

            gaze_lines = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
            gaze_angle = gaze_analyser.GetGazeAngle();

            lines = face_model.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
        }

        if (reset)
        {
            face_model.Reset();
            reset = false;
        }

        // Visualisation updating
        try
        {
            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                if (latest_img == null)
                {
                    latest_img = frame.CreateWriteableBitmap();
                }

                // pose = [X, Y, Z (mm), pitch, yaw, roll (radians)], as indexed below
                List<float> pose = new List<float>();
                face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                // Convert the rotation estimates from radians to degrees
                int yaw = (int)(pose[4] * 180 / Math.PI + 0.5);
                int yaw_abs = Math.Abs(yaw);

                int roll = (int)(pose[5] * 180 / Math.PI + 0.5);
                int roll_abs = Math.Abs(roll);

                int pitch = (int)(pose[3] * 180 / Math.PI + 0.5);
                int pitch_abs = Math.Abs(pitch);

                YawLabel.Content = yaw_abs + "°";
                RollLabel.Content = roll_abs + "°";
                PitchLabel.Content = pitch_abs + "°";

                if (yaw > 0) { YawLabelDir.Content = "Right"; }
                else if (yaw < 0) { YawLabelDir.Content = "Left"; }
                else { YawLabelDir.Content = "Straight"; }

                if (pitch > 0) { PitchLabelDir.Content = "Down"; }
                else if (pitch < 0) { PitchLabelDir.Content = "Up"; }
                else { PitchLabelDir.Content = "Straight"; }

                if (roll > 0) { RollLabelDir.Content = "Left"; }
                else if (roll < 0) { RollLabelDir.Content = "Right"; }
                else { RollLabelDir.Content = "Straight"; }

                XPoseLabel.Content = (int)pose[0] + " mm";
                YPoseLabel.Content = (int)pose[1] + " mm";
                ZPoseLabel.Content = (int)pose[2] + " mm";

                String x_angle = String.Format("{0:F0}°", gaze_angle.Item1 * (180.0 / Math.PI));
                String y_angle = String.Format("{0:F0}°", gaze_angle.Item2 * (180.0 / Math.PI));

                YawLabelGaze.Content = x_angle;
                PitchLabelGaze.Content = y_angle;

                if (gaze_angle.Item1 > 0) { YawLabelGazeDir.Content = "Right"; }
                else if (gaze_angle.Item1 < 0) { YawLabelGazeDir.Content = "Left"; }
                else { YawLabelGazeDir.Content = "Straight"; }

                if (gaze_angle.Item2 > 0) { PitchLabelGazeDir.Content = "Down"; }
                else if (gaze_angle.Item2 < 0) { PitchLabelGazeDir.Content = "Up"; }
                else { PitchLabelGazeDir.Content = "Straight"; }

                // Clamp the tracker confidence to [0, 1]
                double confidence = face_model.GetConfidence();
                if (confidence < 0) { confidence = 0; }
                else if (confidence > 1) { confidence = 1; }

                frame.UpdateWriteableBitmap(latest_img);
                webcam_img.Clear();

                webcam_img.Source = latest_img;
                webcam_img.Confidence.Add(confidence);
                webcam_img.FPS = processing_fps.GetFPS();

                if (detectionSucceeding)
                {
                    webcam_img.OverlayLines.Add(lines);
                    webcam_img.OverlayPoints.Add(landmarks);
                    webcam_img.OverlayPointsVisibility.Add(visibilities);
                    webcam_img.FaceScale.Add(scale);

                    List<System.Windows.Point> eye_landmark_points = new List<System.Windows.Point>();
                    foreach (var p in eye_landmarks)
                    {
                        eye_landmark_points.Add(new System.Windows.Point(p.Item1, p.Item2));
                    }

                    webcam_img.OverlayEyePoints.Add(eye_landmark_points);
                    webcam_img.GazeLines.Add(gaze_lines);

                    // Publish the information for other applications
                    String str_head_pose = String.Format("{0}:{1:F2}, {2:F2}, {3:F2}, {4:F2}, {5:F2}, {6:F2}", "HeadPose",
                        pose[0], pose[1], pose[2],
                        pose[3] * 180 / Math.PI, pose[4] * 180 / Math.PI, pose[5] * 180 / Math.PI);
                    zero_mq_socket.Send(new ZFrame(str_head_pose, Encoding.UTF8));

                    String str_gaze = String.Format("{0}:{1:F2}, {2:F2}", "GazeAngle",
                        gaze_angle.Item1 * (180.0 / Math.PI), gaze_angle.Item2 * (180.0 / Math.PI));
                    zero_mq_socket.Send(new ZFrame(str_gaze, Encoding.UTF8));
                }
            }));

            // Spin (with a short sleep) while paused, bailing out if the loop is stopped
            while (running && pause)
            {
                Thread.Sleep(10);
            }
        }
        catch (TaskCanceledException)
        {
            // Quitting
            break;
        }
    }

    reader.Close();
    System.Console.Out.WriteLine("Thread finished");
}