public MainWindow()
{
    InitializeComponent();
    this.DataContext = this; // For WPF data binding

    // Set the icon
    Uri iconUri = new Uri("logo1.ico", UriKind.RelativeOrAbsolute);
    this.Icon = BitmapFrame.Create(iconUri);

    String root = AppDomain.CurrentDomain.BaseDirectory;

    face_model_params = new FaceModelParameters(root, LandmarkDetectorCECLM, LandmarkDetectorCLNF, LandmarkDetectorCLM);

    // Initialize the face detector
    face_detector = new FaceDetector(face_model_params.GetHaarLocation(), face_model_params.GetMTCNNLocation());

    // If the MTCNN model is not available, fall back to HOG
    if (!face_detector.IsMTCNNLoaded())
    {
        FaceDetCNN.IsEnabled = false;
        DetectorCNN = false;
        DetectorHOG = true;
    }
    face_model_params.SetFaceDetector(DetectorHaar, DetectorHOG, DetectorCNN);

    landmark_detector = new CLNF(face_model_params);
    gaze_analyser = new GazeAnalyserManaged();
}
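// The MTCNN-to-HOG fallback above recurs in every snippet in this section. A minimal
// sketch of factoring it into a shared helper, using only the FaceModelParameters and
// FaceDetector calls already shown here (the helper name ConfigureFaceDetector is ours,
// not OpenFace's):
private static FaceDetector ConfigureFaceDetector(FaceModelParameters model_params,
    bool use_haar, bool use_hog, bool use_cnn)
{
    var face_detector = new FaceDetector(model_params.GetHaarLocation(), model_params.GetMTCNNLocation());

    // If the MTCNN model is not available, fall back to HOG
    if (!face_detector.IsMTCNNLoaded())
    {
        use_cnn = false;
        use_hog = true;
    }

    model_params.SetFaceDetector(use_haar, use_hog, use_cnn);
    return face_detector;
}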
private void Initialize(object sender, PipelineRunEventArgs e)
{
    string rootDirectory = AppDomain.CurrentDomain.BaseDirectory;

    // Landmark detector parameters: CECLM on, CLNF and CLM off, tuned for video input
    faceModelParameters = new FaceModelParameters(rootDirectory, true, false, false);
    faceModelParameters.optimiseForVideo();

    faceDetector = new FaceDetector(faceModelParameters.GetHaarLocation(), faceModelParameters.GetMTCNNLocation());

    // If the MTCNN model is not available, fall back to HOG
    if (!faceDetector.IsMTCNNLoaded())
    {
        faceModelParameters.SetFaceDetector(false, true, false);
    }

    landmarkDetector = new CLNF(faceModelParameters);
    faceAnalyser = new FaceAnalyser(rootDirectory, dynamic: true, output_width: 112, mask_aligned: true);
    gazeAnalyser = new GazeAnalyser();

    // Start from a clean tracking state
    landmarkDetector.Reset();
    faceAnalyser.Reset();
}
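// PipelineRunEventArgs indicates this initializer is meant to fire when a Microsoft \psi
// pipeline starts. A minimal wiring sketch, assuming the method above is registered as a
// PipelineRun handler (the hosting code is an illustrative assumption, not from the source;
// requires: using Microsoft.Psi;):
using (var pipeline = Pipeline.Create())
{
    pipeline.PipelineRun += this.Initialize; // runs once, before any messages flow
    pipeline.RunAsync();
    Console.ReadLine(); // keep the pipeline alive until a key is pressed
}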
protected ExtractorBase(FaceModelParameters faceModelParameters)
{
    if (_initialized)
    {
        return;
    }

    ModelParams = faceModelParameters;
    GazeAnalyzer = new GazeAnalyserManaged();

    var face_detector = new FaceDetector(ModelParams.GetHaarLocation(), ModelParams.GetMTCNNLocation());

    // If the MTCNN model is not available, fall back to HOG
    if (!face_detector.IsMTCNNLoaded())
    {
        ModelParams.SetFaceDetector(false, true, false);
    }

    FaceModel = new CLNF(ModelParams);
    _initialized = true;
}
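// A hypothetical subclass, sketching how ExtractorBase might be consumed: the derived type
// supplies the model parameters and then works with the protected FaceModel/GazeAnalyzer
// members (VideoExtractor and its constructor arguments are illustrative assumptions):
public class VideoExtractor : ExtractorBase
{
    public VideoExtractor()
        : base(new FaceModelParameters(AppDomain.CurrentDomain.BaseDirectory, true, false, false))
    {
        // FaceModel and GazeAnalyzer are ready for per-frame detection here
    }
}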
// Capturing and processing the video frame by frame
private void VideoLoop(UtilitiesOF.SequenceReader reader)
{
    Thread.CurrentThread.IsBackground = true;

    String root = AppDomain.CurrentDomain.BaseDirectory;
    FaceModelParameters model_params = new FaceModelParameters(root, true, false, false);

    // Initialize the face detector
    FaceDetector face_detector = new FaceDetector(model_params.GetHaarLocation(), model_params.GetMTCNNLocation());

    // If the MTCNN model is not available, fall back to HOG
    if (!face_detector.IsMTCNNLoaded())
    {
        model_params.SetFaceDetector(false, true, false);
    }

    CLNF face_model = new CLNF(model_params);
    GazeAnalyserManaged gaze_analyser = new GazeAnalyserManaged();

    DateTime? startTime = CurrentTime;
    var lastFrameTime = CurrentTime;

    while (running)
    {
        //////////////////////////////////////////////
        // CAPTURE FRAME AND DETECT LANDMARKS FOLLOWED BY THE REQUIRED IMAGE PROCESSING
        //////////////////////////////////////////////

        RawImage frame = reader.GetNextImage();
        lastFrameTime = CurrentTime;
        processing_fps.AddFrame();

        var grayFrame = reader.GetCurrentFrameGray();

        if (mirror_image)
        {
            frame.Mirror();
            grayFrame.Mirror();
        }

        bool detectionSucceeding = ProcessFrame(face_model, gaze_analyser, model_params, frame, grayFrame,
            reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

        lock (recording_lock)
        {
            if (recording)
            {
                // Add objects to recording queues
                List<float> pose = new List<float>();
                face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                RawImage image = new RawImage(frame);
                recording_objects.Enqueue(new Tuple<RawImage, bool, List<float>>(image, detectionSucceeding, pose));
            }
        }

        List<Tuple<System.Windows.Point, System.Windows.Point>> lines = null;
        List<Tuple<float, float>> eye_landmarks = null;
        List<System.Windows.Point> landmarks = new List<System.Windows.Point>();
        List<Tuple<System.Windows.Point, System.Windows.Point>> gaze_lines = null;
        Tuple<float, float> gaze_angle = new Tuple<float, float>(0, 0);
        var visibilities = face_model.GetVisibilities();
        double scale = face_model.GetRigidParams()[0];

        if (detectionSucceeding)
        {
            List<Tuple<float, float>> landmarks_doubles = face_model.CalculateAllLandmarks();

            foreach (var p in landmarks_doubles)
            {
                landmarks.Add(new System.Windows.Point(p.Item1, p.Item2));
            }

            eye_landmarks = face_model.CalculateVisibleEyeLandmarks();
            gaze_lines = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
            gaze_angle = gaze_analyser.GetGazeAngle();
            lines = face_model.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
        }

        if (reset)
        {
            face_model.Reset();
            reset = false;
        }

        // Visualisation updating
        try
        {
            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                if (latest_img == null)
                {
                    latest_img = frame.CreateWriteableBitmap();
                }

                List<float> pose = new List<float>();
                face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                // Convert the rotation part of the pose from radians to rounded degrees
                int yaw = (int)(pose[4] * 180 / Math.PI + 0.5);
                int yaw_abs = Math.Abs(yaw);
                int roll = (int)(pose[5] * 180 / Math.PI + 0.5);
                int roll_abs = Math.Abs(roll);
                int pitch = (int)(pose[3] * 180 / Math.PI + 0.5);
                int pitch_abs = Math.Abs(pitch);

                YawLabel.Content = yaw_abs + "°";
                RollLabel.Content = roll_abs + "°";
                PitchLabel.Content = pitch_abs + "°";

                if (yaw > 0) { YawLabelDir.Content = "Right"; }
                else if (yaw < 0) { YawLabelDir.Content = "Left"; }
                else { YawLabelDir.Content = "Straight"; }

                if (pitch > 0) { PitchLabelDir.Content = "Down"; }
                else if (pitch < 0) { PitchLabelDir.Content = "Up"; }
                else { PitchLabelDir.Content = "Straight"; }

                if (roll > 0) { RollLabelDir.Content = "Left"; }
                else if (roll < 0) { RollLabelDir.Content = "Right"; }
                else { RollLabelDir.Content = "Straight"; }

                XPoseLabel.Content = (int)pose[0] + " mm";
                YPoseLabel.Content = (int)pose[1] + " mm";
                ZPoseLabel.Content = (int)pose[2] + " mm";

                String x_angle = String.Format("{0:F0}°", gaze_angle.Item1 * (180.0 / Math.PI));
                String y_angle = String.Format("{0:F0}°", gaze_angle.Item2 * (180.0 / Math.PI));

                YawLabelGaze.Content = x_angle;
                PitchLabelGaze.Content = y_angle;

                if (gaze_angle.Item1 > 0) { YawLabelGazeDir.Content = "Right"; }
                else if (gaze_angle.Item1 < 0) { YawLabelGazeDir.Content = "Left"; }
                else { YawLabelGazeDir.Content = "Straight"; }

                if (gaze_angle.Item2 > 0) { PitchLabelGazeDir.Content = "Down"; }
                else if (gaze_angle.Item2 < 0) { PitchLabelGazeDir.Content = "Up"; }
                else { PitchLabelGazeDir.Content = "Straight"; }

                // Clamp the detection confidence to [0, 1]
                double confidence = face_model.GetConfidence();
                if (confidence < 0) { confidence = 0; }
                else if (confidence > 1) { confidence = 1; }

                frame.UpdateWriteableBitmap(latest_img);
                webcam_img.Clear();
                webcam_img.Source = latest_img;
                webcam_img.Confidence.Add(confidence);
                webcam_img.FPS = processing_fps.GetFPS();

                if (detectionSucceeding)
                {
                    webcam_img.OverlayLines.Add(lines);
                    webcam_img.OverlayPoints.Add(landmarks);
                    webcam_img.OverlayPointsVisibility.Add(visibilities);
                    webcam_img.FaceScale.Add(scale);

                    List<System.Windows.Point> eye_landmark_points = new List<System.Windows.Point>();
                    foreach (var p in eye_landmarks)
                    {
                        eye_landmark_points.Add(new System.Windows.Point(p.Item1, p.Item2));
                    }

                    webcam_img.OverlayEyePoints.Add(eye_landmark_points);
                    webcam_img.GazeLines.Add(gaze_lines);

                    // Publish the information for other applications
                    String str_head_pose = String.Format("{0}:{1:F2}, {2:F2}, {3:F2}, {4:F2}, {5:F2}, {6:F2}",
                        "HeadPose", pose[0], pose[1], pose[2],
                        pose[3] * 180 / Math.PI, pose[4] * 180 / Math.PI, pose[5] * 180 / Math.PI);
                    zero_mq_socket.Send(new ZFrame(str_head_pose, Encoding.UTF8));

                    String str_gaze = String.Format("{0}:{1:F2}, {2:F2}", "GazeAngle",
                        gaze_angle.Item1 * (180.0 / Math.PI), gaze_angle.Item2 * (180.0 / Math.PI));
                    zero_mq_socket.Send(new ZFrame(str_gaze, Encoding.UTF8));
                }
            }));

            while (running && pause)
            {
                Thread.Sleep(10);
            }
        }
        catch (TaskCanceledException)
        {
            // Quitting
            break;
        }
    }

    reader.Close();
    System.Console.Out.WriteLine("Thread finished");
}
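// VideoLoop only enqueues recording objects; a separate writer thread would drain
// recording_objects. A minimal consumer sketch under the same lock discipline (the
// RecordingLoop name and the CSV-style output are illustrative assumptions; the actual
// recorder is not shown in this section):
private void RecordingLoop(System.IO.StreamWriter pose_writer)
{
    while (running)
    {
        Tuple<RawImage, bool, List<float>> recording_object = null;
        lock (recording_lock)
        {
            if (recording_objects.Count > 0)
            {
                recording_object = recording_objects.Dequeue();
            }
        }

        if (recording_object == null)
        {
            Thread.Sleep(10); // nothing queued yet, wait for the video loop
            continue;
        }

        // Detection success flag followed by the six pose parameters (X, Y, Z, pitch, yaw, roll)
        pose_writer.WriteLine("{0}, {1}", recording_object.Item2,
            String.Join(", ", recording_object.Item3));
    }
}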
public MainWindow()
{
    InitializeComponent();

    // Set the icon
    Uri iconUri = new Uri("logo1.ico", UriKind.RelativeOrAbsolute);
    this.Icon = BitmapFrame.Create(iconUri);

    String root = AppDomain.CurrentDomain.BaseDirectory;

    // TODO, create a demo version of parameters
    face_model_params = new FaceModelParameters(root, true, false, false);
    face_model_params.optimiseForVideo();

    // Initialize the face detector
    FaceDetector face_detector = new FaceDetector(face_model_params.GetHaarLocation(), face_model_params.GetMTCNNLocation());

    // If the MTCNN model is not available, fall back to HOG
    if (!face_detector.IsMTCNNLoaded())
    {
        face_model_params.SetFaceDetector(false, true, false);
    }

    landmark_detector = new CLNF(face_model_params);
    face_analyser = new FaceAnalyserManaged(root, true, 112, true);
    gaze_analyser = new GazeAnalyserManaged();

    // Set up the colours, series names, and line thicknesses of the live plots
    Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
    {
        headPosePlot.AssocColor(0, Colors.Blue);
        headPosePlot.AssocColor(1, Colors.Red);
        headPosePlot.AssocColor(2, Colors.Green);
        headPosePlot.AssocName(1, "Turn");
        headPosePlot.AssocName(2, "Tilt");
        headPosePlot.AssocName(0, "Up/Down");
        headPosePlot.AssocThickness(0, 2);
        headPosePlot.AssocThickness(1, 2);
        headPosePlot.AssocThickness(2, 2);

        gazePlot.AssocColor(0, Colors.Red);
        gazePlot.AssocColor(1, Colors.Blue);
        gazePlot.AssocName(0, "Left-right");
        gazePlot.AssocName(1, "Up-down");
        gazePlot.AssocThickness(0, 2);
        gazePlot.AssocThickness(1, 2);

        smilePlot.AssocColor(0, Colors.Green);
        smilePlot.AssocColor(1, Colors.Red);
        smilePlot.AssocName(0, "Smile");
        smilePlot.AssocName(1, "Frown");
        smilePlot.AssocThickness(0, 2);
        smilePlot.AssocThickness(1, 2);

        browPlot.AssocColor(0, Colors.Green);
        browPlot.AssocColor(1, Colors.Red);
        browPlot.AssocName(0, "Raise");
        browPlot.AssocName(1, "Furrow");
        browPlot.AssocThickness(0, 2);
        browPlot.AssocThickness(1, 2);

        eyePlot.AssocColor(0, Colors.Green);
        eyePlot.AssocColor(1, Colors.Red);
        eyePlot.AssocName(0, "Eye widen");
        eyePlot.AssocName(1, "Nose wrinkle");
        eyePlot.AssocThickness(0, 2);
        eyePlot.AssocThickness(1, 2);
    }));
}
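// The constructor above only styles the plots; data is pushed to them elsewhere. A sketch
// of deriving the smile/frown series from the face analyser, assuming OpenFace's
// FaceAnalyserManaged.GetCurrentAUsReg(); how the values reach smilePlot depends on the
// plot control's API, which this section does not show:
var aus = face_analyser.GetCurrentAUsReg(); // action unit name -> regressed intensity, roughly 0..5
double smile = aus["AU12"] / 5.0;           // AU12, lip corner puller, drives the "Smile" series
double frown = aus["AU15"] / 5.0;           // AU15, lip corner depressor, drives the "Frown" series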