private void ProcessIndividualImages(ImageReader reader)
{
    // Make sure the GUI is set up appropriately
    SetupFeatureExtractionMode();

    // Indicate we will start running the thread
    thread_running = true;

    // Reload the face landmark detector if needed
    ReloadLandmarkDetector();
    if (!landmark_detector.isLoaded())
    {
        DetectorNotFoundWarning();
        EndMode();
        thread_running = false;
        return;
    }

    // Set up the parameters optimized for working on individual images rather than sequences
    face_model_params.optimiseForImages();

    // Set up the visualization
    Visualizer visualizer_of = new Visualizer(ShowTrackedVideo || RecordTracked, ShowAppearance, ShowAppearance, false);

    // Initialize the face detector if it has not been initialized yet
    if (face_detector == null)
    {
        face_detector = new FaceDetector(face_model_params.GetHaarLocation(), face_model_params.GetMTCNNLocation());
    }

    // Initialize the face analyser
    face_analyser = new FaceAnalyserManaged(AppDomain.CurrentDomain.BaseDirectory, false, image_output_size, MaskAligned);

    // Loading an image file
    var frame = new RawImage(reader.GetNextImage());
    var gray_frame = new RawImage(reader.GetCurrentFrameGray());

    // For FPS tracking
    DateTime? startTime = CurrentTime;
    var lastFrameTime = CurrentTime;

    // This will be false when the image is not available
    while (reader.isOpened())
    {
        if (!thread_running)
        {
            break;
        }

        // Set up recording
        RecorderOpenFaceParameters rec_params = new RecorderOpenFaceParameters(false, false,
            Record2DLandmarks, Record3DLandmarks, RecordModelParameters, RecordPose,
            RecordAUs, RecordGaze, RecordHOG, RecordTracked, RecordAligned, true,
            reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), 0);

        RecorderOpenFace recorder = new RecorderOpenFace(reader.GetName(), rec_params, record_root);

        // Detect faces here and return bounding boxes
        List<Rect> face_detections = new List<Rect>();
        List<float> confidences = new List<float>();
        if (DetectorHOG)
        {
            face_detector.DetectFacesHOG(face_detections, gray_frame, confidences);
        }
        else if (DetectorCNN)
        {
            face_detector.DetectFacesMTCNN(face_detections, frame, confidences);
        }
        else if (DetectorHaar)
        {
            face_detector.DetectFacesHaar(face_detections, gray_frame, confidences);
        }

        // For visualization
        double progress = reader.GetProgress();

        for (int i = 0; i < face_detections.Count; ++i)
        {
            bool detection_succeeding = landmark_detector.DetectFaceLandmarksInImage(frame, face_detections[i], face_model_params, gray_frame);

            var landmarks = landmark_detector.CalculateAllLandmarks();

            // Predict action units
            var au_preds = face_analyser.PredictStaticAUsAndComputeFeatures(frame, landmarks);

            // Predict eye gaze
            gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

            // Only the final face will contain the details
            VisualizeFeatures(frame, visualizer_of, landmarks, landmark_detector.GetVisibilities(), detection_succeeding, i == 0, true,
                reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);

            // Record an observation
            RecordObservation(recorder, visualizer_of.GetVisImage(), i, detection_succeeding,
                reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), 0, 0);
        }

        frame = new RawImage(reader.GetNextImage());
        gray_frame = new RawImage(reader.GetCurrentFrameGray());

        // Write out the tracked image
        if (RecordTracked)
        {
            recorder.WriteObservationTracked();
        }

        // Do not carry state across images
        landmark_detector.Reset();
        face_analyser.Reset();
        recorder.Close();

        lastFrameTime = CurrentTime;
        processing_fps.AddFrame();

        // TODO: how to report errors from the reader here? Exceptions? Logging? A problem
        // for future versions? (One possible shape is sketched after this method.)
    }

    EndMode();
}
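// One possible answer to the TODO above: wrap the image reads in a try/catch so a single
// unreadable file is skipped instead of killing the whole batch. This is a minimal sketch,
// not part of the existing class; it assumes the reader signals a failed read by throwing,
// which the OpenFace API does not guarantee.
private bool TryReadNextImage(ImageReader reader, out RawImage frame, out RawImage gray_frame)
{
    frame = null;
    gray_frame = null;
    try
    {
        frame = new RawImage(reader.GetNextImage());
        gray_frame = new RawImage(reader.GetCurrentFrameGray());
        return true;
    }
    catch (Exception e)
    {
        // Log and move on; the caller decides whether to continue with the next image
        System.Diagnostics.Trace.TraceWarning("Failed to read image: " + e.Message);
        return false;
    }
}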
// The main function call for processing the webcam feed
private void ProcessingLoop(SequenceReader reader)
{
    thread_running = true;

    Thread.CurrentThread.IsBackground = true;

    DateTime? startTime = CurrentTime;
    var lastFrameTime = CurrentTime;

    landmark_detector.Reset();
    face_analyser.Reset();

    int frame_id = 0;

    double old_gaze_x = 0;
    double old_gaze_y = 0;

    double smile_cumm = 0;
    double frown_cumm = 0;
    double brow_up_cumm = 0;
    double brow_down_cumm = 0;
    double widen_cumm = 0;
    double wrinkle_cumm = 0;

    while (thread_running)
    {
        // Loading an image file
        RawImage frame = new RawImage(reader.GetNextImage());
        RawImage gray_frame = new RawImage(reader.GetCurrentFrameGray());

        lastFrameTime = CurrentTime;
        processing_fps.AddFrame();

        bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(gray_frame, face_model_params);

        // The face analysis step (only done if recording AUs, HOGs or video)
        face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, true);
        gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

        // Clamp the tracking confidence to [0, 1]
        double confidence = landmark_detector.GetConfidence();
        if (confidence < 0)
        {
            confidence = 0;
        }
        else if (confidence > 1)
        {
            confidence = 1;
        }

        List<double> pose = new List<double>();
        landmark_detector.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
        List<double> non_rigid_params = landmark_detector.GetNonRigidParams();

        double scale = landmark_detector.GetRigidParams()[0];

        double time_stamp = (DateTime.Now - (DateTime)startTime).TotalMilliseconds;

        List<Tuple<Point, Point>> lines = null;
        List<Tuple<double, double>> landmarks = null;
        List<Tuple<double, double>> eye_landmarks = null;
        List<Tuple<Point, Point>> gaze_lines = null;
        Tuple<double, double> gaze_angle = gaze_analyser.GetGazeAngle();

        if (detection_succeeding)
        {
            landmarks = landmark_detector.CalculateVisibleLandmarks();
            eye_landmarks = landmark_detector.CalculateVisibleEyeLandmarks();
            lines = landmark_detector.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
            gaze_lines = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
        }

        // Visualisation
        Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
        {
            var au_regs = face_analyser.GetCurrentAUsReg();
            if (au_regs.Count > 0)
            {
                // Normalize the relevant AU intensities into rough expression scores
                double smile = (au_regs["AU12"] + au_regs["AU06"] + au_regs["AU25"]) / 13.0;
                double frown = (au_regs["AU15"] + au_regs["AU17"]) / 12.0;

                double brow_up = (au_regs["AU01"] + au_regs["AU02"]) / 10.0;
                double brow_down = au_regs["AU04"] / 5.0;

                double eye_widen = au_regs["AU05"] / 3.0;
                double nose_wrinkle = au_regs["AU09"] / 4.0;

                // Exponentially smooth the scores before plotting them
                // (see the smoothing sketch after this method)
                Dictionary<int, double> smileDict = new Dictionary<int, double>();
                smileDict[0] = 0.7 * smile_cumm + 0.3 * smile;
                smileDict[1] = 0.7 * frown_cumm + 0.3 * frown;
                smilePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = smileDict, Confidence = confidence });

                Dictionary<int, double> browDict = new Dictionary<int, double>();
                browDict[0] = 0.7 * brow_up_cumm + 0.3 * brow_up;
                browDict[1] = 0.7 * brow_down_cumm + 0.3 * brow_down;
                browPlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = browDict, Confidence = confidence });

                Dictionary<int, double> eyeDict = new Dictionary<int, double>();
                eyeDict[0] = 0.7 * widen_cumm + 0.3 * eye_widen;
                eyeDict[1] = 0.7 * wrinkle_cumm + 0.3 * nose_wrinkle;
                eyePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = eyeDict, Confidence = confidence });

                smile_cumm = smileDict[0];
                frown_cumm = smileDict[1];
                brow_up_cumm = browDict[0];
                brow_down_cumm = browDict[1];
                widen_cumm = eyeDict[0];
                wrinkle_cumm = eyeDict[1];
            }
            else
            {
                // If no AUs are present, disable the AU visualization
                MainGrid.ColumnDefinitions[2].Width = new GridLength(0);
                eyePlot.Visibility = Visibility.Collapsed;
                browPlot.Visibility = Visibility.Collapsed;
                smilePlot.Visibility = Visibility.Collapsed;
            }

            Dictionary<int, double> poseDict = new Dictionary<int, double>();
            poseDict[0] = -pose[3];
            poseDict[1] = pose[4];
            poseDict[2] = pose[5];
            headPosePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = poseDict, Confidence = confidence });

            // Convert the gaze angles to degrees and average them with the previous values
            Dictionary<int, double> gazeDict = new Dictionary<int, double>();
            gazeDict[0] = gaze_angle.Item1 * (180.0 / Math.PI);
            gazeDict[0] = 0.5 * old_gaze_x + 0.5 * gazeDict[0];
            gazeDict[1] = -gaze_angle.Item2 * (180.0 / Math.PI);
            gazeDict[1] = 0.5 * old_gaze_y + 0.5 * gazeDict[1];
            gazePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = gazeDict, Confidence = confidence });

            old_gaze_x = gazeDict[0];
            old_gaze_y = gazeDict[1];

            if (latest_img == null)
            {
                latest_img = frame.CreateWriteableBitmap();
            }

            frame.UpdateWriteableBitmap(latest_img);

            video.Source = latest_img;
            video.Confidence = confidence;
            video.FPS = processing_fps.GetFPS();

            if (!detection_succeeding)
            {
                video.OverlayLines.Clear();
                video.OverlayPoints.Clear();
                video.OverlayEyePoints.Clear();
                video.GazeLines.Clear();
            }
            else
            {
                video.OverlayLines = lines;

                List<Point> landmark_points = new List<Point>();
                foreach (var p in landmarks)
                {
                    landmark_points.Add(new Point(p.Item1, p.Item2));
                }

                List<Point> eye_landmark_points = new List<Point>();
                foreach (var p in eye_landmarks)
                {
                    eye_landmark_points.Add(new Point(p.Item1, p.Item2));
                }

                video.OverlayPoints = landmark_points;
                video.OverlayEyePoints = eye_landmark_points;
                video.GazeLines = gaze_lines;
            }
        }));

        if (reset)
        {
            if (resetPoint.HasValue)
            {
                landmark_detector.Reset(resetPoint.Value.X, resetPoint.Value.Y);
                resetPoint = null;
            }
            else
            {
                landmark_detector.Reset();
            }

            face_analyser.Reset();
            reset = false;

            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                headPosePlot.ClearDataPoints();
                gazePlot.ClearDataPoints();
                smilePlot.ClearDataPoints();
                browPlot.ClearDataPoints();
                eyePlot.ClearDataPoints();
            }));
        }

        frame_id++;
    }

    reader.Close();
    latest_img = null;
}
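// The plots above smooth every value with a first-order exponential moving average:
// plotted = (1 - alpha) * previous + alpha * raw, with alpha = 0.3 for the AU scores and
// alpha = 0.5 for gaze. A minimal sketch of that filter as a reusable helper; the class
// and its usage are illustrative assumptions, not part of the existing code.
private sealed class ExponentialSmoother
{
    private readonly double alpha; // weight given to the incoming raw value
    private double state;          // last smoothed value; starts at 0 like the *_cumm fields

    public ExponentialSmoother(double alpha)
    {
        this.alpha = alpha;
    }

    public double Update(double raw)
    {
        state = (1.0 - alpha) * state + alpha * raw;
        return state;
    }
}
// E.g. new ExponentialSmoother(0.3).Update(smile) reproduces
// smileDict[0] = 0.7 * smile_cumm + 0.3 * smile from the loop above.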