private Bitmap ProcessImage(SequenceReader reader)
{
    // set up the face model
    var root = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"..\..\");
    var faceModel = new FaceModelParameters(root, false);
    faceModel.optimiseForImages();

    // set up a face detector and a landmark detector
    var faceDetector = new FaceDetector();
    var landmarkDetector = new CLNF(faceModel);

    // read the image from the sequence reader
    var frame = new RawImage(reader.GetNextImage());
    var grayFrame = new RawImage(reader.GetCurrentFrameGray());

    // detect faces
    var faces = new List<Rect>();
    var confidences = new List<double>();
    faceDetector.DetectFacesHOG(faces, grayFrame, confidences);

    // detect landmarks for each face
    var landmarks = new List<List<Tuple<double, double>>>();
    foreach (var face in faces)
    {
        landmarkDetector.DetectFaceLandmarksInImage(grayFrame, face, faceModel);
        var points = landmarkDetector.CalculateAllLandmarks();
        landmarks.Add(points);
    }

    // draw rectangles and confidence values on the image
    var image = frame.ToBitmap();
    using (Graphics g = Graphics.FromImage(image))
    {
        int index = 0;
        var pen = new System.Drawing.Pen(System.Drawing.Color.LightGreen, 4);
        var pen2 = new System.Drawing.Pen(System.Drawing.Color.Red, 4);
        var font = new Font(FontFamily.GenericSansSerif, 30);
        foreach (var face in faces)
        {
            g.DrawRectangle(pen, (int)face.X, (int)face.Y, (int)face.Width, (int)face.Height);
            g.DrawString($"{confidences[index]:0.00}", font, Brushes.Black, (int)face.X + 36, (int)face.Y - 36);

            // draw landmark points
            foreach (var p in landmarks[index])
            {
                g.DrawRectangle(pen2, new Rectangle((int)p.Item1, (int)p.Item2, 4, 4));
            }
            index++;
        }
    }
    return image;
}
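Calling this method is straightforward: construct a SequenceReader for the input, run ProcessImage, and display the returned bitmap. Here's a minimal sketch; note that the SequenceReader constructor arguments and the resultPictureBox control are assumptions for illustration, so adjust them to your OpenFace version and UI:

// A minimal usage sketch (hypothetical wiring, not the sample's actual UI code).
// Assumption: SequenceReader can be constructed from an image path; the exact
// constructor signature depends on your OpenFace build.
private void RunDetection(string imagePath)
{
    var reader = new SequenceReader(imagePath, true);   // assumed constructor
    Bitmap result = ProcessImage(reader);

    // show the annotated bitmap (hypothetical WinForms PictureBox)
    resultPictureBox.Image = result;
    reader.Close();
}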
private Bitmap ProcessImage(SequenceReader reader)
{
    // set up the face model
    var root = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"..\..\");
    var faceModel = new FaceModelParameters(root, false);
    faceModel.optimiseForImages();

    // set up a face detector, a landmark detector, and a face analyser
    var faceDetector = new FaceDetector();
    var landmarkDetector = new CLNF(faceModel);
    var faceAnalyser = new FaceAnalyserManaged(root, true, 0);

    // read the image from the sequence reader
    var frame = new RawImage(reader.GetNextImage());
    var grayFrame = new RawImage(reader.GetCurrentFrameGray());

    // detect faces
    var faces = new List<Rect>();
    var confidences = new List<double>();
    faceDetector.DetectFacesHOG(faces, grayFrame, confidences);

    // detect landmarks
    var image = frame.ToBitmap();
    foreach (var face in faces)
    {
        landmarkDetector.DetectFaceLandmarksInImage(grayFrame, face, faceModel);
        var points = landmarkDetector.CalculateAllLandmarks();

        // calculate action units
        var features = faceAnalyser.PredictStaticAUsAndComputeFeatures(grayFrame, points);

        // find the active action units
        var actionUnits = from au in features.Item2
                          where au.Value > 0
                          orderby au.Key
                          select au.Key;

        // get top emotions
        var topEmotions = GetTopEmotions(actionUnits);

        // draw the emotions on the face
        using (Graphics g = Graphics.FromImage(image))
        {
            string name = string.Join(Environment.NewLine, topEmotions);
            Font fnt = new Font("Verdana", 15, GraphicsUnit.Pixel);
            Brush brs = new SolidBrush(Color.Black);
            var bump = 36;
            System.Drawing.SizeF stringSize = g.MeasureString(name, fnt);
            g.FillRectangle(new SolidBrush(Color.Yellow), (int)face.X + bump, (int)face.Y, stringSize.Width, stringSize.Height);
            g.DrawString(name, fnt, brs, (int)face.X + bump, (int)face.Y);
        }
    }
    return image;
}
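The GetTopEmotions helper is not shown in the listing. One possible implementation scores each basic emotion by how many of its characteristic action units are active, using the well-known EMFACS-style AU-to-emotion mapping. The AU sets and the 0.5 threshold below are illustrative choices, not the sample's definitive values (requires System.Linq):

// A possible GetTopEmotions implementation (illustrative only).
// Assumption: an emotion counts as "present" when at least half of its
// characteristic action units (EMFACS-style mapping) are active.
private static readonly Dictionary<string, string[]> EmotionAUs = new Dictionary<string, string[]>()
{
    { "Happiness", new[] { "AU06", "AU12" } },
    { "Sadness",   new[] { "AU01", "AU04", "AU15" } },
    { "Surprise",  new[] { "AU01", "AU02", "AU05", "AU26" } },
    { "Fear",      new[] { "AU01", "AU02", "AU04", "AU05", "AU07", "AU20", "AU26" } },
    { "Anger",     new[] { "AU04", "AU05", "AU07", "AU23" } },
    { "Disgust",   new[] { "AU09", "AU15" } }
};

private IEnumerable<string> GetTopEmotions(IEnumerable<string> actionUnits)
{
    var active = new HashSet<string>(actionUnits);

    // score each emotion by the fraction of its characteristic AUs that are active
    return from pair in EmotionAUs
           let score = pair.Value.Count(au => active.Contains(au)) / (double)pair.Value.Length
           where score >= 0.5
           orderby score descending
           select pair.Key;
}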
// The main function call for processing sequences
private void ProcessSequence(SequenceReader reader)
{
    Thread.CurrentThread.Priority = ThreadPriority.Highest;
    SetupFeatureExtractionMode();
    thread_running = true;

    // Reload the face landmark detector if needed
    ReloadLandmarkDetector();
    if (!landmark_detector.isLoaded())
    {
        DetectorNotFoundWarning();
        EndMode();
        thread_running = false;
        return;
    }

    // Set the face detector
    face_model_params.SetFaceDetector(DetectorHaar, DetectorHOG, DetectorCNN);
    face_model_params.optimiseForVideo();

    // Set up the visualization
    Visualizer visualizer_of = new Visualizer(ShowTrackedVideo || RecordTracked, ShowAppearance, ShowAppearance, false);

    // Initialize the face analyser
    face_analyser = new FaceAnalyserManaged(AppDomain.CurrentDomain.BaseDirectory, DynamicAUModels, image_output_size, MaskAligned);

    // Reset the tracker
    landmark_detector.Reset();

    // Load the first frame
    var frame = new RawImage(reader.GetNextImage());
    var gray_frame = new RawImage(reader.GetCurrentFrameGray());

    // Set up recording
    RecorderOpenFaceParameters rec_params = new RecorderOpenFaceParameters(true, reader.IsWebcam(),
        Record2DLandmarks, Record3DLandmarks, RecordModelParameters, RecordPose, RecordAUs,
        RecordGaze, RecordHOG, RecordTracked, RecordAligned, false,
        reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), reader.GetFPS());
    RecorderOpenFace recorder = new RecorderOpenFace(reader.GetName(), rec_params, record_root);

    // For FPS tracking
    DateTime? startTime = CurrentTime;
    var lastFrameTime = CurrentTime;

    // An empty image indicates that the stream is over
    while (!gray_frame.IsEmpty)
    {
        if (!thread_running)
        {
            break;
        }

        double progress = reader.GetProgress();
        bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(frame, face_model_params, gray_frame);

        // The face analysis step (for AUs and eye gaze)
        face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, false);
        gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

        // Only the final face will contain the details
        VisualizeFeatures(frame, visualizer_of, landmark_detector.CalculateAllLandmarks(), landmark_detector.GetVisibilities(),
            detection_succeeding, true, false, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);

        // Record an observation
        RecordObservation(recorder, visualizer_of.GetVisImage(), 0, detection_succeeding,
            reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), reader.GetTimestamp(), reader.GetFrameNumber());

        if (RecordTracked)
        {
            recorder.WriteObservationTracked();
        }

        while (thread_running && thread_paused && skip_frames == 0)
        {
            Thread.Sleep(10);
        }
        if (skip_frames > 0)
        {
            skip_frames--;
        }

        // Load the next frame
        frame = new RawImage(reader.GetNextImage());
        gray_frame = new RawImage(reader.GetCurrentFrameGray());

        lastFrameTime = CurrentTime;
        processing_fps.AddFrame();
    }

    // Finalize the recording and flush it to disk
    recorder.Close();

    // Post-process the AU recordings
    if (RecordAUs)
    {
        face_analyser.PostProcessOutputFile(recorder.GetCSVFile());
    }

    // Close the open video/webcam
    reader.Close();
    EndMode();
}
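ProcessSequence blocks until the stream runs out, so it has to run off the UI thread. A minimal sketch of how the call might be launched and stopped; the processing_thread field and method names here are assumptions for illustration, since the sample's actual button handlers aren't shown:

// Launching the blocking processing loop on a worker thread (illustrative).
private Thread processing_thread;   // assumed field

private void StartProcessing(SequenceReader reader)
{
    processing_thread = new Thread(() => ProcessSequence(reader));
    processing_thread.IsBackground = true;
    processing_thread.Start();
}

private void StopProcessing()
{
    thread_running = false;         // the loop checks this flag every frame
    processing_thread?.Join();
}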
// The main function call for processing the webcam feed
private void ProcessingLoop(SequenceReader reader)
{
    thread_running = true;
    Thread.CurrentThread.IsBackground = true;

    DateTime? startTime = CurrentTime;
    var lastFrameTime = CurrentTime;

    landmark_detector.Reset();
    face_analyser.Reset();

    int frame_id = 0;
    double old_gaze_x = 0;
    double old_gaze_y = 0;
    double smile_cumm = 0;
    double frown_cumm = 0;
    double brow_up_cumm = 0;
    double brow_down_cumm = 0;
    double widen_cumm = 0;
    double wrinkle_cumm = 0;

    while (thread_running)
    {
        // Loading an image file
        RawImage frame = new RawImage(reader.GetNextImage());
        RawImage gray_frame = new RawImage(reader.GetCurrentFrameGray());

        lastFrameTime = CurrentTime;
        processing_fps.AddFrame();

        bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(gray_frame, face_model_params);

        // The face analysis step (only done if recording AUs, HOGs or video)
        face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, true);
        gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

        // Clamp the confidence to [0, 1]
        double confidence = landmark_detector.GetConfidence();
        if (confidence < 0) { confidence = 0; }
        else if (confidence > 1) { confidence = 1; }

        List<double> pose = new List<double>();
        landmark_detector.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
        List<double> non_rigid_params = landmark_detector.GetNonRigidParams();
        double scale = landmark_detector.GetRigidParams()[0];
        double time_stamp = (DateTime.Now - (DateTime)startTime).TotalMilliseconds;

        List<Tuple<Point, Point>> lines = null;
        List<Tuple<double, double>> landmarks = null;
        List<Tuple<double, double>> eye_landmarks = null;
        List<Tuple<Point, Point>> gaze_lines = null;
        Tuple<double, double> gaze_angle = gaze_analyser.GetGazeAngle();

        if (detection_succeeding)
        {
            landmarks = landmark_detector.CalculateVisibleLandmarks();
            eye_landmarks = landmark_detector.CalculateVisibleEyeLandmarks();
            lines = landmark_detector.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
            gaze_lines = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
        }

        // Visualisation
        Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
        {
            var au_regs = face_analyser.GetCurrentAUsReg();
            if (au_regs.Count > 0)
            {
                double smile = (au_regs["AU12"] + au_regs["AU06"] + au_regs["AU25"]) / 13.0;
                double frown = (au_regs["AU15"] + au_regs["AU17"]) / 12.0;
                double brow_up = (au_regs["AU01"] + au_regs["AU02"]) / 10.0;
                double brow_down = au_regs["AU04"] / 5.0;
                double eye_widen = au_regs["AU05"] / 3.0;
                double nose_wrinkle = au_regs["AU09"] / 4.0;

                // Smooth the plotted values: keep 70% of the running value, take 30% of the new one
                Dictionary<int, double> smileDict = new Dictionary<int, double>();
                smileDict[0] = 0.7 * smile_cumm + 0.3 * smile;
                smileDict[1] = 0.7 * frown_cumm + 0.3 * frown;
                smilePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = smileDict, Confidence = confidence });

                Dictionary<int, double> browDict = new Dictionary<int, double>();
                browDict[0] = 0.7 * brow_up_cumm + 0.3 * brow_up;
                browDict[1] = 0.7 * brow_down_cumm + 0.3 * brow_down;
                browPlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = browDict, Confidence = confidence });

                Dictionary<int, double> eyeDict = new Dictionary<int, double>();
                eyeDict[0] = 0.7 * widen_cumm + 0.3 * eye_widen;
                eyeDict[1] = 0.7 * wrinkle_cumm + 0.3 * nose_wrinkle;
                eyePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = eyeDict, Confidence = confidence });

                smile_cumm = smileDict[0];
                frown_cumm = smileDict[1];
                brow_up_cumm = browDict[0];
                brow_down_cumm = browDict[1];
                widen_cumm = eyeDict[0];
                wrinkle_cumm = eyeDict[1];
            }
            else
            {
                // If no AUs are present, disable the AU visualization
                MainGrid.ColumnDefinitions[2].Width = new GridLength(0);
                eyePlot.Visibility = Visibility.Collapsed;
                browPlot.Visibility = Visibility.Collapsed;
                smilePlot.Visibility = Visibility.Collapsed;
            }

            Dictionary<int, double> poseDict = new Dictionary<int, double>();
            poseDict[0] = -pose[3];
            poseDict[1] = pose[4];
            poseDict[2] = pose[5];
            headPosePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = poseDict, Confidence = confidence });

            Dictionary<int, double> gazeDict = new Dictionary<int, double>();
            gazeDict[0] = gaze_angle.Item1 * (180.0 / Math.PI);
            gazeDict[0] = 0.5 * old_gaze_x + 0.5 * gazeDict[0];
            gazeDict[1] = -gaze_angle.Item2 * (180.0 / Math.PI);
            gazeDict[1] = 0.5 * old_gaze_y + 0.5 * gazeDict[1];
            gazePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = gazeDict, Confidence = confidence });
            old_gaze_x = gazeDict[0];
            old_gaze_y = gazeDict[1];

            if (latest_img == null)
            {
                latest_img = frame.CreateWriteableBitmap();
            }
            frame.UpdateWriteableBitmap(latest_img);
            video.Source = latest_img;
            video.Confidence = confidence;
            video.FPS = processing_fps.GetFPS();

            if (!detection_succeeding)
            {
                video.OverlayLines.Clear();
                video.OverlayPoints.Clear();
                video.OverlayEyePoints.Clear();
                video.GazeLines.Clear();
            }
            else
            {
                video.OverlayLines = lines;

                List<Point> landmark_points = new List<Point>();
                foreach (var p in landmarks)
                {
                    landmark_points.Add(new Point(p.Item1, p.Item2));
                }
                List<Point> eye_landmark_points = new List<Point>();
                foreach (var p in eye_landmarks)
                {
                    eye_landmark_points.Add(new Point(p.Item1, p.Item2));
                }
                video.OverlayPoints = landmark_points;
                video.OverlayEyePoints = eye_landmark_points;
                video.GazeLines = gaze_lines;
            }
        }));

        if (reset)
        {
            if (resetPoint.HasValue)
            {
                landmark_detector.Reset(resetPoint.Value.X, resetPoint.Value.Y);
                resetPoint = null;
            }
            else
            {
                landmark_detector.Reset();
            }
            face_analyser.Reset();
            reset = false;

            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                headPosePlot.ClearDataPoints();
                gazePlot.ClearDataPoints();
                smilePlot.ClearDataPoints();
                browPlot.ClearDataPoints();
                eyePlot.ClearDataPoints();
            }));
        }
        frame_id++;
    }
    reader.Close();
    latest_img = null;
}
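The 0.7/0.3 blend used for every plotted value is an exponential moving average: each update keeps 70% of the running value and takes 30% of the new measurement, which damps frame-to-frame jitter in the raw AU estimates (the gaze plot uses the same idea with 0.5/0.5 weights). Factored out, the smoother might look like this; the class and its default weight are illustrative, not part of the sample:

// An exponential moving average, equivalent to the inline
// 0.7 * cumulative + 0.3 * current updates above (illustrative helper).
public class ExponentialSmoother
{
    private readonly double alpha;   // weight of the new sample, 0.3 above
    private double current;
    private bool seeded;

    public ExponentialSmoother(double alpha = 0.3) { this.alpha = alpha; }

    public double Add(double sample)
    {
        // first sample seeds the average; later samples blend in with weight alpha
        current = seeded ? (1 - alpha) * current + alpha * sample : sample;
        seeded = true;
        return current;
    }
}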
// Capturing and processing the video frame by frame
private void VideoLoop(UtilitiesOF.SequenceReader reader)
{
    Thread.CurrentThread.IsBackground = true;

    String root = AppDomain.CurrentDomain.BaseDirectory;
    FaceModelParameters model_params = new FaceModelParameters(root, true, false, false);

    // Initialize the face detector
    FaceDetector face_detector = new FaceDetector(model_params.GetHaarLocation(), model_params.GetMTCNNLocation());

    // If the MTCNN model is not available, use HOG
    if (!face_detector.IsMTCNNLoaded())
    {
        model_params.SetFaceDetector(false, true, false);
    }

    CLNF face_model = new CLNF(model_params);
    GazeAnalyserManaged gaze_analyser = new GazeAnalyserManaged();

    DateTime? startTime = CurrentTime;
    var lastFrameTime = CurrentTime;

    while (running)
    {
        //////////////////////////////////////////////
        // CAPTURE FRAME AND DETECT LANDMARKS FOLLOWED BY THE REQUIRED IMAGE PROCESSING
        //////////////////////////////////////////////

        RawImage frame = reader.GetNextImage();
        lastFrameTime = CurrentTime;
        processing_fps.AddFrame();

        var grayFrame = reader.GetCurrentFrameGray();

        if (mirror_image)
        {
            frame.Mirror();
            grayFrame.Mirror();
        }

        bool detectionSucceeding = ProcessFrame(face_model, gaze_analyser, model_params, frame, grayFrame,
            reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

        lock (recording_lock)
        {
            if (recording)
            {
                // Add objects to the recording queues
                List<float> pose = new List<float>();
                face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                RawImage image = new RawImage(frame);
                recording_objects.Enqueue(new Tuple<RawImage, bool, List<float>>(image, detectionSucceeding, pose));
            }
        }

        List<Tuple<System.Windows.Point, System.Windows.Point>> lines = null;
        List<Tuple<float, float>> eye_landmarks = null;
        List<System.Windows.Point> landmarks = new List<System.Windows.Point>();
        List<Tuple<System.Windows.Point, System.Windows.Point>> gaze_lines = null;
        Tuple<float, float> gaze_angle = new Tuple<float, float>(0, 0);

        var visibilities = face_model.GetVisibilities();
        double scale = face_model.GetRigidParams()[0];

        if (detectionSucceeding)
        {
            List<Tuple<float, float>> landmarks_doubles = face_model.CalculateAllLandmarks();
            foreach (var p in landmarks_doubles)
            {
                landmarks.Add(new System.Windows.Point(p.Item1, p.Item2));
            }
            eye_landmarks = face_model.CalculateVisibleEyeLandmarks();
            gaze_lines = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
            gaze_angle = gaze_analyser.GetGazeAngle();
            lines = face_model.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
        }

        if (reset)
        {
            face_model.Reset();
            reset = false;
        }

        // Visualisation updating
        try
        {
            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                if (latest_img == null)
                {
                    latest_img = frame.CreateWriteableBitmap();
                }

                List<float> pose = new List<float>();
                face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                int yaw = (int)(pose[4] * 180 / Math.PI + 0.5);
                int yaw_abs = Math.Abs(yaw);
                int roll = (int)(pose[5] * 180 / Math.PI + 0.5);
                int roll_abs = Math.Abs(roll);
                int pitch = (int)(pose[3] * 180 / Math.PI + 0.5);
                int pitch_abs = Math.Abs(pitch);

                YawLabel.Content = yaw_abs + "°";
                RollLabel.Content = roll_abs + "°";
                PitchLabel.Content = pitch_abs + "°";

                if (yaw > 0) { YawLabelDir.Content = "Right"; }
                else if (yaw < 0) { YawLabelDir.Content = "Left"; }
                else { YawLabelDir.Content = "Straight"; }

                if (pitch > 0) { PitchLabelDir.Content = "Down"; }
                else if (pitch < 0) { PitchLabelDir.Content = "Up"; }
                else { PitchLabelDir.Content = "Straight"; }

                if (roll > 0) { RollLabelDir.Content = "Left"; }
                else if (roll < 0) { RollLabelDir.Content = "Right"; }
                else { RollLabelDir.Content = "Straight"; }

                XPoseLabel.Content = (int)pose[0] + " mm";
                YPoseLabel.Content = (int)pose[1] + " mm";
                ZPoseLabel.Content = (int)pose[2] + " mm";

                String x_angle = String.Format("{0:F0}°", gaze_angle.Item1 * (180.0 / Math.PI));
                String y_angle = String.Format("{0:F0}°", gaze_angle.Item2 * (180.0 / Math.PI));
                YawLabelGaze.Content = x_angle;
                PitchLabelGaze.Content = y_angle;

                if (gaze_angle.Item1 > 0) { YawLabelGazeDir.Content = "Right"; }
                else if (gaze_angle.Item1 < 0) { YawLabelGazeDir.Content = "Left"; }
                else { YawLabelGazeDir.Content = "Straight"; }

                if (gaze_angle.Item2 > 0) { PitchLabelGazeDir.Content = "Down"; }
                else if (gaze_angle.Item2 < 0) { PitchLabelGazeDir.Content = "Up"; }
                else { PitchLabelGazeDir.Content = "Straight"; }

                // Clamp the confidence to [0, 1]
                double confidence = face_model.GetConfidence();
                if (confidence < 0) { confidence = 0; }
                else if (confidence > 1) { confidence = 1; }

                frame.UpdateWriteableBitmap(latest_img);
                webcam_img.Clear();
                webcam_img.Source = latest_img;
                webcam_img.Confidence.Add(confidence);
                webcam_img.FPS = processing_fps.GetFPS();

                if (detectionSucceeding)
                {
                    webcam_img.OverlayLines.Add(lines);
                    webcam_img.OverlayPoints.Add(landmarks);
                    webcam_img.OverlayPointsVisibility.Add(visibilities);
                    webcam_img.FaceScale.Add(scale);

                    List<System.Windows.Point> eye_landmark_points = new List<System.Windows.Point>();
                    foreach (var p in eye_landmarks)
                    {
                        eye_landmark_points.Add(new System.Windows.Point(p.Item1, p.Item2));
                    }
                    webcam_img.OverlayEyePoints.Add(eye_landmark_points);
                    webcam_img.GazeLines.Add(gaze_lines);

                    // Publish the information for other applications
                    String str_head_pose = String.Format("{0}:{1:F2}, {2:F2}, {3:F2}, {4:F2}, {5:F2}, {6:F2}", "HeadPose",
                        pose[0], pose[1], pose[2],
                        pose[3] * 180 / Math.PI, pose[4] * 180 / Math.PI, pose[5] * 180 / Math.PI);
                    zero_mq_socket.Send(new ZFrame(str_head_pose, Encoding.UTF8));

                    String str_gaze = String.Format("{0}:{1:F2}, {2:F2}", "GazeAngle",
                        gaze_angle.Item1 * (180.0 / Math.PI), gaze_angle.Item2 * (180.0 / Math.PI));
                    zero_mq_socket.Send(new ZFrame(str_gaze, Encoding.UTF8));
                }
            }));

            while (running && pause)
            {
                Thread.Sleep(10);
            }
        }
        catch (TaskCanceledException)
        {
            // Quitting
            break;
        }
    }
    reader.Close();
    System.Console.Out.WriteLine("Thread finished");
}