// The main function call for processing sequences
        private void ProcessSequence(SequenceReader reader)
        {
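            // Give this worker thread elevated scheduling priority so frame processing keeps up with the source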
            Thread.CurrentThread.Priority = ThreadPriority.Highest;

            SetupFeatureExtractionMode();

            thread_running = true;

            // Reload the face landmark detector if needed
            ReloadLandmarkDetector();

            if (!landmark_detector.isLoaded())
            {
                DetectorNotFoundWarning();
                EndMode();
                thread_running = false;
                return;
            }

            // Set the face detector
            face_model_params.SetFaceDetector(DetectorHaar, DetectorHOG, DetectorCNN);
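            // Tune the parameters for tracking continuous video rather than independent images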
            face_model_params.optimiseForVideo();

            // Setup the visualization
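            // Note: the single ShowAppearance toggle drives both appearance-related flags below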
            Visualizer visualizer_of = new Visualizer(ShowTrackedVideo || RecordTracked, ShowAppearance, ShowAppearance, false);

            // Initialize the face analyser
            face_analyser = new FaceAnalyserManaged(AppDomain.CurrentDomain.BaseDirectory, DynamicAUModels, image_output_size, MaskAligned);

            // Reset the tracker
            landmark_detector.Reset();

            // Loading an image file
            var frame      = new RawImage(reader.GetNextImage());
            var gray_frame = new RawImage(reader.GetCurrentFrameGray());

            // Setup recording
            RecorderOpenFaceParameters rec_params = new RecorderOpenFaceParameters(true, reader.IsWebcam(),
                                                                                   Record2DLandmarks, Record3DLandmarks, RecordModelParameters, RecordPose, RecordAUs,
                                                                                   RecordGaze, RecordHOG, RecordTracked, RecordAligned, false,
                                                                                   reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), reader.GetFPS());

            // All recorder outputs (the CSV and any tracked/aligned media) are written under record_root
            RecorderOpenFace recorder = new RecorderOpenFace(reader.GetName(), rec_params, record_root);

            // For FPS tracking
            DateTime? startTime     = CurrentTime;
            var       lastFrameTime = CurrentTime;

            // Empty image would indicate that the stream is over
            while (!gray_frame.IsEmpty)
            {
                if (!thread_running)
                {
                    break;
                }

                double progress = reader.GetProgress();

                bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(frame, face_model_params, gray_frame);

                // The face analysis step (for AUs and eye gaze)
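                // (the last argument selects online mode; false here, since the AU outputs are post-processed after the run)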
                face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, false);

                gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                // Only the final face will contain the details
                VisualizeFeatures(frame, visualizer_of, landmark_detector.CalculateAllLandmarks(), landmark_detector.GetVisibilities(), detection_succeeding, true, false, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);

                // Record an observation
                RecordObservation(recorder, visualizer_of.GetVisImage(), 0, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), reader.GetTimestamp(), reader.GetFrameNumber());

                if (RecordTracked)
                {
                    recorder.WriteObservationTracked();
                }

                // Busy-wait while the user has paused processing; skip_frames > 0 allows single-frame stepping
                while (thread_running && thread_paused && skip_frames == 0)
                {
                    Thread.Sleep(10);
                }

                if (skip_frames > 0)
                {
                    skip_frames--;
                }

                frame      = new RawImage(reader.GetNextImage());
                gray_frame = new RawImage(reader.GetCurrentFrameGray());

                lastFrameTime = CurrentTime;
                processing_fps.AddFrame();
            }

            // Finalize the recording and flush to disk
            recorder.Close();

            // Post-process the AU recordings
            if (RecordAUs)
            {
                face_analyser.PostProcessOutputFile(recorder.GetCSVFile());
            }

            // Close the open video/webcam
            reader.Close();

            EndMode();
        }
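
        // Usage sketch (not from the original source): ProcessSequence blocks until the
        // stream ends, so callers typically run it on a worker thread. The wiring below
        // is an assumption for illustration only.
        private void StartSequenceProcessing(SequenceReader reader)
        {
            // Run the blocking call off the UI thread; thread_running (set inside
            // ProcessSequence) can be cleared elsewhere to stop it early
            var processing_thread = new Thread(() => ProcessSequence(reader));
            processing_thread.Start();
        }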
Example #2
        // The main function call for processing the webcam feed
        private void ProcessingLoop(SequenceReader reader)
        {
            thread_running = true;

            Thread.CurrentThread.IsBackground = true; // do not keep the process alive if the UI shuts down

            DateTime? startTime = CurrentTime;

            var lastFrameTime = CurrentTime;

            landmark_detector.Reset();
            face_analyser.Reset();

            int frame_id = 0;

            double old_gaze_x = 0;
            double old_gaze_y = 0;

            // Exponentially smoothed expression scores, updated each frame in the visualisation step below
            double smile_cumm     = 0;
            double frown_cumm     = 0;
            double brow_up_cumm   = 0;
            double brow_down_cumm = 0;
            double widen_cumm     = 0;
            double wrinkle_cumm   = 0;

            while (thread_running)
            {
                // Loading an image file
                RawImage frame      = new RawImage(reader.GetNextImage());
                RawImage gray_frame = new RawImage(reader.GetCurrentFrameGray());

                lastFrameTime = CurrentTime;
                processing_fps.AddFrame();

                bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(gray_frame, face_model_params);

                // The face analysis step (only done if recording AUs, HOGs or video)
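                // (last argument true: online/live mode, unlike the offline sequence path which post-processes afterwards)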
                face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, true);
                gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                // Clamp the tracker confidence to [0, 1] before it is passed to the plots and video overlay
                double confidence = landmark_detector.GetConfidence();

                if (confidence < 0)
                {
                    confidence = 0;
                }
                else if (confidence > 1)
                {
                    confidence = 1;
                }

                List <double> pose = new List <double>();

                landmark_detector.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                List <double> non_rigid_params = landmark_detector.GetNonRigidParams();
                double        scale            = landmark_detector.GetRigidParams()[0];

                double time_stamp = (DateTime.Now - (DateTime)startTime).TotalMilliseconds;


                List <Tuple <Point, Point> >   lines         = null;
                List <Tuple <double, double> > landmarks     = null;
                List <Tuple <double, double> > eye_landmarks = null;
                List <Tuple <Point, Point> >   gaze_lines    = null;
                Tuple <double, double>         gaze_angle    = gaze_analyser.GetGazeAngle();

                if (detection_succeeding)
                {
                    landmarks     = landmark_detector.CalculateVisibleLandmarks();
                    eye_landmarks = landmark_detector.CalculateVisibleEyeLandmarks();
                    lines         = landmark_detector.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                    gaze_lines    = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                }

                // Visualisation: marshal the UI updates onto the dispatcher thread, giving up after 200 ms so a busy UI cannot stall processing
                Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
                {
                    var au_regs = face_analyser.GetCurrentAUsReg();
                    if (au_regs.Count > 0)
                    {
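                        // AU intensity regressors output values on roughly a 0-5 scale; the divisors
                        // below normalise each combined expression score to approximately [0, 1]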
                        double smile = (au_regs["AU12"] + au_regs["AU06"] + au_regs["AU25"]) / 13.0;
                        double frown = (au_regs["AU15"] + au_regs["AU17"]) / 12.0;

                        double brow_up = (au_regs["AU01"] + au_regs["AU02"]) / 10.0;
                        double brow_down = au_regs["AU04"] / 5.0;

                        double eye_widen = au_regs["AU05"] / 3.0;
                        double nose_wrinkle = au_regs["AU09"] / 4.0;

                        // 70/30 blend with the previous smoothed value: simple exponential smoothing of each score
                        Dictionary <int, double> smileDict = new Dictionary <int, double>();
                        smileDict[0] = 0.7 * smile_cumm + 0.3 * smile;
                        smileDict[1] = 0.7 * frown_cumm + 0.3 * frown;
                        smilePlot.AddDataPoint(new DataPointGraph()
                        {
                            Time = CurrentTime, values = smileDict, Confidence = confidence
                        });

                        Dictionary <int, double> browDict = new Dictionary <int, double>();
                        browDict[0] = 0.7 * brow_up_cumm + 0.3 * brow_up;
                        browDict[1] = 0.7 * brow_down_cumm + 0.3 * brow_down;
                        browPlot.AddDataPoint(new DataPointGraph()
                        {
                            Time = CurrentTime, values = browDict, Confidence = confidence
                        });

                        Dictionary <int, double> eyeDict = new Dictionary <int, double>();
                        eyeDict[0] = 0.7 * widen_cumm + 0.3 * eye_widen;
                        eyeDict[1] = 0.7 * wrinkle_cumm + 0.3 * nose_wrinkle;
                        eyePlot.AddDataPoint(new DataPointGraph()
                        {
                            Time = CurrentTime, values = eyeDict, Confidence = confidence
                        });

                        smile_cumm = smileDict[0];
                        frown_cumm = smileDict[1];
                        brow_up_cumm = browDict[0];
                        brow_down_cumm = browDict[1];
                        widen_cumm = eyeDict[0];
                        wrinkle_cumm = eyeDict[1];
                    }
                    else
                    {
                        // If no AUs present disable the AU visualization
                        MainGrid.ColumnDefinitions[2].Width = new GridLength(0);
                        eyePlot.Visibility = Visibility.Collapsed;
                        browPlot.Visibility = Visibility.Collapsed;
                        smilePlot.Visibility = Visibility.Collapsed;
                    }

                    // pose holds [Tx, Ty, Tz, Rx, Ry, Rz]; only the three rotation components are plotted (X flipped for display)
                    Dictionary <int, double> poseDict = new Dictionary <int, double>();
                    poseDict[0] = -pose[3];
                    poseDict[1] = pose[4];
                    poseDict[2] = pose[5];
                    headPosePlot.AddDataPoint(new DataPointGraph()
                    {
                        Time = CurrentTime, values = poseDict, Confidence = confidence
                    });

                    // Convert the gaze angles from radians to degrees and average 50/50 with the previous frame to reduce jitter
                    Dictionary <int, double> gazeDict = new Dictionary <int, double>();
                    gazeDict[0] = gaze_angle.Item1 * (180.0 / Math.PI);
                    gazeDict[0] = 0.5 * old_gaze_x + 0.5 * gazeDict[0];
                    gazeDict[1] = -gaze_angle.Item2 * (180.0 / Math.PI);
                    gazeDict[1] = 0.5 * old_gaze_y + 0.5 * gazeDict[1];
                    gazePlot.AddDataPoint(new DataPointGraph()
                    {
                        Time = CurrentTime, values = gazeDict, Confidence = confidence
                    });

                    old_gaze_x = gazeDict[0];
                    old_gaze_y = gazeDict[1];

                    if (latest_img == null)
                    {
                        latest_img = frame.CreateWriteableBitmap();
                    }

                    frame.UpdateWriteableBitmap(latest_img);

                    video.Source = latest_img;
                    video.Confidence = confidence;
                    video.FPS = processing_fps.GetFPS();

                    if (!detection_succeeding)
                    {
                        video.OverlayLines.Clear();
                        video.OverlayPoints.Clear();
                        video.OverlayEyePoints.Clear();
                        video.GazeLines.Clear();
                    }
                    else
                    {
                        video.OverlayLines = lines;

                        List <Point> landmark_points = new List <Point>();
                        foreach (var p in landmarks)
                        {
                            landmark_points.Add(new Point(p.Item1, p.Item2));
                        }

                        List <Point> eye_landmark_points = new List <Point>();
                        foreach (var p in eye_landmarks)
                        {
                            eye_landmark_points.Add(new Point(p.Item1, p.Item2));
                        }


                        video.OverlayPoints = landmark_points;
                        video.OverlayEyePoints = eye_landmark_points;
                        video.GazeLines = gaze_lines;
                    }
                }));

                if (reset)
                {
                    if (resetPoint.HasValue)
                    {
                        landmark_detector.Reset(resetPoint.Value.X, resetPoint.Value.Y);
                        resetPoint = null;
                    }
                    else
                    {
                        landmark_detector.Reset();
                    }

                    face_analyser.Reset();
                    reset = false;

                    Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
                    {
                        headPosePlot.ClearDataPoints();
                        gazePlot.ClearDataPoints();
                        smilePlot.ClearDataPoints();
                        browPlot.ClearDataPoints();
                        eyePlot.ClearDataPoints();
                    }));
                }

                frame_id++;
            }
            reader.Close();
            latest_img = null;
        }
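
        // Usage sketch (an assumption, not from the original source): the reset flag and
        // resetPoint consumed in the loop above would typically be set by a UI handler,
        // e.g. a double-click on the video control to re-seed the tracker at that point.
        private void video_MouseDoubleClick(object sender, MouseButtonEventArgs e)
        {
            resetPoint = e.GetPosition(video);   // click position within the video control
            reset      = true;                   // picked up by the processing loop above
        }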