Example #1
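A WinForms entry point that wires the OpenFace managed wrappers into a game: it hooks the form's gaze-event callback, opens a SequenceReader at 640x400 on the first camera found, loads the face model tuned for video, and constructs the face detector, CLNF landmark detector, and gaze analyser before starting the message loop.
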
        static void Main()
        {
            // initialize application
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            // set up the game form
            form             = new GameForm();
            form.OnGazeMove += new OnGazeMoveDelegate(OnGazeMove);

            // grab the first camera
            var baseFolder = AppDomain.CurrentDomain.BaseDirectory;
            var camera     = SequenceReader.GetCameras(baseFolder).First();

            // set up a sequence reader
            reader = new SequenceReader(camera.Item1, 640, 400);

            // set up the face model
            String root = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"..\..\");

            faceModel = new FaceModelParameters(root, false);
            faceModel.optimiseForVideo();

            // set up a face detector, a landmark detector, and a gaze analyser
            faceDetector     = new FaceDetector();
            landmarkDetector = new CLNF(faceModel);
            gazeAnalyser     = new GazeAnalyserManaged();

            // run the game
            Application.Run(form);
        }
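
Note that GetCameras(baseFolder).First() throws an InvalidOperationException when no capture device is present. A minimal guard could replace the two camera lines above; this is a sketch only, and it assumes GetCameras returns an empty sequence (rather than throwing) when no camera exists:

            // Sketch: fail gracefully when no camera is detected (hypothetical handling,
            // assuming GetCameras returns an empty sequence in that case)
            var cameras = SequenceReader.GetCameras(baseFolder);
            if (!cameras.Any())
            {
                MessageBox.Show("No camera detected.");
                return;
            }
            reader = new SequenceReader(cameras.First().Item1, 640, 400);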
Example #2
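The same detector stack built inside an event handler, with an explicit fallback from the MTCNN face detector to HOG when the MTCNN models fail to load, and Reset() calls that clear tracker state before the first frame.
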
        private void Initialize(object sender, PipelineRunEventArgs e)
        {
            string rootDirectory = AppDomain.CurrentDomain.BaseDirectory;

            // Load the face model and tune it for frame-to-frame video tracking
            faceModelParameters = new FaceModelParameters(rootDirectory, true, false, false);
            faceModelParameters.optimiseForVideo();

            // Prefer the MTCNN face detector; fall back to HOG if its models failed to load
            faceDetector = new FaceDetector(faceModelParameters.GetHaarLocation(), faceModelParameters.GetMTCNNLocation());
            if (!faceDetector.IsMTCNNLoaded())
            {
                faceModelParameters.SetFaceDetector(false, true, false);
            }

            // Construct the landmark detector and the face and gaze analysers
            landmarkDetector = new CLNF(faceModelParameters);
            faceAnalyser     = new FaceAnalyser(rootDirectory, dynamic: true, output_width: 112, mask_aligned: true);
            gazeAnalyser     = new GazeAnalyser();

            // Clear any leftover tracking state before the first frame arrives
            landmarkDetector.Reset();
            faceAnalyser.Reset();
        }
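
The PipelineRunEventArgs parameter suggests this handler is attached to a Microsoft Psi pipeline's PipelineRun event, which fires once when the pipeline starts. A hedged sketch of the hookup, assuming Microsoft Psi:

            // Sketch, assuming Microsoft Psi: run Initialize once at pipeline start
            using (var pipeline = Pipeline.Create())
            {
                pipeline.PipelineRun += Initialize;
                // ... connect OpenFace components to streams here ...
                pipeline.Run();
            }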
Example #3
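The full sequence-processing loop from the OpenFaceOffline tool: after reloading the landmark detector and selecting a face detector, it runs landmark detection on every frame, feeds the results to the face analyser (for AUs) and the gaze analyser, visualizes and records each observation, and on end of stream closes the recorder, post-processes the AU CSV if requested, and closes the reader.
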
        // The main function call for processing sequences
        private void ProcessSequence(SequenceReader reader)
        {
            Thread.CurrentThread.Priority = ThreadPriority.Highest;

            SetupFeatureExtractionMode();

            thread_running = true;

            // Reload the face landmark detector if needed
            ReloadLandmarkDetector();

            if (!landmark_detector.isLoaded())
            {
                DetectorNotFoundWarning();
                EndMode();
                thread_running = false;
                return;
            }

            // Set the face detector
            face_model_params.SetFaceDetector(DetectorHaar, DetectorHOG, DetectorCNN);
            face_model_params.optimiseForVideo();

            // Setup the visualization
            Visualizer visualizer_of = new Visualizer(ShowTrackedVideo || RecordTracked, ShowAppearance, ShowAppearance, false);

            // Initialize the face analyser
            face_analyser = new FaceAnalyserManaged(AppDomain.CurrentDomain.BaseDirectory, DynamicAUModels, image_output_size, MaskAligned);

            // Reset the tracker
            landmark_detector.Reset();

            // Loading an image file
            var frame      = new RawImage(reader.GetNextImage());
            var gray_frame = new RawImage(reader.GetCurrentFrameGray());

            // Setup recording
            RecorderOpenFaceParameters rec_params = new RecorderOpenFaceParameters(true, reader.IsWebcam(),
                                                                                   Record2DLandmarks, Record3DLandmarks, RecordModelParameters, RecordPose, RecordAUs,
                                                                                   RecordGaze, RecordHOG, RecordTracked, RecordAligned, false,
                                                                                   reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), reader.GetFPS());

            RecorderOpenFace recorder = new RecorderOpenFace(reader.GetName(), rec_params, record_root);

            // For FPS tracking
            DateTime? startTime    = CurrentTime;
            var      lastFrameTime = CurrentTime;

            // Empty image would indicate that the stream is over
            while (!gray_frame.IsEmpty)
            {
                if (!thread_running)
                {
                    break;
                }

                double progress = reader.GetProgress();

                bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(frame, face_model_params, gray_frame);

                // The face analysis step (for AUs and eye gaze)
                face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, false);

                gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                // Only the final face will contain the details
                VisualizeFeatures(frame, visualizer_of, landmark_detector.CalculateAllLandmarks(), landmark_detector.GetVisibilities(), detection_succeeding, true, false, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);

                // Record an observation
                RecordObservation(recorder, visualizer_of.GetVisImage(), 0, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), reader.GetTimestamp(), reader.GetFrameNumber());

                if (RecordTracked)
                {
                    recorder.WriteObservationTracked();
                }

                while (thread_running && thread_paused && skip_frames == 0)
                {
                    Thread.Sleep(10);
                }

                if (skip_frames > 0)
                {
                    skip_frames--;
                }

                frame      = new RawImage(reader.GetNextImage());
                gray_frame = new RawImage(reader.GetCurrentFrameGray());

                lastFrameTime = CurrentTime;
                processing_fps.AddFrame();
            }

            // Finalize the recording and flush to disk
            recorder.Close();

            // Post-process the AU recordings
            if (RecordAUs)
            {
                face_analyser.PostProcessOutputFile(recorder.GetCSVFile());
            }

            // Close the open video/webcam
            reader.Close();

            EndMode();
        }
Example #4
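A WPF MainWindow constructor performing the same model and detector setup, then configuring the live plots (head pose, gaze, smile, brow, eye) on the dispatcher thread with per-series colors, names, and line thicknesses.
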
        public MainWindow()
        {
            InitializeComponent();

            // Set the icon
            Uri iconUri = new Uri("logo1.ico", UriKind.RelativeOrAbsolute);

            this.Icon = BitmapFrame.Create(iconUri);

            String root = AppDomain.CurrentDomain.BaseDirectory;

            // TODO, create a demo version of parameters
            face_model_params = new FaceModelParameters(root, true, false, false);
            face_model_params.optimiseForVideo();

            // Initialize the face detector
            FaceDetector face_detector = new FaceDetector(face_model_params.GetHaarLocation(), face_model_params.GetMTCNNLocation());

            // If MTCNN model not available, use HOG
            if (!face_detector.IsMTCNNLoaded())
            {
                face_model_params.SetFaceDetector(false, true, false);
            }

            landmark_detector = new CLNF(face_model_params);
            face_analyser     = new FaceAnalyserManaged(root, true, 112, true);
            gaze_analyser     = new GazeAnalyserManaged();

            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                headPosePlot.AssocColor(0, Colors.Blue);
                headPosePlot.AssocColor(1, Colors.Red);
                headPosePlot.AssocColor(2, Colors.Green);

                headPosePlot.AssocName(1, "Turn");
                headPosePlot.AssocName(2, "Tilt");
                headPosePlot.AssocName(0, "Up/Down");

                headPosePlot.AssocThickness(0, 2);
                headPosePlot.AssocThickness(1, 2);
                headPosePlot.AssocThickness(2, 2);

                gazePlot.AssocColor(0, Colors.Red);
                gazePlot.AssocColor(1, Colors.Blue);

                gazePlot.AssocName(0, "Left-right");
                gazePlot.AssocName(1, "Up-down");
                gazePlot.AssocThickness(0, 2);
                gazePlot.AssocThickness(1, 2);

                smilePlot.AssocColor(0, Colors.Green);
                smilePlot.AssocColor(1, Colors.Red);
                smilePlot.AssocName(0, "Smile");
                smilePlot.AssocName(1, "Frown");
                smilePlot.AssocThickness(0, 2);
                smilePlot.AssocThickness(1, 2);

                browPlot.AssocColor(0, Colors.Green);
                browPlot.AssocColor(1, Colors.Red);
                browPlot.AssocName(0, "Raise");
                browPlot.AssocName(1, "Furrow");
                browPlot.AssocThickness(0, 2);
                browPlot.AssocThickness(1, 2);

                eyePlot.AssocColor(0, Colors.Green);
                eyePlot.AssocColor(1, Colors.Red);
                eyePlot.AssocName(0, "Eye widen");
                eyePlot.AssocName(1, "Nose wrinkle");
                eyePlot.AssocThickness(0, 2);
                eyePlot.AssocThickness(1, 2);
            }));
        }