private void Initialize(object sender, PipelineRunEventArgs e)
{
    string rootDirectory = AppDomain.CurrentDomain.BaseDirectory;

    // Load the face model parameters and tune them for video tracking
    faceModelParameters = new FaceModelParameters(rootDirectory, true, false, false);
    faceModelParameters.optimiseForVideo();

    // Prefer the MTCNN face detector; if its models failed to load,
    // switch to an alternative detector
    faceDetector = new FaceDetector(faceModelParameters.GetHaarLocation(), faceModelParameters.GetMTCNNLocation());
    if (!faceDetector.IsMTCNNLoaded())
    {
        faceModelParameters.SetFaceDetector(false, true, false);
    }

    landmarkDetector = new CLNF(faceModelParameters);
    faceAnalyser = new FaceAnalyser(rootDirectory, dynamic: true, output_width: 112, mask_aligned: true);
    gazeAnalyser = new GazeAnalyser();

    // Start from a clean tracking state
    landmarkDetector.Reset();
    faceAnalyser.Reset();
}
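// Hypothetical wiring for the handler above. PipelineRunEventArgs suggests a
// Microsoft \psi (Platform for Situated Intelligence) pipeline; this sketch
// assumes that runtime, and the pipeline setup is not shown elsewhere in
// this file, so treat the method below as an assumption, not the actual code.
private void StartPipeline()
{
    var pipeline = Microsoft.Psi.Pipeline.Create();

    // Load the models once, when the pipeline starts running
    pipeline.PipelineRun += Initialize;

    // ... wire up the camera source that feeds frameQueue here ...

    pipeline.RunAsync();
}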
private void ProcessLoop()
{
    Thread.CurrentThread.IsBackground = true;

    CLMParameters clmParams = new CLMParameters();
    CLM clmModel = new CLM();

    // Approximate camera intrinsics; the principal point (cx, cy) is filled
    // in from the first frame's dimensions
    float fx = 500, fy = 500, cx = 0, cy = 0;

    FaceAnalyser analyser = new FaceAnalyser();

    DateTime? startTime = CurrentTime;

    arousalPlot.AssocColor(0, Colors.Red);
    valencePlot.AssocColor(0, Colors.Blue);

    while (true)
    {
        // Blocks until the capture side enqueues the next frame pair
        var newFrames = frameQueue.Take();
        var frame = new RawImage(newFrames.Item1);
        var grayFrame = newFrames.Item2;

        if (!startTime.HasValue)
        {
            startTime = CurrentTime;
        }

        if (cx == 0 && cy == 0)
        {
            cx = grayFrame.Width / 2f;
            cy = grayFrame.Height / 2f;
        }

        // Full reset requested from the UI
        if (reset)
        {
            clmModel.Reset();
            analyser.Reset();
            reset = false;
        }

        // Re-initialise tracking at a user-selected point
        if (resetPoint.HasValue)
        {
            clmModel.Reset(resetPoint.Value.X, resetPoint.Value.Y);
            analyser.Reset();
            resetPoint = null;
        }

        detectionSucceeding = clmModel.DetectLandmarksInVideo(grayFrame, clmParams);

        List<Tuple<Point, Point>> lines = null;
        List<Point> landmarks = null;

        if (detectionSucceeding)
        {
            landmarks = clmModel.CalculateLandmarks();
            lines = clmModel.CalculateBox(fx, fy, cx, cy);
        }
        else
        {
            analyser.Reset();
        }

        //////////////////////////////////////////////
        // Analyse frame and detect AUs
        //////////////////////////////////////////////
        analyser.AddNextFrame(grayFrame, clmModel, (CurrentTime - startTime.Value).TotalSeconds);

        var alignedFace = analyser.GetLatestAlignedFace();
        var hogDescriptor = analyser.GetLatestHOGDescriptorVisualisation();

        trackingFps.AddFrame();

        Dictionary<string, double> aus = analyser.GetCurrentAUs();
        string emotion = analyser.GetCurrentCategoricalEmotion();
        double arousal = analyser.GetCurrentArousal();
        double valence = analyser.GetCurrentValence();
        double confidence = analyser.GetConfidence();

        try
        {
            Dispatcher.Invoke(() =>
            {
                // Lazily create the writeable bitmaps on first use
                if (latestImg == null)
                {
                    latestImg = frame.CreateWriteableBitmap();
                }
                if (latestAlignedFace == null)
                {
                    latestAlignedFace = alignedFace.CreateWriteableBitmap();
                }
                if (latestHOGDescriptor == null)
                {
                    latestHOGDescriptor = hogDescriptor.CreateWriteableBitmap();
                }

                confidenceBar.Value = confidence;

                if (detectionSucceeding)
                {
                    frame.UpdateWriteableBitmap(latestImg);
                    alignedFace.UpdateWriteableBitmap(latestAlignedFace);
                    hogDescriptor.UpdateWriteableBitmap(latestHOGDescriptor);

                    imgAlignedFace.Source = latestAlignedFace;
                    imgHOGDescriptor.Source = latestHOGDescriptor;

                    video.OverlayLines = lines;
                    video.OverlayPoints = landmarks;
                    video.Confidence = confidence;
                    video.Source = latestImg;

                    // Map arousal and valence onto the [0, 1] plot range
                    var arousalDict = new Dictionary<int, double>();
                    arousalDict[0] = arousal * 0.5 + 0.5;
                    arousalPlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = arousalDict, Confidence = confidence });

                    var valenceDict = new Dictionary<int, double>();
                    valenceDict[0] = valence * 0.5 + 0.5;
                    valencePlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = valenceDict, Confidence = confidence });

                    var avDict = new Dictionary<int, double>();
                    avDict[0] = arousal;
                    avDict[1] = valence;
                    avPlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = avDict, Confidence = confidence });

                    auGraph.Update(aus, confidence);

                    emotionLabelHistory.Enqueue(new Tuple<DateTime, string>(CurrentTime, emotion));
                    UpdateEmotionLabel();
                }
                else
                {
                    // Tracking lost: zero out the action units on the graph
                    foreach (var k in aus.Keys.ToArray())
                    {
                        aus[k] = 0;
                    }
                    auGraph.Update(aus, 0);
                }
            });
        }
        catch (TaskCanceledException)
        {
            // The dispatcher has shut down; the application is quitting
            break;
        }
    }
}
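// Hypothetical producer side, sketching the contract ProcessLoop relies on:
// a bounded blocking queue of (colour, grayscale) frame pairs filled by a
// capture callback and drained on a dedicated thread. Only frameQueue and
// ProcessLoop appear above; the element types and OnFrameCaptured are
// assumptions (Item1 must be whatever RawImage's constructor accepts).
// Requires: using System.Collections.Concurrent; using System.Threading;
private readonly BlockingCollection<Tuple<RawImage, RawImage>> frameQueue =
    new BlockingCollection<Tuple<RawImage, RawImage>>(boundedCapacity: 2);

private void OnFrameCaptured(RawImage colourFrame, RawImage grayFrame)
{
    // Drop frames rather than block the capture thread when the processing
    // loop falls behind (the queue is bounded at two pending pairs)
    frameQueue.TryAdd(Tuple.Create(colourFrame, grayFrame));
}

private void StartProcessing()
{
    // ProcessLoop marks its own thread as background, so it will not keep
    // the application alive at shutdown
    new Thread(ProcessLoop).Start();
}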