// Capture loop: grabs frames from the reader, runs landmark/gaze detection,
// queues data for recording and WAMP broadcast, and pushes visualisation
// updates to the UI thread. Runs on a dedicated background thread until
// the `running` flag is cleared; closes the reader on exit.
private void VideoLoop(UtilitiesOF.SequenceReader reader)
{
    Thread.CurrentThread.IsBackground = true;

    String root = AppDomain.CurrentDomain.BaseDirectory;
    FaceModelParameters model_params = new FaceModelParameters(root, true, false, false);

    // Initialize the face detector
    FaceDetector face_detector = new FaceDetector(model_params.GetHaarLocation(), model_params.GetMTCNNLocation());

    // If MTCNN model not available, fall back to HOG
    if (!face_detector.IsMTCNNLoaded())
    {
        model_params.SetFaceDetector(false, true, false);
    }

    CLNF face_model = new CLNF(model_params);
    GazeAnalyserManaged gaze_analyser = new GazeAnalyserManaged();

    while (running)
    {
        //////////////////////////////////////////////
        // CAPTURE FRAME AND DETECT LANDMARKS FOLLOWED BY THE REQUIRED IMAGE PROCESSING
        //////////////////////////////////////////////
        RawImage frame = reader.GetNextImage();
        processing_fps.AddFrame();
        var grayFrame = reader.GetCurrentFrameGray();

        if (mirror_image)
        {
            frame.Mirror();
            grayFrame.Mirror();
        }

        bool detectionSucceeding = ProcessFrame(face_model, gaze_analyser, model_params, frame, grayFrame,
            reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

        // Hand the frame + pose off to the recording queue under the lock so the
        // writer thread sees a consistent (frame, success, pose) triple.
        lock (recording_lock)
        {
            if (recording)
            {
                List<float> pose = new List<float>();
                face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                recording_objects.Enqueue(new Tuple<RawImage, bool, List<float>>(frame, detectionSucceeding, pose));
            }
        }

        // Visualisation data computed on this thread, consumed inside the dispatcher lambda below.
        List<Tuple<System.Windows.Point, System.Windows.Point>> lines = null;
        List<Tuple<float, float>> eye_landmarks = null;
        List<System.Windows.Point> landmarks = new List<System.Windows.Point>();
        List<Tuple<System.Windows.Point, System.Windows.Point>> gaze_lines = null;
        Tuple<float, float> gaze_angle = new Tuple<float, float>(0, 0);
        var visibilities = face_model.GetVisibilities();
        double scale = face_model.GetRigidParams()[0];
        EyeDataStruct EyeSample = new EyeDataStruct(); // fix: removed stray ';;'

        if (detectionSucceeding)
        {
            List<Tuple<float, float>> landmarks_doubles = face_model.CalculateAllLandmarks();
            foreach (var p in landmarks_doubles)
            {
                landmarks.Add(new System.Windows.Point(p.Item1, p.Item2));
            }

            eye_landmarks = face_model.CalculateVisibleEyeLandmarks();
            gaze_lines = gaze_analyser.CalculateGazeLines(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
            gaze_angle = gaze_analyser.GetGazeAngle();

            if (IsBroadcastingToWAMP)
            {
                EyeSample.Confident = face_model.GetConfidence();
                EyeSample.EyeLandmarks3D = face_model.CalculateAllEyeLandmarks3D(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
                EyeSample.Gaze = gaze_analyser.GetGazeCamera();
            }

            lines = face_model.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
        }

        // One-shot tracker reset requested from the UI.
        if (reset)
        {
            face_model.Reset();
            reset = false;
        }

        // Visualisation updating — marshalled to the UI thread with a 200 ms timeout
        // so a blocked UI cannot stall the capture loop indefinitely.
        try
        {
            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                if (latest_img == null)
                {
                    latest_img = frame.CreateWriteableBitmap();
                }

                List<float> pose = new List<float>();
                face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                // Stream the combined gaze + head-pose sample to the WAMP broadcast queue.
                if (IsBroadcastingToWAMP)
                {
                    OpenFaceDataStruct Sample = new OpenFaceDataStruct();
                    Sample.GazeData = EyeSample;
                    Sample.HeadData = pose;
                    OpenFaceDataSamples.Enqueue(Sample);
                }

                // pose[3..5] are pitch/yaw/roll in radians; convert to rounded degrees.
                int yaw = (int)(pose[4] * 180 / Math.PI + 0.5);
                int yaw_abs = Math.Abs(yaw);
                int roll = (int)(pose[5] * 180 / Math.PI + 0.5);
                int roll_abs = Math.Abs(roll);
                int pitch = (int)(pose[3] * 180 / Math.PI + 0.5);
                int pitch_abs = Math.Abs(pitch);

                YawLabel.Content = yaw_abs + "°";
                RollLabel.Content = roll_abs + "°";
                PitchLabel.Content = pitch_abs + "°";

                if (yaw > 0)
                {
                    YawLabelDir.Content = "Right";
                }
                else if (yaw < 0)
                {
                    YawLabelDir.Content = "Left";
                }
                else
                {
                    YawLabelDir.Content = "Straight";
                }

                if (pitch > 0)
                {
                    PitchLabelDir.Content = "Down";
                }
                else if (pitch < 0)
                {
                    PitchLabelDir.Content = "Up";
                }
                else
                {
                    PitchLabelDir.Content = "Straight";
                }

                if (roll > 0)
                {
                    RollLabelDir.Content = "Left";
                }
                else if (roll < 0)
                {
                    RollLabelDir.Content = "Right";
                }
                else
                {
                    RollLabelDir.Content = "Straight";
                }

                XPoseLabel.Content = (int)pose[0] + " mm";
                YPoseLabel.Content = (int)pose[1] + " mm";
                ZPoseLabel.Content = (int)pose[2] + " mm";

                String x_angle = String.Format("{0:F0}°", gaze_angle.Item1 * (180.0 / Math.PI));
                String y_angle = String.Format("{0:F0}°", gaze_angle.Item2 * (180.0 / Math.PI));
                YawLabelGaze.Content = x_angle;
                PitchLabelGaze.Content = y_angle;

                if (gaze_angle.Item1 > 0)
                {
                    YawLabelGazeDir.Content = "Right";
                }
                else if (gaze_angle.Item1 < 0)
                {
                    YawLabelGazeDir.Content = "Left";
                }
                else
                {
                    YawLabelGazeDir.Content = "Straight";
                }

                if (gaze_angle.Item2 > 0)
                {
                    PitchLabelGazeDir.Content = "Down";
                }
                else if (gaze_angle.Item2 < 0)
                {
                    PitchLabelGazeDir.Content = "Up";
                }
                else
                {
                    PitchLabelGazeDir.Content = "Straight";
                }

                // Clamp tracker confidence to [0, 1] for display.
                double confidence = face_model.GetConfidence();
                if (confidence < 0)
                {
                    confidence = 0;
                }
                else if (confidence > 1)
                {
                    confidence = 1;
                }

                frame.UpdateWriteableBitmap(latest_img);
                webcam_img.Clear();
                webcam_img.Source = latest_img;
                webcam_img.Confidence.Add(confidence);
                webcam_img.FPS = processing_fps.GetFPS();

                if (detectionSucceeding)
                {
                    webcam_img.OverlayLines.Add(lines);
                    webcam_img.OverlayPoints.Add(landmarks);
                    webcam_img.OverlayPointsVisibility.Add(visibilities);
                    webcam_img.FaceScale.Add(scale);

                    List<System.Windows.Point> eye_landmark_points = new List<System.Windows.Point>();
                    foreach (var p in eye_landmarks)
                    {
                        eye_landmark_points.Add(new System.Windows.Point(p.Item1, p.Item2));
                    }
                    webcam_img.OverlayEyePoints.Add(eye_landmark_points);
                    webcam_img.GazeLines.Add(gaze_lines);

                    // Publish the information for other applications over ZeroMQ
                    // (pose angles converted from radians to degrees).
                    String str_head_pose = String.Format("{0}:{1:F2}, {2:F2}, {3:F2}, {4:F2}, {5:F2}, {6:F2}", "HeadPose",
                        pose[0], pose[1], pose[2],
                        pose[3] * 180 / Math.PI, pose[4] * 180 / Math.PI, pose[5] * 180 / Math.PI);
                    zero_mq_socket.Send(new ZFrame(str_head_pose, Encoding.UTF8));

                    String str_gaze = String.Format("{0}:{1:F2}, {2:F2}", "GazeAngle",
                        gaze_angle.Item1 * (180.0 / Math.PI), gaze_angle.Item2 * (180.0 / Math.PI));
                    zero_mq_socket.Send(new ZFrame(str_gaze, Encoding.UTF8));
                }
            }));

            // fix: was 'running & pause' (bitwise, non-short-circuit); logical && intended.
            while (running && pause)
            {
                Thread.Sleep(10);
            }
        }
        catch (TaskCanceledException)
        {
            // Dispatcher shut down — quit the loop.
            break;
        }
    }

    reader.Close();
    System.Console.Out.WriteLine("Thread finished");
}
// Reads the crossbar router address/realm from ./crossbar_config.csv, opens a
// WAMP JSON channel and, once connected, pumps queued OpenFace samples to the
// router until StopWAMP is set. Intended to run on its own worker thread.
private void CreateWAMPClient()
{
    // Config format: "<router-address>,<realm>" — the last well-formed line wins.
    using (var reader = new System.IO.StreamReader(@"./crossbar_config.csv"))
    {
        while (!reader.EndOfStream)
        {
            var line = reader.ReadLine();
            var values = line.Split(',');
            // fix: guard against malformed lines; values[1] previously threw
            // IndexOutOfRangeException on any line without a comma.
            if (values.Length < 2)
            {
                continue;
            }
            WAMPRouterAdress = values[0];
            WAMPRealm = values[1];
        }
    }

    OpenFaceDataSamples = new ConcurrentQueue<OpenFaceDataStruct>();

    DefaultWampChannelFactory channelFactory = new DefaultWampChannelFactory();
    channel = channelFactory.CreateJsonChannel(WAMPRouterAdress, WAMPRealm);
    counter = 0;

    if (ConnectToRouter())
    {
        IsBroadcastingToWAMP = true;
        while (!StopWAMP)
        {
            OpenFaceDataStruct Sample;
            if (OpenFaceDataSamples.TryDequeue(out Sample))
            {
                EyeDataStruct EyeData = Sample.GazeData;
                object[] gazeArgs;
                if (EyeData.EyeLandmarks3D != null)
                {
                    // Pupil radius approximated as half the distance between
                    // opposite iris landmarks (left: 21/25, right: 49/53).
                    float leftPupilRadius = Distance(EyeData.EyeLandmarks3D[21], EyeData.EyeLandmarks3D[25]) / 2;
                    float rightPupilRadius = Distance(EyeData.EyeLandmarks3D[49], EyeData.EyeLandmarks3D[53]) / 2;

                    // NOTE(review): eye locations are intentionally published as (0,0,0)
                    // here; only confidence, pupil radius and the gaze vector are sent.
                    // Gaze axes are remapped: (Item3, Item1, -Item2).
                    gazeArgs = new object[] { EyeData.Confident, leftPupilRadius,
                        0, 0, 0,
                        (EyeData.Gaze.Item2.Item3), EyeData.Gaze.Item2.Item1, -EyeData.Gaze.Item2.Item2,
                        EyeData.Confident, rightPupilRadius,
                        0, 0, 0,
                        (EyeData.Gaze.Item1.Item3), EyeData.Gaze.Item1.Item1, -EyeData.Gaze.Item1.Item2 };
                }
                else
                {
                    // No 3D eye landmarks this frame: publish a sentinel sample of -1s.
                    gazeArgs = new object[] { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
                }

                List<float> HeadData = Sample.HeadData;
                // Head pose: mm -> cm with axis remap (z, x, -y); rotation offset by pi
                // around pitch/roll as expected by the consumer.
                object[] headArgs = new object[] { "OpenFace",
                    new object[] { HeadData[2] / 10, HeadData[0] / 10, -HeadData[1] / 10 },
                    new object[] { HeadData[3] + Math.PI, HeadData[4], -(HeadData[5] + Math.PI) },
                    new object[] { } };

                WampEvent evt = new WampEvent();
                evt.Arguments = new object[] { "OpenFace", gazeArgs, headArgs };
                OpenFaceSubject.OnNext(evt);
            }
            else
            {
                // fix: the loop previously busy-spun a full core while the queue
                // was empty; yield briefly between polls.
                Thread.Sleep(1);
            }
        }
    }
}