private void StartTrackingLoop()
{
    PXCMFaceData FaceData = FaceModule.CreateOutput();

    while (!_shouldStop)
    {
        // Block until the next frame is available.
        pxcmStatus acquireFrameStatus = SenseManager.AcquireFrame(true);
        if (acquireFrameStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            ResetTrackData();
            Console.WriteLine("SenseManager.AcquireFrame(true) error: " + acquireFrameStatus.ToString());
            continue;
        }

        PXCMCapture.Sample captureSample = SenseManager.QueryFaceSample();
        if (captureSample == null)
        {
            ResetTrackData();
            SenseManager.ReleaseFrame();
            continue;
        }

        //TODO: fetch image data
        TrackImageData(captureSample);

        // Refresh the face data for the current frame and only continue
        // if exactly one face is detected; otherwise clear stale track data.
        FaceData.Update();
        int numberOfDetectedFaces = FaceData.QueryNumberOfDetectedFaces();
        if (numberOfDetectedFaces != 1)
        {
            ResetTrackData();
            SenseManager.ReleaseFrame();
            continue;
        }

        PXCMFaceData.Face faceDataFace = FaceData.QueryFaceByIndex(0);
        TrackPose(faceDataFace);
        TrackExpressions(faceDataFace);
        //TrackLandmarks(faceDataFace);
        //TrackGaze();
        //FaceData.QueryRecognitionModule();

        // The following object contains the bounding rectangle and the average depth:
        //PXCMFaceData.DetectionData faceDataDetectionData = faceDataFace.QueryDetection();
        //faceDataDetectionData.QueryFaceAverageDepth();
        //faceDataDetectionData.QueryBoundingRect();

        SenseManager.ReleaseFrame();
        Thread.Sleep(250);
    }

    // Loop was stopped: release all RealSense resources.
    if (FaceData != null)
    {
        FaceData.Dispose();
    }

    FaceConfiguration.Dispose();
    SenseManager.Close();
    SenseManager.Dispose();
}
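For context, here is a minimal sketch of how this loop might be driven from a background thread. The StartTracking/StopTracking wrapper methods and the _trackingThread field are assumptions for illustration and are not part of the original code; only _shouldStop and StartTrackingLoop come from the listing above. It uses plain System.Threading, no additional RealSense calls.

// Assumed surrounding class members (sketch only, not the original implementation).
// Requires: using System.Threading;
private Thread _trackingThread;        // hypothetical field holding the worker thread
private volatile bool _shouldStop;     // the flag polled by StartTrackingLoop; volatile so the
                                       // stop request written by another thread is seen promptly

public void StartTracking()
{
    _shouldStop = false;
    // Run the tracking loop off the UI thread so AcquireFrame(true) can block freely.
    _trackingThread = new Thread(StartTrackingLoop) { IsBackground = true };
    _trackingThread.Start();
}

public void StopTracking()
{
    _shouldStop = true;                // loop exits after finishing the current frame
    _trackingThread?.Join();           // wait until the Dispose/Close cleanup has run
}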