// Initializes the RealSense pipeline: face tracking, color streaming into a Unity
// texture, and the per-channel smoothers used to stabilize the raw tracking values.
// All failures are caught and surfaced on the ErrorLog UI text.
protected void Init()
{
    try
    {
        // RealSense initialization.
        // Reference: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_face_general_procedure.html
        // Reference: .\Intel\RSSDK\sample\core\RawStreams.unity
        SenseManager = SenseManager.CreateInstance();

        // Face module: color-based tracking with facial-expression output enabled.
        // Configuration changes must be committed via ApplyChanges() before Init().
        FaceModule = FaceModule.Activate(SenseManager);
        FaceModule.FrameProcessed += FaceModule_FrameProcessed;
        FaceData = FaceModule.CreateOutput();
        FaceConfig = FaceModule.CreateActiveConfiguration();
        FaceConfig.TrackingMode = TrackingModeType.FACE_MODE_COLOR;
        FaceConfig.Expressions.Properties.Enabled = true;
        FaceConfig.ApplyChanges();

        // Color stream: 640x480 @ 30fps, delivered through SampleReader_SampleArrived.
        SampleReader = SampleReader.Activate(SenseManager);
        SampleReader.EnableStream(StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
        SampleReader.SampleArrived += SampleReader_SampleArrived;

        // Start the pipeline; StreamFrames(false) streams without blocking this thread.
        SenseManager.Init();
        SenseManager.StreamFrames(false);

        // RawStreams: native texture the plugin writes camera frames into.
        // Scale (-1,-1) flips the image to match Unity's texture orientation.
        Texture = NativeTexturePlugin.Activate();
        Material.mainTexture = new Texture2D(640, 480, TextureFormat.BGRA32, false);
        Material.mainTextureScale = new Vector2(-1, -1);
        TexPtr = Material.mainTexture.GetNativeTexturePtr();

        // Query the actual color-stream resolution negotiated by the device.
        StreamProfileSet profile;
        SenseManager.CaptureManager.Device.QueryStreamProfileSet(out profile);
        Resolution = profile.color.imageInfo;

        // Smoother initialization: one weighted smoother per tracked signal.
        // Reference: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_utils_the_smoother_utility.html
        Smoother = Smoother.CreateInstance(SenseManager.Session);
        SmoothBody = Smoother.Create3DWeighted(BodyPosSmoothWeight);
        SmoothHead = Smoother.Create3DWeighted(HeadAngSmoothWeight);
        SmoothEyes = Smoother.Create2DWeighted(EyesPosSmoothWeight);
        SmoothEyesClose = Smoother.Create1DWeighted(EyesCloseSmoothWeight);
        SmoothBrowRai = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothBrowLow = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothSmile = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothKiss = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothMouth = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothTongue = Smoother.Create1DWeighted(FaceSmoothWeight);
    }
    catch (Exception e)
    {
        // Camera missing / SDK failure: show the reason on screen instead of crashing.
        ErrorLog.text = "RealSense Error\n";
        ErrorLog.text += e.Message;
    }
}
// Refreshes boundingBoxes, landmarks, and allLandmarks from the current face-module
// output, processing at most maxTrackedFaces faces.
// Fixes vs. original: face was dereferenced (face.Detection.BoundingRect) BEFORE its
// null check, and allLandmarks was never cleared, so stale arrays accumulated forever.
private void ProcessLandmarks()
{
    FaceData faceData = faceModule.CreateOutput();
    faceData.Update();

    // Cap the number of faces we process at maxTrackedFaces.
    int numOfFaces = faceData.NumberOfDetectedFaces > maxTrackedFaces
        ? maxTrackedFaces
        : faceData.NumberOfDetectedFaces;

    boundingBoxes.Clear();
    landmarks.Clear();
    allLandmarks.Clear(); // fix: previously never cleared → unbounded growth across calls

    for (int i = 0; i < numOfFaces; i++)
    {
        Face face = faceData.QueryFaceByIndex(i);
        if (face == null)
        {
            continue; // fix: original dereferenced face before this null check
        }

        // Guard Detection as well; a tracked face may lack detection data.
        if (face.Detection != null)
        {
            boundingBoxes.Add(face.Detection.BoundingRect);
        }

        if (face.Landmarks != null)
        {
            // Either a specific landmark group or the full point set, per configuration.
            LandmarkPoint[] groupPoints;
            if (extractLandmarkGroup)
            {
                face.Landmarks.QueryPointsByGroup(landmarkGroup, out groupPoints);
            }
            else
            {
                groupPoints = face.Landmarks.Points;
            }
            if (groupPoints != null)
            {
                landmarks.Add(groupPoints);
            }

            LandmarkPoint[] allPoints = face.Landmarks.Points;
            if (allPoints != null)
            {
                allLandmarks.Add(allPoints);
            }
        }
    }

    faceData.Dispose();
    // NOTE(review): disposing the face module on every call looks wrong for a
    // per-frame method — confirm this is only invoked once per session.
    faceModule.Dispose();
}
// Spawns the background tracking loop: acquires frames, tracks position and
// expressions for exactly one face, and backs off 250 ms on any error state.
// Fix vs. original: the PXCMFaceData output created inside the thread was never
// disposed, leaking native SDK resources when IsTracking is cleared.
private void StartTrackingThread()
{
    IsTracking = true;
    TrackingThread = new Thread(() =>
    {
        PXCMFaceData faceData = FaceModule.CreateOutput();
        try
        {
            // NOTE(review): IsTracking is read cross-thread; confirm it is volatile.
            while (IsTracking)
            {
                pxcmStatus acquireFrameStatus = SdkCommonHelper.SenseManager.AcquireFrame(true);
                if (acquireFrameStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    // No frame was acquired, so no ReleaseFrame() is needed here.
                    ResetUserTrackData();
                    Console.WriteLine("SenseManager.AcquireFrame(true) error: " + acquireFrameStatus.ToString());
                    Thread.Sleep(250);
                    continue;
                }

                faceData.Update();

                // Only single-user tracking is supported; bail out otherwise.
                int numberOfDetectedFaces = faceData.QueryNumberOfDetectedFaces();
                if (numberOfDetectedFaces != 1)
                {
                    ResetUserTrackData();
                    SdkCommonHelper.SenseManager.ReleaseFrame();
                    Thread.Sleep(250);
                    continue;
                }

                PXCMFaceData.Face faceDataFace = faceData.QueryFaceByIndex(0);
                TrackUserPosition(faceDataFace);
                TrackUserExpressions(faceDataFace);

                SdkCommonHelper.SenseManager.ReleaseFrame();
                Thread.Sleep(100); // ~10 Hz tracking cadence
            }
        }
        finally
        {
            // fix: release the native face-data output when the loop exits.
            faceData.Dispose();
        }
    });
    TrackingThread.Start();
}
// Blocking tracking loop: acquires frames, captures image data, and tracks pose
// and expressions for exactly one face until _shouldStop is set, then tears down
// the face configuration and the sense manager.
// Fixes vs. original: (1) the AcquireFrame error path had no delay, busy-spinning
// at full speed on persistent errors (the sibling thread loop backs off 250 ms);
// (2) cleanup now runs in a finally block so resources are released even if a
// tracking call throws.
private void StartTrackingLoop()
{
    PXCMFaceData faceData = FaceModule.CreateOutput();
    try
    {
        while (!_shouldStop)
        {
            pxcmStatus acquireFrameStatus = SenseManager.AcquireFrame(true);
            if (acquireFrameStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // No frame acquired → nothing to release.
                ResetTrackData();
                Console.WriteLine("SenseManager.AcquireFrame(true) error: " + acquireFrameStatus.ToString());
                Thread.Sleep(250); // fix: back off instead of busy-spinning on errors
                continue;
            }

            PXCMCapture.Sample captureSample = SenseManager.QueryFaceSample();
            if (captureSample == null)
            {
                ResetTrackData();
                SenseManager.ReleaseFrame();
                Thread.Sleep(250); // fix: avoid tight loop while no sample is available
                continue;
            }

            //TODO: fetch image data
            TrackImageData(captureSample);

            faceData.Update();

            // Only single-user tracking is supported; bail out otherwise.
            int numberOfDetectedFaces = faceData.QueryNumberOfDetectedFaces();
            if (numberOfDetectedFaces != 1)
            {
                ResetTrackData();
                SenseManager.ReleaseFrame();
                continue;
            }

            PXCMFaceData.Face faceDataFace = faceData.QueryFaceByIndex(0);
            TrackPose(faceDataFace);
            TrackExpressions(faceDataFace);
            //TrackLandmarks(faceDataFace);
            //TrackGaze();
            //FaceData.QueryRecognitionModule();
            // The detection data object carries the bounding rectangle and average depth:
            //PXCMFaceData.DetectionData faceDataDetectionData = faceDataFace.QueryDetection();
            //faceDataDetectionData.QueryFaceAverageDepth();
            //faceDataDetectionData.QueryBoundingRect();

            SenseManager.ReleaseFrame();
            Thread.Sleep(250); // ~4 Hz tracking cadence
        }
    }
    finally
    {
        // fix: exception-safe teardown of SDK resources.
        if (faceData != null)
        {
            faceData.Dispose();
        }
        FaceConfiguration.Dispose();
        SenseManager.Close();
        SenseManager.Dispose();
    }
}