public void Configure(RealSenseCamera camera) { _camera = camera; _camera.Manager.EnableFace(); _faceModule = camera.Manager.QueryFace(); _faceData = _faceModule.CreateOutput(); }
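// The Configure() snippet above only wires the face module up; the other examples in this
// collection consume the output inside an AcquireFrame/ReleaseFrame loop. A minimal sketch of
// such a loop, assuming _camera.Manager exposes the underlying PXCMSenseManager and _faceData
// is the PXCMFaceData created above (the wrapper names come from the snippet, not the SDK):
private void PollFaces()
{
    PXCMSenseManager manager = _camera.Manager;
    while (manager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Refresh the face tracking output for this frame
        _faceData.Update();
        for (int i = 0; i < _faceData.QueryNumberOfDetectedFaces(); i++)
        {
            PXCMFaceData.Face face = _faceData.QueryFaceByIndex(i);
            PXCMFaceData.DetectionData detection = face.QueryDetection();
            if (detection != null)
            {
                PXCMRectI32 bounds;
                detection.QueryBoundingRect(out bounds); // face location in image coordinates
            }
        }
        // The frame must be released before the next AcquireFrame call
        manager.ReleaseFrame();
    }
}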
private void startButton_Click(object sender, RoutedEventArgs e) { CurrentIpAdress = ipTextBox.Text; currentPort = portTextBox.Text; senseManager = PXCMSenseManager.CreateInstance(); senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60); senseManager.EnableFace(); senseManager.Init(); faceModule = senseManager.QueryFace(); faceConfiguration = faceModule.CreateActiveConfiguration(); faceConfiguration.detection.isEnabled = true; expressionConfiguration = faceConfiguration.QueryExpressions(); expressionConfiguration.Enable(); expressionConfiguration.EnableAllExpressions(); faceConfiguration.landmarks.isEnabled = true; faceConfiguration.landmarks.numLandmarks = 78; faceConfiguration.EnableAllAlerts(); faceConfiguration.ApplyChanges(); captureProcess = new Thread(new ThreadStart(CaptureProcess)); captureProcess.Start(); }
private void InitializeCamera() { FaceModule = SenseManager.QueryFace(); var config = FaceModule.CreateActiveConfiguration(); config.detection.isEnabled = true; config.landmarks.isEnabled = false; config.pose.isEnabled = false; config.EnableAllAlerts(); if (RetrieveMode == Mode.Events) { config.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_FARTHEST_TO_CLOSEST; PXCMFaceConfiguration.OnFiredAlertDelegate alertHandler = new PXCMFaceConfiguration.OnFiredAlertDelegate(OnAlertHandler); config.SubscribeAlert(alertHandler); } if (config.ApplyChanges().IsSuccessful()) { if (SenseManager.Init().IsError()) { MessageBox.Show("Error initializing the camera"); Close(); } } config.Dispose(); }
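// InitializeCamera() above subscribes OnAlertHandler, but the handler itself is not shown.
// A plausible minimal implementation, assuming the standard
// PXCMFaceConfiguration.OnFiredAlertDelegate signature (void, taking a PXCMFaceData.AlertData):
private void OnAlertHandler(PXCMFaceData.AlertData alertData)
{
    // alertData.label identifies the alert type (e.g. ALERT_FACE_OUT_OF_FOV),
    // alertData.faceId the face that triggered it.
    Debug.WriteLine("Alert {0} fired for face {1}", alertData.label, alertData.faceId);
}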
private static void StartFace() { pxcmStatus status = senseManager.EnableFace(); PXCMFaceModule faceModule = senseManager.QueryFace(); if (status != pxcmStatus.PXCM_STATUS_NO_ERROR || faceModule == null) { return; } PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration(); if (faceConfig == null) { return; } faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH); faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT; PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = faceConfig.QueryExpressions(); if (econfiguration == null) { return; } econfiguration.properties.maxTrackedFaces = 1; econfiguration.EnableAllExpressions(); econfiguration.Enable(); faceConfig.pose.isEnabled = true; faceConfig.landmarks.isEnabled = true; faceConfig.ApplyChanges(); faceData = faceModule.CreateOutput(); } // note: pose/landmarks are now enabled before ApplyChanges(); setting them afterwards, as the original did, means they never take effect
// Main body of the streaming loop private void DoStreaming_SaveData_Open() { this.m_stopped = false; InitStreamState(); switch (m_algoOption) { // Face algorithm case AlgoOption.Face: this.faceModule = manager.QueryFace(); if (faceModule == null) { MessageBox.Show("QueryFace failed"); return; } InitFaceState(); this.faceData = this.faceModule.CreateOutput(); if (faceData == null) { MessageBox.Show("CreateOutput failed"); return; } break; } if (manager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR) { #if DEBUG System.Windows.Forms.MessageBox.Show("init failed"); #endif return; } }
/** * Constructor of the model. * Sets up everything needed to use the camera: enables the color stream and the face tracker, and builds up the face configuration. */ public Model() { emotions["Anger"] = 0; emotions["Fear"] = 0; emotions["Disgust"] = 0; emotions["Surprise"] = 0; emotions["Joy"] = 0; emotions["Sadness"] = 0; emotions["Contempt"] = 0; width = 1920; height = 1080; framerate = 30; senseManager = PXCMSenseManager.CreateInstance(); senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate); // Enable Face detection senseManager.EnableFace(); senseManager.Init(); face = senseManager.QueryFace(); faceConfig = face.CreateActiveConfiguration(); faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH); faceConfig.detection.isEnabled = true; faceConfig.pose.isEnabled = true; faceConfig.ApplyChanges(); faceConfig.Update(); modules = new List <RSModule>(); }
private void ConfigureRealSense() { try { // Create the SenseManager instance sm = PXCMSenseManager.CreateInstance(); // Enable the color stream sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, ImageWidth, ImageHeight, 30); // Enable person tracking sm.EnablePersonTracking(); personModule = sm.QueryPersonTracking(); PXCMPersonTrackingConfiguration personConfig = personModule.QueryConfiguration(); personConfig.SetTrackedAngles(PXCMPersonTrackingConfiguration.TrackingAngles.TRACKING_ANGLES_ALL); // Enable skeleton tracking - not supported on r200? //PXCMPersonTrackingConfiguration.SkeletonJointsConfiguration skeletonConfig = personConfig.QuerySkeletonJoints(); //skeletonConfig.Enable(); // Enable the face module sm.EnableFace(); PXCMFaceModule faceModule = sm.QueryFace(); PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration(); faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH); faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME; faceConfig.detection.maxTrackedFaces = 1; faceConfig.ApplyChanges(); sm.EnableBlob(); PXCMBlobModule blobModule = sm.QueryBlob(); PXCMBlobConfiguration blobConfig = blobModule.CreateActiveConfiguration(); blobConfig.SetMaxBlobs(4); // 4 is the max blobConfig.SetMaxDistance(2000); // in mm's blobConfig.ApplyChanges(); //initialize the SenseManager sm.Init(); faceData = faceModule.CreateOutput(); blobData = blobModule.CreateOutput(); // Mirror the image sm.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL); // Release resources personConfig.Dispose(); faceConfig.Dispose(); faceModule.Dispose(); blobConfig.Dispose(); blobModule.Dispose(); } catch (Exception) { MessageBox.Show("Unable to configure the RealSense camera. Please make sure a R200 camera is connected.", "System Error"); throw; } }
/// <summary> /// Called when a face is detected. /// </summary> /// <param name="module">The module.</param> private void OnFaceCallback(PXCMFaceModule module) { PXCMRectI32 bounds; using (var faceData = module.CreateOutput()) { faceData.Update(); var faces = faceData.QueryFaces(); foreach (var face in faces) { var detection = face.QueryDetection(); detection.QueryBoundingRect(out bounds); // Debug.WriteLine("{0} Face detected: {1}", Time(), bounds); var landmarkData = face.QueryLandmarks(); if (landmarkData != null) { PXCMFaceData.LandmarkPoint[] landmarks; landmarkData.QueryPoints(out landmarks); var landmarkDict = new Dictionary <string, Point>(); foreach (PXCMFaceData.LandmarkPoint landmark in landmarks) { landmarkDict.Add("LANDMARK_" + landmark.source.index, new Point(landmark.image.x, landmark.image.y)); /*Debug.WriteLine("{0}/{1} at {2},{3},{4}", * landmark.source.index, * landmark.source.alias, * landmark.image.x, * landmark.image.y, * landmark.confidenceImage); */ } var landmarkArgs = new LandmarksEventArgs(landmarkDict, Resolution.Item1.width, Resolution.Item1.height); OnLandmarksFound(this, landmarkArgs); } // Expression var expressionValues = new Dictionary <string, double>(); var expressionData = face.QueryExpressions(); if (expressionData != null) { foreach (PXCMFaceData.ExpressionsData.FaceExpression expression in Enum.GetValues(typeof(PXCMFaceData.ExpressionsData.FaceExpression))) { PXCMFaceData.ExpressionsData.FaceExpressionResult score; expressionData.QueryExpression(expression, out score); expressionValues.Add(expression.ToString(), score.intensity / 100d); // Debug.WriteLine("{0} Expression: {1} == {2}", Time(), expression, score.intensity / 100d); } } OnFaceFound(this, new FaceEventArgs(new Rectangle(bounds.x, bounds.y, bounds.w, bounds.h), expressionValues, Resolution.Item1.width, Resolution.Item1.height)); OnDataAvailable(this, new DataEventArgs(expressionValues)); } } }
// Use this for initialization void Start() { // Creates an instance of the sense manager to be called later session = PXCMSenseManager.CreateInstance(); //Output an error if there is no instance of the sense manager if (session == null) { Debug.LogError("SenseManager Init Failed!"); } // Enables hand and face tracking sts = session.EnableHand(); handAnalyzer = session.QueryHand(); sts2 = session.EnableFace(); faceAnalyzer = session.QueryFace(); if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR) Debug.LogError("PXCMSenseManager.EnableHand: " + sts); if (sts2 != pxcmStatus.PXCM_STATUS_NO_ERROR) Debug.LogError("PXCMSenseManager.EnableFace: " + sts2); // Initializes the pipeline; Init must be called only once sts = session.Init(); if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR) Debug.LogError("PXCMSenseManager.Init: " + sts); // Creates a hand config for future data PXCMHandConfiguration handconfig = handAnalyzer.CreateActiveConfiguration(); PXCMFaceConfiguration faceconfig = faceAnalyzer.CreateActiveConfiguration(); //If there is a handconfig instance if (handconfig != null) { handconfig.EnableAllAlerts(); handconfig.ApplyChanges(); handconfig.Dispose(); } if (faceconfig != null) { faceconfig.EnableAllAlerts(); faceconfig.ApplyChanges(); faceconfig.Dispose(); } }
// Initialise all the things public override void Init(PXCMSenseManager sManager) { senseManager = sManager; module = senseManager.QueryFace(); PXCMFaceConfiguration config = module.CreateActiveConfiguration(); config.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR); config.detection.isEnabled = true; config.ApplyChanges(); config.Update(); }
// Initialise all the things public override void Init(PXCMSenseManager sManager) { senseManager = sManager; module = senseManager.QueryFace(); fg = module.CreateActiveConfiguration(); fg.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR); fg.detection.isEnabled = true; // Set to enable all alerts fg.EnableAllAlerts(); // Apply changes fg.ApplyChanges(); fg.Update(); }
private PXCMFaceModule faceAnalyzer; //FaceModule Instance /// <summary> /// Use this for initialization /// Unity function called on the frame when a script is enabled /// just before any of the Update methods is called the first time. /// </summary> void Start() { faceRenderer = gameObject.GetComponent <FaceRenderer>(); /* Initialize a PXCMSenseManager instance */ psm = PXCMSenseManager.CreateInstance(); if (psm == null) { Debug.LogError("SenseManager Initialization Failed"); return; } /* Enable the color stream of size 640x480 */ psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480); /* Enable the face tracking module*/ sts = psm.EnableFace(); if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR) { Debug.LogError("PXCMSenseManager.EnableFace: " + sts); } /* Retrieve an instance of face to configure */ faceAnalyzer = psm.QueryFace(); if (faceAnalyzer == null) { Debug.LogError("PXCMSenseManager.QueryFace failed"); } /* Initialize the execution pipeline */ sts = psm.Init(); if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR) { Debug.LogError("PXCMSenseManager.Init Failed"); OnDisable(); return; } /* Retrieve a PXCMFaceConfiguration instance from a face to enable Gestures and Alerts */ PXCMFaceConfiguration config = faceAnalyzer.CreateActiveConfiguration(); config.detection.isEnabled = true; // 3D detection is the default tracking mode. config.landmarks.isEnabled = true; config.pose.isEnabled = true; config.QueryExpressions().Enable(); config.QueryExpressions().EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN); config.EnableAllAlerts(); config.ApplyChanges(); config.Dispose(); }
public override void Init(PXCMSenseManager sManager) { senseManager = sManager; // Get a face instance here (or inside the AcquireFrame/ReleaseFrame loop) for configuration module = senseManager.QueryFace(); // face is a PXCMFaceModule instance fg = module.CreateActiveConfiguration(); // Set to enable all alerts fg.EnableAllAlerts(); // Apply changes fg.ApplyChanges(); //fg.Update(); }
private void StartRealSense() { bool useHead = bool.Parse(ConfigurationManager.AppSettings["UseHead"]); // Instantiate and initialize the SenseManager senseManager = PXCMSenseManager.CreateInstance(); // Configure the Face Module (head tracking) or the Hand Module (cursor tracking) if (useHead) { senseManager.EnableFace(); face = senseManager.QueryFace(); faceConfig = face.CreateActiveConfiguration(); faceConfig.detection.isEnabled = true; faceConfig.QueryExpressions().Enable(); faceConfig.ApplyChanges(); } else { // Enable cursor tracking senseManager.EnableHand(); // Get an instance of the hand cursor module hand = senseManager.QueryHand(); // Get an instance of the cursor configuration var cursorConfig = hand.CreateActiveConfiguration(); // Make configuration changes and apply them cursorConfig.DisableAllAlerts(); cursorConfig.EnableTrackedJoints(true); cursorConfig.EnableStabilizer(true); cursorConfig.ApplyChanges(); } senseManager.Init(); // Create an instance of PXCMSmoother senseManager.session.CreateImpl <PXCMSmoother>(out smoother); smoother2D = smoother.Create2DQuadratic(.5F); smoother2D2 = smoother.Create2DQuadratic(1); // Start the worker thread processingThread = new Thread(new ThreadStart(ProcessingThread)); processingThread.Start(); }
public override void Work(Graphics g) { // Retrieve gesture data hand = senseManager.QueryHand(); face = senseManager.QueryFace(); // if (hand != null) { // Retrieve the most recent processed data handData = hand.CreateOutput(); handData.Update(); handWaving = handData.IsGestureFired("wave", out gestureData); handData.Dispose(); } if (face != null) { faceData = face.CreateOutput(); faceData.Update(); // search the detected faces Int32 nfaces = faceData.QueryNumberOfDetectedFaces(); for (Int32 i = 0; i < nfaces; i++) { // Retrieve the data instance PXCMFaceData.Face faceI = faceData.QueryFaceByIndex(i); PXCMFaceData.ExpressionsData edata = faceI.QueryExpressions(); if (edata != null) { edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out score); if (score.intensity >= 25) { smiling = true; } else { smiling = false; } Console.WriteLine(i + ": " + score.intensity); } } faceData.Dispose(); } }
private void ConfigureRealSense() { my.init(); child.Start(); try { // Create the SenseManager instance sm = PXCMSenseManager.CreateInstance(); // Enable the color stream sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, ImageWidth, ImageHeight, 30); // Enable person tracking sm.EnablePersonTracking(); personModule = sm.QueryPersonTracking(); PXCMPersonTrackingConfiguration personConfig = personModule.QueryConfiguration(); personConfig.SetTrackedAngles(PXCMPersonTrackingConfiguration.TrackingAngles.TRACKING_ANGLES_ALL); // Enable the face module sm.EnableFace(); PXCMFaceModule faceModule = sm.QueryFace(); PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration(); faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH); faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME; faceConfig.detection.maxTrackedFaces = 1; // Apply changes and initialize the SenseManager faceConfig.ApplyChanges(); sm.Init(); faceData = faceModule.CreateOutput(); // Mirror the image sm.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL); // Release resources personConfig.Dispose(); faceConfig.Dispose(); faceModule.Dispose(); } catch (Exception) { // For the sake of brevity we're not doing extensive exception handling in this code sample, // simply give a hint that the camera is not connected, and then throw the exception MessageBox.Show("Unable to configure the RealSense camera. Please make sure a R200 camera is connected.", "System Error"); throw; } }
public override void Init(PXCMSenseManager sManager) { this.senseManager = sManager; hand = senseManager.QueryHand(); face = senseManager.QueryFace(); handConfig = hand.CreateActiveConfiguration(); handConfig.EnableGesture("wave"); handConfig.EnableAllAlerts(); handConfig.ApplyChanges(); faceConfic = face.CreateActiveConfiguration(); faceConfic.EnableAllAlerts(); PXCMFaceConfiguration.ExpressionsConfiguration expc = faceConfic.QueryExpressions(); expc.Enable(); expc.EnableAllExpressions(); faceConfic.ApplyChanges(); Console.WriteLine("init smile done"); }
protected override void Uninitialize() { if (this.faceData != null) { this.faceData.Dispose(); this.faceData = null; } if (this.config != null) { this.config.Dispose(); this.config = null; } if (this.faceModule != null) { this.faceModule.Dispose(); this.faceModule = null; } base.Uninitialize(); }
public static void ConfigureRealSense() { //Start the SenseManager and session senseManager = PXCMSenseManager.CreateInstance(); // Enable the color stream //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30); // Enable the face module senseManager.EnableFace(); PXCMFaceModule faceModule = senseManager.QueryFace(); PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration(); // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking) faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH); // Enable facial recognition recognitionConfig = faceConfig.QueryRecognition(); recognitionConfig.Enable(); //Create a recognition database PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc storageDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc(); storageDesc.maxUsers = DatabaseUsers; recognitionConfig.CreateStorage(DatabaseName, out storageDesc); recognitionConfig.UseStorage(DatabaseName); LoadDatabaseFromFile(); recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS); faceConfig.ApplyChanges(); senseManager.Init(); faceData = faceModule.CreateOutput(); senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL); faceConfig.Dispose(); faceModule.Dispose(); }
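// ConfigureRealSense() above only sets recognition up; a sketch of the matching per-frame use,
// assuming the usual AcquireFrame loop and the static senseManager/faceData fields created
// above (IsRegistered/QueryUserID/RegisterUser are PXCMFaceData.RecognitionData methods):
private static void ProcessRecognition()
{
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        faceData.Update();
        if (faceData.QueryNumberOfDetectedFaces() > 0)
        {
            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
            PXCMFaceData.RecognitionData recognition = face.QueryRecognition();
            if (recognition.IsRegistered())
            {
                int userId = recognition.QueryUserID(); // previously enrolled user
            }
            else
            {
                recognition.RegisterUser(); // enroll; REGISTRATION_MODE_CONTINUOUS is set above
            }
        }
        senseManager.ReleaseFrame();
    }
}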
private void InitializeCamera() { FaceModule = SenseManager.QueryFace(); var config = FaceModule.CreateActiveConfiguration(); config.detection.isEnabled = false; config.landmarks.isEnabled = false; config.pose.isEnabled = true; config.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_FARTHEST_TO_CLOSEST; if (config.ApplyChanges().IsSuccessful()) { if (SenseManager.Init().IsError()) { MessageBox.Show("Error initializing the camera"); Close(); } } config.Dispose(); }
public Model() { width = 640; height = 480; framerate = 30; senseManager = PXCMSenseManager.CreateInstance(); senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate); // Enable face and hand tracking senseManager.EnableFace(); senseManager.EnableHand(); senseManager.Init(); face = senseManager.QueryFace(); faceConfig = face.CreateActiveConfiguration(); faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR); faceConfig.detection.isEnabled = true; PXCMFaceConfiguration.ExpressionsConfiguration expc = faceConfig.QueryExpressions(); expc.Enable(); expc.EnableAllExpressions(); faceConfig.ApplyChanges(); faceConfig.Update(); //faceData = face.CreateOutput(); //faceData.Update(); hand = senseManager.QueryHand(); PXCMHandConfiguration config = hand.CreateActiveConfiguration(); config.SetTrackingMode(PXCMHandData.TrackingModeType.TRACKING_MODE_FULL_HAND); config.ApplyChanges(); config.Update(); //handData = hand.CreateOutput(); //handData.Update(); modules = new List <RSModule>(); }
/** * Constructor of the model. * Sets up everything needed to use the camera: enables the color stream and the face tracker, and builds up the configuration. */ public Model(bool s) { stream = s; emotions[Emotion.ANGER] = 0; emotions[Emotion.CONTEMPT] = 0; emotions[Emotion.DISGUST] = 0; emotions[Emotion.FEAR] = 0; emotions[Emotion.JOY] = 0; emotions[Emotion.SADNESS] = 0; emotions[Emotion.SURPRISE] = 0; if (stream) { width = 1920; height = 1080; framerate = 30; senseManager = PXCMSenseManager.CreateInstance(); senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate); // Enable Face detection senseManager.EnableFace(); bool couldInit = senseManager.Init().IsSuccessful(); if (!couldInit) { MessageBox.Show("Could not connect to the hardware! Make sure you have the camera-drivers installed.", "Aww, dang"); Environment.Exit(1); } face = senseManager.QueryFace(); faceConfig = face.CreateActiveConfiguration(); faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH); faceConfig.detection.isEnabled = true; faceConfig.pose.isEnabled = true; faceConfig.ApplyChanges(); faceConfig.Update(); modules = new List <RSModule>(); } }
public FaceTrackerThread() { running = true; senseManager = PXCMSenseManager.CreateInstance(); senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60); senseManager.EnableFace(); senseManager.Init(); face = senseManager.QueryFace(); faceConfiguration = face.CreateActiveConfiguration(); faceConfiguration.detection.isEnabled = true; expressionConfiguration = faceConfiguration.QueryExpressions(); expressionConfiguration.Enable(); expressionConfiguration.EnableAllExpressions(); //Gaze detection gazec = faceConfiguration.QueryGaze(); gazec.isEnabled = true; faceConfiguration.EnableAllAlerts(); faceConfiguration.ApplyChanges(); }
private void CheckForDepthStream(PXCMCapture.Device.StreamProfileSet profiles, PXCMFaceModule faceModule) { PXCMFaceConfiguration faceConfiguration = faceModule.CreateActiveConfiguration(); if (faceConfiguration == null) { Debug.Assert(faceConfiguration != null); return; } PXCMFaceConfiguration.TrackingModeType trackingMode = faceConfiguration.GetTrackingMode(); faceConfiguration.Dispose(); //change if (trackingMode != PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH) return; if (profiles.depth.imageInfo.format == PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH) return; PXCMCapture.DeviceInfo dinfo; m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out dinfo); if (dinfo != null) MessageBox.Show( String.Format("Depth stream is not supported for device: {0}. \nUsing 2D tracking", dinfo.name), @"Face Tracking", MessageBoxButtons.OK, MessageBoxIcon.Information); }
private void PlaybackStreaming_PlayByFrameIndex() { this.m_stopped = false; InitStreamState(); switch (m_algoOption) { // Face algorithm case AlgoOption.Face: this.faceModule = manager.QueryFace(); if (faceModule == null) { MessageBox.Show("QueryFace failed"); return; } InitFaceState(); this.faceData = this.faceModule.CreateOutput(); if (faceData == null) { MessageBox.Show("CreateOutput failed"); return; } break; } if (manager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR) { #if DEBUG System.Windows.Forms.MessageBox.Show("init failed"); #endif return; } int nframes = manager.captureManager.QueryNumberOfFrames(); MessageBox.Show(nframes + ""); for (int i = 0;;) { if (m_pause) { continue; } manager.captureManager.SetFrameByIndex(i); manager.FlushFrame(); if (manager.AcquireFrame(true).IsError()) { break; } this.sample = manager.QuerySample(); if (sample.depth != null) { this.m_timestamp = (sample.depth.timeStamp); } else if (sample.color != null) { this.m_timestamp = sample.color.timeStamp; } m_timestamp_sec = m_timestamp / 10000000; if (m_timestamp_sec_init == -1) { m_timestamp_sec_init = m_timestamp_sec; } if (this.m_label != null) { //updateLabel(this.m_timestamp.ToString()); System.Threading.Thread t1 = new System.Threading.Thread(updateLabel); t1.Start((m_timestamp_sec - m_timestamp_sec_init).ToString()); } if (m_display) { this.DoRender(); } manager.ReleaseFrame(); if (this.m_playback_reverse) { if (i <= this.m_playback_framespeed) { break; } i -= this.m_playback_framespeed; } else { if (i >= nframes - this.m_playback_framespeed) { break; } i += this.m_playback_framespeed; } } faceData.Dispose(); manager.Dispose(); }
private void DoStreaming_SaveData() { this.m_stopped = false; InitStreamState(); // Set playback mode manager.captureManager.SetFileName(this.PlaybackFile, false); manager.captureManager.SetRealtime(false); int nOf = manager.captureManager.QueryNumberOfFrames(); switch (m_algoOption) { // Face algorithm case AlgoOption.Face: this.faceModule = manager.QueryFace(); if (faceModule == null) { MessageBox.Show("QueryFace failed"); return; } InitFaceState(); this.faceData = this.faceModule.CreateOutput(); if (faceData == null) { MessageBox.Show("CreateOutput failed"); return; } break; } if (manager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR) { #if DEBUG System.Windows.Forms.MessageBox.Show("init failed"); #endif return; } FacialLandmarks fl; FacialExpression fe; while (!m_stopped) { if (manager.AcquireFrame(false).IsError()) { break; } this.sample = manager.QuerySample(); //if (sample == null) { manager.ReleaseFrame(); continue; } if (sample.depth != null) { this.m_timestamp = (sample.depth.timeStamp); } else if (sample.color != null) { this.m_timestamp = sample.color.timeStamp; } else { continue; } // Only run detection when the next second has been reached m_timestamp_sec = m_timestamp / 10000000; if (m_timestamp_sec_init != -1 && m_timestamp_sec - m_timestamp_sec_init >= nOf) { break; } Console.WriteLine("curframe:" + m_timestamp_sec); if (m_timestamp_sec_init == -1) { m_timestamp_sec_init = m_timestamp_sec; } if (m_timestamp_sec_last == -1) { m_timestamp_sec_last = m_timestamp_sec - 1; } long interval = m_timestamp_sec - m_timestamp_sec_last; if (interval > 0) { if (interval > 1) { for (int i = 1; i < interval; i++) { buffer += (m_timestamp_sec_last + i - m_timestamp_sec_init).ToString() + " "; buffer += "\n"; Console.WriteLine((m_timestamp_sec_last + i - m_timestamp_sec_init).ToString()); } } buffer += (m_timestamp_sec - m_timestamp_sec_init).ToString() + " "; // Invoke the native algorithm and buffer the realtime data faceData.Update(); fl = this.GetFaceLandmarks(); fe = this.GetExpression(); if (fl != null) { buffer += fl.ToString(); } else { buffer += FacialLandmarks.generateBlank(); } if (fe != null) { buffer += fe.ToString(); } else { buffer += FacialExpression.generateBlank(); } buffer += "\n"; m_timestamp_sec_last = m_timestamp_sec; //Console.WriteLine((m_timestamp_sec- m_timestamp_sec_init).ToString()); } //if(m_timestamp_sec>m_timestamp_sec_last) //{ // // Invoke the native algorithm and buffer the realtime data // faceData.Update(); // fl = this.GetFaceLandmarks(); // fe = this.GetExpression(); // if (fl != null) // buffer += fl.ToString(); // if (fe != null) // buffer += fe.ToString(); // buffer += "\n"; // m_timestamp_sec_last = m_timestamp_sec; // Console.WriteLine(m_timestamp_sec.ToString()); //} // For displaying the video stream if (m_display) { this.DoRender(); } manager.ReleaseFrame(); } faceData.Dispose(); manager.Dispose(); Console.WriteLine("done!!!"); }
public MainWindow() { InitializeComponent(); //set the current date and time currentDateTime = DateTime.Now.ToString("yyyyMMddHHmmssfff"); //set total timer count to 0 and init vars highPerformanceTimer = new HiPerfTimer(); totalHighPerfTimeElapsed = 0; numLinesWritten = 0; //set the total number of lines written to 0 so we can track when to start the timer //init pipe stuff pipeClient = new MyClient(PIPE_NAME); pipeClient.SendMessage("I Am Intel RealSense"); //Debug.WriteLine("Server Ready"); //initialise combobox populateComboBox(); //init the exprToDisplay global var exprToDisplay = ""; //Work on the file //create paths string dirToCreate = "data"; string dirToCreateFull = System.IO.Path.GetFullPath(dirToCreate); Directory.CreateDirectory(dirToCreateFull); dirToCreate = "video"; dirToCreateFull = System.IO.Path.GetFullPath(dirToCreate); Directory.CreateDirectory(dirToCreateFull); //create the csv file to write to file = new StreamWriter("data/" + currentDateTime + "data" + ".csv"); //initialise global expressions array - faster to add the keys here? var enumListMain = Enum.GetNames(typeof(PXCMFaceData.ExpressionsData.FaceExpression)); exprTable = new Hashtable(); string initLine = ""; //Add the column schema //Initial line: timestamp and high prec time initLine += "TIMESTAMP,HIGH_PRECISION_TIME_FROM_START,STIMCODE"; //add all the expression data columns for (int i = 0; i < enumListMain.Length; i++) { exprTable.Add(enumListMain[i], 0); initLine += "," + enumListMain[i]; } //add the bounding rectangle column initLine += "," + "BOUNDING_RECTANGLE_HEIGHT" + "," + "BOUNDING_RECTANGLE_WIDTH" + "," + "BOUNDING_RECTANGLE_X" + "," + "BOUNDING_RECTANGLE_Y"; //add the average depth column initLine += "," + "AVERAGE_DEPTH"; //add landmark points column for (int i = 0; i < LANDMARK_POINTS_TOTAL; i++) { initLine += "," + "LANDMARK_" + i + "_X"; initLine += "," + "LANDMARK_" + i + "_Y"; } //add euler angles columns initLine += "," + "EULER_ANGLE_PITCH" + "," + "EULER_ANGLE_ROLL" + "," + "EULER_ANGLE_YAW"; initLine += "," + "QUATERNION_W" + "," + "QUATERNION_X" + "," + "QUATERNION_Y" + "," + "QUATERNION_Z"; //write the initial row to the file file.WriteLine(initLine); //configure the camera mode selection box cbCameraMode.Items.Add("Color"); cbCameraMode.Items.Add("IR"); cbCameraMode.Items.Add("Depth"); //configure initial camera mode cameraMode = "Color"; //initialise global vars numFacesDetected = 0; handWaving = false; handTrigger = false; handResetTimer = 0; lEyeClosedIntensity = 0; lEyeClosed = false; lEyeClosedTrigger = false; lEyeClosedResetTimer = 0; rEyeClosed = false; rEyeClosedTrigger = false; rEyeClosedResetTimer = 0; rEyeClosedIntensity = 0; emotionEvidence = 0; blinkTrigger = false; blinkResetTimer = 0; //global fps vars prevTime = 0; stopwatch = new Stopwatch(); // Instantiate and initialize the SenseManager senseManager = PXCMSenseManager.CreateInstance(); if (senseManager == null) { MessageBox.Show("Cannot initialise sense manager: closing in 20s, report to Sriram"); Thread.Sleep(20000); Environment.Exit(1); } //capture samples senseManager.captureManager.SetFileName("video/" + currentDateTime + ".raw", true); //Enable color stream senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS); senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS); senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS); //Enable face and hand tracking 
// AND EMOTION TRACKING senseManager.EnableHand(); senseManager.EnableFace(); senseManager.EnableEmotion(); //Initialise the senseManager - begin collecting data senseManager.Init(); // Configure the Hand Module hand = senseManager.QueryHand(); handConfig = hand.CreateActiveConfiguration(); handConfig.EnableGesture("wave"); handConfig.EnableAllAlerts(); handConfig.ApplyChanges(); //Configure the Face Module face = senseManager.QueryFace(); faceConfig = face.CreateActiveConfiguration(); faceConfig.EnableAllAlerts(); faceConfig.detection.isEnabled = true; //enables querydetection function to retrieve face loc data faceConfig.detection.maxTrackedFaces = 1; //MAXIMUM TRACKING - 1 FACE faceConfig.ApplyChanges(); //Configure the sub-face-module Expressions exprConfig = faceConfig.QueryExpressions(); exprConfig.Enable(); exprConfig.EnableAllExpressions(); faceConfig.ApplyChanges(); // Start the worker thread that processes the captured data in real-time processingThread = new Thread(new ThreadStart(ProcessingThread)); processingThread.Start(); }
//********************************* Private functions ******************************************************************* // Main body of the streaming loop private void DoStreaming() { this.m_stopped = false; InitStreamState(); switch (m_algoOption) { // Face algorithm case AlgoOption.Face: this.faceModule = manager.QueryFace(); if (faceModule == null) { MessageBox.Show("QueryFace failed"); return; } InitFaceState(); this.faceData = this.faceModule.CreateOutput(); if (faceData == null) { MessageBox.Show("CreateOutput failed"); return; } break; } if (manager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR) { #if DEBUG System.Windows.Forms.MessageBox.Show("init failed"); #endif return; } while (!m_stopped) { //if (m_pause) //{ // System.Threading.Thread.Sleep(10); // continue; //} if (manager.AcquireFrame(true).IsError()) { break; } this.sample = manager.QuerySample(); if (sample.depth != null) { this.m_timestamp = (sample.depth.timeStamp); } else if (sample.color != null) { this.m_timestamp = sample.color.timeStamp; } m_timestamp_sec = m_timestamp / 10000000; if (m_timestamp_sec_init == -1) { m_timestamp_sec_init = m_timestamp_sec; } if (this.m_label != null) { //updateLabel(this.m_timestamp.ToString()); System.Threading.Thread t1 = new System.Threading.Thread(updateLabel); t1.Start((m_timestamp_sec - m_timestamp_sec_init).ToString()); } //OnTimeStampChanged(this.m_timestamp.ToString()); // Invoke the native algorithm and buffer the realtime data faceData.Update(); FacialLandmarks fl = this.GetFaceLandmarks(); // For displaying the video stream if (m_display) { this.DoRender(); } manager.ReleaseFrame(); } faceData.Dispose(); manager.Dispose(); }
private bool InitializeRealSense() { pxcmStatus status; senseManager = PXCMSenseManager.CreateInstance(); if (senseManager == null) { Debug.LogError("Unable to create SenseManager."); return false; } status = senseManager.EnableFace(); if (status != pxcmStatus.PXCM_STATUS_NO_ERROR) { Debug.LogError("Couldn't enable the Face Module."); return false; } faceModule = senseManager.QueryFace(); if (faceModule == null) { Debug.LogError("Couldn't query the Face Module."); return false; } PXCMFaceConfiguration faceConfiguration = faceModule.CreateActiveConfiguration(); if (faceConfiguration == null) { Debug.LogError("Couldn't create an active configuration."); return false; } faceConfiguration.pose.isEnabled = true; faceConfiguration.pose.maxTrackedFaces = 1; PXCMFaceConfiguration.ExpressionsConfiguration expressionsConfiguration = faceConfiguration.QueryExpressions(); expressionsConfiguration.Enable(); status = expressionsConfiguration.EnableExpression(JumpExpression); if (status != pxcmStatus.PXCM_STATUS_NO_ERROR) { Debug.LogError("Unable to enable the expression " + JumpExpression + "."); return false; } status = expressionsConfiguration.EnableExpression(FireExpression); if (status != pxcmStatus.PXCM_STATUS_NO_ERROR) { Debug.LogError("Unable to enable the expression " + FireExpression + "."); return false; } status = faceConfiguration.ApplyChanges(); if (status != pxcmStatus.PXCM_STATUS_NO_ERROR) { Debug.LogError("Unable to apply configuration settings."); return false; } faceData = faceModule.CreateOutput(); if (faceData == null) { Debug.LogError("Couldn't create the data output object."); return false; } status = senseManager.Init(); if (status != pxcmStatus.PXCM_STATUS_NO_ERROR) { Debug.LogError("Unable to initialize SenseManager."); return false; } PXCMSession session = senseManager.QuerySession(); PXCMSmoother smoother; status = session.CreateImpl<PXCMSmoother>(out smoother); if (status != pxcmStatus.PXCM_STATUS_NO_ERROR) { Debug.LogError("Failed to create the smoother."); return false; } smoother2D = smoother.Create2DWeighted(10); return true; }
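// InitializeRealSense() above creates smoother2D but never shows its use; a sketch of the
// intended per-frame call, assuming the SDK utility Smoother2D.SmoothValue(PXCMPointF32),
// e.g. to damp a landmark or gaze point before it drives gameplay:
private PXCMPointF32 SmoothPoint(PXCMPointF32 raw)
{
    // Feed each raw sample in; the weighted smoother returns a damped point.
    return smoother2D.SmoothValue(raw);
}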
private void InitializeCamera() { FaceModule = SenseManager.QueryFace(); var config = FaceModule.CreateActiveConfiguration(); config.detection.isEnabled = true; config.landmarks.isEnabled = false; config.pose.isEnabled = false; config.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_CLOSEST_TO_FARTHEST; if (config.ApplyChanges().IsSuccessful()) { if (SenseManager.Init().IsError()) { MessageBox.Show("Error initializing the camera"); Close(); } } config.Dispose(); }
public void Run() { while (running && senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR) { //Console.WriteLine(DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss.fff", // CultureInfo.InvariantCulture)); // Get a face instance face = senseManager.QueryFace(); if (face != null) { Console.WriteLine("in face"); // Get face tracking processed data PXCMFaceData faceData = face.CreateOutput(); faceData.Update(); numberTrackedFaces = faceData.QueryNumberOfDetectedFaces(); //Console.WriteLine("numberTrackedFaces = " + numberTrackedFaces); for (Int32 i = 0; i < numberTrackedFaces; i++) { // Retrieve the face location data instance PXCMFaceData.Face faceDataFace = faceData.QueryFaceByIndex(i); if (faceDataFace != null) { // Retrieve face location data PXCMFaceData.DetectionData faceDetectionData = faceDataFace.QueryDetection(); PXCMFaceData.GazeCalibData gazeCalibData = faceDataFace.QueryGazeCalibration(); Byte[] buffer; PXCMFaceData.GazeCalibData.CalibrationStatus calibStatus; PXCMFaceData.GazeCalibData.CalibrationState state; PXCMPointI32 calibp; if (gazeCalibData != null) { calibStatus = gazeCalibData.QueryCalibData(out buffer); Console.WriteLine("calib status:" + calibStatus); } if (faceDetectionData != null) { PXCMFaceData.GazeData gazed = faceDataFace.QueryGaze(); if (gazed != null) { PXCMFaceData.GazePoint gazep = gazed.QueryGazePoint(); Console.WriteLine("RS Gaze points: " + gazep.screenPoint.x + "," + gazep.screenPoint.y); } //faceAverageDepth = 0; //bool b = faceDetectionData.QueryFaceAverageDepth(out faceAverageDepth); //faceAverageDepth = (Int32)faceAverageDepthFloat; //Console.WriteLine("b = " + b); //Console.Out.WriteLine("Depth:" + faceAverageDepth);//face depth in inches + (j++) + "." } if (gazeCalibData != null) { state = gazeCalibData.QueryCalibrationState(); switch (state) { case PXCMFaceData.GazeCalibData.CalibrationState.CALIBRATION_IDLE: // Visual cue to the user that the calibration process starts, or LoadCalibData. Console.WriteLine("in idle"); break; case PXCMFaceData.GazeCalibData.CalibrationState.CALIBRATION_NEW_POINT: // Visual cue to the user that a new calibration point is available. calibp = gazeCalibData.QueryCalibPoint(); Console.WriteLine("in new point"); Console.WriteLine(calibp.x + "," + calibp.y); break; case PXCMFaceData.GazeCalibData.CalibrationState.CALIBRATION_SAME_POINT: // Continue visual cue to the user at the same location. //Console.WriteLine("presenting same point"); break; case PXCMFaceData.GazeCalibData.CalibrationState.CALIBRATION_DONE: // Visual cue to the user that the calibration process is complete or calibration data is loaded. // Optionally save the calibration data. //Console.WriteLine("in calibration done"); Byte[] buffer2; calibStatus = gazeCalibData.QueryCalibData(out buffer2); Console.WriteLine(calibStatus); //m_oWorker.ReportProgress(100); break; } } } } faceData.Dispose(); } senseManager.ReleaseFrame(); } }
public void SimplePipeline() { PXCMSenseManager pp = m_form.Session.CreateSenseManager(); if (pp == null) { throw new Exception("PXCMSenseManager null"); } PXCMCaptureManager captureMgr = pp.captureManager; if (captureMgr == null) { throw new Exception("PXCMCaptureManager null"); } var selectedRes = m_form.GetCheckedColorResolution(); if (selectedRes != null && !m_form.IsInPlaybackState()) { // Set active camera PXCMCapture.DeviceInfo deviceInfo; m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out deviceInfo); captureMgr.FilterByDeviceInfo(m_form.GetCheckedDeviceInfo()); // activate the filter only in live/record mode; not needed in playback mode var set = new PXCMCapture.Device.StreamProfileSet { color = { frameRate = selectedRes.Item2, imageInfo = { format = selectedRes.Item1.format, height = selectedRes.Item1.height, width = selectedRes.Item1.width } } }; if (m_form.IsPulseEnabled() && (set.color.imageInfo.width < 1280 || set.color.imageInfo.height < 720)) { captureMgr.FilterByStreamProfiles(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0); } else { captureMgr.FilterByStreamProfiles(set); } } // Set Source & Landmark Profile Index if (m_form.IsInPlaybackState()) { //pp.captureManager.FilterByStreamProfiles(null); captureMgr.SetFileName(m_form.GetFileName(), false); captureMgr.SetRealtime(false); } else if (m_form.GetRecordState()) { captureMgr.SetFileName(m_form.GetFileName(), true); } // Set Module pp.EnableFace(); PXCMFaceModule faceModule = pp.QueryFace(); if (faceModule == null) { Debug.Assert(faceModule != null); return; } PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration(); if (moduleConfiguration == null) { Debug.Assert(moduleConfiguration != null); return; } var checkedProfile = m_form.GetCheckedProfile(); var mode = m_form.FaceModesMap.First(x => x.Value == checkedProfile).Key; moduleConfiguration.SetTrackingMode(mode); moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT; moduleConfiguration.detection.maxTrackedFaces = m_form.NumDetection; moduleConfiguration.landmarks.maxTrackedFaces = m_form.NumLandmarks; moduleConfiguration.pose.maxTrackedFaces = m_form.NumPose; PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions(); if (econfiguration == null) { throw new Exception("ExpressionsConfiguration null"); } econfiguration.properties.maxTrackedFaces = m_form.NumExpressions; econfiguration.EnableAllExpressions(); moduleConfiguration.detection.isEnabled = m_form.IsDetectionEnabled(); moduleConfiguration.landmarks.isEnabled = m_form.IsLandmarksEnabled(); moduleConfiguration.pose.isEnabled = m_form.IsPoseEnabled(); if (m_form.IsExpressionsEnabled()) { econfiguration.Enable(); } PXCMFaceConfiguration.PulseConfiguration pulseConfiguration = moduleConfiguration.QueryPulse(); if (pulseConfiguration == null) { throw new Exception("pulseConfiguration null"); } pulseConfiguration.properties.maxTrackedFaces = m_form.NumPulse; if (m_form.IsPulseEnabled()) { pulseConfiguration.Enable(); } qrecognition = moduleConfiguration.QueryRecognition(); if (qrecognition == null) { throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null"); } if (m_form.IsRecognitionChecked()) { qrecognition.Enable(); #region Load face recognition database if (File.Exists(DatabasePath)) { m_form.UpdateStatus("Loading database", MainForm.Label.StatusLabel); List <RecognitionFaceData> faceData = null; FaceDatabaseFile.Load(DatabasePath, ref faceData, ref NameMapping); FaceData = faceData.ToArray();
qrecognition.SetDatabase(FaceData); } #endregion } moduleConfiguration.EnableAllAlerts(); moduleConfiguration.SubscribeAlert(FaceAlertHandler); pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges(); m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel); if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR) { m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel); } else { using (PXCMFaceData moduleOutput = faceModule.CreateOutput()) { Debug.Assert(moduleOutput != null); PXCMCapture.Device.StreamProfileSet profiles; PXCMCapture.Device device = captureMgr.QueryDevice(); if (device == null) { throw new Exception("device null"); } device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles); CheckForDepthStream(profiles, faceModule); m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel); m_timer = new FPSTimer(m_form); #region loop while (!m_form.Stopped) { if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR) { break; } var isConnected = pp.IsConnected(); DisplayDeviceConnection(isConnected); if (isConnected) { var sample = pp.QueryFaceSample(); if (sample == null) { pp.ReleaseFrame(); continue; } switch (mode) { case PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR: if (sample.ir != null) { DisplayPicture(sample.ir); } break; default: DisplayPicture(sample.color); break; } moduleOutput.Update(); PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition(); if (recognition == null) { pp.ReleaseFrame(); continue; } if (recognition.properties.isEnabled) { UpdateRecognition(moduleOutput); } m_form.DrawGraphics(moduleOutput); m_form.UpdatePanel(); } pp.ReleaseFrame(); } #endregion } // moduleConfiguration.UnsubscribeAlert(FaceAlertHandler); // moduleConfiguration.ApplyChanges(); m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel); } #region Save face recognition database file if (DatabaseChanged) { FaceDatabaseFile.Save(DatabasePath, FaceData.ToList(), NameMapping); } #endregion var dbm = new FaceDatabaseManager(pp); moduleConfiguration.Dispose(); pp.Close(); pp.Dispose(); }
//private void DisplayDeviceConnection(bool isConnected) //{ // if (isConnected && !m_wasConnected) m_form.UpdateStatus("Device Reconnected", MainForm.Label.StatusLabel); // else if (!isConnected && m_wasConnected) // m_form.UpdateStatus("Device Disconnected", MainForm.Label.StatusLabel); // m_wasConnected = isConnected; //} //private void DisplayPicture(PXCMImage image) //{ // PXCMImage.ImageData data; // if (image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data) < // pxcmStatus.PXCM_STATUS_NO_ERROR) return; // m_form.DrawBitmap(data.ToBitmap(0, image.info.width, image.info.height)); // m_timer.Tick(""); // image.ReleaseAccess(data); //} private void CheckForDepthStream(PXCMCapture.Device.StreamProfileSet profiles, PXCMFaceModule faceModule) { PXCMFaceConfiguration faceConfiguration = faceModule.CreateActiveConfiguration(); if (faceConfiguration == null) { Debug.Assert(faceConfiguration != null); return; } PXCMFaceConfiguration.TrackingModeType trackingMode = faceConfiguration.GetTrackingMode(); faceConfiguration.Dispose(); if (trackingMode != PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH) { return; } if (profiles.depth.imageInfo.format == PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH) { return; } PXCMCapture.DeviceInfo dinfo; m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out dinfo); if (dinfo != null) { MessageBox.Show( String.Format("Depth stream is not supported for device: {0}. \nUsing 2D tracking", dinfo.name), @"Face Tracking", MessageBoxButtons.OK, MessageBoxIcon.Information); } }
// Update is called once per frame void Update() { if (session == null) { return; } // For accessing hand data handAnalyzer = session.QueryHand(); faceAnalyzer = session.QueryFace(); if (handAnalyzer != null) { PXCMHandData handData = handAnalyzer.CreateOutput(); if (handData != null) { handData.Update(); PXCMHandData.IHand IHAND; // IHand instance for accessing future data // Int32 IhandData; // for QueryOpenness Value // PXCMPoint3DF32 location; // Stores hand tracking position //Fills IHAND with information to later be grabbed and used for tracking + openness handData.QueryHandData(PXCMHandData.AccessOrderType.ACCESS_ORDER_NEAR_TO_FAR, 0, out IHAND); // If there is data in IHand if (IHAND != null) { // Debug.DrawLine(transform.position, hit.point, Color.red); // Inits hand tracking from the center of the hand. // location = IHAND.QueryMassCenterWorld(); // if (mCurrentDart != null) // { // Vector3 locationUnity = new Vector3(location.x, location.y, location.z); // mCurrentDart.transform.localPosition = locationUnity * RSScale; // } } } handAnalyzer.Dispose(); session.ReleaseFrame(); } if (faceAnalyzer != null) { PXCMFaceData facedata = faceAnalyzer.CreateOutput(); if (facedata != null) { Int32 nfaces = facedata.QueryNumberOfDetectedFaces(); for (Int32 i = 0; i < nfaces; i++) { // Retrieve the face landmark data instance PXCMFaceData.Face face = facedata.QueryFaceByIndex(i); PXCMFaceData.PoseData pdata = face.QueryPose(); // retrieve the pose information PXCMFaceData.PoseEulerAngles angles; pdata.QueryPoseAngles(out angles); Debug.Log("Euler Angles yaw: " + angles.yaw); Debug.Log("Euler Angles pitch: " + angles.pitch); Debug.Log("Euler Angles roll: " + angles.roll); angles.pitch = gameObject.transform.rotation.z; angles.yaw = gameObject.transform.rotation.y; } // device is a PXCMCapture.Device instance } } }
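// Note: the two assignments at the end of Update() copy the GameObject's current rotation into
// the local 'angles' struct, which has no effect on the scene. If the intent was to drive the
// object from head pose, a sketch (Unity API; the axis mapping and signs are a guess):
// gameObject.transform.rotation = Quaternion.Euler(angles.pitch, angles.yaw, angles.roll);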
private void ProcessingThread() { // Start AcquireFrame/ReleaseFrame loop - MAIN PROCESSING LOOP while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR) { if (firstFrame == true) { firstFrame = false; //pipeClient.SendMessage(CAMERA_CONNECTED_MESSAGE); } //Get sample from the sensemanager to convert to bitmap and show PXCMCapture.Sample sample = senseManager.QuerySample(); Bitmap colorBitmap; PXCMImage.ImageData colorData = null; // Get color/ir image data if (cameraMode == "Color") sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData); else if (cameraMode == "IR") sample.ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData); else if (cameraMode == "Depth") sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData); // depth rendering is broken -> fall back to color so colorData is never null // sample.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out colorData); else sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData); //convert it to bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height); // Retrieve hand, face, and emotion data hand = senseManager.QueryHand(); face = senseManager.QueryFace(); emotion = senseManager.QueryEmotion(); //Process hand data if (hand != null) { // Retrieve the most recent processed data handData = hand.CreateOutput(); handData.Update(); handWaving = handData.IsGestureFired("wave", out gestureData); } //Process face data if (face != null) { // Retrieve the most recent processed data faceData = face.CreateOutput(); faceData.Update(); numFacesDetected = faceData.QueryNumberOfDetectedFaces(); if (numFacesDetected > 0) { // for (Int32 i = 0; i < numFacesDetected; i++) --> MULTIPLE FACE DETECTION DISABLED, UNCOMMENT TO INCLUDE // { // PXCMFaceData.Face singleFace = faceData.QueryFaceByIndex(i); --> FOR MULTIPLE FACE DETECTION //get all possible data from frame PXCMFaceData.Face singleFaceData = faceData.QueryFaceByIndex(0); //only getting first face!
PXCMFaceData.ExpressionsData singleExprData = singleFaceData.QueryExpressions(); PXCMFaceData.DetectionData detectionData = singleFaceData.QueryDetection(); PXCMFaceData.LandmarksData landmarksData = singleFaceData.QueryLandmarks(); PXCMFaceData.PoseData poseData = singleFaceData.QueryPose(); //Work on face location data from detectionData if (detectionData != null) { // vars are defined globally detectionData.QueryBoundingRect(out boundingRect); detectionData.QueryFaceAverageDepth(out averageDepth); } //Work on getting landmark data if (landmarksData != null) { //var is defined globally landmarksData.QueryPoints(out landmarkPoints); } //Work on getting euler angles for face pose data if (poseData != null) { //var is defined globally poseData.QueryPoseAngles(out eulerAngles); poseData.QueryPoseQuaternion(out quaternionAngles); } //Do work on all face location data from singleExprData if (singleExprData != null) { //get scores and intensities for right and left eye closing - 22 possible expressions --> put into hashtable PXCMFaceData.ExpressionsData.FaceExpressionResult score; //this gets a list of enum names as strings var enumNames = Enum.GetNames(typeof(PXCMFaceData.ExpressionsData.FaceExpression)); // for each enum name, query its expression intensity and store it for (int j = 0; j < enumNames.Length; j++) { PXCMFaceData.ExpressionsData.FaceExpressionResult innerScore; singleExprData.QueryExpression((PXCMFaceData.ExpressionsData.FaceExpression)(j), out innerScore); //Console.WriteLine((PXCMFaceData.ExpressionsData.FaceExpression)(j)); exprTable[enumNames[j]] = innerScore.intensity; } //Attempt to write to file if there are any significant events /* //check if everything is 0 bool significantEntry = false; foreach (DictionaryEntry entry in exprTable) { if (Convert.ToInt32(entry.Value.ToString()) != 0) { significantEntry = true; break; } } if (significantEntry) */ writeSignificantToFile(exprTable, boundingRect, averageDepth, landmarkPoints, eulerAngles, quaternionAngles); singleExprData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_LEFT, out score); lEyeClosedIntensity = score.intensity; singleExprData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_RIGHT, out score); rEyeClosedIntensity = score.intensity; //eye closed logic -> will be reset in UI thread after some number of frames if (lEyeClosedIntensity >= EYE_CLOSED_DETECT_THRESHOLD) lEyeClosed = true; if (rEyeClosedIntensity >= EYE_CLOSED_DETECT_THRESHOLD) rEyeClosed = true; } // } } } if (emotion != null) { int numFaces = emotion.QueryNumFaces(); for (int fid = 0; fid < numFaces; fid++) { //TODO - MULTIPLE FACE IMPLEMENTATION? //retrieve all estimated emotion data PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[10]; emotion.QueryAllEmotionData(fid, out arrData); //find emotion with maximum evidence int idx_outstanding_emotion = 0; int max_evidence = arrData[0].evidence; for (int k = 1; k < 7; k++) { if (arrData[k].evidence >= max_evidence) { max_evidence = arrData[k].evidence; idx_outstanding_emotion = k; } } currentEmotion = arrData[idx_outstanding_emotion].eid; //Console.WriteLine(currentEmotion.ToString()); emotionEvidence = max_evidence; // Console.WriteLine(currentEmotion.ToString() + ":" + emotionEvidence.ToString()); } } // Update the user interface UpdateUI(colorBitmap); // Release the frame if (handData != null) handData.Dispose(); // colorBitmap.Dispose(); sample.color.ReleaseAccess(colorData); senseManager.ReleaseFrame(); } }
private void FaceTrackingPipeline() { IsDispose = false; OnStart?.Invoke(this, null); #region Manager Init realSenseManager = RealSenseObjects.Session.CreateSenseManager(); if (realSenseManager == null) { MessageBox.Show( "PXCMSenseManager initialization failed.", "Initialization failed", MessageBoxButtons.OK, MessageBoxIcon.Error); OnStop?.Invoke(this, null); return; } PXCMCaptureManager captureManager = realSenseManager.captureManager; if (captureManager == null) { MessageBox.Show( "PXCMCaptureManager initialization failed.", "Initialization failed", MessageBoxButtons.OK, MessageBoxIcon.Error); OnStop?.Invoke(this, null); return; } #endregion #region Basic configuration // Set the device captureManager.FilterByDeviceInfo(Form.SelectedDevice); // Set the stream profile captureManager.FilterByStreamProfiles(Form.SelectedDeviceStreamProfile); // Enable the face tracking module realSenseManager.EnableFace(); PXCMFaceModule faceModule = realSenseManager.QueryFace(); if (faceModule == null) { MessageBox.Show( "Failed to get PXCMFaceModule.", "Initialization failed", MessageBoxButtons.OK, MessageBoxIcon.Error); OnStop?.Invoke(this, null); return; } // Create the face tracking module configuration moduleConfiguration = faceModule.CreateActiveConfiguration(); if (moduleConfiguration == null) { MessageBox.Show( "Failed to create PXCMFaceConfiguration.", "Initialization failed", MessageBoxButtons.OK, MessageBoxIcon.Error); OnStop?.Invoke(this, null); return; } // Tracking mode settings moduleConfiguration.SetTrackingMode(Form.ModeType); moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT; moduleConfiguration.detection.isEnabled = true; moduleConfiguration.detection.maxTrackedFaces = 4; // track at most 4 faces moduleConfiguration.landmarks.isEnabled = false; moduleConfiguration.pose.isEnabled = false; recognitionConfig = moduleConfiguration.QueryRecognition(); if (recognitionConfig == null) { MessageBox.Show( "Failed to create RecognitionConfiguration.", "Initialization failed", MessageBoxButtons.OK, MessageBoxIcon.Error); OnStop?.Invoke(this, null); return; } recognitionConfig.Enable(); #endregion #region Load database data if (Form.FaceData != null) { recognitionConfig.SetDatabase(Form.FaceData); moduleConfiguration.ApplyChanges(); } #endregion #region Prepare to start moduleConfiguration.EnableAllAlerts(); //moduleConfiguration.SubscribeAlert(FaceAlertHandler); pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges(); Form.SetStatus("Initializing RealSenseManager"); if (applyChangesStatus.IsError() || realSenseManager.Init().IsError()) { MessageBox.Show( "RealSenseManager initialization failed; please check the settings.", "Initialization failed", MessageBoxButtons.OK, MessageBoxIcon.Error); OnStop?.Invoke(this, null); return; } #endregion using (moduleOutput = faceModule.CreateOutput()) { PXCMCapture.Device.StreamProfileSet profiles; PXCMCapture.Device device = captureManager.QueryDevice(); if (device == null) { MessageBox.Show( "Failed to get the device.", "Initialization failed", MessageBoxButtons.OK, MessageBoxIcon.Error); OnStop?.Invoke(this, null); return; } device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles); #region Loop while (!_Stop) { while (_Paush) { Application.DoEvents(); } if (realSenseManager.AcquireFrame(true).IsError()) { break; } var isConnected = realSenseManager.IsConnected(); if (isConnected) { var sample = realSenseManager.QueryFaceSample(); if (sample == null) { realSenseManager.ReleaseFrame(); continue; } #region Frame retrieval PXCMImage image = null; if (Form.ModeType == PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR) { image = sample.ir; } else { image = sample.color; } #endregion moduleOutput.Update(); // update recognition PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition(); if (recognition == null) { realSenseManager.ReleaseFrame(); continue; }
#region 繪圖與事件 OnFrame?.Invoke(this, new FaceRecognitionEventArgs() { Image = ToBitmap(image) }); FindFace(moduleOutput); #endregion } //發布框 realSenseManager.ReleaseFrame(); } #endregion //更新資料庫緩衝區 //Buffer = moduleOutput.QueryRecognitionModule().GetDatabaseBuffer(); } #region 釋放資源 moduleConfiguration.Dispose(); realSenseManager.Close(); realSenseManager.Dispose(); #endregion IsDispose = true; OnStop?.Invoke(this, null); }
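// The commented-out "database buffer" line above hints at persisting the recognition
// database so enrolled faces survive a restart. Below is a hedged sketch of one way to
// export it, assuming the SDK's documented PXCMFaceData.RecognitionModuleData API
// (QueryDatabaseSize / QueryDatabaseBuffer); the helper name and the "faces.db" path
// are illustrative only and not part of the original code.
private static byte[] ExportRecognitionDatabase(PXCMFaceData output) {
    PXCMFaceData.RecognitionModuleData rmd = output.QueryRecognitionModule();
    if (rmd == null) return null;

    int size = rmd.QueryDatabaseSize();
    if (size <= 0) return null;

    byte[] buffer = new byte[size];
    rmd.QueryDatabaseBuffer(buffer); // copies the internal database into our buffer
    System.IO.File.WriteAllBytes("faces.db", buffer);
    return buffer; // can later be fed back via RecognitionConfiguration.SetDatabase
}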
public void SimplePipeline() {
    PXCMSenseManager pp = m_form.Session.CreateSenseManager();
    if (pp == null) {
        throw new Exception("PXCMSenseManager null");
    }

    // Set Source & Landmark Profile Index
    PXCMCapture.DeviceInfo info;
    if (m_form.GetRecordState()) {
        pp.captureManager.SetFileName(m_form.GetFileName(), true);
        if (m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out info)) {
            pp.captureManager.FilterByDeviceInfo(info);
        }
    } else if (m_form.GetPlaybackState()) {
        pp.captureManager.SetFileName(m_form.GetFileName(), false);
        PXCMCaptureManager cmanager = pp.QueryCaptureManager();
        if (cmanager == null) {
            throw new Exception("PXCMCaptureManager null");
        }
        cmanager.SetRealtime(false);
    } else {
        if (m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out info)) {
            pp.captureManager.FilterByDeviceInfo(info);
            Tuple<PXCMImage.ImageInfo, PXCMRangeF32> selectedRes = m_form.GetCheckedColorResolution();
            var set = new PXCMCapture.Device.StreamProfileSet();
            set.color.frameRate = selectedRes.Item2;
            set.color.imageInfo.format = selectedRes.Item1.format;
            set.color.imageInfo.width = selectedRes.Item1.width;
            set.color.imageInfo.height = selectedRes.Item1.height;
            pp.captureManager.FilterByStreamProfiles(set);
        }
    }

    // Set Module
    pp.EnableFace();
    PXCMFaceModule faceModule = pp.QueryFace();
    if (faceModule == null) {
        Debug.Assert(faceModule != null);
        return;
    }

    PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();
    if (moduleConfiguration == null) {
        Debug.Assert(moduleConfiguration != null);
        return;
    }

    PXCMFaceConfiguration.TrackingModeType mode = m_form.GetCheckedProfile().Contains("3D")
        ? PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH
        : PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR;
    moduleConfiguration.SetTrackingMode(mode);

    moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
    moduleConfiguration.detection.maxTrackedFaces = m_form.NumDetection;
    moduleConfiguration.landmarks.maxTrackedFaces = m_form.NumLandmarks;
    moduleConfiguration.pose.maxTrackedFaces = m_form.NumPose;

    PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions();
    if (econfiguration == null) {
        throw new Exception("ExpressionsConfiguration null");
    }
    econfiguration.properties.maxTrackedFaces = m_form.NumExpressions;
    econfiguration.EnableAllExpressions();

    moduleConfiguration.detection.isEnabled = m_form.IsDetectionEnabled();
    moduleConfiguration.landmarks.isEnabled = m_form.IsLandmarksEnabled();
    moduleConfiguration.pose.isEnabled = m_form.IsPoseEnabled();
    if (m_form.IsExpressionsEnabled()) {
        econfiguration.Enable();
    }

    PXCMFaceConfiguration.RecognitionConfiguration qrecognition = moduleConfiguration.QueryRecognition();
    if (qrecognition == null) {
        throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
    }
    if (m_form.IsRecognitionChecked()) {
        qrecognition.Enable();
    }

    moduleConfiguration.EnableAllAlerts();
    moduleConfiguration.SubscribeAlert(FaceAlertHandler);

    pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();
    m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel);

    if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR) {
        m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel);
    } else {
        using (PXCMFaceData moduleOutput = faceModule.CreateOutput()) {
            Debug.Assert(moduleOutput != null);
            PXCMCapture.Device.StreamProfileSet profiles;
            PXCMCaptureManager cmanager = pp.QueryCaptureManager();
            if (cmanager == null) {
                throw new Exception("capture manager null");
            }
            PXCMCapture.Device device = cmanager.QueryDevice();
            if (device == null) {
                throw new Exception("device null");
            }

            device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
            CheckForDepthStream(profiles, faceModule);

            ushort threshold = device.QueryDepthConfidenceThreshold();
            int filter_option = device.QueryIVCAMFilterOption();
            int range_tradeoff = device.QueryIVCAMMotionRangeTradeOff();

            device.SetDepthConfidenceThreshold(1);
            device.SetIVCAMFilterOption(6);
            device.SetIVCAMMotionRangeTradeOff(21);

            if (m_form.IsMirrored()) {
                device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
            } else {
                device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED);
            }

            m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel);
            m_timer = new FPSTimer(m_form);

            while (!m_form.Stopped) {
                if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR) {
                    break;
                }
                bool isConnected = pp.IsConnected();
                DisplayDeviceConnection(isConnected);
                if (isConnected) {
                    PXCMCapture.Sample sample = pp.QueryFaceSample();
                    if (sample == null) {
                        pp.ReleaseFrame();
                        continue;
                    }
                    DisplayPicture(sample.color);
                    moduleOutput.Update();
                    if (moduleConfiguration.QueryRecognition().properties.isEnabled) {
                        UpdateRecognition(moduleOutput);
                    }
                    m_form.DrawGraphics(moduleOutput);
                    m_form.UpdatePanel();
                }
                pp.ReleaseFrame();
            }

            device.SetDepthConfidenceThreshold(threshold);
            device.SetIVCAMFilterOption(filter_option);
            device.SetIVCAMMotionRangeTradeOff(range_tradeoff);
        }

        moduleConfiguration.UnsubscribeAlert(FaceAlertHandler);
        moduleConfiguration.ApplyChanges();
        m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel);
    }

    moduleConfiguration.Dispose();
    pp.Close();
    pp.Dispose();
}
/// <summary> /// Called when a face is detected. /// </summary> /// <param name="module">The module.</param> private void OnFaceCallback(PXCMFaceModule module) { PXCMRectI32 bounds; using (var faceData = module.CreateOutput()) { faceData.Update(); var faces = faceData.QueryFaces(); foreach (var face in faces) { var detection = face.QueryDetection(); detection.QueryBoundingRect(out bounds); // Debug.WriteLine("{0} Face detected: {1}", Time(), bounds); var landmarkData = face.QueryLandmarks(); if (landmarkData != null) { PXCMFaceData.LandmarkPoint[] landmarks; landmarkData.QueryPoints(out landmarks); var landmarkDict = new Dictionary<string, Point>(); foreach(PXCMFaceData.LandmarkPoint landmark in landmarks) { landmarkDict.Add("LANDMARK_" + landmark.source.index, new Point(landmark.image.x, landmark.image.y)); /*Debug.WriteLine("{0}/{1} at {2},{3},{4}", landmark.source.index, landmark.source.alias, landmark.image.x, landmark.image.y, landmark.confidenceImage); */ } var landmarkArgs = new LandmarksEventArgs(landmarkDict, Resolution.Item1.width, Resolution.Item1.height); OnLandmarksFound(this, landmarkArgs); } // Expression var expressionValues = new Dictionary<string, double>(); var expressionData = face.QueryExpressions(); if (expressionData != null) { foreach (PXCMFaceData.ExpressionsData.FaceExpression expression in Enum.GetValues(typeof(PXCMFaceData.ExpressionsData.FaceExpression))) { PXCMFaceData.ExpressionsData.FaceExpressionResult score; expressionData.QueryExpression(expression, out score); expressionValues.Add(expression.ToString(), score.intensity / 100d); // Debug.WriteLine("{0} Expression: {1} == {2}", Time(), expression, score.intensity / 100d); } } OnFaceFound(this, new FaceEventArgs(new Rectangle(bounds.x, bounds.y, bounds.w, bounds.h), expressionValues, Resolution.Item1.width, Resolution.Item1.height)); OnDataAvailable(this, new DataEventArgs(expressionValues)); } } }
// Use this for initialization
void Start() {
    // Creates an instance of the sense manager to be called later
    session = PXCMSenseManager.CreateInstance();

    // Output an error if there is no instance of the sense manager
    if (session == null) {
        Debug.LogError("SenseManager Init Failed!");
        return;
    }

    // Enables hand and face tracking
    sts = session.EnableHand();
    handAnalyzer = session.QueryHand();
    sts2 = session.EnableFace();
    faceAnalyzer = session.QueryFace();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR) {
        Debug.LogError("PXCSenseManager.EnableHand: " + sts);
    }
    if (sts2 != pxcmStatus.PXCM_STATUS_NO_ERROR) {
        Debug.LogError("PXCSenseManager.EnableFace: " + sts2);
    }

    // Initializes the session once for both modules (the original called Init() twice,
    // which is redundant; the second call would fail on an already-initialized session)
    sts = session.Init();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR) {
        Debug.LogError("PXCSenseManager.Init: " + sts);
    }

    // Creates hand and face configs for future data
    PXCMHandConfiguration handconfig = handAnalyzer.CreateActiveConfiguration();
    PXCMFaceConfiguration faceconfig = faceAnalyzer.CreateActiveConfiguration();

    // If there is a handconfig instance
    if (handconfig != null) {
        handconfig.EnableAllAlerts();
        handconfig.ApplyChanges();
        handconfig.Dispose();
    }
    if (faceconfig != null) {
        faceconfig.EnableAllAlerts();
        faceconfig.ApplyChanges();
        faceconfig.Dispose();
    }
}
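// Start() above never frees the camera. A hedged sketch of the matching Unity teardown
// is below, using the same session/handAnalyzer/faceAnalyzer fields; Close() and
// Dispose() are the standard PXCMSenseManager cleanup calls.
void OnDisable() {
    // The module instances returned by QueryHand/QueryFace are managed internally by
    // the SenseManager, so only the session itself needs to be released.
    handAnalyzer = null;
    faceAnalyzer = null;
    if (session != null) {
        session.Close();   // stops streaming and releases the camera
        session.Dispose();
        session = null;
    }
}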