/// <summary>
/// Device-specific implementation of Update.
/// Updates data buffers of all active channels with data of current frame.
/// </summary>
/// <remarks>This method is implicitely called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
/// <seealso cref="Camera.Update"/>
protected override unsafe void UpdateImpl()
{
    bool synced = true;

    // Wait until a frame is ready: Synchronized or Asynchronous
    pxcmStatus status;
    status = pp.AcquireFrame(synced);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        log.Error(Name + ": error" + status.ToString());
        // FIX: bail out on a failed acquire. The original fell through and called
        // QuerySample() on an unacquired frame, dereferencing invalid data.
        return;
    }

    // Copy the depth plane into a freshly allocated 32-bit float image.
    sample = pp.QuerySample();
    PXCMImage.ImageData depthData;
    sample.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out depthData);
    depthImage = new FloatCameraImage(sample.depth.info.width, sample.depth.info.height);
    memcpy(new IntPtr(depthImage.Data), depthData.planes[0], new UIntPtr((uint)sample.depth.info.width * (uint)sample.depth.info.height * (uint)sizeof(float)));
    sample.depth.ReleaseAccess(depthData);

    // Copy the 8-bit IR plane into a byte image.
    PXCMImage.ImageData irData;
    sample.ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_Y8, out irData);
    irImage = new ByteCameraImage(sample.ir.info.width, sample.ir.info.height);
    memcpy(new IntPtr(irImage.Data), irData.planes[0], new UIntPtr((uint)sample.ir.info.width * (uint)sample.ir.info.height * (uint)sizeof(byte)));
    sample.ir.ReleaseAccess(irData);

    pp.ReleaseFrame();
}
// Module callback: refreshes face data when the face module fires, caches the
// first face's bounding box and landmarks, then asks the UI thread to redraw.
pxcmStatus OnModuleProcessedFrame(int mid, PXCMBase module, PXCMCapture.Sample sample)
{
    // is it our module?
    if (mid == PXCMFaceModule.CUID)
    {
        this.faceData.Update();

        // Any faces?
        var firstFace = this.faceData.QueryFaces().FirstOrDefault();

        if (firstFace != null)
        {
            // face detection - the bounding rectangle of the face.
            var localFaceBox = default(PXCMRectI32);

            if (firstFace.QueryDetection()?.QueryBoundingRect(out localFaceBox) == true)
            {
                this.faceBox = localFaceBox;
            }

            // FIX: the original assigned QueryPoints' bool result to an unused
            // local; the interesting output is the out-parameter field.
            firstFace.QueryLandmarks()?.QueryPoints(out this.landmarks);
        }
        else
        {
            // No face this frame: clear the cached visuals.
            this.faceBox = null;
            this.landmarks = null;
        }
    }

    // Synchronously marshal the redraw onto the UI thread.
    this.Dispatcher.Invoke(this.DrawFaceFrameUIThread);

    return (pxcmStatus.PXCM_STATUS_NO_ERROR);
}
// Per-frame render callback: pulls one frame from the pipeline, refreshes the
// depth preview and the hand-tracking state, then releases the frame.
// Any exception closes the window after showing the message.
void CompositionTarget_Rendering(object sender, EventArgs e)
{
    try
    {
        // Non-blocking acquire; skip this tick if no frame is ready.
        pxcmStatus acquireStatus = senseManager.AcquireFrame(false);
        if (acquireStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        // Render the depth stream from this frame's sample, if present.
        PXCMCapture.Sample frameSample = senseManager.QuerySample();
        if (frameSample != null)
        {
            UpdateDepthImage(frameSample.depth);
        }

        // Refresh the hand-tracking data.
        UpdateHandFrame();

        // Let the pipeline move on to the next frame.
        senseManager.ReleaseFrame();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// take a single depth grey color image shot
/// </summary>
/// <returns>A bitmap of the current depth frame.</returns>
/// <exception cref="Exception">Thrown when no frame could be acquired.</exception>
public WriteableBitmap DepthSnapshot()
{
    // FIX: the original wrapped a single-shot capture in an infinite for(;;)
    // containing unreachable break/return statements; one pass is sufficient.
    if (senseManager.AcquireFrame(true).IsError())
    {
        throw new Exception("Failed to acquire frame");
    }

    // capture
    PXCMCapture.Sample sample = senseManager.QuerySample();
    PXCMImage MyImg = sample.depth;

    PXCMImage.ImageData MyImgData;
    MyImg.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out MyImgData);
    WriteableBitmap WBmp = MyImgData.ToWritableBitmap(0, MyImg.info.width, MyImg.info.height, 96, 96);
    // FIX: the original never released the acquired image data (access-lock leak).
    MyImg.ReleaseAccess(MyImgData);

    // release the frame before handing the bitmap back
    senseManager.ReleaseFrame();
    return (WBmp);
}
// Per-frame render callback: pulls one frame from the pipeline, refreshes the
// colour preview and the object-tracking state, then releases the frame.
void CompositionTarget_Rendering(object sender, EventArgs e)
{
    try
    {
        // Acquire a frame (non-blocking; bail out quietly if none is ready).
        pxcmStatus ret = senseManager.AcquireFrame(false);
        if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        // Fetch the tracker sample for this frame.
        PXCMCapture.Sample sample = senseManager.QueryTrackerSample();
        if (sample != null)
        {
            UpdateColorImage(sample.color);
        }

        // Refresh the object-tracking results.
        UpdateObjectTraking();

        // Release the frame so the pipeline can continue.
        senseManager.ReleaseFrame();
    }
    catch (Exception ex)
    {
        // NOTE(review): an exception thrown after AcquireFrame leaves the frame
        // unreleased, but the window is closed here anyway — confirm acceptable.
        MessageBox.Show(ex.Message);
        Close();
    }
}
// Frame-pump loop: acquires frames until the pipeline errors, updating the
// colour display, face analysis, optional grid overlay and picture box.
private void RunProcess()
{
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        PXCMCapture.Sample sample = senseManager.QuerySample();
        if (sample != null && faceData != null)
        {
            DisplayImage(sample.color);
            faceData.Update();
            Analyze(faceData);
            if (showGrid)
            {
                DrawFaceCamGrid();
            }
            UpdatePictureBox();
        }

        // FIX: the original called sample.color.Dispose() unconditionally, which
        // throws a NullReferenceException whenever QuerySample() returned null.
        sample?.color?.Dispose();

        senseManager.ReleaseFrame();
    }
}
/**
 * Update the View
 */
private void update()
{
    // Keep pumping as long as the pipeline delivers frames.
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR) // Got an image?
    {
        // <magic>
        PXCMCapture.Sample sample = senseManager.QueryFaceSample();
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);
        // FIX: the original never released the acquired colour data, leaking a
        // read lock on the image every frame. ToBitmap has already copied pixels.
        sample.color.ReleaseAccess(colorData);

        // FIX: dispose the Graphics wrapper (GDI handle) instead of leaking it.
        using (Graphics bitmapGraphics = Graphics.FromImage(colorBitmap))
        {
            modules.ForEach(delegate(RSModule mod) { mod.Work(bitmapGraphics); });
        }
        // </magic>

        // save to hard drive (careful!) - will be stored in project folder/bin/debug
        if (storeImages)
        {
            colorBitmap.Save("cap" + capNum++ + ".png");
        }

        // update PictureBox
        pb.Image = colorBitmap;
        senseManager.ReleaseFrame();
    }
}
/**
 * Update the View
 */
private void update()
{
    while (model.SenseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR) // Got an image?
    {
        // <magic>
        PXCMCapture.Sample sample = model.SenseManager.QueryFaceSample();
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

        // Refresh face-tracking output for this frame.
        model.FaceData = model.Face.CreateOutput();
        model.FaceData.Update();

        // props to Tanja
        model.FaceAktuell = model.FaceData.QueryFaceByIndex(0);
        if (model.FaceAktuell != null)
        {
            model.Edata = model.FaceAktuell.QueryExpressions();
        }

        // Refresh hand-tracking output for this frame.
        model.HandData = model.Hand.CreateOutput();
        model.HandData.Update();

        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);
        // FIX: release the colour data once it is copied into the bitmap; the
        // original leaked the access lock every frame.
        sample.color.ReleaseAccess(colorData);

        // FIX: dispose the Graphics wrapper instead of leaking the GDI handle.
        using (Graphics bitmapGraphics = Graphics.FromImage(colorBitmap))
        {
            model.Modules.ForEach(delegate(RSModule mod) { mod.Work(bitmapGraphics); });
        }

        // update PictureBox
        pb.Image = colorBitmap;
        model.SenseManager.ReleaseFrame();

        // DONE!
        model.FaceData.Dispose();
        model.HandData.Dispose();
        model.Edata = null;
    }
}
// Sample callback (not on the UI thread): caches the colour plane, records the
// image dimensions once, then blocks while the UI thread draws the frame.
pxcmStatus OnNewSample(int mid, PXCMCapture.Sample sample)
{
    PXCMImage.ImageData colorImage;

    // We get hold of the image here and we keep it until we have a chance
    // to draw it because we are not on the UI thread here. This is inefficient
    // but I think we get away with it here.
    bool acquired = sample.color.AcquireAccess(
        PXCMImage.Access.ACCESS_READ,
        PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32,
        out colorImage) == pxcmStatus.PXCM_STATUS_NO_ERROR;

    if (acquired)
    {
        this.currentColorImage = colorImage;

        if (!this.imageDimensions.HasArea)
        {
            this.imageDimensions.Width = sample.color.info.width;
            this.imageDimensions.Height = sample.color.info.height;
        }
    }

    // Synchronous: returns only after the UI thread finished drawing.
    this.Dispatcher.Invoke(this.DrawColourFrameUIThread);

    // FIX: only release what was actually acquired; the original called
    // ReleaseAccess unconditionally, even when AcquireAccess had failed.
    if (acquired)
    {
        sample.color.ReleaseAccess(colorImage);
    }

    return (pxcmStatus.PXCM_STATUS_NO_ERROR);
}
// Redraws the hand overlay for the current frame: wipes everything when no
// hands are tracked, otherwise prunes lost hands and draws the present ones.
public void DrawUI(PXCMCapture.Sample sample)
{
    bool noHands = (this.handMap == null) || (this.handMap.Count == 0);

    if (noHands)
    {
        this.ClearAll();
        return;
    }

    // Lazily create the visual maps on first use.
    this.drawnHandJointVisualMap = this.drawnHandJointVisualMap ?? new HandVisualMap();
    this.drawnHandBoneVisualMap = this.drawnHandBoneVisualMap ?? new BoneVisualMap();

    // Drop visuals for hands that vanished since the previous frame...
    this.ClearLostHands();

    // ...then draw what *is* present in the current frame of data.
    this.DrawHands();
}
// Per-tick update: grabs a frame without blocking, lazily initialises the
// blob and contour extractors from the depth stream's properties, and
// processes the depth image.
void Update()
{
    // Zero-timeout, unsynchronised acquire; bail if no frame is ready.
    if (sm.AcquireFrame(false, 0) != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    // Query and return available image samples.
    PXCMCapture.Sample currentSample = sm.QuerySample();
    if (currentSample != null && currentSample.depth != null)
    {
        // Query depth image properties.
        PXCMImage.ImageInfo depthInfo = currentSample.depth.QueryInfo();

        // One-time initialisation of the blob extraction algorithm.
        if (!isInitBlob)
        {
            m_blob.Init(depthInfo);
            isInitBlob = true;
        }

        // One-time initialisation of the contour extraction algorithm.
        if (!isInitContour)
        {
            m_contour.Init(depthInfo);
            isInitContour = true;
        }

        ProcessImage(currentSample.depth);
    }

    sm.ReleaseFrame();
}
/// <summary>
/// Called when a module has processed a frame.
/// </summary>
/// <param name="type">The type.</param>
/// <param name="module">The module.</param>
/// <param name="sample">The sample.</param>
/// <returns>pxcmStatus.</returns>
/// <exception cref="System.NotImplementedException">Unknown type.</exception>
pxcmStatus OnModuleProcessedFrame(int type, PXCMBase module, PXCMCapture.Sample sample)
{
    // Dispatch the module to its matching callback.
    if (type == PXCMFaceModule.CUID)
    {
        OnFaceCallback(module as PXCMFaceModule);
    }
    else if (type == PXCMEmotion.CUID)
    {
        OnEmotionCallback(module as PXCMEmotion);
    }
    else
    {
        throw new NotImplementedException("Unknown type.");
    }

    // Forward whichever image streams are present on this sample.
    if (sample.color != null)
    {
        OnColourImage(this, new EventArgs <PXCMImage>(sample.color));
    }

    if (sample.depth != null)
    {
        OnDepthImage(this, new EventArgs <PXCMImage>(sample.depth));
    }

    if (sample.ir != null)
    {
        OnInfraredImage(this, new EventArgs <PXCMImage>(sample.ir));
    }

    // return NO_ERROR to continue, or any error to abort.
    return (pxcmStatus.PXCM_STATUS_NO_ERROR);
}
// Redraws the face overlay: bounding box, landmark dots, the three expression
// labels (high / possible / missing intensity) and the heart-rate text.
public void DrawUI(PXCMCapture.Sample sample)
{
    this.canvas.Children.Clear();

    // Draw a box around the face.
    if (this.faceBox.HasValue)
    {
        this.canvas.Children.Add(this.MakeRectangle(this.faceBox.Value));
    }

    // Draw circles for each of the facial landmarks.
    if (this.landmarks != null)
    {
        foreach (var landmark in this.landmarks)
        {
            this.canvas.Children.Add(this.MakeEllipse(landmark.image));
        }
    }

    // FIX: materialise each bucket once. The original chained deferred Where()
    // queries, so this.expressions was re-enumerated for every label below.
    var medIntensityExpressions =
        this.expressions.Where(e => e.Value > LOW_INTENSITY).ToList();
    var highIntensityExpressions =
        medIntensityExpressions.Where(e => e.Value > HIGH_INTENSITY).ToList();
    var missingExpressions =
        this.expressions.Where(e => e.Value <= LOW_INTENSITY).ToList();

    this.PopulateLabelFromExpressions(this.txtExpressions, highIntensityExpressions);
    this.PopulateLabelFromExpressions(this.txtPossibleExpressions, medIntensityExpressions);
    this.PopulateLabelFromExpressions(this.txtMissingExpressions, missingExpressions);

    this.txtHeartRate.Text = this.heartRate.HasValue
        ? $"{this.heartRate:G2} bpm"
        : string.Empty;
}
// Fetches the current sample from the camera pipeline.
PXCMCapture.Sample getCameraSample()
{
    return (psm.QuerySample());
}
// Face-frame update: refreshes the colour preview and, for each detected
// face, draws its bounding rectangle and pose (yaw/pitch/roll) labels.
private void updateFaceFrame()
{
    // Grab the current frame's sample and refresh the colour preview.
    PXCMCapture.Sample sample = senceManager.QuerySample();
    UpdateColorImage(sample.color);

    // Refresh the SenseManager face-module data.
    faceData.Update();

    // Number of faces detected this frame.
    int numFaces = faceData.QueryNumberOfDetectedFaces();
    // FIX: the pose array holds POSE_MAXFACES entries and rect/tb are indexed in
    // step with it; the original used the raw face count and would overrun when
    // more faces appeared. NOTE(review): confirm rect/tb are sized POSE_MAXFACES.
    if (numFaces > POSE_MAXFACES)
    {
        numFaces = POSE_MAXFACES;
    }

    // Per-face pose angle storage.
    PXCMFaceData.PoseEulerAngles[] poseAngle = new PXCMFaceData.PoseEulerAngles[POSE_MAXFACES];

    if (senceManager != null)
    {
        // Query and draw each face.
        for (int i = 0; i < numFaces; ++i)
        {
            // Fetch the face's data.
            PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

            // Face position is obtained from the depth stream.
            var detection = face.QueryDetection();
            if (detection != null)
            {
                PXCMRectI32 faceRect;
                detection.QueryBoundingRect(out faceRect);

                // Fit the overlay rectangle to the face position.
                TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                rect[i].Width = faceRect.w;
                rect[i].Height = faceRect.h;
                rect[i].Stroke = Brushes.Blue;
                rect[i].StrokeThickness = 3;
                rect[i].RenderTransform = transform;

                // Pose (face orientation): only available when depth is in use.
                PXCMFaceData.PoseData pose = face.QueryPose();
                if (pose != null)
                {
                    // Anchor the three pose labels above the face rectangle.
                    tb[i, 0].RenderTransform = new TranslateTransform(transform.X, transform.Y - 30);
                    tb[i, 1].RenderTransform = new TranslateTransform(transform.X, transform.Y - 60);
                    tb[i, 2].RenderTransform = new TranslateTransform(transform.X, transform.Y - 90);

                    // Yaw / pitch / roll of the face.
                    pose.QueryPoseAngles(out poseAngle[i]);
                    tb[i, 0].Text = "pitch:" + poseAngle[i].pitch;
                    tb[i, 1].Text = "roll:" + poseAngle[i].roll;
                    tb[i, 2].Text = "yaw:" + poseAngle[i].yaw;
                }
            }
        }
    }
}
// Sample callback: forwards the sample to the registered handler, if any.
private pxcmStatus OnNewSample(int mid, PXCMCapture.Sample sample)
{
    m_OnSample?.Invoke(sample);
    return (pxcmStatus.PXCM_STATUS_NO_ERROR);
}
// Module callback: refreshes hand-tracking data whenever the hand module fires.
private pxcmStatus onModuleProcessedFrame(int mid, PXCMBase module, PXCMCapture.Sample sample)
{
    bool isHandModule = mid == PXCMHandModule.CUID;

    if (isHandModule)
    {
        this._handData.Update();
    }

    return (pxcmStatus.PXCM_STATUS_NO_ERROR);
}
// One-shot capture demo: spins up a SenseManager pipeline, grabs a single
// synchronised colour+depth frame, shows both as bitmaps and tears down.
private void Button_Click(object sender, RoutedEventArgs e)
{
    // Get instance of SenseManager
    PXCMSession session = PXCMSession.CreateInstance();

    // Get RS version
    PXCMSession.ImplVersion version = session.QueryVersion();
    textBox1.Text = version.major.ToString() + "." + version.minor.ToString();

    // setup Pipeline
    PXCMSenseManager sm = session.CreateSenseManager();

    // Get streams ready
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);

    // Init Pipeline
    sm.Init();

    // Get samples
    pxcmStatus status = sm.AcquireFrame(true); // Synchronous capturing
    PXCMCapture.Sample sample = sm.QuerySample();

    // Convert samples to image
    PXCMImage image = sample.color;
    PXCMImage dimage = sample.depth;

    PXCMImage.ImageData data;
    image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
    WriteableBitmap wbm = data.ToWritableBitmap(0, image.info.width, image.info.height, 96.0, 96.0);

    PXCMImage.ImageData data2;
    dimage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out data2);
    WriteableBitmap wbm2 = data2.ToWritableBitmap(0, dimage.info.width, dimage.info.height, 96.0, 96.0);

    // Display image
    imageRGB.Source = wbm;
    imageDepth.Source = wbm2;

    // Clean up
    image.ReleaseAccess(data);
    // FIX: data2 was acquired on the depth image, but the original released it
    // on the colour image (image.ReleaseAccess(data2)), leaking the depth lock.
    dimage.ReleaseAccess(data2);
    sm.ReleaseFrame();
    sm.Close();
    session.Dispose();
}
// Pushes the latest colour frame into the native texture, unless we are
// already tearing the pipeline down.
private void OnSampleCallback(PXCMCapture.Sample sample)
{
    if (!shuttingDown)
    {
        UseTexturePlugin.CopyPXCImageToTexture(sample.color, m_texColorNative);
    }
}
// Clusters the RGB/IR/depth point set for this sample and, when a target was
// found and the ROS publisher is running, publishes the target's pose.
private void computePixelQualityFromClusters(PXCMCapture.Sample sample, managed_obj_detector.ProcessParams processParams)
{
    this.loadRgbIrDXyzPoints(sample);
    obj_detector.clusterize(this.rgb_ir_d_xyz_points, processParams);

    bool shouldPublish = rosPublisher.isStarted() && obj_detector.GotTarget;
    if (shouldPublish)
    {
        var target = obj_detector.TargetXyz;
        rosPublisher.publishPose(target.x, target.y, target.z);
    }
}
// Captures one RealSense depth frame, scans it for any pixel closer than
// minimumDistanceMm (Pass/Fail), and samples four fixed probe pixels (a-d).
void RSCapture()
{
    //----------------------RS---------------------
    /* Make sure PXCMSenseManager Instance is Initialized */
    if (psm == null)
    {
        LogText.text = "PXCMSM Failed";
        return;
    }

    /* Wait until any frame data is available true(aligned) false(unaligned) */
    if (psm.AcquireFrame(true) != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        LogText.text = "Waiting...";
        return;
    }

    /* Retrieve a sample from the camera */
    PXCMCapture.Sample sample = psm.QuerySample();
    // FIX: the original only used this null check to set the label and then
    // dereferenced sample anyway; bail out (releasing the frame) when null.
    if (sample == null)
    {
        psm.ReleaseFrame();
        return;
    }
    LogText.text = "Capturing...";

    //-----UVMap-----//
    PXCMImage.ImageData imageData = new PXCMImage.ImageData();
    sample.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ_WRITE, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, PXCMImage.Option.OPTION_ANY, out imageData);

    bool found = false;
    unsafe
    {
        UInt16 *basePtr = (UInt16 *)imageData.planes[0].ToPointer();
        ulong length = (ulong)(sample.depth.info.width * sample.depth.info.height);

        // Scan with a separate cursor for any pixel inside the minimum distance.
        UInt16 *scan = basePtr;
        for (ulong i = 0; ((i < length) && !found); i++, scan++)
        {
            found = (*scan > 0) && (*scan < minimumDistanceMm);
        }

        // FIX: the original indexed through the pointer it had just advanced in
        // the scan loop, so a/b/c/d were read far past the intended pixels;
        // index from the untouched base pointer instead.
        // indexer = row*width + column;
        a = basePtr[120 * sample.depth.info.width + 320];
        b = basePtr[360 * sample.depth.info.width + 320];
        c = basePtr[240 * sample.depth.info.width + 160];
        d = basePtr[240 * sample.depth.info.width + 480];
    }

    // FIX: release the access lock taken above (the original leaked it).
    sample.depth.ReleaseAccess(imageData);

    if (found)
    {
        Output.text = "Pass";
    }
    else
    {
        Output.text = "Fail";
    }
    //-----EOUVM-----//

    /* Release the frame to process the next frame */
    // NOTE(review): depthImage keeps a reference to an image that belongs to a
    // frame released immediately below — confirm the SDK keeps it valid.
    depthImage = sample.depth;
    psm.ReleaseFrame();
    //---------------------EORS--------------------
}
// Renders an incoming IR frame into the IR preview window, provided the
// window still exists and has not been disposed.
pxcmStatus newIRFrame(int mid, PXCMCapture.Sample sample)
{
    bool canRender = sample.ir != null && irImageForm != null && !irImageForm.IsDisposed;

    if (canRender)
    {
        PXCMImage.ImageData irData = new PXCMImage.ImageData();
        sample.ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out irData);
        irImageForm.Image = irData.ToBitmap(0, sample.ir.info.width, sample.ir.info.height);
        sample.ir.ReleaseAccess(irData);
    }

    return (pxcmStatus.PXCM_STATUS_NO_ERROR);
}
// Worker loop: pumps frames, converts the colour stream to a bitmap, reads the
// first face's average depth, and updates the UI until the pipeline errors.
private void ProcessingThread()
{
    // Start AcquireFrame/ReleaseFrame loop
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Acquire the color image data
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        // Get face data
        if (faceData != null)
        {
            faceData.Update();
            numFacesDetected = faceData.QueryNumberOfDetectedFaces();

            if (numFacesDetected > 0)
            {
                // Get the first face detected (index 0)
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                // FIX: the original null-checked face only AFTER calling
                // QueryDetection() on it, and never null-checked the detection
                // data before reading the average depth.
                if (face != null)
                {
                    // Retrieve face location data
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    if (faceDetectionData != null)
                    {
                        faceDetectionData.QueryFaceAverageDepth(out currentFaceDepth);
                    }

                    // Process face recognition data
                    userId = "Unrecognized";
                }
            }
            else
            {
                userId = "No users in view";
            }
        }

        // Display the color stream and other UI elements
        UpdateUI(colorBitmap);

        // Release resources
        colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        sample.color.Dispose();

        // Release the frame
        senseManager.ReleaseFrame();
    }
}
// Fans a processed frame out to the renderers registered for this module id:
// data processing runs here on the callback thread, drawing is marshalled
// asynchronously onto the UI thread.
pxcmStatus OnModuleProcessedFrame(int mid, PXCMBase module, PXCMCapture.Sample sample)
{
    ForAllRenderers(mid, renderer => renderer.ProcessFrame(sample));

    Dispatcher.InvokeAsync(
        () => ForAllRenderers(mid, renderer => renderer.DrawUI(sample)));

    return (pxcmStatus.PXCM_STATUS_NO_ERROR);
}
// Caches the colour plane of the incoming sample for later drawing.
// NOTE(review): the ACCESS_READ lock acquired here is NOT released in this
// method — presumably a companion draw/release step frees currentColorImage
// afterwards; verify against the caller.
public void ProcessFrame(PXCMCapture.Sample sample)
{
    // Drop the previous frame's reference before acquiring the new one.
    this.currentColorImage = null;

    PXCMImage.ImageData colorImage;

    // Only keep the image (and record its dimensions) if the acquire succeeded.
    if (sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ,
                                   PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32,
                                   out colorImage).Succeeded())
    {
        this.InitialiseImageDimensions(sample.color);
        this.currentColorImage = colorImage;
    }
}
// Face-frame update: refreshes the colour preview and, for each detected
// face, draws its bounding rectangle and a heart-rate label.
private void updateFaceFrame()
{
    // Grab the current frame's sample and refresh the colour preview.
    PXCMCapture.Sample sample = senceManager.QuerySample();
    UpdateColorImage(sample.color);

    // Refresh the SenseManager face-module data.
    faceData.Update();

    // Number of faces detected this frame.
    // NOTE(review): rect[] and tb[] are indexed by the raw face count below —
    // confirm they are sized for the maximum face count the module can report.
    int numFaces = faceData.QueryNumberOfDetectedFaces();

    if (senceManager != null)
    {
        // Query and draw each face.
        for (int i = 0; i < numFaces; ++i)
        {
            // Fetch the face's data.
            PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

            // Face position is obtained from the depth stream.
            var detection = face.QueryDetection();
            if (detection != null)
            {
                PXCMRectI32 faceRect;
                detection.QueryBoundingRect(out faceRect);

                // Fit the overlay rectangle to the face position.
                TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                rect[i].Width = faceRect.w;
                rect[i].Height = faceRect.h;
                rect[i].Stroke = Brushes.Blue;
                rect[i].StrokeThickness = 3;
                rect[i].RenderTransform = transform;

                // Pulse estimation data for this face.
                PXCMFaceData.PulseData pulse = face.QueryPulse();
                if (pulse != null)
                {
                    // Anchor the heart-rate label just above the face rectangle.
                    tb[i].RenderTransform = new TranslateTransform(transform.X, transform.Y - 30);

                    // Display the estimated heart rate.
                    float hrate = pulse.QueryHeartRate();
                    tb[i].Text = "HeartRate:" + hrate;
                }
            }
        }
    }
}
// Renders either the colour or the depth stream as a texture,
// depending on the showColour toggle.
void drawCameraVision()
{
    // Camera Sample
    PXCMCapture.Sample sample = getCameraSample();

    textureFromSample(showColour ? sample.color : sample.depth);
}
/// <summary>
/// Device-specific implementation of Update.
/// Updates data buffers of all active channels with data of current frame.
/// </summary>
/// <remarks>This method is implicitely called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
/// <seealso cref="Camera.Update"/>
/// <exception cref="TimeoutException">
/// Thrown (type kept for caller compatibility, despite its name) when the
/// colour frame's timestamp does not advance, i.e. a stale frame was delivered.
/// </exception>
protected override unsafe void UpdateImpl()
{
    bool synced = true;

    // Wait until a frame is ready: Synchronized or Asynchronous
    pxcmStatus status;
    status = pp.AcquireFrame(synced);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        log.Error(Name + ": " + status.ToString());
        return;
    }

    // get image
    sample = pp.QuerySample();

    // color image
    if (sample.color != null)
    {
        // FIX: the original read sample.color.timeStamp BEFORE this null check,
        // crashing whenever no colour image was delivered; the stale-frame
        // guard now sits inside the guarded branch.
        long imgTS = sample.color.timeStamp;
        if (imgTS <= lastTimeStamp)
        {
            throw new TimeoutException("THIS IS NOT A TIMEOUT!");
        }
        lastTimeStamp = imgTS;

        PXCMImage.ImageData colorData;
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

        // Copy the 24-bit RGB plane into a GDI bitmap...
        Bitmap bmp = new Bitmap(sample.color.info.width, sample.color.info.height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        var bmpData = bmp.LockBits(new Rectangle(0, 0, sample.color.info.width, sample.color.info.height), System.Drawing.Imaging.ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        memcpy(bmpData.Scan0, colorData.planes[0], new UIntPtr(3 * (uint)sample.color.info.width * (uint)sample.color.info.height));
        bmp.UnlockBits(bmpData);

        // ...then convert to premultiplied 32-bit ARGB for the camera image.
        Bitmap bmp32 = bmp.Clone(new Rectangle(0, 0, widthColor, heightColor), System.Drawing.Imaging.PixelFormat.Format32bppPArgb);
        // FIX: dispose the intermediate 24-bit bitmap (the original leaked it).
        bmp.Dispose();
        colorImage = new ColorCameraImage(bmp32);
        sample.color.ReleaseAccess(colorData);
    }

    // depth
    PXCMImage.ImageData depthData;
    if (sample.depth != null)
    {
        sample.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out depthData);
        depthImage = new FloatCameraImage(sample.depth.info.width, sample.depth.info.height);
        CopyImageWithStride(sample.depth.info.width, sample.depth.info.height, 4, depthData, new IntPtr(depthImage.Data));
        sample.depth.ReleaseAccess(depthData);
    }

    pp.ReleaseFrame();
}
// Converts the sample's colour stream to a bitmap, paints the face landmarks
// on top of it (white = named landmark, yellow = unnamed), freezes the bitmap
// and hands it to the UI thread.
private void ElaborateSample(PXCMCapture.Sample sample, PXCMFaceData.Face face)
{
    if (sample == null)
    {
        return;
    }

    WriteableBitmap imageRGB = null;
    if (sample.color != null)
    {
        imageRGB = sample.color.GetImage();
    }

    if (face != null)
    {
        PXCMFaceData.LandmarksData landmarkData = face.QueryLandmarks();
        PXCMFaceData.LandmarkPoint[] landmarkPoints = null;

        // FIX: QueryLandmarks() can return null; the original dereferenced it
        // unconditionally. Also guard imageRGB, which is null when the sample
        // carried no colour image (the original would NRE on FillEllipseCentered).
        if (landmarkData != null && imageRGB != null && landmarkData.QueryPoints(out landmarkPoints))
        {
            foreach (var point in landmarkPoints)
            {
                // Only draw points the tracker is reasonably confident about.
                if (point.confidenceImage > 50)
                {
                    if (point.source.alias != PXCMFaceData.LandmarkType.LANDMARK_NOT_NAMED)
                    {
                        imageRGB.FillEllipseCentered((int)point.image.x, (int)point.image.y, 4, 4, Colors.White);
                    }
                    else
                    {
                        imageRGB.FillEllipseCentered((int)point.image.x, (int)point.image.y, 4, 4, Colors.Yellow);
                    }
                }
            }
        }
    }

    // Freeze so the bitmap can safely cross to the UI thread.
    if (imageRGB != null)
    {
        imageRGB.Freeze();
    }

    Dispatcher.Invoke(() => { this.ImageRGB = imageRGB; });
    // FIX: removed a stray Process.GetCurrentProcess() call whose result was
    // discarded — it only allocated an unreleased process handle.
}
//private void TrackLandmarks(PXCMFaceData.Face faceDataFace)
//{
//    PXCMFaceData.LandmarksData landmarksData = faceDataFace.QueryLandmarks();
//    PXCMFaceData.LandmarkPoint[] landmarkPoints;
//    if (landmarksData.QueryPointsByGroup(PXCMFaceData.LandmarksGroupType.LANDMARK_GROUP_NOSE, out landmarkPoints))
//    {
//        foreach (var landmarkPoint in landmarkPoints)
//        {
//            Console.WriteLine("landmarkPoint: " + landmarkPoint.world.ToString());
//        }
//    }
//}

//private void TrackGaze(PXCMFaceData.Face faceDataFace)
//{
//    faceDataFace.
//    faceDataFace.QueryGaze().QueryGazePoint().confidence
//}

// Copies the sample's colour plane into a frozen WriteableBitmap for display.
private void TrackImageData(PXCMCapture.Sample captureSample)
{
    PXCMImage imageColor = captureSample.color;

    PXCMImage.ImageData imageData;
    pxcmStatus aquireAccessStatus = imageColor.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageData);

    if (aquireAccessStatus >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        writeableBitmap = imageData.ToWritableBitmap(0, imageColor.info.width, imageColor.info.height, 96, 96);
        // Freeze so the bitmap can be handed to the UI thread.
        writeableBitmap.Freeze();
        // FIX: release inside the success branch only; the original released
        // unconditionally, handing back potentially-invalid data after a
        // failed acquire.
        imageColor.ReleaseAccess(imageData);
    }
}
// Central per-frame update for the SenseManager hub:
//  1. pauses/resumes SDK modules based on each option's reference count,
//  2. refreshes speech commands when they changed,
//  3. acquires one frame and republishes every enabled stream (colour, depth,
//     point cloud, UV map, IR, 3D segmentation) plus face/hand module data,
//  4. polls recognised speech commands.
void Update()
{
    //Dynamically Pause/Enable Modules
    int numberOfEnabledModules = 0;
    foreach (var option in _senseOptions)
    {
        if (option.RefCounter == 0 && option.Enabled)
        {
            // No more consumers: pause the SDK module and mark it disabled.
            if (option.ModuleCUID > 0)
            {
                SenseManager.PauseModule(option.ModuleCUID, true);
            }
            option.Enabled = false;
        }
        else if (option.RefCounter > 0 && !option.Enabled)
        {
            // First consumer appeared: restart the pipeline if this option was
            // never initialised, then resume its module.
            if (!option.Initialized)
            {
                OnDisable();
                OnEnable();
                Start();
            }
            if (option.ModuleCUID > 0)
            {
                SenseManager.PauseModule(option.ModuleCUID, false);
            }
            option.Enabled = true;
        }
        if (option.Enabled)
        {
            numberOfEnabledModules++;
        }
    }

    //Update Speech commands if changed
    if (_speechCommandsChanged)
    {
        UpdateSpeechCommands();
        SpeechManager.Reset();
    }

    // Every frame update all the data
    if (Initialized && numberOfEnabledModules > 0)
    {
        // 100 ms timeout, synchronised acquire.
        _sts = SenseManager.AcquireFrame(true, 100);
        if (_sts == pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Colour stream: swap the published image, add-ref'ing the new one
            // so it survives ReleaseFrame below.
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Enabled)
            {
                if (ImageRgbOutput != null)
                {
                    ImageRgbOutput.Dispose();
                }
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.color != null)
                {
                    ImageRgbOutput = _captureSample.color;
                    ImageRgbOutput.QueryInstance<PXCMAddRef>().AddRef();
                }
            }

            // Depth stream (also needed for the point cloud).
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Enabled ||
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.PointCloud).Enabled)
            {
                if (ImageDepthOutput != null)
                {
                    ImageDepthOutput.Dispose();
                }
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.depth != null)
                {
                    ImageDepthOutput = _captureSample.depth;
                    ImageDepthOutput.QueryInstance<PXCMAddRef>().AddRef();

                    // One-time blob extractor initialisation from the depth info.
                    if (!_isInitBlob)
                    {
                        PXCMImage.ImageInfo info = ImageDepthOutput.QueryInfo();
                        BlobExtractor.Init(info);
                        BlobExtractor.SetMaxBlobs(MaxBlobsToDetect);
                        _isInitBlob = true;
                    }

                    // Lazily size the point-cloud buffer to the depth resolution.
                    if (PointCloud == null)
                    {
                        PointCloud = new PXCMPoint3DF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                    }

                    // Project depth to 3D vertices when the point cloud is on.
                    if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.PointCloud).Enabled)
                    {
                        // NOTE(review): this null check is redundant — PointCloud
                        // was already allocated just above.
                        if (PointCloud == null)
                        {
                            PointCloud = new PXCMPoint3DF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                        }
                        _sts = Projection.QueryVertices(ImageDepthOutput, PointCloud);
                    }

                    // Depth-to-colour UV map, when enabled.
                    if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.UVMap).Enabled)
                    {
                        if (UvMap == null)
                        {
                            UvMap = new PXCMPointF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                        }
                        Projection.QueryUVMap(ImageDepthOutput, UvMap);
                    }
                }
            }

            // IR stream: same swap/add-ref pattern as colour.
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Enabled)
            {
                if (ImageIROutput != null)
                {
                    ImageIROutput.Dispose();
                }
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.ir != null)
                {
                    ImageIROutput = _captureSample.ir;
                    ImageIROutput.QueryInstance<PXCMAddRef>().AddRef();
                }
            }

            // 3D segmentation output, when enabled.
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Enabled)
            {
                if (Image3DSegmentationOutput != null)
                {
                    Image3DSegmentationOutput.Dispose();
                }
                PXCM3DSeg seg = SenseManager.Query3DSeg();
                if (seg != null)
                {
                    Image3DSegmentationOutput = seg.AcquireSegmentedImage();
                }
            }

            // Face / hand module data refresh.
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Face).Enabled)
            {
                FaceModuleOutput.Update();
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Hand).Enabled)
            {
                HandDataOutput.Update();
            }

            // Drop the cached sample and let the pipeline continue.
            _captureSample = null;
            SenseManager.ReleaseFrame();
        }

        //Speech
        if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Enabled)
        {
            SpeechManager.QueryRecognizedCommands(out SpeechOutput);
        }
    }
}