/// <summary>
/// Copies the native pixel data of a PXCM image into a destination texture.
/// Does nothing when either the source image or the destination pointer is missing.
/// </summary>
public static void CopyPXCImageToTexture(PXCMImage srcImage, System.IntPtr dstTexture)
{
    if (srcImage == null || dstTexture == System.IntPtr.Zero)
    {
        return;
    }
    CopyTextureData(srcImage.QueryNativePointer(), dstTexture);
}
/// <summary>
/// Pushes the given PXCM image into the Texture2D shown by this game object,
/// lazily creating the texture (and hooking it to the renderer) on first use.
/// </summary>
/// <param name='image'>Source image; ignored when null.</param>
private void SetTexture(PXCMImage image)
{
    if (image == null)
    {
        return;
    }

    if (_texture == null)
    {
        // First frame: remember the image size and allocate a matching texture.
        size.width = image.info.width;
        size.height = image.info.height;
        _texture = new Texture2D((int)size.width, (int)size.height, TextureFormat.ARGB32, false);
        // Attach the texture to the renderer's shared material.
        GetComponent<Renderer>().sharedMaterial.mainTexture = _texture;
    }

    // Lock the image, blit it into the texture, unlock, then upload to the GPU.
    PXCMImage.ImageData pixelData;
    pxcmStatus status = image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out pixelData);
    if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        pixelData.ToTexture2D(0, _texture);
        image.ReleaseAccess(pixelData);
        _texture.Apply();
    }
}
/// <summary>
/// Converts the latest depth frame to a BitmapSource and shows it in ImageDepth.
/// </summary>
/// <param name="depthFrame">Depth frame; ignored when null.</param>
/// <exception cref="Exception">Thrown when the depth pixels cannot be acquired.</exception>
private void UpdateDepthImage(PXCMImage depthFrame)
{
    if (depthFrame == null)
    {
        return;
    }

    // Lock the frame as 32-bit BGR pixel data.
    PXCMImage.ImageData data;
    pxcmStatus ret = depthFrame.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
    if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("Depth画像の取得に失敗");
    }

    // Wrap the raw bytes in a BitmapSource; the stride is the pitch of plane 0.
    var info = depthFrame.QueryInfo();
    var byteCount = data.pitches[0] * info.height;
    var pixels = data.ToByteArray(0, byteCount);
    ImageDepth.Source = BitmapSource.Create(info.width, info.height, 96, 96, PixelFormats.Bgr32, null, pixels, data.pitches[0]);

    // Unlock the frame.
    depthFrame.ReleaseAccess(data);
}
/// <summary>
/// Renders one color frame to the form and ticks the frame timer.
/// Silently skips the frame when the pixel data cannot be locked.
/// </summary>
private void DisplayPicture(PXCMImage image)
{
    PXCMImage.ImageData data;
    pxcmStatus status = image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }
    m_form.DrawBitmap(data.ToBitmap(0, image.info.width, image.info.height));
    m_timer.Tick("");
    image.ReleaseAccess(data);
}
/// <summary>
/// Builds the projection helper: caches the device's invalid-depth marker
/// value and preallocates the UV map (depth-sized) and inverse UV map
/// (color-sized) lookup arrays.
/// </summary>
public Projection(PXCMSession session, PXCMCapture.Device device, PXCMImage.ImageInfo dinfo, PXCMImage.ImageInfo cinfo)
{
    // Marker value the device reports for unreliable depth pixels.
    invalid_value = device.QueryDepthLowConfidenceValue();
    projection = device.CreateProjection();
    uvmap = new PXCMPointF32[dinfo.width * dinfo.height];
    invuvmap = new PXCMPointF32[cinfo.width * cinfo.height];
}
/// <summary>
/// Per-frame capture callback: swaps the cached color sample for the new one,
/// taking an SDK reference so the image survives past the callback.
/// </summary>
private void OnSample(PXCMCapture.Sample sample)
{
    if (m_shuttingDown)
    {
        return;
    }
    // lock(this) is kept deliberately: the sibling handlers guard m_sample
    // with the same monitor.
    lock (this)
    {
        m_sample?.Dispose();
        m_sample = sample.color;
        m_sample.QueryInstance<PXCMAddRef>().AddRef();
    }
}
/// <summary>Releases the cached sample when the component is disabled.</summary>
private void OnDisable()
{
    lock (this)
    {
        if (m_sample == null)
        {
            return;
        }
        m_sample.Dispose();
        m_sample = null;
    }
}
/// <summary>
/// Flags shutdown (so OnSample stops caching new frames) and frees any
/// cached sample.
/// </summary>
private void OnShutdown()
{
    m_shuttingDown = true;
    lock (this)
    {
        var cached = m_sample;
        m_sample = null;
        cached?.Dispose();
    }
}
/// <summary>
/// Creates the projection helper for object detection: caches the frame
/// dimensions, builds the native detector, and preallocates the per-pixel
/// coordinate and RGB/IR/depth point buffers.
/// </summary>
public Projection(PXCMSession session, PXCMCapture.Device device, PXCMImage.ImageInfo dinfo)
{
    projection = device.CreateProjection();
    height = dinfo.height;
    width = dinfo.width;
    numOfPixels = dinfo.width * dinfo.height;
    // Depth values the device flags as low-confidence are treated as invalid
    // by the detector.
    UInt16 lowConfidence = device.QueryDepthLowConfidenceValue();
    obj_detector = new managed_obj_detector.ObjDetector(dinfo.width, dinfo.height, lowConfidence);
    coords = new PXCMPoint3DF32[numOfPixels];
    rgb_ir_d_xyz_points = new managed_obj_detector.RgbIrDXyzPoint[numOfPixels];
}
/// <summary>
/// Converts the color frame (RGB24, fixed COLOR_WIDTH x COLOR_HEIGHT) to a
/// BitmapSource and shows it in ImageColor. Skips the frame on lock failure.
/// </summary>
private void UpdateColorImage(PXCMImage colorFrame)
{
    // Lock the frame as 24-bit BGR pixel data.
    PXCMImage.ImageData data;
    pxcmStatus ret = colorFrame.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out data);
    if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    // 3 bytes per pixel; the stride is width * 3.
    var pixels = data.ToByteArray(0, COLOR_WIDTH * COLOR_HEIGHT * 3);
    ImageColor.Source = BitmapSource.Create(COLOR_WIDTH, COLOR_HEIGHT, 96, 96, PixelFormats.Bgr24, null, pixels, COLOR_WIDTH * 3);

    colorFrame.ReleaseAccess(data);
}
/// <summary>
/// Grabs the current depth frame, projects it to camera-space vertices, and
/// returns a downsampled 6 x N point table. Rows 0..2 hold -x/10, y/10, z/10
/// of each sampled vertex; rows 3..5 are never written here.
/// </summary>
/// <returns>The sampled point table, or null when no depth image is available.</returns>
public double[,] GetDepthData()
{
    Depth = null;
    PXCMCapture.Sample Image = sm.QuerySample();
    PXCMImage depth = Image.depth;
    if (depth != null)
    {
        var DepthWidth = depth.info.width;
        var DepthHeight = depth.info.height;
        Device = sm.captureManager.QueryDevice();
        Device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
        Projection = Device.CreateProjection();
        // One camera-space vertex per depth pixel.
        PXCMPoint3DF32[] Vertices = new PXCMPoint3DF32[DepthHeight * DepthWidth];
        Status = Projection.QueryVertices(depth, Vertices);
        // Capacity assumes roughly 1-in-100 sampling; see the skip logic below.
        Depth = new double[6, (DepthWidth * DepthHeight) / 100];
        int j = 0;
        // Sample every 10th vertex, and additionally jump 6400 indices whenever
        // i lands on a multiple of 640.
        // NOTE(review): the 640/6400 constants look tied to a 640-pixel-wide
        // frame (skip ~10 rows at each row start) — TODO confirm; with other
        // widths j could overrun the (W*H)/100 capacity.
        for (int i = 0; i < DepthWidth * DepthHeight; i += 10)
        {
            Depth[0, j] = -Vertices[i].x / 10; // x negated (mirrored), scaled by 1/10
            Depth[1, j] = Vertices[i].y / 10;
            Depth[2, j] = Vertices[i].z / 10;
            j++;
            if (i % 640 == 0)
            {
                i += 6400;
            }
        }
        Projection.Dispose();
    }
    sm.ReleaseFrame();
    return (Depth);
}
/// <summary>
/// Fills the .ir channel of every point from the image's RGB32 pixels
/// (byte 0 of each 4-byte pixel), or zeroes the channel when the pixels
/// cannot be read.
/// </summary>
private void loadRgbIrDXyzPointIr(PXCMImage irImage)
{
    byte[] pixels = getRGB32Pixels(irImage);
    int count = this.rgb_ir_d_xyz_points.Length;
    if (pixels == null)
    {
        for (int i = 0; i < count; ++i)
        {
            rgb_ir_d_xyz_points[i].ir = 0;
        }
        return;
    }
    for (int i = 0; i < count; ++i)
    {
        rgb_ir_d_xyz_points[i].ir = pixels[4 * i + 0];
    }
}
/// <summary>
/// Grabs the current color frame, converts it to BMP bytes, and publishes it
/// on the camera's image stream.
/// </summary>
public void Loop(LoopObjects loopObjects)
{
    var sample = _camera.Manager.QuerySample();
    PXCMImage image = sample?.color;
    if (image == null)
    {
        return;
    }

    // Bail out if the frame cannot be locked; imageData would be unusable.
    // (The original ignored the AcquireAccess status.)
    PXCMImage.ImageData imageData;
    pxcmStatus status = image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageData);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    try
    {
        // Dispose the GDI bitmap once its bytes are copied out (the original
        // leaked one bitmap per frame).
        using (Bitmap bitmap = imageData.ToBitmap(0, image.info.width, image.info.height))
        using (var ms = new MemoryStream())
        {
            bitmap.Save(ms, ImageFormat.Bmp);
            _camera.ImageStream.CurrentBitmapImage = ms.ToArray();
        }
    }
    finally
    {
        // Always unlock the frame, even if the BMP encode throws.
        image.ReleaseAccess(imageData);
    }
}
/// <summary>
/// Frame callback: draws the current color image onto the camera panel and
/// keeps a private copy in cameraImage.
/// </summary>
/// <returns>Always true so the pipeline keeps streaming.</returns>
public override bool OnNewFrame()
{
    if (isCapturing)
    {
        try
        {
            PXCMImage img = QueryImage(PXCMImage.ImageType.IMAGE_TYPE_COLOR);
            Bitmap bitmap;
            sts = img.QueryBitmap(session, out bitmap);
            // Dispose the Graphics handle — the original leaked one per frame.
            using (Graphics g = cameraPanel.CreateGraphics())
            {
                g.DrawImage(bitmap, new Point(0, 0));
            }
            cameraImage = new Bitmap(bitmap);
        }
        catch (Exception)
        {
            // Best-effort rendering: a single bad frame must not stop capture.
        }
    }
    return true;
}
/// <summary>
/// Exposes the manager's color stream as an observable of Bitmap frames,
/// polled at roughly the color stream's FPS. Frames that cannot be acquired
/// are filtered out.
/// </summary>
public static IObservable<Bitmap> AsObservable(this PXCMSenseManager manager)
{
    // TODO: derive the interval from the actual stream profile instead of colorStreamFPS.
    var interval = Math.Floor((double)(1000 / colorStreamFPS));
    return Observable.Interval(TimeSpan.FromMilliseconds(interval))
           .Where(_ => manager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
           .Select(_ =>
    {
        try
        {
            PXCMCapture.Sample sample = manager.QuerySample();
            PXCMImage image = sample.color;
            PXCMImage.ImageData imageData;
            image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageData);
            try
            {
                return imageData.ToBitmap(0, image.info.width, image.info.height);
            }
            finally
            {
                // Unlock even when the bitmap conversion throws.
                image.ReleaseAccess(imageData);
            }
        }
        finally
        {
            // Release the frame unconditionally; the original skipped this on
            // any exception, stalling the pipeline.
            manager.ReleaseFrame();
        }
    });
}
/// <summary>
/// Publishes the current 3D-segmentation image (background-removed view) as
/// BMP bytes on the camera's segmentation stream.
/// </summary>
public void Loop(LoopObjects loopObjects)
{
    var segmentation = _camera.Manager.Query3DSeg();
    if (segmentation == null)
    {
        return;
    }

    PXCMImage image = segmentation.AcquireSegmentedImage();
    if (image == null)
    {
        return;
    }

    // Bail out if the frame cannot be locked (the original ignored the status).
    PXCMImage.ImageData imageData;
    pxcmStatus status = image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageData);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        image.Dispose();
        return;
    }

    try
    {
        PXCMImage.ImageInfo imageInfo = image.QueryInfo();
        using (var bitmap = new Bitmap(imageData.ToBitmap(0, imageInfo.width, imageInfo.height)))
        using (var ms = new MemoryStream())
        {
            bitmap.Save(ms, ImageFormat.Bmp);
            _camera.SegmentationStream.CurrentBitmapImage = ms.ToArray();
        }
    }
    finally
    {
        image.ReleaseAccess(imageData);
        // AcquireSegmentedImage hands out a reference the caller must release;
        // the original leaked it every iteration.
        image.Dispose();
    }
}
/// <summary>
/// Captures a single depth frame and returns its pixels as an int array
/// (width * height entries, F32 depth converted by the SDK).
/// </summary>
/// <returns>The depth pixels of one frame.</returns>
/// <exception cref="Exception">Thrown when a frame cannot be acquired.</exception>
public int[] DepthintArray()
{
    // The original wrapped this in an infinite loop whose every branch exited,
    // followed by unreachable code; a single pass is equivalent.
    if (senseManager.AcquireFrame(true).IsError())
    {
        throw new Exception("Failed to acquire frame");
    }

    PXCMCapture.Sample sample = senseManager.QuerySample();
    PXCMImage depth = sample.depth;

    PXCMImage.ImageData depthData;
    depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out depthData);
    int width = depth.info.width;
    int height = depth.info.height;
    int[] depthPixels = depthData.ToIntArray(0, width * height);

    // Unlock before disposing; the original leaked the access lock.
    depth.ReleaseAccess(depthData);
    depth.Dispose();
    senseManager.ReleaseFrame();
    return depthPixels;
}
/// <summary>
/// One-shot capture demo: shows the SDK version in textBox1, grabs a single
/// color and depth frame, and displays them in image1/image2.
/// </summary>
private void button1_Click(object sender, RoutedEventArgs e)
{
    PXCMSession session = PXCMSession.CreateInstance();
    PXCMSession.ImplVersion version = session.QueryVersion();
    textBox1.Text = version.major.ToString() + "." + version.minor.ToString();

    PXCMSenseManager sm = session.CreateSenseManager();
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 0, 0);
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0);
    sm.Init();

    // Only touch the sample when a frame was actually acquired; the original
    // ignored the status and would NRE on failure.
    pxcmStatus status = sm.AcquireFrame(true);
    if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        PXCMCapture.Sample sample = sm.QuerySample();
        PXCMImage image = sample.color;
        PXCMImage dimage = sample.depth;

        PXCMImage.ImageData data;
        PXCMImage.ImageData data2;
        image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
        dimage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out data2);

        WriteableBitmap wbm = data.ToWritableBitmap(0, image.info.width, image.info.height, 96.0, 96.0);
        WriteableBitmap wbm2 = data2.ToWritableBitmap(0, dimage.info.width, dimage.info.height, 96.0, 96.0);
        image1.Source = wbm;
        image2.Source = wbm2;

        image.ReleaseAccess(data);
        dimage.ReleaseAccess(data2);
        sm.ReleaseFrame();
    }

    sm.Close();
    sm.Dispose(); // the original leaked the SenseManager
    session.Dispose();
}
/// <summary>
/// Captures a single color frame and returns it as a Bitmap.
/// </summary>
/// <returns>The captured color frame.</returns>
/// <exception cref="Exception">Thrown when a frame cannot be acquired.</exception>
public Bitmap ColorSnapshot()
{
    // The original's loop ran exactly once (both branches exited) and had an
    // unreachable break after the throw; a single pass is equivalent.
    if (senseManager.AcquireFrame(true).IsError())
    {
        throw new Exception("Failed to acquire frame");
    }

    PXCMCapture.Sample sample = senseManager.QuerySample();
    PXCMImage colorImage = sample.color;

    PXCMImage.ImageData imageData;
    colorImage.AcquireAccess(PXCMImage.Access.ACCESS_READ, out imageData);
    Bitmap bmp = imageData.ToBitmap(0, colorImage.info.width, colorImage.info.height);

    // Unlock the image before releasing the frame; the original never did.
    colorImage.ReleaseAccess(imageData);
    senseManager.ReleaseFrame();
    return bmp;
}
/// <summary>
/// Converts the color frame (RGB24) to a BitmapSource sized from the frame's
/// own info, records Width/Height, and shows it in ImageColor.
/// </summary>
private void UpdateColorImage(PXCMImage colorFrame)
{
    PXCMImage.ImageInfo info = colorFrame.QueryInfo();
    Width = info.width;
    Height = info.height;

    // Lock the frame as 24-bit BGR pixel data; skip the frame on failure.
    PXCMImage.ImageData data;
    pxcmStatus ret = colorFrame.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out data);
    if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    // 3 bytes per pixel; the stride is width * 3.
    var pixels = data.ToByteArray(0, info.width * info.height * 3);
    ImageColor.Source = BitmapSource.Create(info.width, info.height, 96, 96, PixelFormats.Bgr24, null, pixels, info.width * 3);

    colorFrame.ReleaseAccess(data);
}
/// <summary>
/// Maps every depth pixel (x, y, depth-value) to 3D camera-space coordinates
/// via the SDK projection.
/// </summary>
/// <param name="depth">Depth image; raw depth or an RGB-encoded depth format.</param>
/// <param name="cameraSpacePts">Output array, one point per depth pixel.</param>
/// <exception cref="InvalidOperationException">Thrown when the projection fails.</exception>
public void DepthToCameraCoordinates(PXCMImage depth, PXCMPoint3DF32[] cameraSpacePts)
{
    PXCMImage.ImageData ddata;
    UInt16[] dpixels;
    bool isdepth = (depth.info.format == PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH);
    if (depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, out ddata) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Int32 dpitch = ddata.pitches[0] / sizeof(Int16); /* aligned width in 16-bit pixels */
        Int32 dwidth = (Int32)depth.info.width;
        Int32 dheight = (Int32)depth.info.height;
        dpixels = ddata.ToUShortArray(0, isdepth ? dpitch * dheight : dpitch * dheight * 3);
        depth.ReleaseAccess(ddata);

        /* Build (x, y, z) triplets for the projection call. */
        PXCMPoint3DF32[] dcords = new PXCMPoint3DF32[dwidth * dheight];
        for (Int32 y = 0, k = 0; y < dheight; y++)
        {
            for (Int32 x = 0; x < dwidth; x++, k++)
            {
                dcords[k].x = x;
                dcords[k].y = y;
                // Raw depth reads the pixel directly; 3-channel encoded depth
                // reads channel 2 of each pixel.
                dcords[k].z = isdepth ? dpixels[y * dpitch + x] : dpixels[3 * (y * dpitch + x) + 2];
            }
        }

        pxcmStatus status = projection.ProjectDepthToCamera(dcords, cameraSpacePts);
        // Only genuine errors (< NO_ERROR) are fatal. The original compared
        // with != NO_ERROR, which also threw on positive warning statuses.
        if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new InvalidOperationException("Projection depth to camera failed");
        }
    }
}
/// <summary>
/// Scans the first image plane for zero-valued bytes (mask pixels) and
/// returns their coordinates.
/// </summary>
/// <param name="data">Locked image data; plane 0 is scanned.</param>
/// <param name="size">Number of bytes to scan.</param>
/// <param name="width">Row width used to convert a flat index into a Coord.</param>
/// <returns>One Coord per zero-valued byte, possibly empty.</returns>
public Coord[] handLocation(PXCMImage.ImageData data, int size, int width)
{
    Coord[] locations = new Coord[size];
    IntPtr ptr = data.buffer.planes[0];
    byte[] rgbValues = new byte[size];
    Marshal.Copy(ptr, rgbValues, 0, size);

    int counter = 0;
    for (int i = 0; i < size; i++)
    {
        if (rgbValues[i] == 0)
        {
            locations[counter++] = new Coord(i, width);
        }
    }

    // Trim to the exact number of matches. The original resized to
    // counter - 1, which dropped the last match and threw
    // ArgumentOutOfRangeException (size -1) when there were no matches.
    Array.Resize(ref locations, counter);
    return locations;
}
// Update is called once per frame
/// <summary>
/// Feeds the latest 3D-segmentation image into the texture whenever the
/// toolkit is initialized and the video-segmentation option is active.
/// </summary>
void Update()
{
    if (SenseToolkitManager.Instance == null)
    {
        return;
    }
    if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.VideoSegmentation))
    {
        return;
    }
    // The original tested Initialized twice (once negated, once not); a single
    // guard is equivalent.
    if (!SenseToolkitManager.Instance.Initialized)
    {
        return;
    }
    SetTexture(SenseToolkitManager.Instance.Image3DSegmentationOutput);
}
/// <summary>
/// Pushes the given PXCM image into the Texture2D attached to this object's
/// CanvasRenderer, lazily creating the texture on first use.
/// </summary>
/// <param name='image'>Source image; ignored when null.</param>
private void SetTexture(PXCMImage image)
{
    if (image == null)
    {
        return;
    }

    if (_texture == null)
    {
        // First frame: remember the image size and allocate a matching texture.
        size.width = image.info.width;
        size.height = image.info.height;
        _texture = new Texture2D((int)size.width, (int)size.height, TextureFormat.ARGB32, false);
        // Attach the texture to the canvas renderer's current material
        // (instead of a Renderer's sharedMaterial as elsewhere in this file).
        GetComponent<CanvasRenderer>().SetMaterial(GetComponent<CanvasRenderer>().GetMaterial(), _texture);
    }

    // Lock the image, copy it into the texture, unlock, then upload to the GPU.
    PXCMImage.ImageData pixelData;
    pxcmStatus status = image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out pixelData);
    if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        pixelData.ToTexture2D(0, _texture);
        image.ReleaseAccess(pixelData);
        _texture.Apply();
    }
}
/// <summary>
/// Fills the r/g/b channels of every point from the image's RGB32 pixels,
/// or zeroes the channels when the pixels cannot be read.
/// </summary>
private void loadRgbIrDXyzPointRgb(PXCMImage colorImage)
{
    byte[] pixels = getRGB32Pixels(colorImage);
    int count = this.rgb_ir_d_xyz_points.Length;
    if (pixels == null)
    {
        for (int i = 0; i < count; ++i)
        {
            rgb_ir_d_xyz_points[i].r = 0;
            rgb_ir_d_xyz_points[i].g = 0;
            rgb_ir_d_xyz_points[i].b = 0;
        }
        return;
    }
    for (int i = 0; i < count; ++i)
    {
        // Byte layout within each 4-byte pixel: offset 0 -> b, 1 -> g, 2 -> r.
        rgb_ir_d_xyz_points[i].r = pixels[4 * i + 2];
        rgb_ir_d_xyz_points[i].g = pixels[4 * i + 1];
        rgb_ir_d_xyz_points[i].b = pixels[4 * i + 0];
    }
}
/// <summary>
/// Copies the raw 16-bit depth values of the frame into depthBuffer.
/// </summary>
/// <param name="depthFrame">Depth frame; ignored when null.</param>
/// <exception cref="Exception">Thrown when the depth data cannot be acquired.</exception>
private void UpdateDepthData(PXCMImage depthFrame)
{
    if (depthFrame == null)
    {
        return;
    }

    // Lock the frame in its native depth format.
    PXCMImage.ImageData data;
    pxcmStatus ret = depthFrame.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out data);
    if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("Depth画像の取得に失敗");
    }

    // One short per pixel.
    var info = depthFrame.QueryInfo();
    depthBuffer = data.ToShortArray(0, info.width * info.height);

    depthFrame.ReleaseAccess(data);
}
/// <summary>
/// Runs the gesture + voice-command pipeline on the current thread: configures
/// the pipeline, initializes it, then loops acquiring frames and dispatching
/// gesture results until this.stop is set. Pausing sleeps instead of acquiring.
/// </summary>
public void SimplePipeline()
{
    //UtilMPipeline pp = null;
    MyUtilMPipeline pp = null;
    disconnected = false;
    bool sts = true;
    /* gesture */
    /* Set Source */
    //if (form.GetRecordState())
    //{
    //pp = new UtilMPipeline(form.GetRecordFile(), true);
    //pp.QueryCapture().SetFilter(form.GetCheckedDevice());
    //}
    //else if (form.GetPlaybackState())
    //{
    //pp = new UtilMPipeline(form.GetPlaybackFile(), false);
    //}
    //else
    //{
    //pp = new UtilMPipeline();
    pp = new MyUtilMPipeline();
    pp.SetForm(this);
    // pp.QueryCapture().SetFilter(form.GetCheckedDevice());
    //}
    /* Set Module */
    pp.EnableGesture(/*form.GetCheckedModule()*/);
    /* end egesture*/
    /* speech recognition*/
    /* Set Audio Source */
    // pp.QueryCapture().SetFilter("Microphone Array (Creative GestureCam)"/*form.GetCheckedSource()*/);
    /* Set Module */
    pp.EnableVoiceRecognition(/*"Voice Recognition (Nuance*)"*//*form.GetCheckedModule()*/);
    /* Set Language */
    //pp.SetProfileIndex(form.GetCheckedLanguage());
    // pp.SetProfileIndex(0);
    /* Set Command/Control or Dictation */
    /*
     * if (form.IsCommandControl())
     * {
     *     string[] cmds = form.GetCommands();
     *     if (cmds == null)
     *     {
     *         form.PrintStatus("No Command List. Dictation instead.");
     *         pp.SetVoiceDictation();
     *     }
     *     else
     *     {
     */
    // NOTE(review): cmds is not declared in this method — presumably a field of
    // the enclosing class (the local declaration above is commented out); confirm
    // before refactoring.
    pp.SetVoiceCommands(cmds);
    //pp.SetVoiceDictation();
    /*
     *     }
     * }
     * else
     * {
     *     pp.SetVoiceDictation();
     * }
     */
    /* end speech recognition*/
    /* Initialization */
    //form.UpdateStatus("Init Started");
    bool initok = false;
    try
    {
        pp.Init();
        initok = true;
    }
    catch (Exception e)
    {
        pp = null;
        System.Diagnostics.Debug.Print("Exception - Init Failed");
    }
    if (initok)
    {
        // form.UpdateStatus("Streaming");
        // Lower the capture audio mix level before streaming.
        pp.QueryCapture().device.SetProperty(PXCMCapture.Device.Property.PROPERTY_AUDIO_MIX_LEVEL, 0.2f);
        // Main streaming loop: runs until an external flag stops it.
        while (!this.stop)
        {
            if (this.pause)
            {
                System.Threading.Thread.Sleep(50);
            }
            else
            {
                if (!pp.AcquireFrame(true))
                {
                    break;
                }
                if (!DisplayDeviceConnection(pp.IsDisconnected()))
                {
                    /* Display Results */
                    PXCMGesture gesture = pp.QueryGesture();
                    PXCMImage depth = pp.QueryImage(PXCMImage.ImageType.IMAGE_TYPE_DEPTH);
                    //DisplayPicture(depth, gesture);
                    //DisplayGeoNodes(gesture);
                    DisplayGesture(gesture);
                    //form.UpdatePanel();
                }
                pp.ReleaseFrame();
            }
        }
        pp.Close();
        // pp.Dispose();
    }
    else
    {
        //form.UpdateStatus("Init Failed");
        sts = false;
    }
    if (!sts)
    {
        System.Diagnostics.Debug.Print("Init Failed");
    }
    //if (sts) form.UpdateStatus("Stopped");
}
/* Displaying Mask Images */
/// <summary>
/// Renders up to _maxBlobToShow blob segmentation masks and/or contours from
/// the depth frame into one bitmap, shows it on the form, then compares a
/// cropped region against reference images on disk and exits the process on a
/// match. Also updates the cached contour extremity points for the next frame.
/// NOTE(review): this method mutates the fields i, contourMost* and
/// _maxBlobToShow, and may call Environment.Exit — side effects to confirm
/// before reuse.
/// </summary>
private unsafe void DisplayPicture(PXCMImage depth, PXCMBlobData blobData)
{
    if (depth == null) return;
    PXCMImage image = depth;
    PXCMImage.ImageInfo info = image.QueryInfo();
    int numOfBlobs = blobData.QueryNumberOfBlobs();
    // Clamp the display count to the blobs actually present.
    if (_maxBlobToShow > numOfBlobs)
    {
        _maxBlobToShow = numOfBlobs;
    }
    PXCMBlobData.IBlob[] blobList = new PXCMBlobData.IBlob[_maxBlobToShow];
    PXCMPointI32[][] pointOuter = new PXCMPointI32[_maxBlobToShow][];
    PXCMPointI32[][] pointInner = new PXCMPointI32[_maxBlobToShow][];
    Bitmap picture = new Bitmap(image.info.width, image.info.height, PixelFormat.Format32bppRgb);
    PXCMImage.ImageData bdata;
    pxcmStatus results = pxcmStatus.PXCM_STATUS_NO_ERROR;
    // Map the UI selection (0/1/2) to the SDK blob access ordering.
    PXCMBlobData.AccessOrderType accessOrder = PXCMBlobData.AccessOrderType.ACCESS_ORDER_LARGE_TO_SMALL;
    int accessOrderBy = form.GetAccessOrder();
    switch (accessOrderBy)
    {
        case 1:
            accessOrder = PXCMBlobData.AccessOrderType.ACCESS_ORDER_NEAR_TO_FAR;
            break;
        case 2:
            accessOrder = PXCMBlobData.AccessOrderType.ACCESS_ORDER_RIGHT_TO_LEFT;
            break;
        case 0:
        default:
            accessOrder = PXCMBlobData.AccessOrderType.ACCESS_ORDER_LARGE_TO_SMALL;
            break;
    }
    Rectangle rect = new Rectangle(0, 0, image.info.width, image.info.height);
    BitmapData bitmapdata = picture.LockBits(rect, ImageLockMode.ReadWrite, picture.PixelFormat);
    // Blend each blob's mask (and optionally contours) into the locked bitmap.
    for (int j = 0; j < _maxBlobToShow; j++)
    {
        // Gray level for this blob: brighter for earlier (larger/nearer) blobs.
        byte tmp1 = (Byte)(255 - (255 / (_maxBlobToShow) * j));
        results = blobData.QueryBlobByAccessOrder(j, accessOrder, out blobList[j]);
        if (results == pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            bool isSegmentationImage = true;
            results = blobList[j].QuerySegmentationImage(out image);
            if (results != pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // No segmentation image for this blob: substitute a blank Y8
                // image so the access/blend code below still runs.
                // NOTE(review): imgInfo is populated but session.CreateImage
                // is called with info (the depth frame's info) — confirm intent.
                PXCMImage.ImageInfo imgInfo = new PXCMImage.ImageInfo();
                imgInfo.width = 640;
                imgInfo.height = 480;
                imgInfo.format = PXCMImage.PixelFormat.PIXEL_FORMAT_Y8;
                PXCMSession session = PXCMSession.CreateInstance();
                if (session != null)
                {
                    image = session.CreateImage(info);
                    if (image == null) return;
                }
                // Zero out both the destination bitmap row and the new image.
                image.AcquireAccess(PXCMImage.Access.ACCESS_WRITE, PXCMImage.PixelFormat.PIXEL_FORMAT_Y8, out bdata);
                byte* numPtr = (byte*)bitmapdata.Scan0; //dst
                byte* numPtr2 = (byte*)bdata.planes[0]; //row
                int imagesize = image.info.width * image.info.height;
                byte tmp;
                for (int i = 0; i < imagesize; i++, numPtr += 4, numPtr2++)
                {
                    tmp = (byte)(0);
                    numPtr[0] = tmp;
                    numPtr[1] = tmp;
                    numPtr[2] = tmp;
                    numPtr[3] = tmp;
                }
                image.ReleaseAccess(bdata);
                isSegmentationImage = false;
            }
            results = image.AcquireAccess(PXCMImage.Access.ACCESS_READ_WRITE, PXCMImage.PixelFormat.PIXEL_FORMAT_Y8, out bdata);
            if (form.GetBlobState() && isSegmentationImage == true)
            {
                // OR this blob's gray level into every pixel covered by its mask.
                byte* numPtr = (byte*)bitmapdata.Scan0; //dst
                byte* numPtr2 = (byte*)bdata.planes[0]; //row
                int imagesize = image.info.width * image.info.height;
                for (int i = 0; i < imagesize; i++, numPtr += 4, numPtr2++)
                {
                    byte tmp = (Byte)(numPtr2[0] == 0 ? 0 : tmp1);
                    tmp |= numPtr[0];
                    numPtr[0] = tmp;
                    numPtr[1] = tmp;
                    numPtr[2] = tmp;
                    numPtr[3] = 0xff;
                }
            }
            if ((form.GetContourState()))
            {
                // Collect the blob's outer/inner contour point lists.
                int contourNumber = blobList[j].QueryNumberOfContours();
                if (contourNumber > 0)
                {
                    for (int k = 0; k < contourNumber; ++k)
                    {
                        int contourSize = blobList[j].QueryContourSize(k);
                        if (blobList[j].IsContourOuter(k) == true)
                            blobList[j].QueryContourPoints(k, out pointOuter[j]);
                        else
                        {
                            blobList[j].QueryContourPoints(k, out pointInner[j]);
                        }
                    }
                    // Contour-only mode: clear the bitmap so only the drawn
                    // contours (added later by the form) remain visible.
                    if (results == pxcmStatus.PXCM_STATUS_NO_ERROR && form.GetBlobState() == false)
                    {
                        byte* numPtr = (byte*)bitmapdata.Scan0; //dst
                        byte* numPtr2 = (byte*)bdata.planes[0]; //row
                        int imagesize = image.info.width * image.info.height;
                        byte tmp;
                        for (int i = 0; i < imagesize; i++, numPtr += 4, numPtr2++)
                        {
                            tmp = (byte)(0);
                            numPtr[0] = tmp;
                            numPtr[1] = tmp;
                            numPtr[2] = tmp;
                            numPtr[3] = tmp;
                        }
                    }
                }
            }
            image.ReleaseAccess(bdata);
            image.Dispose();
        }
    }
    picture.UnlockBits(bitmapdata);
    form.DisplayBitmap(picture);
    ///////// that is my polygon zone
    Bitmap imageInstance2 = picture;
    // NOTE(review): i here is a field (frame counter), not a local — confirm.
    i++;
    // Crop the region bounded by last frame's contour extremity points.
    Bitmap croppedImage = null;
    if (contourMostRight.x > 0)
    {
        int rectWidth = (int)(contourMostRight.x - contourMostLeft.x);
        int rectHeight = (int)(contourMostTop.y - contourMostBottom.y);
        Rectangle sourceRectangle = new Rectangle(new Point((int)contourMostLeft.x, (int)contourMostBottom.y), new Size(rectWidth, rectHeight));
        croppedImage = CropImage(imageInstance2, sourceRectangle);
    }
    // Compare the crop against each reference image on disk; exit on a match.
    String[] origArray = { "d:\\origG.jpeg", "d:\\origX.jpeg", "d:\\origY.jpeg" };
    for (int i = 0; i < origArray.Length; i++)
    {
        Bitmap orig = null;
        if (File.Exists(origArray[i]))
            orig = new Bitmap(@origArray[i]);
        if (orig != null && croppedImage != null)
        {
            float diff = 0;
            // Normalize both to 150x100 before the pixel-wise comparison.
            orig = ScaleImage(orig, 150, 100);
            croppedImage = ScaleImage(croppedImage, 150, 100);
            // Skip comparison when the crop is entirely (near-)black.
            bool isImageBlank = true;
            for (int y = 0; y < orig.Height; y++)
            {
                for (int x = 0; x < orig.Width; x++)
                {
                    if (croppedImage.GetPixel(x, y).R > 1 && croppedImage.GetPixel(x, y).B > 1 && croppedImage.GetPixel(x, y).G > 1)
                    {
                        isImageBlank = false;
                        break;
                    }
                }
            }
            // Accumulate normalized per-channel differences.
            if (!isImageBlank && orig.Size.Width == croppedImage.Size.Width)
            {
                for (int y = 0; y < orig.Height; y++)
                {
                    for (int x = 0; x < orig.Width; x++)
                    {
                        diff += (float)Math.Abs(orig.GetPixel(x, y).R - croppedImage.GetPixel(x, y).R) / 255;
                        diff += (float)Math.Abs(orig.GetPixel(x, y).G - croppedImage.GetPixel(x, y).G) / 255;
                        diff += (float)Math.Abs(orig.GetPixel(x, y).B - croppedImage.GetPixel(x, y).B) / 255;
                    }
                }
            }
            // NOTE(review): named "percentMatch" but computed from the
            // accumulated difference — higher means MORE different; confirm the
            // >= 90 threshold semantics.
            float percentMatch = 100 * diff / (orig.Width * orig.Height * 2);
            Console.WriteLine("diff: {0} %", percentMatch);
            if (percentMatch >= 90)
            {
                Console.WriteLine(origArray[i].ToString());
                Environment.Exit(0);
            }
        }
    }
    /* if (croppedImage != null) { croppedImage.Save("d:\\cropedImage.jpeg", System.Drawing.Imaging.ImageFormat.Jpeg); // croppedImage.Save("d:\\origG.jpeg", System.Drawing.Imaging.ImageFormat.Jpeg); }*/
    // Hard stop after 100 frames (field i incremented above).
    if (i == 100)
        Environment.Exit(0);
    picture.Dispose();
    // Report contours/data points per blob and refresh the cached extremity
    // points used for next frame's crop.
    for (int i = 0; i < _maxBlobToShow; i++)
    {
        if (form.GetContourState())
        {
            if (pointOuter[i] != null && pointOuter[i].Length > 0)
                form.DisplayContour(pointOuter[i], i);
            if (pointInner[i] != null && pointInner[i].Length > 0)
                form.DisplayContour(pointInner[i], i);
        }
        if (form.GetBlobDataPointsState())
        {
            form.DisplayBlobDataPoints(blobList[i], i + 1);
        }
        PXCMPoint3DF32 point = blobList[i].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER);
        // NOTE(review): right/left and top/bottom assignments are crossed
        // (e.g. contourMostRight <- EXTREMITY_LEFT_MOST) — possibly intentional
        // mirroring; confirm.
        contourMostRight = blobList[i].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_LEFT_MOST);
        contourMostLeft = blobList[i].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_RIGHT_MOST);
        contourMostBottom = blobList[i].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_TOP_MOST);
        contourMostTop = blobList[i].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_BOTTOM_MOST);
        form.DisplayBlobNumber(point, i + 1);
    }
}
/// <summary>
/// Builds a stream profile for the requested format: color streams take the
/// user-selected resolution at 1-25 fps; other streams default to 320x240
/// with a 25 fps minimum.
/// </summary>
private PXCMCapture.VideoStream.ProfileInfo GetConfiguration(PXCMImage.ColorFormat format)
{
    var pinfo = new PXCMCapture.VideoStream.ProfileInfo { imageInfo = { format = format } };
    bool isColorStream = ((int)format & (int)PXCMImage.ImageType.IMAGE_TYPE_COLOR) != 0;

    if (!isColorStream)
    {
        // Non-color (e.g. depth) streams: fixed QVGA, 25 fps minimum.
        pinfo.imageInfo.width = 320;
        pinfo.imageInfo.height = 240;
        pinfo.frameRateMin.numerator = 25;
        pinfo.frameRateMin.denominator = 1;
        return pinfo;
    }

    // Color stream: resolution follows the selected profile string.
    if (ColorImageProfile.Equals("1280 x 720"))
    {
        pinfo.imageInfo.width = 1280;
        pinfo.imageInfo.height = 720;
    }
    else if (ColorImageProfile.Equals("640 x 480"))
    {
        pinfo.imageInfo.width = 640;
        pinfo.imageInfo.height = 480;
    }
    pinfo.frameRateMin.numerator = 1;
    pinfo.frameRateMax.numerator = 25;
    pinfo.frameRateMin.denominator = pinfo.frameRateMax.denominator = 1;
    return pinfo;
}
/// <summary>
/// Runs the emotion-detection pipeline with explicit async I/O: creates the
/// session and emotion module, pairs the module with a capture source
/// (live/record/playback per the form), then streams frames via sync points
/// until form.stop is set, displaying the color image and emotion results.
/// </summary>
public void AdvancedPipeline()
{
    PXCMSession session;
    pxcmStatus sts = PXCMSession.CreateInstance(out session);
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        form.UpdateStatus("Failed to create an SDK session");
        return;
    }
    /* Set Module */
    PXCMSession.ImplDesc desc = new PXCMSession.ImplDesc();
    desc.friendlyName.set(form.GetCheckedModule());
    PXCMEmotion emotionDet;
    sts = session.CreateImpl<PXCMEmotion>(ref desc, PXCMEmotion.CUID, out emotionDet);
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        form.UpdateStatus("Failed to create the emotionDet module");
        session.Dispose();
        return;
    }
    // Pick the capture source: file recording, file playback, or live device.
    UtilMCapture capture = null;
    if (form.GetRecordState())
    {
        capture = new UtilMCaptureFile(session, form.GetFileName(), true);
        capture.SetFilter(form.GetCheckedDevice());
    }
    else if (form.GetPlaybackState())
    {
        capture = new UtilMCaptureFile(session, form.GetFileName(), false);
    }
    else
    {
        capture = new UtilMCapture(session);
        capture.SetFilter(form.GetCheckedDevice());
    }
    form.UpdateStatus("Pair moudle with I/O");
    // Try each module profile until one's input streams can be located and set.
    for (uint i = 0; ; i++)
    {
        PXCMEmotion.ProfileInfo pinfo;
        sts = emotionDet.QueryProfile(i, out pinfo);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
        sts = capture.LocateStreams(ref pinfo.inputs);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) continue;
        sts = emotionDet.SetProfile(ref pinfo);
        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR) break;
    }
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        form.UpdateStatus("Failed to pair the emotionDet module with I/O");
        capture.Dispose();
        emotionDet.Dispose();
        session.Dispose();
        return;
    }
    form.UpdateStatus("Streaming");
    PXCMImage[] images = new PXCMImage[PXCMCapture.VideoStream.STREAM_LIMIT];
    PXCMScheduler.SyncPoint[] sps = new PXCMScheduler.SyncPoint[2];
    while (!form.stop)
    {
        // Free last iteration's images/sync points before reading new ones.
        PXCMImage.Dispose(images);
        PXCMScheduler.SyncPoint.Dispose(sps);
        // Kick off the async read (sps[0]) and async processing (sps[1]).
        sts = capture.ReadStreamAsync(images, out sps[0]);
        if (DisplayDeviceConnection(sts == pxcmStatus.PXCM_STATUS_DEVICE_LOST)) continue;
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
        sts = emotionDet.ProcessImageAsync(images, out sps[1]);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
        // Wait for both operations, then check the read's final status.
        PXCMScheduler.SyncPoint.SynchronizeEx(sps);
        sts = sps[0].Synchronize();
        if (DisplayDeviceConnection(sts == pxcmStatus.PXCM_STATUS_DEVICE_LOST)) continue;
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
        /* Display Results */
        DisplayPicture(capture.QueryImage(images, PXCMImage.ImageType.IMAGE_TYPE_COLOR));
        DisplayLocation(emotionDet);
        form.UpdatePanel();
    }
    PXCMImage.Dispose(images);
    PXCMScheduler.SyncPoint.Dispose(sps);
    capture.Dispose();
    emotionDet.Dispose();
    session.Dispose();
    form.UpdateStatus("Stopped");
}
/// <summary>
/// Console variant of the emotion pipeline: creates the session and emotion
/// module, pairs the module with the named live capture device, then streams
/// frames via sync points until _shouldStop is set, polling emotion data
/// every 500 ms.
/// </summary>
public void RunEmotionRecognition()
{
    PXCMSession session;
    pxcmStatus sts = PXCMSession.CreateInstance(out session);
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Console.WriteLine("Failed to create an SDK session");
        return;
    }
    // Set Module //
    PXCMSession.ImplDesc desc = new PXCMSession.ImplDesc();
    desc.friendlyName.set(moduleName);
    PXCMEmotion emotionDet;
    sts = session.CreateImpl<PXCMEmotion>(ref desc, PXCMEmotion.CUID, out emotionDet);
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Console.WriteLine("Failed to create the emotionDet module");
        session.Dispose();
        return;
    }
    UtilMCapture capture = null;
    capture = new UtilMCapture(session);
    capture.SetFilter(captureDeviceName);
    Console.WriteLine("Pair moudle with I/O");
    // Try each module profile until one's input streams can be located and set.
    for (uint i = 0; ; i++)
    {
        PXCMEmotion.ProfileInfo pinfo;
        sts = emotionDet.QueryProfile(i, out pinfo);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
        sts = capture.LocateStreams(ref pinfo.inputs);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) continue;
        sts = emotionDet.SetProfile(ref pinfo);
        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR) break;
    }
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Console.WriteLine("Failed to pair the emotionDet module with I/O");
        capture.Dispose();
        emotionDet.Dispose();
        session.Dispose();
        return;
    }
    Console.WriteLine("Streaming");
    PXCMImage[] images = new PXCMImage[PXCMCapture.VideoStream.STREAM_LIMIT];
    PXCMScheduler.SyncPoint[] sps = new PXCMScheduler.SyncPoint[2];
    while (!_shouldStop)
    {
        // Free last iteration's images/sync points before reading new ones.
        PXCMImage.Dispose(images);
        PXCMScheduler.SyncPoint.Dispose(sps);
        // Kick off the async read (sps[0]) and async processing (sps[1]).
        sts = capture.ReadStreamAsync(images, out sps[0]);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
        sts = emotionDet.ProcessImageAsync(images, out sps[1]);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
        // Wait for both operations, then check the read's final status.
        PXCMScheduler.SyncPoint.SynchronizeEx(sps);
        sts = sps[0].Synchronize();
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
        // Display Results //
        GetEmoData(emotionDet);
        // Throttle polling to twice a second.
        Thread.Sleep(500);
    }
    PXCMImage.Dispose(images);
    PXCMScheduler.SyncPoint.Dispose(sps);
    capture.Dispose();
    emotionDet.Dispose();
    session.Dispose();
    Console.WriteLine("Stopped");
}
/// <summary>
/// Per-frame handler for head-tracking mouse control: draws the detected face
/// rectangle onto the frame, derives a smoothed face-movement delta, moves the
/// system cursor proportionally (clamped to the virtual screen), and shows the
/// annotated frame in the recipient control.
/// NOTE(review): reads many externally-updated fields (faceLocationData,
/// faceLandmarkData, locationStatus) — assumed to be refreshed by a sibling
/// callback before this runs; confirm ordering.
/// </summary>
public override void OnImage(PXCMImage image)
{
    session = QuerySession();
    image.QueryBitmap(session, out lastProcessedBitmap);
    // Outline the detected face when location data is available.
    using (Graphics drawer = Graphics.FromImage(lastProcessedBitmap))
    {
        if (locationStatus != pxcmStatus.PXCM_STATUS_ITEM_UNAVAILABLE)
        {
            drawer.DrawRectangle(new Pen(new SolidBrush(Color.Red), 1), new Rectangle(new Point((int)faceLocationData.rectangle.x, (int)faceLocationData.rectangle.y), new Size((int)faceLocationData.rectangle.w, (int)faceLocationData.rectangle.h)));
        }
    }
    // Smooth the tracked point (landmark index 6) and compute the frame-to-frame delta.
    oldFacePosition = FacePosition();
    AddFacePositionToSmooth(new Point((int)faceLandmarkData[6].position.x, (int)faceLandmarkData[6].position.y));
    facePosition = FacePosition();
    int xDiffAnt = xDiff;
    int yDiffAnt = yDiff;
    xDiff = (oldFacePosition.X - facePosition.X);
    yDiff = (oldFacePosition.Y - facePosition.Y);
    // Movement magnitude indexes MOUSE_ARRAY to pick a cursor speed multiplier.
    double dist = Math.Sqrt(xDiff * xDiff + yDiff * yDiff);
    if ((int)dist >= MOUSE_ARRAY.Length)
    {
        dist = MOUSE_ARRAY.Length - 1;
    }
    currentCursorSmooth = 10;
    // X follows the face delta; Y is inverted.
    cursorPosition.X += xDiff * (MOUSE_ARRAY[(int)dist]);
    cursorPosition.Y -= yDiff * (MOUSE_ARRAY[(int)dist]);
    //Off limit screen correction
    if (cursorPosition.X > SystemInformation.VirtualScreen.Width)
    {
        cursorPosition.X = SystemInformation.VirtualScreen.Width;
    }
    else if (cursorPosition.X < 0)
    {
        cursorPosition.X = 0;
    }
    if (cursorPosition.Y > SystemInformation.VirtualScreen.Height)
    {
        cursorPosition.Y = SystemInformation.VirtualScreen.Height;
    }
    else if (cursorPosition.Y < 0)
    {
        cursorPosition.Y = 0;
    }
    AddCursorPositionToSmooth(cursorPosition);
    // Only move the real cursor while the feature is active.
    if (parent.isRunning)
    {
        Cursor.Position = CursorPosition();
    }
    //Drawing stuff
    DrawCircle(lastProcessedBitmap, facePosition, 2, Color.Red);
    //Show main image
    recipient.Image = lastProcessedBitmap;
}
/// <summary>
/// Face-tracking pipeline thread body: initializes the SenseManager and the
/// face module (detection only, up to 4 faces, recognition enabled), loads the
/// face database when available, then loops grabbing frames, raising OnFrame
/// with the current image and running FindFace, until _Stop is set.
/// OnStart/OnStop are raised at entry and at every exit path.
/// </summary>
private void FaceTrackingPipeline() {
    IsDispose = false;
    OnStart?.Invoke(this, null);
    #region Manager Init
    realSenseManager = RealSenseObjects.Session.CreateSenseManager();
    if (realSenseManager == null) {
        MessageBox.Show(
            "PXCMSenseManager初始化失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    PXCMCaptureManager captureManager = realSenseManager.captureManager;
    if (captureManager == null) {
        MessageBox.Show(
            "PXCMCaptureManager初始化失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    #endregion
    #region 基本設定
    // Select the capture device.
    captureManager.FilterByDeviceInfo(Form.SelectedDevice);
    // Select the stream profile.
    captureManager.FilterByStreamProfiles(Form.SelectedDeviceStreamProfile);
    // Enable the face-tracking module.
    realSenseManager.EnableFace();
    PXCMFaceModule faceModule = realSenseManager.QueryFace();
    if (faceModule == null) {
        MessageBox.Show(
            "取得PXCMFaceModule失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    // Create the face-tracking configuration.
    moduleConfiguration = faceModule.CreateActiveConfiguration();
    if (moduleConfiguration == null) {
        MessageBox.Show(
            "建立PXCMFaceConfiguration失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    // Tracking-mode settings: detection only (no landmarks / pose).
    moduleConfiguration.SetTrackingMode(Form.ModeType);
    moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
    moduleConfiguration.detection.isEnabled = true;
    moduleConfiguration.detection.maxTrackedFaces = 4; // track at most 4 faces
    moduleConfiguration.landmarks.isEnabled = false;
    moduleConfiguration.pose.isEnabled = false;
    recognitionConfig = moduleConfiguration.QueryRecognition();
    if (recognitionConfig == null) {
        MessageBox.Show(
            "建立RecognitionConfiguration失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    recognitionConfig.Enable();
    #endregion
    #region 讀取資料庫數據
    // Load a previously saved face-recognition database, if any.
    if (Form.FaceData != null) {
        recognitionConfig.SetDatabase(Form.FaceData);
        moduleConfiguration.ApplyChanges();
    }
    #endregion
    #region 預備啟動
    moduleConfiguration.EnableAllAlerts();
    //moduleConfiguration.SubscribeAlert(FaceAlertHandler);
    pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();
    Form.SetStatus("RealSenseManager初始化中");
    if (applyChangesStatus.IsError() || realSenseManager.Init().IsError()) {
        MessageBox.Show(
            "RealSenseManager初始化失敗,請檢查設定正確。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    #endregion
    using (moduleOutput = faceModule.CreateOutput()) {
        PXCMCapture.Device.StreamProfileSet profiles;
        PXCMCapture.Device device = captureManager.QueryDevice();
        if (device == null) {
            MessageBox.Show(
                "取得設備失敗。",
                "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
            OnStop?.Invoke(this, null);
            return;
        }
        device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
        #region Loop
        while (!_Stop) {
            // Busy-wait while paused, keeping the UI responsive.
            while (_Paush) {
                Application.DoEvents();
            }
            if (realSenseManager.AcquireFrame(true).IsError()) {
                break;
            }
            var isConnected = realSenseManager.IsConnected();
            if (isConnected) {
                var sample = realSenseManager.QueryFaceSample();
                if (sample == null) {
                    realSenseManager.ReleaseFrame();
                    continue;
                }
                #region 畫面取出
                // Pick the IR or color image according to the tracking mode.
                PXCMImage image = null;
                if (Form.ModeType == PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR) {
                    image = sample.ir;
                } else {
                    image = sample.color;
                }
                #endregion
                moduleOutput.Update(); // refresh recognition results
                PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                if (recognition == null) {
                    realSenseManager.ReleaseFrame();
                    continue;
                }
                #region 繪圖與事件
                OnFrame?.Invoke(this, new FaceRecognitionEventArgs() { Image = ToBitmap(image) });
                FindFace(moduleOutput);
                #endregion
            }
            // Release the frame so the next one can be acquired.
            realSenseManager.ReleaseFrame();
        }
        #endregion
        //更新資料庫緩衝區
        //Buffer = moduleOutput.QueryRecognitionModule().GetDatabaseBuffer();
    }
    #region 釋放資源
    moduleConfiguration.Dispose();
    realSenseManager.Close();
    realSenseManager.Dispose();
    #endregion
    IsDispose = true;
    OnStop?.Invoke(this, null);
}
/// <summary>
/// Per-frame hand-module callback: updates hand data, shows the left/right
/// segmentation images in the hand form, and — when tracking is enabled —
/// drives the mouse cursor from the index-finger tip (left hand preferred,
/// right hand as fallback) and clicks on a "two_fingers_pinch_open" gesture.
/// Always returns PXCM_STATUS_NO_ERROR.
/// </summary>
pxcmStatus newHandFrame(PXCMHandModule hand) {
    if (hand != null) {
        PXCMHandData handData = hand.CreateOutput();
        handData.Update();
        PXCMHandData.IHand iHandDataLeft = null, iHandDataRight = null;
        PXCMHandData.JointData jointData = null;
        PXCMImage image = null;
        handData.QueryHandData(PXCMHandData.AccessOrderType.ACCESS_ORDER_LEFT_HANDS, 0, out iHandDataLeft);
        handData.QueryHandData(PXCMHandData.AccessOrderType.ACCESS_ORDER_RIGHT_HANDS, 0, out iHandDataRight);
        if (handForm != null && !handForm.IsDisposed) {
            this.handForm.HandCount = handData.QueryNumberOfHands();
            // Show the left-hand segmentation mask, when a left hand is tracked.
            if (iHandDataLeft != null) {
                iHandDataLeft.QuerySegmentationImage(out image);
                if (image != null) {
                    PXCMImage.ImageData data = new PXCMImage.ImageData();
                    image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
                    handForm.LeftHand = data.ToBitmap(0, image.info.width, image.info.height);
                    image.ReleaseAccess(data);
                }
            }
            // Show the right-hand segmentation mask, when a right hand is tracked.
            if (iHandDataRight != null) {
                iHandDataRight.QuerySegmentationImage(out image);
                if (image != null) {
                    PXCMImage.ImageData data = new PXCMImage.ImageData();
                    image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
                    handForm.RightHand = data.ToBitmap(0, image.info.width, image.info.height);
                    image.ReleaseAccess(data);
                }
            }
        }
        // Prefer the left hand's index-finger tip; fall back to the right hand.
        if (iHandDataLeft != null) {
            if (jointData == null) {
                iHandDataLeft.QueryTrackedJoint(PXCMHandData.JointType.JOINT_INDEX_TIP, out jointData);
            }
        }
        if (iHandDataRight != null) {
            if (jointData == null) {
                iHandDataRight.QueryTrackedJoint(PXCMHandData.JointType.JOINT_INDEX_TIP, out jointData);
            }
        }
        if (jointData != null && canTrack.Checked) {
            // Map the mirrored camera coordinates (hard-coded 640x480 frame)
            // onto the primary screen.
            Cursor.Position = new System.Drawing.Point(
                (int)((640.0f - jointData.positionImage.x) * Screen.PrimaryScreen.Bounds.Width / 640.0f),
                (int)(jointData.positionImage.y * Screen.PrimaryScreen.Bounds.Height / 480.0f));
            PXCMHandData.GestureData gestureData = null;
            if (handData.IsGestureFired("two_fingers_pinch_open", out gestureData)) {
                Program.DoMouseClick();
            }
            Console.WriteLine("Z Position: " + jointData.positionWorld.z);
        }
        handData.Dispose();
    }
    return(pxcmStatus.PXCM_STATUS_NO_ERROR);
}
/* Displaying Depth/Mask Images - for depth image only we use a delay of NumberOfFramesToDelay to sync image with tracking */
/// <summary>
/// Renders either the hand segmentation masks (label-map mode) or a
/// grayscale depth image onto the form. In depth mode, frames are queued and
/// the oldest of NumberOfFramesToDelay frames is displayed to keep the image
/// in sync with tracking.
/// </summary>
private unsafe void DisplayPicture(PXCMImage depth, PXCMHandData handAnalysis)
{
    if (depth == null)
        return;
    PXCMImage image = depth;
    //Mask Image
    if (form.GetLabelmapState())
    {
        Bitmap labeledBitmap = null;
        try
        {
            labeledBitmap = new Bitmap(image.info.width, image.info.height, PixelFormat.Format32bppRgb);
            // Composite every tracked hand's Y8 segmentation mask into the bitmap.
            for (int j = 0; j < handAnalysis.QueryNumberOfHands(); j++)
            {
                int id;
                PXCMImage.ImageData data;
                handAnalysis.QueryHandId(PXCMHandData.AccessOrderType.ACCESS_ORDER_BY_TIME, j, out id); //Get hand by time of appearance
                PXCMHandData.IHand handData;
                handAnalysis.QueryHandData(PXCMHandData.AccessOrderType.ACCESS_ORDER_BY_TIME, j, out handData);
                // NOTE: QuerySegmentationImage replaces `image`, so from here on
                // `image` refers to the segmentation image, not the depth frame.
                if (handData != null && (handData.QuerySegmentationImage(out image) >= pxcmStatus.PXCM_STATUS_NO_ERROR))
                {
                    if (image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_Y8, out data) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        Rectangle rect = new Rectangle(0, 0, image.info.width, image.info.height);
                        BitmapData bitmapdata = labeledBitmap.LockBits(rect, ImageLockMode.ReadWrite, labeledBitmap.PixelFormat);
                        byte* numPtr = (byte*) bitmapdata.Scan0; //dst
                        byte* numPtr2 = (byte*) data.planes[0]; //row
                        int imagesize = image.info.width*image.info.height;
                        // Body side selects the brightness so left/right hands differ.
                        byte num2 = (byte) handData.QueryBodySide();
                        byte tmp = 0;
                        // OR the scaled mask value into all three color channels;
                        // alpha is forced opaque.
                        for (int i = 0; i < imagesize; i++, numPtr += 4, numPtr2++)
                        {
                            tmp = (byte) (LUT[numPtr2[0]]*num2*100);
                            numPtr[0] = (Byte) (tmp | numPtr[0]);
                            numPtr[1] = (Byte) (tmp | numPtr[1]);
                            numPtr[2] = (Byte) (tmp | numPtr[2]);
                            numPtr[3] = 0xff;
                        }
                        labeledBitmap.UnlockBits(bitmapdata);
                        image.ReleaseAccess(data);
                    }
                }
            }
            if (labeledBitmap != null)
            {
                form.DisplayBitmap(labeledBitmap);
                labeledBitmap.Dispose();
            }
            image.Dispose();
        }
        catch (Exception)
        {
            // Best-effort cleanup; the frame is simply skipped on failure.
            if (labeledBitmap != null)
            {
                labeledBitmap.Dispose();
            }
            if (image != null)
            {
                image.Dispose();
            }
        }
    }//end label image
    //Depth Image
    else
    {
        //collecting NumberOfFramesToDelay images inside a queue and displaying the oldest image
        PXCMImage.ImageInfo info;
        PXCMImage image2;
        info = image.QueryInfo();
        image2 = form.g_session.CreateImage(info);
        if (image2 == null)
        {
            return;
        }
        image2.CopyImage(image);
        m_images.Enqueue(image2);
        if (m_images.Count == NumberOfFramesToDelay)
        {
            Bitmap depthBitmap;
            try
            {
                depthBitmap = new Bitmap(image.info.width, image.info.height, PixelFormat.Format32bppRgb);
            }
            catch (Exception)
            {
                image.Dispose();
                PXCMImage queImage = m_images.Dequeue();
                queImage.Dispose();
                return;
            }
            PXCMImage.ImageData data3;
            PXCMImage image3 = m_images.Dequeue();
            if (image3.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out data3) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                float fMaxValue = _maxRange;
                byte cVal;
                Rectangle rect = new Rectangle(0, 0, image.info.width, image.info.height);
                BitmapData bitmapdata = depthBitmap.LockBits(rect, ImageLockMode.ReadWrite, depthBitmap.PixelFormat);
                byte* pDst = (byte*)bitmapdata.Scan0;
                short* pSrc = (short*)data3.planes[0];
                int size = image.info.width * image.info.height;
                // Scale each 16-bit depth sample into an inverted 8-bit gray
                // value (near = bright), leaving invalid (0) pixels black.
                for (int i = 0; i < size; i++, pSrc++, pDst += 4)
                {
                    cVal = (byte)((*pSrc) / fMaxValue * 255);
                    if (cVal != 0)
                        cVal = (byte)(255 - cVal);
                    pDst[0] = cVal;
                    pDst[1] = cVal;
                    pDst[2] = cVal;
                    pDst[3] = 255;
                }
                try
                {
                    depthBitmap.UnlockBits(bitmapdata);
                }
                catch (Exception)
                {
                    image3.ReleaseAccess(data3);
                    depthBitmap.Dispose();
                    image3.Dispose();
                    return;
                }
                form.DisplayBitmap(depthBitmap);
                image3.ReleaseAccess(data3);
            }
            depthBitmap.Dispose();
            image3.Dispose();
        }
    }
}
/// <summary>
/// Marks color pixels that have a valid depth mapping, using the inverse UV
/// map (or, for dots >= 9, the CreateDepthImageMappedToColor output). For
/// dots == 1 the marker brightness encodes a depth histogram. Returns the
/// modified RGB32 color pixels; cwidth/cheight receive the color image size.
/// Fix: the inverse-UV lookups now also check the upper bounds
/// (xx &lt; dwidth, yy &lt; dheight) before indexing dpixels, preventing an
/// IndexOutOfRangeException for map values at or beyond the depth edge.
/// </summary>
public byte[] ColorToDepthCoordinatesByInvUVMap(PXCMImage color, PXCMImage depth, int dots, out int cwidth, out int cheight)
{
    /* Retrieve the color pixels */
    byte[] cpixels = color.GetRGB32Pixels(out cwidth, out cheight);
    if (projection == null || cpixels == null) return cpixels;
    if (dots >= 9)
    {
        // A sample for CreateDepthImageMappedToColor output visualization
        PXCMImage.ImageData d2cDat;
        PXCMImage d2c = projection.CreateDepthImageMappedToColor(depth, color);
        if (d2c == null)
        {
            return cpixels;
        }
        UInt16[] d2cpixels;
        if (d2c.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out d2cDat) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Int32 d2cwidth = d2cDat.pitches[0] / sizeof(Int16); /* aligned width */
            Int32 d2cheight = (Int32)d2c.info.height;
            d2cpixels = d2cDat.ToUShortArray(0, d2cwidth * d2cheight);
            for (Int32 y = 0; y < cheight; y++)
            {
                for (Int32 x = 0; x < cwidth; x++)
                {
                    if (d2cpixels[y * d2cwidth + x] == invalid_value) continue; // no mapping based on unreliable depth values
                    // Tag the blue channel of mapped pixels.
                    cpixels[(y * cwidth + x) * 4] = 0xFF;
                }
            }
            d2c.ReleaseAccess(d2cDat);
        }
        d2c.Dispose();
        return cpixels;
    }
    /* Retrieve the depth pixels and uvmap */
    PXCMImage.ImageData ddata;
    Int16[] dpixels;
    if (depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out ddata) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Int32 dpitch = ddata.pitches[0] / sizeof(Int16); /* aligned width */
        Int32 dwidth = (Int32)depth.info.width;
        Int32 dheight = (Int32)depth.info.height;
        dpixels = ddata.ToShortArray(0, dpitch * dheight);
        pxcmStatus sts = projection.QueryInvUVMap(depth, invuvmap);
        Int32 invuvpitch = color.QueryInfo().width;
        depth.ReleaseAccess(ddata);
        if (dots > 1)
        {
            // If Depth data is valid just set a blue pixel
            /* Draw dots onto the color pixels */
            for (Int32 y = 0; y < cheight; y++)
            {
                for (Int32 x = 0; x < cwidth; x++)
                {
                    Int32 xx = (Int32)(invuvmap[y * cwidth + x].x * dwidth);
                    Int32 yy = (Int32)(invuvmap[y * cwidth + x].y * dheight);
                    // Bounds check both ends: invalid map entries are negative,
                    // and values of exactly 1.0 would index one past the edge.
                    if (xx >= 0 && yy >= 0 && xx < dwidth && yy < dheight)
                    {
                        if (dpixels[yy * dpitch + xx] > 0)
                        {
                            cpixels[(y * cwidth + x) * 4] = 0xFF;
                        }
                    }
                }
            }
        }
        else
        {
            // If Depth data is valid just set a blue pixel with briteness depends on Depth value
            Int32 MAX_LOCAL_DEPTH_VALUE = 4000;
            Int32[] depth_hist = new Int32[MAX_LOCAL_DEPTH_VALUE];
            Array.Clear(depth_hist, 0, depth_hist.Length);
            Int32 num_depth_points = 0;
            // Build a histogram of in-range depth values.
            for (Int32 y = 0; y < dheight; y++)
            {
                for (Int32 x = 0; x < dwidth; x++)
                {
                    Int16 d = dpixels[y * dpitch + x];
                    if (d > 0 && d < MAX_LOCAL_DEPTH_VALUE)
                    {
                        depth_hist[d]++;
                        num_depth_points++;
                    }
                }
            }
            if (num_depth_points > 0)
            {
                // Convert the histogram into a cumulative brightness ramp.
                for (Int32 i = 1; i < MAX_LOCAL_DEPTH_VALUE; i++)
                {
                    depth_hist[i] += depth_hist[i - 1];
                }
                for (Int32 i = 1; i < MAX_LOCAL_DEPTH_VALUE; i++)
                {
                    depth_hist[i] = 255 - (Int32)((float)255 * (float)depth_hist[i] / (float)num_depth_points);
                }
                /* Draw dots onto the color pixels */
                for (Int32 y = 0; y < cheight; y++)
                {
                    for (Int32 x = 0; x < cwidth; x++)
                    {
                        Int32 xx = (Int32)(invuvmap[y * cwidth + x].x * dwidth);
                        Int32 yy = (Int32)(invuvmap[y * cwidth + x].y * dheight);
                        // Same two-sided bounds check as above.
                        if (xx >= 0 && yy >= 0 && xx < dwidth && yy < dheight)
                        {
                            Int16 d = dpixels[yy * dpitch + xx];
                            if (d > 0 && d < MAX_LOCAL_DEPTH_VALUE)
                            {
                                cpixels[(y * cwidth + x) * 4] = (byte)depth_hist[d];
                            }
                        }
                    }
                }
            }
        }
    }
    return cpixels;
}
public RenderFrameEventArgs(int index, PXCMImage image) { this.index = index; this.image = image; }
/* Given a full frame, this isolates the pixels that actually * contain the hand in question. It then returns the pixel locations * for this hand. * */ public void handLocation( ref Coord[] locations,PXCMImage.ImageData data, int width) { //Load the depth data from the frame. IntPtr ptr = data.buffer.planes[0]; //Initilize the byte array for the bitmap. byte[] rgbValues = new byte[locations.Length]; //Copy the passed in image data into byte array. Marshal.Copy(ptr, rgbValues, 0, locations.Length); //Loop through the values indetifying the hand values. int counter=0; int i; for (i = 0; i < locations.Length; i++) { if (rgbValues[i] == 0 ) { locations[counter++] = new Coord(i,width); } } //Resize the array to the smaller size. if(counter >1) Array.Resize(ref locations, counter - 1); return; }
private void UpdateBlobImage(PXCMImage depthFrame) { if (depthFrame == null) { return; } // Blobを更新する var sts = blobData.Update(); if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) { return; } // Blobのための画像オブジェクトを作成する var depthInfo = depthFrame.QueryInfo(); depthInfo.format = PXCMImage.PixelFormat.PIXEL_FORMAT_Y8; var session = senseManager.QuerySession(); var blobImage = session.CreateImage(depthInfo); // 表示用画像を初期化する Array.Clear(imageBuffer, 0, imageBuffer.Length); CanvasHandParts.Children.Clear(); // Blobを取得する int numOfBlobs = blobData.QueryNumberOfBlobs(); for (int i = 0; i < numOfBlobs; ++i) { // Blobデータを取得する PXCMBlobData.IBlob blob; sts = blobData.QueryBlobByAccessOrder(i, PXCMBlobData.AccessOrderType.ACCESS_ORDER_NEAR_TO_FAR, out blob); if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) { continue; } sts = blob.QuerySegmentationImage(out blobImage); if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) { continue; } // Blob画像を取得する PXCMImage.ImageData data; sts = blobImage.AcquireAccess(PXCMImage.Access.ACCESS_READ, depthInfo.format, out data); if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) { continue; } // データをコピーする var buffer = data.ToByteArray(0, data.pitches[0] * depthInfo.height); for (int j = 0; j < depthInfo.height * depthInfo.width; ++j) { if (buffer[j] != 0) { imageBuffer[j] = (byte)((i + 1) * 64); } } // Blob画像を解放する blobImage.ReleaseAccess(data); // Blobの輪郭を表示する UpdateContoursImage(blob, i); } // Blob画像オブジェクトを解放する blobImage.Dispose(); // ピクセルデータを更新する imageBitmap.WritePixels(imageRect, imageBuffer, DEPTH_WIDTH * BYTE_PER_PIXEL, 0); }
private void DisplayPicture(PXCMImage image) { PXCMImage.ImageData data; if (image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.ColorFormat.COLOR_FORMAT_RGB32, out data) >= pxcmStatus.PXCM_STATUS_NO_ERROR) { form.DisplayBitmap(data.ToBitmap(image.info.width, image.info.height)); image.ReleaseAccess(ref data); } }
private void DisplayPicture(PXCMImage image) { PXCMImage.ImageData data; pxcmStatus sts = image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out data); if ( sts >= pxcmStatus.PXCM_STATUS_NO_ERROR) { form.DisplayBitmap(data.ToBitmap(0, image.info.width, image.info.height)); timer.Tick(""); image.ReleaseAccess(data); } }
public RealSenseEventArgs(PXCMImage.ImageData source, PXCMImage.ImageInfo info) : this() { this.source = source; this.info = info; }
/// <summary> /// Aligns the rgb and depth image, calculates the aligned ROI and pushes it through the pipeline. /// </summary> /// <param name="depth"></param> /// <param name="depthImage"></param> /// <param name="colorImage"></param> private void PushAlignedRgbAndDepthImageROI(PXCMImage depth, Image<Gray, float> depthImage, Image<Rgb, byte> colorImage) { /* Get UV map */ var uvMapImage = Senz3DUtils.GetDepthUvMap(depth); /* Get RgbOfDepth */ Senz3DUtils.GetRgbOfDepthPixels(depthImage, colorImage, uvMapImage, true, ref _rgbInDepthROI); Stage(new ROI(this, "rgbInDepthROI") { RoiRectangle = _rgbInDepthROI }); Push(); LogFormat("Identified rgbInDepthROI as {0}", _rgbInDepthROI); }
public static byte[] GetRGB32Pixels(PXCMImage image, out int cwidth, out int cheight) { PXCMImage.ImageData cdata; byte[] cpixels = null; cwidth = cheight = 0; if (image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out cdata) >= pxcmStatus.PXCM_STATUS_NO_ERROR) { cwidth = cdata.pitches[0] / sizeof(Int32); cheight = image.info.height; cpixels = cdata.ToByteArray(0, cdata.pitches[0] * cheight); image.ReleaseAccess(cdata); } return cpixels; }
public byte[] DepthToColorCoordinatesByFunction(PXCMImage color, PXCMImage depth, int dots, out int cwidth, out int cheight) { /* Retrieve the color pixels */ byte[] cpixels = color.GetRGB32Pixels(out cwidth, out cheight); if (projection == null || cpixels == null) return cpixels; /* Retrieve the depth pixels and uvmap */ PXCMImage.ImageData ddata; UInt16[] dpixels; bool isdepth = (depth.info.format == PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH); if (depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, out ddata) >= pxcmStatus.PXCM_STATUS_NO_ERROR) { Int32 dpitch = ddata.pitches[0] / sizeof(Int16); /* aligned width */ Int32 dwidth = (Int32)depth.info.width; Int32 dheight = (Int32)depth.info.height; dpixels = ddata.ToUShortArray(0, isdepth ? dpitch * dheight : dpitch * dheight * 3); depth.ReleaseAccess(ddata); /* Projection Calculation */ PXCMPoint3DF32[] dcords = new PXCMPoint3DF32[dwidth * dheight]; for (Int32 y = 0, k = 0; y < dheight; y++) { for (Int32 x = 0; x < dwidth; x++, k++) { dcords[k].x = x; dcords[k].y = y; dcords[k].z = isdepth ? dpixels[y * dpitch + x] : dpixels[3 * (y * dpitch + x) + 2]; } } PXCMPointF32[] ccords = new PXCMPointF32[dwidth * dheight]; projection.MapDepthToColor(dcords, ccords); /* Draw dots onto the color pixels */ for (Int32 y = 0, k = 0; y < dheight; y++) { for (Int32 x = 0; x < dwidth; x++, k++) { UInt16 d = isdepth ? dpixels[y * dpitch + x] : dpixels[3 * (y * dpitch + x) + 2]; if (d == invalid_value) continue; // no mapping based on unreliable depth values Int32 xx = (Int32)ccords[k].x, yy = (Int32)ccords[k].y; PlotXY(cpixels, xx, yy, cwidth, cheight, dots, 2); } } } return cpixels; }
/// <summary>
/// Resolves the tracked blob's 3D position from the depth stream and writes
/// it — clamped and normalized to the configured real-world box — into the
/// given TrackTrigger. Returns true on success; sets trigger.ErrorDetected
/// when the depth stream or blob module is not available.
/// </summary>
public override bool Process(Trigger trigger)
{
    trigger.ErrorDetected = false;
    if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.VideoDepthStream))
    {
        trigger.ErrorDetected = true;
        Debug.LogError("Blob Analysis Module Not Set");
        return(false);
    }
    if (!(trigger is TrackTrigger))
    {
        trigger.ErrorDetected = true;
        return(false);
    }
    bool success = false;
    // make sure we have valid values
    if (RealWorldBoxDimensions.x <= 0)
    {
        RealWorldBoxDimensions.x = 1;
    }
    if (RealWorldBoxDimensions.y <= 0)
    {
        RealWorldBoxDimensions.y = 1;
    }
    if (RealWorldBoxDimensions.z <= 0)
    {
        RealWorldBoxDimensions.z = 1;
    }
    if (SenseToolkitManager.Instance != null && SenseToolkitManager.Instance.Initialized
        && SenseToolkitManager.Instance.BlobExtractor != null
        && SenseToolkitManager.Instance.ImageDepthOutput != null)
    {
        // Setting max distance for this rule and process the image
        PXCMBlobExtractor.BlobData _blobData = new PXCMBlobExtractor.BlobData();
        // MaxDistance scaled by 10 — presumably a cm-to-mm conversion; confirm against the SDK units.
        SenseToolkitManager.Instance.BlobExtractor.SetMaxDistance(MaxDistance * 10);
        var sts = SenseToolkitManager.Instance.BlobExtractor.ProcessImage(SenseToolkitManager.Instance.ImageDepthOutput);
        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR && SenseToolkitManager.Instance.BlobExtractor.QueryNumberOfBlobs() > 0)
        {
            if (BlobIndex >= SenseToolkitManager.Instance.BlobExtractor.QueryNumberOfBlobs())
            {
                return(false);
            }
            // Scratch Y8 image required by QueryBlobData; disposed immediately after.
            PXCMImage.ImageInfo info = SenseToolkitManager.Instance.ImageDepthOutput.QueryInfo();
            info.format = PXCMImage.PixelFormat.PIXEL_FORMAT_Y8;
            PXCMImage new_image = SenseToolkitManager.Instance.SenseManager.session.CreateImage(info);
            // Process Tracking
            SenseToolkitManager.Instance.BlobExtractor.QueryBlobData(BlobIndex, new_image, out _blobData);
            new_image.Dispose();
            TrackTrigger specificTrigger = (TrackTrigger)trigger;
            PXCMPointI32 trackedPoint = BlobUtilityClass.GetTrackedPoint(_blobData, BlobPointToTrack);
            PXCMImage.ImageData data;
            SenseToolkitManager.Instance.ImageDepthOutput.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out data);
            // Lazily allocate the reusable depth buffer.
            if (_depthArray == null)
            {
                _depthArray = new float[SenseToolkitManager.Instance.ImageDepthOutput.info.width * SenseToolkitManager.Instance.ImageDepthOutput.info.height];
            }
            data.ToFloatArray(0, _depthArray);
            // Depth value at the tracked blob point (row-major indexing).
            float depth = _depthArray[(int)trackedPoint.x + (int)trackedPoint.y * SenseToolkitManager.Instance.ImageDepthOutput.info.width];
            if (_pos_uvz == null)
            {
                _pos_uvz = new PXCMPoint3DF32[1] { new PXCMPoint3DF32() };
            }
            _pos_uvz[0].x = trackedPoint.x;
            _pos_uvz[0].y = trackedPoint.y;
            _pos_uvz[0].z = depth;
            if (_pos3d == null)
            {
                _pos3d = new PXCMPoint3DF32[1] { new PXCMPoint3DF32() };
            }
            // Project the image-space point (with depth) into camera space.
            SenseToolkitManager.Instance.Projection.ProjectDepthToCamera(_pos_uvz, _pos3d);
            // Scene coordinates: X mirrored, all axes scaled by 1/10
            // (presumably mm to cm — confirm).
            Vector3 position = new Vector3();
            position.x = -_pos3d[0].x / 10;
            position.y = _pos3d[0].y / 10;
            position.z = _pos3d[0].z / 10;
            SenseToolkitManager.Instance.ImageDepthOutput.ReleaseAccess(data);
            TrackingUtilityClass.ClampToRealWorldInputBox(ref position, RealWorldBoxCenter, RealWorldBoxDimensions);
            TrackingUtilityClass.Normalize(ref position, RealWorldBoxCenter, RealWorldBoxDimensions);
            if (!float.IsNaN(position.x) && !float.IsNaN(position.y) && !float.IsNaN(position.z))
            {
                specificTrigger.Position = position;
            }
            else
            {
                return(false);
            }
            success = true;
        }
    }
    else
    {
        return(false);
    }
    return(success);
}
public byte[] DepthToColorCoordinatesByUVMAP(PXCMImage color, PXCMImage depth, int dots, out int cwidth, out int cheight) { /* Retrieve the color pixels */ byte[] cpixels = color.GetRGB32Pixels(out cwidth, out cheight); if (cpixels == null) return cpixels; /* Retrieve the depth pixels and uvmap */ PXCMImage.ImageData ddata; UInt16[] dpixels; // float[] uvmap; bool isdepth = (depth.info.format == PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH); if (depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, out ddata) >= pxcmStatus.PXCM_STATUS_NO_ERROR) { Int32 dpitch = ddata.pitches[0] / sizeof(short); /* aligned width */ Int32 dwidth = (Int32)depth.info.width; Int32 dheight = (Int32)depth.info.height; dpixels = ddata.ToUShortArray(0, isdepth ? dpitch * dheight : dpitch * dheight * 3); projection.QueryUVMap(depth, uvmap); int uvpitch = depth.QueryInfo().width; depth.ReleaseAccess(ddata); /* Draw dots onto the color pixels */ for (Int32 y = 0; y < dheight; y++) { for (Int32 x = 0; x < dwidth; x++) { UInt16 d = isdepth ? dpixels[y * dpitch + x] : dpixels[3 * (y * dpitch + x) + 2]; if (d == invalid_value) continue; // no mapping based on unreliable depth values float uvx = uvmap[y * uvpitch + x].x, uvy = uvmap[y * uvpitch + x].y; Int32 xx = (Int32)(uvx * cwidth), yy = (Int32)(uvy * cheight); PlotXY(cpixels, xx, yy, cwidth, cheight, dots, 1); } } } return cpixels; }
/// <summary>
/// Blocking gesture-tracking loop: creates an SDK session and gesture module,
/// pairs it with a capture device, then streams frames until _tracking is
/// cleared — raising OnMovement for the primary hand inside the field-of-view
/// window and OnOpenClose on openness-state transitions. Device loss is
/// reported once and the loop keeps retrying until the device returns.
/// </summary>
public void Start()
{
    //create session
    PXCMSession session;
    pxcmStatus status = PXCMSession.CreateInstance(out session);
    if (IsError(status))
    {
        OnError(CamEvent.FAILED_TO_CREATE_SDK_SESSION);
        return;
    }
    //create gesture-module
    PXCMBase gestureBase;
    status = session.CreateImpl(PXCMGesture.CUID, out gestureBase);
    if (IsError(status))
    {
        OnError(CamEvent.FAILED_TO_LOAD_GESTURE_RECOGNITION);
        session.Dispose();
        return;
    }
    //create gesture-profile
    PXCMGesture gesture = (PXCMGesture)gestureBase;
    PXCMGesture.ProfileInfo profileInfo;
    status = gesture.QueryProfile(0, out profileInfo);
    profileInfo.activationDistance = 70;
    //setup gesture-capture
    UtilMCapture capture = new UtilMCapture(session);
    status = capture.LocateStreams(ref profileInfo.inputs);
    if (IsError(status))
    {
        OnError(CamEvent.FAILED_TO_LOCATE_CAPTURE_MODULE);
        gesture.Dispose();
        capture.Dispose();
        session.Dispose();
        return;
    }
    status = gesture.SetProfile(ref profileInfo);
    status = gesture.SubscribeAlert(this.OnAlertHandler);
    status = gesture.SubscribeGesture(100, this.OnGesureHandler);
    //start capture of frames
    bool device_lost = false;
    PXCMImage[] images = new PXCMImage[PXCMCapture.VideoStream.STREAM_LIMIT];
    PXCMScheduler.SyncPoint[] syncPoints = new PXCMScheduler.SyncPoint[2];
    while (_tracking)
    {
        status = capture.ReadStreamAsync(images, out syncPoints[0]);
        if (IsError(status))
        {
            if (status == pxcmStatus.PXCM_STATUS_DEVICE_LOST)
            {
                // Device unplugged: report once, then keep retrying.
                if (!device_lost)
                {
                    OnError(CamEvent.DEVICE_DISCONNECTED);
                }
                device_lost = true;
                continue;
            }
            OnError(CamEvent.DEVICE_FAILED);
            break;
        }
        if (device_lost)
        {
            OnNotify(CamEvent.DEVICE_RECONNECTED);
            device_lost = false;
        }
        status = gesture.ProcessImageAsync(images, out syncPoints[1]);
        if (IsError(status))
        {
            break;
        }
        // Wait for capture and gesture processing to complete.
        PXCMScheduler.SyncPoint.SynchronizeEx(syncPoints);
        if (syncPoints[0].Synchronize(0) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            PXCMGesture.GeoNode data;
            status = gesture.QueryNodeData(0, PXCMGesture.GeoNode.Label.LABEL_BODY_HAND_PRIMARY, out data);
            if (!IsError(status))
            {
                if (ShapeHelper.IsPointInsideRect(data.positionImage.x, data.positionImage.y, Constants.FoVWindow))
                {
                    //adjust the point to field-of-view window
                    Point cameraPoint = new Point(data.positionImage.x - Constants.FoVWindow.X, data.positionImage.y - Constants.FoVWindow.Y);
                    //cameraPoint = ShapeHelper.RotatePoint(cameraPoint, Constants.FoVCenter, Constants.RotationAngle);
                    OnMovement(cameraPoint);
                    // Only raise open/close on state transitions.
                    if (data.opennessState != _previousOpenness)
                    {
                        OnOpenClose(data.opennessState, data.openness);
                        _previousOpenness = data.opennessState;
                    }
                }
                else
                {
                    OnNotify(CamEvent.HOVERING_OUTSIDE);
                }
            }
        }
        // Release per-frame resources before the next iteration.
        foreach (PXCMScheduler.SyncPoint p in syncPoints)
        {
            if (p != null)
            {
                p.Dispose();
            }
        }
        foreach (PXCMImage img in images)
        {
            if (img != null)
            {
                img.Dispose();
            }
        }
    }
    if (gesture != null)
    {
        gesture.Dispose();
    }
    if (capture != null)
    {
        capture.Dispose();
    }
    if (session != null)
    {
        session.Dispose();
    }
}
private static string PixelFormat2String(PXCMImage.PixelFormat format) { switch (format) { case PXCMImage.PixelFormat.PIXEL_FORMAT_YUY2: return "YUY2"; case PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32: return "RGB32"; case PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24: return "RGB24"; } return "NA"; }
public Bitmap ConvertImageToBitmap(PXCMImage image) { PXCMImage.ImageData data; image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.ColorFormat.COLOR_FORMAT_RGB32, out data); Bitmap bitmap = new Bitmap((int)image.imageInfo.width, (int)image.imageInfo.height, data.buffer.pitches[0], PixelFormat.Format32bppRgb, data.buffer.planes[0]); image.ReleaseAccess(ref data); return bitmap; }
public static string ToString(this PXCMImage.PixelFormat format) { return(PXCMImage.PixelFormatToString(format)); }
private void DisplayPicture(PXCMImage depth, PXCMGesture gesture) { PXCMImage image = depth; bool dispose = false; if (form.GetLabelmapState()) { if (gesture.QueryBlobImage(PXCMGesture.Blob.Label.LABEL_SCENE,0,out image)<pxcmStatus.PXCM_STATUS_NO_ERROR) return; dispose = true; } PXCMImage.ImageData data; if (image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.ColorFormat.COLOR_FORMAT_RGB32, out data) >= pxcmStatus.PXCM_STATUS_NO_ERROR) { form.DisplayBitmap(data.ToBitmap(image.info.width,image.info.height)); image.ReleaseAccess(ref data); } if (dispose) image.Dispose(); }
/// <summary>
/// Runs the full 3D-scan pipeline: configures capture (live / record /
/// playback), enables the 3D Scan module with the mode selected by
/// scanType ("Object"/"Face"/"Body"/"Head"/"Full"), then streams preview
/// frames until the user stops, saving the mesh on request. Auto-exposure and
/// auto-white-balance are temporarily disabled for rear-facing non-face scans
/// and restored afterwards.
/// </summary>
public void StreamColorDepth(String scanType) /* Stream Color and Depth Synchronously or Asynchronously */
{
    bool sts = true;
    PXCM3DScan.Configuration scan_config = new PXCM3DScan.Configuration();
    String statusString;
    /* Create an instance of the PXCSenseManager interface */
    PXCMSenseManager pp = PXCMSenseManager.CreateInstance();
    if (pp == null)
    {
        form.UpdateStatus("Failed to create sense manager");
        return;
    }
    if (pp.captureManager == null)
    {
        form.UpdateStatus("Capture manager does not exist");
        return;
    }
    if (!form.IsModeLive())
    {
        pp.captureManager.SetFileName(form.GetFileName(), form.IsModeRecord());
    }
    /* Set Input Source */
    PXCMCapture.DeviceInfo dinfo2 = form.GetCheckedDevice();
    if (form.IsModeLive() || form.IsModeRecord())
    {
        pp.captureManager.FilterByDeviceInfo(dinfo2);
    }
    if (form.IsModeRecord())
    {
        // Delay recording frames until the scan starts
        pp.captureManager.SetPause(true);
    }
    else if (!form.IsModeLive())
    {
        // Disable real-time mode if we are playing back a file
        // to ensure that frames are not skipped.
        pp.captureManager.SetRealtime(false);
    }
    /* Set Color & Depth Resolution */
    PXCMCapture.Device.StreamProfile cinfo = form.GetColorConfiguration();
    if (cinfo.imageInfo.format != 0)
    {
        Single cfps = cinfo.frameRate.max;
        pp.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, cinfo.imageInfo.width, cinfo.imageInfo.height, cfps);
    }
    PXCMCapture.Device.StreamProfile dinfo = form.GetDepthConfiguration();
    if (dinfo.imageInfo.format != 0)
    {
        Single dfps = dinfo.frameRate.max;
        pp.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, dinfo.imageInfo.width, dinfo.imageInfo.height, dfps);
    }
    /* Initialization */
    FPSTimer timer = new FPSTimer(form);
    if (form.IsModeLive())
    {
        form.UpdateStatus("Initializing...");
    }
    /* Enable the 3D Scan video module */
    pxcmStatus result = pp.Enable3DScan();
    if (result != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        pp.Close();
        pp.Dispose();
        form.UpdateStatus("Enable3DScan() returned " + result);
        return;
    }
    /* Initialize the camera system */
    result = pp.Init();
    form.UpdateStatus("");
    device = pp.captureManager.device;
    if (result >= pxcmStatus.PXCM_STATUS_NO_ERROR && device != null)
    {
        // Saved auto-exposure/white-balance state, restored after scanning.
        bool bAutoExpAndWBChanged = false;
        bool bAutoExposureEnabled = true;
        bool bAutoWhiteBalanceEnabled = true;
        /* Setup the scanning configuration */
        if (scanType == "Object")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.OBJECT_ON_PLANAR_SURFACE_DETECTION;
        }
        else if (scanType == "Face")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.FACE;
        }
        else if (scanType == "Body")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.BODY;
        }
        else if (scanType == "Head")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.HEAD;
        }
        else if (scanType == "Full")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.VARIABLE;
        }
        /* Select the Targeting Options */
        scan_config.options = PXCM3DScan.ReconstructionOption.NONE;
        if (form.isSolidificationSelected())
        {
            scan_config.options |= (PXCM3DScan.ReconstructionOption.SOLIDIFICATION);
        }
        if (form.isTextureSelected())
        {
            scan_config.options |= (PXCM3DScan.ReconstructionOption.TEXTURE);
        }
        if (form.isLandmarksSelected())
        {
            scan_config.options |= (PXCM3DScan.ReconstructionOption.LANDMARKS);
        }
        //scan_config.useMarker = form.isUseMarkerChecked();
        scan_config.flopPreviewImage = form.isFlopPreviewImageSelected();
        /* Try to initialize the scanning system */
        PXCM3DScan scan = pp.Query3DScan();
        sts = false;
        if (scan == null)
        {
            form.UpdateStatus("3DScan module not found.");
        }
        else
        {
            result = scan.SetConfiguration(scan_config);
            if (result < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                scan.Dispose();
                // Show the configuration related error code
                switch (result)
                {
                    case pxcmStatus.PXCM_STATUS_FEATURE_UNSUPPORTED:
                        form.UpdateStatus("Configuration not supported.");
                        break;
                    case pxcmStatus.PXCM_STATUS_ITEM_UNAVAILABLE:
                        form.UpdateStatus("Face module not found.");
                        break;
                    default:
                        form.UpdateStatus("SetConfiguration returned an error.");
                        break;
                }
            }
            else
            {
                sts = true;
            }
        }
        // Conditionally finish the initialization and enter the main loop
        if (sts == true)
        {
            // Subscribe to recieve range and tracking alerts
            scan.Subscribe(OnAlert);
            Projection projection = new Projection(
                pp.session, device, dinfo.imageInfo);
            Boolean bScanning = false;
            Boolean bPlaybackStarted = false;
            form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_SSd)));
            while (form.reconstruct_requested || !form.GetStopState())
            {
                if (form.GetScanRequested()) /* one time latch */
                {
                    form.Invoke(new Action(() => form.SetScanRequested(false)));
                    // Delay recording frames until the start of the scan is requested
                    if (form.IsModeRecord())
                    {
                        pp.captureManager.SetPause(false);
                    }
                    // If the max tri/vert controls are enabled,
                    // use the set values. Otherwise, disabled decimation
                    // by setting the values to zero.
                    /*
                     * scan_config.maxTriangles = form.getMaxTrianglesEnabledChecked()
                     * ? form.getMaxTriangles() : 0;
                     * scan_config.maxVertices = form.getMaxVerticesEnabledChecked()
                     * ? form.getMaxVertices() : 0;
                     */
                    // Request that the scan starts as soon as possible
                    scan_config.startScan = true;
                    scan.SetConfiguration(scan_config);
                    /* Update the status bar to help users understand what the detector is looking for */
                    if (form.IsModeLive())
                    {
                        if (scan_config.mode == PXCM3DScan.ScanningMode.OBJECT_ON_PLANAR_SURFACE_DETECTION)
                        {
                            form.UpdateStatus("Object not detected. Place object on flat surface in center of view.");
                        }
                    }
                }
                else if (form.reconstruct_requested)
                {
                    sts = SaveMesh(scan);
                }
                /* Get preview image from the 3D Scan video module */
                if (!form.GetStopState())
                {
                    /* Wait until a frame is ready: Synchronized or Asynchronous */
                    if (pp.AcquireFrame() < pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        // Stream ended or failed; in playback mode save what was scanned.
                        projection.Dispose();
                        if (!form.IsModeLive())
                        {
                            form.Invoke(new Action(() => form.EndScan()));
                            sts = SaveMesh(scan);
                        }
                        break;
                    }
                    /* Get preview image from the 3D Scan video module */
                    PXCMImage preview_image = scan.AcquirePreviewImage();
                    pp.ReleaseFrame();
                    /* Display Image and Status */
                    if (preview_image != null)
                    {
                        form.SetBitmap(preview_image);
                        if (scan.IsScanning())
                        {
                            statusString = "Scanning";
                            timer.Tick(statusString + " ");
                            if (bScanning == false) // Lazy initializer
                            {
                                bScanning = true; // One way latch
                                // Once the scanning process starts, we want to enable the Reconstruct button
                                form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_ESe)));
                                // Object, head and body scanning with a rear facing camera involves walking
                                // around the target, which effectivly exposes the camera to the full
                                // environment, similar to a panorama. To avoid undesirable color
                                // inconsistencies (realted to the response of the auto-exposure/wb changes),
                                // it usually works best to disable them.
                                // Note that these property changes are restored (below).
                                if (device.deviceInfo.orientation == PXCMCapture.DeviceOrientation.DEVICE_ORIENTATION_REAR_FACING && scan_config.mode != PXCM3DScan.ScanningMode.FACE && form.IsModeLive())
                                {
                                    bAutoExpAndWBChanged = true;
                                    bAutoExposureEnabled = device.QueryColorAutoExposure();
                                    device.SetColorAutoExposure(false);
                                    bAutoWhiteBalanceEnabled = device.QueryColorAutoWhiteBalance();
                                    device.SetColorAutoWhiteBalance(false);
                                }
                            }
                        }
                        else
                        {
                            if (!form.IsModeLive() && !form.IsModeRecord()) // In playback mode, automatically request the scan
                            {
                                if (bPlaybackStarted == false) // Lazy initializer
                                {
                                    bPlaybackStarted = true; // One way latch
                                    form.scan_requested = true;
                                    form.Invoke(new Action(() => form.StartScanning(false)));
                                    form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_ESe)));
                                }
                            }
                            else
                            {
                                // Not scanning yet: toggle the Reconstruct button
                                // based on scan readiness.
                                if (!form.GetStopState())
                                {
                                    if (isScanReady(form.landmarksChecked()))
                                    {
                                        form.Invoke(new Action(() => form.EnableReconstruction(true)));
                                    }
                                    else
                                    {
                                        form.Invoke(new Action(() => form.EnableReconstruction(false)));
                                    }
                                }
                            }
                        }
                        preview_image.Dispose();
                    }
                }
            }
            projection.Dispose();
            scan.Dispose();
        }
        // Restore the default camera properties
        if (bAutoExpAndWBChanged)
        {
            device.SetColorAutoExposure(bAutoExposureEnabled);
            device.SetColorAutoWhiteBalance(bAutoWhiteBalanceEnabled);
        }
        device.Dispose();
        device = null;
    }
    else
    {
        try { form.UpdateStatus(result + ""); } catch { }
        sts = false;
    }
    if (sts)
    {
        try { form.UpdateStatus(""); } catch { }
    }
    pp.Close();
    pp.Dispose();
    try { form.Invoke(new Action(() => form.ResetStop())); } catch { }
}
/// <summary>
/// Unity teardown hook: releases every SDK module/stream resource this component
/// holds so the camera pipeline can be shut down or restarted. Runs the registered
/// dispose callbacks first, then disposes each module output (pausing hand/blob
/// tracking before releasing their data objects), and disposes the SenseManager last.
/// </summary>
void OnDisable()
{
    //Disposes all modules
    Initialized = false;
    // Nothing to tear down if the pipeline was never created
    if (SenseManager == null)
    {
        return;
    }
    // Run externally registered cleanup callbacks before touching our own handles
    DisposeFunctions.ForEach(i => i.DynamicInvoke());
    if (FaceModuleOutput != null)
    {
        FaceModuleOutput.Dispose();
        FaceModuleOutput = null;
    }
    if (HandDataOutput != null)
    {
        // Pause the hand module before disposing its data object
        SenseManager.PauseHand(true);
        HandDataOutput.Dispose();
        HandDataOutput = null;
    }
    if (BlobDataOutput != null)
    {
        // Pause the blob module before disposing its data object
        SenseManager.PauseBlob(true);
        BlobDataOutput.Dispose();
        BlobDataOutput = null;
    }
    if (ImageRgbOutput != null)
    {
        ImageRgbOutput.Dispose();
        ImageRgbOutput = null;
    }
    if (ImageDepthOutput != null)
    {
        ImageDepthOutput.Dispose();
        ImageDepthOutput = null;
    }
    if (ImageIROutput != null)
    {
        ImageIROutput.Dispose();
        ImageIROutput = null;
    }
    if (Image3DSegmentationOutput != null)
    {
        Image3DSegmentationOutput.Dispose();
        Image3DSegmentationOutput = null;
    }
    if (Projection != null)
    {
        Projection.Dispose();
        Projection = null;
    }
    /* GZ
     * if (BlobExtractor != null)
     * {
     *     BlobExtractor.Dispose();
     *     BlobExtractor = null;
     * }
     */
    // Drop cached buffers; Update() lazily reallocates them on demand
    UvMap = null;
    PointCloud = null;
    // Dispose the SenseManager last: the module handles above may still reference it
    SenseManager.Dispose();
    SenseManager = null;
}
/// <summary>
/// Streams color/depth/IR frames synchronously or asynchronously until
/// <c>Stop</c> is set, rendering each enabled panel via the
/// <c>RenderFrame</c> event and reporting progress through <c>SetStatus</c>.
/// Honors optional playback/record file, device filter, and per-stream
/// profile requests, and keeps the device mirror mode in sync with
/// <c>Mirror</c> while streaming.
/// </summary>
public void StreamColorDepth() /* Stream Color and Depth Synchronously or Asynchronously */
{
    try
    {
        bool sts = true;

        /* Create an instance of the PXCMSenseManager interface */
        PXCMSenseManager sm = PXCMSenseManager.CreateInstance();
        if (sm == null)
        {
            SetStatus("Failed to create an SDK pipeline object");
            return;
        }

        /* FIX: previously sm.Dispose() only ran on the straight-line path, so any
         * exception thrown during setup or streaming leaked the pipeline object.
         * The try/finally guarantees disposal on every exit. */
        try
        {
            /* Optional: if playback or recording */
            if ((Playback || Record) && File != null)
            {
                sm.captureManager.SetFileName(File, Record);
            }

            /* Optional: set the input source (not applicable in playback mode) */
            if (!Playback && DeviceInfo != null)
            {
                sm.captureManager.FilterByDeviceInfo(DeviceInfo);
            }

            /* Set color & depth resolution and enable streams */
            if (StreamProfileSet != null)
            {
                /* Optional: filter the data based on the request */
                sm.captureManager.FilterByStreamProfiles(StreamProfileSet);

                /* Enable raw data streaming for each requested stream type */
                for (int s = 0; s < PXCMCapture.STREAM_LIMIT; s++)
                {
                    PXCMCapture.StreamType st = PXCMCapture.StreamTypeFromIndex(s);
                    PXCMCapture.Device.StreamProfile info = StreamProfileSet[st];
                    if (info.imageInfo.format != 0)
                    {
                        /* For a simple request, sm.EnableStream(...) also works;
                         * DataDesc is used here to pin rate, size and options exactly. */
                        PXCMVideoModule.DataDesc desc = new PXCMVideoModule.DataDesc();
                        desc.streams[st].frameRate.min = desc.streams[st].frameRate.max = info.frameRate.max;
                        desc.streams[st].sizeMin.height = desc.streams[st].sizeMax.height = info.imageInfo.height;
                        desc.streams[st].sizeMin.width = desc.streams[st].sizeMax.width = info.imageInfo.width;
                        desc.streams[st].options = info.options;
                        sm.EnableStreams(desc);
                    }
                }
            }

            /* Initialization */
            Timer timer = new Timer();
            timer.UpdateStatus += UpdateStatus;
            SetStatus("Init Started");
            if (sm.Init() >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                /* Reset all properties */
                sm.captureManager.device.ResetProperties(PXCMCapture.StreamType.STREAM_TYPE_ANY);

                /* Set mirror mode */
                PXCMCapture.Device.MirrorMode mirror = Mirror
                    ? PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL
                    : PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED;
                sm.captureManager.device.SetMirrorMode(mirror);

                SetStatus("Streaming");
                while (!Stop)
                {
                    /* Wait until a frame is ready: synchronized or asynchronous */
                    if (sm.AcquireFrame(Synced).IsError())
                    {
                        break;
                    }

                    /* Display images */
                    PXCMCapture.Sample sample = sm.QuerySample();

                    /* Snapshot the delegate once so all panels this frame see the same subscribers */
                    EventHandler<RenderFrameEventArgs> render = RenderFrame;
                    PXCMImage image = null;
                    if (ColorPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        image = sample[ColorPanel];
                        render(this, new RenderFrameEventArgs(0, image));
                    }
                    if (DepthPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(1, sample[DepthPanel]));
                    }
                    if (IRPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(2, sample[IRPanel]));
                    }
                    if (IRLeftPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(3, sample[IRLeftPanel]));
                    }
                    if (IRRightPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(4, sample[IRRightPanel]));
                    }

                    /* Optional: re-apply mirror state if the UI toggled it mid-stream */
                    mirror = Mirror
                        ? PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL
                        : PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED;
                    if (mirror != sm.captureManager.device.QueryMirrorMode())
                    {
                        sm.captureManager.device.SetMirrorMode(mirror);
                    }

                    /* Optional: show performance tick */
                    if (image != null)
                    {
                        timer.Tick(PXCMImage.PixelFormatToString(image.info.format) + " " + image.info.width + "x" + image.info.height);
                    }

                    sm.ReleaseFrame();
                }
            }
            else
            {
                SetStatus("Init Failed");
                sts = false;
            }
        }
        finally
        {
            sm.Dispose();
        }

        if (sts)
        {
            SetStatus("Stopped");
        }
    }
    catch (Exception e)
    {
        // NOTE(review): only the exception type is surfaced, matching the
        // original behavior; the message is intentionally not shown.
        SetStatus(e.GetType().ToString());
    }
}
/// <summary>
/// Unity per-frame driver. First reconciles module enable/pause state against
/// reference counts, then (if the pipeline is initialized and at least one
/// module is enabled) acquires a frame, refreshes each enabled output image /
/// module data object, and releases the frame. Speech recognition results are
/// polled outside the frame lock.
/// </summary>
void Update()
{
    //Dynamically Pause/Enable Modules
    int numberOfEnabledModules = 0;
    foreach (var option in _senseOptions)
    {
        if (option.RefCounter == 0 && option.Enabled)
        {
            // No more consumers: pause the SDK module (CUID > 0 means a real module) and mark disabled
            if (option.ModuleCUID > 0)
            {
                SenseManager.PauseModule(option.ModuleCUID, true);
            }
            option.Enabled = false;
        }
        else if (option.RefCounter > 0 && !option.Enabled)
        {
            // A consumer appeared for a module that was never initialized:
            // restart the whole pipeline so the module gets set up
            if (!option.Initialized)
            {
                OnDisable();
                OnEnable();
                Start();
            }
            if (option.ModuleCUID > 0)
            {
                SenseManager.PauseModule(option.ModuleCUID, false);
            }
            option.Enabled = true;
        }
        if (option.Enabled)
        {
            numberOfEnabledModules++;
        }
    }
    //Update Speech commands if changed
    // NOTE(review): _speechCommandsChanged is not cleared here — presumably
    // UpdateSpeechCommands() resets it; confirm, otherwise this runs every frame.
    if (_speechCommandsChanged)
    {
        UpdateSpeechCommands();
        SpeechManager.Reset();
    }
    // Every frame update all the data
    if (Initialized && numberOfEnabledModules > 0)
    {
        // Non-blocking acquire (timeout 0)
        _sts = SenseManager.AcquireFrame(true, 0);
        // NOTE(review): strict == means frames returned with warning statuses
        // (> NO_ERROR) are skipped entirely — confirm this is intended.
        if (_sts == pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Enabled)
            {
                // Release the previous frame's image before taking a reference to the new one
                if (ImageRgbOutput != null)
                {
                    ImageRgbOutput.Dispose();
                }
                // _captureSample is fetched lazily and shared by all stream branches below
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.color != null)
                {
                    ImageRgbOutput = _captureSample.color;
                    // AddRef keeps the image alive past ReleaseFrame()
                    ImageRgbOutput.AddRef();
                }
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Enabled ||
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.PointCloud).Enabled)
            {
                if (ImageDepthOutput != null)
                {
                    ImageDepthOutput.Dispose();
                }
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.depth != null)
                {
                    ImageDepthOutput = _captureSample.depth;
                    ImageDepthOutput.AddRef();
                    /* GZ
                     * if (!_isInitBlob)
                     * {
                     *     PXCMImage.ImageInfo info = ImageDepthOutput.QueryInfo();
                     *     BlobExtractor.Init(info);
                     *     BlobExtractor.SetMaxBlobs(MaxBlobsToDetect);
                     *     _isInitBlob = true;
                     * }
                     */
                    // Lazily allocate one point per depth pixel
                    if (PointCloud == null)
                    {
                        PointCloud = new PXCMPoint3DF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                    }
                    if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.PointCloud).Enabled)
                    {
                        // NOTE(review): redundant — PointCloud was already allocated just above
                        if (PointCloud == null)
                        {
                            PointCloud = new PXCMPoint3DF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                        }
                        _sts = Projection.QueryVertices(ImageDepthOutput, PointCloud);
                    }
                    if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.UVMap).Enabled)
                    {
                        if (UvMap == null)
                        {
                            UvMap = new PXCMPointF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                        }
                        Projection.QueryUVMap(ImageDepthOutput, UvMap);
                    }
                }
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Enabled)
            {
                if (ImageIROutput != null)
                {
                    ImageIROutput.Dispose();
                }
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.ir != null)
                {
                    ImageIROutput = _captureSample.ir;
                    ImageIROutput.AddRef();
                }
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Enabled)
            {
                if (Image3DSegmentationOutput != null)
                {
                    Image3DSegmentationOutput.Dispose();
                }
                PXCM3DSeg seg = SenseManager.Query3DSeg();
                if (seg != null)
                {
                    Image3DSegmentationOutput = seg.AcquireSegmentedImage();
                }
            }
            // Module data objects refresh themselves from the current frame
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Face).Enabled)
            {
                FaceModuleOutput.Update();
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Hand).Enabled)
            {
                HandDataOutput.Update();
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Blob).Enabled)
            {
                BlobDataOutput.Update();
            }
            // Invalidate the cached sample and unlock the frame for the next acquire
            _captureSample = null;
            SenseManager.ReleaseFrame();
        }
        //Speech
        if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Enabled)
        {
            SpeechManager.QueryRecognizedCommands(out SpeechOutput);
        }
    }
}
/// <summary>
/// Runs the emotion-detection pipeline end to end: creates an SDK session,
/// instantiates the selected emotion module, opens the capture source
/// (record file / playback file / live device), pairs a module profile with
/// the capture streams, then streams asynchronously until the form requests
/// a stop — displaying the color image and detected emotions each frame.
/// All SDK objects are disposed before returning.
/// </summary>
public void AdvancedPipeline()
{
    PXCMSession session;
    pxcmStatus sts = PXCMSession.CreateInstance(out session);
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        form.UpdateStatus("Failed to create an SDK session");
        return;
    }
    /* Set Module */
    PXCMSession.ImplDesc desc = new PXCMSession.ImplDesc();
    // Restrict module creation to the implementation the user picked in the UI
    desc.friendlyName.set(form.GetCheckedModule());
    PXCMEmotion emotionDet;
    sts = session.CreateImpl <PXCMEmotion>(ref desc, PXCMEmotion.CUID, out emotionDet);
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        form.UpdateStatus("Failed to create the emotionDet module");
        session.Dispose();
        return;
    }
    // Choose the capture source: record-to-file, playback-from-file, or live device
    UtilMCapture capture = null;
    if (form.GetRecordState())
    {
        capture = new UtilMCaptureFile(session, form.GetFileName(), true);
        capture.SetFilter(form.GetCheckedDevice());
    }
    else if (form.GetPlaybackState())
    {
        capture = new UtilMCaptureFile(session, form.GetFileName(), false);
    }
    else
    {
        capture = new UtilMCapture(session);
        capture.SetFilter(form.GetCheckedDevice());
    }
    // NOTE(review): "moudle" typo is in a runtime UI string; left untouched here
    form.UpdateStatus("Pair moudle with I/O");
    // Walk the module's profiles until one can be satisfied by the capture streams
    for (uint i = 0; ; i++)
    {
        PXCMEmotion.ProfileInfo pinfo;
        sts = emotionDet.QueryProfile(i, out pinfo);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;  // no more profiles
        }
        sts = capture.LocateStreams(ref pinfo.inputs);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            continue;  // this profile's input requirements can't be met; try next
        }
        sts = emotionDet.SetProfile(ref pinfo);
        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;  // paired successfully
        }
    }
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        form.UpdateStatus("Failed to pair the emotionDet module with I/O");
        capture.Dispose();
        emotionDet.Dispose();
        session.Dispose();
        return;
    }
    form.UpdateStatus("Streaming");
    PXCMImage[] images = new PXCMImage[PXCMCapture.VideoStream.STREAM_LIMIT];
    PXCMScheduler.SyncPoint[] sps = new PXCMScheduler.SyncPoint[2];
    while (!form.stop)
    {
        // Release the previous iteration's images and sync points before reuse
        PXCMImage.Dispose(images);
        PXCMScheduler.SyncPoint.Dispose(sps);
        // Kick off the capture read; sps[0] completes when images are ready
        sts = capture.ReadStreamAsync(images, out sps[0]);
        if (DisplayDeviceConnection(sts == pxcmStatus.PXCM_STATUS_DEVICE_LOST))
        {
            continue;  // device temporarily lost; keep polling
        }
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;
        }
        // Kick off emotion processing on the same images; sps[1] completes when done
        sts = emotionDet.ProcessImageAsync(images, out sps[1]);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;
        }
        // Wait for both async operations, then check the capture result specifically
        PXCMScheduler.SyncPoint.SynchronizeEx(sps);
        sts = sps[0].Synchronize();
        if (DisplayDeviceConnection(sts == pxcmStatus.PXCM_STATUS_DEVICE_LOST))
        {
            continue;
        }
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;
        }
        /* Display Results */
        DisplayPicture(capture.QueryImage(images, PXCMImage.ImageType.IMAGE_TYPE_COLOR));
        DisplayLocation(emotionDet);
        form.UpdatePanel();
    }
    // Final cleanup of per-frame and pipeline objects
    PXCMImage.Dispose(images);
    PXCMScheduler.SyncPoint.Dispose(sps);
    capture.Dispose();
    emotionDet.Dispose();
    session.Dispose();
    form.UpdateStatus("Stopped");
}
/// <summary>
/// Background capture loop: acquires frames from the SenseManager, takes the
/// 3D-segmented (background-removed) image, substitutes fully transparent
/// pixels with the selected backdrop bitmap, and pushes the composited result
/// to the WPF UI thread. Optionally saves a JPEG snapshot when requested.
/// Runs until AcquireFrame reports an error.
/// </summary>
private void AcquireThread()
{
    // Stream data
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Brief pause after a click event (one-shot latch set elsewhere)
        if (click == true)
        {
            Thread.Sleep(500);
            click = false;
        }

        // Retrieve the results
        PXCM3DSeg segmentation = senseManager.Query3DSeg();
        if (segmentation != null)
        {
            // Get the segmented image
            PXCMImage segmentedImage = segmentation.AcquireSegmentedImage();
            if (segmentedImage != null)
            {
                // Access the segmented image data
                PXCMImage.ImageData segmentedImageData;
                pxcmStatus access = segmentedImage.AcquireAccess(PXCMImage.Access.ACCESS_READ,
                    PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out segmentedImageData);
                // FIX: the AcquireAccess status was previously ignored; on failure the
                // code went on to dereference segmentedImageData.planes[0].
                if (access >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    // Lock the backdrop image bitmap bits into system memory and access its data
                    // (Reference: https://msdn.microsoft.com/en-us/library/5ey6h79d%28v=vs.110%29.aspx
                    // (Reference: http://csharpexamples.com/fast-image-processing-c/)
                    Rectangle imageRect = new Rectangle(0, 0, WIDTH, HEIGHT);
                    BitmapData backdropBitmapData = backdrop.LockBits(imageRect, ImageLockMode.ReadWrite, backdrop.PixelFormat);
                    try
                    {
                        int bytesPerPixel = Bitmap.GetPixelFormatSize(backdropBitmapData.PixelFormat) / 8;
                        int widthInBytes = WIDTH * bytesPerPixel;
                        for (int h = 0; h < HEIGHT; h++)
                        {
                            // Use unsafe keyword to work with pointers for faster image processing
                            // (Required setting: Project -> Properties -> Build -> Allow unsafe code)
                            unsafe
                            {
                                byte *segmentedImagePixel = (byte *)segmentedImageData.planes[0] + h * segmentedImageData.pitches[0];
                                // FIX: row base pointer hoisted out of the inner loop; it was
                                // recomputed per pixel even though it only depends on h.
                                byte *backdropRow = (byte *)backdropBitmapData.Scan0 + (h * backdropBitmapData.Stride);
                                for (int w = 0; w < widthInBytes; w = w + bytesPerPixel)
                                {
                                    // Substitute segmented background pixels (alpha channel of zero)
                                    // with pixels from the selected backdrop image
                                    if (segmentedImagePixel[3] == 0)
                                    {
                                        segmentedImagePixel[0] = backdropRow[w];
                                        segmentedImagePixel[1] = backdropRow[w + 1];
                                        segmentedImagePixel[2] = backdropRow[w + 2];
                                    }
                                    segmentedImagePixel += 4;  // segmented image is RGB32: 4 bytes/pixel
                                }
                            }
                        }
                    }
                    finally
                    {
                        // FIX: unlock in finally so the backdrop bitmap is never left
                        // locked if the pixel loop throws.
                        backdrop.UnlockBits(backdropBitmapData);
                    }

                    // Export the image data to a bitmap
                    Bitmap bitmap = segmentedImageData.ToBitmap(0, segmentedImage.info.width, segmentedImage.info.height);

                    // Update the UI by delegating work to the Dispatcher associated with the UI thread
                    this.Dispatcher.Invoke(System.Windows.Threading.DispatcherPriority.Normal,
                        new Action(delegate() { imgBackdrop.Source = ImageUtils.ConvertBitmapToWpf(bitmap); }));

                    // Optionally save a snapshot of the image (captureSnapshot is set in the Capture button's event handler)
                    if (captureSnapshot)
                    {
                        bitmap.Save(path + "MyPic.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                        captureSnapshot = false;
                    }

                    segmentedImage.ReleaseAccess(segmentedImageData);
                    bitmap.Dispose();
                }
                segmentedImage.Dispose();
            }
            // FIX: previously segmentation was only disposed when a segmented image
            // was obtained, leaking the object whenever AcquireSegmentedImage
            // returned null.
            segmentation.Dispose();
        }

        // Resume next frame processing
        senseManager.ReleaseFrame();
    }
}