private void KinectOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    using (DepthImageFrame temp = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (temp == null)
            return;

        short[] depthData = new short[640 * 480];
        byte[] depthColorData = new byte[640 * 480 * 4];
        temp.CopyPixelDataTo(depthData);

        for (int i = 0, i32 = 0; i < depthData.Length && i32 < depthColorData.Length; i++, i32 += 4)
        {
            // Keep only the depth value (drop the player index bits)
            int realDepth = depthData[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

            // Map the depth to a 256-level grayscale value
            byte intensity = (byte)(255 - (255 * realDepth / 4095));
            depthColorData[i32] = intensity;
            depthColorData[i32 + 1] = intensity;
            depthColorData[i32 + 2] = intensity;
        }

        // Convert the depth array to a Bitmap and display it
        this.pictureBox1.Image = ConvertToBitmap(depthColorData, 640, 480);
    }
}
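// ConvertToBitmap is not shown in the snippet above. A minimal sketch of what such a
// helper could look like, assuming the buffer holds 32 bits per pixel in BGR(A) order
// and that the stride equals width * 4 (true for 640x480). The name and signature come
// from the call site; the body is an assumption, not the original code.
// Requires: using System.Drawing; using System.Drawing.Imaging;
private static Bitmap ConvertToBitmap(byte[] pixels, int width, int height)
{
    // Format32bppRgb ignores the (unset) alpha byte written by the caller
    Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format32bppRgb);

    BitmapData bitmapData = bitmap.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.WriteOnly,
        bitmap.PixelFormat);

    // Copy the raw pixel buffer straight into the bitmap's backing memory
    System.Runtime.InteropServices.Marshal.Copy(pixels, 0, bitmapData.Scan0, pixels.Length);

    bitmap.UnlockBits(bitmapData);
    return bitmap;
}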
/// <summary>
/// Event handler for Kinect sensor's DepthFrameReady event
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            // Copy the pixel data from the image to a temporary array
            depthFrame.CopyPixelDataTo(this.depthPixels);

            // The depth frame is 640 pixels wide and 480 pixels tall
            int width = 640;
            int height = 480;
            int start = width * (height / 2 - 1); // start at the beginning of the middle line

            for (int i = 0; i < 480; ++i)
            {
                // Discard the portion of the depth that contains only the player index
                short depth = (short)(this.depthPixels[i + start] >> DepthImageFrame.PlayerIndexBitmaskWidth);

                // Find the angle to the left (if negative) or right (if positive) of the depth.
                // The Kinect's FOV is 58 degrees, so we halve it because it's going to be either
                // left or right. Divide by 29.0 to keep the fractional degrees.
                double angle = (i - 240) / 29.0 * Math.PI / 180;

                double xFromRBot = Math.Sin(angle) * depth;
                double yFromRBot = Math.Cos(angle) * depth;

                Vector2D posFromRBot = new Vector2D(xFromRBot, yFromRBot);
                posFromRBot.translate(rbotPos);
                posFromRBot.rotate(rbotAngle);
                Vector2D absolutePos = posFromRBot;
            }
        }
    }
}
void Sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame imageFrame = e.OpenDepthImageFrame())
    {
        if (imageFrame != null)
        {
            depthMap = new Texture2D(Game.GraphicsDevice, imageFrame.Width, imageFrame.Height, false, SurfaceFormat.Color);

            short[] data = new short[imageFrame.PixelDataLength];
            Color[] depthData = new Color[imageFrame.Width * imageFrame.Height];

            imageFrame.CopyPixelDataTo(data);
            ConvertDepthFrame(data, Sensor.DepthStream, ref depthData);

            depthMap.SetData<Color>(depthData);
        }
        else
        {
            // imageFrame is null because the request did not arrive in time
        }
    }
}
void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    if (this.DepthFrameReady != null)
    {
        this.DepthFrameReady(this, e);
    }
}
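// All of these handlers assume a sensor whose depth stream has been enabled and whose
// DepthFrameReady event has been subscribed. A minimal setup sketch for the Kinect SDK v1,
// wiring up the relay handler above; the StartSensor method and sensor field are
// assumptions, not taken from the original sources.
// Requires: using Microsoft.Kinect; using System.Linq;
private KinectSensor sensor;

private void StartSensor()
{
    // Pick the first connected sensor, if any
    this.sensor = KinectSensor.KinectSensors.FirstOrDefault(s => s.Status == KinectStatus.Connected);
    if (this.sensor == null)
        return;

    // Enable the depth stream at 640x480 @ 30 fps and hook the frame-ready event
    this.sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
    this.sensor.DepthFrameReady += this.sensor_DepthFrameReady;
    this.sensor.Start();
}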
private void DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    DepthImageFrame frame = e.OpenDepthImageFrame();
    if (frame != null)
    {
        if (this.first || frame.Format != this.format)
        {
            this.InitBuffers(frame);
            this.DisposeTextures();
        }

        this.FInvalidate = true;
        this.frameindex = frame.FrameNumber;

        lock (m_lock)
        {
            frame.CopyDepthImagePixelDataTo(this.depthpixels);
            for (int i16 = 0; i16 < this.width * this.height; i16++)
            {
                this.rawdepth[i16] = this.depthpixels[i16].Depth;
            }
        }

        frame.Dispose();
    }
}
unsafe void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var image = e.OpenDepthImageFrame())
    {
        if (image != null)
        {
            var data = new short[image.PixelDataLength];
            image.CopyPixelDataTo(data);

            BitmapData bitmapData = this.CurrentValue.LockBits(
                new System.Drawing.Rectangle(0, 0, this.Width, this.Height),
                ImageLockMode.WriteOnly,
                System.Drawing.Imaging.PixelFormat.Format24bppRgb);

            int pointer = 0;
            int width = this.Width;
            int height = this.Height;

            for (int y = 0; y < height; y++)
            {
                byte* pDest = (byte*)bitmapData.Scan0.ToPointer() + y * bitmapData.Stride;
                for (int x = 0; x < width; x++, pointer++, pDest += 3)
                {
                    int realDepth = data[pointer] >> DepthImageFrame.PlayerIndexBitmaskWidth;
                    byte intensity = (byte)(~(realDepth >> 4));
                    pDest[0] = intensity;
                    pDest[1] = intensity;
                    pDest[2] = intensity;
                }
            }

            this.CurrentValue.UnlockBits(bitmapData);
            this.OnNewDataAvailable();
        }
    }
}
void kinectSensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var frame = e.OpenDepthImageFrame())
    {
        if (frame == null)
            return;

        if (depthFrame32 == null)
        {
            pixelData = new short[frame.PixelDataLength];
            depthFrame32 = new byte[frame.Width * frame.Height * sizeof(int)];
        }

        frame.CopyPixelDataTo(pixelData);

        if (bitmap == null)
        {
            bitmap = new WriteableBitmap(frame.Width, frame.Height, 96, 96, PixelFormats.Bgra32, null);
            image.Source = bitmap;
        }

        ConvertDepthFrame(pixelData);

        int stride = bitmap.PixelWidth * sizeof(int);
        Int32Rect dirtyRect = new Int32Rect(0, 0, bitmap.PixelWidth, bitmap.PixelHeight);
        bitmap.WritePixels(dirtyRect, depthFrame32, stride, 0);
    }
}
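// The single-argument ConvertDepthFrame helper used above is not shown. A minimal sketch
// of what it could do, assuming it fills the depthFrame32 BGRA buffer with a grayscale
// rendering of the depth values. The name and signature come from the call site; the
// body is an assumption, not the original code.
private void ConvertDepthFrame(short[] depthData)
{
    for (int i = 0, i32 = 0; i < depthData.Length && i32 < depthFrame32.Length; i++, i32 += 4)
    {
        // Drop the player-index bits to get the depth in millimeters, clamped to ~4 m
        int realDepth = Math.Min(depthData[i] >> DepthImageFrame.PlayerIndexBitmaskWidth, 4095);

        // Scale 0-4095 mm down to a 0-255 intensity (closer = brighter)
        byte intensity = (byte)(255 - (255 * realDepth / 4095));

        depthFrame32[i32] = intensity;      // blue
        depthFrame32[i32 + 1] = intensity;  // green
        depthFrame32[i32 + 2] = intensity;  // red
        depthFrame32[i32 + 3] = 255;        // alpha (the bitmap above is Bgra32)
    }
}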
/// <summary>
/// Frame-ready event for the depth camera
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void kinect_DepthFrameReady( object sender, DepthImageFrameReadyEventArgs e )
{
    using ( var depthFrame = e.OpenDepthImageFrame() )
    {
        if ( depthFrame != null )
        {
            imageDepthCamera.Source = depthFrame.ToBitmapSource();
        }
    }
}
// Frame-ready event for the depth camera
void kinect_DepthFrameReady( object sender, DepthImageFrameReadyEventArgs e )
{
    // DepthImageFrame is IDisposable, so wrap it in a using block
    using ( DepthImageFrame depthFrame = e.OpenDepthImageFrame() )
    {
        if ( depthFrame != null )
        {
            imageDepthCamera.Source = depthFrame.ToBitmapSource();
        }
    }
}
void kinectRuntime_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    var frame = e.OpenDepthImageFrame();
    if (frame == null)
        return;

    depthStreamManager.Update(frame);
}
void kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    ad++;
    if (kinect != null && !depthFramyBusy)
    {
        var ts = new ThreadStart(delegate
        {
            depthFrameSetUp(e.OpenDepthImageFrame(), depthPixels, depthArray, depthTarget);
        });
        new Thread(ts).Start();
    }
}
public void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            // Copy the pixel data from the image to a temporary array
            depthFrame.CopyDepthImagePixelDataTo(this.DepthPixels);

            // Get the min and max reliable depth for the current frame
            int minDepth = depthFrame.MinDepth;
            int maxDepth = depthFrame.MaxDepth;

            // Convert the depth to RGB
            int colorPixelIndex = 0;
            for (int i = 0; i < this.DepthPixels.Length; ++i)
            {
                // Get the depth for this pixel
                short depth = DepthPixels[i].Depth;

                // To convert to a byte, we're discarding the most-significant
                // rather than least-significant bits.
                // We're preserving detail, although the intensity will "wrap."
                // Values outside the reliable depth range are mapped to 0 (black).

                // Note: Using conditionals in this loop could degrade performance.
                // Consider using a lookup table instead when writing production code.
                // See the KinectDepthViewer class used by the KinectExplorer sample
                // for a lookup table example.
                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);

                // Write out blue byte
                this.ColorPixels[colorPixelIndex++] = intensity;

                // Write out green byte
                this.ColorPixels[colorPixelIndex++] = intensity;

                // Write out red byte
                this.ColorPixels[colorPixelIndex++] = intensity;

                // We're outputting BGR; the last byte in the 32 bits is unused, so skip it.
                // If we were outputting BGRA, we would write alpha here.
                ++colorPixelIndex;
            }

            // Write the pixel data into our bitmap
            this.ColorBitmap.WritePixels(
                new Int32Rect(0, 0, this.ColorBitmap.PixelWidth, this.ColorBitmap.PixelHeight),
                this.ColorPixels,
                this.ColorBitmap.PixelWidth * sizeof(int),
                0);
        }
    }
}
static void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
            return;

        short[] bits = new short[depthFrame.PixelDataLength];
        depthFrame.CopyPixelDataTo(bits);

        foreach (var bit in bits)
            Console.Write(bit);
    }
}
void Sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            if (_mode == Mode.Depth)
            {
                camera.Source = frame.ToBitmap();
            }
        }
    }
}
void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
    {
        if (depthImageFrame == null)
        {
            return;
        }

        this.Camera.Source = ImageFrameExtensions.ToBitmapSource(depthImageFrame);
    }
}
/// <summary>
/// DepthImageReady:
/// This function will be called every time a new depth frame is ready
/// </summary>
private void DepthImageReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame imageFrame = e.OpenDepthImageFrame())
    {
        // We expect this to always be true since we are coming from a triggered event
        if (imageFrame != null)
        {
            // Check if the format of the image has changed.
            // This always happens when you run the program for the first time
            // and every time you minimize the window.
            bool NewFormat = this.lastImageFormat != imageFrame.Format;

            if (NewFormat)
            {
                // Update the buffers to the new format
                this.pixelData = new short[imageFrame.PixelDataLength];
                this.depthFrame32 = new byte[imageFrame.Width * imageFrame.Height * Bgr32BytesPerPixel];

                // Create the new Bitmap
                this.outputBitmap = new WriteableBitmap(
                    imageFrame.Width,
                    imageFrame.Height,
                    96, // DpiX
                    96, // DpiY
                    PixelFormats.Bgr32,
                    null);

                this.kinectDepthImage.Source = this.outputBitmap;
            }

            // Copy the stream into its short[] version
            imageFrame.CopyPixelDataTo(this.pixelData);

            // Convert the pixel data into its RGB version.
            // Here is where the magic happens.
            byte[] convertedDepthBits = this.ConvertDepthFrame(this.pixelData, ((KinectSensor)sender).DepthStream);

            // Copy the RGB matrix to the bitmap to make it visible
            this.outputBitmap.WritePixels(
                new Int32Rect(0, 0, imageFrame.Width, imageFrame.Height),
                convertedDepthBits,
                imageFrame.Width * Bgr32BytesPerPixel,
                0);

            // Update the format
            this.lastImageFormat = imageFrame.Format;
        }
        else
        {
            // Since we are coming from a triggered event, we are not expecting
            // to reach this branch, at least for this short tutorial.
        }
    }
}
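// The two-argument ConvertDepthFrame helper used by several snippets here (it returns a
// Bgr32 byte[] for WritePixels) is not shown. A minimal sketch of one possible
// implementation, assuming Bgr32BytesPerPixel == 4 and that the depthFrame32 buffer has
// already been sized by the caller. The name and signature come from the call sites; the
// body is an assumption, not the original code.
private byte[] ConvertDepthFrame(short[] depthFrame, DepthImageStream depthStream)
{
    int minDepth = depthStream.MinDepth;
    int maxDepth = depthStream.MaxDepth;

    for (int i = 0, i32 = 0; i < depthFrame.Length && i32 < this.depthFrame32.Length; i++, i32 += 4)
    {
        int player = depthFrame[i] & DepthImageFrame.PlayerIndexBitmask;
        int realDepth = depthFrame[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // Map the reliable depth range to 0-255; everything else stays black
        byte intensity = 0;
        if (realDepth >= minDepth && realDepth <= maxDepth)
        {
            intensity = (byte)(255 - (255 * (realDepth - minDepth) / (maxDepth - minDepth)));
        }

        this.depthFrame32[i32] = intensity;                                  // blue
        this.depthFrame32[i32 + 1] = intensity;                              // green
        this.depthFrame32[i32 + 2] = (byte)(player > 0 ? 255 : intensity);   // tint player pixels red
        // The fourth byte is unused for Bgr32
    }

    return this.depthFrame32;
}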
void Sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var image = e.OpenDepthImageFrame())
    {
        if (image != null)
        {
            if (this.data == null)
            {
                this.data = new short[image.PixelDataLength];
            }

            image.CopyPixelDataTo(this.data);
            this.ConvertData(this.data);

            this.writeableBitmap.WritePixels(
                new Int32Rect(0, 0, image.Width, image.Height),
                this.depthFrame32,
                image.Width * 4,
                0);

            this.OnNewDataAvailable();
        }
    }
}
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame dFrame = e.OpenDepthImageFrame())
    {
        if (dFrame != null)
        {
            dFrame.CopyPixelDataTo(myArray);

            myBitmap.WritePixels(
                new Int32Rect(0, 0, myBitmap.PixelWidth, myBitmap.PixelHeight),
                myArray,
                myBitmap.PixelWidth * sizeof(short),
                0);
        }
    }
}
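// The myBitmap and myArray fields used above are not shown. A sketch of how they would
// need to be allocated for that WritePixels call to work: a Gray16 WriteableBitmap plus
// a matching short[] buffer. The field names come from the call site; the initialization
// is an assumption, not the original code.
private WriteableBitmap myBitmap;
private short[] myArray;

private void InitializeDepthBitmap(KinectSensor sensor)
{
    // One short per depth pixel, and a 16-bit grayscale bitmap of the same size
    myArray = new short[sensor.DepthStream.FramePixelDataLength];
    myBitmap = new WriteableBitmap(
        sensor.DepthStream.FrameWidth,
        sensor.DepthStream.FrameHeight,
        96, 96,
        PixelFormats.Gray16,
        null);
}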
void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    this.TEST.Content = this.sensor.DepthStream.TooNearDepth;

    using (DepthImageFrame depthimageframe = e.OpenDepthImageFrame())
    {
        if (depthimageframe == null)
        {
            return;
        }

        short[] pixelData = new short[depthimageframe.PixelDataLength];
        int stride = depthimageframe.Width * 2;
        depthimageframe.CopyPixelDataTo(pixelData);

        this.DepthController.Source = BitmapSource.Create(
            depthimageframe.Width,
            depthimageframe.Height,
            96, 96,
            PixelFormats.Gray16,
            null,
            pixelData,
            stride);
    }
}
void Kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    if (ClientList.Count > 0)
    {
        using (DepthImageFrame frame = e.OpenDepthImageFrame())
        {
            this.DepthImageFrame = frame;

            if (frame != null)
            {
                _memoryStream.SetLength(0);

                binaryWriter.Write(DepthImageFrame.PlayerIndexBitmask);
                binaryWriter.Write(DepthImageFrame.PlayerIndexBitmaskWidth);
                // BinaryWriter has no overload that takes a DepthImageFrame, so the original
                // "binaryWriter.Write(frame)" could not compile; writing the frame number
                // here is an assumption about what was intended.
                binaryWriter.Write(frame.FrameNumber);
                binaryWriter.Write((int)frame.Format);

                if (_depth == null || _depth.Length != frame.PixelDataLength)
                    _depth = new short[frame.PixelDataLength];
                frame.CopyPixelDataTo(_depth);

                if (_depthBytes == null || _depthBytes.Length != _depth.Length * 2)
                    _depthBytes = new byte[_depth.Length * 2];
                Buffer.BlockCopy(_depth, 0, _depthBytes, 0, _depthBytes.Length);
                binaryWriter.Write(_depthBytes);

                _frameCount++;
                if (_fps == -1 || (_frameCount > 0 && (_frameCount % (GetFps(frame.Format) / _fps)) == 0))
                {
                    Parallel.For(0, ClientList.Count, index =>
                    {
                        SocketClient sc = ClientList[index];
                        byte[] data = _memoryStream.ToArray();
                        sc.Send(BitConverter.GetBytes(data.Length));
                        sc.Send(data);
                    });
                }

                RemoveClients();
            }
        }
    }
}
void DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    DepthImageFrame imageFrame = e.OpenDepthImageFrame();
    if (imageFrame != null)
    {
        short[] pixelData = new short[imageFrame.PixelDataLength];
        imageFrame.CopyPixelDataTo(pixelData);

        int temp = 0;
        //dean
        double tempX = 0, tempY = 0, tempZ = 0;
        int i = 0;
        int counter = 0;

        for (int y = 0; y < 240; y += s)
            for (int x = 0; x < 320; x += s)
            {
                temp = ((ushort)pixelData[x + y * 320]) >> 3;
                ((TranslateTransform3D)points[i].Transform).OffsetZ = temp;

                if ((temp > 30) && (temp < 1000)) //select from the ones
                {
                    counter++;
                    //if (tempX < points[i].Geometry.Bounds.X)
                    if (tempY < points[i].Geometry.Bounds.Y)
                    {
                        tempY = points[i].Geometry.Bounds.Y;
                        tempX = points[i].Geometry.Bounds.X;
                    }
                    if (points[i].Geometry.Bounds.Z < temp)
                        tempZ = temp;
                }
                i++;
            }

        this.hitLabel.Content = "counter: " + counter.ToString() + "\n";

        if (tempY > hitThreshold && counter > 200)
        {
            this.hitLabel2.Content = "Hit! " + tempX.ToString() + " X, " + tempY.ToString() + " Y, " + tempZ.ToString() + " Z\n" + "myX" + ProcessGesture(tempX, tempY, tempZ); //was tempY!
        }
        else
            this.hitLabel2.Content = "";
    }
}
private void DepthImageReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame imageFrame = e.OpenDepthImageFrame())
    {
        if (imageFrame != null)
        {
            // We need to detect if the format has changed.
            bool haveNewFormat = this.lastImageFormat != imageFrame.Format;

            if (haveNewFormat)
            {
                this.pixelData = new short[imageFrame.PixelDataLength];
                this.depthFrame32 = new byte[imageFrame.Width * imageFrame.Height * Bgr32BytesPerPixel];
            }

            imageFrame.CopyPixelDataTo(this.pixelData);

            byte[] convertedDepthBits = this.ConvertDepthFrame(this.pixelData, ((KinectSensor)sender).DepthStream);

            // A WriteableBitmap is a WPF construct that enables resetting the bits of the image.
            // This is more efficient than creating a new Bitmap every frame.
            if (haveNewFormat)
            {
                this.outputBitmap = new WriteableBitmap(
                    imageFrame.Width,
                    imageFrame.Height,
                    96, // DpiX
                    96, // DpiY
                    PixelFormats.Bgr32,
                    null);

                this.kinectDepthImage.Source = this.outputBitmap;
            }

            this.outputBitmap.WritePixels(
                new Int32Rect(0, 0, imageFrame.Width, imageFrame.Height),
                convertedDepthBits,
                imageFrame.Width * Bgr32BytesPerPixel,
                0);

            this.lastImageFormat = imageFrame.Format;

            UpdateFrameRate();
        }
    }
}
private void Kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            frame.CopyPixelDataTo(this._DepthPixelData);

            //CreateBetterShadesOfGray(frame, this._DepthPixelData); //See Listing 3-8
            DepthImage.Source = BitmapSource.Create(
                frame.Width,
                frame.Height,
                96, 96,
                PixelFormats.Gray16,
                null,
                _DepthPixelData,
                frame.Width * frame.BytesPerPixel);

            CreateDepthHistogram(frame, this._DepthPixelData);
        }
    }
}
private void DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    DepthImageFrame frame = e.OpenDepthImageFrame();
    if (frame != null)
    {
        this.FInvalidate = true;

        if (frame.FrameNumber != this.frameindex)
        {
            this.RebuildBuffer(frame.Format, false);
            this.frameindex = frame.FrameNumber;

            frame.CopyDepthImagePixelDataTo(this.depthpixels);

            int cnt = 0;
            int img = 0;

            this.runtime.Runtime.CoordinateMapper.MapDepthFrameToSkeletonFrame(frame.Format, this.depthpixels, this.skelpoints);

            for (int h = 0; h < this.height; h++)
            {
                for (int w = 0; w < this.width; w++)
                {
                    //SkeletonPoint sp = frame.MapToSkeletonPoint(w, h);
                    SkeletonPoint sp = this.skelpoints[img];
                    this.world0[cnt] = sp.X;
                    this.world0[cnt + 1] = sp.Y;
                    this.world0[cnt + 2] = sp.Z;
                    this.world0[cnt + 3] = 1.0f;
                    cnt += 4;
                    img++;
                }
            }

            frame.Dispose();

            lock (m_lock)
            {
                float[] tmp = this.world0;
                this.world0 = this.world1;
                this.world1 = tmp;
            }
        }
    }
}
void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    ready = false;

    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

            int minDepth = depthFrame.MinDepth;
            int maxDepth = depthFrame.MaxDepth;

            short myActualPixel;
            int width = 640;
            int height = 480;
            int idx = 0;

            for (int y = 30; y < height - 30; y++)
            {
                for (int x = 110; x < width - 110; x++)
                {
                    myActualPixel = (short)(this.depthPixels[y * width + x].Depth - 1000);

                    if (myActualPixel > 0 && myActualPixel <= 230)
                    {
                        // Found a valid pixel
                        m_data[idx].X = (float)-(((float)myActualPixel / 230f) - 1);
                        m_data[idx].Y = (float)-(((float)myActualPixel / 230f) - 1);
                        m_data[idx].Z = (float)-(((float)myActualPixel / 230f) - 1);
                        m_data[idx].W = 1f;
                    }
                    else
                    {
                        m_data[idx].X = 0f;
                        m_data[idx].Y = 0f;
                        m_data[idx].Z = 0f;
                        m_data[idx].W = 1f;
                    }

                    idx++;
                }
            }

            ready = true;
        }
    }
}
private void _sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            DepthImagePixel[] pixels = new DepthImagePixel[frame.PixelDataLength];
            frame.CopyDepthImagePixelDataTo(pixels);

            short[] buff = pixels.Select(pixel => pixel.Depth).ToArray();
            //_depthMapPainter.UpdateWith(buff);

            if (frame.FrameNumber % 1 == 0)
            {
                Points = new PointCloundConverter(new RandomSampler(2000)).Convert(buff, frame.Width, frame.Height, frame.Width, 525f).Points;
            }
        }
    }
}
private void SensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    using (DepthImageFrame depthFrame = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (depthFrame == null)
            return;

        try
        {
            interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        }
        catch (InvalidOperationException)
        {
            // DepthFrame functions may throw when the sensor gets
            // into a bad state. Ignore the frame in that case.
        }
    }
}
void DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    DepthImageFrame imageFrame = e.OpenDepthImageFrame();
    if (imageFrame != null)
    {
        short[] pixelData = new short[imageFrame.PixelDataLength];
        imageFrame.CopyPixelDataTo(pixelData);

        int temp = 0;
        int i = 0;

        for (int y = 0; y < 240; y += s)
            for (int x = 0; x < 320; x += s)
            {
                temp = ((ushort)pixelData[x + y * 320]) >> 3;
                ((TranslateTransform3D)points[i].Transform).OffsetZ = temp;
                i++;
            }
    }
}
void runtime_DepthFrameReady(object sender, Microsoft.Kinect.DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            try
            {
                // Hand the data to the Interaction framework to be processed
                this.stream.ProcessDepth(frame.GetRawPixelData(), frame.Timestamp);
            }
            catch
            {
                // DepthFrame functions may throw when the sensor gets
                // into a bad state. Ignore the frame in that case.
            }
        }
    }
}
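// The interaction-based handlers here assume an InteractionStream from the Kinect Toolkit
// (Microsoft.Kinect.Toolkit.Interaction) that has already been created and is also fed
// skeleton data. A minimal setup sketch under that assumption, wiring up the
// runtime_DepthFrameReady handler above; the InitializeInteraction method and the
// interactionClient instance are hypothetical, not taken from the original sources.
private InteractionStream stream;

private void InitializeInteraction(KinectSensor sensor, IInteractionClient interactionClient)
{
    // The stream needs both depth and skeleton frames to produce interaction frames
    this.stream = new InteractionStream(sensor, interactionClient);
    this.stream.InteractionFrameReady += (s, args) => { /* handle hand pointers here */ };

    sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
    sensor.SkeletonStream.Enable();

    sensor.DepthFrameReady += this.runtime_DepthFrameReady;
    sensor.SkeletonFrameReady += (s, args) =>
    {
        using (SkeletonFrame skeletonFrame = args.OpenSkeletonFrame())
        {
            if (skeletonFrame == null)
                return;

            Skeleton[] skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
            skeletonFrame.CopySkeletonDataTo(skeletons);

            // The interaction stream also wants the accelerometer reading for gravity correction
            this.stream.ProcessSkeleton(skeletons, sensor.AccelerometerGetCurrentReading(), skeletonFrame.Timestamp);
        }
    };
}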
private void DepthImageReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame imageFrame = e.OpenDepthImageFrame())
    {
        if (imageFrame != null)
        {
            bool NewFormat = this.lastImageFormat != imageFrame.Format;

            if (NewFormat)
            {
                //Update the image to the new format
                this.pixelData = new short[imageFrame.PixelDataLength];
                this.depthFrame32 = new byte[imageFrame.Width * imageFrame.Height * Bgr32BytesPerPixel];

                //Create the new Bitmap
                this.outputBitmap = new WriteableBitmap(
                    imageFrame.Width,
                    imageFrame.Height,
                    96, // DpiX
                    96, // DpiY
                    PixelFormats.Bgr32,
                    null);

                this.kinectDepthImage.Source = this.outputBitmap;
            }

            imageFrame.CopyPixelDataTo(this.pixelData);

            byte[] convertedDepthBits = this.ConvertDepthFrame(this.pixelData, ((KinectSensor)sender).DepthStream);

            this.outputBitmap.WritePixels(
                new Int32Rect(0, 0, imageFrame.Width, imageFrame.Height),
                convertedDepthBits,
                imageFrame.Width * Bgr32BytesPerPixel,
                0);

            //Update the Format
            this.lastImageFormat = imageFrame.Format;
        }
        else
        {
            // imageFrame is null because the request did not arrive in time
        }
    }
}
/// <summary>
/// The event handler for depth detection for the Kinect.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="depthImageFrameReadyEventArgs">The arguments for the event.</param>
private void SensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    using (DepthImageFrame depthFrame = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            try
            {
                interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
            }
            catch (InvalidOperationException)
            {
                // DepthFrame functions may throw when the sensor gets
                // into a bad state. Ignore the frame in that case.
                Console.WriteLine("Kinect returned bad depth data; skipping this frame. Potential error with the Kinect...");
            }
        }
    }
}