public void InfraredFrameArrival(InfraredFrame df, double fps, ref bool processed, WriteableBitmap infraredBitmap)
{
    using (Microsoft.Kinect.KinectBuffer infraredBuffer = df.LockImageBuffer())
    {
        // verify data and write the new infrared frame data to the display bitmap
        if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)) &&
            (this.infraredFrameDescription.Width == infraredBitmap.PixelWidth) && (this.infraredFrameDescription.Height == infraredBitmap.PixelHeight))
        {
            ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
            processed = true;

            if (infraredRecording)
            {
                this.infraredBinaryBuffer.Enqueue((byte[])infraredPixelBuffer.Clone());
                this.frameCount++;

                // enqueue the frame a second time to compensate for a frame-rate drop
                if (fps < 16.0)
                {
                    Console.WriteLine("fps drop detected");
                    this.infraredBinaryBuffer.Enqueue((byte[])infraredPixelBuffer.Clone());
                    this.frameCount++;
                }
            }
        }
    }
}
public void PollMostRecentInfraredFrame()
{
    MultiSourceFrame multiFrame = _reader.AcquireLatestFrame();
    if (multiFrame == null)
    {
        return;
    }

    using (InfraredFrame frame = multiFrame.InfraredFrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return; // Could not find multi-frame or infrared-frame
        }

        using (KinectBuffer buffer = frame.LockImageBuffer())
        {
            if (InfraredFrameDescription.Width * InfraredFrameDescription.Height == buffer.Size / InfraredFrameDescription.BytesPerPixel)
            {
                ProcessInfraredFrameData(buffer.UnderlyingBuffer, buffer.Size);
            }
        }
    }
}
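The polling variant above assumes a `_reader` field that has already been opened. A minimal setup sketch under that assumption (the field and method names here are illustrative, not taken from the snippet's project):

// Sketch only: open the sensor and a multi-source reader that the polling method can drain.
private KinectSensor _sensor;
private MultiSourceFrameReader _reader;

private void InitializeKinect()
{
    _sensor = KinectSensor.GetDefault();

    // Request only the infrared stream; no FrameArrived handler is attached,
    // so frames are consumed by calling AcquireLatestFrame on demand.
    _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared);
    _sensor.Open();
}

With no event handler attached, the application can call PollMostRecentInfraredFrame from its own loop (for example, a render tick) and simply skip iterations where AcquireLatestFrame returns null.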
private void InfraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    if (this.processingInfraredFrame)
    {
        return;
    }

    this.processingInfraredFrame = true;
    bool infraredFrameProcessed = false;

    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            // the fastest way to process the infrared data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                // verify data before processing the infrared frame
                if ((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) ==
                    (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel))
                {
                    this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size, this.infraredFrameDescription.BytesPerPixel);
                    infraredFrameProcessed = true;
                }
            }
        }
    }

    if (infraredFrameProcessed)
    {
        this.Rescale(this.infraredPixels, this.truncatedInfraredPixels);
        this.infraredFrameCallback(this.truncatedInfraredPixels);
    }

    this.processingInfraredFrame = false;
}
/// <summary>
/// Handles the infrared frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
public void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    bool infraredFrameProcessed = false;

    // InfraredFrame is IDisposable
    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            // the fastest way to process the infrared frame data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                // verify data and write the new infrared frame data to the display bitmap
                if (((infraredFrame.FrameDescription.Width * infraredFrame.FrameDescription.Height) == (infraredBuffer.Size / infraredFrame.FrameDescription.BytesPerPixel)) &&
                    (infraredFrame.FrameDescription.Width == this.infraredSource.PixelWidth) && (infraredFrame.FrameDescription.Height == this.infraredSource.PixelHeight))
                {
                    this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                    infraredFrameProcessed = true;
                }
            }
        }
    }

    if (infraredFrameProcessed)
    {
        this.RenderInfraredPixels();
    }
}
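Most of these handlers delegate to a ProcessInfraredFrameData helper that the snippets do not show. A minimal sketch, modeled on the Kinect for Windows v2 InfraredBasics sample: it normalizes each 16-bit intensity into a displayable range and writes it into a Gray32Float WriteableBitmap. The constants, the infraredBitmap and infraredFrameDescription fields, and the exact scaling are assumptions carried over from that sample, not from any one snippet above; the method also requires compiling with /unsafe.

// Sketch only: constants and fields mirror the InfraredBasics sample.
private const float InfraredSourceValueMaximum = (float)ushort.MaxValue;
private const float InfraredOutputValueMinimum = 0.01f;
private const float InfraredOutputValueMaximum = 1.0f;
private const float InfraredSceneValueAverage = 0.08f;
private const float InfraredSceneStandardDeviations = 3.0f;

private unsafe void ProcessInfraredFrameData(IntPtr infraredFrameData, uint infraredFrameDataSize)
{
    // infrared frame data is a 16-bit value per pixel
    ushort* frameData = (ushort*)infraredFrameData;

    // lock the target bitmap while its backing buffer is rewritten
    this.infraredBitmap.Lock();
    float* backBuffer = (float*)this.infraredBitmap.BackBuffer;

    int pixelCount = (int)(infraredFrameDataSize / this.infraredFrameDescription.BytesPerPixel);
    for (int i = 0; i < pixelCount; ++i)
    {
        // scale the raw intensity into [InfraredOutputValueMinimum, InfraredOutputValueMaximum]
        backBuffer[i] = Math.Min(
            InfraredOutputValueMaximum,
            (frameData[i] / InfraredSourceValueMaximum * InfraredSceneStandardDeviations / InfraredSceneValueAverage
                * (1.0f - InfraredOutputValueMinimum)) + InfraredOutputValueMinimum);
    }

    // mark the whole bitmap dirty so WPF redraws it
    this.infraredBitmap.AddDirtyRect(new Int32Rect(0, 0, this.infraredBitmap.PixelWidth, this.infraredBitmap.PixelHeight));
    this.infraredBitmap.Unlock();
}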
public WriteableBitmap ParseToWriteableBitmap(InfraredFrame infraredFrame)
{
    WriteableBitmap infraredBitmap = new WriteableBitmap(
        infraredFrame.FrameDescription.Width, infraredFrame.FrameDescription.Height,
        96.0, 96.0, PixelFormats.Gray32Float, null);

    using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
    {
        ConvertInfraredFrameData(infraredBitmap, infraredFrame.FrameDescription, infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
    }

    return infraredBitmap;
}
/// <summary>
/// Store infrared image
/// </summary>
/// <param name="infraredFrame">infrared frame to be stored</param>
/// <param name="frameNumber">frame number</param>
public static void Handle_InfraredFrame(InfraredFrame infraredFrame, String frameNumber)
{
    using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
    {
        // Gray16 pixels are two bytes wide, so the stride is width << 1
        BitmapSource bitmapSource = BitmapSource.Create(
            infraredWidth, infraredHeight, 96.0, 96.0, PixelFormats.Gray16, null,
            infraredBuffer.UnderlyingBuffer, (int)infraredBuffer.Size, infraredWidth << 1);

        String infraredPath = FramesAndPaths.GetImageFilePath(FramesAndPaths.FileType.InfraredImage, frameNumber);
        bitmapSource.Save(infraredPath + ".jpg", ImageFormat.Jpeg);
    }

    // Release infraredFrame
    infraredFrame.Dispose();
}
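BitmapSource has no built-in Save method, so the call above presumably relies on a project-local extension. A plausible sketch matching that call's signature, using WPF encoders (the class and mapping here are hypothetical, not part of WPF or the snippet's project):

// Hypothetical extension backing the bitmapSource.Save(...) call above.
public static class BitmapSourceExtensions
{
    public static void Save(this BitmapSource source, string path, System.Drawing.Imaging.ImageFormat format)
    {
        // Pick a WPF encoder matching the requested GDI+ image format.
        BitmapEncoder encoder;
        if (format.Equals(System.Drawing.Imaging.ImageFormat.Jpeg))
        {
            encoder = new JpegBitmapEncoder();
        }
        else if (format.Equals(System.Drawing.Imaging.ImageFormat.Png))
        {
            encoder = new PngBitmapEncoder();
        }
        else
        {
            encoder = new BmpBitmapEncoder();
        }

        encoder.Frames.Add(BitmapFrame.Create(source));
        using (var stream = new System.IO.FileStream(path, System.IO.FileMode.Create))
        {
            encoder.Save(stream);
        }
    }
}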
private void MultiSource_FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    if (multiSourceFrame == null)
    {
        return; // the frame expired before it could be acquired
    }

    using (InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            using (KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
            }
        }
    }

    using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            bodyFrame.GetAndRefreshBodyData(bodies);

            using (DrawingContext dc = drawingGroup.Open())
            {
                dc.DrawImage(infraredBitmap, new Rect(0, 0, infraredBitmap.Width, infraredBitmap.Height));

                for (int i = 0; i < 6; i++)
                {
                    if (faceFrameSources[i].IsTrackingIdValid)
                    {
                        if (faceFrameResults[i] != null)
                        {
                            DrawFace(i, faceFrameResults[i], dc);
                        }
                    }
                    else if (bodies[i].IsTracked)
                    {
                        // bind this face tracker to the newly tracked body
                        faceFrameSources[i].TrackingId = bodies[i].TrackingId;
                    }
                }
            }
        }
    }
}
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (InfraredFrame infrFrame = e.FrameReference.AcquireFrame())
    {
        if (infrFrame != null)
        {
            using (KinectBuffer infrBuffer = infrFrame.LockImageBuffer())
            {
                if ((infrFrame.FrameDescription.Width * infrFrame.FrameDescription.Height) ==
                    (infrBuffer.Size / infrFrame.FrameDescription.BytesPerPixel))
                {
                    this.ProcessInfraredFrameData(infrBuffer.UnderlyingBuffer, infrBuffer.Size, infrFrame.FrameDescription.BytesPerPixel);
                }
            }
        }
    }
}
private void InfraredFrameReader_FrameArrived(InfraredFrameReference infraredFrameReference)
{
    using (InfraredFrame infraredFrame = infraredFrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

            using (KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                using (var dest = ImagePool.GetOrCreate(infraredFrameDescription.Width, infraredFrameDescription.Height, Imaging.PixelFormat.Gray_16bpp))
                {
                    // each Gray_16bpp pixel is two bytes wide
                    infraredFrame.CopyFrameDataToIntPtr(dest.Resource.ImageData, (uint)(infraredFrameDescription.Width * infraredFrameDescription.Height * 2));
                    var time = this.pipeline.GetCurrentTimeFromElapsedTicks(infraredFrameReference.RelativeTime.Ticks);
                    this.InfraredImage.Post(dest, time);
                }
            }
        }
    }
}
/// <summary>
/// Processes the infrared frame.
/// </summary>
/// <param name="frame">The frame.</param>
private void ProcessInfraredFrame(InfraredFrame frame)
{
    if (frame != null)
    {
        FrameDescription frameDescription = frame.FrameDescription;

        // the fastest way to process the infrared data is to directly access
        // the underlying buffer
        using (KinectBuffer buffer = frame.LockImageBuffer())
        {
            // verify data before copying the infrared data to the texture
            if (((frameDescription.Width * frameDescription.Height) == (buffer.Size / frameDescription.BytesPerPixel)) &&
                (frameDescription.Width == this.infraredTexture.Width) && (frameDescription.Height == this.infraredTexture.Height))
            {
                frame.CopyFrameDataToIntPtr(this.infraredTexturePointer, (uint)this.infraredPointerSize);
                this.updateInfraredTexture = true;
            }
        }
    }
}
internal IFrame(InfraredFrame InfraredFrame)
{
    this.frameDescriptor = InfraredFrame.FrameDescription;
    this.width = InfraredFrame.FrameDescription.Width;
    this.height = InfraredFrame.FrameDescription.Height;
    this.pixels = new ushort[width * height];

    this.WriteableBitmap = new WriteableBitmap(this.width, this.height, 96.0, 96.0, PixelFormats.Gray8, null);

    using (KinectBuffer infraredBuffer = InfraredFrame.LockImageBuffer())
    {
        // despite its name, this helper processes the infrared data here
        this.ProcessDepthFrameData(InfraredFrame, infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
    }

    this.ProcessBitmap(); // creates bitmap and bitmap source
}
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // InfraredFrame is IDisposable
    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            // the fastest way to process the infrared frame data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                // verify data and write the new infrared frame data to the display bitmap
                if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)) &&
                    (this.infraredFrameDescription.Width == this.infraredBitmap.PixelWidth) && (this.infraredFrameDescription.Height == this.infraredBitmap.PixelHeight))
                {
                    this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                }
            }
        }
    }
}
/// <summary>
/// Handles the multisource frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private unsafe void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Create an instance of EMGUargs, which holds the EMGU images produced from the Kinect data
    EMGUargs emguArgs = new EMGUargs();
    MultiSourceFrameReference frameReference = e.FrameReference;

    // Variables initialized to null for easy cleanup on camera failures
    MultiSourceFrame multiSourceFrame = null;
    InfraredFrame infraredFrame = null;
    ColorFrame colorFrame = null;
    DepthFrame depthFrame = null;

    // Acquire the frame from the Kinect
    multiSourceFrame = frameReference.AcquireFrame();

    // If the frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    try
    {
        InfraredFrameReference infraredFrameReference = multiSourceFrame.InfraredFrameReference;
        infraredFrame = infraredFrameReference.AcquireFrame();

        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
        depthFrame = depthFrameReference.AcquireFrame();

        // Check whether the needed frames are available
        if (infraredFrame == null || depthFrame == null)
        {
            return;
        }

        // the fastest way to process the depth frame data is to directly access
        // the underlying buffer
        using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
        {
            // verify data before converting the depth frame
            if ((this.depthFrameDescription.Width * this.depthFrameDescription.Height) ==
                (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel))
            {
                // Conversion to the needed EMGU image
                Mat depthImage = this.ProcessDepthFrameData(depthFrame);
                emguArgs.DepthImage = depthImage;
                emguArgs.DepthFrameDimension = new FrameDimension(depthFrameDescription.Width, depthFrameDescription.Height);
            }

            depthFrame.Dispose();
            depthFrame = null;
        }

        // IR image
        FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

        // the fastest way to process the infrared frame data is to directly access
        // the underlying buffer
        using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
        {
            // verify data before converting the infrared frame
            if ((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) ==
                (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel))
            {
                // Conversion to the needed EMGU image
                Mat infraredImage = this.ProcessInfaredFrameData(infraredFrame);
                emguArgs.InfraredImage = infraredImage;
                emguArgs.InfraredFrameDimension = new FrameDimension(infraredFrameDescription.Width, infraredFrameDescription.Height);
            }

            infraredFrame.Dispose();
            infraredFrame = null;

            // Check whether the color image is needed for the main window view
            if (generateColorImage)
            {
                ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                colorFrame = colorFrameReference.AcquireFrame();
                if (colorFrame == null)
                {
                    return;
                }

                // color image
                FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                // the fastest way to process the color frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    // Conversion to the needed EMGU image
                    Mat colorImage = this.ProcessColorFrameData(colorFrame);
                    emguArgs.Colorimage = colorImage;
                    emguArgs.ColorFrameDimension = new FrameDimension(colorFrameDescription.Width, colorFrameDescription.Height);
                }

                // We're done with the colorFrame
                colorFrame.Dispose();
                colorFrame = null;
            }
        }

        // Raise the processing-finished event with the converted EMGU images
        OnEmguArgsProcessed(emguArgs);
    }
    catch (Exception ex)
    {
        // ignore if the frame is no longer available
        Console.WriteLine("FRAME CRASHED: " + ex.ToString());
    }
    finally
    {
        // Dispose any frames still held; DepthFrame, ColorFrame and InfraredFrame are IDisposable.
        if (colorFrame != null)
        {
            colorFrame.Dispose();
            colorFrame = null;
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
            depthFrame = null;
        }

        if (infraredFrame != null)
        {
            infraredFrame.Dispose();
            infraredFrame = null;
        }

        multiSourceFrame = null;
    }
}
public IImageData GetData()
{
    return new KinectBufferImageData(frame.FrameDescription, frame.LockImageBuffer());
}
// *** Only StoreApp with WindowsPreview.Kinect:
// private void msfr_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs args)
private void msfr_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // On StoreApp with WindowsPreview.Kinect, MultiSourceFrame is IDisposable
    // and could be wrapped in a using block instead.
    MultiSourceFrame msf = e.FrameReference.AcquireFrame();
    if (msf != null)
    {
        using (BodyFrame bodyframe = msf.BodyFrameReference.AcquireFrame())
        using (InfraredFrame irf = msf.InfraredFrameReference.AcquireFrame())
        {
            if (bodyframe != null && irf != null)
            {
                /* *** only with Windows.UI.Xaml.Media.Imaging:
                irf.CopyFrameDataToArray(irData);
                for (int i = 0; i < irData.Length; i++)
                {
                    byte intensity = (byte)(irData[i] >> 8);
                    irDataConverted[i * 4] = intensity;
                    irDataConverted[i * 4 + 1] = intensity;
                    irDataConverted[i * 4 + 2] = intensity;
                    irDataConverted[i * 4 + 3] = 255;
                }
                irDataConverted.CopyTo(irBitmap.PixelBuffer);
                irBitmap.Invalidate();
                */

                // The below is from the Kinect Studio WPF infrared sample
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = irf.LockImageBuffer())
                {
                    // verify data and write the new infrared frame data to the display bitmap
                    if (((this.fd.Width * this.fd.Height) == (infraredBuffer.Size / this.fd.BytesPerPixel)) &&
                        (this.fd.Width == this.irBitmap.PixelWidth) && (this.fd.Height == this.irBitmap.PixelHeight))
                    {
                        this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                    }
                }

                bodyframe.GetAndRefreshBodyData(bodies);
                bodyCanvas.Children.Clear();

                foreach (Body b in bodies)
                {
                    if (b.IsTracked)
                    {
                        Joint hand = b.Joints[JointType.HandLeft];
                        if (hand.TrackingState == TrackingState.Tracked)
                        {
                            DepthSpacePoint dsp = ksensor.CoordinateMapper.MapCameraPointToDepthSpace(hand.Position);
                            var circle = CreateCircle(dsp);
                            tbox.Content = "x:" + (dsp.X / 2).ToString() + " y:" + (dsp.Y / 2).ToString();
                            DetectPageTurn(dsp, circle);
                        }
                    }
                }
            }
        }

        msf = null;
    }
}
private void OnFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Acquire the current Kinect frame reference.
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // Record the current Unix epoch timestamp and convert it to a byte array for serialization.
    long timestamp = DateTimeOffset.Now.ToUnixTimeMilliseconds();
    byte[] timestampBytes = BitConverter.GetBytes(timestamp);

    // If clients exist, convert the RGB frame to a byte array and send it followed by a timestamp.
    if (this.colorConnector.HasClients)
    {
        using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
        {
            if (colorFrame != null)
            {
                // Allocate a new byte buffer to store this RGB frame and timestamp.
                var colorArraySize = colorFrame.ColorFrameSource.FrameDescription.Height * colorFrame.ColorFrameSource.FrameDescription.Width * 3; // BGR8 = 24bpp
                var colorBuffer = new byte[colorArraySize + sizeof(long)];
                var colorRect = new Rectangle(0, 0, colorFrame.ColorFrameSource.FrameDescription.Width, colorFrame.ColorFrameSource.FrameDescription.Height);

                // Wrap RGB frames into bitmap buffers.
                var bmp32 = new Bitmap(colorFrame.ColorFrameSource.FrameDescription.Width, colorFrame.ColorFrameSource.FrameDescription.Height, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
                var bmp24 = new Bitmap(bmp32.Width, bmp32.Height, bmp32.Width * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, Marshal.UnsafeAddrOfPinnedArrayElement(colorBuffer, 0));

                // Lock the bitmap's bits and copy the converted BGRA data into it.
                System.Drawing.Imaging.BitmapData bmpData = bmp32.LockBits(colorRect, System.Drawing.Imaging.ImageLockMode.ReadWrite, bmp32.PixelFormat);
                IntPtr bmpPtr = bmpData.Scan0;
                colorFrame.CopyConvertedFrameDataToIntPtr(bmpPtr, (uint)(bmpData.Width * bmpData.Height * 4), ColorImageFormat.Bgra);
                bmp32.UnlockBits(bmpData);

                // Convert from 32bpp to 24bpp using System.Drawing.
                using (Graphics gr = Graphics.FromImage(bmp24))
                {
                    gr.DrawImage(bmp32, new Rectangle(0, 0, bmp24.Width, bmp24.Height));
                }

                // Append the system timestamp to the end of the buffer.
                Buffer.BlockCopy(timestampBytes, 0, colorBuffer, (int)colorArraySize, sizeof(long));

                // Transmit the byte buffer to color clients.
                this.colorConnector.Broadcast(colorBuffer);
            }
        }
    }

    // If clients exist, convert the depth frame to a byte array and send it followed by a timestamp.
    if (this.depthConnector.HasClients)
    {
        using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
        {
            if (depthFrame != null)
            {
                // Allocate a new byte buffer to store this depth frame and timestamp.
                var depthArraySize = depthFrame.DepthFrameSource.FrameDescription.Height * depthFrame.DepthFrameSource.FrameDescription.Width * depthFrame.DepthFrameSource.FrameDescription.BytesPerPixel;
                var depthBuffer = new byte[depthArraySize + sizeof(long)];

                // Copy the depth frame into the byte buffer.
                using (var depthFrameBuffer = depthFrame.LockImageBuffer())
                {
                    Marshal.Copy(depthFrameBuffer.UnderlyingBuffer, depthBuffer, 0, (int)depthFrameBuffer.Size);
                }

                // Append the system timestamp to the end of the buffer.
                Buffer.BlockCopy(timestampBytes, 0, depthBuffer, (int)depthArraySize, sizeof(long));

                // Transmit the byte buffer to depth clients.
                this.depthConnector.Broadcast(depthBuffer);
            }
        }
    }

    // If clients exist, convert the IR frame to a byte array and send it followed by a timestamp.
    if (this.irConnector.HasClients)
    {
        using (InfraredFrame irFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
        {
            if (irFrame != null)
            {
                // Allocate a new byte buffer to store this IR frame and timestamp.
                var irArraySize = irFrame.InfraredFrameSource.FrameDescription.Height * irFrame.InfraredFrameSource.FrameDescription.Width * irFrame.InfraredFrameSource.FrameDescription.BytesPerPixel;
                var irBuffer = new byte[irArraySize + sizeof(long)];

                // Copy the IR frame into the byte buffer.
                using (var irFrameBuffer = irFrame.LockImageBuffer())
                {
                    Marshal.Copy(irFrameBuffer.UnderlyingBuffer, irBuffer, 0, (int)irFrameBuffer.Size);
                }

                // Append the system timestamp to the end of the buffer.
                Buffer.BlockCopy(timestampBytes, 0, irBuffer, (int)irArraySize, sizeof(long));

                // Transmit the byte buffer to IR clients.
                this.irConnector.Broadcast(irBuffer);
            }
        }
    }

    // If clients exist, convert the tracked skeletons to a JSON array and send it with a timestamp.
    if (this.bodyConnector.HasClients || this.faceConnector.HasClients)
    {
        using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
        {
            if (bodyFrame != null)
            {
                var bodyArray = new Body[this.kinect.BodyFrameSource.BodyCount];
                bodyFrame.GetAndRefreshBodyData(bodyArray);

                // Configure tracking IDs for bodies that have been added.
                if (this.faceConnector.HasClients)
                {
                    for (var i = 0; i < bodyArray.Length; ++i)
                    {
                        // Only process the actively tracked bodies.
                        Body body = bodyArray[i];
                        if (!body.IsTracked)
                        {
                            continue;
                        }

                        // Activate the corresponding face tracker using this body's tracking ID.
                        faceSources[i].TrackingId = body.TrackingId;
                    }
                }

                // Serialize body tracking information to clients.
                if (this.bodyConnector.HasClients)
                {
                    // Iterate through the full list of bodies (which might not all be tracked).
                    List<Body> bodyList = new List<Body>();
                    for (var i = 0; i < bodyArray.Length; ++i)
                    {
                        // Only include the actively tracked bodies.
                        Body body = bodyArray[i];
                        if (!body.IsTracked)
                        {
                            continue;
                        }

                        bodyList.Add(body);
                    }

                    // Combine the body list with a timestamp.
                    // TODO: suppress invalid face tracking information!
                    Dictionary<string, object> bodyJson = new Dictionary<string, object>
                    {
                        { "Time", timestamp },
                        { "Bodies", bodyList }
                    };
                    string json = JsonConvert.SerializeObject(bodyJson, new JsonSerializerSettings { ContractResolver = new BodyContractResolver() }) + "\n";
                    byte[] bytes = System.Text.Encoding.ASCII.GetBytes(json);
                    this.bodyConnector.Broadcast(bytes);
                }
            }
        }
    }
}
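Each broadcast above is raw pixel data followed by an 8-byte little-endian Unix-millisecond timestamp. A minimal client-side sketch for the IR stream; the 512x424 geometry and 2 bytes per pixel are the usual Kinect v2 infrared values, assumed here rather than read from the packets, and the function itself is hypothetical:

// Hypothetical receiver for the IR broadcast; frame geometry is assumed, not carried in a header.
public static (ushort[] Pixels, DateTimeOffset Time) ParseInfraredPacket(byte[] packet)
{
    const int width = 512, height = 424;  // Kinect v2 IR resolution (assumed)
    int pixelBytes = width * height * 2;  // 16 bits per IR pixel

    if (packet.Length < pixelBytes + sizeof(long))
    {
        throw new ArgumentException("packet too short for one IR frame plus timestamp");
    }

    // Reinterpret the leading bytes as 16-bit IR intensities.
    var pixels = new ushort[width * height];
    Buffer.BlockCopy(packet, 0, pixels, 0, pixelBytes);

    // The trailing 8 bytes are the Unix-millisecond timestamp appended by the sender.
    long unixMs = BitConverter.ToInt64(packet, pixelBytes);
    return (pixels, DateTimeOffset.FromUnixTimeMilliseconds(unixMs));
}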