/// <summary>
/// Handles a multi-source frame arriving from the Kinect sensor: times the
/// handling with <c>AllFrameWatch</c>, processes the depth frame first, bails
/// out early while the task is in stand-by, then processes the color, body,
/// and body-index frames.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void OnMultipleFramesArrivedHandler(object sender, MultiSourceFrameArrivedEventArgs e)
{
    init = true;

    // Retrieve multisource frame reference
    MultiSourceFrameReference multiRef = e.FrameReference;
    MultiSourceFrame multiFrame = null;

    try
    {
        AllFrameWatch.Again();

        multiFrame = multiRef.AcquireFrame();
        if (multiFrame == null)
        {
            // Frame expired before it could be acquired.
            AllFrameWatch.Stop();
            return;
        }

        // Depth is always handled, even while standing by (motion detection input).
        HandleDepthFrame(multiFrame.DepthFrameReference);

        // Motion check: skip the remaining sources while the task stands by.
        if (Task.StandBy)
        {
            AllFrameWatch.Stop();
            return;
        }

        HandleColorFrame(multiFrame.ColorFrameReference);
        HandleBodyFrame(multiFrame.BodyFrameReference);
        HandleBodyIndexFrame(multiFrame.BodyIndexFrameReference);

        AllFrameWatch.Stop();
    }
    catch (Exception)
    {
        // Ignore if the frame is no longer available.
    }
}
/// <summary>
/// Stores the latest multi-source frame reference under the update lock and
/// signals the worker thread that new data is available.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private static void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    lock (updateLock)
    {
        frameReference = e.FrameReference;
    }

    // Signal outside the lock so the woken consumer does not immediately
    // block trying to take the lock we still hold.
    dataAvailable.Set();
}
/// <summary>
/// Handles frame data arriving from the sensor.
/// </summary>
/// <param name="sender">Object sending the event</param>
/// <param name="e">Event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var incomingReference = e.FrameReference;

    lock (newFrameLock)
    {
        // Publish the newest reference and wake any waiting consumer while
        // still holding the lock, matching the consumer's locking protocol.
        multiFrameReference = incomingReference;
        dataAvailable.Set();
    }
}
/// <summary>
/// Copies the depth and color pixel data of an arriving multi-source frame
/// into the local buffers, then updates and redraws the fusion view.
/// Shows a message box if frame handling throws.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
void multiFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrameReference reference = e.FrameReference;

    try
    {
        MultiSourceFrame frame = reference.AcquireFrame();
        if (frame == null)
        {
            // Frame expired; nothing to do.
            return;
        }

        // DepthFrame / ColorFrame are IDisposable; using guarantees release
        // even when the copy throws (a using over null is a no-op).
        using (DepthFrame depth = frame.DepthFrameReference.AcquireFrame())
        using (ColorFrame color = frame.ColorFrameReference.AcquireFrame())
        {
            if (depth == null || color == null)
            {
                // Need both sources; skip this frame entirely.
                return;
            }

            depth.CopyFrameDataToArray(depthImagePixels);
            color.CopyConvertedFrameDataToArray(colorImagePixels, ColorImageFormat.Bgra);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString());
    }

    // Reached after a successful copy OR after a reported exception
    // (early returns above skip it), matching the original control flow.
    UpdateFusionFrame();
    DrawFusionFrame();
}
/// <summary>
/// Dispatches an arriving multi-source frame to the renderer matching the
/// currently selected image type, then always forwards the body frame.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
void multiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrameReference msFrameReference = e.FrameReference;

    try
    {
        MultiSourceFrame msFrame = msFrameReference.AcquireFrame();
        if (msFrame == null)
        {
            // Frame expired before it could be acquired.
            return;
        }

        // Only the source matching the selected view is rendered.
        switch (this.imageType)
        {
            case ImageType.Color:
                useColorFrame(msFrame.ColorFrameReference);
                break;
            case ImageType.Depth:
                useDepthFrame(msFrame.DepthFrameReference);
                break;
            case ImageType.IR:
                useIRFrame(msFrame.InfraredFrameReference);
                break;
            case ImageType.LEIR:
                useLIRFrame(msFrame.LongExposureInfraredFrameReference);
                break;
        }

        // The body frame is processed regardless of the selected image type.
        useBodyFrame(msFrame.BodyFrameReference);
    }
    catch (Exception)
    {
        // Ignore if the frame is no longer available.
    }
}
/// <summary>
/// Update frame data of color / depth frames
/// </summary>
/// <param name="frameReference">Reference used to acquire the multi-source frame.</param>
#region Update from Kinect sensor
public void Update(MultiSourceFrameReference frameReference)
{
    var msFrame = frameReference.AcquireFrame();
    if (msFrame == null)
    {
        logger.Trace("Abort update since MultiSourceFrame is null");
        return;
    }

    // Acquire the color frame first; the depth frame is only acquired once
    // a color frame is known to be present.
    using (var colorFrame = msFrame.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame == null)
        {
            logger.Trace("Abort update since ColorFrame is null");
            return;
        }

        using (var depthFrame = msFrame.DepthFrameReference.AcquireFrame())
        {
            if (depthFrame == null)
            {
                logger.Trace("Abort update since DepthFrame is null");
                return;
            }

            UpdateColorData(colorFrame);
            UpdateDepthData(depthFrame);

            // Notify subscribers that fresh color/depth data is available.
            OnDataUpdate?.Invoke(this);
        }
    }
}
/// <summary>
/// Handles the multisource frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private unsafe void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Create instance of EMGUargs which holds the output of data from the kinect
    EMGUargs emguArgs = new EMGUargs();
    MultiSourceFrameReference frameReference = e.FrameReference;
    // Variables initialized to null for easy check of camera failures
    MultiSourceFrame multiSourceFrame = null;
    InfraredFrame infraredFrame = null;
    ColorFrame colorFrame = null;
    DepthFrame depthFrame = null;
    // Acquire frame from the Kinect
    multiSourceFrame = frameReference.AcquireFrame();
    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }
    try
    {
        /*
         * DepthSpacePoint dp = new DepthSpacePoint
         * {
         *     X = 50,
         *     Y = 20
         * };
         * DepthSpacePoint[] dps = new DepthSpacePoint[] { dp };
         * ushort[] depths = new ushort[] { 2000 };
         * CameraSpacePoint[] ameraSpacePoints = new CameraSpacePoint[1];
         *
         * mapper.MapDepthPointsToCameraSpace(dps, depths, ameraSpacePoints);
         */
        // IR and depth are mandatory sources; the color source is optional
        // (gated by generateColorImage below).
        InfraredFrameReference infraredFrameReference = multiSourceFrame.InfraredFrameReference;
        infraredFrame = infraredFrameReference.AcquireFrame();
        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
        depthFrame = depthFrameReference.AcquireFrame();
        // Check whether needed frames are avaliable
        if (infraredFrame == null || depthFrame == null)
        {
            // One of the mandatory frames expired; the finally block disposes
            // whichever one was acquired.
            return;
        }
        // the fastest way to process the depth frame data is to directly access
        // the underlying buffer
        using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
        {
            // verify data and write the new depth frame data to the display bitmap
            // (sanity check: buffer size must match width * height at the
            // declared bytes-per-pixel before converting)
            if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
            {
                // Conversion to needed EMGU image
                Mat depthImage = this.ProcessDepthFrameData(depthFrame);
                emguArgs.DepthImage = depthImage;
                emguArgs.DepthFrameDimension = new FrameDimension(depthFrameDescription.Width, depthFrameDescription.Height);
            }
            //BgrToDephtPixel(depthBuffer.UnderlyingBuffer, depthBuffer.Size);
            // Dispose eagerly (and null so the finally block skips it).
            depthFrame.Dispose();
            depthFrame = null;
        }
        // IR image
        FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;
        // the fastest way to process the infrared frame data is to directly access
        // the underlying buffer
        using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
        {
            // verify data and write the new infrared frame data to the display bitmap
            if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)))
            {
                // Conversion to needed EMGU image
                Mat infraredImage = this.ProcessInfaredFrameData(infraredFrame);
                emguArgs.InfraredImage = infraredImage;
                emguArgs.InfraredFrameDimension = new FrameDimension(infraredFrameDescription.Width, infraredFrameDescription.Height);
                // infraredImage.Dispose();
            }
            // Eager dispose, same pattern as the depth frame above.
            infraredFrame.Dispose();
            infraredFrame = null;
            // Check as to whether or not the color image is needed for mainwindow view
            if (generateColorImage)
            {
                ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                colorFrame = colorFrameReference.AcquireFrame();
                if (colorFrame == null)
                {
                    // NOTE(review): returning here skips OnEmguArgsProcessed even
                    // though depth/IR were already converted — confirm intended.
                    return;
                }
                // color image
                FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                // the fastest way to process the color frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    // Conversion to needed EMGU image
                    Mat colorImage = this.ProcessColorFrameData(colorFrame);
                    emguArgs.Colorimage = colorImage;
                    emguArgs.ColorFrameDimension = new FrameDimension(colorFrameDescription.Width, colorFrameDescription.Height);
                }
                // We're done with the colorFrame
                colorFrame.Dispose();
                colorFrame = null;
            }
        }
        // Call the processing finished event for the conversion to EMGU images
        OnEmguArgsProcessed(emguArgs);
    }
    catch (Exception ex)
    {
        // ignore if the frame is no longer available
        Console.WriteLine("FRAME CHRASHED: " + ex.ToString());
    }
    finally
    {
        // generate event at send writeable bitmaps for each frame, and cleanup.
        // only generate event if the mainwindow is shown.
        // DepthFrame, ColorFrame are Disposable.
        // Safety net: dispose anything still live after an early return or
        // exception (fields already disposed above are null here).
        if (colorFrame != null)
        {
            colorFrame.Dispose();
            colorFrame = null;
        }
        if (depthFrame != null)
        {
            depthFrame.Dispose();
            depthFrame = null;
        }
        // infraredFrame is Disposable
        if (infraredFrame != null)
        {
            infraredFrame.Dispose();
            infraredFrame = null;
        }
        if (multiSourceFrame != null)
        {
            // MultiSourceFrame is not IDisposable; just drop the reference.
            multiSourceFrame = null;
        }
    }
}
/// <summary>
/// Copies the color and depth pixel data of the arrived multi-source frame
/// into the local pixel buffers while holding the raw-data lock.
/// </summary>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void ProcessFrameData(MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame msFrame = null;
    DepthFrame depth = null;
    ColorFrame color = null;

    try
    {
        msFrame = e.FrameReference.AcquireFrame();
        if (msFrame == null)
        {
            // Frame expired before it could be acquired.
            return;
        }

        lock (rawDataLock)
        {
            color = msFrame.ColorFrameReference.AcquireFrame();
            depth = msFrame.DepthFrameReference.AcquireFrame();
            if (depth == null || color == null)
            {
                // Both sources are required; the finally block releases
                // whichever one was acquired.
                return;
            }

            // Copy only when the destination buffer matches the frame size.
            FrameDescription colorDesc = color.FrameDescription;
            if (colorDesc.Width * colorDesc.Height * sizeof(int) == colorImagePixels.Length)
            {
                color.CopyConvertedFrameDataToArray(colorImagePixels, ColorImageFormat.Bgra);
            }

            FrameDescription depthDesc = depth.FrameDescription;
            if (depthDesc.Width * depthDesc.Height == depthImagePixels.Length)
            {
                depth.CopyFrameDataToArray(depthImagePixels);
            }
        }
    }
    catch (Exception)
    {
        // ignore if the frame is no longer available
    }
    finally
    {
        // DepthFrame and ColorFrame are IDisposable; release them promptly.
        if (depth != null)
        {
            depth.Dispose();
            depth = null;
        }

        if (color != null)
        {
            color.Dispose();
            color = null;
        }

        // MultiSourceFrame is not disposed here; just drop the reference.
        msFrame = null;
    }
}
/// <summary>
/// Handles an arriving multi-source frame: converts the color frame into a
/// BGRA EMGU image and the depth frame into a scaled 16-bit gray EMGU image,
/// then raises <c>OnAllFramesReady</c> with both images.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void KinectConnector_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrameReference frameReference = e.FrameReference;
    try
    {
        MultiSourceFrame reference = frameReference.AcquireFrame();
        if (reference == null)
        {
            // Frame expired before it could be acquired. Previously this fell
            // through to a NullReferenceException that was silently swallowed
            // by the catch below.
            return;
        }

        // ColorFrame is IDisposable
        using (ColorFrame colorFrame = reference.ColorFrameReference.AcquireFrame())
        {
            if (colorFrame != null)
            {
                // Lazily allocate the reusable RGBA staging buffer (4 bytes per pixel).
                if (m_ColorImgArrayBuffer == null)
                {
                    m_ColorImgArrayBuffer = new byte[m_ColorFrameDescription.Width * m_ColorFrameDescription.Height * 4];
                }

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Rgba)
                {
                    colorFrame.CopyRawFrameDataToArray(this.m_ColorImgArrayBuffer);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(this.m_ColorImgArrayBuffer, ColorImageFormat.Rgba);
                }

                // Wrap the raw bytes as RGBA, then copy into a BGRA image.
                Image<Rgba, byte> t = new Image<Rgba, byte>(GetColorFrameDescription().Width, GetColorFrameDescription().Height);
                t.Bytes = m_ColorImgArrayBuffer;
                m_ColorImg = new Image<Bgra, byte>(t.Width, t.Height);
                CvInvoke.cvCopy(t.Convert<Bgra, byte>().Ptr, m_ColorImg.Ptr, IntPtr.Zero);
            }
        }

        using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
        {
            if (depthFrame != null)
            {
                // the fastest way to process the body index data is to directly access
                // the underlying buffer
                using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // verify data and write the color data to the display bitmap
                    if (((this.m_DepthFrameDescription.Width * this.m_DepthFrameDescription.Height) == (depthBuffer.Size / this.m_DepthFrameDescription.BytesPerPixel)))
                    {
                        // 16-bit depth: 2 bytes per pixel.
                        int size = m_DepthFrameDescription.Width * m_DepthFrameDescription.Height * 2;
                        byte[] managedArray = new byte[size];
                        Marshal.Copy(depthBuffer.UnderlyingBuffer, managedArray, 0, size);
                        Image<Gray, Int16> t = new Image<Gray, Int16>(GetDepthFrameDescription().Width, GetDepthFrameDescription().Height);
                        t.Bytes = managedArray;
                        // Rescale raw depth values for display.
                        t = t.ConvertScale<Int16>(KinectManager.SCALE_FACTOR, 0);
                        m_DepthImg = t;
                    }
                }
            }
        }

        this.OnAllFramesReady(this, m_ColorImg, m_DepthImg);
    }
    catch (Exception)
    {
        // ignore if the frame is no longer available
    }
}