/// <summary>
/// Acquires the long-exposure IR frame from <paramref name="leIRFrameReference"/>, copies its
/// pixel data into <c>irLEImagePixelData</c>, refreshes the backing bitmap via
/// <c>updateBitmap</c>, and displays a rescaled copy of <c>colorImageBitmap</c> in
/// <c>pictureBox1</c>. Silently skips frames that are no longer available.
/// </summary>
/// <param name="leIRFrameReference">Reference from which the frame is acquired; may yield null.</param>
void useLIRFrame(LongExposureInfraredFrameReference leIRFrameReference)
{
    try
    {
        // AcquireFrame may return null (frame already recycled); 'using' tolerates null,
        // so the null check can live inside the using block.
        using (LongExposureInfraredFrame leIRFrame = leIRFrameReference.AcquireFrame())
        {
            if (leIRFrame == null)
            {
                return;
            }

            leIRFrame.CopyFrameDataToArray(this.irLEImagePixelData);
            this.updateBitmap(leIRFrame.FrameDescription.Width, leIRFrame.FrameDescription.Height, this.irLEImagePixelData, false);

            // Bug fix: dispose the previously displayed Bitmap before replacing it.
            // Each 'new Bitmap(...)' holds a GDI+ handle; overwriting the Image property
            // without disposing the old one leaks a handle on every frame.
            Image oldImage = this.pictureBox1.Image;
            this.pictureBox1.Image = new Bitmap(this.colorImageBitmap, this.pictureBox1.Width, this.pictureBox1.Height);
            if (oldImage != null)
            {
                oldImage.Dispose();
            }
        }
    }
    catch (Exception er)
    {
        // Don't worry about empty frames — log and continue.
        Console.WriteLine(er.Message);
    }
}
/// <summary>
/// Copies the raw 16-bit long-exposure IR data into <paramref name="frameData"/> and expands it
/// into a 32-bit-per-pixel gray image in <paramref name="pixels"/> (same byte written to the
/// first three channels, fourth/alpha byte skipped and left unchanged).
/// </summary>
/// <param name="longExposureInfraredFrame">Source frame; must match the array sizes.</param>
/// <param name="frameData">Destination for the raw 16-bit intensities (one entry per pixel).</param>
/// <param name="pixels">Destination pixel buffer; must hold at least 4 bytes per pixel.</param>
internal static void CopyToFrameToPixelArray(this LongExposureInfraredFrame longExposureInfraredFrame, ref ushort[] frameData, ref byte[] pixels)
{
    longExposureInfraredFrame.CopyFrameDataToArray(frameData);

    // Plain indexed loop instead of LINQ Select: this runs once per frame, and the
    // iterator + delegate allocation of the LINQ form is avoidable for free here.
    var pixelIndex = 0;
    for (int i = 0; i < frameData.Length; i++)
    {
        // Keep only the high byte of the 16-bit intensity.
        var intensity = (byte)(frameData[i] >> 8);
        pixels[pixelIndex++] = intensity; // B
        pixels[pixelIndex++] = intensity; // G
        pixels[pixelIndex++] = intensity; // R
        ++pixelIndex;                     // skip alpha — left as-is, matching original behavior
    }
}
/// <summary>
/// Device-specific implementation of Update.
/// Updates data buffers of all active channels with data of current frame.
/// </summary>
/// <remarks>This method is implicitly called by <see cref="Camera.Update"/> inside a camera lock.
/// It loops until every active channel has received a frame; synchronization is anchored on the
/// depth timestamp when depth is active, otherwise on IR, otherwise on color.</remarks>
/// <seealso cref="Camera.Update"/>
protected override void UpdateImpl()
{
    // TODO: This method could yield rather asynchronous channels. If necessary: Try to find a mechanism that updates frames that are already fetched when waiting for others that are not yet available.
    MultiSourceFrame multiSourceFrame = null;
    // Snapshot which channels still need data this Update; each flag is cleared once
    // its channel has been successfully copied, and the do/while loops until all are cleared.
    bool bodyIndexRequired = IsChannelActive(CustomChannelNames.BodyIndex);
    bool depthRequired = IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage);
    bool amplitudeRequired = IsChannelActive(ChannelNames.Amplitude);
    bool colorRequired = IsChannelActive(ChannelNames.Color);
    bool longExposureIRRequired = IsChannelActive(CustomChannelNames.LongExposureIR);
    do
    {
        // Block until the frame-arrived event signals new data, or fail the whole Update on timeout.
        if (!dataAvailable.WaitOne(UpdateTimeoutMilliseconds))
        {
            throw ExceptionBuilder.BuildFromID(typeof(MetriCam2Exception), this, 005);
        }
        lock (newFrameLock)
        {
            try
            {
                if (multiFrameReference != null)
                {
                    multiSourceFrame = multiFrameReference.AcquireFrame();
                }
            }
            catch (Exception)
            {
                // ignore if the frame is no longer available; wait for the next one
                continue;// throw
            }
        }
        try
        {
            // fetch depth?
            if (depthRequired)
            {
                DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                if (depthFrameReference != null)
                {
                    // always synchronize on depth frames if possible:
                    // a frame with an already-seen timestamp is a duplicate — wait for the next one.
                    if (lastTimeStamp == GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks))
                    {
                        continue;
                    }
                    using (DepthFrame depthFrame = depthFrameReference.AcquireFrame())
                    {
                        if (depthFrame == null)
                        {
                            continue;
                        }
                        depthFrameDescription = depthFrame.FrameDescription;
                        int depthWidth = depthFrameDescription.Width;
                        int depthHeight = depthFrameDescription.Height;
                        // Only copy when the buffer size matches the incoming frame dimensions.
                        if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                        {
                            lock (this.depthFrameData)
                            {
                                depthFrame.CopyFrameDataToArray(this.depthFrameData);
                                lastTimeStamp = GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks);
                                timestampDepth = lastTimeStamp;
                            }
                            depthRequired = false;
                        }
                    }
                }
            }
            // fetch IR?
            if (amplitudeRequired)
            {
                InfraredFrameReference irFrameReference = multiSourceFrame.InfraredFrameReference;
                if (irFrameReference != null)
                {
                    // If depth data is inactive, synchronize on IR frames. If depth and IR are inactive, we synchronize on color frames.
                    if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage)) && lastTimeStamp == GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks))
                    {
                        continue;
                    }
                    using (InfraredFrame irFrame = irFrameReference.AcquireFrame())
                    {
                        if (irFrame == null)
                        {
                            continue;
                        }
                        FrameDescription irFrameDescription = irFrame.FrameDescription;
                        int irWidth = irFrameDescription.Width;
                        int irHeight = irFrameDescription.Height;
                        // Only copy when the buffer size matches the incoming frame dimensions.
                        if ((irWidth * irHeight) == this.irFrameData.Length)
                        {
                            lock (this.irFrameData)
                            {
                                irFrame.CopyFrameDataToArray(this.irFrameData);
                                lastTimeStamp = GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks);
                                timestampIR = lastTimeStamp;
                            }
                            amplitudeRequired = false;
                        }
                    }
                }
            }
            // (always) fetch body frame
            BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;
            if (bodyFrameReference != null)
            {
                using (BodyFrame bodyFrame = bodyFrameReference.AcquireFrame())
                {
                    if (bodyFrame != null)
                    {
                        // Allocate a fresh body array each frame; GetAndRefreshBodyData fills it in place.
                        this.bodies = new Body[bodyFrame.BodyCount];
                        using (bodyFrame)
                        {
                            bodyFrame.GetAndRefreshBodyData(this.bodies);
                        }
                    }
                    else
                    {
                        // TODO: check if channel is activated.
                    }
                }
            }
            // fetch color?
            if (colorRequired)
            {
                ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                if (colorFrameReference == null)
                {
                    continue;
                }
                // If depth and IR data is inactive, synchronize on color frames. If color, depth and IR are inactive, we don't care for synchronization.
                if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage) || IsChannelActive(ChannelNames.Amplitude)) && lastTimeStamp == GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks))
                {
                    continue;
                }
                using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
                {
                    //FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                    //int cWidth = colorFrameDescription.Width;
                    //int cHeight = colorFrameDescription.Width;
                    if (colorFrame != null)
                    {
                        // Lock the raw buffer for the duration of the converted copy.
                        using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            lock (this.colorFrameData)
                            {
                                colorFrame.CopyConvertedFrameDataToArray(this.colorFrameData, ColorImageFormat.Bgra);
                                lastTimeStamp = GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks);
                                timestampColor = lastTimeStamp;
                            }
                        }
                        colorRequired = false;
                    }
                }
            }
            // fetch long exposure IR? (this is independent of the IR images and are acquired at the same rate, so every new frame also
            // has one of these.)
            if (longExposureIRRequired)
            {
                LongExposureInfraredFrameReference longExposureIRFrameRef = multiSourceFrame.LongExposureInfraredFrameReference;
                using (LongExposureInfraredFrame longIRFrame = longExposureIRFrameRef.AcquireFrame())
                {
                    if (longIRFrame == null)
                    {
                        continue;
                    }
                    int longIRWidth = longIRFrame.FrameDescription.Width;
                    int longIRHeight = longIRFrame.FrameDescription.Height;
                    // (Re)allocate the buffer lazily when the frame size changes.
                    if (longExposureIRData == null || (longIRWidth * longIRHeight) != longExposureIRData.Length)
                    {
                        longExposureIRData = new ushort[longIRWidth * longIRHeight];
                    }
                    longIRFrame.CopyFrameDataToArray(longExposureIRData);
                    longExposureIRRequired = false;
                }
            }
            // fetch body index frames?
            if (bodyIndexRequired)
            {
                BodyIndexFrameReference bodyIndexFrameRef = multiSourceFrame.BodyIndexFrameReference;
                using (BodyIndexFrame bodyIndexFrame = bodyIndexFrameRef.AcquireFrame())
                {
                    if (bodyIndexFrame == null)
                    {
                        log.Debug("bodyIndexFrame is NULL.");
                        continue;
                    }
                    int bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
                    int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;
                    // (Re)allocate the buffer lazily when the frame size changes.
                    if (bodyIndexData == null || (bodyIndexWidth * bodyIndexHeight) != bodyIndexData.Length)
                    {
                        bodyIndexData = new byte[bodyIndexWidth * bodyIndexHeight];
                    }
                    bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                    bodyIndexRequired = false;
                }
            }
        }
        catch (Exception)
        {
            // ignore if the frame is no longer available
        }
        finally
        {
            // Drop the reference so the runtime can recycle the multi-source frame.
            multiSourceFrame = null;
        }
    } while (depthRequired || colorRequired || bodyIndexRequired || longExposureIRRequired || amplitudeRequired);
}