Example #1
        private void HandleBodyIndexFrame(BodyIndexFrameReference reference)
        {
            if (Task.StandBy)
            {
                BodyIndexWatch.Reset(); return;
            }

            BodyIndexWatch.Again();
            using (var frame = reference.AcquireFrame()) {
                if (frame == null)
                {
                    return;
                }

                frame.CopyFrameDataToArray(BodyIndex.Pixels);

                /*
                 * Alternative: access the raw buffer directly instead of copying into BodyIndex.Pixels.
                 * using (Microsoft.Kinect.KinectBuffer buffer = frame.LockImageBuffer()) {
                 *     IntPtr ptr = buffer.UnderlyingBuffer;
                 *     RefreshBodyArea(ptr, buffer.Size);
                 * }
                 */
                BodyIndex.Stamp.Time = System.DateTime.Now;
            }
            BodyIndexWatch.Stop();
        }
        // Processes the frame data from the Kinect camera.
        // Since events are raised synchronously, doing this work inside the handler would bottleneck the framerate.
        // By handing the work to separate threads, the frames are processed off the event thread and execution can continue with the rest of the game
        // (a hypothetical wiring sketch follows this method).
        private void ProcessRGBVideo(ColorFrameReference aReference, BodyIndexFrameReference bifRef, DepthFrameReference depthRef)
        {
            using (ColorFrame colorImageFrame = aReference.AcquireFrame())
            {
                if (colorImageFrame != null)
                {
                    using (BodyIndexFrame bodyIndexFrame = bifRef.AcquireFrame())
                    {
                        if (bodyIndexFrame != null)
                        {
                            using (DepthFrame depthFrame = depthRef.AcquireFrame())
                            {
                                if (depthFrame != null)
                                {
                                    int depthHeight = depthFrame.FrameDescription.Height;
                                    int depthWidth = depthFrame.FrameDescription.Width;

                                    int colorHeight = colorImageFrame.FrameDescription.Height;
                                    int colorWidth = colorImageFrame.FrameDescription.Width;

                                    ushort[] _depthData = new ushort[depthWidth * depthHeight];
                                    byte[] _bodyData = new byte[bodyIndexFrame.FrameDescription.Width * bodyIndexFrame.FrameDescription.Height];
                                    byte[] _colorData = new byte[colorWidth * colorHeight * 4];
                                    ColorSpacePoint[] _colorPoints = new ColorSpacePoint[depthWidth * depthHeight];

                                    depthFrame.CopyFrameDataToArray(_depthData);
                                    bodyIndexFrame.CopyFrameDataToArray(_bodyData);
                                    colorImageFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Rgba);

                                    iSensor.CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                                    Color[] color = new Color[depthWidth * depthHeight];
                                    Color c;

                                    for (int y = 0; y < depthHeight; ++y)
                                    {
                                        for (int x = 0; x < depthWidth; ++x)
                                        {
                                            int depthIndex = (y * depthWidth) + x;

                                            byte player = _bodyData[depthIndex];

                                            // Check whether this pixel belongs to a tracked player (0xff means no player)
                                            if (player != 0xff)
                                            {
                                                ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                                                int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                                                int colorY = (int)Math.Floor(colorPoint.Y + 0.5);
                                                int colorIndex = ((colorY * colorWidth) + colorX);

                                                if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                                                {

                                                    int displayIndex = colorIndex * 4;

                                                    c = new Color(_colorData[displayIndex + 0], _colorData[displayIndex + 1], _colorData[displayIndex + 2], 0xff);
                                                    color[depthIndex] = c;
                                                }
                                            }
                                        }
                                    }

                                    if (iGraphicsDevice.IsDisposed) return;
                                    var video = new Texture2D(iGraphicsDevice, depthWidth, depthHeight);

                                    video.SetData(color);

                                    lock (iVideoLock)
                                    {
                                        iRGBVideo = video;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
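        // Hypothetical wiring sketch (not part of the original sample): the comment above
        // ProcessRGBVideo describes pushing the frame processing onto separate threads so the
        // Kinect event handler returns quickly. Assuming a MultiSourceFrameReader stored in a
        // field named iReader (the field name and the use of System.Threading.Tasks.Task.Run
        // are assumptions, not the author's code), the subscription could look like this:
        private void SubscribeFrameProcessing()
        {
            iReader.MultiSourceFrameArrived += (sender, e) =>
            {
                MultiSourceFrame msf = e.FrameReference.AcquireFrame();
                if (msf == null) return;

                // Grab the per-source references and hand them to a pool thread so the
                // event callback stays cheap. If a reference expires before the worker
                // runs, AcquireFrame returns null and ProcessRGBVideo simply skips it.
                ColorFrameReference colorRef = msf.ColorFrameReference;
                BodyIndexFrameReference bodyIndexRef = msf.BodyIndexFrameReference;
                DepthFrameReference depthRef = msf.DepthFrameReference;

                System.Threading.Tasks.Task.Run(() => ProcessRGBVideo(colorRef, bodyIndexRef, depthRef));
            };
        }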
Example #4
        /// <summary>
        /// Device-specific implementation of Update.
        /// Updates the data buffers of all active channels with data from the current frame.
        /// </summary>
        /// <remarks>This method is implicitly called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
        /// <seealso cref="Camera.Update"/>
        protected override void UpdateImpl()
        {
            // TODO: This method can yield channels that are noticeably out of sync. If necessary: find a mechanism that updates frames that have already been fetched while waiting for others that are not yet available.
            MultiSourceFrame multiSourceFrame       = null;
            bool             bodyIndexRequired      = IsChannelActive(CustomChannelNames.BodyIndex);
            bool             depthRequired          = IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage);
            bool             amplitudeRequired      = IsChannelActive(ChannelNames.Amplitude);
            bool             colorRequired          = IsChannelActive(ChannelNames.Color);
            bool             longExposureIRRequired = IsChannelActive(CustomChannelNames.LongExposureIR);

            do
            {
                if (!dataAvailable.WaitOne(UpdateTimeoutMilliseconds))
                {
                    throw ExceptionBuilder.BuildFromID(typeof(MetriCam2Exception), this, 005);
                }

                lock (newFrameLock)
                {
                    try
                    {
                        if (multiFrameReference != null)
                        {
                            multiSourceFrame = multiFrameReference.AcquireFrame();
                        }
                    }
                    catch (Exception)
                    {
                        // ignore if the frame is no longer available
                        continue;// throw
                    }
                }

                try
                {
                    // fetch depth?
                    if (depthRequired)
                    {
                        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                        if (depthFrameReference != null)
                        {
                            // Always synchronize on depth frames if possible.
                            if (lastTimeStamp == GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }
                            using (DepthFrame depthFrame = depthFrameReference.AcquireFrame())
                            {
                                if (depthFrame == null)
                                {
                                    continue;
                                }

                                depthFrameDescription = depthFrame.FrameDescription;
                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                                {
                                    lock (this.depthFrameData)
                                    {
                                        depthFrame.CopyFrameDataToArray(this.depthFrameData);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks);
                                        timestampDepth = lastTimeStamp;
                                    }
                                    depthRequired = false;
                                }
                            }
                        }
                    }

                    // fetch IR?
                    if (amplitudeRequired)
                    {
                        InfraredFrameReference irFrameReference = multiSourceFrame.InfraredFrameReference;
                        if (irFrameReference != null)
                        {
                            // If depth data is inactive, synchronize on IR frames. If depth and IR are inactive, we synchronize on color frames.
                            if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage)) && lastTimeStamp == GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }

                            using (InfraredFrame irFrame = irFrameReference.AcquireFrame())
                            {
                                if (irFrame == null)
                                {
                                    continue;
                                }

                                FrameDescription irFrameDescription = irFrame.FrameDescription;
                                int irWidth  = irFrameDescription.Width;
                                int irHeight = irFrameDescription.Height;
                                if ((irWidth * irHeight) == this.irFrameData.Length)
                                {
                                    lock (this.irFrameData)
                                    {
                                        irFrame.CopyFrameDataToArray(this.irFrameData);
                                        lastTimeStamp = GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks);
                                        timestampIR   = lastTimeStamp;
                                    }
                                    amplitudeRequired = false;
                                }
                            }
                        }
                    }

                    // (always) fetch body frame
                    BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;
                    if (bodyFrameReference != null)
                    {
                        using (BodyFrame bodyFrame = bodyFrameReference.AcquireFrame())
                        {
                            if (bodyFrame != null)
                            {
                                this.bodies = new Body[bodyFrame.BodyCount];
                                bodyFrame.GetAndRefreshBodyData(this.bodies);
                            }
                            else
                            {
                                // TODO: check if channel is activated.
                            }
                        }
                    }

                    // fetch color?
                    if (colorRequired)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        if (colorFrameReference == null)
                        {
                            continue;
                        }
                        // If depth and IR data are inactive, synchronize on color frames. If color, depth and IR are all inactive, we do not synchronize at all.
                        if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage) || IsChannelActive(ChannelNames.Amplitude)) && lastTimeStamp == GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks))
                        {
                            continue;
                        }

                        using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
                        {
                            //FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                            //int cWidth = colorFrameDescription.Width;
                            //int cHeight = colorFrameDescription.Height;
                            if (colorFrame != null)
                            {
                                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                                {
                                    lock (this.colorFrameData)
                                    {
                                        colorFrame.CopyConvertedFrameDataToArray(this.colorFrameData, ColorImageFormat.Bgra);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks);
                                        timestampColor = lastTimeStamp;
                                    }
                                }
                                colorRequired = false;
                            }
                        }
                    }

                    // fetch long exposure IR? (These frames are independent of the regular IR images but are acquired at the same rate, so every new frame also has one.)
                    if (longExposureIRRequired)
                    {
                        LongExposureInfraredFrameReference longExposureIRFrameRef = multiSourceFrame.LongExposureInfraredFrameReference;
                        using (LongExposureInfraredFrame longIRFrame = longExposureIRFrameRef.AcquireFrame())
                        {
                            if (longIRFrame == null)
                            {
                                continue;
                            }

                            int longIRWidth  = longIRFrame.FrameDescription.Width;
                            int longIRHeight = longIRFrame.FrameDescription.Height;
                            if (longExposureIRData == null || (longIRWidth * longIRHeight) != longExposureIRData.Length)
                            {
                                longExposureIRData = new ushort[longIRWidth * longIRHeight];
                            }
                            longIRFrame.CopyFrameDataToArray(longExposureIRData);
                            longExposureIRRequired = false;
                        }
                    }

                    // fetch body index frames?
                    if (bodyIndexRequired)
                    {
                        BodyIndexFrameReference bodyIndexFrameRef = multiSourceFrame.BodyIndexFrameReference;
                        using (BodyIndexFrame bodyIndexFrame = bodyIndexFrameRef.AcquireFrame())
                        {
                            if (bodyIndexFrame == null)
                            {
                                log.Debug("bodyIndexFrame is NULL.");
                                continue;
                            }

                            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
                            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;
                            if (bodyIndexData == null || (bodyIndexWidth * bodyIndexHeight) != bodyIndexData.Length)
                            {
                                bodyIndexData = new byte[bodyIndexWidth * bodyIndexHeight];
                            }
                            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                            bodyIndexRequired = false;
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    multiSourceFrame = null;
                }
            } while (depthRequired || colorRequired || bodyIndexRequired || longExposureIRRequired || amplitudeRequired);
        }
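        // Hypothetical caller-side sketch (not part of the example): UpdateImpl above is invoked
        // indirectly through Camera.Update, inside the camera lock, and loops until every active
        // channel has received a fresh, timestamp-synchronized frame. Assuming the usual MetriCam2
        // pattern (Connect / ActivateChannel / Update / CalcChannel on the Camera base class;
        // treat these member names as assumptions if your version differs), a consumer could
        // drive the acquisition like this:
        private static void RunAcquisitionLoop(MetriCam2.Camera camera, System.Threading.CancellationToken token)
        {
            camera.Connect();
            camera.ActivateChannel(ChannelNames.Distance); // makes depthRequired true in UpdateImpl
            camera.ActivateChannel(ChannelNames.Color);    // makes colorRequired true in UpdateImpl

            while (!token.IsCancellationRequested)
            {
                camera.Update();                           // blocks until all active channels delivered data
                var distance = camera.CalcChannel(ChannelNames.Distance);
                var color    = camera.CalcChannel(ChannelNames.Color);
                // ... hand the images to the rest of the pipeline ...
            }

            camera.Disconnect();
        }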