Code Example #1
        private void DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            if (null == e.FrameReference)
            {
                return;
            }

            // If you do not dispose of the frame, you never get another one...
            using (DepthFrame _DepthFrame = e.FrameReference.AcquireFrame()) {
                if (null == _DepthFrame)
                {
                    return;
                }

                BitmapToDisplay.Lock();
                _DepthFrame.CopyFrameDataToIntPtr(
                    BitmapToDisplay.BackBuffer,
                    Convert.ToUInt32(BitmapToDisplay.BackBufferStride * BitmapToDisplay.PixelHeight));
                BitmapToDisplay.AddDirtyRect(
                    new Int32Rect(
                        0,
                        0,
                        _DepthFrame.FrameDescription.Width,
                        _DepthFrame.FrameDescription.Height));
                BitmapToDisplay.Unlock();
            }
        }
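
The handler above assumes that BitmapToDisplay is a WriteableBitmap whose dimensions and pixel format match the depth stream, and that a DepthFrameReader has been subscribed to DepthFrameArrived. A minimal setup sketch under those assumptions (every name other than BitmapToDisplay and DepthFrameArrived is hypothetical):

        // Sketch of the assumed setup for the handler above; field and method
        // names other than BitmapToDisplay and DepthFrameArrived are hypothetical.
        private KinectSensor _Sensor;
        private DepthFrameReader _DepthReader;
        private WriteableBitmap BitmapToDisplay;

        private void InitializeDepthDisplay()
        {
            _Sensor = KinectSensor.GetDefault();

            FrameDescription depthDescription = _Sensor.DepthFrameSource.FrameDescription;

            // Gray16 matches the 16-bit values that CopyFrameDataToIntPtr writes.
            BitmapToDisplay = new WriteableBitmap(
                depthDescription.Width,
                depthDescription.Height,
                96.0,
                96.0,
                PixelFormats.Gray16,
                null);

            _DepthReader = _Sensor.DepthFrameSource.OpenReader();
            _DepthReader.FrameArrived += DepthFrameArrived;
            _Sensor.Open();
        }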
Code Example #2
        private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            // Check whether depth frame processing is enabled
            if (!(depthRadioButton.IsChecked ?? false))
            {
                return;
            }

            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame()) {
                if (depthFrame == null)
                {
                    return;
                }

                FrameDescription depthFrameDescription = depthFrame.FrameDescription;

                using (Mat depthMat = new Mat(depthFrameDescription.Height, depthFrameDescription.Width, DepthType.Cv16U, 1))
                using (Mat convertedMat = new Mat(depthFrameDescription.Height, depthFrameDescription.Width, DepthType.Cv8U, 1))
                {
                    depthFrame.CopyFrameDataToIntPtr(depthMat.DataPointer, depthFrameDescription.BytesPerPixel * depthFrameDescription.LengthInPixels);
                    depthMat.ConvertTo(convertedMat, DepthType.Cv8U, 1 / 256d);
                    CvInvoke.Imshow("depth", convertedMat);
                }
            }
        }
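
This handler assumes a DepthFrameReader has already been opened and subscribed; the ConvertTo scale of 1/256 simply compresses the 16-bit depth values into 8 bits for display. A possible wiring sketch (all names except Reader_DepthFrameArrived are hypothetical):

        // Sketch of the assumed reader setup; names other than
        // Reader_DepthFrameArrived are hypothetical.
        private KinectSensor kinectSensor;
        private DepthFrameReader depthFrameReader;

        private void OpenDepthReader()
        {
            kinectSensor = KinectSensor.GetDefault();
            depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();
            depthFrameReader.FrameArrived += Reader_DepthFrameArrived;
            kinectSensor.Open();
        }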
Code Example #3
    private void MultiFrameArrived(object sender, MultiSourceFrameArrivedEventArgs args)
    {
        bool             dataReceived     = false;
        MultiSourceFrame multiSourceFrame = args.FrameReference.AcquireFrame();

        // The frame reference may have expired by the time this handler runs.
        if (multiSourceFrame == null)
        {
            return;
        }

        using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame()) {
            if (depthFrame != null)
            {
                var pDepthData = GCHandle.Alloc(pDepthBuffer, GCHandleType.Pinned);
                depthFrame.CopyFrameDataToIntPtr(pDepthData.AddrOfPinnedObject(), (uint)pDepthBuffer.Length * sizeof(ushort));
                pDepthData.Free();
                dataReceived = true;
            }
        }

        using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame()) {
            if (colorFrame != null)
            {
                var pColorData = GCHandle.Alloc(pColorBuffer, GCHandleType.Pinned);
                colorFrame.CopyConvertedFrameDataToIntPtr(pColorData.AddrOfPinnedObject(), (uint)pColorBuffer.Length, ColorImageFormat.Bgra);
                pColorData.Free();
                dataReceived = true;
            }
        }

        using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame()) {
            if (bodyIndexFrame != null)
            {
                var pBodyIndexData = GCHandle.Alloc(pBodyIndexBuffer, GCHandleType.Pinned);
                bodyIndexFrame.CopyFrameDataToIntPtr(pBodyIndexData.AddrOfPinnedObject(), (uint)pBodyIndexBuffer.Length);
                pBodyIndexData.Free();
                dataReceived = true;
            }
        }

        using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame()) {
            if (bodyFrame != null)
            {
                if (pBodyData == null)
                {
                    pBodyData = new Body[bodyFrame.BodyCount];
                }
                bodyFrame.GetAndRefreshBodyData(pBodyData);
                dataReceived = true;
            }
        }

        if (dataReceived)
        {
//////// FPS - BEGIN
            frameCount++;
//////// FPS - END
            ProcessFrame();
        }
    }
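
The arrays pinned above (pDepthBuffer, pColorBuffer, pBodyIndexBuffer) and the pBodyData array have to be pre-allocated to the size of each stream. A sketch of that allocation and reader wiring, assuming Bgra color output (names other than the buffer fields and MultiFrameArrived are hypothetical):

    // Sketch of the assumed buffer allocation and reader wiring; names other
    // than the buffer fields and MultiFrameArrived are hypothetical.
    private KinectSensor sensor;
    private MultiSourceFrameReader multiReader;
    private ushort[] pDepthBuffer;
    private byte[] pColorBuffer;
    private byte[] pBodyIndexBuffer;
    private Body[] pBodyData;

    private void InitializeBuffers()
    {
        sensor = KinectSensor.GetDefault();

        FrameDescription depthDesc     = sensor.DepthFrameSource.FrameDescription;
        FrameDescription colorDesc     = sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
        FrameDescription bodyIndexDesc = sensor.BodyIndexFrameSource.FrameDescription;

        pDepthBuffer     = new ushort[depthDesc.LengthInPixels];
        pColorBuffer     = new byte[colorDesc.LengthInPixels * colorDesc.BytesPerPixel];
        pBodyIndexBuffer = new byte[bodyIndexDesc.LengthInPixels];

        multiReader = sensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
        multiReader.MultiSourceFrameArrived += MultiFrameArrived;
        sensor.Open();
    }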
Code Example #4
        /// <summary>
        /// Copies the image data of the given DepthFrame into a new 16-bit,
        /// single-channel EMGU CV Mat.
        /// </summary>
        /// <param name="depthFrame">The DepthFrame whose image data is copied</param>
        /// <returns>A Cv16U Mat containing the depth data</returns>
        private unsafe Mat ProcessDepthFrameData(DepthFrame depthFrame)
        {
            // create EMGU and copy the Frame Data into it

            // Generate Mat used for EMGU images
            Mat mat = new Mat(depthFrameDescription.Height, depthFrameDescription.Width, DepthType.Cv16U, 1);

            // Move data to new Mat
            depthFrame.CopyFrameDataToIntPtr(mat.DataPointer, (uint)(depthFrameDescription.Width * depthFrameDescription.Height * 2));

            return mat;
        }
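
ProcessDepthFrameData assumes that the depthFrameDescription field was captured from the depth source beforehand and that the caller disposes the returned Mat. A hedged usage sketch (the surrounding handler below is hypothetical, not part of the original project):

        // Hypothetical caller for ProcessDepthFrameData; assumes depthFrameDescription
        // was set from sensor.DepthFrameSource.FrameDescription when the reader was opened.
        private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                using (Mat depthMat = ProcessDepthFrameData(depthFrame))
                using (Mat displayMat = new Mat())
                {
                    // Scale the 16-bit depth values down to 8 bits for display.
                    depthMat.ConvertTo(displayMat, DepthType.Cv8U, 1 / 256d);
                    CvInvoke.Imshow("depth", displayMat);
                }
            }
        }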
Code Example #5
 private void DepthFrameReader_FrameArrived(DepthFrame depthFrame)
 {
     if (depthFrame != null)
     {
         FrameDescription depthFrameDescription = depthFrame.FrameDescription;
         using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
         {
             using (var dest = ImagePool.GetOrCreate(depthFrameDescription.Width, depthFrameDescription.Height, Imaging.PixelFormat.Gray_16bpp))
             {
                 depthFrame.CopyFrameDataToIntPtr(dest.Resource.ImageData, (uint)(depthFrameDescription.Width * depthFrameDescription.Height * 2));
                 var time = this.pipeline.GetCurrentTimeFromElapsedTicks(depthFrame.RelativeTime.Ticks);
                 this.DepthImage.Post(dest, time);
             }
         }
     }
 }
Code Example #6
        void depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    FrameDescription desc = depthFrame.FrameDescription;

                    KinectBase.DepthFrameEventArgs depthE = new KinectBase.DepthFrameEventArgs();
                    depthE.bytesPerPixel = 2;  //This is fixed to 2 because we are using a ushort to hold the depth image
                    depthE.perPixelExtra = 2;  //We always have an extra two bytes per pixel because we are storing a Gray16 in a bgr32 format
                    depthE.height        = desc.Height;
                    depthE.width         = desc.Width;
                    depthE.kinectID      = kinectID;
                    depthE.timeStamp     = depthFrame.RelativeTime;
                    depthE.reliableMin   = (float)depthFrame.DepthMinReliableDistance / (float)ushort.MaxValue;
                    depthE.reliableMax   = (float)depthFrame.DepthMaxReliableDistance / (float)ushort.MaxValue;

                    //Get all the data for the depth, and store the bytes for the Gray16 in the blue and green channels of a bgr32
                    IntPtr depthImagePtr = Marshal.AllocHGlobal((int)(depthE.bytesPerPixel * desc.LengthInPixels));
                    depthFrame.CopyFrameDataToIntPtr(depthImagePtr, (uint)depthE.bytesPerPixel * desc.LengthInPixels);
                    //depthE.image = new byte[desc.LengthInPixels * (depthE.perPixelExtra + depthE.bytesPerPixel)];
                    depthE.image = depthImagePool.GetObject();
                    unsafe
                    {
                        fixed(byte *pDst = depthE.image)
                        {
                            ushort *pD = (ushort *)pDst;
                            ushort *pS = (ushort *)depthImagePtr.ToPointer();

                            for (int n = 0; n < desc.LengthInPixels; n++)
                            {
                                *pD = *pS;
                                pD += 2;
                                pS++;
                            }
                        }
                    }
                    Marshal.FreeHGlobal(depthImagePtr);

                    OnDepthFrameReceived(depthE);
                }
            }
        }
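
Because the copy loop above writes each 16-bit depth value into the low two bytes of a four-byte pixel, the finished buffer can be handed to WPF as a Bgr32 image. A hedged sketch of such a consumer (the helper below is hypothetical; only the DepthFrameEventArgs fields come from the example):

        // Hypothetical helper that wraps the Gray16-in-Bgr32 buffer produced
        // above as a WPF bitmap; only the depthE fields come from the example.
        private BitmapSource CreateDepthBitmap(KinectBase.DepthFrameEventArgs depthE)
        {
            // Four bytes per pixel: two for the depth value, two unused.
            int stride = depthE.width * (depthE.bytesPerPixel + depthE.perPixelExtra);

            return BitmapSource.Create(
                depthE.width,
                depthE.height,
                96.0,
                96.0,
                PixelFormats.Bgr32,
                null,
                depthE.image,
                stride);
        }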
Code Example #7
        private void DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
        {
            DepthFrame frame = e.FrameReference.AcquireFrame();

            if (frame != null)
            {
                using (frame)
                {
                    lock (m_lock)
                    {
                        frame.CopyFrameDataToIntPtr(depthData, 512 * 424 * 2);
                        this.runtime.Runtime.CoordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(depthData, 512 * 424 * 2, colpoints, 1920 * 1080 * 8);
                    }

                    this.FInvalidate = true;
                    this.frameindex  = frame.RelativeTime.Ticks;
                }
            }
        }
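
The literal sizes in this handler follow from the Kinect v2 stream formats: the depth frame is 512 x 424 ushorts (2 bytes each), and MapColorFrameToDepthSpaceUsingIntPtr fills one DepthSpacePoint (two floats, 8 bytes) per 1920 x 1080 color pixel. A sketch of the assumed unmanaged buffers (the allocation helpers below are hypothetical):

        // Hypothetical allocation of the unmanaged buffers used above:
        // depthData holds 512 x 424 ushort depth values, colpoints one
        // DepthSpacePoint (two floats) per 1920 x 1080 color pixel.
        private IntPtr depthData;
        private IntPtr colpoints;
        private readonly object m_lock = new object();

        private void AllocateMappingBuffers()
        {
            depthData = Marshal.AllocHGlobal(512 * 424 * 2);
            colpoints = Marshal.AllocHGlobal(1920 * 1080 * 8);
        }

        private void FreeMappingBuffers()
        {
            Marshal.FreeHGlobal(depthData);
            Marshal.FreeHGlobal(colpoints);
        }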
Code Example #8
        private unsafe void DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
        {
            DepthFrame frame = e.FrameReference.AcquireFrame();

            if (frame != null)
            {
                using (frame)
                {
                    lock (m_lock)
                    {
                        frame.CopyFrameDataToIntPtr(depthData, 512 * 424 * 2);
                        this.runtime.Runtime.CoordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(depthData, 512 * 424 * 2, colpoints, 512 * 424 * 8);

                        if (!this.FRawData[0])
                        {
                            float *col  = (float *)this.colpoints;
                            float *conv = (float *)this.convertedColPoints;
                            if (FRelativeLookup[0])
                            {
                                for (int i = 0; i < 512 * 424; i++)
                                {
                                    conv[i * 2]     = (float)VMath.Map(col[i * 2] - i % 1920, 0, 1920, 0, 1, TMapMode.Float);
                                    conv[i * 2 + 1] = (float)VMath.Map(col[i * 2 + 1] - VMath.Abs(i / 1920), 0, 1080, 0, 1, TMapMode.Float);
                                }
                            }
                            else
                            {
                                for (int i = 0; i < 512 * 424; i++)
                                {
                                    conv[i * 2]     = (float)VMath.Map(col[i * 2], 0, 1920, 0, 1, TMapMode.Clamp);
                                    conv[i * 2 + 1] = (float)VMath.Map(col[i * 2 + 1], 0, 1080, 0, 1, TMapMode.Clamp);
                                }
                            }
                        }
                    }

                    this.FInvalidate = true;
                }
            }
        }
Code Example #9
    void Update()
    {
        if (m_pMultiSourceFrameReader == null)
        {
            return;
        }

        MultiSourceFrame pMultiSourceFrame = m_pMultiSourceFrameReader.AcquireLatestFrame();

        if (pMultiSourceFrame != null)
        {
            frameCount++;
            using (DepthFrame pDepthFrame = pMultiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                using (ColorFrame pColorFrame = pMultiSourceFrame.ColorFrameReference.AcquireFrame())
                {
                    // Get Depth Frame Data
                    if (pDepthFrame != null)
                    {
                        GCHandle pDepthData = GCHandle.Alloc(pDepthBuffer, GCHandleType.Pinned);
                        pDepthFrame.CopyFrameDataToIntPtr(pDepthData.AddrOfPinnedObject(), (uint)pDepthBuffer.Length * sizeof(ushort));
                        pDepthData.Free();
                        pDepthFrame.Dispose();
                    }

                    // Get Color Frame Data
                    if (pColorFrame != null)
                    {
                        GCHandle pColorData = GCHandle.Alloc(pColorBuffer, GCHandleType.Pinned);
                        pColorFrame.CopyConvertedFrameDataToIntPtr(pColorData.AddrOfPinnedObject(), (uint)pColorBuffer.Length, ColorImageFormat.Rgba);
                        pColorData.Free();
                        pColorFrame.Dispose();
                    }
                    ProcessFrame();
                }
            }
        }
    }
Code Example #10
        private unsafe void FrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            _Stopwatch.Restart();
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            if (multiSourceFrame != null)
            {
                ColorFrame     colorFrame     = null;
                DepthFrame     depthFrame     = null;
                BodyFrame      bodyFrame      = null;
                BodyIndexFrame bodyIndexFrame = null;

                try
                {
                    bool allRequiredDataReceived = true;

                    if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Color))
                    {
                        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                        if (colorFrame != null)
                        {
                            fixed(byte *colorBytesPointer = ColorPixels)
                            {
                                IntPtr colorPtr = (IntPtr)colorBytesPointer;
                                uint   size     = (uint)(_ColorFrameDescription.Width * _ColorFrameDescription.Height * _ColorFrameDescription.BytesPerPixel);

                                if (colorFrame.RawColorImageFormat == ImageFormat)
                                {
                                    colorFrame.CopyRawFrameDataToIntPtr(colorPtr, size);
                                }
                                else
                                {
                                    colorFrame.CopyConvertedFrameDataToIntPtr(colorPtr, size, ImageFormat);
                                }
                            }
                        }
                        else
                        {
                            allRequiredDataReceived = false;
                        }
                    }


                    if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Depth) && allRequiredDataReceived)
                    {
                        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                        if (depthFrame != null)
                        {
                            fixed(ushort *depthBytesPointer = DepthPixels)
                            {
                                IntPtr depthPtr = (IntPtr)depthBytesPointer;

                                depthFrame.CopyFrameDataToIntPtr(depthPtr, (uint)(_DepthFrameDescription.Width * _DepthFrameDescription.Height * _DepthFrameDescription.BytesPerPixel));
                            }
                        }
                        else
                        {
                            allRequiredDataReceived = false;
                        }
                    }


                    if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Body) && allRequiredDataReceived)
                    {
                        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();
                        if (bodyFrame != null)
                        {
                            bodyFrame.GetAndRefreshBodyData(Bodies);
                        }
                        else
                        {
                            allRequiredDataReceived = false;
                        }
                    }


                    if (_FrameSourceTypes.HasFlag(FrameSourceTypes.BodyIndex) && allRequiredDataReceived)
                    {
                        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                        if (bodyIndexFrame != null)
                        {
                            fixed(byte *bodyIndexBytesPointer = BodyIndexPixels)
                            {
                                IntPtr bodyIndexPtr = (IntPtr)bodyIndexBytesPointer;

                                bodyIndexFrame.CopyFrameDataToIntPtr(bodyIndexPtr, (uint)(_BodyIndexFrameDescription.Width * _BodyIndexFrameDescription.Height * _BodyIndexFrameDescription.BytesPerPixel));
                            }
                        }
                        else
                        {
                            allRequiredDataReceived = false;
                        }
                    }

                    if (allRequiredDataReceived)
                    {
                        _KinectFrameArrivedEventArgs.ColorPixels     = ColorPixels;
                        _KinectFrameArrivedEventArgs.DepthPixels     = DepthPixels;
                        _KinectFrameArrivedEventArgs.Bodies          = Bodies;
                        _KinectFrameArrivedEventArgs.BodyIndexPixels = BodyIndexPixels;
                        _KinectFrameArrivedEventArgs.KinectSensor    = multiSourceFrame.KinectSensor;
                        _KinectFrameArrivedEventArgs.FrameNumber     = _FrameNumber;

                        EventHandler<KinectFrameArrivedEventArgs> handler = FrameArrived;
                        if (handler != null)
                        {
                            handler(this, _KinectFrameArrivedEventArgs);
                        }
                    }
                }
                finally
                {
                    if (colorFrame != null)
                    {
                        colorFrame.Dispose();
                    }

                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                    }

                    if (bodyFrame != null)
                    {
                        bodyFrame.Dispose();
                    }

                    if (bodyIndexFrame != null)
                    {
                        bodyIndexFrame.Dispose();
                    }
                }
            }

            _Stopwatch.Stop();
            RaiseKinectFrameComplete(_Stopwatch.Elapsed);
            _FrameNumber++;
        }
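
The handler depends on frame descriptions and pre-sized buffers captured when the reader was opened. A sketch of that initialization under those assumptions (only the field names used by the handler are taken from the example; the method itself is hypothetical):

        // Hypothetical initialization for the handler above; the field names
        // match the handler, everything else is an assumption.
        private void InitializeReader(KinectSensor sensor, FrameSourceTypes frameSourceTypes)
        {
            _FrameSourceTypes = frameSourceTypes;

            _ColorFrameDescription     = sensor.ColorFrameSource.CreateFrameDescription(ImageFormat);
            _DepthFrameDescription     = sensor.DepthFrameSource.FrameDescription;
            _BodyIndexFrameDescription = sensor.BodyIndexFrameSource.FrameDescription;

            ColorPixels     = new byte[_ColorFrameDescription.LengthInPixels * _ColorFrameDescription.BytesPerPixel];
            DepthPixels     = new ushort[_DepthFrameDescription.LengthInPixels];
            BodyIndexPixels = new byte[_BodyIndexFrameDescription.LengthInPixels];
            Bodies          = new Body[sensor.BodyFrameSource.BodyCount];

            MultiSourceFrameReader reader = sensor.OpenMultiSourceFrameReader(frameSourceTypes);
            reader.MultiSourceFrameArrived += FrameReader_MultiSourceFrameArrived;
            sensor.Open();
        }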
Code Example #11
        /// <summary>
        /// Handles the depth/color/body index frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void OnMultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            DepthFrame depthFrame = null;
            ColorFrame colorFrame = null;
            BodyFrame  bodyFrame  = null;

            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }

            // We use a try/finally to ensure that we clean up before we exit the function.
            // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                bodyFrame  = multiSourceFrame.BodyFrameReference.AcquireFrame();

                // If any frame has expired by the time we process this event, return.
                // The "finally" statement will Dispose any that are not null.
                if (depthFrame == null || colorFrame == null || bodyFrame == null)
                {
                    return;
                }

                // Copy color data (using Bgra format)
                colorFrame.CopyConvertedFrameDataToIntPtr(colorPixels, COLOR_PIXEL_BYTES, ColorImageFormat.Bgra);

                if (ColorDataCheckBox.Checked)
                {
                    Marshal.Copy(colorPixels, colorFrameData, 8, (int)COLOR_PIXEL_BYTES);
                    colorFramePublisher.Send(new ZFrame(colorFrameData));
                }

                if (BodyDataCheckBox.Checked)
                {
                    // Copy data for Body tracking
                    bodyArray = new Body[bodyFrame.BodyCount];
                    bodyFrame.GetAndRefreshBodyData(bodyArray);

                    // Remove old bodies
                    bodyFrameData.Clear();

                    //At this point, we are just reserving 4 bytes for storing 'bodyCount' and we are going to modify it later
                    AddArrayToList(ref bodyFrameData, new byte[4] { 0, 0, 0, 0 });

                    int bodyCount = 0;
                    foreach (Body body in bodyArray)
                    {
                        if (!body.IsTracked)
                        {
                            continue;
                        }

                        AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(body.TrackingId));   //add 8 bytes for ulong TrackingId
                        AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(ALL_JOINTS.Length)); //add 4 bytes for int joint count

                        foreach (JointType jointType in ALL_JOINTS)
                        {
                            var joint = body.Joints[jointType];
                            AddArrayToList(ref bodyFrameData, BitConverter.GetBytes((int)joint.TrackingState)); //add 4 bytes for int TrackingState
                            AddArrayToList(ref bodyFrameData, BitConverter.GetBytes((int)joint.JointType));     //add 4 bytes for int JointType
                            AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(joint.Position.X));         //add 4 bytes for float X
                            AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(joint.Position.Y));         //add 4 bytes for float Y
                            AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(joint.Position.Z));         //add 4 bytes for float Z
                        }
                        bodyCount++;
                    }

                    var bodyCountBytes = BitConverter.GetBytes(bodyCount);//4 bytes
                    UpdateList(bodyCountBytes, ref bodyFrameData);

                    bodyFramePublisher.Send(new ZFrame(bodyFrameData.ToArray()));
                }

                if (PointCloudDataCheckBox.Checked)
                {
                    depthFrame.CopyFrameDataToIntPtr(depthFrameData, DEPTH_FRAME_BYTES);
                    coordinateMapper.MapDepthFrameToCameraSpaceUsingIntPtr(depthFrameData, DEPTH_FRAME_BYTES, camerSpacePoints, CAMERA_SPACE_BYTES);
                    coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(depthFrameData, DEPTH_FRAME_BYTES, colorSpacePoints, COLOR_SPACE_BYTES);

                    // Remove old points
                    ClearPointCloud();

                    //At this point, we are just reserving 4 bytes for storing 'pointCloudSize' and we are going to modify it later
                    AddPointsToCloud(new byte[4] { 0, 0, 0, 0 });

                    ComposePointCloud();

                    GetNonEmptyPointCloud(out byte[] pointCloud);
                    pointCloudPublisher.Send(new ZFrame(pointCloud));
                }
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }

                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }

                if (bodyFrame != null)
                {
                    bodyFrame.Dispose();
                }
            }
        }
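
The size constants used above correspond to the Kinect v2 formats: the 512 x 424 depth frame at 2 bytes per pixel, the 1920 x 1080 Bgra color frame at 4 bytes per pixel, one CameraSpacePoint (three floats, 12 bytes) and one ColorSpacePoint (two floats, 8 bytes) per depth pixel. A hedged sketch of those constants and the unmanaged buffers they size (the values below are assumptions consistent with the calls above, not taken from the original project):

        // Hypothetical constants and unmanaged buffers for the handler above;
        // sizes are inferred from the Kinect v2 stream formats.
        private const int DEPTH_WIDTH  = 512;
        private const int DEPTH_HEIGHT = 424;
        private const int COLOR_WIDTH  = 1920;
        private const int COLOR_HEIGHT = 1080;

        private const uint DEPTH_FRAME_BYTES  = DEPTH_WIDTH * DEPTH_HEIGHT * 2;   // ushort per depth pixel
        private const uint COLOR_PIXEL_BYTES  = COLOR_WIDTH * COLOR_HEIGHT * 4;   // Bgra, 4 bytes per color pixel
        private const uint CAMERA_SPACE_BYTES = DEPTH_WIDTH * DEPTH_HEIGHT * 12;  // CameraSpacePoint = 3 floats
        private const uint COLOR_SPACE_BYTES  = DEPTH_WIDTH * DEPTH_HEIGHT * 8;   // ColorSpacePoint = 2 floats

        private IntPtr depthFrameData;
        private IntPtr colorPixels;
        private IntPtr camerSpacePoints;
        private IntPtr colorSpacePoints;

        private void AllocateUnmanagedBuffers()
        {
            depthFrameData   = Marshal.AllocHGlobal((int)DEPTH_FRAME_BYTES);
            colorPixels      = Marshal.AllocHGlobal((int)COLOR_PIXEL_BYTES);
            camerSpacePoints = Marshal.AllocHGlobal((int)CAMERA_SPACE_BYTES);
            colorSpacePoints = Marshal.AllocHGlobal((int)COLOR_SPACE_BYTES);
        }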