        /// <summary>
        /// Checks whether this gesture part has been completed for the given skeleton and depth frame.
        /// </summary>
        /// <param name="skeleton">The tracked skeleton.</param>
        /// <param name="e">The frame event arguments used to open the current depth frame.</param>
        /// <param name="gestureStatus">Shared state tracking whether a fist is being made and the recorded start/end points.</param>
        /// <returns>A GesturePartResult based on whether the gesture part has been completed.</returns>
        public GesturePartResult CheckGesture(Skeleton skeleton, AllFramesReadyEventArgs e, GestureStatus gestureStatus)
        {
            // Pause unless the right hand is extended in front of the right shoulder
            if (skeleton.Joints[JointType.HandRight].Position.Z > skeleton.Joints[JointType.ShoulderRight].Position.Z - 0.15)
            {
                return GesturePartResult.Pausing;
            }

            DepthImageFrame depthFrame = e.OpenDepthImageFrame();
            byte[] handBytes = null;

            if (depthFrame == null)
            {
                return GesturePartResult.Fail;
            }

            try
            {
                if (skeleton.Joints[JointType.HandRight].Position.X == 0 || skeleton.Joints[JointType.HandRight].Position.Y == 0)
                {
                    depthFrame.Dispose();
                    return GesturePartResult.Fail;
                }

                DepthImagePoint rightHandPoint = depthFrame.MapFromSkeletonPoint(skeleton.Joints[JointType.HandRight].Position);

                // Center a 100x100 window roughly on the right hand in depth-image coordinates
                int intRightX = rightHandPoint.X - 50;
                int intRightY = rightHandPoint.Y - 60;

                Int32Rect rectRightHand = new Int32Rect(
                              (intRightX < 0) ? 0 : (intRightX + 100 >= depthFrame.Width) ? depthFrame.Width - 100 : intRightX,
                              (intRightY < 0) ? 0 : (intRightY + 100 >= depthFrame.Height) ? depthFrame.Height - 100 : intRightY,
                               100,
                               100
                             );

                handBytes = GenerateColoredBytes(depthFrame, rectRightHand, rightHandPoint.Depth);

                // Determine whether the hand is closed into a fist and record the drag points:
                // on a 'no' -> 'yes' transition record the starting point,
                // on a 'yes' -> 'no' transition record the ending point.

                // Copy the hand region into unmanaged memory and display it for debugging.
                IntPtr sourcePtr = IntPtr.Zero;
                try
                {
                    // Create an 8-bit, 3-channel image the size of the hand rectangle
                    sourcePtr = CvInvoke.cvCreateImage(new System.Drawing.Size(rectRightHand.Width, rectRightHand.Height),
                        Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 3);
                    MIplImage source = (MIplImage)Marshal.PtrToStructure(sourcePtr, typeof(MIplImage));

                    // Copy the managed byte array into the unmanaged image buffer and show it
                    Marshal.Copy(handBytes, 0, source.imageData, rectRightHand.Width * rectRightHand.Height * 3);
                    CvInvoke.cvShowImage("Source", sourcePtr);
                }
                finally
                {
                    // Free the unmanaged image (cvShowImage copies the data into its window)
                    if (sourcePtr != IntPtr.Zero)
                    {
                        CvInvoke.cvReleaseImage(ref sourcePtr);
                    }
                }

                if (isMakingAFist(handBytes, rectRightHand))
                {
                    // 'no' -> 'yes' transition: record the starting point
                    if (!gestureStatus.isMakingAFast)
                    {
                        gestureStatus.startPoint.X = skeleton.Joints[JointType.HandRight].Position.X;
                        gestureStatus.startPoint.Y = skeleton.Joints[JointType.HandRight].Position.Y;
                        gestureStatus.startPoint.Z = skeleton.Joints[JointType.HandRight].Position.Z;
                    }
                    gestureStatus.isMakingAFast = true;
                }
                else
                {
                    // 'yes' -> 'no' transition: record the ending point
                    if (gestureStatus.isMakingAFast)
                    {
                        gestureStatus.endPoint.X = skeleton.Joints[JointType.HandRight].Position.X;
                        gestureStatus.endPoint.Y = skeleton.Joints[JointType.HandRight].Position.Y;
                        gestureStatus.endPoint.Z = skeleton.Joints[JointType.HandRight].Position.Z;
                    }
                    gestureStatus.isMakingAFast = false;
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show("AfterConvert : " + ex.Message);
                depthFrame.Dispose();
                return GesturePartResult.Fail;
            }
            depthFrame.Dispose();
            //TODO
            return GesturePartResult.Suceed;
        }
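
The method above mutates a GestureStatus object that is not part of this example, and it also relies on the helpers GenerateColoredBytes and isMakingAFist, which are defined elsewhere in the project. A minimal, hypothetical sketch of the shape GestureStatus would need for the code above to compile is shown below; the member names are taken directly from the calls in CheckGesture, and startPoint/endPoint are public fields rather than properties because SkeletonPoint is a value type (assigning gestureStatus.startPoint.X through a property would not compile).

        // Hypothetical sketch only: the real GestureStatus class is defined elsewhere in the project.
        public class GestureStatus
        {
            // Whether the right hand was closed into a fist in the previous frame
            // (the original spelling "isMakingAFast" is kept so CheckGesture compiles unchanged).
            public bool isMakingAFast;

            // Drag start and end positions in skeleton space.
            public SkeletonPoint startPoint;
            public SkeletonPoint endPoint;
        }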
Example #2
        /// <summary>
        /// Converts an MIplImage structure to a System.Drawing.Bitmap.
        /// Based on: http://www.cnblogs.com/xrwang/archive/2010/01/26/TheInteractionOfOpenCv-EmguCvANDDotNet.html
        /// </summary>
        /// <param name="image">The MIplImage structure to convert.</param>
        /// <returns>A Bitmap that wraps the image data.</returns>
        public static Bitmap MIplImagePointerToBitmap(MIplImage image)
        {
            PixelFormat pixelFormat;
            const string unsupportedDepth = "Unsupported pixel bit depth (IPL_DEPTH)";
            const string unsupportedChannels = "Unsupported number of channels (only 1, 3 and 4 channels are supported)";
            switch (image.nChannels)
            {
                case 1:
                    switch (image.depth)
                    {
                        case IPL_DEPTH.IPL_DEPTH_8U:
                            pixelFormat = PixelFormat.Format8bppIndexed;
                            break;
                        case IPL_DEPTH.IPL_DEPTH_16U:
                            pixelFormat = PixelFormat.Format16bppGrayScale;
                            break;
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                    break;
                case 3:
                    switch (image.depth)
                    {
                        case IPL_DEPTH.IPL_DEPTH_8U:
                            pixelFormat = PixelFormat.Format24bppRgb;
                            break;
                        case IPL_DEPTH.IPL_DEPTH_16U:
                            pixelFormat = PixelFormat.Format48bppRgb;
                            break;
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                    break;
                case 4:
                    switch (image.depth)
                    {
                        case IPL_DEPTH.IPL_DEPTH_8U:
                            pixelFormat = PixelFormat.Format32bppArgb;
                            break;
                        case IPL_DEPTH.IPL_DEPTH_16U:
                            pixelFormat = PixelFormat.Format64bppArgb;
                            break;
                        default:
                            throw new NotImplementedException(unsupportedDepth);
                    }
                    break;
                default:
                    throw new NotImplementedException(unsupportedChannels);

            }
            Bitmap bitmap = new Bitmap(image.width, image.height, image.widthStep, pixelFormat, image.imageData);
            // For 8-bit grayscale images the indexed color palette must also be set
            if (pixelFormat == PixelFormat.Format8bppIndexed)
                SetColorPaletteOfGrayscaleBitmap(bitmap);
            return bitmap;
        }
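
The helper SetColorPaletteOfGrayscaleBitmap called above is not included in this example. A plausible minimal implementation, assuming (as in the cnblogs article this code is adapted from) that it only needs to replace the default 8bpp indexed palette with a linear grayscale ramp, could look like this:

        public static void SetColorPaletteOfGrayscaleBitmap(Bitmap bitmap)
        {
            // Only 8bpp indexed bitmaps carry a palette that needs replacing.
            if (bitmap.PixelFormat != PixelFormat.Format8bppIndexed)
                return;

            // ColorPalette has no public constructor: copy it, rewrite the entries, then assign it back.
            ColorPalette palette = bitmap.Palette;
            for (int i = 0; i < palette.Entries.Length; i++)
                palette.Entries[i] = Color.FromArgb(255, i, i, i);
            bitmap.Palette = palette;
        }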