Example #1
        public static HandFrame zoomOnHand(short[] depthFrame, DepthFrameParams DFSettings, ObjectPoint handPos)
        {
            HandFrame HF = new HandFrame(DFSettings.Width, DFSettings.Height, handPos.X, handPos.Y, getRadiusFromHand(handPos.Depth));

            // Convert the 2D (X, Y) hand position into an index into the 1D depth array: DepthImgWidth * Y + X
            // TODO: should xShift and yShift be incorporated here?
            int startPoint = DFSettings.Width * Math.Max(1, (handPos.Y - HF.radius)) + Math.Max(1, handPos.X - HF.radius);
            int endPoint   = Math.Min(depthFrame.Length - 1, DFSettings.Width * (handPos.Y + HF.radius) + handPos.X + HF.radius);

            int x = 0;
            int y = 0;

            for (int i = startPoint; i <= endPoint; i++)
            {
                // Wrap the linear index to the next row of the 2D window
                if (x >= HF.width)
                {
                    // Skip the remainder of this frame row; the -1 offsets the loop's i++
                    i += DFSettings.Width - HF.width - 1;
                    x  = 0;
                    y++;
                    continue;
                }

                // Shift out the low-order bits so only the depth value remains
                // (assumption: on Kinect v1 these low bits carry the player index)
                depthFrame[i] = (short)(depthFrame[i] >> DFSettings.Bitmask);

                // Kinect reports unknown depth as -1 or 0; clamp unknown or too-distant
                // readings to the back plane of the hand volume (handPos.Depth + radius)
                if (depthFrame[i] <= 0 || depthFrame[i] >= handPos.Depth + HF.radius)
                {
                    depthFrame[i] = (short)(handPos.Depth + HF.radius);
                }

                HF.abs[y, x] = (short)(depthFrame[i]);
                HF.dx[y, x]  = threshold(depthFrame[i] - depthFrame[i - 1], FingerDetection.DetectionParameters.dxThreshold);
                HF.dy[y, x]  = threshold(depthFrame[i] - depthFrame[i - DFSettings.Width], FingerDetection.DetectionParameters.dyThreshold);

                x++;
            }

            return HF;
        }
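
A minimal call-site sketch for the method above (hedged: the DepthFrameParams fields Width/Height/Bitmask and the ObjectPoint members X, Y, Depth are inferred from the code; the object-initializer shapes, the GetLatestDepthPixels helper, and the 640x480 resolution are illustrative assumptions, not the repo's documented API):

        // Hypothetical driver code: pixels holds one Kinect depth frame copied into a short[].
        short[] pixels = GetLatestDepthPixels();                                                // assumed helper
        DepthFrameParams dfp = new DepthFrameParams { Width = 640, Height = 480, Bitmask = 3 }; // assumed fields
        ObjectPoint hand = new ObjectPoint { X = 320, Y = 240, Depth = 900 };                   // hand at ~0.9 m

        HandFrame HF = ProcessDepthImage.zoomOnHand(pixels, dfp, hand);

        // HF.abs now holds the clamped depth window around the hand;
        // HF.dx and HF.dy hold the thresholded horizontal and vertical gradients.
        Console.WriteLine("Hand window is " + HF.width + " px wide, radius " + HF.radius);

Note that zoomOnHand shifts and clamps depthFrame in place, so the same array should not be reused by code that expects the raw sensor values.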
Example #2
        public static int updateFingersFromDepth(short[] depthFrame, DepthFrameParams DFP, HandPoint[] hands)
        {
            // Check left hand
            if (hands[(int)HandPoint.HANDS.LEFT].tracked)
            {
                // Zoom on hand and get dx, dy
                HandFrame HF = ProcessDepthImage.zoomOnHand(depthFrame, DFP, hands[(int)HandPoint.HANDS.LEFT]);
                // Smooth dx, dy
                Convolution.seperableConv(DetectionParameters.seperableKernal, HF);
                hands[(int)HandPoint.HANDS.LEFT].tracked = ParticleGradient(HF, hands[(int)HandPoint.HANDS.LEFT]);
            }

            // Check right hand
            if (hands[(int)HandPoint.HANDS.RIGHT].tracked)
            {
                // Zoom on hand and get dx, dy
                HandFrame HF = ProcessDepthImage.zoomOnHand(depthFrame, DFP, hands[(int)HandPoint.HANDS.RIGHT]);
                // Smooth dx, dy
                Convolution.seperableConv(DetectionParameters.seperableKernal, HF);
                hands[(int)HandPoint.HANDS.RIGHT].tracked = ParticleGradient(HF, hands[(int)HandPoint.HANDS.RIGHT]);
            }

            return 1;
        }
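
A sketch of how the method above might be driven from a Kinect for Windows SDK v1 depth event (hedged: the depthParams and hands fields and their initialization are assumptions inferred from the signatures shown here, and updateFingersFromDepth is called unqualified on the assumption that the handler lives in the same class; requires using Microsoft.Kinect;):

        private DepthFrameParams depthParams; // assumed to be filled with Width/Height/Bitmask at startup
        private HandPoint[] hands;            // assumed to be kept up to date by the skeleton-tracking code

        private void OnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            using (DepthImageFrame frame = e.OpenDepthImageFrame())
            {
                if (frame == null)
                    return;

                // Copy the raw depth pixels out of the SDK frame.
                short[] pixels = new short[frame.PixelDataLength];
                frame.CopyPixelDataTo(pixels);

                // Refresh finger tracking for whichever hands are currently tracked.
                updateFingersFromDepth(pixels, depthParams, hands);
            }
        }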