Пример #1
0
 /// <summary>
 /// Toggles the OmniViewer debug windows: if output is currently suppressed,
 /// re-enable it; otherwise close every open viewer window.
 /// </summary>
 protected void Show_Hide(Object sender, EventArgs e)
 {
     if (!OmniViewer.Suppressed)
     {
         OmniViewer.CloseAll();
         return;
     }
     OmniViewer.Suppressed = false;
 }
Пример #2
0
        /// <summary>
        /// Per-tick pipeline: grab a frame, wait for webcam exposure to settle,
        /// then run motion detection and — on rising image complexity — dispatch
        /// the recognized gesture to the media-key controller.
        /// </summary>
        public void timer_Tick(object sender, EventArgs e)
        {
            Image <Bgr, byte> frame = camera.QueryFrame();
            frame = frame.Resize(ImageWidth, ImageHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR); //downsize image for better performance

            // Skip the first frames while the webcam's auto-exposure stabilizes.
            if (frameCount <= 20)
            {
                frameCount++;
                return;
            }

            OmniViewer.Show(ref frame, "imageQueried");

            // Short-circuit: MotionDetector only runs when Enabled is true.
            if (!Enabled || !MotionDetector.Compute(ref frame, ref imageProcessed))
            {
                return; //no hand raise/drop motion recognized this tick
            }

            int complexity = ComplexityComputer.Compute(ref frame); //query the complexity of the current image
            if (complexity > complexityOld) //motion is recognized as hand being raised if current complexity is larger than previous complexity
            {
                //GestureReader returns an integer indicating which gesture is being detected
                switch (GestureReader.Compute(ref imageProcessed))
                {
                case 0:
                    keyControl.PausePlay();
                    break;

                case 1:
                    keyControl.PrevTrack();
                    break;

                case 2:
                    keyControl.NextTrack();
                    break;

                case 3:
                    keyControl.Start();
                    break;
                }
            }
            complexityOld = complexity; //store current complexity for future comparisons
        }
Пример #3
0
        /// <summary>
        /// Measures image "complexity" as edge density: converts the frame to
        /// grayscale, runs Canny edge detection, and counts white (edge) pixels
        /// within the cropped region.
        /// </summary>
        public static int Compute(ref Image <Bgr, byte> imageSource)
        {
            Image <Gray, byte> edges = imageSource.Convert <Gray, byte>().Canny(cannyThresh, cannyLinking);

            edges.ROI = new Rectangle(0, 0, imageWidthCropped, imageHeightCropped);
            OmniViewer.Show(ref edges, "imageCanny");

            int edgeCount = 0;
            for (int y = 0; y < imageHeightCropped; y++)
            {
                for (int x = 0; x < imageWidthCropped; x++)
                {
                    if (edges.Data[y, x, 0] == 255) //Canny output is binary; 255 marks an edge pixel
                    {
                        edgeCount++;
                    }
                }
            }

            return edgeCount;
        }
Пример #4
0
        /// <summary>
        /// Detects a hand raise/drop motion by frame-differencing the current frame
        /// against the previous one. While motion is active it accumulates left/right
        /// position-weighted pixel statistics; on the falling edge it applies heuristics
        /// and, when they pass, writes the extracted hand silhouette to imageResult.
        /// </summary>
        /// <param name="imageSource">Current camera frame; stored as the background for the next call.</param>
        /// <param name="imageResult">Receives the morphologically-cleaned hand silhouette on detection.</param>
        /// <returns>True when a hand raise motion was recognized this call.</returns>
        public static bool Compute(ref Image <Bgr, byte> imageSource, ref Image <Gray, byte> imageResult)
        {
            bool handDetected = false;

            if (imageBackground != null)    //the very first call only primes the background frame
            {
                byte baseThreshold = 0;
                Image <Gray, byte> imageProcessed = null;
                getFrameDifference(ref imageSource, ref imageBackground, ref imageProcessed, ref baseThreshold);
                OmniViewer.Show(ref imageProcessed, "MotionDetector_imageProcessed0");
                List <int>   xArray = new List <int>(), yArray = new List <int>(); //find center of motion by finding the median of X and Y positions of pixels brighter than base threshold
                List <Point> candidates = new List <Point>();
                for (int i = 0; i < imageWidth; i++)
                {
                    for (int j = 0; j < imageHeight; j++)
                    {
                        if (imageProcessed.Data[j, i, 0] > baseThreshold)
                        {
                            imageProcessed.Data[j, i, 0] = 255;
                            xArray.Add(i); yArray.Add(j);
                            candidates.Add(new Point(i, j));
                        }
                    }
                }

                if (candidates.Count > 0)
                {
                    xArray.Sort(); yArray.Sort();
                    // NOTE(review): despite the "median" comment above, Count / 3 picks the
                    // 33rd percentile, biasing the center toward low X/Y — confirm intended.
                    Point centerOfMotion = new Point(xArray[xArray.Count / 3], yArray[yArray.Count / 3]);
                    imageProcessed._ThresholdBinary(new Gray(baseThreshold), white);    //smooth image to cancel noise, and get count of pixels above threshold
                    Image <Gray, byte> imageSmoothed = imageProcessed.SmoothGaussian(9);
                    int pixCount = 0;
                    for (int i = 0; i < imageWidth; i++)
                    {
                        for (int j = 0; j < imageHeight; j++)
                        {
                            if (imageSmoothed.Data[j, i, 0] == 255)
                            {
                                pixCount++;    //fix: removed redundant write of 255 onto a pixel already known to be 255
                            }
                            else
                            {
                                imageSmoothed.Data[j, i, 0] = 0;    //binarize: anything below full white is cleared
                            }
                        }
                    }

                    if (pixCount > 20 && centerOfMotion.X < imageWidth / 3d)
                    {
                        gestureActive = true;                                                         //heuristics to recognize motion as raising hand
                    }
                    else if (gestureCaptureDelayCurrent == gestureCaptureDelay)
                    {
                        gestureActive = false;
                    }
                    else
                    {
                        gestureCaptureDelayCurrent++;    //debounce: keep the gesture alive for a few frames before dropping it
                    }

                    if (gestureActive)
                    {
                        if (!gestureActiveOld)    //rising edge: motion just started, reset accumulators
                        {
                            // NOTE(review): motionTimerCount is only reset here and is checked
                            // below (>= 1) — it is presumably incremented elsewhere; verify.
                            imageBeforeMotion = imageBackground;
                            motionTimerCount  = 0;
                            xWeightedLeft     = xWeightedRight = 0d;
                            xWeightedCount    = 0;
                            motionXMin        = 9999;
                            motionXMax        = 0;
                        }

                        //accumulate distance-weighted pixel counts for the left and right halves
                        //of the frame; used on the falling edge to judge the motion's direction bias
                        for (int i = 0; i < imageWidth; i++)
                        {
                            for (int j = 0; j < imageHeight; j++)
                            {
                                if (imageProcessed.Data[j, i, 0] == 255)
                                {
                                    if (i < imageWidth / 2)
                                    {
                                        xWeightedLeft += 1d - (i / (imageWidth / 2d)); xWeightedCount++;
                                    }
                                    else
                                    {
                                        xWeightedRight += 1d - (imageWidth - i) / (imageWidth / 2d); xWeightedCount++;
                                    }
                                }
                            }
                        }

                        if (centerOfMotion.X < motionXMin)
                        {
                            motionXMin = centerOfMotion.X;
                        }
                        if (centerOfMotion.X > motionXMax)
                        {
                            motionXMax = centerOfMotion.X;
                        }
                        imageSmoothed.Draw(new CircleF(centerOfMotion, 5), new Gray(127), 2);    //visualize the motion center in the debug view
                    }
                    else if (gestureActiveOld)    //falling edge (fix: dropped redundant !gestureActive — this branch is only reachable when gestureActive is false)
                    {
                        // NOTE(review): xWeightedCount can be 0 here, making size NaN; the
                        // comparison below then fails and detection is silently skipped — confirm acceptable.
                        double size        = (xWeightedLeft / xWeightedCount) - (xWeightedRight / xWeightedCount);
                        int    motionWidth = motionXMax - motionXMin;
                        if (motionTimerCount >= 1 && motionTimerCount < 50 && size > 0.3d && motionWidth < 15)
                        {
                            imageAfterMotion = imageSource;
                            byte baseThreshold2 = 0;    //obtain preliminary hand silhouette by computing frame difference between images before and after motion, then thresholding image by base threshold
                            Image <Gray, byte> imageSubtraction = null;
                            getFrameDifference(ref imageAfterMotion, ref imageBeforeMotion, ref imageSubtraction, ref baseThreshold2);
                            imageSubtraction._ThresholdBinary(new Gray(/*baseThreshold2 / 3*/ 10), white);     //todo: threshold using baseThreshold2 instead of a fixed value
                            imageResult  = imageSubtraction.Erode(2).Dilate(2);    //morphological open to remove speckle noise
                            handDetected = true;
                        }
                        gestureCaptureDelayCurrent = 0;
                    }
                    gestureActiveOld = gestureActive;
                    OmniViewer.Show(ref imageSmoothed, "MotionDetector_imageSmoothed");
                    OmniViewer.Show(ref imageProcessed, "MotionDetector_imageProcessed");
                }
            }
            imageBackground = imageSource;    //current frame becomes the background for the next call
            return(handDetected);
        }
Пример #5
0
        // Reused across calls: one detector clusters the palm region, the other the drawn finger lines.
        private static BlobDetector blobDetectorPalm = new BlobDetector(), blobDetectorFingers = new BlobDetector();    //for clustering palm and fingers

        /// <summary>
        /// Counts raised fingers in a binary hand silhouette. Pipeline: erode and crop
        /// the silhouette, isolate the palm blob, trace its border, radiate Bresenham
        /// lines from each border pixel toward fingertip angles, draw the surviving
        /// lines, and cluster them into finger blobs. Returns the finger count,
        /// capped at 3 (the gesture number consumed by the caller).
        /// </summary>
        /// <param name="imageSource">Binary hand silhouette (white hand on black).</param>
        /// <returns>Number of detected fingers, clamped to the range 0..3.</returns>
        public static int Compute(ref Image <Gray, byte> imageSource)
        {
            Image <Gray, byte> imageProcessed = imageSource.Erode(1);                                        //erode image to prevent connected fingertips and other image defects

            Image <Gray, byte> imageCropped = new Image <Gray, byte>(imageWidthCropped, imageHeightCropped); //crop image for faster processing and noise cancellation, since right hand is always going to show up in the left of the image

            // Manual pixel copy of the top-left imageWidthCropped x imageHeightCropped region.
            for (int i = 0; i < imageWidthCropped; i++)
            {
                for (int j = 0; j < imageHeightCropped; j++)
                {
                    imageCropped.Data[j, i, 0] = imageProcessed.Data[j, i, 0];
                }
            }

            Image <Gray, byte> imagePalm = imageCropped.Clone();       //locating palm by eroding fingers (smoothing, equalizing, then thresholding is better than conventional erosion) and dilating the final palm shape

            imagePalm._SmoothGaussian(9);
            imagePalm._EqualizeHist();
            imagePalm._ThresholdBinary(new Gray(254), white);
            imagePalm = imagePalm.Dilate(5);

            blobDetectorPalm.Compute(ref imagePalm, 255);                                 //marking biggest blob (connected pixels cluster) as the palm blob

            // NOTE(review): BlobMaxArea is dereferenced without a null/empty check — if no
            // blob was found in imagePalm this will throw; confirm Compute guarantees a blob.
            int          medianWidth = blobDetectorPalm.BlobMaxArea.ComputeMedianWidth(); //crop the palm blob to exact height using custom heuristics
            int          palmCropY   = (int)(blobDetectorPalm.BlobMaxArea.YMin + medianWidth * 1.5d);
            List <Point> palmPoints  = new List <Point>();

            foreach (Point pt in blobDetectorPalm.BlobMaxArea.Data)
            {
                if (pt.Y < palmCropY)
                {
                    palmPoints.Add(pt);
                    imageCropped.Data[pt.Y, pt.X, 0] = 191; //mark palm pixels gray so they differ from the white (255) hand pixels
                }
            }

            List <Point> palmBlobBorder = new List <Point>();    //finding the border pixels of palm blob by checking if the pixel is bordering a white pixel

            foreach (Point pt in palmPoints)
            {
                int xMin = pt.X - 1, xMax = pt.X + 1,               //8 points surrounding pt
                           yMin = pt.Y - 1, yMax = pt.Y + 1;
                checkBounds(ref xMin, ref xMax, imageWidthCropped); //check if values are out of bounds of imageCropped
                checkBounds(ref yMin, ref yMax, imageHeightCropped);

                bool kill = false;                      //flag to break out of both loops once a white neighbor is found
                for (int i = xMin; i <= xMax; i++)      //survey 8 points surrounding pt
                {
                    for (int j = yMin; j <= yMax; j++)
                    {
                        if (imageCropped.Data[j, i, 0] == 255)      //detect pixels that border white pixels
                        {
                            palmBlobBorder.Add(pt);
                            kill = true;
                            break;
                        }
                    }
                    if (kill)
                    {
                        break;
                    }
                }
            }

            foreach (Point pt in palmBlobBorder)
            {
                imageCropped.Data[pt.Y, pt.X, 0] = 255; //setting the color of palm border pixels to white to avoid impeding the progress of bresenham line algorithm
            }
            double minLineLength       = 0d;            //minimum length in order to be marked as line that travels to a fingertip
            List <LineSegment2D> lines = new List <LineSegment2D>();

            foreach (Point pt in palmBlobBorder)    //path finding algorithm, find all straight lines that originate from palm boarder and travel to hand shape border
            {
                for (int i = 340; i >= 200; i--)    //radiate lines between angles 200 and 340 (upwards between 20 and 160)

                {
                    Point  ptResult;
                    double angle = i * Math.PI / 180d;
                    Point  ptCircle = getCircumferencePoint(pt, 160, angle);                     //end point of the line (opposing the origin end)
                    int    x = pt.X, y = pt.Y, x2 = ptCircle.X, y2 = ptCircle.Y;
                    if (bresenham(ref x, ref y, ref x2, ref y2, ref imageCropped, out ptResult)) //radiate lines between orign and end points

                    {
                        LineSegment2D line = new LineSegment2D(ptResult, pt);
                        lines.Add(line);
                        minLineLength += line.Length;    //add current line length to minLineLength since the latter is average length times a coefficient
                    }
                }
            }

            // NOTE(review): if no lines were traced, lines.Count is 0 and this double
            // division yields NaN/Infinity; the length filter below then rejects all
            // lines rather than throwing — confirm this fallback is intended.
            minLineLength = minLineLength / lines.Count * 2.5d;                                              //filter fingerlines to remove ones that do not travel to fingertips, then draw fingerlines that are left onto an image and run blob detection to find finger blobs
            Image <Gray, byte> imageFingers = new Image <Gray, byte>(imageWidthCropped, imageHeightCropped); //new image where all lines that travel to fingertips will be drawn

            foreach (LineSegment2D line in lines)
            {
                if (line.Length > minLineLength || line.P1.X == 0)
                {
                    imageFingers.Draw(line, new Gray(255), 1);        //draw finger lines that are longer than minLineLength, or if fingerline borders the left edge of the image in case the finger isn't fully within view
                }
            }
            imageFingers._SmoothGaussian(3);
            imageFingers._ThresholdBinary(new Gray(254), white);    //smooth drawn fingerlines into finger blobs
            blobDetectorFingers.Compute(ref imageFingers, 255);
            int fingersCount = 0;
            if (blobDetectorFingers.Blobs.Count > 1)
            {
                foreach (Blob blob in blobDetectorFingers.Blobs)    //heuristics for eliminating false blobs, specifically needed when a fist is shown
                {
                    // A finger blob must be at least 2px wide and roughly 3x longer than its
                    // median width (the +1 guards against a zero median width).
                    if (blob.ComputeMaxWidth() >= 2 && blob.Length / (blob.ComputeMedianWidth() + 1) >= 3)
                    {
                        int verificationCount = 0;
                        foreach (Blob blobCurrent in blobDetectorFingers.Blobs)
                        {
                            if (blob != blobCurrent && blob.YMin < blobCurrent.YMax)
                            {
                                verificationCount++; //count other blobs positioned consistently with this one being a finger
                            }
                        }

                        if (verificationCount > 0)
                        {
                            fingersCount++;
                        }
                    }
                }
            }
            else if (blobDetectorFingers.Blobs.Count == 1)
            {
                // Single blob: only the elongation test applies (no siblings to verify against).
                if (blobDetectorFingers.BlobMaxArea.Length / (blobDetectorFingers.BlobMaxArea.ComputeMedianWidth() + 1) >= 3)
                {
                    fingersCount = 1;
                }
            }

            if (fingersCount > 3)
            {
                fingersCount = 3;                     //currently limiting to 3 gestures, will be expanded in the future
            }
            OmniViewer.Show(ref imageFingers, "imageFingers");
            return(fingersCount);
        }