Example #1
0
        /// <summary>
        /// Performs principal component analysis (PCA) on an image's data points
        /// to derive the eigenvectors, center, and orientation of the hand.
        /// </summary>
        /// <param name="p_imgData">ImageData carrying the points to analyze; its
        /// EigenVectors, Center, and Orientation are populated here.</param>
        /// <seealso cref="http://www.cs.otago.ac.nz/cosc453/student_tutorials/principal_components.pdf"/>
        protected override void doWork(Object p_imgData)
        {
            // NOTE: dropped the pointless 'async' modifier — the method contained no
            // awaits (CS1998) and 'async void' makes exceptions unobservable.
            // Cast once instead of repeating the cast on every member access.
            ImageData imgData = (ImageData)p_imgData;

            //The point list is empty if there were no points to observe
            if (imgData.DataPoints.Count != 0)
            {
                //Narrow data set
                List<Point> dataPoints = imgData.DataPoints;

                //Step 2 of PCA. Step 1 is gathering data.
                PCAData pcaData = getMeanValues(dataPoints);

                //Step 3: develop the covariance matrix
                genCoVarMatrix(dataPoints, ref pcaData);

                //Step 4: get eigenvectors and eigenvalues
                calculateEigens(ref pcaData);

                //Prepare variables for drawing later in the pipeline
                imgData.EigenVectors = pcaData.eigenVectors;
                imgData.Center       = new Point((int)pcaData.XBar, (int)pcaData.YBar);
                imgData.Orientation  = getOrientation(pcaData.eigenVectors);
            }

            // Always forward the frame, even when no points were found.
            Processing.getInstance().ToGesturesImage = imgData;
        }
Example #2
0
        /// <summary>
        /// Handles all the drawing on the image: orientation axes, convex hull,
        /// contour, convexity defects, finger tips, and the recognized gesture.
        /// </summary>
        /// <param name="p_imgData">ImageData whose Image is annotated in place and
        /// forwarded to the return stage.</param>
        protected override void doWork(object p_imgData)
        {
            // Cast once instead of repeating ((ImageData)p_imgData) on every access.
            ImageData imgData = (ImageData)p_imgData;

            // ConvexDefects is null when the earlier stages found no hand to draw.
            if (imgData.ConvexDefects != null)
            {
                List <Point>        convexHull    = imgData.ConvexHull;
                List <Point>        contour       = imgData.Contour;
                List <ConvexDefect> convexDefects = imgData.ConvexDefects;
                List <Point>        fingerTips    = imgData.FingerTips;
                MotionGestureProcessing.ImageData.Gestures gesture = imgData.Gesture;

                byte[]     buffer;
                BitmapData data = BitmapManip.lockBitmap(out buffer, imgData.Image);

                drawOrientation(data, buffer, imgData.EigenVectors, imgData.Center);
                drawLines(ref data, ref buffer, convexHull, Color.Yellow);
                drawLines(ref data, ref buffer, contour, Color.Blue);
                drawDefects(ref data, ref buffer, convexDefects, Color.Orange);
                drawFingers(ref data, ref buffer, fingerTips, 20, Color.Red);
                drawGesture(ref data, ref buffer, gesture);

                BitmapManip.unlockBitmap(ref buffer, ref data, imgData.Image);

                // NOTE(review): saving to disk every frame looks like leftover
                // debugging — confirm whether "Final.jpg" is still needed.
                imgData.Image.Save("Final.jpg");
            }

            Processing.getInstance().ToReturnImage = imgData;
        }
        /// <summary>
        /// Establishes a listening connection: subscribes this stage's handler to
        /// the isolation-image-ready event so each filled image is processed.
        /// </summary>
        protected override void setupListener()
        {
            m_isoImageHandler = obj => doWork(obj);

            Processing.getInstance().IsolationImageFilled += m_isoImageHandler;
        }
        /// <summary>
        /// Listener for the preprocessing-image-ready event: routes each filled
        /// image into this stage's doWork.
        /// </summary>
        protected override void setupListener()
        {
            m_preprocImageHandler = obj => doWork(obj);

            Processing.getInstance().PreprocessImageFilled += m_preprocImageHandler;
        }
        /// <summary>
        /// Transforms the image into a filtered image: converts to grey scale,
        /// runs the per-quadrant ARGB filter in parallel, filters noise,
        /// strengthens the signal, and isolates the hand's data points.
        /// UPDATE: this now performs almost instantly instead of the 2 seconds it took before.
        /// </summary>
        /// <param name="p_imageData">ImageData whose Image and DataPoints are replaced in place.</param>
        protected override void doWork(Object p_imageData)
        {
            // NOTE: dropped the pointless 'async' modifier — no awaits (CS1998),
            // and 'async void' makes exceptions unobservable.
            if (m_isInitialized)
            {
                // Cast once instead of repeating it on every member access.
                ImageData imgData = (ImageData)p_imageData;

                Image procImage = imgData.Image;
                //Setting up a buffer to be used for concurrent read/write
                byte[] buffer;
                convert2PixelFormat(ref procImage);
                BitmapData data = BitmapManip.lockBitmap(out buffer, procImage);
                ImageProcess.convert2GreyScale(ref buffer);

                #region Call Parallel.Invoke for each quadrant
                // The image is split into four quadrants so the filter can run
                // concurrently on disjoint regions of the shared buffer.
                int halfWidth  = data.Width / 2;
                int halfHeight = data.Height / 2;
                Parallel.Invoke(
                    () => dividedDoWorkARGB(buffer, 0,         0,          halfWidth,  halfHeight,  data.Width),  //upper left
                    () => dividedDoWorkARGB(buffer, halfWidth, 0,          data.Width, halfHeight,  data.Width),  //upper right
                    () => dividedDoWorkARGB(buffer, 0,         halfHeight, halfWidth,  data.Height, data.Width),  //lower left
                    () => dividedDoWorkARGB(buffer, halfWidth, halfHeight, data.Width, data.Height, data.Width)); //lower right
                #endregion

                imgData.DataPoints = ImageProcess.getDataPoints(ref data, ref buffer);

                //Provide finer area filtering and signal enhancing
                filterNoise(imgData.DataPoints, ref data, ref buffer);
                strengthenSignal(ref data, ref buffer);

                imgData.DataPoints = findHand(ref data, ref buffer);
                ImageProcess.updateBuffer(imgData.DataPoints, ref data, ref buffer);

                BitmapManip.unlockBitmap(ref buffer, ref data, procImage);

                imgData.Image = procImage;
                Processing.getInstance().ToPreProcessing = imgData;

                //If someone is listening, raise the event
                ProcessReady?.Invoke();
            }
        }
Example #6
0
        /// <summary>
        /// Subscribes this stage's handler to the PCA-image-ready event so each
        /// filled image is routed into doWork.
        /// </summary>
        protected override void setupListener()
        {
            m_PCAImageHandler = obj => doWork(obj);

            Processing.getInstance().PCAImageFilled += m_PCAImageHandler;
        }
Example #7
0
        /// <summary>
        /// Derives the gesture of the hand from its convexity defects and finger
        /// tips, then forwards the frame to the drawing stage.
        /// </summary>
        /// <param name="p_imgData">ImageData carrying ConvexDefects and FingerTips;
        /// its Gesture is set here when hand data is present.</param>
        protected override void doWork(Object p_imgData)
        {
            // NOTE: dropped the pointless 'async' modifier (no awaits, CS1998) and
            // the unused 'gesture' local that was only referenced by dead code.
            ImageData imgData = (ImageData)p_imgData;

            // ConvexDefects is null when no hand was detected this frame.
            if (imgData.ConvexDefects != null)
            {
                List <ConvexDefect> convexDefects = imgData.ConvexDefects;
                List <Point>        fingerTips    = imgData.FingerTips;

                imgData.Gesture = deriveGesture(ref convexDefects, ref fingerTips);
            }
            else
            {
                // No hand in view: reset the tracked thumb position.
                m_thumbPos = 0;
            }

            Processing.getInstance().ToDrawingImage = imgData;
        }
        /// <summary>
        /// There are some rather labor intensive portions of code that I need to have a section of pipeline to handle.
        /// These include:
        ///     Cropping the wrist from the hand
        ///     Finding the contour of the hand
        ///     Wrapping a shell (convex hull) around the contour
        ///     Extracting convexity defects and finger tips
        /// </summary>
        /// <param name="p_imgData">ImageData whose Filter, Contour, ConvexHull,
        /// ConvexDefects, and FingerTips are populated here.</param>
        protected override void doWork(object p_imgData)
        {
            // Fraction of the hand's extent along its pointing axis used as the
            // minimum depth for a convexity defect to count.
            const double DEFECT_THRESHOLD_RATIO = .15;

            // NOTE: dropped the pointless 'async' modifier — no awaits (CS1998).
            // Cast once instead of repeating it on every member access.
            ImageData imgData = (ImageData)p_imgData;

            byte[]     buffer;
            BitmapData data = BitmapManip.lockBitmap(out buffer, imgData.Image);

            imgData.DataPoints = ImageProcess.getDataPoints(ref data, ref buffer);

            if (imgData.DataPoints.Count > 0)
            {
                // cropDataSet (below) updates m_direction as a side effect.
                m_direction = DIRECTION.INVALID;

                imgData.Filter     = cropDataSet(ref data, ref buffer);
                imgData.Contour    = ImageProcess.getContour(ref data, ref buffer);
                imgData.ConvexHull = ImageProcess.getConvexHull(imgData.Contour);

                // Defects: scale the threshold by the hand's extent perpendicular
                // to the wrist — width for horizontal hands, height otherwise.
                double defectThreshold;
                if (m_direction == DIRECTION.LEFT || m_direction == DIRECTION.RIGHT)
                {
                    defectThreshold = imgData.Filter.Width * DEFECT_THRESHOLD_RATIO;
                }
                else
                {
                    defectThreshold = imgData.Filter.Height * DEFECT_THRESHOLD_RATIO;
                }
                imgData.ConvexDefects = ImageProcess.getConvexDefects(imgData.Contour, imgData.ConvexHull,
                                                                      defectThreshold);
                imgData.ConvexDefects = organizeDefects(imgData.ConvexDefects);

                //Fingers
                Dictionary <int, List <Point> > fingerTips = findFingers(ref data, ref buffer);
                imgData.FingerTips = refineFingerTips(ref fingerTips, imgData.ConvexDefects);
            }

            BitmapManip.unlockBitmap(ref buffer, ref data, imgData.Image);

            Processing.getInstance().ToPCAImage = imgData;
        }
        /// <summary>
        /// Transforms the image into a filtered image: runs the per-quadrant
        /// RGB/ARGB filter in parallel, locates the hand, applies Gaussian
        /// cancelling, filters noise, and strengthens the signal.
        /// UPDATE: this now performs almost instantly instead of the 2 seconds it took before.
        /// </summary>
        /// <param name="p_imageData">imageData whose Image, Datapoints, and Filter are updated in place.</param>
        protected override void doWork(Object p_imageData)
        {
            // NOTE: dropped the pointless 'async' modifier — no awaits (CS1998),
            // and 'async void' makes exceptions unobservable.
            // NOTE(review): this class uses lowercase 'imageData'/'Datapoints',
            // unlike the 'ImageData'/'DataPoints' seen elsewhere — presumably a
            // distinct type; confirm before renaming.
            if (m_isInitialized)
            {
                // Cast once instead of repeating it on every member access.
                imageData imgData = (imageData)p_imageData;

                //Setting up a buffer to be used for concurrent read/write
                byte[]     buffer;
                BitmapData data = BitmapManip.lockBitmap(out buffer, imgData.Image);

                //GetPixelFormatSize returns bits per pixel; we need bytes.
                int depth = Bitmap.GetPixelFormatSize(data.PixelFormat) / 8;

                #region Call Parallel.Invoke for each quadrant
                // The RGB/ARGB check is done once up front: more code, but no
                // per-pixel branching. Quadrants are disjoint, so the parallel
                // writes to the shared buffer do not overlap.
                int halfWidth  = data.Width / 2;
                int halfHeight = data.Height / 2;
                if (depth == 3)
                {
                    Parallel.Invoke(
                        () => dividedDoWorkRGB(buffer, 0,         0,          halfWidth,  halfHeight,  data.Width),  //upper left
                        () => dividedDoWorkRGB(buffer, halfWidth, 0,          data.Width, halfHeight,  data.Width),  //upper right
                        () => dividedDoWorkRGB(buffer, 0,         halfHeight, halfWidth,  data.Height, data.Width),  //lower left
                        () => dividedDoWorkRGB(buffer, halfWidth, halfHeight, data.Width, data.Height, data.Width)); //lower right
                }
                else
                {
                    Parallel.Invoke(
                        () => dividedDoWorkARGB(buffer, 0,         0,          halfWidth,  halfHeight,  data.Width),  //upper left
                        () => dividedDoWorkARGB(buffer, halfWidth, 0,          data.Width, halfHeight,  data.Width),  //upper right
                        () => dividedDoWorkARGB(buffer, 0,         halfHeight, halfWidth,  data.Height, data.Width),  //lower left
                        () => dividedDoWorkARGB(buffer, halfWidth, halfHeight, data.Width, data.Height, data.Width)); //lower right
                }
                #endregion

                imgData.Datapoints = getDataPoints(ref data, ref buffer);

                findHand(imgData, data, buffer);

                //Provide wide area filtering
                imgData.Filter = m_filterArea;

                //Gaussian cancelling
                if (depth == 3)
                {
                    performCancellingRGB(ref buffer, data);
                }
                else
                {
                    performCancellingARGB(ref buffer, data);
                }

                imgData.Datapoints = getDataPoints(ref data, ref buffer);

                //Provide finer area filtering and signal enhancing
                filterNoise(imgData.Datapoints, ref data, ref buffer);
                strengthenSignal(ref data, ref buffer);

                imgData.Datapoints = getDataPoints(ref data, ref buffer);

                BitmapManip.unlockBitmap(ref buffer, ref data, imgData.Image);

                Processing.getInstance().ToPCAImage = imgData;

                //If someone is listening, raise the event
                ProcessReady?.Invoke();
            }
        }