Example #1
    private static Image <Hsv, Byte> processedred;                   //Holds the current frame converted to Image<Hsv, Byte>

    //Red and blue segmentation
    public static void Proc(BitmapSource Image, Canvas Canvas3, System.Windows.Controls.Image outputimage)
    {
        if (Image != null)
        {
            //Red Processing
            //Converts to image<>
            MemoryStream  Streamred  = new MemoryStream();
            BitmapEncoder encodedred = new BmpBitmapEncoder();
            encodedred.Frames.Add(BitmapFrame.Create(Image));
            encodedred.Save(Streamred);
            System.Drawing.Bitmap myBmpred = new System.Drawing.Bitmap(Streamred); //Converts the encoded stream to a Bitmap
            processedred = new Image <Hsv, Byte>(myBmpred);                        //Converts the Bitmap to Image<Hsv, Byte>

            //Main processing
            CvInvoke.Flip(processedred, processedred, Emgu.CV.CvEnum.FlipType.Horizontal); //Flips the image in the horizontal
            Image <Gray, Byte> Thrred;                                                     //Grayscale mask that will hold the segmentation result
            Thrred = processedred.InRange(new Hsv(165, 125, 120), new Hsv(180, 255, 255)); //Thresholds the upper hue band (165-180) for RED
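            //Note: red wraps around the hue axis, so the range above covers only the upper band (hue 165-180).
            //A fuller red mask would also threshold the low band (roughly hue 0-10) and OR the two masks,
            //e.g. (hypothetical lines, not part of the original):
            //Image <Gray, Byte> ThrredLow = processedred.InRange(new Hsv(0, 125, 120), new Hsv(10, 255, 255));
            //Thrred = Thrred.Or(ThrredLow);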

            //Handles noise and cleans image
            Mat kernel = Mat.Ones(3, 3, Emgu.CV.CvEnum.DepthType.Cv32F, 1);             //Creates a 3x3 kernel used as the structuring element for the morphology below
            CvInvoke.MorphologyEx(Thrred, Thrred, Emgu.CV.CvEnum.MorphOp.Open, kernel, new System.Drawing.Point(0, 0), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar(1));
            CvInvoke.MorphologyEx(Thrred, Thrred, Emgu.CV.CvEnum.MorphOp.Dilate, kernel, new System.Drawing.Point(0, 0), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar(1));

            // Used to display red parts of original image
            //Extracts only the RED parts from the original image
            //Mat Mask;                                                                    //Creates Mat for converting mask to Mat
            //Mask = Thrred.Mat;                                                           //Casts mask to Mat
            //Image<Hsv, byte> Redisolated = new Image<Hsv, byte>(processedred.Width, processedred.Height);    //Creates Image<Hsv,byte> for final processedred image
            //CvInvoke.BitwiseAnd(processedred, processedred, Redisolated, Mask);                     //ANDs mask with original image to retain only portions that are RED

            //Extracts biggest blob
            //Variables
            double                largestareared         = 0;
            int                   largestcontourindexred = 0;
            Image <Hsv, Byte>     Output          = new Image <Hsv, Byte>(processedred.Width, processedred.Height);
            Image <Gray, Byte>    ContourdrawnRed = new Image <Gray, Byte>(processedred.Width, processedred.Height);
            VectorOfVectorOfPoint ContoursRed     = new VectorOfVectorOfPoint();
            Mat                   HierarchyRed    = new Mat();

            //Processing
            CvInvoke.FindContours(Thrred, ContoursRed, HierarchyRed, Emgu.CV.CvEnum.RetrType.Ccomp, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);    //Finds contours in image

            //Iterates through each contour
            for (int i = 0; i < ContoursRed.Size; i++)
            {
                double a = CvInvoke.ContourArea(ContoursRed[i], false);                    //Finds the area of the contour
                if (a > largestareared)
                {
                    largestareared         = a;
                    largestcontourindexred = i;                                            //Stores the index of largest contour
                    boundingrectred        = CvInvoke.BoundingRectangle(ContoursRed[i]);   // Creates bounding rectangle for biggest contour
                }
            }

            //Compute centre of rectangle
            XRed = boundingrectred.X + (boundingrectred.Width / 2);
            YRed = boundingrectred.Y + (boundingrectred.Height / 2);

            //Old Method used for overlay
            //CvInvoke.DrawContours(processedred, ContoursRed, largestcontourindexred, new MCvScalar(255, 255, 255), 10, Emgu.CV.CvEnum.LineType.Filled, HierarchyRed, 0); //Draws biggest contour on blank image
            //processedred.Draw(boundingrectred,new Hsv(255,255,255), 3);
            //CvInvoke.Circle(processedred, new System.Drawing.Point(640-XRed, YRed), 4, new MCvScalar(255),2, Emgu.CV.CvEnum.LineType.Filled);
            //outputimage.Source = BitmapSourceConvert.ToBitmapSource1(processedred1);

            //Cleanup
            //Mask.Dispose();
            Thrred.Dispose();
            Streamred.Dispose();
            myBmpred.Dispose();

            //Blue
            Image <Gray, Byte> ThrBlue;                                                     //Grayscale mask that will hold the segmentation result
            ThrBlue = processedred.InRange(new Hsv(85, 110, 80), new Hsv(135, 230, 220));   //Thresholds the HSV range for BLUE

            //Handles noise and cleans image
            CvInvoke.MorphologyEx(ThrBlue, ThrBlue, Emgu.CV.CvEnum.MorphOp.Open, kernel, new System.Drawing.Point(0, 0), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar(1));
            CvInvoke.MorphologyEx(ThrBlue, ThrBlue, Emgu.CV.CvEnum.MorphOp.Dilate, kernel, new System.Drawing.Point(0, 0), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar(1));

            //Used to display blue parts of original image
            //Extracts only the BLUE parts from the original image
            //Mat Mask1;                                                                  //Creates Mat for converting mask to Mat
            //Mask1 = ThrBlue.Mat;                                                           //Casts mask to Mat
            //Image<Hsv, byte> Bluleisolated = new Image<Hsv, byte>(processedred.Width, processedred.Height);    //Creates Image<Hsv,byte> for final processedred image
            //CvInvoke.BitwiseAnd(processedred, processedred, Bluleisolated, Mask1);                   //ANDs mask with original image to retain only portions that are BLUE

            //Extracts biggest blob
            //Variables
            double                LargestAreaBlue         = 0;
            int                   LargestContourIndexBlue = 0;
            MCvPoint2D64f         CenterBlue       = new MCvPoint2D64f(0, 0);
            Image <Gray, Byte>    ContourDrawnBlue = new Image <Gray, Byte>(processedred.Width, processedred.Height);
            VectorOfVectorOfPoint ContoursBlue     = new VectorOfVectorOfPoint();
            Moments               MomentsBlue      = new Moments();
            Mat                   HierarchyBlue    = new Mat();

            //Processing
            CvInvoke.FindContours(ThrBlue, ContoursBlue, HierarchyBlue, Emgu.CV.CvEnum.RetrType.Ccomp, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);    //Finds contours in image

            //Iterates through each contour
            for (int i = 0; i < ContoursBlue.Size; i++)
            {
                double a = CvInvoke.ContourArea(ContoursBlue[i], false);                    //Finds the area of the contour
                if (a > LargestAreaBlue)
                {
                    LargestAreaBlue         = a;
                    LargestContourIndexBlue = i;                                                                 //Stores the index of largest contour
                    boundingrectBlue        = CvInvoke.BoundingRectangle(ContoursBlue[LargestContourIndexBlue]); // Creates bounding rectangle for biggest contour
                }
            }

            //Compute center of rectangle
            XBlue = boundingrectBlue.X + boundingrectBlue.Width / 2;
            YBlue = boundingrectBlue.Y + boundingrectBlue.Height / 2;

            //Cleanup
            //Mask1.Dispose();
            ThrBlue.Dispose();

            //Add point to images
            Canvas3.Children.Clear();

            System.Windows.Shapes.Ellipse PointRed  = CreateEllipse.CircleRed();
            System.Windows.Shapes.Ellipse PointBlue = CreateEllipse.CircleBlue();

            Canvas3.Children.Add(PointRed);
            Canvas3.Children.Add(PointBlue);

            PointRed.SetValue(Canvas.LeftProperty, (640 - XRed) * .6);      //0.6 used as the stream sizes are 0.6 times the actual resolution
            PointRed.SetValue(Canvas.TopProperty, YRed * .6);

            PointBlue.SetValue(Canvas.LeftProperty, (640 - XBlue) * .6);
            PointBlue.SetValue(Canvas.TopProperty, YBlue * .6);

            return;
        }
        else
        {
            return;
        }
    }
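
For context, Proc is written as a static helper driven from a WPF window: boundingrectred, boundingrectBlue, XRed, YRed, XBlue and YBlue are fields declared elsewhere in the class, and CreateEllipse.CircleRed()/CircleBlue() return small Ellipse markers. A minimal calling sketch is shown below; the Kinect SDK 1.x color-frame wiring, the Colorstream control and the handler name are assumptions for illustration, not part of the original code.

    //Hypothetical caller: converts a Kinect color frame to a BitmapSource and hands it to Proc
    private void Ksensor_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
    {
        using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
        {
            if (colorFrame == null)
                return;

            byte[] pixels = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(pixels);

            //Bgr32 frame -> BitmapSource (stride = width * 4 bytes)
            BitmapSource source = BitmapSource.Create(
                colorFrame.Width, colorFrame.Height, 96, 96,
                PixelFormats.Bgr32, null, pixels,
                colorFrame.Width * colorFrame.BytesPerPixel);

            Proc(source, Canvas3, Colorstream);   //Runs the red/blue segmentation above
        }
    }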
Example #2
        //Gets depth info from kinect and casts to a bitmap
        private void Ksensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            DepthImageFrame depthFrame = e.OpenDepthImageFrame();   //Opens the current depth frame

            //Checks if there is a depthFrame
            if (depthFrame != null)
            {
                // Copy the pixel data from the image to a temporary array
                depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

                // Get the min and max reliable depth for the current frame
                int minDepth = depthFrame.MinDepth;
                int maxDepth = depthFrame.MaxDepth;

                //Convert depth data to bitmapsource
                short[] pixelData = new short[depthFrame.PixelDataLength];
                depthFrame.CopyPixelDataTo(pixelData);

                BitmapSource bmap = BitmapSource.Create(
                    depthFrame.Width,
                    depthFrame.Height,
                    2, 2,
                    PixelFormats.Gray16, null,
                    pixelData,
                    depthFrame.Width * depthFrame.BytesPerPixel);

                double vertF = 571.401, horzF = 557.274; //Focal lengths

                ColorImagePoint[] color = new ColorImagePoint[depthFrame.PixelDataLength];
                ksensor.CoordinateMapper.MapDepthFrameToColorFrame(DepthImageFormat.Resolution640x480Fps30, this.depthPixels, ColorImageFormat.RgbResolution640x480Fps30, color);

                //Searches mapped RED coordinates
                for (k = 0; k < 640; ++k)
                {
                    if (color[k].X == XR)
                    {
                        break;
                    }
                }
                for (int h = k; h < depthFrame.PixelDataLength; h += 640)
                {
                    if (color[h].Y == YR)
                    {
                        if (h % 640 != 0)
                        {
                            XRMapped = h % 640;
                        }

                        YRMapped = (h - XR) / 640;

                        //Red coordinates
                        ZR = this.depthPixels[(640 - XRMapped) + (YRMapped * 640)].Depth;

                        Rxcoord = (ZR * (320 - XRMapped)) / horzF;
                        Rycoord = (ZR * (240 - YRMapped)) / vertF;

                        RCoordX.Content = Math.Round(Rxcoord);
                        RCoordY.Content = Math.Round(Rycoord);
                        RCoordZ.Content = ZR;
                        break;
                    }
                }

                //Searches mapped Blue coordinates
                for (j = 0; j < 640; ++j)
                {
                    if (color[j].X == XB)
                    {
                        break;
                    }
                }
                for (int h = j; h < depthFrame.PixelDataLength; h += 640)
                {
                    if (color[h].Y == YB)
                    {
                        if (h % 640 != 0)
                        {
                            XBMapped = h % 640;
                        }
                        YBMapped = (h - XB) / 640;

                        //Blue coordinates
                        ZB = this.depthPixels[(640 - XBMapped) + (YBMapped * 640)].Depth;

                        Bxcoord = (ZB * (320 - XBMapped)) / horzF;
                        Bycoord = (ZB * (240 - YBMapped)) / vertF;

                        BCoordX.Content = Math.Round(Bxcoord);
                        BCoordY.Content = Math.Round(Bycoord);
                        BCoordZ.Content = ZB;
                        break;
                    }
                }

                //Set stream to image
                Depthstream.Source = bmap;

                //Add points to imageviews for debugging
                Canvas1.Children.Clear();
                Canvas2.Children.Clear();

                System.Windows.Shapes.Ellipse DepthPointRed  = CreateEllipse.CircleRed();
                System.Windows.Shapes.Ellipse DepthPointBlue = CreateEllipse.CircleBlue();
                System.Windows.Shapes.Ellipse ColorPointRed  = CreateEllipse.CircleRed();
                System.Windows.Shapes.Ellipse ColorPointBlue = CreateEllipse.CircleBlue();

                Canvas2.Children.Add(ColorPointRed);
                Canvas2.Children.Add(ColorPointBlue);

                Canvas1.Children.Add(DepthPointRed);
                Canvas1.Children.Add(DepthPointBlue);

                DepthPointRed.SetValue(Canvas.LeftProperty, (depthFrame.Width - XRMapped - 3) * .6);
                DepthPointRed.SetValue(Canvas.TopProperty, (YRMapped - 3) * .6);

                DepthPointBlue.SetValue(Canvas.LeftProperty, (depthFrame.Width - XBMapped - 3) * .6);
                DepthPointBlue.SetValue(Canvas.TopProperty, (YBMapped - 3) * .6);

                ColorPointRed.SetValue(Canvas.LeftProperty, (depthFrame.Width - XR - 3) * .6);
                ColorPointRed.SetValue(Canvas.TopProperty, (YR - 3) * .6);

                ColorPointBlue.SetValue(Canvas.LeftProperty, (depthFrame.Width - XB - 3) * .6);
                ColorPointBlue.SetValue(Canvas.TopProperty, (YB - 3) * .6);

                //Cleanup
                depthFrame.Dispose();
                CoordinateFrameCalc();
            }
        }
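
The depth-to-world conversion inside both search loops is a pinhole back-projection: with the depth Z in millimetres, the focal lengths in pixels (horzF = 557.274, vertF = 571.401) and the principal point assumed at the centre of the 640x480 frame, the offsets are X = Z * (320 - x) / horzF and Y = Z * (240 - y) / vertF. A small helper capturing the same arithmetic might look like the sketch below (the name DepthToWorld is hypothetical, not part of the original).

        //Hypothetical helper: back-projects a depth pixel to real-world offsets, same math as the handler above
        private static void DepthToWorld(int x, int y, int depthMm,
                                         double horzF, double vertF,
                                         out double worldX, out double worldY)
        {
            worldX = depthMm * (320 - x) / horzF;   //Horizontal offset in mm from the optical axis
            worldY = depthMm * (240 - y) / vertF;   //Vertical offset in mm from the optical axis
        }

For example, a pixel at (200, 150) with a depth of 1000 mm maps to roughly (215 mm, 158 mm) with the focal lengths above.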