Example #1
0
 /// <summary>
 /// Per-frame Kinect handler: copies the raw colour frame into the on-screen
 /// WPF bitmap, builds a rectangle from the four slider positions, draws it in
 /// green on an OpenCV copy of the frame, and shows the result in cimg_cage4.
 /// </summary>
 /// <param name="sender">Event source (the KinectSensor); unused.</param>
 /// <param name="e">Frame-ready event data; the colour frame is opened from it.</param>
 private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     {
         // Frames can legitimately be null when the app lags behind the sensor.
         if (colorFrame == null)
         {
             return;
         }

         // Mirror the raw colour stream into the WPF bitmap shown behind the overlay.
         colorFrame.CopyPixelDataTo(this.colorPixels);
         this.colorBitmap.WritePixels(
             new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
             this.colorPixels,
             this.colorBitmap.PixelWidth * sizeof(int),
             0);

         // Slider pairs give the source (sx, sy) and destination (dx, dy) corners
         // of the cage rectangle.
         int sx = (int)this.sld_c1_sX.Value;
         int sy = (int)this.sld_c1_sY.Value;
         int dx = (int)this.sld_c1_dX.Value;
         int dy = (int)this.sld_c1_dY.Value;

         // Clamp to zero when the sliders cross, instead of producing a negative size.
         int w = Math.Max(0, dx - sx);
         int h = Math.Max(0, dy - sy);

         // Centre of the rectangle, as required by MCvBox2D.
         float cx = sx + w / 2f;
         float cy = sy + h / 2f;

         // Image<> wraps unmanaged memory, so dispose it once the display copy
         // has been produced by ToBitmapSource (the original leaked it every frame).
         using (Image<Bgr, Byte> openCVImg = new Image<Bgr, byte>(colorBitmap.ToBitmap()))
         {
             // box is a field: the current cage rectangle, presumably read elsewhere — TODO confirm.
             box = new MCvBox2D(new PointF(cx, cy), new SizeF(w, h), 0);
             openCVImg.Draw(box, new Bgr(System.Drawing.Color.Green), 4);
             this.cimg_cage4.Source = ImageHelpers.ToBitmapSource(openCVImg);
         }
     }
 }
Example #2
0
        /// <summary>
        /// Per-frame Kinect handler: slices the depth frame to the slider-selected
        /// depth window, counts blobs whose contour area lies between the min/max
        /// size sliders (drawing a red box around each), shows the annotated depth
        /// image in outImg, and mirrors the colour stream into the preview bitmap.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor); unused.</param>
        /// <param name="e">Frame-ready event data; colour and depth frames are opened from it.</param>
        private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Recomputed from scratch every frame (the original reset it twice).
            blobCount = 0;

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    // Keep only depth values inside [sliderMin, sliderMax]; the rest is blanked.
                    BitmapSource depthBmp = depthFrame.SliceDepthImage((int)sliderMin.Value, (int)sliderMax.Value);

                    // EmguCV images wrap unmanaged buffers — dispose them once the
                    // display copy exists (the original leaked both every frame).
                    using (Image <Bgr, Byte> openCVImg = new Image <Bgr, byte>(depthBmp.ToBitmap()))
                    using (Image <Gray, byte> gray_image = openCVImg.Convert <Gray, byte>())
                    using (MemStorage stor = new MemStorage())
                    {
                        // Find contours with no holes; try CV_RETR_EXTERNAL to find holes.
                        Contour <System.Drawing.Point> contours = gray_image.FindContours(
                            Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                            Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_EXTERNAL,
                            stor);

                        // Walk the sibling chain of top-level contours (HNext).
                        for (; contours != null; contours = contours.HNext)
                        {
                            // Size sliders are edge lengths; compare against squared values (areas).
                            if ((contours.Area > Math.Pow(sliderMinSize.Value, 2)) && (contours.Area < Math.Pow(sliderMaxSize.Value, 2)))
                            {
                                MCvBox2D box = contours.GetMinAreaRect();
                                openCVImg.Draw(box, new Bgr(System.Drawing.Color.Red), 2);
                                blobCount++;
                            }
                        }

                        // ToBitmapSource copies the pixels, so disposing openCVImg afterwards is safe.
                        this.outImg.Source = ImageHelpers.ToBitmapSource(openCVImg);
                        txtBlobCount.Text  = blobCount.ToString();
                    }
                }

                if (colorFrame != null)
                {
                    // Mirror the raw colour stream into the WPF bitmap shown on screen.
                    colorFrame.CopyPixelDataTo(this.colorPixels);
                    this.colorBitmap.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                        this.colorPixels,
                        this.colorBitmap.PixelWidth * sizeof(int),
                        0);
                }
            }
        }
        /// <summary>
        /// Runs one blob-detection pass: resets the counter, rebuilds the tracking
        /// image from the IR frame, tracks blobs, then routes the thresholded and
        /// OpenCV images to the two preview controls and shows the blob count.
        /// </summary>
        void DoBlobDetection()
        {
            blobCount      = 0;
            convertedImage = new FormatConvertedBitmap();
            CreateImageForTracking(irBitmap, irPixels);
            TrackBlobs();

            // switchImg flips which of the two intermediate images is shown in the
            // main view versus the secondary view.
            if (!switchImg)
            {
                mainImg.Source      = ImageHelpers.ToBitmapSource(openCVImg);
                secondaryImg.Source = ImageHelpers.ToBitmapSource(thresholdedImage);
            }
            else
            {
                mainImg.Source      = ImageHelpers.ToBitmapSource(thresholdedImage);
                secondaryImg.Source = ImageHelpers.ToBitmapSource(openCVImg);
            }

            txtBlobCount.Text = blobCount.ToString();
        }
Example #4
0
        /// <summary>
        /// Per-frame Kinect handler: snapshots the slider values, then runs a
        /// one-shot BackgroundWorker that thresholds the colour frame by depth,
        /// samples the hue around the nearest point, builds an HSV in-range mask,
        /// draws a yellow cross on the nearest point, and marshals the result back
        /// to the UI thread via the dispatcher.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor); unused.</param>
        /// <param name="e">Frame-ready event data; captured by the worker closure.</param>
        private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            blobCount = 0;

            var bCw = new BackgroundWorker();

            // Snapshot slider values on the UI thread so the worker never touches
            // WPF dependency properties from a background thread.
            var colorRange         = (int)sliderColorRange.Value;
            var sliderMaxValue     = (int)sliderMax.Value;
            var sliderMinSizeValue = sliderMinSize.Value;
            var sliderMaxSizeValue = sliderMaxSize.Value;


            // NOTE(review): e's frames are opened inside the worker, i.e. on a
            // background thread some time after the event fired — confirm the
            // Kinect SDK still guarantees valid frame data at that point.
            bCw.DoWork += (s, a) =>
            {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                    {
                        if (colorFrame != null && depthFrame != null)
                        {
                            short[] depthPixels;
                            byte[]  colorPixels;
                            byte[]  thesPixels;

                            // Fresh buffers per frame (allocated on every event).
                            colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];
                            thesPixels  = new byte[this.sensor.ColorStream.FramePixelDataLength];
                            depthPixels = new short[this.sensor.DepthStream.FramePixelDataLength];

                            BitmapSource colorBmp = null;
                            BitmapSource thesBmp  = null;
                            // BitmapSource depthBmp = null;
                            depthFrame.CopyPixelDataTo(depthPixels);
                            //var greyPixels = new byte[depthFrame.Height * depthFrame.Width * 4];
                            // thesPixels starts as a copy of the colour frame; far pixels are blacked out below.
                            colorFrame.CopyPixelDataTo(colorPixels);
                            colorFrame.CopyPixelDataTo(thesPixels);
                            //depthBmp = BitmapSource.Create(depthFrame.Width, depthFrame.Height, 96, 96, PixelFormats.Bgr32, null, greyPixels, depthFrame.Width * 4);
                            colorBmp = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, colorPixels, colorFrame.Width * 4);
                            // NOTE(review): this initial returnImage is replaced at the
                            // end without being disposed — unmanaged memory leak per frame.
                            Image <Bgr, Byte> returnImage = new Image <Bgr, byte>(colorBmp.ToBitmap());

                            // Bgr32 byte order within each 4-byte pixel.
                            const int BlueIndex  = 0;
                            const int GreenIndex = 1;
                            const int RedIndex   = 2;

                            // Track the nearest valid depth sample and its colour-space position.
                            double minDepth = int.MaxValue;
                            System.Drawing.Point minPoint = new System.Drawing.Point();

                            // Walk depth and colour in lockstep: one depth sample per
                            // 4-byte colour pixel (assumes equal resolutions — TODO confirm).
                            for (int depthIndex = 0, colorIndex = 0;
                                 depthIndex < depthPixels.Length && colorIndex < colorPixels.Length;
                                 depthIndex++, colorIndex += 4)
                            {
                                // Calculate the distance represented by the two depth bytes
                                // (shift out the player-index bits).
                                int depth = depthPixels[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;
                                int y     = colorIndex / (colorFrame.Width * 4);
                                int x     = (colorIndex - (y * colorFrame.Width * 4)) / 4;
                                // depth == 0 means "unknown", so exclude it from the nearest-point search.
                                if (minDepth > depth && depth > 0)
                                {
                                    minDepth = depth;

                                    minPoint = new System.Drawing.Point(x, y);
                                }

                                // Black out pixels farther than the max-depth slider.
                                if (depth > sliderMaxValue)
                                {
                                    thesPixels[colorIndex + BlueIndex]  = 0; //blue
                                    thesPixels[colorIndex + GreenIndex] = 0; //green
                                    thesPixels[colorIndex + RedIndex]   = 0; //red
                                }
                            }

                            thesBmp = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, thesPixels, colorFrame.Width * 4);

                            // Move to HSV for hue-based segmentation; median blur suppresses speckle.
                            Image <Hsv, Byte> openCVImg = new Image <Hsv, byte>(thesBmp.ToBitmap());
                            var smoothed = openCVImg.SmoothMedian(7);
                            //Increase saturation
                            smoothed[1] += 30;

                            //Get 10x10 pixel color sample from the nearest point
                            // (filled rectangle => -1 thickness) used as an averaging mask.
                            Image <Gray, Byte> averageMask = new Image <Gray, byte>(colorFrame.Width, colorFrame.Height, new Gray(0));
                            averageMask.Draw(new System.Drawing.Rectangle(minPoint.X - 5, minPoint.Y - 5, 10, 10), new Gray(255), -1);


                            //Make a HSV theshold mask
                            Image <Gray, Byte> theshold;

                            // 2. Obtain the 3 channels (hue, saturation and value) that compose the HSV image
                            Image <Gray, byte>[] channels = smoothed.Split();

                            try
                            {
                                // Average hue under the mask, i.e. the colour of the nearest object.
                                var avgColor = channels[0].GetAverage(averageMask);
                                // 3. Keep only hue values within +/- colorRange of the sampled average (in-place on the hue channel).
                                CvInvoke.cvInRangeS(channels[0], new Gray(avgColor.Intensity - colorRange).MCvScalar, new Gray(avgColor.Intensity + colorRange).MCvScalar, channels[0]);

                                // 4. Display the result
                                theshold = channels[0];
                            }
                            finally
                            {
                                // channels[0] lives on as `theshold`; only S and V are released here.
                                // NOTE(review): theshold/channels[0] itself is never disposed.
                                channels[1].Dispose();
                                channels[2].Dispose();
                            }


                            ////Find blob
                            //using (MemStorage stor = new MemStorage())
                            //{
                            //    //Find contours with no holes try CV_RETR_EXTERNAL to find holes
                            //    Contour<System.Drawing.Point> contours = theshold.FindContours(
                            //     Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                            //     Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_EXTERNAL ,
                            //     stor);

                            //    for (int i = 0; contours != null; contours = contours.HNext)
                            //    {
                            //        i++;

                            //        if ((contours.Area > Math.Pow(sliderMinSizeValue, 2)) && (contours.Area < Math.Pow(sliderMaxSizeValue, 2)))
                            //        {
                            //            MCvBox2D box = contours.GetMinAreaRect();
                            //            returnImage.Draw(box, new Bgr(System.Drawing.Color.Red), 2);
                            //            blobCount++;
                            //        }
                            //    }
                            //}

                            //get depthpoint data (point cloud)

                            //work out if cuboid by tracking point clouds

                            //draw yellow cross on nearest point
                            // The displayed image is the binary hue mask (as Bgr), not the colour frame.
                            returnImage = theshold.Convert <Bgr, byte>();
                            returnImage.Draw(new Cross2DF(minPoint, 50, 50), new Bgr(System.Drawing.Color.Yellow), 4);

                            // Hop back to the UI thread to touch WPF controls.
                            outImg.Dispatcher.BeginInvoke(new Action(() =>
                            {
                                this.outImg.Source = ImageHelpers.ToBitmapSource(returnImage);
                                txtBlobCount.Text  = string.Format("x:{0}, y:{1}", minPoint.X, minPoint.Y);
                            }));
                        }
                    }
            };

            // Fire-and-forget; a new worker is created for every frame event.
            bCw.RunWorkerAsync();
        }
Example #5
0
        /// <summary>
        /// Per-frame Kinect handler: slices the depth frame to the slider-selected
        /// window, finds blobs within the size-slider bounds, maps each blob centre
        /// from depth space to skeleton space and records its coordinates to the
        /// worksheet while a capture is running, then refreshes both previews and
        /// hands the annotated image to the next background worker.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor); unused.</param>
        /// <param name="e">Frame-ready event data; colour and depth frames are opened from it.</param>
        private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            //TODO Keep the previous frame image as well,
            //Compare both on a background process and save it to the worksheet
            //Convert x&y differences to millimeters according to depth data (distance)
            //and some trigonometry
            blobCount = 0;

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    // Keep only depth values inside [sliderMin, sliderMax]; the rest is blanked.
                    BitmapSource depthBmp = depthFrame.SliceDepthImage((int)sliderMin.Value, (int)sliderMax.Value);

                    // openCVImg is handed to the background worker below, so it must NOT
                    // be disposed here; gray_image is local and is released when done.
                    Image <Bgr, Byte> openCVImg = new Image <Bgr, byte>(depthBmp.ToBitmap());
                    using (Image <Gray, byte> gray_image = openCVImg.Convert <Gray, byte>())
                    {
                        if (running)
                        {
                            // One worksheet column per captured frame.
                            wsheet.Cells[1, frameCount + 1].Value = "Frame " + frameCount;
                            frameCount++;
                            using (MemStorage stor = new MemStorage())
                            {
                                //Find contours with no holes try CV_RETR_EXTERNAL to find holes
                                Contour <System.Drawing.Point> contours = gray_image.FindContours(
                                    Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                                    Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_EXTERNAL,
                                    stor);

                                //Conversion of depthPixels to skeletonPoints which contain all three dimensions in meters.
                                //The conversion and copying is assumed to be costly but there are no single pixel to single point conversion I could find.
                                depthFrame.CopyDepthImagePixelDataTo(depthPixels);
                                //mapper.MapDepthFrameToSkeletonFrame(depthFormat, depthPixels, skeletonPoints);

                                // Walk the sibling chain of top-level contours (HNext).
                                for (; contours != null; contours = contours.HNext)
                                {
                                    // Size sliders are edge lengths; compare against squared values (areas).
                                    if ((contours.Area > Math.Pow(sliderMinSize.Value, 2)) && (contours.Area < Math.Pow(sliderMaxSize.Value, 2)))
                                    {
                                        MCvBox2D box = contours.GetMinAreaRect();
                                        // Draw red box around blob.
                                        openCVImg.Draw(box, new Bgr(System.Drawing.Color.Red), 2);
                                        blobCount++;

                                        // Map the blob centre (depth space) to skeleton space.
                                        // BUG FIX: the row stride was hard-coded as 640, which
                                        // silently corrupts lookups at other depth resolutions.
                                        int x = (int)box.center.X;
                                        int y = (int)box.center.Y;
                                        DepthImagePoint p = new DepthImagePoint();
                                        p.X     = x;
                                        p.Y     = y;
                                        p.Depth = depthPixels[x + depthFrame.Width * y].Depth;
                                        SkeletonPoint s = mapper.MapDepthPointToSkeletonPoint(depthFormat, p);

                                        //TODO Conversion from absolute coordinates to relative coordinates

                                        // Three worksheet rows (X, Y, Z) per blob.
                                        addCoordData(3 * blobCount - 1, frameCount, s.X, s.Y, s.Z);

                                        /*if (KinectSensor.IsKnownPoint(s))
                                         * {
                                         *  addCoordData(3 * blobCount - 1, frameCount, s.X, s.Y, s.Z);
                                         * }*/
                                    }
                                }
                            }
                        }
                    }

                    this.outImg.Source = ImageHelpers.ToBitmapSource(openCVImg);
                    txtBlobCount.Text  = blobCount.ToString();

                    // The worker takes ownership of openCVImg from here on.
                    getNext().RunWorkerAsync(openCVImg);
                }

                if (colorFrame != null)
                {
                    // Mirror the raw colour stream into the WPF bitmap shown on screen.
                    colorFrame.CopyPixelDataTo(this.colorPixels);
                    this.colorBitmap.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                        this.colorPixels,
                        this.colorBitmap.PixelWidth * sizeof(int),
                        0);
                }
            }
        }