Example #1
        public KeyValuePair<BitmapSource, List<KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>>> processBlobs(DepthFrameReader depthFrameReader, InfraredFrame frame, double blobSizeThreshold)
        {
            int width  = frame.FrameDescription.Width;
            int height = frame.FrameDescription.Height;

            // Copy the raw 16-bit infrared samples out of the frame.
            ushort[] imageDataArray = new ushort[width * height];
            frame.CopyFrameDataToArray(imageDataArray);


            // One Bgr32 output pixel (4 bytes) per infrared sample.
            byte[] pixelData = new byte[width * height * (PixelFormats.Bgr32.BitsPerPixel + 7) / 8];

            int colorIndex = 0;

            for (int i = 0; i < imageDataArray.Length; i++)
            {
                // Keep the high byte of each 16-bit IR sample and replicate it
                // across the B, G and R channels of a Bgr32 pixel (alpha = 255).
                byte b = (byte)(imageDataArray[i] >> 8);
                pixelData[colorIndex++] = b;
                pixelData[colorIndex++] = b;
                pixelData[colorIndex++] = b;
                pixelData[colorIndex++] = 255;
            }

            Image<Bgr, short> openCvImg = new Image<Bgr, short>(CameraImage.DEPTH_IMAGE_WIDTH, CameraImage.DEPTH_IMAGE_HEIGHT, new Bgr(0, 0, 0));

            int stride = width * (PixelFormats.Bgr32.BitsPerPixel / 8);
            // Wrap the pixel buffer in a 96 DPI Bgr32 BitmapSource.
            BitmapSource sBitmap = BitmapSource.Create(width, height, 96, 96, PixelFormats.Bgr32, null, pixelData, stride);

            // Load the WPF bitmap into the Emgu image (BmsToBm converts BitmapSource to Bitmap).
            openCvImg.Bitmap = CameraImage.BmsToBm(sBitmap);

            // Convert to grayscale; all subsequent processing and debug drawing happens on this copy.
            var gray_image = openCvImg.Convert<Gray, byte>();

            gray_image._GammaCorrect(0.3);

            // Keep only near-saturated pixels, i.e. the retroreflective markers.
            var greyThreshImg = gray_image.ThresholdBinary(new Gray(220), new Gray(255));

            // Dilate so fragmented marker pixels merge into single blobs.
            greyThreshImg = greyThreshImg.Dilate(5);

            var depthFrame = depthFrameReader.AcquireLatestFrame();

            if (depthFrame == null)
            {
                return new KeyValuePair<BitmapSource, List<KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>>>(null, null);
            }
            // Snapshot the depth data and release the frame immediately.
            ushort[] depthData = new ushort[width * height];
            depthFrame.CopyFrameDataToArray(depthData);
            depthFrame.Dispose();

            // Run connected-component blob detection on the thresholded image.
            Emgu.CV.Cvb.CvBlobs        resultingImgBlobs = new Emgu.CV.Cvb.CvBlobs();
            Emgu.CV.Cvb.CvBlobDetector bDetect           = new Emgu.CV.Cvb.CvBlobDetector();
            var nBlobs = bDetect.Detect(greyThreshImg, resultingImgBlobs);


            var mappedPoints = new List<KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>>();

            if (nBlobs > 0)
            {
                var             blobImg = greyThreshImg;
                DepthSpacePoint dsp     = new DepthSpacePoint();
                foreach (Emgu.CV.Cvb.CvBlob targetBlob in resultingImgBlobs.Values)
                {
                    if (targetBlob.Area > blobSizeThreshold)
                    {
                        // Outline the blob on the thresholded image for debugging.
                        blobImg.Draw(targetBlob.BoundingBox, new Gray(255), 1);
                        dsp.X = targetBlob.Centroid.X;
                        dsp.Y = targetBlob.Centroid.Y;
                        // getDepth yields the flat index of the centroid into the
                        // depth array (equivalent to width * y + x).
                        int depthIndex  = (int)this.blobDetector.getDepth((int)dsp.X, (int)dsp.Y, width, depthData);
                        var mappedPoint = _kSensor.CoordinateMapper.MapDepthPointToCameraSpace(dsp, depthData[depthIndex]);
                        // The mapper returns infinities when the depth is unknown; skip those blobs.
                        if (!float.IsInfinity(mappedPoint.X) && !float.IsInfinity(mappedPoint.Y) && !float.IsInfinity(mappedPoint.Z))
                        {
                            mappedPoints.Add(new KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>(targetBlob, mappedPoint));
                        }
                    }
                }
            }


            var bitmap = BitmapSource.Create(width, height, 96, 96, PixelFormats.Bgr32, null, pixelData, stride);

            return new KeyValuePair<BitmapSource, List<KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>>>(bitmap, mappedPoints);
        }
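
A minimal usage sketch for processBlobs (hypothetical caller: the field names `_irReader` and `_depthReader`, the WPF context, and the threshold value are assumptions, as is calling this from the class that owns `_kSensor`):

        using (InfraredFrame irFrame = _irReader.AcquireLatestFrame())
        {
            if (irFrame == null) return;

            // Hypothetical reader fields; a threshold of 50 px is an arbitrary example value.
            var result = processBlobs(_depthReader, irFrame, blobSizeThreshold: 50);
            if (result.Key == null) return; // no depth frame was available

            foreach (var blobAndPoint in result.Value)
            {
                CameraSpacePoint p = blobAndPoint.Value;
                Console.WriteLine("blob area {0}: ({1:F3}, {2:F3}, {3:F3}) m", blobAndPoint.Key.Area, p.X, p.Y, p.Z);
            }
        }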
Example #2
        public ImageSource detectRetroreflectiveBlob(int width, int height, byte[] pixelData)
        {
            // Create an OpenCV image to hold the incoming pixels for processing.
            Image<Bgr, short> openCvImg = new Image<Bgr, short>(CameraImage.DEPTH_IMAGE_WIDTH, CameraImage.DEPTH_IMAGE_HEIGHT, new Bgr(0, 0, 0));

            int stride = width * (PixelFormats.Bgr32.BitsPerPixel / 8);
            // Wrap the pixel buffer in a 96 DPI Bgr32 BitmapSource.
            BitmapSource sBitmap = BitmapSource.Create(width, height, 96, 96, PixelFormats.Bgr32, null, pixelData, stride);

            openCvImg.Bitmap = CameraImage.BmsToBm(sBitmap);

            // Convert to grayscale; all subsequent processing and debug drawing happens on this copy.
            var gray_image = openCvImg.Convert<Gray, byte>();

            gray_image._GammaCorrect(0.3);

            // Keep only near-saturated pixels, i.e. the retroreflective markers.
            var greyThreshImg = gray_image.ThresholdBinary(new Gray(220), new Gray(255));

            // Optionally dilate to merge fragmented markers:
            //greyThreshImg = greyThreshImg.Dilate(5);

            // Run connected-component blob detection on the thresholded image.
            Emgu.CV.Cvb.CvBlobs        resultingImgBlobs = new Emgu.CV.Cvb.CvBlobs();
            Emgu.CV.Cvb.CvBlobDetector bDetect           = new Emgu.CV.Cvb.CvBlobDetector();
            var nBlobs = bDetect.Detect(greyThreshImg, resultingImgBlobs);

            int _blobSizeThreshold = 1;
            var depthFrameReader   = _kSensor.DepthFrameSource.OpenReader();
            var depthFrame         = depthFrameReader.AcquireLatestFrame();

            // Guard against the reader not having a frame ready yet.
            if (depthFrame == null)
            {
                depthFrameReader.Dispose();
                return null;
            }

            ushort[] depthData = new ushort[width * height];
            depthFrame.CopyFrameDataToArray(depthData);

            var detectedBlobs = new List<KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>>();

            if (nBlobs > 0)
            {
                var blobImg = greyThreshImg;

                foreach (Emgu.CV.Cvb.CvBlob targetBlob in resultingImgBlobs.Values)
                {
                    if (targetBlob.Area > _blobSizeThreshold)
                    {
                        // Outline the blob on the thresholded image for debugging.
                        blobImg.Draw(targetBlob.BoundingBox, new Gray(255), 1);

                        DepthSpacePoint dsp = new DepthSpacePoint();
                        dsp.X = targetBlob.Centroid.X;
                        dsp.Y = targetBlob.Centroid.Y;
                        // Flat index of the centroid into the depth array: width * y + x.
                        int depthIndex  = (int)(width * dsp.Y + dsp.X);
                        var mappedPoint = _kSensor.CoordinateMapper.MapDepthPointToCameraSpace(dsp, depthData[depthIndex]);
                        detectedBlobs.Add(new KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>(targetBlob, mappedPoint));
                    }
                }
            }

            depthFrame.Dispose();
            depthFrameReader.Dispose();
            return CameraImage.BmToBms(greyThreshImg.Bitmap);
        }
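
A minimal usage sketch for detectRetroreflectiveBlob (hypothetical caller: `_irReader` and the WPF `debugImage` control are assumptions; the 16-bit-to-Bgr32 conversion mirrors Example #1):

        using (InfraredFrame irFrame = _irReader.AcquireLatestFrame())
        {
            if (irFrame == null) return;

            int width  = irFrame.FrameDescription.Width;
            int height = irFrame.FrameDescription.Height;

            ushort[] irData = new ushort[width * height];
            irFrame.CopyFrameDataToArray(irData);

            // Convert each 16-bit IR sample to a grey Bgr32 pixel (4 bytes per pixel).
            byte[] pixelData = new byte[width * height * 4];
            int j = 0;
            for (int i = 0; i < irData.Length; i++)
            {
                byte b = (byte)(irData[i] >> 8);
                pixelData[j++] = b;
                pixelData[j++] = b;
                pixelData[j++] = b;
                pixelData[j++] = 255;
            }

            debugImage.Source = detectRetroreflectiveBlob(width, height, pixelData);
        }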