public BitmapSource Apply(BitmapSource image)
{
    var grayScale = ToGrayScale(image);
    var filter = new OtsuThreshold();
    var bmp = filter.Apply(grayScale);
    return bmp.ToBitmapImage();
}
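
// The snippet above relies on ToGrayScale and ToBitmapImage helpers that are not shown here.
// Below is a minimal sketch of a possible ToBitmapImage extension (an assumption, not the original
// helper): OtsuThreshold produces an 8bpp System.Drawing.Bitmap, which is re-encoded for WPF.
public static BitmapSource ToBitmapImage(this System.Drawing.Bitmap bitmap)
{
    using (var stream = new System.IO.MemoryStream())
    {
        // serialize the GDI+ bitmap and reload it as a frozen WPF BitmapImage
        bitmap.Save(stream, System.Drawing.Imaging.ImageFormat.Png);
        stream.Position = 0;

        var image = new System.Windows.Media.Imaging.BitmapImage();
        image.BeginInit();
        image.CacheOption = System.Windows.Media.Imaging.BitmapCacheOption.OnLoad;
        image.StreamSource = stream;
        image.EndInit();
        image.Freeze();
        return image;
    }
}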
        // Process specified image trying to recognize counter's image
        public void Process(Bitmap image, IImageProcessingLog log)
        {
            log.AddMessage("Image size: " + image.Width +
                 " x " + image.Height);

            //get image
            byte[] textData = GetImageData();
            UnmanagedImage img = CreateImage(textData);
            log.AddImage("Raw Image", img.ToManagedImage());

            //resize Image
            AForge.Imaging.Filters.ResizeNearestNeighbor resizeFilter = new AForge.Imaging.Filters.ResizeNearestNeighbor(500, (int)(500 / res));
            UnmanagedImage resizedImage = resizeFilter.Apply(img);
            log.AddImage("Resized Image", resizedImage.ToManagedImage());
            
            //filter floor
            UnmanagedImage floorFilteredImage = FilterFloor(resizedImage, textData);
            log.AddImage("Floor filtered", floorFilteredImage.ToManagedImage());

            // 1 - grayscale image
            Bitmap grayImage =
                 Grayscale.CommonAlgorithms.BT709.Apply(resizedImage.ToManagedImage());
            log.AddImage("Grayscale", grayImage);

            // 2 - Otsu thresholding
            OtsuThreshold threshold = new OtsuThreshold();
            Bitmap binaryImage = threshold.Apply(grayImage);
            log.AddImage("Binary", binaryImage);
            log.AddMessage("Otsu threshold: " + threshold.ThresholdValue);

            // 3 - Blob counting
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.FilterBlobs = true;
            blobCounter.MinWidth = 1;
            blobCounter.MinHeight = 1;

            blobCounter.ProcessImage(binaryImage);
            Blob[] blobs = blobCounter.GetObjectsInformation();

            log.AddMessage("Found blobs (min width/height = 24): " +
                 blobs.Length);

        }
        public List<string> DetectLicensePlate(Bitmap bitmap)
        {
            Bitmap original = (Bitmap)bitmap.Clone();

            // threshold the image - this can be experimented with to improve plate detection in some images
            bitmap = Grayscale.CommonAlgorithms.BT709.Apply(bitmap);
            OtsuThreshold threshold = new OtsuThreshold();
            bitmap = threshold.Apply(bitmap);

            BitmapData bitmapData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);

            // find objects (blobs)
            BlobCounter blobCounter = new BlobCounter();

            blobCounter.FilterBlobs = true;
            blobCounter.MinHeight = 10;
            blobCounter.MinWidth = 60;

            blobCounter.ProcessImage(bitmapData);
            Blob[] blobs = blobCounter.GetObjectsInformation();
            bitmap.UnlockBits(bitmapData);

            // detect each object and check its shape
            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
            //shapeChecker.AngleError = 15;
            //shapeChecker.MinAcceptableDistortion = 1;
            //shapeChecker.RelativeDistortionLimit = 0.05f;

            List<string> results = new List<string>();

            string text;
            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);

                List<IntPoint> corners;
                if (shapeChecker.IsConvexPolygon(edgePoints, out corners))
                {
                    // determine the polygon sub-type (rectangle, ...)
                    PolygonSubType subType = shapeChecker.CheckPolygonSubType(corners);

                    if (corners.Count == 4)
                    {
                        Bitmap rectangle = this.Transform(corners, original);
                        text = this.Detect(rectangle);
                        if (text.Length > 6)
                        {
                            results.Add(text);
                            rectangle.Save("temp/" + text + ".jpg");
                        }
                    }
                }
            }

            return results;
        }
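
        // A minimal usage sketch for DetectLicensePlate (assumptions: this method lives in the same
        // class, "car.jpg" exists, and a "temp" folder was created beforehand for the cropped plates).
        public void PrintDetectedPlates()
        {
            using (Bitmap input = (Bitmap)Image.FromFile("car.jpg"))
            {
                List<string> plates = this.DetectLicensePlate(input);
                foreach (string plate in plates)
                {
                    // recognized plate text; the corresponding crop was saved as temp/<text>.jpg
                    Console.WriteLine(plate);
                }
            }
        }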
/// <summary>
/// Binarize image with Otsu threshold filter
/// </summary>
/// <param name="image">Source image to binarize</param>
/// <returns>Grayscaled, Otsu-thresholded copy of the source image</returns>
public static Image OtsuThreshold(this Image image)
{
    OtsuThreshold thresholdFilter = new OtsuThreshold();
    return thresholdFilter.Apply(BitmapGrayscale(image));
}
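
// A brief usage sketch for the extension above (assumed file names; BitmapGrayscale is the
// grayscale helper already referenced by the snippet).
public static void BinarizeFile()
{
    using (Image source = Image.FromFile("meter.png"))
    using (Image binary = source.OtsuThreshold())
    {
        // the returned bitmap contains only black and white pixels
        binary.Save("meter-binary.png");
    }
}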
        // Process specified image trying to recognize counter's image
        public void Process( Bitmap image, IImageProcessingLog log )
        {
            log.AddMessage( "Image size: " + image.Width + " x " + image.Height );

            // 1 - Grayscale
            Bitmap grayImage = Grayscale.CommonAlgorithms.BT709.Apply( image );
            log.AddImage( "Grayscale", grayImage );

            // 2 - Edge detection
            DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector( ); 
            Bitmap edges = edgeDetector.Apply( grayImage );
            log.AddImage( "Edges", edges );

            // 3 - Threshold edges
            Threshold thresholdFilter = new Threshold( 40 ); 
            thresholdFilter.ApplyInPlace( edges );
            log.AddImage( "Thresholded Edges", edges );

            // 4 - Blob Counter
            BlobCounter blobCounter = new BlobCounter( );
            blobCounter.MinHeight = 32;
            blobCounter.MinWidth  = 32;
            blobCounter.FilterBlobs  = true;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;

            blobCounter.ProcessImage( edges );
            Blob[] blobs = blobCounter.GetObjectsInformation( );

            // create copy of source image, so we could draw on it
            Bitmap imageCopy = AForge.Imaging.Image.Clone( image );

            BitmapData imageData = imageCopy.LockBits( new Rectangle( 0, 0, image.Width, image.Height ),
                ImageLockMode.ReadWrite, imageCopy.PixelFormat );

            // lock grayscale image, so we can access its pixel values
            BitmapData grayData = grayImage.LockBits( new Rectangle( 0, 0, image.Width, image.Height ),
                ImageLockMode.ReadOnly, grayImage.PixelFormat );
            UnmanagedImage grayUI = new UnmanagedImage( grayData );

            // list of found dark/black quadrilaterals surrounded by white area
            List<List<IntPoint>> foundObjects = new List<List<IntPoint>>( );
            // shape checker for checking quadrilaterals
            SimpleShapeChecker shapeChecker = new SimpleShapeChecker( );

            // 5 - check each blob
            for ( int i = 0, n = blobs.Length; i < n; i++ )
            {
                List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints( blobs[i] );
                List<IntPoint> corners = null;

                // does it look like a quadrilateral ?
                if ( shapeChecker.IsQuadrilateral( edgePoints, out corners ) )
                {
                    // do some more checks to filter out unacceptable shapes
                    // if ( CheckIfShapeIsAcceptable( corners ) )
                    {
                        log.AddMessage( "Blob size: " + blobs[i].Rectangle.Width + " x " + blobs[i].Rectangle.Height );

                        // get edge points on the left and on the right side
                        List<IntPoint> leftEdgePoints, rightEdgePoints;
                        blobCounter.GetBlobsLeftAndRightEdges( blobs[i], out leftEdgePoints, out rightEdgePoints );

                        // calculate average difference between pixel values from outside of the shape and from inside
                        float diff = CalculateAverageEdgesBrightnessDifference(
                            leftEdgePoints, rightEdgePoints, grayUI );

                        log.AddMessage( "Avg Diff: " + diff );

                        // check average difference, which tells how much outside is lighter than inside on the average
                        if ( diff > 20 )
                        {
                            Drawing.Polygon( imageData, corners, Color.Red );
                            // add the object to the list of interesting objects for further processing
                            foundObjects.Add( corners );
                        }
                    }
                }
            }

            imageCopy.UnlockBits( imageData );
            grayImage.UnlockBits( grayData );

            log.AddImage( "Potential glyps", imageCopy );

            int counter = 1;

            // further processing of each potential glyph
            foreach ( List<IntPoint> corners in foundObjects )
            {
                log.AddMessage( "Glyph #" + counter );
                
                log.AddMessage( string.Format( "Corners: ({0}), ({1}), ({2}), ({3})",
                    corners[0], corners[1], corners[2], corners[3] ) );

                // 6 - do quadrilateral transformation
                QuadrilateralTransformation quadrilateralTransformation =
                    new QuadrilateralTransformation( corners, 250, 250 );

                Bitmap transformed = quadrilateralTransformation.Apply( grayImage );

                log.AddImage( "Transformed #" + counter, transformed );

                // 7 - otsu thresholding
                OtsuThreshold otsuThresholdFilter = new OtsuThreshold( ); 
                Bitmap transformedOtsu = otsuThresholdFilter.Apply( transformed );
                log.AddImage( "Transformed Otsu #" + counter, transformedOtsu );

                int glyphSize = 5;
                SquareBinaryGlyphRecognizer gr = new SquareBinaryGlyphRecognizer( glyphSize );

                bool[,] glyphValues = gr.Recognize( transformedOtsu,
                    new Rectangle( 0, 0, 250, 250 ) );

                log.AddImage( "Glyph lines #" + counter, transformedOtsu );

                // output recognized glyph to log
                log.AddMessage( string.Format( "glyph: {0:F2}%", gr.confidence * 100 ) );
                for ( int i = 0; i < glyphSize; i++ )
                {
                    StringBuilder sb = new StringBuilder( "   " );

                    for ( int j = 0; j < glyphSize; j++ )
                    {
                        sb.Append( ( glyphValues[i, j] ) ? "1 " : "0 " );
                    }

                    log.AddMessage( sb.ToString( ) );
                }

                counter++;
            }
        }
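
        // The call to CheckIfShapeIsAcceptable above is commented out and its body is not part of
        // this snippet. A possible sketch (an assumption, not the original implementation): reject
        // candidate quadrilaterals whose sides are too short or whose elongation is extreme.
        private static bool CheckIfShapeIsAcceptable(List<IntPoint> corners)
        {
            const int minSide = 16;          // assumed minimum side length, in pixels
            const float maxElongation = 4f;  // assumed limit on longest/shortest side ratio

            float minLength = float.MaxValue, maxLength = 0;

            for (int i = 0; i < corners.Count; i++)
            {
                IntPoint a = corners[i];
                IntPoint b = corners[(i + 1) % corners.Count];
                float length = (float)Math.Sqrt(Math.Pow(a.X - b.X, 2) + Math.Pow(a.Y - b.Y, 2));

                minLength = Math.Min(minLength, length);
                maxLength = Math.Max(maxLength, length);
            }

            return (minLength >= minSide) && (maxLength / minLength <= maxElongation);
        }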
        public List<Object> findObjects(VideoReader videoReader, IDepthReader depthReader, Action<ushort[], CameraSpacePoint[]> mappingFunction, IProgress<int> progress)
        {
            var shapeOptimizer = new FlatAnglesOptimizer(160);
            Console.WriteLine("Find glyph box");
            List<Object> objects = new List<Object>();

            if (videoReader == null)
                return objects;

            // For each box prototype (int boxPrototypeIndex), each frame (int frameNo) and each
            // recognized glyph in that frame (int faceIndex), store a tuple of:
            //   - a list of bounding points of the recognized glyph
            //   - the GlyphFace instance
            //   - the corresponding 3D points (empty when no depth reader is available)
            var recognizedGlyphs = new Dictionary<int, Dictionary<int, Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>>>();

            Bitmap image = null;
            Mat m = null;
            Bitmap grayImage = null;
            Bitmap edges = null;
            UnmanagedImage grayUI = null;
            Bitmap transformed = null;
            Bitmap transformedOtsu = null;


            // Control flag: true if some glyph was detected in the previous frame.
            // When true, and the current frame is not an anchor frame, the glyph box is
            // searched only in a neighborhood of the previously detected glyphs.
            bool previousFrameDetection = false;

            for (int frameNo = 0; frameNo < videoReader.frameCount; frameNo++)
            //for (int frameNo = 80; frameNo < 81; frameNo++)
            {
                if (progress != null)
                    progress.Report(frameNo);

                Console.WriteLine("=============================================");
                Console.WriteLine("Frame no " + frameNo);
                m = videoReader.getFrame(frameNo);
                if (m == null)
                {
                    break;
                }

                var startPos = new System.Drawing.Point();

                getImageForProcessing(recognizedGlyphs, m, previousFrameDetection, frameNo, ref image, ref startPos);

                // Reset right after use
                previousFrameDetection = false;


                Stopwatch stopwatch = Stopwatch.StartNew();

                // Adapted from Glyph Recognition Prototyping
                // Copyright © Andrew Kirillov, 2009-2010
                // 1 - Grayscale
                grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);

                stopwatch.Stop();
                Console.WriteLine("Gray scale time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 2 - Edge detection
                DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
                edges = edgeDetector.Apply(grayImage);

                stopwatch.Stop();
                Console.WriteLine("Edge detection time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 3 - Threshold edges
                // Originally set to 20, which detected too few glyphs, so a higher value is used
                Threshold thresholdFilter = new Threshold(60);
                thresholdFilter.ApplyInPlace(edges);

                stopwatch.Stop();
                Console.WriteLine("Threshold time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                // 4 - Blob Counter
                BlobCounter blobCounter = new BlobCounter();
                blobCounter.MinHeight = 32;
                blobCounter.MinWidth = 32;
                blobCounter.FilterBlobs = true;
                blobCounter.ObjectsOrder = ObjectsOrder.Size;

                blobCounter.ProcessImage(edges);
                Blob[] blobs = blobCounter.GetObjectsInformation();

                stopwatch.Stop();
                Console.WriteLine("Blob finding time = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

                //// create unmanaged copy of source image, so we could draw on it
                //UnmanagedImage imageData = UnmanagedImage.FromManagedImage(image);

                // Get unmanaged copy of grayscale image, so we can access its pixel values
                grayUI = UnmanagedImage.FromManagedImage(grayImage);

                // list of found dark/black quadrilaterals surrounded by white area
                List<List<IntPoint>> foundObjects = new List<List<IntPoint>>();
                // shape checker for checking quadrilaterals
                SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

                Console.WriteLine("edgePoints");

                // 5 - check each blob
                for (int i = 0, n = blobs.Length; i < n; i++)
                {
                    List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);

                    List<IntPoint> corners = null;

                    // does it look like a quadrilateral ?
                    if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                    {
                        // do some more checks to filter out unacceptable shapes
                        // if ( CheckIfShapeIsAcceptable( corners ) )
                        {

                            // get edge points on the left and on the right side
                            List<IntPoint> leftEdgePoints, rightEdgePoints;
                            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

                            // calculate average difference between pixel values from outside of the shape and from inside
                            float diff = this.CalculateAverageEdgesBrightnessDifference(
                                leftEdgePoints, rightEdgePoints, grayUI);

                            // check average difference, which tells how much outside is lighter than inside on the average
                            if (diff > 20)
                            {
                                //Drawing.Polygon(imageData, corners, Color.FromArgb(255, 255, 0, 0));
                                // add the object to the list of interesting objects for further processing
                                foundObjects.Add(corners);
                            }
                        }
                    }
                }

                stopwatch.Stop();
                Console.WriteLine("Finding black quadiralateral surrounded by white area = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();


                int recordedTimeForRgbFrame = (int)(videoReader.totalMiliTime * frameNo / (videoReader.frameCount - 1));

                CameraSpacePoint[] csps = new CameraSpacePoint[videoReader.frameWidth * videoReader.frameHeight];
                if (depthReader != null)
                {
                    ushort[] depthValues = depthReader.readFrameAtTime(recordedTimeForRgbFrame);
                    mappingFunction(depthValues, csps);
                }

                stopwatch.Stop();
                Console.WriteLine("Mapping into 3 dimensional = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();


                // further processing of each potential glyph
                foreach (List<IntPoint> corners in foundObjects)
                {
                    Console.WriteLine("found some corner");
                    // 6 - do quadrilateral transformation
                    QuadrilateralTransformation quadrilateralTransformation =
                        new QuadrilateralTransformation(corners, 20 * (glyphSize + 2), 20 * (glyphSize + 2));

                    transformed = quadrilateralTransformation.Apply(grayImage);

                    // 7 - otsu thresholding
                    OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
                    transformedOtsu = otsuThresholdFilter.Apply(transformed);

                    // +2 for offset
                    int glyphSizeWithBoundary = glyphSize + 2;
                    SquareBinaryGlyphRecognizer gr = new SquareBinaryGlyphRecognizer(glyphSizeWithBoundary);

                    bool[,] glyphValues = gr.Recognize(ref transformedOtsu,
                        new Rectangle(0, 0, 20 * (glyphSize + 2), 20 * (glyphSize + 2)));

                    bool[,] resizedGlyphValues = new bool[glyphSize, glyphSize];

                    for (int i = 0; i < glyphSize; i++)
                        for (int j = 0; j < glyphSize; j++)
                        {
                            resizedGlyphValues[i, j] = glyphValues[i + 1, j + 1];
                        }


                    GlyphFace face = new GlyphFace(resizedGlyphValues, glyphSize);

                    Console.WriteLine("Find glyph face " + face.ToString());

                    // Transfer back to original coordinates
                    List<IntPoint> originalCorners = new List<IntPoint>();
                    foreach (var corner in corners)
                    {
                        IntPoint p = new IntPoint(corner.X + startPos.X, corner.Y + startPos.Y);
                        originalCorners.Add(p);
                    }

                    Console.WriteLine("Corner points");
                    foreach (var corner in originalCorners)
                    {
                        Console.WriteLine(corner);
                    }

                    for (int boxPrototypeIndex = 0; boxPrototypeIndex < boxPrototypes.Count; boxPrototypeIndex++)
                    {
                        var boxPrototype = boxPrototypes[boxPrototypeIndex];
                        foreach (int faceIndex in boxPrototype.indexToGlyphFaces.Keys)
                        {
                            if (face.Equals(boxPrototype.indexToGlyphFaces[faceIndex]))
                            {
                                if (!recognizedGlyphs.ContainsKey(boxPrototypeIndex))
                                {
                                    Console.WriteLine("Detect new type of prototype " + boxPrototypeIndex);
                                    recognizedGlyphs[boxPrototypeIndex] = new Dictionary<int, Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>>();
                                }

                                if (!recognizedGlyphs[boxPrototypeIndex].ContainsKey(frameNo))
                                {
                                    Console.WriteLine("Detect glyph at frame " + frameNo + " for prototype " + boxPrototypeIndex);
                                    if (!previousFrameDetection)
                                    {
                                        previousFrameDetection = true;
                                    }

                                    recognizedGlyphs[boxPrototypeIndex][frameNo] = new Dictionary<int, Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>>();
                                }

                                recognizedGlyphs[boxPrototypeIndex][frameNo][faceIndex] = new Tuple<List<System.Drawing.PointF>, GlyphFace, List<Point3>>(
                                    originalCorners.Select(p => new System.Drawing.PointF(p.X, p.Y)).ToList(),
                                    face,
                                    depthReader != null ?
                                    originalCorners.Select(p => p.X + p.Y * videoReader.frameWidth >= 0 && p.X + p.Y * videoReader.frameWidth < videoReader.frameWidth * videoReader.frameHeight ?
                                                                   new Point3(csps[p.X + p.Y * videoReader.frameWidth].X,
                                                                   csps[p.X + p.Y * videoReader.frameWidth].Y,
                                                                   csps[p.X + p.Y * videoReader.frameWidth].Z) : new Point3()).ToList() :
                                                                   new List<Point3>()
                                    );

                                break;
                            }
                        }
                    }
                }

                foreach (IDisposable o in new IDisposable[] { image, m, grayImage, edges, grayUI, transformed, transformedOtsu })
                {
                    if (o != null)
                    {
                        o.Dispose();
                    }
                }

                stopwatch.Stop();
                Console.WriteLine("Transforming and detect glyph = " + stopwatch.ElapsedMilliseconds);
                stopwatch.Restart();

            }

            if (progress != null)
                progress.Report(videoReader.frameCount);

            if (recognizedGlyphs.Keys.Count != 0)
            {
                foreach (int boxPrototypeIndex in recognizedGlyphs.Keys)
                {
                    Console.WriteLine("For boxPrototypeIndex = " + boxPrototypeIndex + " Found glyph box at " + recognizedGlyphs[boxPrototypeIndex].Keys.Count + " frames");
                    GlyphBoxObject oneBox = null;
                    var boxPrototype = boxPrototypes[boxPrototypeIndex];
                    oneBox = new GlyphBoxObject(currentSession, "", Color.Black, 1, videoReader.fileName);
                    oneBox.boxPrototype = boxPrototype;
                    foreach (int frameNo in recognizedGlyphs[boxPrototypeIndex].Keys)
                    {
                        var glyphs = recognizedGlyphs[boxPrototypeIndex][frameNo];

                        var glyphBounds = new List<List<System.Drawing.PointF>>();
                        var glyph3DBounds = new List<List<Point3>>();
                        var faces = new List<GlyphFace>();

                        foreach (var glyph in glyphs)
                        {
                            glyphBounds.Add(glyph.Value.Item1);
                            faces.Add(glyph.Value.Item2);
                            glyph3DBounds.Add(glyph.Value.Item3);
                        }

                        oneBox.setBounding(frameNo, glyphSize, glyphBounds, faces);
                        oneBox.set3DBounding(frameNo, glyphSize, glyph3DBounds, faces);

                        //Point3 center = new Point3();
                        //Quaternions quaternions = new Quaternions();

                        //oneBox.set3DBounding(frameNo, new CubeLocationMark(frameNo, center, quaternions));
                    }

                    objects.Add(oneBox);
                }
            }

            return objects;
        }
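
        // A minimal usage sketch for findObjects (assumptions: this method lives in the same class,
        // `videoReader` is already opened, and depth mapping is skipped by passing null for the
        // depth reader and the mapping function).
        public List<Object> RunGlyphBoxDetection(VideoReader videoReader)
        {
            var progress = new Progress<int>(frameNo => Console.WriteLine("Processed frame " + frameNo));
            List<Object> boxes = this.findObjects(videoReader, null, null, progress);
            Console.WriteLine("Detected glyph boxes: " + boxes.Count);
            return boxes;
        }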
        // Process specified image trying to recognize counter's image
        public void Process( Bitmap image, IImageProcessingLog log )
        {
            log.AddMessage( "Image size: " + image.Width + " x " + image.Height );

            // 1- grayscale image
            Bitmap grayImage = Grayscale.CommonAlgorithms.BT709.Apply( image );
            log.AddImage( "Grayscale", grayImage );

            // 2 - Otsu thresholding
            OtsuThreshold threshold = new OtsuThreshold( );
            Bitmap binaryImage = threshold.Apply( grayImage );
            log.AddImage( "Binary", binaryImage );
            log.AddMessage( "Otsu threshold: " + threshold.ThresholdValue );

            // 3 - Blob counting
            BlobCounter blobCounter = new BlobCounter( );
            blobCounter.FilterBlobs = true;
            blobCounter.MinWidth = 24;
            blobCounter.MinHeight = 24;

            blobCounter.ProcessImage( binaryImage );
            Blob[] blobs = blobCounter.GetObjectsInformation( );

            log.AddMessage( "Found blobs (min width/height = 24): " + blobs.Length );

            // 4 - check shape of each blob
            SimpleShapeChecker shapeChecker = new SimpleShapeChecker( );

            log.AddMessage( "Found coins: " );
            int count = 0;

            // create graphics object for drawing on image
            Graphics g = Graphics.FromImage( image );
            Pen pen = new Pen( Color.Red, 3 );

            foreach ( Blob blob in blobs )
            {
                List<IntPoint> edgePoint = blobCounter.GetBlobsEdgePoints( blob );

                // check if shape looks like a circle
                DoublePoint center;
                double radius;

                if ( shapeChecker.IsCircle( edgePoint, out center, out radius ) )
                {
                    count++;

                    log.AddMessage( string.Format( "  {0}: center = ({1}, {2}), radius = {3}",
                        count, center.X, center.Y, radius ) );

                    // highlight coin
                    g.DrawEllipse( pen, (int) ( center.X - radius ), (int) ( center.Y - radius ),
                        (int) ( radius * 2 ), (int) ( radius * 2 ) );
                }
            }

            g.Dispose( );
            pen.Dispose( );

            log.AddMessage( "Total coins: " + count);
            log.AddImage( "Coins", image );
        }
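
        // The Process methods above only call AddMessage and AddImage on IImageProcessingLog.
        // A minimal console-based implementation sketch (an assumption; the real interface may
        // declare additional members).
        public class ConsoleImageProcessingLog : IImageProcessingLog
        {
            public void AddMessage(string message)
            {
                Console.WriteLine(message);
            }

            public void AddImage(string title, Bitmap image)
            {
                // dump intermediate images next to the executable for later inspection
                image.Save(title.Replace(' ', '_') + ".png", System.Drawing.Imaging.ImageFormat.Png);
                Console.WriteLine("Saved image: " + title);
            }
        }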