Add() public method

Adds a new filter to the sequence.
public Add ( IFilter filter ) : void
filter IFilter The filter to add to the sequence.
return void
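
Filters are applied in the order they were added when the sequence's Apply method is called. A minimal usage sketch (assuming AForge.Imaging.Filters is referenced; "image" is a hypothetical caller-supplied Bitmap):

        FiltersSequence sequence = new FiltersSequence();
        sequence.Add(Grayscale.CommonAlgorithms.BT709); // convert to 8bpp grayscale first
        sequence.Add(new Threshold(128));               // then binarize the grayscale result
        Bitmap result = sequence.Apply(image);          // filters run in the order they were added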
Example #1
        public VideoProcessor()
        {
            background = null;

            pixelateFilter = new Pixellate();
            pixelateFilter.PixelSize = 10;

            differenceFilter = new Difference();
            thresholdFilter = new Threshold(15);
            grayscaleFilter = new Grayscale(0.2125, 0.7154, 0.0721);
            erosionFilter = new Erosion();

            moveTowardsFilter = new MoveTowards();

            filters1 = new FiltersSequence();
            filters1.Add(pixelateFilter);
            filters1.Add(grayscaleFilter);

            filters2 = new FiltersSequence();

            filters2.Add(differenceFilter);
            filters2.Add(thresholdFilter);
            filters2.Add(erosionFilter);

            rat1 = new Tracker(640 / 2, 480 / 2, Color.Red);

            rat2 = new Tracker(400, 300, Color.Green);

            counter = 0;
        }
Example #2
        /// <summary>
        /// Constructor
        /// </summary>
        public CardRecognizer()
        {
            //Initialize common filter sequence; this sequence will generally be applied
            commonSeq = new FiltersSequence();
            commonSeq.Add(Grayscale.CommonAlgorithms.BT709);
            commonSeq.Add(new BradleyLocalThresholding());
            commonSeq.Add(new DifferenceEdgeDetector());

            //Load templates from resources;
            //templates will be used for template matching
            j = miranda.ui.Properties.Resources.J;
            k = miranda.ui.Properties.Resources.K;
            q = miranda.ui.Properties.Resources.Q;
            clubs = miranda.ui.Properties.Resources.Clubs;
            diamonds = miranda.ui.Properties.Resources.Diamonds;
            spades = miranda.ui.Properties.Resources.Spades;
            hearts = miranda.ui.Properties.Resources.Hearts;

            try
            {
                _engine = new TesseractEngine(@"./tessdata", "rus", EngineMode.Default);
                //_engine.SetVariable("tessedit_char_whitelist", "$.,0123456789");
            }
            catch (Exception ex)
            {
                Trace.TraceError(ex.ToString());
                Ex.Report(ex);
            }
        }
Example #3
        public CardRecognizer()
        {
            //Initialize common filter sequence; this sequence will generally be applied
            commonSeq = new FiltersSequence();
            commonSeq.Add(Grayscale.CommonAlgorithms.BT709);
            commonSeq.Add(new OtsuThreshold());
            commonSeq.Add(new DifferenceEdgeDetector());

            Stream strm;
            BinaryFormatter bformat;

            AppDomain.CurrentDomain.AssemblyResolve += CurrentDomain_AssemblyResolve;

            strm = File.Open("NetworkLetra.net", FileMode.Open);
            bformat = new BinaryFormatter();

            NetworkLetra = (FeedforwardNetwork)bformat.Deserialize(strm);

            strm = File.Open("NetworkSuits.net", FileMode.Open);
            bformat = new BinaryFormatter();

            NetworkSuits = (FeedforwardNetwork) bformat.Deserialize(strm);

            strm = File.Open("NetworkNumero.net", FileMode.Open);
            bformat = new BinaryFormatter();

            NetworkNumero = (FeedforwardNetwork)bformat.Deserialize(strm);

            AppDomain.CurrentDomain.AssemblyResolve -= CurrentDomain_AssemblyResolve;
        }
Example #4
        /// <summary>
        /// Constructor
        /// </summary>
        public CardRecognizer()
        {
            //Initialize common filter sequence; this sequence will generally be applied
            commonSeq = new FiltersSequence();
            commonSeq.Add(Grayscale.CommonAlgorithms.BT709);
            commonSeq.Add(new BradleyLocalThresholding());
            commonSeq.Add(new DifferenceEdgeDetector());

            
            //Load templates from resources;
            //templates will be used for template matching

            LoadResources();
            
            
        }
Example #5
        public Bitmap Adelgazar()
        {
            Invert ivert = new Invert();
            imagen = ivert.Apply(imagen);

            FiltersSequence filterSequence = new FiltersSequence();
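            // Structuring element values for HitAndMiss thinning (AForge convention):
            // 1 = pixel must be foreground, 0 = pixel must be background, -1 = don't care.
            // The eight rotated elements below peel pixels from every side of the (inverted) object.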

            filterSequence.Add(new HitAndMiss(
                new short[,] { { 0, 0, 0 },
                               { -1, 1, -1 },
                               { 1, 1, 1 } },
                HitAndMiss.Modes.Thinning));
            filterSequence.Add(new HitAndMiss(
                new short[,] { { -1, 0, 0 },
                               { 1, 1, 0 },
                               { -1, 1, -1 } },
                HitAndMiss.Modes.Thinning));
            filterSequence.Add(new HitAndMiss(
                new short[,] { { 1, -1, 0 },
                               { 1, 1, 0 },
                               { 1, -1, 0 } },
                HitAndMiss.Modes.Thinning));
            filterSequence.Add(new HitAndMiss(
                new short[,] { { -1, 1, -1 },
                               { 1, 1, 0 },
                               { -1, 0, 0 } },
                HitAndMiss.Modes.Thinning));
            filterSequence.Add(new HitAndMiss(
                new short[,] { { 1, 1, 1 },
                               { -1, 1, -1 },
                               { 0, 0, 0 } },
                HitAndMiss.Modes.Thinning));
            filterSequence.Add(new HitAndMiss(
                new short[,] { { -1, 1, -1 },
                               { 0, 1, 1 },
                               { 0, 0, -1 } },
                HitAndMiss.Modes.Thinning));
            filterSequence.Add(new HitAndMiss(
                new short[,] { { 0, -1, 1 },
                               { 0, 1, 1 },
                               { 0, -1, 1 } },
                HitAndMiss.Modes.Thinning));
            filterSequence.Add(new HitAndMiss(
                new short[,] { { 0, 0, -1 },
                               { 0, 1, 1 },
                               { -1, 1, -1 } },
                HitAndMiss.Modes.Thinning));

            FilterIterator filterIterator = new FilterIterator(filterSequence, 15);

            imagen = filterIterator.Apply(imagen);

            imagen = ivert.Apply(imagen);

            return imagen;
        }
Example #6
        // Threshold the image displayed in pictureBox1
        private void thresholding()
        {
            // Declare Image
            Bitmap bmp = new Bitmap(pictureBox1.Image);
            // create filters sequence
            FiltersSequence filter = new AForge.Imaging.Filters.FiltersSequence();

            // add filters to the sequence
            filter.Add(new Grayscale(0.299, 0.587, 0.114));
            filter.Add(new Threshold(128));

            // apply the filter sequence
            Bitmap newbmp = filter.Apply(bmp);

            pictureBox1.Image = newbmp;
        }
Example #7
        private Bitmap j, k, q; //Face Card Character Templates

        #endregion Fields

        #region Constructors

        /// <summary>
        /// Constructor
        /// </summary>
        public CardRecognizer()
        {
            //Initialize common filter sequence; this sequence will generally be applied
            commonSeq = new FiltersSequence();
            commonSeq.Add(Grayscale.CommonAlgorithms.BT709);
            commonSeq.Add(new BradleyLocalThresholding());
            commonSeq.Add(new DifferenceEdgeDetector());

            //Load templates from resources;
            //templates will be used for template matching
            j = PlayingCardRecognition.Properties.Resources.J;
            k = PlayingCardRecognition.Properties.Resources.K;
            q = PlayingCardRecognition.Properties.Resources.Q;
            clubs = PlayingCardRecognition.Properties.Resources.Clubs;
            diamonds = PlayingCardRecognition.Properties.Resources.Diamonds;
            spades = PlayingCardRecognition.Properties.Resources.Spades;
            hearts = PlayingCardRecognition.Properties.Resources.Hearts;
        }
Example #8
        /// <summary>
        /// Manipulate (and resize) an image
        /// </summary>
        /// <param name="source"></param>
        /// <param name="parameters"></param>
        /// <returns></returns>
        public override Bitmap Manipulate(object source, ImageManipulationSettings parameters)
        {
            var image = Resize(source, parameters);

            var filters = new FiltersSequence();

            if (parameters.Gamma.HasValue)
                filters.Add(new GammaCorrection(parameters.Gamma.Value));

            if (parameters.Sharpen.HasValue)
                filters.Add(new Sharpen { Threshold = (int)parameters.Sharpen.Value });

            if (parameters.Hue.HasValue)
                filters.Add(new HueModifier((int)parameters.Hue.Value));

            if (parameters.Saturation.HasValue)
                filters.Add(new SaturationCorrection((float)parameters.Saturation.Value));

            if (parameters.Brightness.HasValue)
                filters.Add(new BrightnessCorrection((int)parameters.Brightness.Value));

            if (parameters.Contrast.HasValue)
                filters.Add(new ContrastCorrection((int)parameters.Contrast.Value));

            return filters.Count == 0
                ? image
                : filters.Apply(image);
        }
Example #9
        private bool IsNotNumber(Bitmap source)
        {
            var template = Resources.PlayerEmpty;
            var temp = source.Clone() as Bitmap; //Clone image to keep original image

            var seq = new FiltersSequence();
            seq.Add(Grayscale.CommonAlgorithms.BT709);
            temp = seq.Apply(source); // Apply filters on source image

            var templ = seq.Apply(template);

            var templateMatchin = new ExhaustiveTemplateMatching(0.9f);
            TemplateMatch[] templates;
            if (
                temp.Width < template.Width
                ||
                temp.Height < template.Height
                )
                templates = templateMatchin.ProcessImage(templ, temp);
            else
                templates = templateMatchin.ProcessImage(temp, templ);

            var res = templates.Length > 0;

            template = Resources.PlayerMissing;
            templ = seq.Apply(template);

            templateMatchin = new ExhaustiveTemplateMatching(0.9f);
            TemplateMatch[] templates2;
            if (
                temp.Width < template.Width
                ||
                temp.Height < template.Height
                )
                templates2 = templateMatchin.ProcessImage(templ, temp);
            else
                templates2 = templateMatchin.ProcessImage(temp, templ);

            res |= templates2.Length > 0;
            return res;
        }
Example #10
        public bool ScanByTemplate(Bitmap source, Bitmap template)
        {
            var temp = source.Clone() as Bitmap; //Clone image to keep original image
            var tempTempl = template;

            var seq = new FiltersSequence();
            seq.Add(Grayscale.CommonAlgorithms.BT709);  //First add grayscale filter
            //seq.Add(new Threshold(200));
            seq.Add(new OtsuThreshold()); //Then add binarization (thresholding) filter
            temp = seq.Apply(source); // Apply filters on source image

            //tempTempl = seq.Apply(template); // Apply filters on source image

            var templateMatchin = new ExhaustiveTemplateMatching(0.9f);
            TemplateMatch[] templates;
            templates = templateMatchin.ProcessImage(temp, tempTempl);

            return templates.Length > 0;
        }
Example #11
        void webcam_ImageCaptured_Back(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            //Save current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old background
            backgroundFrame.Dispose();
            backgroundFrame = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            IFilter extractChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extractChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R,tmp2);
            replaceChannel.ChannelImage = tmp2;
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);

            ConnectedComponentsLabeling CCL = new ConnectedComponentsLabeling();
            CCL.MinWidth = 75;
            CCL.MinHeight = 75;
            CCL.CoupledSizeFiltering = true;
            Bitmap tmp4 = CCL.Apply(tmp1);

            blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);
            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            Bitmap Masked = new Bitmap(320, 240);
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }

                for (int i = maxObject.Rectangle.Left; i < maxObject.Rectangle.Right; i++)
                {
                    for (int j = maxObject.Rectangle.Top; j < maxObject.Rectangle.Bottom; j++)
                    {
                        Masked.SetPixel(i, j, maxObject.Image.GetPixel(i - maxObject.Rectangle.Left, j - maxObject.Rectangle.Top));
                    }
                }
            }

            /*Bitmap Hor = new Bitmap(320, 240);
            Bitmap Ver = new Bitmap(320, 240);
            if (maxSize > 150)
            {
                AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(tmp1);
                int[] HistVer = VIS.Gray.Values;
                AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(tmp1);
                int[] HistHor = HIS.Gray.Values;

                for (int x=0;x<320;x++)
                    for (int y = 0; y < 240; y++)
                    {
                        Hor.SetPixel(x, y, Color.White);
                        Ver.SetPixel(x, y, Color.White);
                    }
                int Imax = -1, Max = -1;
                for (int i = 0; i < HistHor.Length; i++)
                {
                    for (int y = 0; y < ((double)(HistHor[i]) / 255) ; y++)
                        Hor.SetPixel(i, y, Color.Black);
                        if (HistHor[i] > 0)
                        {
                            Imax = i;
                            Max = HistHor[i];
                        }
                }
                int ImaxY = -1, MaxY = -1;
                for (int i = 0; i < HistVer.Length; i++)
                {
                    for (int x = 0; x < ((double)(HistVer[i]) / 255) ; x++)
                        Ver.SetPixel(x, i, Color.Black);
                    if (HistVer[i] > MaxY)
                    {
                        ImaxY = i;
                        MaxY = HistVer[i];
                    }
                }
            }

            */
               /* blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);

            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }

                if (maxObject.Rectangle.Height > 90 && maxObject.Rectangle.Width > 30)
                {
                    AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(maxObject.Image);
                    int[] HistVer = VIS.Gray.Values;
                    AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(maxObject.Image);
                    int[] HistHor = HIS.Gray.Values;

                    int Imax = -1, Max = -1;
                    for (int i = 0; i < HistHor.Length; i++)
                    {
                        if (HistHor[i] > 0)
                        {
                            Imax = i;
                            Max = HistHor[i];
                            break;
                        }
                    }
                    int ImaxY = -1, MaxY = -1;
                    for (int i = 0; i < HistVer.Length; i++)
                    {
                        if (HistVer[i] > MaxY)
                        {
                            ImaxY = i;
                            MaxY = HistVer[i];
                        }
                    }
                    //Imax = 0;
                    ImaxY = 0;

                    Console.WriteLine("X={0},Y={1}", maxObject.Rectangle.X, maxObject.Rectangle.Y);
                    if (eChangedCursorEvent != null && maxSize != 0)
                        eChangedCursorEvent(maxObject.Rectangle.X + Imax, maxObject.Rectangle.Y + ImaxY);
                    LastX = maxObject.Rectangle.X;
                    LastY = maxObject.Rectangle.Y;
                }*/
                /*else if (LastX != -1 && LastY != -1 && maxSize > 0)
                {
                    //Calc distance from LastX,LastY
                    double distX = System.Math.Pow(maxObject.Rectangle.X - LastX, 2);
                    double distY = System.Math.Pow(maxObject.Rectangle.Y - LastY, 2);
                    double dist = System.Math.Pow(distX + distY, 0.5);
                    if (dist < 15)
                    {
                        Console.WriteLine("X={0},Y={1}", maxObject.Rectangle.X, maxObject.Rectangle.Y);
                        if (eChangedCursorEvent != null && maxSize != 0)
                            eChangedCursorEvent(maxObject.Rectangle.X, maxObject.Rectangle.Y);
                        LastX = maxObject.Rectangle.X;
                        LastY = maxObject.Rectangle.Y;
                    }
                    else
                    {
                        LastX = -1;
                        LastY = -1;
                    }
                }
                else
                {
                    LastX = -1;
                    LastY = -1;
                }*/
            //}
            _CaptureImage.Image = maxObject.Image;
            //_CaptureImage.Image = tmp3;
            _CaptureImage2.Image = tmp4;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
Example #12
 // Process max 200 frames (5 min) in 320x240 resolution. So 76KB memory per frame (grayscale). 1200 frames is max 93 MB of RAM (normally less because of area)
 private void processFilePart()
 {
     int nrofframes = imageStack.Length;
     int i;
     int sum;
     // create filters
     Morph morphFilter = new Morph(); // filter for adapting background
     morphFilter.SourcePercent = 0.8;
     Difference differenceFilter = new Difference(); // filter for subtracting two frames
     Threshold thresholdFilter = new Threshold(); // filter for thresholding
     FiltersSequence filters = new FiltersSequence(); // all filters in one
     filters.Add(morphFilter);
     filters.Add(differenceFilter);
     filters.Add(thresholdFilter);
     thresholdFilter.ThresholdValue = threshold;
     // Process here
     for (i = 0; i < nrofframes; i++)
     {
         // move background towards current frame
         morphFilter.OverlayImage = imageStack[i];
         Bitmap Temp = morphFilter.Apply(backgroundFrame);
         backgroundFrame = Temp.Clone(new Rectangle(0, 0, Temp.Width, Temp.Height), Temp.PixelFormat);
         Temp.Dispose();
         // apply rest of the filters
         differenceFilter.OverlayImage = imageStack[i];
         Bitmap Temp2 = filters.Apply(backgroundFrame);
         sum = 0;
         // Calculate sum of white pixels
         for (int j = 0; j < Temp2.Width; j++)
         {
             for (int k = 0; k < Temp2.Height; k++)
             {
                 if (Temp2.GetPixel(j, k) != Color.FromArgb(255, 0, 0, 0))
                 {
                     sum += 1;
                 }
             }
         }
         Temp2.Dispose();
         if (sum > objectsize)
         {
             tracker.addFrame(currentFrame);
         }
         currentFrame += 1;
     }
     // Discard Array
     for (i = 0; i < nrofframes; i++)
     {
         imageStack[i].Dispose();
     }
 }
Example #13
        /// <summary>
        /// Detects and recognizes cards from source image
        /// </summary>
        /// <param name="source">Source image to be scanned</param>
        /// <returns>Recognized Cards</returns>
        public List<Card> Recognize(Bitmap source)
        {
            List<Card> collection = new List<Card>();
            
            Bitmap temp = source.Clone(source.PixelFormat) as Bitmap; //Clone image to keep original image

            FiltersSequence seq = new FiltersSequence();
            seq.Add(Grayscale.CommonAlgorithms.BT709);  //First add grayscale filter
            seq.Add(new OtsuThreshold()); //Then add binarization (thresholding) filter
            temp = seq.Apply(source); // Apply filters on source image

            //Extract blobs whose width and height are larger than 150
            BlobCounter extractor = new BlobCounter();
            extractor.FilterBlobs = true;
            extractor.MinWidth = extractor.MinHeight = 150;
            extractor.MaxWidth = extractor.MaxHeight = 350;
            extractor.ProcessImage(temp);

            //Will be used to transform (extract) cards from the source image
            QuadrilateralTransformation quadTransformer = new QuadrilateralTransformation();

            //Will be used to resize (scale) cards
            ResizeBilinear resizer = new ResizeBilinear(CardWidth, CardHeight);

            foreach (Blob blob in extractor.GetObjectsInformation())
            {
                //Get Edge points of card
                List<IntPoint> edgePoints = extractor.GetBlobsEdgePoints(blob);
                //Calculate/Find corners of card on source image from edge points
                List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);

                quadTransformer.SourceQuadrilateral = corners; //Set corners for transforming card 
                quadTransformer.AutomaticSizeCalculaton = true;

                Bitmap cardImg = quadTransformer.Apply(source); //Extract(transform) card image

                if (cardImg.Width > cardImg.Height) //If card is positioned horizontally
                    cardImg.RotateFlip(RotateFlipType.Rotate90FlipNone); //Rotate to vertical orientation
                cardImg = resizer.Apply(cardImg); //Normalize card size

                Card card = new Card(cardImg, corners.ToArray()); //Create Card Object
                char color = ScanColor(card.GetTopLeftPart()); //Scan color
                bool faceCard = IsFaceCard(cardImg); //Determine type of card(face or not)

                if (!faceCard)
                {
                    card.Suit = ScanSuit(cardImg, color); //Scan Suit of non-face card
                    card.Rank = ScanRank(cardImg); //Scan Rank of non-face card
                }
                else
                {
                    Bitmap topLeft = card.GetTopLeftPart();

                    seq = null;
                    seq = new FiltersSequence();

                    seq.Add(Grayscale.CommonAlgorithms.BT709);
                    seq.Add(new BradleyLocalThresholding());
                    topLeft = seq.Apply(topLeft);
                    BlobsFiltering bFilter = new BlobsFiltering(5, 5, 150, 150);
                    bFilter.ApplyInPlace(topLeft); //Filter blobs that can not be a suit

                    //topLeft.Save("topleft.bmp", ImageFormat.Bmp);

                    card.Suit = ScanFaceSuit(topLeft, color); //Scan suit of face card
                    card.Rank = ScanFaceRank(topLeft); //Scan rank of face card
                }
                collection.Add(card); //Add card to collection
            }
            return collection;
        }
Example #14
File: Form1.cs Project: bdus/AForge
        public Bitmap DealImg(System.Drawing.Image b)
        {
            try
            {
                /*var bnew = new Bitmap(b.Width, b.Height, PixelFormat.Format24bppRgb);

                Graphics g = Graphics.FromImage(bnew);
                g.DrawImage(b, 0, 0);
                g.Dispose();

                bnew = new Grayscale(0.2125, 0.7154, 0.0721).Apply(bnew);
                bnew = new BlobsFiltering(1, 1, b.Width, b.Height).Apply(bnew);
                bnew = new Sharpen().Apply(bnew);
                bnew = new Threshold(50).Apply(bnew);
                */
                //The code above does not work well here.

                Bitmap bnew = new Bitmap(b);
                Graphics g = Graphics.FromImage(bnew);
                g.DrawImage(b, 0, 0);
                g.Dispose();
                FiltersSequence seq = new FiltersSequence();
                seq.Add(Grayscale.CommonAlgorithms.BT709);
                seq.Add(new OtsuThreshold());
                bnew = seq.Apply(bnew);

                return bnew;
            }
            catch (Exception)
            {

                throw;
            }
            //return null;
        }
Example #15
        /// <summary>
        /// Detects and recognizes cards from source image
        /// </summary>
        /// <param name="source">Source image to be scanned</param>
        /// <returns>Recognized Cards</returns>
        public CardCollection Recognize(Bitmap source, string filePath, int id,
            int minSize, Rectangle suitRect, Rectangle rankRect
            )
        {
            CardCollection collection = new CardCollection();  //Collection that will hold cards
            Bitmap temp = source.Clone() as Bitmap; //Clone image to keep original image

            FiltersSequence seq = new FiltersSequence();
            seq.Add(Grayscale.CommonAlgorithms.BT709);  //First add grayscale filter
            seq.Add(new OtsuThreshold()); //Then add binarization (thresholding) filter
            temp = seq.Apply(source); // Apply filters on source image

            //if (!string.IsNullOrEmpty(fileName))
            //{
            //    temp.Save(fileName, ImageFormat.Bmp);
            //}
            //Extract blobs whose width and height are larger than minSize
            BlobCounter extractor = new BlobCounter();
            extractor.FilterBlobs = true;
            extractor.MinWidth = extractor.MinHeight = minSize;//TODO card size
            //extractor.MaxWidth = extractor.MaxHeight = 70;//TODO card size
            extractor.ProcessImage(temp);

            //Will be used transform(extract) cards on source image
            //QuadrilateralTransformation quadTransformer = new QuadrilateralTransformation();

            foreach (Blob blob in extractor.GetObjectsInformation())
            {
                var cardImg = source.Clone(blob.Rectangle, PixelFormat.DontCare);

                Card card = new Card(cardImg); //Create Card Object

                Bitmap suitBmp = card.GetPart(suitRect);
                char color = ScanColor(suitBmp); //Scan color

                seq.Clear();

                seq.Add(Grayscale.CommonAlgorithms.BT709);
                seq.Add(new OtsuThreshold());
                suitBmp = seq.Apply(suitBmp);

                card.Suit = ScanSuit(suitBmp, color); //Scan suit of face card

                Bitmap rankBmp = card.GetPart(rankRect);
                seq.Clear();

                seq.Add(Grayscale.CommonAlgorithms.BT709);
                seq.Add(new OtsuThreshold());
                rankBmp = seq.Apply(rankBmp);

                //var ext = new BlobsFiltering(0, 0, 40, 40);
                //ext.ApplyInPlace(rankBmp);
                card.Rank = ScanRank(rankBmp); //Scan Rank of non-face card

                //if (card.Rank == Rank.NOT_RECOGNIZED)
                //{
                //    if (!string.IsNullOrEmpty(filePath))
                //    {
                //        while (File.Exists(filePath + id + ".bmp"))
                //            id++;
                //        top.Save(filePath + id + ".bmp", ImageFormat.Bmp);
                //    }
                //}

                if(card.Rank != Rank.NOT_RECOGNIZED && card.Suit != Suit.NOT_RECOGNIZED)
                    collection.Add(card); //Add card to collection
            }

            collection.SortByRank();
            return collection;
        }
Example #16
        void webcam_ImageCaptured_Back2(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            //Save current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old background
            backgroundFrame.Dispose();
            backgroundFrame = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            /*IFilter extrachChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extrachChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
            replaceChannel.ChannelImage = tmp2;
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);

            ConnectedComponentsLabeling CCL = new ConnectedComponentsLabeling();
            CCL.MinWidth = 75;
            CCL.MinHeight = 75;
            CCL.CoupledSizeFiltering = true;
            Bitmap tmp4 = CCL.Apply(tmp1);

            blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);
            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }
            }*/
            Bitmap Hor = new Bitmap(320, 240);
            Bitmap Ver = new Bitmap(320, 240);
            /*if (maxSize > 150)
            {
                AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(tmp1);
                int[] HistVer = VIS.Gray.Values;
                AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(tmp1);
                int[] HistHor = HIS.Gray.Values;
            }
             */
            AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(tmp1);
            int[] HistVer = VIS.Gray.Values;
            AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(tmp1);
            int[] HistHor = HIS.Gray.Values;
            //StateMgr.Execute(HistHor,HistVer);
            if (eChangedCursorEvent != null && StateMgr.Val == Webcam.ValidLocation.TRUE)
            {
                //Console.WriteLine("X={0} , Y={1}", StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                eChangedCursorEvent(StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                //eChangedCursorEvent(StateMgr.CurrState.CurrX, 100);
                //eChangedCursorEvent(100, StateMgr.CurrState.CurrY);
            }

            #region Paint Hist
            /*for (int x = 0; x < 320; x++)
                for (int y = 0; y < 240; y++)
                {
                    Hor.SetPixel(x, y, Color.White);
                    Ver.SetPixel(x, y, Color.White);
                }
            int Imax = -1, Max = -1;
            for (int i = 0; i < HistHor.Length; i++)
            {
                for (int y = 0; y < ((double)(HistHor[i]) / 255); y++)
                    Hor.SetPixel(i, y, Color.Black);
                if (HistHor[i] > 0)
                {
                    Imax = i;
                    Max = HistHor[i];
                }
            }
            int ImaxY = -1, MaxY = -1;
            for (int i = 0; i < HistVer.Length; i++)
            {
                for (int x = 0; x < ((double)(HistVer[i]) / 255); x++)
                    Ver.SetPixel(x, i, Color.Black);
                if (HistVer[i] > MaxY)
                {
                    ImaxY = i;
                    MaxY = HistVer[i];
                }
            }*/
            #endregion

            _CaptureImage.Image = Hor;
            _CaptureImage2.Image = Ver;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
Example #17
        private IFilter GenerateFilters()
        {
            FiltersSequence filters = new FiltersSequence();
            foreach (TreeNode node in treeLayers.Nodes)
            {
                if (node.Level == 0 && node.Checked)
                {

                    foreach (TreeNode child in node.Nodes)
                    {
                        if (child.Checked)
                        {
                            HSLFiltering local_filter = new HSLFiltering(((LayerColor)child.Tag).hue_range, ((LayerColor)child.Tag).sat_range, ((LayerColor)child.Tag).lum_range);
                            local_filter.FillColor = ((LayerProperties)node.Tag).HSLReplacementColor;
                            local_filter.FillOutsideRange = false;
                            filters.Add(local_filter);
                        }
                    }

                }

            }
            return filters;
        }
Example #18
        private void doAllImage()
        {
            DirectoryInfo dir = new DirectoryInfo("c:\\code");
            FileInfo[] files = dir.GetFiles("*.jpg");
            preprocessFilters = new FiltersSequence();
            preprocessFilters.Add(Grayscale.CommonAlgorithms.BT709);
            //preprocessFilters.Add(new BradleyLocalThresholding());
            preprocessFilters.Add(new OtsuThreshold());
            foreach (FileInfo file in files)
            {

                Bitmap map = new Bitmap(file.FullName);
                int noiseWidth = 5;
                Bitmap preImg = denoise(preprocess(map, preprocessFilters), new Size(noiseWidth, noiseWidth));
                preImg = denoise(preprocess(preImg, preprocessFilters), new Size(noiseWidth, 3));
                preImg = denoise(preprocess(preImg, preprocessFilters), new Size(3, noiseWidth));
                quganrao(preImg);
                //
                //to2(map);
                preImg.Save("c:\\code\\1\\" + file.Name);
            }
        }
Example #19
        Bitmap processImageCenterline(string filename)
        {
            using (Bitmap SampleImage = (Bitmap)System.Drawing.Image.FromFile(filename))
            {
                // We must convert it to grayscale because
                // the filter accepts 8 bpp grayscale images
                Grayscale GF = new Grayscale(0.2125, 0.7154, 0.0721);
                using (Bitmap GSampleImage = GF.Apply(SampleImage))
                {
                    // Saving the grayscale image, so we could see it later
                    // Detecting image edges and saving the result
                    CannyEdgeDetector CED = new CannyEdgeDetector(0, 70);
                    //CED.ApplyInPlace(GSampleImage);
                    //BradleyLocalThresholding bwfilter = new BradleyLocalThresholding();
                    //bwfilter.ApplyInPlace(GSampleImage);
                    // create filter

                    // create filter sequence
                    FiltersSequence filterSequence = new FiltersSequence();

                    // Inverting image to get white image on black background
                    filterSequence.Add(new Invert());
                    filterSequence.Add(new SISThreshold());
                    // Finding skeleton
                    filterSequence.Add(new SimpleSkeletonization());
                    //clean image from scratches
                    short[,] se = new short[,] {
                                                { -1, -1, -1 },
                                                {  0,  1,  0 },
                                                { -1, -1, -1 }};

                    filterSequence.Add(new HitAndMiss(se, HitAndMiss.Modes.Thinning));
                    //filterSequence.Add(new Median( ));
                    //filterSequence.Add(new Dilatation());
                    filterSequence.Add(new Invert());
                    // apply the filter sequence and return the result
                    return filterSequence.Apply(GSampleImage);

                }

            }
        }
Example #20
 private void InitFilters()
 {
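     // Motion-detection setup (inferred from the filter names): grayscale and frame-difference
     // filters feed the motion sequence, which thresholds the difference image and keeps only
     // blobs sized between MIN_BLOB and MAX_BLOB; Morph adapts the background slowly and
     // BlobCounter reports the remaining objects.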
     gsFilter = new Grayscale(0.33, 0.33, 0.33);
     diffFilter = new Difference();
     motionFilter = new FiltersSequence();
     motionFilter.Add(new Threshold(THRESHOLD));
     motionFilter.Add(new BlobsFiltering(MIN_BLOB, MIN_BLOB, MAX_BLOB, MAX_BLOB, true));
     morphFilter = new Morph();
     morphFilter.SourcePercent = MORPH_PERCENT;
     blobCount = new BlobCounter();
     blobCount.MinHeight = MIN_BLOB;
     blobCount.MaxHeight = MAX_BLOB;
 }
Example #21
        /// <summary>
        /// Detects and recognizes cards from source image
        /// </summary>
        /// <param name="source">Source image to be scanned</param>
        /// <returns>Recognized Cards</returns>
        public CardCollection Recognize(Bitmap source)
        {
            CardCollection collection = new CardCollection();  //Collection that will hold cards
            Bitmap temp = source.Clone() as Bitmap; //Clone image to keep original image

            FiltersSequence seq = new FiltersSequence();
            seq.Add(Grayscale.CommonAlgorithms.BT709);  //First add grayscale filter
            seq.Add(new OtsuThreshold()); //Then add binarization (thresholding) filter
            temp = seq.Apply(source); // Apply filters on source image

            //Extract blobs whose width and height are larger than 150
            BlobCounter extractor = new BlobCounter();
            extractor.FilterBlobs = true;
            extractor.MinWidth = extractor.MinHeight = 150;
            extractor.MaxWidth = extractor.MaxHeight = 350;
            extractor.ProcessImage(temp);

            //Will be used to transform (extract) cards from the source image
            QuadrilateralTransformation quadTransformer = new QuadrilateralTransformation();

            //Will be used to resize (scale) cards
            ResizeBilinear resizer = new ResizeBilinear(CardWidth, CardHeight);

            foreach (Blob blob in extractor.GetObjectsInformation())
            {
                //Get Edge points of card
                List<IntPoint> edgePoints = extractor.GetBlobsEdgePoints(blob);
                //Calculate/Find corners of card on source image from edge points
                List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);

                quadTransformer.SourceQuadrilateral = corners; //Set corners for transforming card
                quadTransformer.AutomaticSizeCalculaton = true;

                Bitmap cardImg = quadTransformer.Apply(source); //Extract(transform) card image

                if (cardImg.Width > cardImg.Height) //If card is positioned horizontally
                    cardImg.RotateFlip(RotateFlipType.Rotate90FlipNone); //Rotate
                cardImg = resizer.Apply(cardImg); //Normalize card size

                Card card = new Card(cardImg, corners.ToArray()); //Create Card Object
                bool faceCard = IsFaceCard(cardImg); //Determine type of card(face or not)

                ResizeBicubic res;

                seq.Clear();
                seq.Add(Grayscale.CommonAlgorithms.BT709);
                seq.Add(new OtsuThreshold());

                Bitmap topLeftSuit = card.GetTopLeftSuitPart();
                Bitmap bmp = seq.Apply(topLeftSuit);

                bmp = CutWhiteSpaces(bmp);
                res = new ResizeBicubic(32, 40);
                bmp = res.Apply(bmp);

                Bitmap topLeftRank = card.GetTopLeftRankPart();
                Bitmap bmp2 = seq.Apply(topLeftRank);

                bmp2 = CutWhiteSpaces(bmp2);

                seq.Clear();
                seq.Add(new OtsuThreshold());
                bmp = seq.Apply(bmp);
                card.Suit = ScanSuit(bmp);

                if (!faceCard)
                {
                    res = new ResizeBicubic(26, 40);
                    bmp2 = res.Apply(bmp2);
                    seq.Clear();
                    seq.Add(new OtsuThreshold());
                    bmp2 = seq.Apply(bmp2);
                    card.Rank = ScanRank(bmp2);
                }
                else
                {
                    res = new ResizeBicubic(32, 40);
                    bmp2 = res.Apply(bmp2);
                    seq.Clear();
                    seq.Add(new OtsuThreshold());
                    bmp2 = seq.Apply(bmp2);
                    card.Rank = ScanFaceRank(bmp2);
                }
                collection.Add(card); //Add card to collection
            }
            return collection;
        }
Example #22
        void webcam_ImageCaptured(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            //Save current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old background
            backgroundFrame.Dispose();
            backgroundFrame = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            IFilter extractChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extractChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
            replaceChannel.ChannelImage = tmp2;
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);
            StateMgr.Execute(tmp1);
            if (eChangedCursorEvent != null && StateMgr.Val == Webcam.ValidLocation.TRUE)
            {
                //Console.WriteLine("X={0} , Y={1}", StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                eChangedCursorEvent(StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                for (int i = -4; i <= 4; i++)
                    for (int j = -4; j <= 4;j++ )
                        tmp3.SetPixel(StateMgr.CurrState.CurrX+i, StateMgr.CurrState.CurrY+j, Color.Blue);
                //eChangedCursorEvent(StateMgr.CurrState.CurrX, 100);
                //eChangedCursorEvent(100, StateMgr.CurrState.CurrY);
            }
            _CaptureImage.Image = tmp1;
            _CaptureImage2.Image = tmp3;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
Example #23
        /// <summary>
        /// processes Frame for Motion Detection based on background generation
        /// </summary>
        /// <param name="frame">
        /// Takes in 2 Bitmap parameters, currentFrame and backgroundFrame
        /// </param>
        /// <returns>
        /// frame in which motion is marked
        /// </returns>
        public Bitmap processFrame(params Bitmap[] frame)
        {
            Bitmap currentFrame = frame[0];
            // create grayscale filter (BT709)
            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap GScurrentFrame = filter.Apply(currentFrame);
            if (this.backgroundFrame == null)
            {
                this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
                GScurrentFrame.Dispose();
                return currentFrame;
            }
            else
            {
                Morph filterx = new Morph(GScurrentFrame);
                filterx.SourcePercent = 0.75;
                Bitmap tmp = filterx.Apply(backgroundFrame);
                // dispose old background
                backgroundFrame.Dispose();
                backgroundFrame = tmp;

                // create processing filters sequence
                FiltersSequence processingFilter = new FiltersSequence();
                processingFilter.Add(new Difference(backgroundFrame));
                processingFilter.Add(new Threshold(threshold_val));
                processingFilter.Add(new Opening());
                processingFilter.Add(new Edges());
                // apply the filter
                Bitmap tmp1 = processingFilter.Apply(GScurrentFrame);

                IFilter extractChannel = new ExtractChannel(RGB.R);
                Bitmap redChannel = extractChannel.Apply(currentFrame);
                Merge mergeFilter = new Merge();
                mergeFilter.OverlayImage = tmp1;
                Bitmap t3 = mergeFilter.Apply(redChannel);
                ReplaceChannel rc = new ReplaceChannel(RGB.R, t3);
                t3 = rc.Apply(currentFrame);
                redChannel.Dispose();
                tmp1.Dispose();
                GScurrentFrame.Dispose();
                return t3;
            }
        }
Example #24
        public string RecognizeTextSmall(Bitmap source)
        {
            try
            {
                //var res = IsNotNumber(source);
                //if (res)
                //{
                //    return "none";
                //}

                var temp = source.Clone() as Bitmap; //Clone image to keep original image

                var seq = new FiltersSequence();

                //seq.Add(new Grayscale(0.7, 0.7, 0.7));
                seq.Add(Grayscale.CommonAlgorithms.BT709);
                //seq.Add(new OtsuThreshold()); //Then add binarization(thresholding) filter
                seq.Add(new Invert());
                //seq.Add(new ResizeBilinear(source.Width * 2, source.Height * 2));
                temp = seq.Apply(source); // Apply filters on source image

                //using (var engine = new TesseractEngine(@"./tessdata", "rus", EngineMode.Default))
                {

                    using (var page = _engine.Process(temp, PageSegMode.SingleLine))
                    {
                        var text = page.GetText();
                        var conf = page.GetMeanConfidence();

                        //Ex.Report(new Exception(text));
                        //if (conf < 0.5)
                        //    return "none";
                        return text;
                    }
                }
            }
            catch (Exception e)
            {
                Trace.TraceError(e.ToString());
                Ex.Report(e);
                return "";
            }
        }
Example #25
        //TODO not f*****g working
        public string RecognizeBet(Bitmap source)
        {
            try
            {
                //var res = IsNotNumber(source);
                //if (res)
                //{
                //    return "none";
                //}

                var temp = source.Clone() as Bitmap; //Clone image to keep original image

                //*
                var seq = new FiltersSequence();

                seq.Add(new Grayscale(0, 1, 0));
                //seq.Add(Grayscale.CommonAlgorithms.BT709);
                //seq.Add(new ResizeBilinear(source.Width * 2, source.Height * 2));
                //seq.Add(new OtsuThreshold()); //Then add binarization(thresholding) filter
                //seq.Add(new Threshold(50));
                //seq.Add(new Invert());

                temp = seq.Apply(source); // Apply filters on source image
                //*/

                //var extractor = new BlobCounter();
                //extractor.FilterBlobs = true;

                //extractor.MaxHeight = 15;
                //extractor.MinHeight = 0;
                ////extractor.MaxWidth = 10;
                ////extractor.MinWidth = 10;
                ////extractor.BackgroundThreshold = Color.Green;
                //extractor.ProcessImage(temp);

                //////Will be used transform(extract) cards on source image
                ////QuadrilateralTransformation quadTransformer = new QuadrilateralTransformation();

                //foreach (Blob blob in extractor.GetObjectsInformation())
                //{
                //    ////Get Edge points of card
                //    //List<IntPoint> edgePoints = extractor.GetBlobsEdgePoints(blob);
                //    ////Calculate/Find corners of card on source image from edge points
                //    //List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);

                //    //var cardImg = source.Clone(blob.Rectangle, PixelFormat.DontCare);
                //    var cardImg = temp.Clone(blob.Rectangle, PixelFormat.DontCare);
                //}
                //using (var engine = new TesseractEngine(@"./tessdata", "rus", EngineMode.Default))
                {
                    //_engine.SetVariable("tessedit_char_whitelist", "$.,0123456789");
                    using (var page = _engine.Process(temp))
                    {
                        var text = page.GetText();
                        var conf = page.GetMeanConfidence();

                        //Ex.Report(new Exception(text));
                        //if (conf < 0.8)
                        //    return "none";
                        return text;
                    }
                }
            }
            catch (Exception e)
            {
                Trace.TraceError(e.ToString());
                Ex.Report(e);
                return "";
            }
        }
Example #26
        /// <summary>
        /// Based off www.codeproject.com/Articles/10248/Motion-Detection-Algorithms
        /// </summary>
        /// <param name="prevImage"></param>
        /// <param name="image"></param>
        /// <returns></returns>
        private Bitmap ThresholdImage(Bitmap prevImage, Bitmap image)
        {
            // create filter
            new MoveTowards(image).ApplyInPlace(prevImage);

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(prevImage));
            processingFilter.Add(new Pixellate());
            processingFilter.Add(new Grayscale(0.2125, 0.7154, 0.0721));
            processingFilter.Add(new Threshold(45));

            // apply the filter

            return processingFilter.Apply(image);
        }
Example #27
        public Button RecognizeOneButton(Bitmap source, Rectangle rect, string filePath, int id)
        {
            var seq = new FiltersSequence();
            var card = new Button(source); //Create Button object

            card.Rect = rect;

            seq.Clear();

            seq.Add(Grayscale.CommonAlgorithms.BT709);
            seq.Add(new OtsuThreshold());
            source = seq.Apply(source);

            card.Tip = ScanButtonTip(source); //Scan the button tip

            //if (card.Tip == ButtonTip.NOT_RECOGNIZED)
            //{
            //    if (!string.IsNullOrEmpty(filePath))
            //    {
            //        while (File.Exists(filePath + id + ".bmp"))
            //            id++;
            //        top.Save(filePath + id + ".bmp", ImageFormat.Bmp);
            //    }
            //}

            return card;
        }
Example #28
        private void btnCardSuit_Click(object sender, EventArgs e)
        {
            var image = Bitmap.FromFile(tbFile.Text) as Bitmap;
            Crop crop = new Crop(_settings.MyCardsRect);//TODO card identity
            var source = crop.Apply(image);

            FiltersSequence seq = new FiltersSequence();
            seq.Add(Grayscale.CommonAlgorithms.BT709);  //First add grayscale filter
            seq.Add(new OtsuThreshold()); //Then add binarization (thresholding) filter
            var temp = seq.Apply(source); // Apply filters on source image

            BlobCounter extractor = new BlobCounter();
            extractor.FilterBlobs = true;
            extractor.MinWidth = extractor.MinHeight = (int)_settings.MinCardSize;//TODO card size
            //extractor.MaxWidth = extractor.MaxHeight = 70;//TODO card size
            extractor.ProcessImage(temp);

            //Will be used transform(extract) cards on source image
            //QuadrilateralTransformation quadTransformer = new QuadrilateralTransformation();

            Bitmap cardImg = null;
            foreach (Blob blob in extractor.GetObjectsInformation())
            {
                cardImg = source.Clone(blob.Rectangle, PixelFormat.DontCare);
                break;
            }

            var r = _settings.CardSuitRect;
            var f = new AreaSelectorForm(new Rectangle(r.X*5, r.Y*5, r.Width*5, r.Height*5));
            f.Text = "Select CardSuitRect rect";

            f.SetImage(cardImg, 5);
            if (f.ShowDialog() == DialogResult.OK)
            {
                var newRect = new Rectangle(
                    (int)Math.Round(f.Rect.X / 5.0),
                    (int)Math.Round(f.Rect.Y / 5.0),
                    (int)Math.Round(f.Rect.Width / 5.0),
                    (int)Math.Round(f.Rect.Height / 5.0));
                lblCardSuit.Text = newRect.ToString();
                _settings.CardSuitRect = newRect;
                AppSettingsManager.Save(_settings);
            }
        }