public static bool HasData(OmrPageOutput page, OmrBarcodeField barcode)
        {

            using (Bitmap bmp = new Bitmap((int)barcode.TopRight.X - (int)barcode.TopLeft.X, (int)barcode.BottomLeft.Y - (int)barcode.TopLeft.Y, PixelFormat.Format24bppRgb))
            {
                using (Graphics g = Graphics.FromImage(bmp))
                    using (System.Drawing.Image img = System.Drawing.Image.FromFile(page.AnalyzedImage))
                    {
                        // Copy the barcode region of the analyzed page image into the working bitmap
                        g.DrawImage(img, 0, 0, new Rectangle(new Point((int)barcode.TopLeft.X, (int)barcode.TopLeft.Y), bmp.Size), GraphicsUnit.Pixel);
                    }

                // Blobs
                BlobCounter blobCounter = new BlobCounter();
                blobCounter.MinHeight   = 30;
                blobCounter.MinWidth    = 30;
                blobCounter.FilterBlobs = true;

                //blobCounter.BackgroundThreshold = Color.Gray;// new Color(255, 255, 255);

                using (var grayscale = new GrayscaleY().Apply(bmp))
                {
                    Threshold binaryThreshold = new Threshold(page.Template.ScanThreshold);
                    binaryThreshold.ApplyInPlace(grayscale);
                    new Invert().ApplyInPlace(grayscale);

                    // Check for circles
                    blobCounter.ProcessImage(grayscale);
                    Blob[] blobs = blobCounter.GetObjectsInformation();
                    return(blobs.Any(o => o.ColorMean == Color.FromArgb(255, 255, 255, 255)));
                }
            }
        }
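The grayscale, threshold, and invert steps above recur throughout these examples; as a minimal sketch (not part of the original method), the same preprocessing can be composed into a single AForge FiltersSequence. The fixed threshold of 127 and the variable names are assumptions; the method above uses the template's ScanThreshold instead.

            // Minimal sketch: grayscale -> threshold -> invert composed as one AForge FiltersSequence.
            // Assumes a 24bpp RGB Bitmap named bmp; 127 stands in for page.Template.ScanThreshold.
            FiltersSequence preprocess = new FiltersSequence(
                new GrayscaleY(),
                new Threshold(127),
                new Invert());
            using (Bitmap binarized = preprocess.Apply(bmp))
            {
                BlobCounter counter = new BlobCounter { FilterBlobs = true, MinWidth = 30, MinHeight = 30 };
                counter.ProcessImage(binarized);
                Blob[] filled = counter.GetObjectsInformation();
            }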
Example #2
        // Converts the stored image ("imagen") to grayscale using the Y (luminance) channel
        public Bitmap EscalaGrises()
        {
            GrayscaleY filter = new GrayscaleY();

            imagen = filter.Apply(imagen);
            return(imagen);
        }
Example #3
        //===================method=========================
        public void Get_Plate()
        {
            image_input     = IMAGE;
            backgroundFrame = new Bitmap(Application.StartupPath + "\\anh\\nen.jpg");
            hor_coe         = 0.6; //0.5
            ver_coe         = 0.4; //0.4
            number_coe      = 13;  //13
            min_freq        = 100;
            plate_ratio     = 14;  //14;

            IFilter filt = new GrayscaleY();

            backgroundFrame = filt.Apply(backgroundFrame);
            image_input     = filt.Apply(image_input);

            //IFilter f = new Threshold(180);
            //image_input = f.Apply(image_input);
            p = image_input;
            Subtract sub_img = new Subtract();

            sub_img.OverlayImage = backgroundFrame;
            Bitmap temp_img = sub_img.Apply(image_input);

            image_input = get_object(image_input, temp_img);
            image_input = fft(image_input);
            PLATE       = image_input;
        }
Example #4
        /// <summary>
        /// Creates a comic rendered copy of the input image.
        /// </summary>
        public override Bitmap Render(Bitmap sourceImage)
        {
            // Converters
            GrayscaleY     convertGray  = new GrayscaleY();
            GrayscaleToRGB convertColor = new GrayscaleToRGB();

            // Convert grayscale images to RGB
            if (sourceImage.PixelFormat == PixelFormat.Format8bppIndexed)
            {
                sourceImage = convertColor.Apply(sourceImage);
            }

            Bitmap comicImage = AForge.Imaging.Image.Clone(sourceImage);
            Bitmap edgeLayer  = null;
            Bitmap glowLayer  = null;

            // Glow for smooth colors
            GaussianBlur filterBlur = new GaussianBlur();

            filterBlur.Sigma = 2.0;
            filterBlur.Size  = 4;
            glowLayer        = filterBlur.Apply(comicImage);

            //SmartBlur filterBlur = new SmartBlur(10, 0.2);
            //glowLayer = filterBlur.Apply(comicImage);

            ContrastCorrection filterContrast = new ContrastCorrection(1 - (-this.Coloring * 0.1));

            filterContrast.ApplyInPlace(glowLayer);

            BrightnessCorrection filterBrightness = new BrightnessCorrection((-this.Coloring * 0.1) + 0.1);

            filterBrightness.ApplyInPlace(glowLayer);

            Screen blendScreen = new Screen(glowLayer);

            blendScreen.ApplyInPlace(comicImage);


            // Create a layer for edges
            Convolution filterConvolution = new Convolution(ConvolutionKernel);

            edgeLayer = filterConvolution.Apply(comicImage);

            // Convert to grayscale
            edgeLayer = convertGray.Apply(edgeLayer);

            // Threshold (edge thickness)
            Threshold filterThreshold = new Threshold((byte)(this.Edging * 255 / 100));

            filterThreshold.ApplyInPlace(edgeLayer);
            edgeLayer = convertColor.Apply(edgeLayer);

            // intersect comic with top layer (Darken blend)
            Intersect blendIntersect = new Intersect(edgeLayer);

            blendIntersect.ApplyInPlace(comicImage);

            return(comicImage);
        }
Example #5
        public Bitmap ToGrayScale(Bitmap Im)
        {
            AForge.Imaging.Filters.GrayscaleY Img = new GrayscaleY();
            Bitmap bmImage = AForge.Imaging.Image.Clone(new Bitmap(Im), PixelFormat.Format24bppRgb);

            return(Img.Apply(bmImage));
        }
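A hypothetical call site for ToGrayScale; the file names are placeholders.

            // Hypothetical usage; the input and output paths are placeholders
            using (Bitmap source = new Bitmap("input.jpg"))
            using (Bitmap gray = ToGrayScale(source))
            {
                gray.Save("input-gray.png");
            }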
Example #6
        public BlobsVisualization(FilterContext context)
            : base(new BlobsFilter(), context)
        {
            InitializeComponent();
            grayscaleFilter = new GrayscaleY();

            Filter.MinBlob = 10;
            Filter.MaxBlob = 500;
        }
Example #7
 public FilterVisualization()
 {
     InitializeComponent();
     grayscaleFilter        = new GrayscaleY();
     synchronizationContext = SynchronizationContext.Current;
     if (synchronizationContext == null)
     {
         synchronizationContext = new SynchronizationContext();
     }
 }
Example #8
//--------------------------------------------------------------------
        private void process_motion()
        {
            // Convert both frames to grayscale
            IFilter filt = new GrayscaleY();
            currentFrame = filt.Apply(currentFrame);
            backgroundFrame = filt.Apply(backgroundFrame);

            // Move the background slightly towards the current frame (Morph),
            // then take the difference and binarize it to isolate changed pixels
            FiltersSequence filters = new FiltersSequence();
            Morph filt_morph = new Morph();
            filt_morph.OverlayImage = currentFrame;
            Bitmap tmp = filt_morph.Apply(backgroundFrame);
            filters.Add(new Difference(tmp));
            filters.Add(new Threshold(15));
            Bitmap tmp1 = filters.Apply(currentFrame);

            // The count of white (changed) pixels drives the alarm level
            alarm = CalculateWhitePixels(tmp1);
        }
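CalculateWhitePixels is not shown in this snippet; a minimal sketch of one possible implementation using AForge's ImageStatistics follows (the ratio-based return value and the exact signature are assumptions).

        // Hypothetical helper: fraction of non-black pixels in the binarized 8bpp difference frame.
        // Uses AForge.Imaging.ImageStatistics; the double return type is an assumption.
        private double CalculateWhitePixels(Bitmap binaryFrame)
        {
            ImageStatistics stats = new ImageStatistics(binaryFrame);
            return (double)stats.PixelsCountWithoutBlack / (binaryFrame.Width * binaryFrame.Height);
        }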
Example #9
        /// <summary>
        /// Creates a comic rendered copy of the input image.
        /// </summary>
        public override Bitmap Render(Bitmap sourceImage)
        {
            Bitmap sketchImage = AForge.Imaging.Image.Clone(sourceImage);

            GrayscaleY convertGray = new GrayscaleY();

            // Blur
            GaussianBlur filterBlur = new GaussianBlur();

            filterBlur.Sigma = this.PencilTipSize;
            filterBlur.Size  = this.PencilTipSize;
            Bitmap overLayer = filterBlur.Apply(sketchImage);

            // Invert over layer
            Invert sketchInvert = new Invert();

            sketchInvert.ApplyInPlace(overLayer);

            BrightnessCorrection filterBrightness = new BrightnessCorrection(-this.Range * 0.01);

            filterBrightness.ApplyInPlace(overLayer);

            ContrastCorrection filterContrast = new ContrastCorrection(1 - (-this.Range * 0.01));

            filterContrast.ApplyInPlace(overLayer);

            // Convert to grayscale
            sketchImage = convertGray.Apply(sketchImage);
            overLayer   = convertGray.Apply(overLayer);

            // Dodge blending for the win!
            ColorDodge dodgeBlend = new ColorDodge(overLayer);

            dodgeBlend.ApplyInPlace(sketchImage);

            return(sketchImage);
        }
Example #10
        public static byte[] ApplyFilter(byte[] imageBytes, ImageProcessingFilters filter, ImageFormat format = null)
        {
            IFilter baseFilter = null;

            switch (filter)
            {
            case ImageProcessingFilters.Default:
                return(imageBytes);

            case ImageProcessingFilters.GrayscaleBT709:
                baseFilter = new GrayscaleBT709();
                break;

            case ImageProcessingFilters.GrayscaleRMY:
                baseFilter = new GrayscaleRMY();
                break;

            case ImageProcessingFilters.GrayscaleY:
                baseFilter = new GrayscaleY();
                break;

            case ImageProcessingFilters.BayerFilter:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.B));
                ((FiltersSequence)baseFilter).Add(new BayerFilter());
                break;

            /*
             * case ImageProcessingFilters.ImageWarp:
             * baseFilter = new ImageWarp(
             * break;
             * */
            case ImageProcessingFilters.Channel_Red:
                baseFilter = new ExtractChannel(RGB.R);
                break;

            case ImageProcessingFilters.Channel_Green:
                baseFilter = new ExtractChannel(RGB.G);
                break;

            case ImageProcessingFilters.Channel_Blue:
                baseFilter = new ExtractChannel(RGB.B);
                break;

            case ImageProcessingFilters.WaterWave:
                baseFilter = new WaterWave();
                ((WaterWave)baseFilter).HorizontalWavesCount     = 10;
                ((WaterWave)baseFilter).HorizontalWavesAmplitude = 5;
                ((WaterWave)baseFilter).VerticalWavesCount       = 3;
                ((WaterWave)baseFilter).VerticalWavesAmplitude   = 15;
                break;

            case ImageProcessingFilters.Sepia:
                baseFilter = new Sepia();
                break;

            case ImageProcessingFilters.BrightnessCorrection:
                baseFilter = new BrightnessCorrection(-50);
                break;

            case ImageProcessingFilters.ContrastCorrection:
                baseFilter = new ContrastCorrection(15);
                break;

            case ImageProcessingFilters.SaturationCorrection1:
                baseFilter = new SaturationCorrection(-0.5f);
                break;

            case ImageProcessingFilters.SaturationCorrection2:
                baseFilter = new SaturationCorrection(-.25f);
                break;

            case ImageProcessingFilters.SaturationCorrection3:
                baseFilter = new SaturationCorrection(+0.5f);
                break;

            case ImageProcessingFilters.Invert:
                baseFilter = new Invert();
                break;

            case ImageProcessingFilters.Blur:
                baseFilter = new Blur();
                break;

            case ImageProcessingFilters.RotateChannels:
                baseFilter = new RotateChannels();
                break;

            case ImageProcessingFilters.RotateChannels2:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new RotateChannels());
                ((FiltersSequence)baseFilter).Add(new RotateChannels());
                break;

            case ImageProcessingFilters.AdditiveNoise:
                IRandomNumberGenerator generator = new UniformGenerator(new Range(-50, 50));
                baseFilter = new AdditiveNoise(generator);
                break;

            case ImageProcessingFilters.GammaCorrection:
                baseFilter = new GammaCorrection(0.5);
                break;

            case ImageProcessingFilters.HistogramEqualization:
                baseFilter = new HistogramEqualization();
                break;

            case ImageProcessingFilters.OrderedDithering:
                byte[,] matrix = new byte[4, 4]
                {
                    { 95, 233, 127, 255 },
                    { 159, 31, 191, 63 },
                    { 111, 239, 79, 207 },
                    { 175, 47, 143, 15 }
                };
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
                ((FiltersSequence)baseFilter).Add(new OrderedDithering(matrix));
                break;

            case ImageProcessingFilters.Pixallete:
                baseFilter = new Pixellate();
                break;

            case ImageProcessingFilters.SimplePosterization:
                baseFilter = new SimplePosterization();
                break;

            case ImageProcessingFilters.Texturer_Textile:
                baseFilter = new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.Texturer_Cloud:
                baseFilter = new Texturer(new AForge.Imaging.Textures.CloudsTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.Texturer_Marble:
                baseFilter = new Texturer(new AForge.Imaging.Textures.MarbleTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.Texturer_Wood:
                baseFilter = new Texturer(new AForge.Imaging.Textures.WoodTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.Texturer_Labyrinth:
                baseFilter = new Texturer(new AForge.Imaging.Textures.LabyrinthTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.SobelEdgeDetector:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                break;

            case ImageProcessingFilters.SobelEdgeDetectorInvert:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new Invert());
                break;

            case ImageProcessingFilters.SobelEdgeDetectorSepia:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
                ((FiltersSequence)baseFilter).Add(new Sepia());
                break;

            case ImageProcessingFilters.SobelEdgeDetectorSepiaCanvas:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
                ((FiltersSequence)baseFilter).Add(new Sepia());
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
                break;

            case ImageProcessingFilters.Drawing:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new Invert());
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                break;

            case ImageProcessingFilters.DrawingSepia:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new Invert());
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
                ((FiltersSequence)baseFilter).Add(new Sepia());
                break;

            case ImageProcessingFilters.OilCanvas:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
                break;

            case ImageProcessingFilters.OilCanvasGray:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
                ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
                break;

            case ImageProcessingFilters.OilCanvasSepia:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
                ((FiltersSequence)baseFilter).Add(new Sepia());
                break;
            }

            if (baseFilter == null)
            {
                return(null);
            }

            return(ApplyFilter(imageBytes, baseFilter, format));
        }
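A hypothetical call site for ApplyFilter; the file names are placeholders, and the byte[]/IFilter/ImageFormat overload it delegates to in its final return statement is assumed to exist in the same class.

            // Hypothetical usage; requires System.IO and System.Drawing.Imaging
            byte[] input  = File.ReadAllBytes("photo.jpg");
            byte[] output = ApplyFilter(input, ImageProcessingFilters.GrayscaleY, ImageFormat.Png);
            File.WriteAllBytes("photo-gray.png", output);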
Example #11
        /// <summary>
        ///     Performs the analysis on the image
        /// </summary>
        public void Analyze(bool thorough = true)
        {
            if (m_disposed)
            {
                throw new ObjectDisposedException("ImageProcessor");
            }

            if (IsScannable)
            {
                return;
            }

            LuminanceSource source    = new BitmapLuminanceSource(m_bitmap);
            var             binarizer = new HybridBinarizer(source);
            var             binBitmap = new BinaryBitmap(binarizer);

            // Try to extract the form data
            var barReader = new BarcodeReader();

            barReader.AutoRotate              = true;
            barReader.Options.TryHarder       = thorough;
            barReader.Options.PossibleFormats = new List <BarcodeFormat> {
                BarcodeFormat.CODE_128
            };

            m_barcodeResults = barReader.DecodeMultiple(source);

            // Look for barcode markers if possible
            if (m_barcodeResults != null)
            {
                MarkerCodes = m_barcodeResults.Where(o => o.Text.StartsWith("OMR:")).ToArray();
            }
            IsScannable = true;
            // Get the template data
            var markerCode = MarkerCodes == null
                ? null
                : MarkerCodes.FirstOrDefault(o => o.Text.StartsWith("OMR:TL") || o.Text.StartsWith("OMR:ID"));

            // Get the guiding points by circles
            var grayFilter      = new GrayscaleY();
            var thresholdFilter = new Threshold(127);
            var invertFilter    = new Invert();

            using (var searchImage = invertFilter.Apply(thresholdFilter.Apply(grayFilter.Apply(m_bitmap))))
            {
                // Blobs
                var blobCounter = new BlobCounter();
                blobCounter.FilterBlobs = true;
                blobCounter.MinHeight   = 30;
                blobCounter.MinWidth    = 30;

                // Check for circles
                blobCounter.ProcessImage(searchImage);
                var blobs         = blobCounter.GetObjectsInformation();
                var shapeChecker  = new SimpleShapeChecker();
                var controlPoints = new List <Point>();
                var currentCheck  = 45;
                while ((currentCheck-- > 20) && (controlPoints.Count != 4))
                {
                    controlPoints.Clear();
                    // Get the positions
                    foreach (var blob in blobs)
                    {
                        var   center = new Point();
                        float radius = 0;

                        if (shapeChecker.IsCircle(blobCounter.GetBlobsEdgePoints(blob), out center, out radius) &&
                            (radius > currentCheck))
                        {
                            controlPoints.Add(center);
                        }
                    }
                }

                // Control points
                IsScannable &= controlPoints.Count == 4;
                if (!IsScannable)
                {
                    return;
                }

                // Now set markers
                m_topLeft     = controlPoints[0]; //new AForge.Point(this.m_bitmap.Width + 10, this.m_bitmap.Height + 10);
                m_topRight    = controlPoints[1];
                m_bottomLeft  = controlPoints[2];
                m_bottomRight = controlPoints[3];

                // Find the right most bubble
                float rightMost = controlPoints.Select(o => o.X).Max(),
                      leftMost  = controlPoints.Select(o => o.X).Min();
                // Organize those that are left/right
                Point[] lefties = controlPoints.Where(o => o.X < leftMost + (rightMost - leftMost) / 2).ToArray(),
                righties = controlPoints.Where(o => o.X > leftMost + (rightMost - leftMost) / 2).ToArray();

                // HACK:
                if (lefties[0].Y < lefties[1].Y)
                {
                    m_topLeft    = lefties[0];
                    m_bottomLeft = lefties[1];
                }
                else
                {
                    m_topLeft    = lefties[1];
                    m_bottomLeft = lefties[0];
                }

                // HACK:
                if (righties[0].Y < righties[1].Y)
                {
                    m_topRight    = righties[0];
                    m_bottomRight = righties[1];
                }
                else
                {
                    m_topRight    = righties[1];
                    m_bottomRight = righties[0];
                }
            }

            if (!IsScannable)
            {
                return;
            }

            // Get the template data
            if ((MarkerCodes != null) && (markerCode != null))
            {
                var templateData = markerCode.Text.Split(':');
                if (templateData.Length > 2)
                {
                    TemplateName = templateData[2];
                    if (templateData.Length > 3)
                    {
                        Parameters = templateData.Skip(3).ToArray();
                    }
                }
            }
        }
Example #12
        /// <summary>
        /// Apply template
        /// </summary>
        /// <param name="template"></param>
        /// <param name="image"></param>
        public OmrPageOutput ApplyTemplate(OmrTemplate template, ScannedImage image)
        {
            // Image ready for scan
            if (!image.IsReadyForScan)
            {
                if (!image.IsScannable)
                {
                    image.Analyze();
                }
                image.PrepareProcessing();
            }

            // Page output
            OmrPageOutput retVal = new OmrPageOutput()
            {
                Id         = image.TemplateName + DateTime.Now.ToString("yyyyMMddHHmmss"),
                TemplateId = image.TemplateName,
                Parameters = image.Parameters,
                StartTime  = DateTime.Now,
                Template   = template
            };

            // Save directory for output images
            string saveDirectory = String.Empty;
            var    parmStr       = new StringBuilder();

            if (this.SaveIntermediaryImages)
            {
                if (image.Parameters != null)
                {
                    foreach (var pv in image.Parameters)
                    {
                        parmStr.AppendFormat("{0}.", pv);
                    }
                }
                retVal.RefImages = new List <string>()
                {
                    String.Format("{0}-{1}-init.bmp", retVal.Id, parmStr),
                    String.Format("{0}-{1}-tx.bmp", retVal.Id, parmStr),
                    String.Format("{0}-{1}-fields.bmp", retVal.Id, parmStr),
                    String.Format("{0}-{1}-gs.bmp", retVal.Id, parmStr),
                    String.Format("{0}-{1}-bw.bmp", retVal.Id, parmStr),
                    String.Format("{0}-{1}-inv.bmp", retVal.Id, parmStr)
                };

                saveDirectory = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), "imgproc");

                if (!Directory.Exists(saveDirectory))
                {
                    Directory.CreateDirectory(saveDirectory);
                }
                image.Image.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-init.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));
            }

            // First, we want to get the image from the scanned image and translate it to the original
            // position in the template
            Bitmap bmp = null;

            try
            {
                bmp = new Bitmap((int)template.BottomRight.X, (int)template.BottomRight.Y, System.Drawing.Imaging.PixelFormat.Format24bppRgb);

                // Scale
                float width  = template.TopRight.X - template.TopLeft.X,
                      height = template.BottomLeft.Y - template.TopLeft.Y;


                // Translate to original
                using (Graphics g = Graphics.FromImage(bmp))
                {
                    ResizeBicubic bc = new ResizeBicubic((int)width, (int)height);
                    g.DrawImage(bc.Apply((Bitmap)image.Image), template.TopLeft.X, template.TopLeft.Y);
                }

                if (this.SaveIntermediaryImages)
                {
                    bmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-tx.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));
                }


                // Now try to do hit from the template
                if (this.SaveIntermediaryImages)
                {
                    using (var tbmp = bmp.Clone() as Bitmap)
                    {
                        using (Graphics g = Graphics.FromImage(tbmp))
                        {
                            foreach (var field in template.Fields)
                            {
                                g.DrawRectangle(Pens.Black, field.TopLeft.X, field.TopLeft.Y, field.TopRight.X - field.TopLeft.X, field.BottomLeft.Y - field.TopLeft.Y);
                                g.DrawString(field.Id, SystemFonts.CaptionFont, Brushes.Black, field.TopLeft);
                            }
                        }

                        tbmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-fields.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));
                    }
                }


                // Now convert to Grayscale
                GrayscaleY grayFilter = new GrayscaleY();
                var        gray       = grayFilter.Apply(bmp);
                bmp.Dispose();
                bmp = gray;

                if (this.SaveIntermediaryImages)
                {
                    bmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-gs.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));
                }

                // Prepare answers
                Dictionary <OmrQuestionField, OmrOutputData> hitFields = new Dictionary <OmrQuestionField, OmrOutputData>();
                BarcodeReader barScan = new BarcodeReader();
                barScan.Options.UseCode39ExtendedMode        = true;
                barScan.Options.UseCode39RelaxedExtendedMode = true;
                barScan.Options.TryHarder   = true;
                barScan.TryInverted         = true;
                barScan.Options.PureBarcode = false;
                barScan.AutoRotate          = true;

                foreach (var itm in template.Fields.Where(o => o is OmrBarcodeField))
                {
                    PointF position = itm.TopLeft;
                    SizeF  size     = new SizeF(itm.TopRight.X - itm.TopLeft.X, itm.BottomLeft.Y - itm.TopLeft.Y);
                    using (var areaOfInterest = new Crop(new Rectangle((int)position.X, (int)position.Y, (int)size.Width, (int)size.Height)).Apply(bmp))
                    {
                        // Scan the barcode
                        var result = barScan.Decode(areaOfInterest);


                        if (result != null)
                        {
                            hitFields.Add(itm, new OmrBarcodeData()
                            {
                                BarcodeData = result.Text,
                                Format      = result.BarcodeFormat,
                                Id          = itm.Id,
                                TopLeft     = new PointF(result.ResultPoints[0].X + position.X, result.ResultPoints[0].Y + position.Y),
                                BottomRight = new PointF(result.ResultPoints[1].X + position.X, result.ResultPoints[0].Y + position.Y + 10)
                            });
                        }
                    }
                }

                // Now binarize
                Threshold binaryThreshold = new Threshold(template.ScanThreshold);
                binaryThreshold.ApplyInPlace(bmp);

                if (this.SaveIntermediaryImages)
                {
                    bmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-bw.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));
                }

                // Set return parameters
                String tAnalyzeFile = Path.GetTempFileName(); // GetTempFileName already returns a full path in the temp directory
                bmp.Save(tAnalyzeFile, System.Drawing.Imaging.ImageFormat.Jpeg);
                retVal.AnalyzedImage = tAnalyzeFile;
                retVal.BottomRight   = new PointF(bmp.Width, bmp.Height);

                // Now Invert
                Invert invertFilter = new Invert();
                invertFilter.ApplyInPlace(bmp);

                if (this.SaveIntermediaryImages)
                {
                    bmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-inv.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));
                }


                // Crop out areas of interest
                List <KeyValuePair <OmrQuestionField, Bitmap> > areasOfInterest = new List <KeyValuePair <OmrQuestionField, Bitmap> >();
                foreach (var itm in template.Fields.Where(o => o is OmrBubbleField))
                {
                    PointF position = itm.TopLeft;
                    SizeF  size     = new SizeF(itm.TopRight.X - itm.TopLeft.X, itm.BottomLeft.Y - itm.TopLeft.Y);
                    areasOfInterest.Add(new KeyValuePair <OmrQuestionField, Bitmap>(
                                            itm,
                                            new Crop(new Rectangle((int)position.X, (int)position.Y, (int)size.Width, (int)size.Height)).Apply(bmp))
                                        );
                }

                // Queue analysis
                WaitThreadPool wtp      = new WaitThreadPool();
                Object         syncLock = new object();

                foreach (var itm in areasOfInterest)
                {
                    wtp.QueueUserWorkItem(img =>
                    {
                        var parm = (KeyValuePair <OmrQuestionField, Bitmap>)itm;

                        try
                        {
                            var areaOfInterest = parm.Value;
                            var field          = parm.Key;

                            BlobCounter blobCounter = new BlobCounter();
                            blobCounter.FilterBlobs = true;

                            // Check for circles
                            blobCounter.ProcessImage(areaOfInterest);
                            Blob[] blobs = blobCounter.GetObjectsInformation();
                            var blob     = blobs.FirstOrDefault(o => o.Area == blobs.Max(b => b.Area));
                            if (blob != null)
                            {
                                //var area = new AForge.Imaging.ImageStatistics(blob).PixelsCountWithoutBlack;
                                if (blob.Area < 30)
                                {
                                    return;
                                }
                                var bubbleField = field as OmrBubbleField;
                                lock (syncLock)
                                    hitFields.Add(field, new OmrBubbleData()
                                    {
                                        Id          = field.Id,
                                        Key         = bubbleField.Question,
                                        Value       = bubbleField.Value,
                                        TopLeft     = new PointF(blob.Rectangle.X + field.TopLeft.X, blob.Rectangle.Y + field.TopLeft.Y),
                                        BottomRight = new PointF(blob.Rectangle.X + blob.Rectangle.Width + field.TopLeft.X, blob.Rectangle.Y + blob.Rectangle.Height + field.TopLeft.Y),
                                        BlobArea    = blob.Area
                                    });
                            }
                        }
                        catch (Exception e) {
                            Trace.TraceError(e.ToString());
                        }
                        finally
                        {
                            parm.Value.Dispose();
                        }
                    }, itm);
                }

                wtp.WaitOne();

                // Organize the response
                foreach (var res in hitFields)
                {
                    if (String.IsNullOrEmpty(res.Key.AnswerRowGroup))
                    {
                        this.AddAnswerToOutputCollection(retVal, res);
                    }
                    else
                    {
                        // Rows of data
                        OmrRowData rowGroup = retVal.Details.Find(o => o.Id == res.Key.AnswerRowGroup) as OmrRowData;
                        if (rowGroup == null)
                        {
                            rowGroup = new OmrRowData()
                            {
                                Id = res.Key.AnswerRowGroup
                            };
                            retVal.Details.Add(rowGroup);
                        }

                        this.AddAnswerToOutputCollection(rowGroup, res);
                    }
                }

                // Remove temporary images
                //foreach (var f in retVal.RefImages)
                //    File.Delete(Path.Combine(saveDirectory, f));

                // Outcome is success
                retVal.Outcome = OmrScanOutcome.Success;
            }
            catch (Exception e)
            {
                retVal.Outcome      = OmrScanOutcome.Failure;
                retVal.ErrorMessage = e.Message;
                Trace.TraceError(e.ToString());
            }
            finally
            {
                retVal.StopTime = DateTime.Now;
                bmp.Dispose();
            }

            return(retVal);
        }
Example #13
        /// <summary>
        /// This is the method that actually does the work.
        /// </summary>
        /// <param name="DA">The DA object is used to retrieve from inputs and store in outputs.</param>
        protected override void SolveInstance(IGH_DataAccess DA)
        {
            IGH_Goo goo   = null;
            Image   image = new Image();

            if (!DA.GetData(0, ref goo))
            {
                return;
            }
            if (!goo.TryGetImage(ref image))
            {
                return;
            }

            int mode = 0;

            DA.GetData(1, ref mode);

            double numValA = 1.0;

            DA.GetData(2, ref numValA);

            double numValB = 1.0;

            DA.GetData(3, ref numValB);

            double numValC = 1.0;

            DA.GetData(4, ref numValC);

            Filter filter = new Filter();

            int[] indices = new int[] { 2, 3, 4 };

            switch ((FilterModes)mode)
            {
            case FilterModes.BT709:
                ClearParameter(indices);
                filter = new GrayscaleBT709();
                image.Filters.Add(new GrayscaleBT709());
                break;

            case FilterModes.RMY:
                ClearParameter(indices);
                filter = new GrayscaleRMY();
                image.Filters.Add(new GrayscaleRMY());
                break;

            case FilterModes.Y:
                ClearParameter(indices);
                filter = new GrayscaleY();
                image.Filters.Add(new GrayscaleY());
                break;

            case FilterModes.Simple:
                SetParameter(2, "R", "Red", "The Red coefficient");
                SetParameter(3, "G", "Green", "The Green coefficient");
                SetParameter(4, "B", "Blue", "The Blue coefficient");
                filter = new Simple(numValA, numValB, numValC);
                image.Filters.Add(new Simple(numValA, numValB, numValC));
                break;
            }

            message = ((FilterModes)mode).ToString();
            UpdateMessage();

            DA.SetData(0, image);
            DA.SetData(1, filter);
        }