예제 #1
0
        private static Collection <GeometricEdgeBasedPatternMatch> IVA_MatchGeometricPattern2(VisionImage image,
                                                                                              string templatePath,
                                                                                              CurveOptions curveOptions,
                                                                                              MatchGeometricPatternEdgeBasedOptions matchOptions,
                                                                                              IVA_Data ivaData,
                                                                                              int stepIndex,
                                                                                              Roi roi)
        {
            // Edge-based geometric pattern matching: load the template from disk,
            // run the match inside the given ROI, then log every reported value
            // into the step results and return the raw match reports.
            using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
            {
                // Load the template image used for matching.
                imageTemplate.ReadVisionFile(templatePath);

                Collection <GeometricEdgeBasedPatternMatch> matches = Algorithms.MatchGeometricPatternEdgeBased(image, imageTemplate, curveOptions, matchOptions, roi);

                // Discard any results this step logged on a previous iteration.
                Functions.IVA_DisposeStepResults(ivaData, stepIndex);

                var results = ivaData.stepResults[stepIndex].results;
                bool isCalibrated = (image.InfoTypes & InfoTypes.Calibration) != 0;

                results.Add(new IVA_Result("# Matches", matches.Count));

                // Matches are logged 1-based ("Match 1", "Match 2", ...).
                int matchNumber = 0;
                foreach (GeometricEdgeBasedPatternMatch match in matches)
                {
                    ++matchNumber;

                    results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", matchNumber), match.Position.X));
                    results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", matchNumber), match.Position.Y));

                    // Calibrated images also get real-world coordinates.
                    if (isCalibrated)
                    {
                        results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", matchNumber), match.CalibratedPosition.X));
                        results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", matchNumber), match.CalibratedPosition.Y));
                    }

                    results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", matchNumber), match.Rotation));
                    results.Add(new IVA_Result(String.Format("Match {0}.Scale", matchNumber), match.Scale));
                    results.Add(new IVA_Result(String.Format("Match {0}.Score", matchNumber), match.Score));
                    results.Add(new IVA_Result(String.Format("Match {0}.Template Target Curve Score", matchNumber), match.TemplateMatchCurveScore));
                    results.Add(new IVA_Result(String.Format("Match {0}.Correlation Score", matchNumber), match.CorrelationScore));
                }

                return(matches);
            }
        }
예제 #2
0
        private void Form1_Load(object sender, EventArgs e)
        {
            // Build the path to the Roundcard example images.
            string imagesFolder = ExampleImagesFolder.GetExampleImagesFolder();
            imagePath = System.IO.Path.Combine(imagesFolder, @"Roundcard\");

            // Load the pattern matching template from disk.
            template.ReadVisionFile(System.IO.Path.Combine(imagePath, "template.png"));

            // Initialize the coordinate system data for the stage.
            InitializeStageMotionInformation();

            // Start the timer and process the first image immediately.
            timer1.Enabled = true;
            timer1.Start();
            timer1_Tick(timer1, EventArgs.Empty);
        }
예제 #3
0
        private void Form1_Load(object sender, EventArgs e)
        {
            // Locate the Blister example images and start at the first frame.
            string imagesFolder = ExampleImagesFolder.GetExampleImagesFolder();
            imagePath   = System.IO.Path.Combine(imagesFolder, @"Blister\");
            imageNumber = 0;

            // Load the color pattern matching template from disk.
            template.ReadVisionFile(System.IO.Path.Combine(imagePath, "template.png"));

            // Configure the color pattern matching options.
            matchOptions.ColorWeight       = 300;
            matchOptions.MinimumMatchScore = 500;
            matchOptions.SearchStrategy    = SearchStrategy.Aggressive;

            // Start the timer and process the first image immediately.
            timer1.Enabled = true;
            timer1.Start();
            timer1_Tick(timer1, EventArgs.Empty);
        }
예제 #4
0
        private void Form1_Load(object sender, EventArgs e)
        {
            // Build the lookup table: the first entry is 255, every
            // subsequent entry (1..255) maps to its own index value.
            lookupTable.Add(255);
            for (short entry = 1; entry < 256; ++entry)
            {
                lookupTable.Add(entry);
            }

            // Load the golden template image from disk.
            template.ReadVisionFile(System.IO.Path.Combine(imagePath, "template.png"));

            // Show the template in the viewer.
            imageViewer1.Attach(template);

            // Start the timer and process the first image immediately.
            timer1.Enabled = true;
            timer1.Start();
            timer1_Tick(timer1, EventArgs.Empty);
        }
예제 #5
0
        public static PointContour MatchPattern(VisionImage SourceImage, RectangleContour vaRect, string TemplateFile, int vaNumMatchesRequested, float vaMinMatchScore, float fUpper = 0, float fLower = 0)
        {
            // Grayscale pattern matching inside a rotated-rectangle ROI derived
            // from vaRect (centered on it, shrunk by 50 pixels per dimension).
            // Returns the pixel position of the best match, or (-10000, -10000)
            // when no match is found.  The best match is also overlaid on
            // SourceImage as a red cross sized from the template dimensions.
            PointContour point = new PointContour();

            point.X = -10000;
            point.Y = -10000;

            // Fix: the ROI was previously disposed manually after the template
            // using-block, so it leaked whenever an exception was thrown in
            // between.  A using statement guarantees disposal on every path.
            using (Roi roi = new Roi())
            {
                // Center the search rectangle on vaRect and shrink it by 50 pixels.
                PointContour            vaCenter      = new PointContour(vaRect.Left + vaRect.Width / 2, vaRect.Top + vaRect.Height / 2);
                RotatedRectangleContour vaRotatedRect = new RotatedRectangleContour(vaCenter, vaRect.Width - 50, vaRect.Height - 50, 0);
                roi.Add(vaRotatedRect);

                MatchingAlgorithm matchAlgorithm = MatchingAlgorithm.MatchGrayValuePyramid;

                // Angle search range [fLower, fUpper] plus a second (0, 0) range.
                float[] minAngleVals = { fLower, 0 };
                float[] maxAngleVals = { fUpper, 0 };
                // Advanced option ids and values, paired by index.
                int[]    advancedOptionsItems  = { 102, 106, 107, 108, 109, 111, 112, 113, 103, 104, 105, 100 };
                double[] advancedOptionsValues = { 10, 300, 0, 6, 1, 20, 10, 20, 1, 20, 0, 5 };

                using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
                {
                    // Read the image template.
                    imageTemplate.ReadVisionFile(TemplateFile);

                    // Set the angle ranges.
                    Collection <RotationAngleRange> angleRange = new Collection <RotationAngleRange>();
                    for (int i = 0; i < 2; ++i)
                    {
                        angleRange.Add(new RotationAngleRange(minAngleVals[i], maxAngleVals[i]));
                    }

                    // Set the advanced options (count taken from the array
                    // length instead of a separately maintained constant).
                    Collection <PMMatchAdvancedSetupDataOption> advancedMatchOptions = new Collection <PMMatchAdvancedSetupDataOption>();
                    for (int i = 0; i < advancedOptionsItems.Length; ++i)
                    {
                        advancedMatchOptions.Add(new PMMatchAdvancedSetupDataOption((MatchSetupOption)advancedOptionsItems[i], advancedOptionsValues[i]));
                    }

                    // Searches for areas in the image that match the given pattern.
                    Collection <PatternMatchReport> patternMatchingResults = Algorithms.MatchPattern3(SourceImage, imageTemplate, matchAlgorithm, vaNumMatchesRequested, vaMinMatchScore, angleRange, roi, advancedMatchOptions);

                    if (patternMatchingResults.Count > 0)
                    {
                        // Only the first match is reported (the original loop ran
                        // exactly once); an unused score-string accumulator was removed.
                        point.X = patternMatchingResults[0].Position.X;
                        point.Y = patternMatchingResults[0].Position.Y;

                        // Overlay a red cross centered on the match position.
                        LineContour l1 = new LineContour();
                        l1.Start.X = point.X - (imageTemplate.Width / 2 - 3);
                        l1.Start.Y = point.Y;
                        l1.End.X   = point.X + (imageTemplate.Width / 2 - 3);
                        l1.End.Y   = point.Y;
                        SourceImage.Overlays.Default.AddLine(l1, Rgb32Value.RedColor);

                        LineContour l2 = new LineContour();
                        l2.Start.X = point.X;
                        l2.Start.Y = point.Y - (imageTemplate.Height / 2 - 3);
                        l2.End.X   = point.X;
                        l2.End.Y   = point.Y + (imageTemplate.Height / 2 - 3);
                        SourceImage.Overlays.Default.AddLine(l2, Rgb32Value.RedColor);
                    }
                }
            }
            return(point);
        }
예제 #6
0
        private static Collection <PatternMatchReport> IVA_MatchPattern(VisionImage image,
                                                                        IVA_Data ivaData,
                                                                        string templatePath,
                                                                        MatchingAlgorithm algorithm,
                                                                        float[] angleRangeMin,
                                                                        float[] angleRangeMax,
                                                                        int[] advOptionsItems,
                                                                        double[] advOptionsValues,
                                                                        int numAdvancedOptions,
                                                                        int matchesRequested,
                                                                        float score,
                                                                        Roi roi,
                                                                        int stepIndex)
        {
            // Pattern matching step: loads the template image from templatePath,
            // runs MatchPattern3 with the given angle ranges and advanced
            // options, logs the results into ivaData at stepIndex, and returns
            // the raw match reports.
            // (Removed: a numObjectResults local that was computed from the
            // calibration flag but never read; calibration is checked inline
            // in the logging loop below.)
            FileInformation fileInfo = Algorithms.GetFileInformation(templatePath);

            using (VisionImage imageTemplate = new VisionImage(fileInfo.ImageType, 7))
            {
                // Read the image template.
                imageTemplate.ReadVisionFile(templatePath);

                // Set the angle ranges (two min/max pairs).
                Collection <RotationAngleRange> angleRange = new Collection <RotationAngleRange>();
                for (int i = 0; i < 2; ++i)
                {
                    angleRange.Add(new RotationAngleRange(angleRangeMin[i], angleRangeMax[i]));
                }

                // Set the advanced options (ids and values paired by index).
                Collection <PMMatchAdvancedSetupDataOption> advancedMatchOptions = new Collection <PMMatchAdvancedSetupDataOption>();
                for (int i = 0; i < numAdvancedOptions; ++i)
                {
                    advancedMatchOptions.Add(new PMMatchAdvancedSetupDataOption((MatchSetupOption)advOptionsItems[i], advOptionsValues[i]));
                }

                // Searches for areas in the image that match a given pattern.
                Collection <PatternMatchReport> patternMatchingResults = Algorithms.MatchPattern3(image, imageTemplate, algorithm, matchesRequested, score, angleRange, roi, advancedMatchOptions);

                // ////////////////////////////////////////
                // Store the results in the data structure.
                // ////////////////////////////////////////

                // First, delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, stepIndex);

                if (patternMatchingResults.Count > 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of objects", patternMatchingResults.Count));

                    for (int i = 0; i < patternMatchingResults.Count; ++i)
                    {
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", i + 1), patternMatchingResults[i].Position.X));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", i + 1), patternMatchingResults[i].Position.Y));

                        // If the image is calibrated, add the calibrated positions.
                        if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                        {
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", i + 1), patternMatchingResults[i].CalibratedPosition.X));
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", i + 1), patternMatchingResults[i].CalibratedPosition.Y));
                        }

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", i + 1), patternMatchingResults[i].Rotation));
                        if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                        {
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Calibrated Angle (degrees)", i + 1), patternMatchingResults[i].CalibratedRotation));
                        }

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Score", i + 1), patternMatchingResults[i].Score));
                    }
                }

                return(patternMatchingResults);
            }
        }
예제 #7
0
        private Collection <PatternMatch> IVA_MatchPattern(VisionImage image,
                                                           IVA_Data ivaData,
                                                           string templatePath,
                                                           MatchMode matchMode,
                                                           bool subpixel,
                                                           int[] angleRangeMin,
                                                           int[] angleRangeMax,
                                                           int matchesRequested,
                                                           double score,
                                                           Roi roi,
                                                           double matchOffset_x,
                                                           double matchOffset_y,
                                                           int stepIndex)
        {
            // Pattern matching step: loads the template, runs MatchPattern2,
            // shifts each match location by the (matchOffset_x, matchOffset_y)
            // offset rotated by the match angle, and logs pixel (and, when the
            // image is calibrated, real-world) positions, angle and score into
            // ivaData at stepIndex.  Returns the raw match reports.
            using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
            {
                PointContour matchLocation = new PointContour();

                // Read the image template.
                imageTemplate.ReadVisionFile(templatePath);

                // Calibrated images additionally get real-world coordinates
                // logged below (this replaces the old numObjectResults == 6 flag).
                bool isCalibrated = (image.InfoTypes & InfoTypes.Calibration) != 0;

                // Fill in the Pattern Matching options.
                MatchPatternOptions matchPatternOptions = new MatchPatternOptions(matchMode, matchesRequested);
                matchPatternOptions.MinimumContrast  = 0;
                matchPatternOptions.SubpixelAccuracy = subpixel;
                for (int i = 0; i < 2; ++i)
                {
                    matchPatternOptions.RotationAngleRanges.Add(new Range(angleRangeMin[i], angleRangeMax[i]));
                }
                matchPatternOptions.MinimumMatchScore = score;

                // Searches for areas in the image that match a given pattern.
                Collection <PatternMatch> patternMatchingResults = Algorithms.MatchPattern2(image, imageTemplate, matchPatternOptions, roi);

                // ////////////////////////////////////////
                // Store the results in the data structure.
                // ////////////////////////////////////////

                // First, delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, stepIndex);

                if (patternMatchingResults.Count > 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of objects", patternMatchingResults.Count));

                    for (int i = 0; i < patternMatchingResults.Count; ++i)
                    {
                        // Adjust the match location using the specified offsets.
                        // (Fix: the offset was previously computed twice per
                        // match with identical arguments; compute it once.)
                        var offset = Functions.IVA_ComputePMOffset(matchOffset_x, matchOffset_y, patternMatchingResults[i].Rotation);
                        matchLocation.X = patternMatchingResults[i].Position.X + offset.X;
                        matchLocation.Y = patternMatchingResults[i].Position.Y + offset.Y;

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", i + 1), matchLocation.X));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", i + 1), matchLocation.Y));

                        // If the image is calibrated, convert the pixel values to real world coordinates.
                        if (isCalibrated)
                        {
                            CoordinatesReport realWorldPosition = Algorithms.ConvertPixelToRealWorldCoordinates(image, new Collection <PointContour>(new PointContour[] { matchLocation }));

                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", i + 1), realWorldPosition.Points[0].X));
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", i + 1), realWorldPosition.Points[0].Y));
                        }

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", i + 1), patternMatchingResults[i].Rotation));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Score", i + 1), patternMatchingResults[i].Score));
                    }
                }

                return(patternMatchingResults);
            }
        }
예제 #8
0
        // timer1_Tick is called when the timer interval has passed.  The next image
        // is read in, barcode settings are initialized based on the type,
        // and the barcode is read and processed.  The selected symbology
        // (PDF417 / Data Matrix / QR) is taken from the barcodeType combo text.
        private void timer1_Tick(object sender, EventArgs e)
        {
            // Stop the timer so that we don't count the time spent reading the barcode
            timer1.Enabled = false;
            using (VisionImage tempImage = new VisionImage()) {
                // Load the barcode image for the current index.
                tempImage.ReadVisionFile(curImages[imageIndex]);

                // One report per supported symbology; only the branch matching
                // the selected type fills its report in below.
                Collection <Pdf417Report> pdf417Report = new Collection <Pdf417Report>();
                DataMatrixReport          dmReport     = new DataMatrixReport();
                QRReport qrReport = new QRReport();
                // Read barcode and time how long it takes (TickCount delta, ms).
                int startTime = System.Environment.TickCount;
                if (barcodeType.Text == "PDF417")
                {
                    // Decode a PDF417 code.
                    pdf417Report = Algorithms.ReadPdf417Barcode(tempImage);
                }
                else if (barcodeType.Text == "Data Matrix")
                {
                    // Decode a Data Matrix
                    DataMatrixDescriptionOptions descOptions   = new DataMatrixDescriptionOptions();
                    DataMatrixSizeOptions        sizeOptions   = new DataMatrixSizeOptions();
                    DataMatrixSearchOptions      searchOptions = new DataMatrixSearchOptions();
                    if (useOptionsBox.Checked)
                    {
                        // Read the options from the image.
                        GetDataMatrixCodeSettings(tempImage, ref descOptions, ref sizeOptions, ref searchOptions);
                    }
                    // Only prepare an AIM grading report when grading is requested.
                    DataMatrixGradingMode gradingMode = DataMatrixGradingMode.None;
                    if (gradeDMBox.Checked)
                    {
                        gradingMode = DataMatrixGradingMode.PrepareForAim;
                    }
                    dmReport = Algorithms.ReadDataMatrixBarcode(tempImage, null, gradingMode, descOptions, sizeOptions, searchOptions);
                }
                else
                {
                    // Decode a QR Code
                    QRDescriptionOptions descOptions   = new QRDescriptionOptions();
                    QRSizeOptions        sizeOptions   = new QRSizeOptions();
                    QRSearchOptions      searchOptions = new QRSearchOptions();
                    if (useOptionsBox.Checked)
                    {
                        // Read the options from the image.
                        // NOTE(review): this branch is empty — unlike the Data
                        // Matrix branch above, nothing is actually read from the
                        // image here, so the checkbox has no effect for QR codes.
                        // Confirm whether a GetQRCodeSettings-style call is missing.
                    }
                    qrReport = Algorithms.ReadQRCode(tempImage, null, descOptions, sizeOptions, searchOptions);
                }
                int  elapsedTime = System.Environment.TickCount - startTime;
                // Determine whether a barcode was found for the selected type.
                bool found;
                if (barcodeType.Text == "PDF417")
                {
                    found = pdf417Report.Count > 0;
                }
                else if (barcodeType.Text == "Data Matrix")
                {
                    found = dmReport.Found;
                }
                else
                {
                    found = qrReport.Found;
                }
                // Process info: display the decoded data, overlay the barcode
                // outline, and center the viewer on the code.
                if (found)
                {
                    PointContour centerPoint = new PointContour();
                    if (barcodeType.Text == "PDF417")
                    {
                        dataFound.Text = pdf417Report[0].StringData;
                        typeFound.Text = "Pdf417";
                        tempImage.Overlays.Default.AddPolygon(new PolygonContour(pdf417Report[0].Corners), Rgb32Value.GreenColor, DrawingMode.DrawValue);
                        // Center the viewer on the barcode (midpoint of opposite corners).
                        centerPoint.Initialize((pdf417Report[0].Corners[0].X + pdf417Report[0].Corners[2].X) / 2, (pdf417Report[0].Corners[0].Y + pdf417Report[0].Corners[2].Y) / 2);
                    }
                    else if (barcodeType.Text == "Data Matrix")
                    {
                        // Binary payloads are decoded with the system default encoding.
                        if (dmReport.Binary)
                        {
                            dataFound.Text = System.Text.Encoding.Default.GetString(dmReport.GetBinaryData());
                        }
                        else
                        {
                            dataFound.Text = dmReport.StringData;
                        }
                        DisplayDataMatrixType(dmReport);
                        tempImage.Overlays.Default.AddPolygon(new PolygonContour(dmReport.Corners), Rgb32Value.GreenColor, DrawingMode.DrawValue);
                        // Center the viewer on the barcode.
                        centerPoint.Initialize((dmReport.Corners[0].X + dmReport.Corners[2].X) / 2, (dmReport.Corners[0].Y + dmReport.Corners[2].Y) / 2);
                        // Grade the barcode if requested.
                        if (gradeDMBox.Checked)
                        {
                            AimGradeReport gradeReport = Algorithms.GradeDataMatrixBarcodeAim(tempImage);
                            gradeOverall.Text               = gradeReport.OverallGrade.ToString();
                            gradeDecoding.Text              = gradeReport.DecodingGrade.ToString();
                            gradeSymbolContrast.Text        = gradeReport.SymbolContrastGrade.ToString();
                            gradePrintGrowth.Text           = gradeReport.PrintGrowthGrade.ToString();
                            gradeAxialNonuniformity.Text    = gradeReport.AxialNonuniformityGrade.ToString();
                            gradeUnusedErrorCorrection.Text = gradeReport.UnusedErrorCorrectionGrade.ToString();
                        }
                        else
                        {
                            // Clear any grades left over from a previous image.
                            gradeOverall.Text               = "";
                            gradeDecoding.Text              = "";
                            gradeSymbolContrast.Text        = "";
                            gradePrintGrowth.Text           = "";
                            gradeAxialNonuniformity.Text    = "";
                            gradeUnusedErrorCorrection.Text = "";
                        }
                    }
                    else
                    {
                        dataFound.Text = System.Text.Encoding.Default.GetString(qrReport.GetData());
                        DisplayQRType(qrReport);
                        tempImage.Overlays.Default.AddPolygon(new PolygonContour(qrReport.Corners), Rgb32Value.GreenColor, DrawingMode.DrawValue);
                        // Center the viewer on the barcode.
                        centerPoint.Initialize((qrReport.Corners[0].X + qrReport.Corners[2].X) / 2, (qrReport.Corners[0].Y + qrReport.Corners[2].Y) / 2);
                    }
                    readTime.Text = elapsedTime.ToString();
                    // Copy the annotated image into the viewer and recenter it.
                    Algorithms.Copy(tempImage, imageViewer1.Image);
                    imageViewer1.RefreshImage();
                    imageViewer1.Center.Initialize(centerPoint.X, centerPoint.Y);
                }
            }
            // Set up for next image (wraps around) and restart the timer.
            imageIndex     = (imageIndex + 1) % curImages.Count;
            timer1.Enabled = true;
        }