Example No. 1
        private void btnMatch3_Click(object sender, EventArgs e)
        {
            try
            {
                imageViewer3.Image.Overlays.Default.Clear();
                MatchPatternOptions options = new MatchPatternOptions(MatchMode.RotationInvariant, 1);

                options.MinimumMatchScore = 800;
                options.MinimumContrast   = 0;
                options.SubpixelAccuracy  = false;

                Collection <PatternMatch> matches  = Algorithms.MatchPattern(imageViewer3.Image, imageViewer4.Image, options, imageViewer3.Roi);
                RectangleContour          rectROI1 = (RectangleContour)imageViewer3.Roi.GetContour(0).Shape;
                userProgram.TemplateConfig.Rectangle.Left   = rectROI1.Left;
                userProgram.TemplateConfig.Rectangle.Top    = rectROI1.Top;
                userProgram.TemplateConfig.Rectangle.Width  = rectROI1.Width;
                userProgram.TemplateConfig.Rectangle.Height = rectROI1.Height;

                // Display results.
                foreach (PatternMatch match in matches)
                {
                    imageViewer3.Image.Overlays.Default.AddPolygon(new PolygonContour(match.Corners), Rgb32Value.RedColor);
                    userProgram.TemplateConfig.Position.X = match.Position.X;
                    userProgram.TemplateConfig.Position.Y = match.Position.Y;
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example No. 2
        private bool MatchTemplate(ref ImageViewer imageViewer, ref ImageViewer imageTemplate)
        {
            imageViewer.Image.Overlays.Default.Clear();
            MatchPatternOptions options = new MatchPatternOptions(MatchMode.RotationInvariant, 1);

            options.MinimumMatchScore = 800;
            options.MinimumContrast   = 0;
            options.SubpixelAccuracy  = false;

            Collection <PatternMatch> matches = Algorithms.MatchPattern(imageViewer.Image,
                                                                        imageTemplate.Image, options, imageViewerDatamatrix.Roi);

            if (matches.Count < 1)
            {
                return(false);
            }
            // Display results.
            foreach (PatternMatch match in matches)
            {
                imageViewer.Image.Overlays.Default.AddPolygon(new PolygonContour(match.Corners), Rgb32Value.RedColor);
                userProgram.TemplateConfig.Position.X = match.Position.X;
                userProgram.TemplateConfig.Position.Y = match.Position.Y;
            }
            RectangleContour rectROI1 = (RectangleContour)imageViewer.Roi.GetContour(0).Shape;

            userProgram.TemplateConfig.Rectangle.Left   = rectROI1.Left;
            userProgram.TemplateConfig.Rectangle.Top    = rectROI1.Top;
            userProgram.TemplateConfig.Rectangle.Width  = rectROI1.Width;
            userProgram.TemplateConfig.Rectangle.Height = rectROI1.Height;
            // The template was located and its position recorded, so report success.
            return(true);
        }
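A caller of MatchTemplate would normally branch on its return value, which is why the success path above should report true rather than false. A minimal usage sketch follows; the viewer field names are placeholders, not part of the original project:

        // Hypothetical call site; imageViewerMain and imageViewerTemplate are placeholder fields.
        if (!MatchTemplate(ref imageViewerMain, ref imageViewerTemplate))
        {
            MessageBox.Show("Template not found in the search image.");
        }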
Example No. 3
        private void timer1_Tick(object sender, EventArgs e)
        {
            // Get the current coordinate system.
            CoordinateSystem coordinateSystem = stageMotionInformation[imageNumber];

            // Display the current coordinate system.
            calibrationOriginPixelX.Text  = String.Format("{0:0.0}", coordinateSystem.Origin.X);
            calibrationOriginPixelY.Text  = String.Format("{0:0.0}", coordinateSystem.Origin.Y);
            calibrationAngle.Text         = String.Format("{0:0.0}", coordinateSystem.Angle);
            calibrationAxisReference.Text = (coordinateSystem.AxisOrientation == AxisOrientation.Direct) ? "Direct" : "Indirect";

            // Get the next image in the sequence.
            VisionImage image = GetNextImage();

            // Set the local coordinate system information using simple calibration.
            Algorithms.SetSimpleCalibration(image, coordinateSystem, new GridDescriptor());

            // Find the location of the fiducial in the image.
            MatchPatternOptions matchOptions = new MatchPatternOptions(MatchMode.RotationInvariant);

            matchOptions.MinimumMatchScore = 600;
            Collection <PatternMatch> matches = Algorithms.MatchPattern(image, template, matchOptions);

            // Convert the match position to real-world coordinates.
            PointContour realWorldPoint = Algorithms.ConvertPixelToRealWorldCoordinates(image, matches[0].Position).Points[0];

            // Display the coordinates of the pattern.
            measurementsPixelX.Text      = String.Format("{0:0.0}", matches[0].Position.X);
            measurementsPixelY.Text      = String.Format("{0:0.0}", matches[0].Position.Y);
            measurementsCalibratedX.Text = String.Format("{0:0.0}", realWorldPoint.X);
            measurementsCalibratedY.Text = String.Format("{0:0.0}", realWorldPoint.Y);

            // Overlay the position of the pattern match.
            // First draw the bounding box.
            image.Overlays.Default.AddPolygon(new PolygonContour(matches[0].Corners), Rgb32Value.RedColor);
            // Now draw the center point.
            image.Overlays.Default.AddOval(new OvalContour(matches[0].Position.X - 5, matches[0].Position.Y - 5, 11, 11), Rgb32Value.RedColor);
            // Finally draw the crosshair.
            image.Overlays.Default.AddLine(new LineContour(new PointContour(matches[0].Position.X - 10, matches[0].Position.Y), new PointContour(matches[0].Position.X + 10, matches[0].Position.Y)), Rgb32Value.RedColor);
            image.Overlays.Default.AddLine(new LineContour(new PointContour(matches[0].Position.X, matches[0].Position.Y - 10), new PointContour(matches[0].Position.X, matches[0].Position.Y + 10)), Rgb32Value.RedColor);

            // Overlay the coordinate system on the image.
            OverlayCoordinateSystem(image.Overlays.Default, coordinateSystem);

            // Display the image.
            imageViewer1.Attach(image);
        }
Example No. 4
        private void defineCoordinateSystemButton_Click(object sender, EventArgs e)
        {
            // Read a template file representing a characteristic portion of the object
            // used as a reference coordinate system.
            imageViewer2.Image.ReadVisionFile(System.IO.Path.Combine(imagePath, "template.png"));

            MatchPatternOptions matchOptions = new MatchPatternOptions(MatchMode.RotationInvariant);

            matchOptions.MinimumMatchScore = 700;
            matchOptions.SubpixelAccuracy  = true;
            DrawOptions drawOptions = new DrawOptions();

            drawOptions.ShowResult = true;
            FindTransformWithPattern(imageViewer1.Image, imageViewer2.Image, FindTransformMode.FindReference, matchOptions, drawOptions, transform);

            // Update buttons.
            defineCoordinateSystemButton.Enabled = false;
            defineMeasurementsButton.Enabled     = true;
        }
Example No. 5
        private void runButton_Click(object sender, EventArgs e)
        {
            // Initialize the pattern matching options.
            matchPatternRoi     = new Roi(new Shape[] { new RotatedRectangleContour(new PointContour(210, 220), 130, 110, 0) });
            matchPatternOptions = new MatchPatternOptions(MatchMode.RotationInvariant, 1);
            matchPatternOptions.MinimumMatchScore = 650;
            matchPatternOptions.RotationAngleRanges.Add(new Range(-40, 40));
            matchPatternOptions.SubpixelAccuracy = true;

            // Initialize text options.
            overlayTextOptions = new OverlayTextOptions("Arial Black", 36);

            // Update buttons.
            runButton.Enabled = false;

            // Enable the timer.
            timer1.Enabled = true;
            timer1_Tick(timer1, EventArgs.Empty);
        }
Example No. 6
        private void matchPatternButton_Click(object sender, EventArgs e)
        {
            MatchPatternOptions options = new MatchPatternOptions((MatchMode)(matchMode.SelectedIndex + 1), (int)matchesRequested.Value);

            options.MinimumMatchScore = (int)minimumScore.Value;
            options.MinimumContrast   = (int)minimumContrast.Value;
            options.SubpixelAccuracy  = subpixelAccuracy.Checked;

            // Match
            Collection <PatternMatch> matches = Algorithms.MatchPattern(imageViewerMain.Image, imageViewerPattern.Image, options, imageViewerMain.Roi);

            // Display results.
            imageViewerMain.Image.Overlays.Default.Clear();
            foreach (PatternMatch match in matches)
            {
                imageViewerMain.Image.Overlays.Default.AddPolygon(new PolygonContour(match.Corners), Rgb32Value.RedColor);
            }
            matchesFound.Text          = matches.Count.ToString();
            learnPatternButton.Enabled = false;
        }
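The cast (MatchMode)(matchMode.SelectedIndex + 1) assumes the combo box items are listed in the same order as the MatchMode enumeration and that the enumeration starts at the value 1. A minimal form-load sketch under that assumption; the item labels are illustrative and not taken from the original project:

        // Hypothetical form-load code; assumes MatchMode values are consecutive and start at 1,
        // which is what the SelectedIndex + 1 cast above implies.
        matchMode.Items.Add("Shift Invariant");
        matchMode.Items.Add("Rotation Invariant");
        matchMode.SelectedIndex = 0;
        matchesRequested.Value  = 1;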
Example No. 7
        private void timer1_Tick(object sender, EventArgs e)
        {
            // Get the next image.
            VisionImage image = GetNextImage();

            // Find the new coordinate transformation.
            MatchPatternOptions matchOptions = new MatchPatternOptions(MatchMode.RotationInvariant);

            matchOptions.MinimumMatchScore = 700;
            matchOptions.SubpixelAccuracy  = true;
            DrawOptions drawOptions = new DrawOptions();

            drawOptions.ShowResult = true;
            FindTransformWithPattern(image, imageViewer2.Image, FindTransformMode.UpdateTransform, matchOptions, drawOptions, transform);

            // Initialize search rectangle and search annulus.
            RectangleContour searchRectangle = new RectangleContour(470, 110, 30, 190);
            AnnulusContour   searchAnnulus   = new AnnulusContour(new PointContour(366, 201), 33, 121, 42.71, 314.13);

            // Overlay the search area for the distance measurement.
            drawOptions.ShowEdgesFound  = true;
            drawOptions.ShowSearchArea  = true;
            drawOptions.ShowSearchLines = true;
            drawOptions.ShowResult      = true;
            double distance = MeasureMaximumDistance(image, searchRectangle, RakeDirection.TopToBottom, drawOptions, transform);

            // Overlay the search area for the circle measurement.
            FitCircleReport circleReport = FindCircularEdge(image, searchAnnulus, SpokeDirection.InsideToOutside, drawOptions, transform);

            // Display results.
            distanceBox.Text = String.Format("{0:0.00}", distance);
            centerXBox.Text  = String.Format("{0:0.00}", circleReport.Center.X);
            centerYBox.Text  = String.Format("{0:0.00}", circleReport.Center.Y);
            radiusBox.Text   = String.Format("{0:0.00}", circleReport.Radius);

            // Display the image.
            imageViewer1.Attach(image);
        }
Example No. 8
        private void FindTransformWithPattern(VisionImage image, VisionImage template, FindTransformMode mode, MatchPatternOptions matchOptions, DrawOptions drawOptions, CoordinateTransform transform)
        {
            // Find the pattern in the image.
            Collection <PatternMatch> matches = Algorithms.MatchPattern(image, template, matchOptions);

            // If the pattern was found:
            if (matches.Count > 0)
            {
                // The points in the Corners collection are returned like this:
                //
                //   0 - 1
                //   |   |
                //   3 - 2
                //
                // Our main axis will be along the line from point 3 to point 2 and
                // our secondary axis will be from point 3 to point 0. The origin will
                // be at point 3.
                LineContour mainAxis      = new LineContour(matches[0].Corners[3], matches[0].Corners[2]);
                LineContour secondaryAxis = new LineContour(matches[0].Corners[3], matches[0].Corners[0]);

                // Fill in the coordinate transform with the data obtained by the pattern matching.
                transform.MeasurementSystem.Origin          = matches[0].Corners[3];
                transform.MeasurementSystem.Angle           = matches[0].Rotation;
                transform.MeasurementSystem.AxisOrientation = AxisOrientation.Direct;

                // If this is the first run, fill in the reference system too.
                if (mode == FindTransformMode.FindReference)
                {
                    transform.ReferenceSystem.Origin          = matches[0].Corners[3];
                    transform.ReferenceSystem.Angle           = matches[0].Rotation;
                    transform.ReferenceSystem.AxisOrientation = AxisOrientation.Direct;
                }

                // Draw the results on the image.
                if (drawOptions.ShowResult)
                {
                    // Draw the origin.
                    image.Overlays.Default.AddRectangle(new RectangleContour(mainAxis.Start.X - 2, mainAxis.Start.Y - 2, 5, 5), Rgb32Value.RedColor, DrawingMode.DrawValue);

                    // Draw each axis.
                    image.Overlays.Default.AddLine(mainAxis, Rgb32Value.RedColor);
                    DrawArrow(image.Overlays.Default, mainAxis, Rgb32Value.RedColor);
                    image.Overlays.Default.AddLine(secondaryAxis, Rgb32Value.RedColor);
                    DrawArrow(image.Overlays.Default, secondaryAxis, Rgb32Value.RedColor);
                }
            }
        }
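The corner-ordering comment above is what ties the match result to the coordinate transform: the main axis runs from corner 3 to corner 2, so the rotation stored in the transform should agree with the direction of that line. As a minimal sketch of that relationship, the angle could also be derived from the corners themselves; the image-coordinate sign convention used here is an assumption, not something verified against the library:

        // Hypothetical check: derive the main-axis angle from two corners and compare it with
        // PatternMatch.Rotation. Image y grows downward, so dy is negated; the exact sign
        // convention used by the library is assumed.
        private static double MainAxisAngleDegrees(PointContour start, PointContour end)
        {
            double dx = end.X - start.X;
            double dy = end.Y - start.Y;
            return Math.Atan2(-dy, dx) * 180.0 / Math.PI;
        }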
Example No. 9
        private Collection <PatternMatch> IVA_MatchPattern(VisionImage image,
                                                           IVA_Data ivaData,
                                                           string templatePath,
                                                           MatchMode matchMode,
                                                           bool subpixel,
                                                           int[] angleRangeMin,
                                                           int[] angleRangeMax,
                                                           int matchesRequested,
                                                           double score,
                                                           Roi roi,
                                                           double matchOffset_x,
                                                           double matchOffset_y,
                                                           int stepIndex)
        {
            using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
            {
                int numObjectResults = 4;
                Collection <PatternMatch> patternMatchingResults = new Collection <PatternMatch>();
                CoordinatesReport         realWorldPosition      = new CoordinatesReport();
                PointContour matchLocation = new PointContour();

                // Read the image template.
                imageTemplate.ReadVisionFile(templatePath);
                //Algorithms.LearnPattern(imageTemplate);

                // If the image is calibrated, we also need to log the calibrated position (x and y) -> 6 results instead of 4
                if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                {
                    numObjectResults = 6;
                }

                // Fill in the Pattern Matching options.
                MatchPatternOptions matchPatternOptions = new MatchPatternOptions(matchMode, matchesRequested);
                matchPatternOptions.MinimumContrast  = 0;
                matchPatternOptions.SubpixelAccuracy = subpixel;
                for (int i = 0; i < 2; ++i)
                {
                    matchPatternOptions.RotationAngleRanges.Add(new Range(angleRangeMin[i], angleRangeMax[i]));
                }
                matchPatternOptions.MinimumMatchScore = score;

                // Searches for areas in the image that match a given pattern.
                patternMatchingResults = Algorithms.MatchPattern2(image, imageTemplate, matchPatternOptions, roi);

                // ////////////////////////////////////////
                // Store the results in the data structure.
                // ////////////////////////////////////////

                // First, delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, stepIndex);

                if (patternMatchingResults.Count > 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of objects", patternMatchingResults.Count));

                    for (int i = 0; i < patternMatchingResults.Count; ++i)
                    {
                        // Adjust the match location using the specified offsets.
                        matchLocation.X = patternMatchingResults[i].Position.X + Functions.IVA_ComputePMOffset(matchOffset_x, matchOffset_y, patternMatchingResults[i].Rotation).X;
                        matchLocation.Y = patternMatchingResults[i].Position.Y + Functions.IVA_ComputePMOffset(matchOffset_x, matchOffset_y, patternMatchingResults[i].Rotation).Y;

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", i + 1), matchLocation.X));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", i + 1), matchLocation.Y));

                        // If the image is calibrated, convert the pixel values to real world coordinates.
                        if (numObjectResults == 6)
                        {
                            realWorldPosition = Algorithms.ConvertPixelToRealWorldCoordinates(image, new Collection <PointContour>(new PointContour[] { matchLocation }));

                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", i + 1), realWorldPosition.Points[0].X));
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", i + 1), realWorldPosition.Points[0].Y));
                        }

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", i + 1), patternMatchingResults[i].Rotation));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Score", i + 1), patternMatchingResults[i].Score));
                    }
                }

                return(patternMatchingResults);
            }
        }
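Functions.IVA_ComputePMOffset is called twice per match with identical arguments, so caching its result in a local would avoid the duplicate work. Its implementation is not shown here, but an offset expressed in the template frame would typically be rotated by the match angle before being added to the match position. A sketch of that rotation follows, with the sign convention treated as an assumption rather than the helper's verified behavior:

        // Hypothetical sketch of rotating a template-frame offset (offsetX, offsetY) by the
        // match rotation in degrees. The sign convention of IVA_ComputePMOffset is assumed.
        private static PointContour RotateOffset(double offsetX, double offsetY, double rotationDegrees)
        {
            double theta = rotationDegrees * Math.PI / 180.0;
            return new PointContour(
                offsetX * Math.Cos(theta) - offsetY * Math.Sin(theta),
                offsetX * Math.Sin(theta) + offsetY * Math.Cos(theta));
        }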
Example No. 10
        private void timer1_Tick(object sender, EventArgs e)
        {
            // Get the next image.
            VisionImage image = GetNextImage();

            // Locate the template in the image.
            MatchPatternOptions matchOptions = new MatchPatternOptions(MatchMode.RotationInvariant, 1);

            matchOptions.MinimumMatchScore = 500;
            matchOptions.RotationAngleRanges.Add(new Range(-5, 5));
            Collection <PatternMatch> matches = Algorithms.MatchPattern(image, template, matchOptions);

            if (matches.Count != 1)
            {
                // If the template could not be located, the part fails.
                passFailLed.Value = false;
            }
            else
            {
                // Perform the inspection
                InspectionAlignment alignment         = new InspectionAlignment(matches[0].Position, matches[0].Rotation);
                InspectionOptions   inspectionOptions = new InspectionOptions();
                inspectionOptions.EdgeThicknessToIgnore = 1;
                using (VisionImage defectImage = new VisionImage())
                {
                    Algorithms.CompareGoldenTemplate(image, template, defectImage, alignment, inspectionOptions);

                    // Remove small defects from the image.
                    Algorithms.RemoveParticle(defectImage, defectImage);

                    // If there are still defects in the image, the part does not pass inspection.
                    if (Algorithms.ParticleReport(defectImage).Count > 0)
                    {
                        passFailLed.Value = false;
                    }
                    else
                    {
                        passFailLed.Value = true;
                    }

                    // Make a custom palette that displays the defects of interest (dark defects
                    // are set to 1 in the defect image and bright defects are set to 2):
                    // 1. Set the value of all non-defect pixels in the defect image to 255.
                    // 2. Raise every pixel in the inspected image to at least 3 so that no original
                    //    pixel is left with a value of 1 or 2 (those values are reserved for defects).
                    // 3. Make the requested defects visible by modifying the palette for the Viewer
                    // control.  Start with a grayscale palette.  Then modify the palette for the
                    // defect values (1 or 2) by changing them to a visible color.
                    // 4. Merge the defect image with the inspected image and place the output in
                    // the image attached to the viewer.
                    Algorithms.UserLookup(defectImage, defectImage, lookupTable);
                    Algorithms.Max(image, new PixelValue(3), image);
                    Palette palette = new Palette(PaletteType.Gray);
                    if (darkSwitch.Value)
                    {
                        palette.Entries[1] = new PaletteEntry(255, palette.Entries[1].Green, palette.Entries[1].Blue);
                    }
                    if (brightSwitch.Value)
                    {
                        palette.Entries[2] = new PaletteEntry(palette.Entries[2].Red, 255, palette.Entries[2].Blue);
                    }
                    imageViewer2.Palette = palette;
                    Algorithms.Min(image, defectImage, image);
                    imageViewer2.Attach(image);
                }
            }
        }
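The lookupTable field passed to Algorithms.UserLookup is defined elsewhere in this project. Step 1 of the comment implies a table that sends the background value 0 to 255 while leaving the defect values 1 and 2 untouched; a sketch of such a table as a plain array is shown below. The concrete table type UserLookup expects is not visible in this snippet, so the array form is an assumption:

            // Hypothetical 256-entry lookup: background (0) becomes 255; all other values,
            // including the defect codes 1 and 2, pass through unchanged.
            short[] lut = new short[256];
            for (int i = 0; i < 256; i++)
            {
                lut[i] = (short)i;
            }
            lut[0] = 255;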