Example #1
        private static Collection <double> IVA_GetDistance(VisionImage image,
                                                           IVA_Data ivaData,
                                                           int stepIndex,
                                                           int stepIndex1,
                                                           int resultIndex1,
                                                           int stepIndex2,
                                                           int resultIndex2)
        {
            Collection <PointContour> points = new Collection <PointContour>();

            points.Add(Functions.IVA_GetPoint(ivaData, stepIndex1, resultIndex1));
            points.Add(Functions.IVA_GetPoint(ivaData, stepIndex2, resultIndex2));

            // Computes the distance between the points.
            Collection <double> distances = Algorithms.FindPointDistances(points);

            // Store the results in the data structure.
            ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Distance (Pix.)", distances[0]));

            // If the image is calibrated, compute the real world distance.
            if ((image.InfoTypes & InfoTypes.Calibration) != 0)
            {
                CoordinatesReport   realWorldPosition  = Algorithms.ConvertPixelToRealWorldCoordinates(image, points);
                Collection <double> calibratedDistance = Algorithms.FindPointDistances(realWorldPosition.Points);
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Distance (Calibrated)", calibratedDistance[0]));
                distances.Add(calibratedDistance[0]);
            }

            return(distances);
        }
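The helper above is a thin wrapper around Algorithms.FindPointDistances plus the IVA_Data bookkeeping. Below is a minimal sketch of the underlying distance call on two placeholder points, not tied to any IVA_Data step; the method name is illustrative and the usual NationalInstruments.Vision and System.Collections.ObjectModel usings are assumed.

        private static double SketchPointDistance()
        {
            // Two placeholder points 200 pixels apart (not taken from step results).
            Collection <PointContour> points = new Collection <PointContour>();
            points.Add(new PointContour(100, 50));
            points.Add(new PointContour(100, 250));

            // FindPointDistances returns the distance between each pair of consecutive points.
            Collection <double> distances = Algorithms.FindPointDistances(points);
            return distances[0];   // 200.0
        }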
        public static PaletteType ProcessImage(VisionImage image)
        {
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(3, 0);

            // Extract Color Plane
            using (VisionImage plane = new VisionImage(ImageType.U8, 7))
            {
                // Extract the green color plane and copy it to the main image.
                Algorithms.ExtractColorPlanes(image, ColorMode.Rgb, null, plane, null);
                Algorithms.Copy(plane, image);
            }

            // Creates a new, empty region of interest.
            Roi roi = new Roi();
            // Creates a new RectangleContour using the given values.
            RectangleContour vaRect = new RectangleContour(20, 20, 2552, 1904);

            roi.Add(vaRect);
            // Geometric Matching
            string       vaTemplateFile = @"./VisionModel/Polygon_20.5M/Mode.png";
            CurveOptions vaCurveOptions = new CurveOptions();

            vaCurveOptions.ColumnStepSize     = 15;
            vaCurveOptions.ExtractionMode     = ExtractionMode.NormalImage;
            vaCurveOptions.FilterSize         = EdgeFilterSize.Normal;
            vaCurveOptions.MaximumEndPointGap = 10;
            vaCurveOptions.MinimumLength      = 20;
            vaCurveOptions.RowStepSize        = 15;
            vaCurveOptions.Threshold          = 145;

            MatchGeometricPatternEdgeBasedOptions matchGPMOptions = new MatchGeometricPatternEdgeBasedOptions();

            matchGPMOptions.Advanced.ContrastMode  = ContrastMode.Original;
            matchGPMOptions.Advanced.MatchStrategy = GeometricMatchingSearchStrategy.Balanced;
            matchGPMOptions.MinimumMatchScore      = 800;
            matchGPMOptions.Mode = GeometricMatchModes.RotationInvariant;
            matchGPMOptions.NumberOfMatchesRequested = 1;
            double[] vaRangesMin = { -20, 0, 50, 0 };
            double[] vaRangesMax = { 20, 0, 200, 50 };
            matchGPMOptions.OcclusionRange = new Range(vaRangesMin[3], vaRangesMax[3]);
            matchGPMOptions.RotationAngleRanges.Add(new Range(vaRangesMin[0], vaRangesMax[0]));
            matchGPMOptions.RotationAngleRanges.Add(new Range(vaRangesMin[1], vaRangesMax[1]));
            matchGPMOptions.ScaleRange       = new Range(vaRangesMin[2], vaRangesMax[2]);
            matchGPMOptions.SubpixelAccuracy = true;

            gpm2Results = IVA_MatchGeometricPattern2(image, vaTemplateFile, vaCurveOptions, matchGPMOptions, ivaData, 2, roi);

            roi.Dispose();

            // Dispose the IVA_Data structure.
            ivaData.Dispose();

            // Return the palette type of the final image.
            return(PaletteType.Gray);
        }
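A hedged driver sketch for the ProcessImage entry point above, assuming it lives in the same static class; the image path is whatever the caller supplies. It only reuses calls that already appear in these examples (the VisionImage constructor and ReadFile).

        public static void RunExample1(string imagePath)
        {
            // Load the image from disk and run the example's processing chain.
            using (VisionImage image = new VisionImage(ImageType.U8, 7))
            {
                image.ReadFile(imagePath);
                PaletteType palette = ProcessImage(image);
                // 'palette' indicates which display palette suits the processed image.
            }
        }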
Example #3
        public static PaletteType ProcessImage(VisionImage image)
        {
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(2, 0);

            // Creates a new, empty region of interest.
            Roi roi = new Roi();

            // Create an edge-tracing line: Start (229, 40), End (229, 300)
            PointContour vaStartPoint = new PointContour(229, 40);
            PointContour vaEndPoint   = new PointContour(229, 300);
            LineContour  vaLine       = new LineContour(vaStartPoint, vaEndPoint);

            roi.Add(vaLine);

            // Edge detection using the simple edge detection method
            SimpleEdgeOptions vaSimpleEdgeOptions = new SimpleEdgeOptions();

            vaSimpleEdgeOptions.Process    = EdgeProcess.All;
            vaSimpleEdgeOptions.Type       = LevelType.Absolute;
            vaSimpleEdgeOptions.Threshold  = 128;
            vaSimpleEdgeOptions.Hysteresis = 2;
            vaSimpleEdgeOptions.SubPixel   = true;
            simpleEdges = IVA_SimpleEdge(image, roi, vaSimpleEdgeOptions, ivaData, 0);
            roi.Dispose();

            // Caliper
            // Delete all the results of this step (from a previous iteration)
            Functions.IVA_DisposeStepResults(ivaData, 1);

            // Computes the distance between two points.
            Collection <double> vaDistance = IVA_GetDistance(image, ivaData, 1, 0, 3, 0, 5);

            caliperDistance = vaDistance[0];

            // Dispose the IVA_Data structure.
            ivaData.Dispose();

            // Display the relevant AOI (area of interest) results.
            MessageBox.Show("Coordinate point 1: " + simpleEdges[2].ToString() + "\r\n" +
                            "Coordinate point 2: " + simpleEdges[0].ToString() + "\r\n" +
                            "Coordinate point 3: " + simpleEdges[1].ToString() + "\r\n" + "\r\n" +
                            "Distance measurement: " + caliperDistance.ToString());

            // Draw the inspection line (the edge-tracing line), for example:
            //Graphics g = Graphics.FromImage(System.Drawing.Image.FromFile(fileName));
            //g.DrawLine(Pens.Red, 0, 0, 100, 100);

            // Return the palette type of the final image.
            return(PaletteType.Gray);
        }
Example #4
        public string ProcessQR(VisionImage image, List <QRConfig> qRConfigs)
        {
            string qRInfo = string.Empty;
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(1, 0);

            for (int i = 0; i < qRConfigs.Count; i++)
            {
                // Creates a new, empty region of interest.
                Roi roi = new Roi();
                //// Creates a new RectangleContour using the given values.
                //RectangleContour vaRect = new RectangleContour(720, 96, 1792, 1240);
                RectangleContour vaRect = new RectangleContour(qRConfigs[i].Rectangle.Left,
                                                               qRConfigs[i].Rectangle.Top, qRConfigs[i].Rectangle.Width, qRConfigs[i].Rectangle.Height);
                roi.Add(vaRect);
                image.Overlays.Default.AddRoi(roi);
                // Read QR Code
                QRDescriptionOptions vaQROptions = new QRDescriptionOptions();
                vaQROptions.Dimensions = qRConfigs[i].QRDimension;
                vaQROptions.MirrorMode = QRMirrorMode.AutoDetect;
                vaQROptions.ModelType  = QRModelType.AutoDetect;
                vaQROptions.Polarity   = qRConfigs[i].Polarity;
                QRSizeOptions   vaQRSizeOptions   = new QRSizeOptions(3, 15);
                QRSearchOptions vaQRSearchOptions = new QRSearchOptions();
                vaQRSearchOptions.CellFilterMode     = QRCellFilterMode.AutoDetect;
                vaQRSearchOptions.CellSampleSize     = qRConfigs[i].CellSize;
                vaQRSearchOptions.DemodulationMode   = QRDemodulationMode.AutoDetect;
                vaQRSearchOptions.EdgeThreshold      = 30;
                vaQRSearchOptions.RotationMode       = QRRotationMode.Unlimited;
                vaQRSearchOptions.SkewDegreesAllowed = 10;
                vaQRSearchOptions.SkipLocation       = false;
                vaQRCode = Algorithms.ReadQRCode(image, roi, vaQROptions, vaQRSizeOptions, vaQRSearchOptions);
                if (vaQRCode.Found)
                {
                    image.Overlays.Default.AddPolygon(new PolygonContour(vaQRCode.Corners), Rgb32Value.RedColor, DrawingMode.DrawValue);
                }
                System.Text.ASCIIEncoding vaASCIIEncoding = new System.Text.ASCIIEncoding();
                vaQRCodeData = vaASCIIEncoding.GetString(vaQRCode.GetData());

                qRInfo += string.Format("{0},", vaQRCodeData);

                roi.Dispose();
            }
            // Trim the trailing comma (also safe when no QR codes were read).
            qRInfo = qRInfo.TrimEnd(',');
            // Dispose the IVA_Data structure.
            ivaData.Dispose();

            // Return the concatenated QR code data.
            return(qRInfo);
        }
Example #5
        public static PaletteType ProcessImage(VisionImage image)
        {
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(4, 0);

            // Creates a new, empty region of interest.
            Roi roi = new Roi();
            // Creates a new AnnulusContour using the given values.
            PointContour   vaCenter = new PointContour(1295, 970);
            AnnulusContour vaOval   = new AnnulusContour(vaCenter, 412, 954, 0, 0);

            roi.Add(vaOval);
            // Find Circular Edge
            EdgeOptions vaOptions = new EdgeOptions();

            vaOptions.ColumnProcessingMode = ColumnProcessingMode.Average;
            vaOptions.InterpolationType    = InterpolationMethod.Bilinear;
            vaOptions.KernelSize           = 3;
            vaOptions.MinimumThreshold     = 50;
            vaOptions.Polarity             = EdgePolaritySearchMode.Rising;
            vaOptions.Width = 3;
            CircularEdgeFitOptions vaFitOptions = new CircularEdgeFitOptions();

            vaFitOptions.ProcessType    = RakeProcessType.GetBestEdges;
            vaFitOptions.StepSize       = 10;
            vaFitOptions.MaxPixelRadius = 3;

            vaCircularEdgeReport = IVA_FindCircularEdge(image, roi, SpokeDirection.InsideToOutside, vaOptions, vaFitOptions, ivaData, 1);

            roi.Dispose();

            // Overlays a line onto the image.
            int[]  xCoords = { 1296, 1296 };
            int[]  yCoords = { 0, 1944 };
            byte[] vaColor = { 6, 0, 239 };
            IVA_OverlayLine(image, xCoords, yCoords, vaColor);

            // Overlays a line onto the image.
            int[]  xCoords2 = { 0, 2592 };
            int[]  yCoords2 = { 972, 972 };
            byte[] vaColor2 = { 6, 0, 239 };
            IVA_OverlayLine(image, xCoords2, yCoords2, vaColor2);

            // Dispose the IVA_Data structure.
            ivaData.Dispose();

            // Return the palette type of the final image.
            return(PaletteType.Gray);
        }
Example #6
        private static Collection <PointContour> IVA_SimpleEdge(VisionImage image,
                                                                Roi roi,
                                                                SimpleEdgeOptions simpleEdgeOptions,
                                                                IVA_Data ivaData,
                                                                int stepIndex)
        {
            // Calculates the profile of the pixels along the edge of each contour in the region of interest.
            using (VisionImage monoImage = new VisionImage(ImageType.U8, 7))
            {
                if (image.Type == ImageType.Rgb32 || image.Type == ImageType.Hsl32)
                {
                    Algorithms.ExtractColorPlanes(image, ColorMode.Hsl, null, null, monoImage);
                }
                else
                {
                    Algorithms.Copy(image, monoImage);
                }

                RoiProfileReport roiProfile = Algorithms.RoiProfile(monoImage, roi);

                // Finds prominent edges along the array of pixel coordinates.
                Collection <PointContour> edges = Algorithms.SimpleEdge(monoImage, roiProfile.Pixels, simpleEdgeOptions);

                // Store the results in the data structure.

                // First, delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, stepIndex);

                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of Edges", edges.Count));

                for (int i = 0; i < edges.Count; ++i)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Edge {0}.X Position (Pix.)", i + 1), edges[i].X));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Edge {0}.Y Position (Pix.)", i + 1), edges[i].Y));

                    // If the image is calibrated, convert the pixel values to real world coordinates.
                    if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                    {
                        PointContour      edgeLocation      = new PointContour(edges[i].X, edges[i].Y);
                        CoordinatesReport realWorldPosition = Algorithms.ConvertPixelToRealWorldCoordinates(image, new Collection <PointContour>(new PointContour[] { edgeLocation }));

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Edge {0}.X Position (World)", i + 1), realWorldPosition.Points[0].X));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Edge {0}.Y Position (World)", i + 1), realWorldPosition.Points[0].Y));
                    }
                }
                return(edges);
            }
        }
        private static void IVA_CoordSys(int coordSysIndex,
                                         int originStepIndex,
                                         int originResultIndex,
                                         int angleStepIndex,
                                         int angleResultIndex,
                                         double baseOriginX,
                                         double baseOriginY,
                                         double baseAngle,
                                         AxisOrientation baseAxisOrientation,
                                         int mode,
                                         IVA_Data ivaData)
        {
            ivaData.baseCoordinateSystems[coordSysIndex].Origin.X        = baseOriginX;
            ivaData.baseCoordinateSystems[coordSysIndex].Origin.Y        = baseOriginY;
            ivaData.baseCoordinateSystems[coordSysIndex].Angle           = baseAngle;
            ivaData.baseCoordinateSystems[coordSysIndex].AxisOrientation = baseAxisOrientation;

            ivaData.MeasurementSystems[coordSysIndex].Origin.X        = baseOriginX;
            ivaData.MeasurementSystems[coordSysIndex].Origin.Y        = baseOriginY;
            ivaData.MeasurementSystems[coordSysIndex].Angle           = baseAngle;
            ivaData.MeasurementSystems[coordSysIndex].AxisOrientation = baseAxisOrientation;

            switch (mode)
            {
            // Horizontal motion
            case 0:
                ivaData.MeasurementSystems[coordSysIndex].Origin.X = Functions.IVA_GetNumericResult(ivaData, originStepIndex, originResultIndex);
                break;

            // Vertical motion
            case 1:
                ivaData.MeasurementSystems[coordSysIndex].Origin.Y = Functions.IVA_GetNumericResult(ivaData, originStepIndex, originResultIndex + 1);
                break;

            // Horizontal and vertical motion
            case 2:
                ivaData.MeasurementSystems[coordSysIndex].Origin = Functions.IVA_GetPoint(ivaData, originStepIndex, originResultIndex);
                break;

            // Horizontal, vertical and angular motion
            case 3:
                ivaData.MeasurementSystems[coordSysIndex].Origin = Functions.IVA_GetPoint(ivaData, originStepIndex, originResultIndex);
                ivaData.MeasurementSystems[coordSysIndex].Angle  = Functions.IVA_GetNumericResult(ivaData, angleStepIndex, angleResultIndex);
                break;
            }
        }
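The mode parameter of IVA_CoordSys selects which components of the measurement coordinate system are updated from earlier step results: 0 updates the X origin only, 1 the Y origin only, 2 the full origin, and 3 the origin and the angle. A hedged sketch of a mode-3 call follows, mirroring the call in Example #16; the step/result indices and the template origin are placeholders.

        private static void SketchCoordSysMode3(IVA_Data ivaData, double templateOriginX, double templateOriginY)
        {
            IVA_CoordSys(0,                                // coordinate system index
                         0, 1,                             // origin from step 0, result 1
                         0, 3,                             // angle from step 0, result 3
                         templateOriginX, templateOriginY, // base origin of the reference system
                         0, AxisOrientation.Direct,        // base angle and axis orientation
                         3,                                // mode 3: horizontal, vertical and angular motion
                         ivaData);
        }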
Example #8
        private static FindEdgeReport IVA_FindEdge(VisionImage image,
                                                   Roi roi,
                                                   RakeDirection direction,
                                                   EdgeOptions options,
                                                   StraightEdgeOptions straightEdgeOptions,
                                                   IVA_Data ivaData,
                                                   int stepIndex)
        {
            // First, delete all the results of this step (from a previous iteration)
            Functions.IVA_DisposeStepResults(ivaData, stepIndex);

            // Find the Edge
            FindEdgeOptions edgeOptions = new FindEdgeOptions(direction);

            edgeOptions.EdgeOptions         = options;
            edgeOptions.StraightEdgeOptions = straightEdgeOptions;
            FindEdgeReport lineReport = Algorithms.FindEdge(image, roi, edgeOptions);

            // If there was at least one line, get data
            if (lineReport.StraightEdges.Count >= 1)
            {
                // Store the results in the data structure.
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Point 1.X Position (Pix.)", lineReport.StraightEdges[0].StraightEdge.Start.X));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Point 1.Y Position (Pix.)", lineReport.StraightEdges[0].StraightEdge.Start.Y));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Point 2.X Position (Pix.)", lineReport.StraightEdges[0].StraightEdge.End.X));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Point 2.Y Position (Pix.)", lineReport.StraightEdges[0].StraightEdge.End.Y));
                if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Point 1.X Position (World)", lineReport.StraightEdges[0].CalibratedStraightEdge.Start.X));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Point 1.Y Position (World)", lineReport.StraightEdges[0].CalibratedStraightEdge.Start.Y));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Point 2.X Position (World)", lineReport.StraightEdges[0].CalibratedStraightEdge.End.X));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Point 2.Y Position (World)", lineReport.StraightEdges[0].CalibratedStraightEdge.End.Y));
                }

                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Angle", lineReport.StraightEdges[0].Angle));
                if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Angle (World)", lineReport.StraightEdges[0].CalibratedAngle));
                }
            }
            return(lineReport);
        }
Example #9
        private static Collection <PointContour> IVA_GetIntersection(VisionImage image,
                                                                     IVA_Data ivaData,
                                                                     int stepIndex,
                                                                     int stepIndex1,
                                                                     int resultIndex1,
                                                                     int stepIndex2,
                                                                     int resultIndex2,
                                                                     int stepIndex3,
                                                                     int resultIndex3,
                                                                     int stepIndex4,
                                                                     int resultIndex4)

        {
            // Caliper: Lines Intersection
            // Computes the intersection point between two lines.
            PointContour point1 = Functions.IVA_GetPoint(ivaData, stepIndex1, resultIndex1);
            PointContour point2 = Functions.IVA_GetPoint(ivaData, stepIndex2, resultIndex2);
            PointContour point3 = Functions.IVA_GetPoint(ivaData, stepIndex3, resultIndex3);
            PointContour point4 = Functions.IVA_GetPoint(ivaData, stepIndex4, resultIndex4);

            LineContour line1 = new LineContour(point1, point2);
            LineContour line2 = new LineContour(point3, point4);

            Collection <PointContour> intersectionPoint = new Collection <PointContour>();

            intersectionPoint.Add(Algorithms.FindIntersectionPoint(line1, line2));

            // Store the results in the data structure.
            ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Intersection Point X Position (Pix.)", intersectionPoint[0].X));
            ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Intersection Point X Position (Pix.)", intersectionPoint[0].Y));

            // If the image is calibrated, compute the real world position.
            if ((image.InfoTypes & InfoTypes.Calibration) != 0)
            {
                CoordinatesReport realWorldPosition = Algorithms.ConvertPixelToRealWorldCoordinates(image, intersectionPoint);
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Intersection Point X Position (Calibrated)", realWorldPosition.Points[0].X));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Intersection Point X Position (Calibrated)", realWorldPosition.Points[0].Y));
                intersectionPoint.Add(realWorldPosition.Points[0]);
            }

            return(intersectionPoint);
        }
        public static PaletteType ProcessImage(VisionImage image)
        {
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(2, 0);

            // Creates a new, empty region of interest.
            Roi roi = new Roi();
            // Creates a new LineContour using the given values.
            PointContour vaStartPoint = new PointContour(229, 42);
            PointContour vaEndPoint   = new PointContour(229, 298);
            LineContour  vaLine       = new LineContour(vaStartPoint, vaEndPoint);

            roi.Add(vaLine);
            // Edge Detector - Simple Edge
            SimpleEdgeOptions vaSimpleEdgeOptions = new SimpleEdgeOptions();

            vaSimpleEdgeOptions.Process    = EdgeProcess.All;
            vaSimpleEdgeOptions.Type       = LevelType.Absolute;
            vaSimpleEdgeOptions.Threshold  = 128;
            vaSimpleEdgeOptions.Hysteresis = 2;
            vaSimpleEdgeOptions.SubPixel   = true;
            simpleEdges = IVA_SimpleEdge(image, roi, vaSimpleEdgeOptions, ivaData, 0);
            roi.Dispose();

            // Caliper
            // Delete all the results of this step (from a previous iteration)
            Functions.IVA_DisposeStepResults(ivaData, 1);

            // Computes the distance between two points.
            Collection <double> vaDistance = IVA_GetDistance(image, ivaData, 1, 0, 3, 0, 5);

            caliperDistance = vaDistance[0];

            // Dispose the IVA_Data structure.
            ivaData.Dispose();

            // Return the palette type of the final image.
            return(PaletteType.Gray);
        }
        private static FindCircularEdgeReport IVA_FindCircularEdge(VisionImage image,
                                                                   Roi roi,
                                                                   SpokeDirection direction,
                                                                   EdgeOptions options,
                                                                   CircularEdgeFitOptions fitOptions,
                                                                   IVA_Data ivaData,
                                                                   int stepIndex)
        {
            // First, delete all the results of this step (from a previous iteration)
            Functions.IVA_DisposeStepResults(ivaData, stepIndex);
            FindCircularEdgeOptions circleOptions = new FindCircularEdgeOptions(direction);

            circleOptions.EdgeOptions = options;
            // Calculate the edge locations and fit the circle.
            FindCircularEdgeReport circleReport = Algorithms.FindCircularEdge(image, roi, circleOptions, fitOptions);

            // If a circle was found, add results
            if (circleReport.CircleFound)
            {
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Center.X Position (Pix.)", circleReport.Center.X));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Center.Y Position (Pix.)", circleReport.Center.Y));
                if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Center.X Position (Calibrated)", circleReport.CenterCalibrated.X));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Center.Y Position (Calibrated)", circleReport.CenterCalibrated.Y));
                }
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Radius (Pix.)", circleReport.Radius));
                if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Radius (Calibrated)", circleReport.RadiusCalibrated));
                }
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Deviation", circleReport.Roundness));
            }
            return(circleReport);
        }
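A small caller-side sketch for the FindCircularEdgeReport returned above: check CircleFound before reading the fit, exactly as the helper does when logging results. The method name is illustrative; Center, Radius and Roundness are the same properties used above.

        private static void SketchUseCircleReport(FindCircularEdgeReport report)
        {
            if (report.CircleFound)
            {
                double centerX   = report.Center.X;
                double centerY   = report.Center.Y;
                double radius    = report.Radius;      // in pixels
                double deviation = report.Roundness;   // logged above as "Deviation"
                // ... use the fitted circle ...
            }
        }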
Example #12
        private Collection <PatternMatch> IVA_MatchPattern(VisionImage image,
                                                           IVA_Data ivaData,
                                                           string templatePath,
                                                           MatchMode matchMode,
                                                           bool subpixel,
                                                           int[] angleRangeMin,
                                                           int[] angleRangeMax,
                                                           int matchesRequested,
                                                           double score,
                                                           Roi roi,
                                                           double matchOffset_x,
                                                           double matchOffset_y,
                                                           int stepIndex)
        {
            using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
            {
                int numObjectResults = 4;
                Collection <PatternMatch> patternMatchingResults = new Collection <PatternMatch>();
                CoordinatesReport         realWorldPosition      = new CoordinatesReport();
                PointContour matchLocation = new PointContour();

                // Read the image template.
                imageTemplate.ReadVisionFile(templatePath);
                //Algorithms.LearnPattern(imageTemplate);

                // If the image is calibrated, we also need to log the calibrated position (x and y) -> 6 results instead of 4
                if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                {
                    numObjectResults = 6;
                }

                // Fill in the Pattern Matching options.
                MatchPatternOptions matchPatternOptions = new MatchPatternOptions(matchMode, matchesRequested);
                matchPatternOptions.MinimumContrast  = 0;
                matchPatternOptions.SubpixelAccuracy = subpixel;
                for (int i = 0; i < 2; ++i)
                {
                    matchPatternOptions.RotationAngleRanges.Add(new Range(angleRangeMin[i], angleRangeMax[i]));
                }
                matchPatternOptions.MinimumMatchScore = score;

                // Searches for areas in the image that match a given pattern.
                patternMatchingResults = Algorithms.MatchPattern2(image, imageTemplate, matchPatternOptions, roi);

                // ////////////////////////////////////////
                // Store the results in the data structure.
                // ////////////////////////////////////////

                // First, delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, stepIndex);

                if (patternMatchingResults.Count > 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of objects", patternMatchingResults.Count));

                    for (int i = 0; i < patternMatchingResults.Count; ++i)
                    {
                        // Adjust the match location using the specified offsets.
                        matchLocation.X = patternMatchingResults[i].Position.X + Functions.IVA_ComputePMOffset(matchOffset_x, matchOffset_y, patternMatchingResults[i].Rotation).X;
                        matchLocation.Y = patternMatchingResults[i].Position.Y + Functions.IVA_ComputePMOffset(matchOffset_x, matchOffset_y, patternMatchingResults[i].Rotation).Y;

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", i + 1), matchLocation.X));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", i + 1), matchLocation.Y));

                        // If the image is calibrated, convert the pixel values to real world coordinates.
                        if (numObjectResults == 6)
                        {
                            realWorldPosition = Algorithms.ConvertPixelToRealWorldCoordinates(image, new Collection <PointContour>(new PointContour[] { matchLocation }));

                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", i + 1), realWorldPosition.Points[0].X));
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", i + 1), realWorldPosition.Points[0].Y));
                        }

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", i + 1), patternMatchingResults[i].Rotation));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Score", i + 1), patternMatchingResults[i].Score));
                    }
                }

                return(patternMatchingResults);
            }
        }
Example #13
        private static Collection <PatternMatchReport> IVA_MatchPattern(VisionImage image,
                                                                        IVA_Data ivaData,
                                                                        string templatePath,
                                                                        MatchingAlgorithm algorithm,
                                                                        float[] angleRangeMin,
                                                                        float[] angleRangeMax,
                                                                        int[] advOptionsItems,
                                                                        double[] advOptionsValues,
                                                                        int numAdvancedOptions,
                                                                        int matchesRequested,
                                                                        float score,
                                                                        Roi roi,
                                                                        int stepIndex)
        {
            FileInformation fileInfo = Algorithms.GetFileInformation(templatePath);
            using (VisionImage imageTemplate = new VisionImage(fileInfo.ImageType, 7))
            {
                int numObjectResults = 4;
                Collection <PatternMatchReport> patternMatchingResults = new Collection <PatternMatchReport>();

                // Read the image template.
                imageTemplate.ReadVisionFile(templatePath);

                // If the image is calibrated, we also need to log the calibrated position (x and y) and angle -> 7 results instead of 4
                if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                {
                    numObjectResults = 7;
                }

                // Set the angle range.
                Collection <RotationAngleRange> angleRange = new Collection <RotationAngleRange>();
                for (int i = 0; i < 2; ++i)
                {
                    angleRange.Add(new RotationAngleRange(angleRangeMin[i], angleRangeMax[i]));
                }

                // Set the advanced options.
                Collection <PMMatchAdvancedSetupDataOption> advancedMatchOptions = new Collection <PMMatchAdvancedSetupDataOption>();
                for (int i = 0; i < numAdvancedOptions; ++i)
                {
                    advancedMatchOptions.Add(new PMMatchAdvancedSetupDataOption((MatchSetupOption)advOptionsItems[i], advOptionsValues[i]));
                }

                // Searches for areas in the image that match a given pattern.
                patternMatchingResults = Algorithms.MatchPattern3(image, imageTemplate, algorithm, matchesRequested, score, angleRange, roi, advancedMatchOptions);

                // ////////////////////////////////////////
                // Store the results in the data structure.
                // ////////////////////////////////////////

                // First, delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, stepIndex);

                if (patternMatchingResults.Count > 0)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of objects", patternMatchingResults.Count));

                    for (int i = 0; i < patternMatchingResults.Count; ++i)
                    {
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", i + 1), patternMatchingResults[i].Position.X));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", i + 1), patternMatchingResults[i].Position.Y));

                        // If the image is calibrated, add the calibrated positions.
                        if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                        {
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", i + 1), patternMatchingResults[i].CalibratedPosition.X));
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", i + 1), patternMatchingResults[i].CalibratedPosition.Y));
                        }

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", i + 1), patternMatchingResults[i].Rotation));
                        if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                        {
                            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Calibrated Angle (degrees)", i + 1), patternMatchingResults[i].CalibratedRotation));
                        }

                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Score", i + 1), patternMatchingResults[i].Score));
                    }
                }

                return(patternMatchingResults);
            }
        }
Example #14
        private static void IVA_Particle(VisionImage image,
                                         Connectivity connectivity,
                                         Collection <MeasurementType> pPixelMeasurements,
                                         Collection <MeasurementType> pCalibratedMeasurements,
                                         IVA_Data ivaData,
                                         int stepIndex,
                                         out ParticleMeasurementsReport partReport,
                                         out ParticleMeasurementsReport partReportCal)
        {
            // Computes the requested pixel measurements.
            if (pPixelMeasurements.Count != 0)
            {
                partReport = Algorithms.ParticleMeasurements(image, pPixelMeasurements, connectivity, ParticleMeasurementsCalibrationMode.Pixel);
            }
            else
            {
                partReport = new ParticleMeasurementsReport();
            }

            // Computes the requested calibrated measurements.
            if (pCalibratedMeasurements.Count != 0)
            {
                partReportCal = Algorithms.ParticleMeasurements(image, pCalibratedMeasurements, connectivity, ParticleMeasurementsCalibrationMode.Calibrated);
            }
            else
            {
                partReportCal = new ParticleMeasurementsReport();
            }

            // Computes the center of mass of each particle to log as results.
            ParticleMeasurementsReport   centerOfMass;
            Collection <MeasurementType> centerOfMassMeasurements = new Collection <MeasurementType>();

            centerOfMassMeasurements.Add(MeasurementType.CenterOfMassX);
            centerOfMassMeasurements.Add(MeasurementType.CenterOfMassY);

            if ((image.InfoTypes & InfoTypes.Calibration) != 0)
            {
                centerOfMass = Algorithms.ParticleMeasurements(image, centerOfMassMeasurements, connectivity, ParticleMeasurementsCalibrationMode.Both);
            }
            else
            {
                centerOfMass = Algorithms.ParticleMeasurements(image, centerOfMassMeasurements, connectivity, ParticleMeasurementsCalibrationMode.Pixel);
            }

            // Delete all the results of this step (from a previous iteration)
            Functions.IVA_DisposeStepResults(ivaData, stepIndex);

            ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Object #", centerOfMass.PixelMeasurements.GetLength(0)));

            if (centerOfMass.PixelMeasurements.GetLength(0) > 0)
            {
                for (int i = 0; i < centerOfMass.PixelMeasurements.GetLength(0); ++i)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Particle {0}.X Position (Pix.)", i + 1), centerOfMass.PixelMeasurements[i, 0]));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Particle {0}.Y Position (Pix.)", i + 1), centerOfMass.PixelMeasurements[i, 1]));

                    // If the image is calibrated, also store the real world coordinates.
                    if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                    {
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Particle {0}.X Position (Calibrated)", i + 1), centerOfMass.CalibratedMeasurements[i, 0]));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Particle {0}.Y Position (Calibrated)", i + 1), centerOfMass.CalibratedMeasurements[i, 1]));
                    }
                }
            }
        }
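The reports produced by IVA_Particle expose measurements as a 2-D array indexed [particle, measurement], with one column per requested MeasurementType. Below is a sketch that scans a single-column Area report (the measurement requested in Example #15); the method name is illustrative.

        private static double SketchLargestArea(ParticleMeasurementsReport report)
        {
            // With a single requested measurement (Area), column 0 holds each particle's area.
            double largest = 0;
            for (int i = 0; i < report.PixelMeasurements.GetLength(0); ++i)
            {
                if (report.PixelMeasurements[i, 0] > largest)
                {
                    largest = report.PixelMeasurements[i, 0];
                }
            }
            return largest;
        }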
Example #15
        public static PaletteType ProcessImage(VisionImage image, string path, out double[] distance)
        {
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(17, 1);

            distance = new double[4] {
                0, 0, 0, 0
            };

            // Creates a new, empty region of interest.
            Roi roi = new Roi();
            // Creates a new AnnulusContour using the given values.
            PointContour   vaCenter = new PointContour(1283, 965);
            AnnulusContour vaOval   = new AnnulusContour(vaCenter, 418, 702, 0, 0);

            roi.Add(vaOval);
            // Find Circular Edge
            EdgeOptions vaOptions = new EdgeOptions();

            vaOptions.ColumnProcessingMode = ColumnProcessingMode.Average;
            vaOptions.InterpolationType    = InterpolationMethod.Bilinear;
            vaOptions.KernelSize           = 3;
            vaOptions.MinimumThreshold     = 18;
            vaOptions.Polarity             = EdgePolaritySearchMode.Rising;
            vaOptions.Width = 3;
            CircularEdgeFitOptions vaFitOptions = new CircularEdgeFitOptions();

            vaFitOptions.ProcessType    = RakeProcessType.GetFirstEdges;
            vaFitOptions.StepSize       = 7;
            vaFitOptions.MaxPixelRadius = 3;

            vaCircularEdgeReport = IVA_FindCircularEdge(image, roi, SpokeDirection.InsideToOutside, vaOptions, vaFitOptions, ivaData, 1);

            roi.Dispose();

            // Set Coordinate System
            int             vaCoordSystemIndex    = 0;
            int             stepIndexOrigin       = 1;
            int             resultIndexOrigin     = 0;
            int             stepIndexAngle        = -1;
            int             resultIndexAngle      = 0;
            double          refSysOriginX         = vaCircularEdgeReport.Center.X;
            double          refSysOriginY         = vaCircularEdgeReport.Center.Y;
            double          refSysAngle           = 0;
            AxisOrientation refSysAxisOrientation = AxisOrientation.Direct;
            int             vaCoordSystemType     = 0;

            IVA_CoordSys(vaCoordSystemIndex, stepIndexOrigin, resultIndexOrigin, stepIndexAngle, resultIndexAngle, refSysOriginX, refSysOriginY, refSysAngle, refSysAxisOrientation, vaCoordSystemType, ivaData);

            // Image Buffer: Push
            Functions.IVA_PushBuffer(ivaData, image, 0);

            // Get Image
            string          vaFilePath = path;
            FileInformation vaFileInfo = Algorithms.GetFileInformation(vaFilePath);

            // Set the image size to 0 to speed up the cast.
            //image.SetSize(0, 0);
            //image.Type = vaFileInfo.ImageType;
            //image.BitDepth = 0;
            image.ReadFile(vaFilePath);

            switch (image.Type)
            {
            case ImageType.I16:
            case ImageType.U16:
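                // The "& false" below makes this branch unreachable; the bit-depth override
                // appears to have been generated in a disabled state.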
                if (image.BitDepth == 0 & false)
                {
                    image.BitDepth = 10;
                }
                break;

            default:
                break;
            }
            // Operators: Absolute Difference Image
            Algorithms.AbsoluteDifference(image, Functions.IVA_GetBuffer(ivaData, 0), image);

            // Creates a new, empty region of interest.
            Roi roi2 = new Roi();
            // Creates a new AnnulusContour using the given values.
            PointContour   vaCenter2 = new PointContour(vaCircularEdgeReport.Center.X, vaCircularEdgeReport.Center.Y);
            AnnulusContour vaOval2   = new AnnulusContour(vaCenter2, 527, 846, 0, 0);

            roi2.Add(vaOval2);
            // Reposition the region of interest based on the coordinate system.
            int coordSystemIndex = 0;

            Algorithms.TransformRoi(roi2, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex], ivaData.MeasurementSystems[coordSystemIndex]));
            // Mask from ROI
            IVA_Mask_From_ROI(image, roi2, false, false);
            roi2.Dispose();

            // Color Threshold
            Range plane1Range = new Range(0, 60);
            Range plane2Range = new Range(0, 50);
            Range plane3Range = new Range(0, 255);

            using (VisionImage thresholdImage = new VisionImage(ImageType.U8, 7))
            {
                Algorithms.ColorThreshold(image, thresholdImage, ColorMode.Rgb, 1, plane1Range, plane2Range, plane3Range);
                Algorithms.Copy(thresholdImage, image);
            }

            // Truncates the frequencies of an image.
            IVA_FFT_Truncate(image, TruncateMode.High, 7);

            // Advanced Morphology: Remove Objects
            int[] vaCoefficients            = { 1, 1, 1, 1, 1, 1, 1, 1, 1 };
            StructuringElement vaStructElem = new StructuringElement(3, 3, vaCoefficients);

            vaStructElem.Shape = StructuringElementShape.Square;
            // Filters particles based on their size.
            Algorithms.RemoveParticle(image, image, 30, SizeToKeep.KeepLarge, Connectivity.Connectivity8, vaStructElem);

            // Invert Binary Image.
            IVA_BinaryInverse(image);

            // Advanced Morphology: Remove Objects
            int[] vaCoefficients2            = { 1, 1, 1, 1, 1, 1, 1, 1, 1 };
            StructuringElement vaStructElem2 = new StructuringElement(3, 3, vaCoefficients2);

            vaStructElem2.Shape = StructuringElementShape.Square;
            // Filters particles based on their size.
            Algorithms.RemoveParticle(image, image, 5, SizeToKeep.KeepLarge, Connectivity.Connectivity8, vaStructElem2);

            // Basic Morphology - Applies morphological transformations to binary images.
            int[] vaCoefficients3            = { 0, 1, 0, 1, 1, 1, 0, 1, 0 };
            StructuringElement vaStructElem3 = new StructuringElement(3, 3, vaCoefficients3);

            vaStructElem3.Shape = StructuringElementShape.Square;
            // Applies morphological transformations
            for (int i = 0; i < 3; ++i)
            {
                Algorithms.Morphology(image, image, MorphologyMethod.Erode, vaStructElem3);
            }

            // Advanced Morphology: Fill Holes
            VisionImage image1 = new VisionImage();

            Algorithms.FillHoles(image, image1, Connectivity.Connectivity8);

            // Particle Analysis - Computes the number of particles detected in a binary image and
            // returns the requested measurements about the particles.
            Collection <MeasurementType> vaPixelMeasurements      = new Collection <MeasurementType>(new MeasurementType[] { MeasurementType.Area });
            Collection <MeasurementType> vaCalibratedMeasurements = new Collection <MeasurementType>(new MeasurementType[] { });

            IVA_Particle(image1, Connectivity.Connectivity8, vaPixelMeasurements, vaCalibratedMeasurements, ivaData, 16, out vaParticleReport, out vaParticleReportCalibrated);
            double[,] area = vaParticleReport.PixelMeasurements;
            double Maxarea = 0;

            for (int i = 0; i < area.GetLength(0); i++)
            {
                for (int j = 0; j < area.GetLength(1); j++)
                {
                    if (area[i, j] > Maxarea)
                    {
                        Maxarea = area[i, j];
                    }
                }
            }
            image1.Dispose();

            if (Maxarea > 1000000)
            {
                // Creates a new, empty region of interest.
                Roi roi3 = new Roi();
                // Creates a new AnnulusContour using the given values.
                PointContour   vaCenter3 = new PointContour(1295, 963);
                AnnulusContour vaOval3   = new AnnulusContour(vaCenter3, 496, 892, 0, 0);
                roi3.Add(vaOval3);
                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex2 = 0;
                Algorithms.TransformRoi(roi3, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex2], ivaData.MeasurementSystems[coordSystemIndex2]));
                // Extract the contour edges from the image
                CurveParameters            vaCurveParams         = new CurveParameters(ExtractionMode.NormalImage, 1, EdgeFilterSize.ContourTracing, 30, 20, 10, true);
                double[]                   vaConstraintMinArray  = { };
                double[]                   vaConstraintMaxArray  = { };
                ConnectionConstraintType[] vaConstraintTypeArray = { };
                ExtractContourReport       vaExtractReport       = IVA_ExtractContour(image, roi3, ExtractContourDirection.AnnulusOuterInner, vaCurveParams, vaConstraintTypeArray, vaConstraintMinArray, vaConstraintMaxArray, ExtractContourSelection.Closest);
                // Fit a circle to the contour
                ContourOverlaySettings vaEquationOverlay = new ContourOverlaySettings(true, Rgb32Value.GreenColor, 1, true);
                ContourOverlaySettings vaPointsOverlay   = new ContourOverlaySettings(true, Rgb32Value.RedColor, 1, true);
                PartialCircle          vaCircleReport    = Algorithms.ContourFitCircle(image, 100, true);
                Algorithms.ContourOverlay(image, image, vaPointsOverlay, vaEquationOverlay);
                ComputeDistanceReport vaDistanceReport = Algorithms.ContourComputeDistances(image, image, 0);

                MaxDistance      = 0;
                MaxDistanceIndex = 0;
                for (int i = 0; i < vaDistanceReport.Distances.Count; i++)
                {
                    if (vaDistanceReport.Distances[i].Distance > MaxDistance)
                    {
                        MaxDistance      = vaDistanceReport.Distances[i].Distance;
                        MaxDistanceIndex = i;
                    }
                }
                var pos = vaDistanceReport.Distances[MaxDistanceIndex];
                distance[0] = MaxDistance;

                roi3.Dispose();

                // Creates a new, empty region of interest.
                Roi roi4 = new Roi();
                // Creates a new AnnulusContour using the given values.
                PointContour   vaCenter4 = new PointContour(1294, 962);
                AnnulusContour vaOval4   = new AnnulusContour(vaCenter4, 499, 885, 0, 0);
                roi4.Add(vaOval4);
                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex3 = 0;
                Algorithms.TransformRoi(roi4, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex3], ivaData.MeasurementSystems[coordSystemIndex3]));
                // Extract the contour edges from the image
                CurveParameters            vaCurveParams2         = new CurveParameters(ExtractionMode.NormalImage, 1, EdgeFilterSize.ContourTracing, 30, 25, 10, true);
                double[]                   vaConstraintMinArray2  = { };
                double[]                   vaConstraintMaxArray2  = { };
                ConnectionConstraintType[] vaConstraintTypeArray2 = { };
                ExtractContourReport       vaExtractReport2       = IVA_ExtractContour(image, roi4, ExtractContourDirection.AnnulusInnerOuter, vaCurveParams2, vaConstraintTypeArray2, vaConstraintMinArray2, vaConstraintMaxArray2, ExtractContourSelection.Closest);
                // Fit a circle to the contour
                ContourOverlaySettings vaEquationOverlay2 = new ContourOverlaySettings(true, Rgb32Value.GreenColor, 1, true);
                ContourOverlaySettings vaPointsOverlay2   = new ContourOverlaySettings(true, Rgb32Value.RedColor, 1, true);
                PartialCircle          vaCircleReport2    = Algorithms.ContourFitCircle(image, 100, true);
                Algorithms.ContourOverlay(image, image, vaPointsOverlay2, vaEquationOverlay2);
                ComputeDistanceReport vaDistanceReport2 = Algorithms.ContourComputeDistances(image, image, 0);

                MaxDistance1      = 0;
                MaxDistanceIndex1 = 0;
                for (int i = 0; i < vaDistanceReport2.Distances.Count; i++)
                {
                    if (vaDistanceReport2.Distances[i].Distance > MaxDistance1)
                    {
                        MaxDistance1      = vaDistanceReport2.Distances[i].Distance;
                        MaxDistanceIndex1 = i;
                    }
                }
                var pos1 = vaDistanceReport2.Distances[MaxDistanceIndex1];
                distance[1] = MaxDistance1;
                distance[2] = (vaCircleReport2.Center.X - vaCircularEdgeReport.Center.X) / 96;
                distance[3] = (vaCircleReport2.Center.Y - vaCircularEdgeReport.Center.Y) / 96;
                roi4.Dispose();
            }
            else
            {
                distance[0] = 9999;
                distance[1] = 9999;
                distance[2] = 9999;
                distance[3] = 9999;
            }

            // Dispose the IVA_Data structure.
            ivaData.Dispose();
            if (path == @"./ImageTemp/temp.jpg")
            {
                image.Dispose();
            }

            // Return the palette type of the final image.
            return(PaletteType.Binary);
        }
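A hedged driver sketch for the ProcessImage overload above, showing how the out distance array might be consumed; the reference-image path is illustrative.

        public static bool SketchRunDistanceCheck(VisionImage image)
        {
            double[] distance;
            ProcessImage(image, @"./ImageTemp/reference.jpg", out distance);

            // distance[0] and distance[1] hold the maximum contour-to-fit distances,
            // distance[2] and distance[3] the center offsets; all four are 9999 when
            // the largest particle area did not exceed the threshold.
            return distance[0] != 9999;
        }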
Example #16
        public string ProcessDatamatrix(VisionImage image, TemplateConfig templateConfig, List <DataMatrixConfig> dataMatrixConfigs)
        {
            string dataMatrixInfo = string.Empty;
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(3, 1);

            // Creates a new, empty region of interest.
            Roi roi = new Roi();
            //// Creates a new RotatedRectangleContour using the given values.
            //RotatedRectangleContour vaRotatedRect = new RotatedRectangleContour(vaCenter, 1268, 1220, 0);
            // Creates a new RectangleContour using the given values.
            RectangleContour vaRotatedRect = new RectangleContour(templateConfig.Rectangle.Left,
                                                                  templateConfig.Rectangle.Top, templateConfig.Rectangle.Width, templateConfig.Rectangle.Height);

            roi.Add(vaRotatedRect);
            image.Overlays.Default.AddRoi(roi);
            // MatchPattern Grayscale
            MatchMode vaMode        = MatchMode.RotationInvariant;
            bool      vaSubpixelVal = false;

            int[]  minAngleVals          = { -30, 0 };
            int[]  maxAngleVals          = { 30, 0 };
            int    vaNumMatchesRequested = 1;
            double vaMinMatchScore       = 800;
            double vaOffsetX             = 0;
            double vaOffsetY             = 0;

            pmResults = IVA_MatchPattern(image, ivaData, templateConfig.TemplatePath, vaMode, vaSubpixelVal,
                                         minAngleVals, maxAngleVals, vaNumMatchesRequested, vaMinMatchScore, roi, vaOffsetX, vaOffsetY, 0);

            foreach (PatternMatch match in pmResults)
            {
                image.Overlays.Default.AddPolygon(new PolygonContour(match.Corners), Rgb32Value.RedColor);
            }
            roi.Dispose();

            // Set Coordinate System
            int             vaCoordSystemIndex    = 0;
            int             stepIndexOrigin       = 0;
            int             resultIndexOrigin     = 1;
            int             stepIndexAngle        = 0;
            int             resultIndexAngle      = 3;
            double          refSysOriginX         = templateConfig.Position.X;
            double          refSysOriginY         = templateConfig.Position.Y;
            double          refSysAngle           = 0;
            AxisOrientation refSysAxisOrientation = AxisOrientation.Direct;
            int             vaCoordSystemType     = 3;

            IVA_CoordSys(vaCoordSystemIndex, stepIndexOrigin, resultIndexOrigin, stepIndexAngle,
                         resultIndexAngle, refSysOriginX, refSysOriginY, refSysAngle, refSysAxisOrientation, vaCoordSystemType, ivaData);

            for (int i = 0; i < dataMatrixConfigs.Count; i++)
            {
                // Creates a new, empty region of interest.
                Roi roiDM = new Roi();
                // Creates a new RectangleContour using the given values.
                RectangleContour vaRect = new RectangleContour(dataMatrixConfigs[i].Rectangle.Left,
                                                               dataMatrixConfigs[i].Rectangle.Top, dataMatrixConfigs[i].Rectangle.Width, dataMatrixConfigs[i].Rectangle.Height);

                roiDM.Add(vaRect);

                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex = 0;
                Algorithms.TransformRoi(roiDM, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex], ivaData.MeasurementSystems[coordSystemIndex]));
                image.Overlays.Default.AddRoi(roiDM);

                // Read DataMatrix Barcode
                DataMatrixDescriptionOptions vaDescriptionOptions = new DataMatrixDescriptionOptions();
                vaDescriptionOptions.AspectRatio = 0;
                vaDescriptionOptions.CellFill    = DataMatrixCellFillMode.AutoDetect;
                // Parse the matrix size ("rowsXcolumns"); fall back to 25 when a field is missing or not numeric.
                string[] matrixSizeParts = dataMatrixConfigs[i].MatrixSize.Split('X');
                uint matrixSizeColumns;
                if (matrixSizeParts.Length < 2 || !uint.TryParse(matrixSizeParts[1], out matrixSizeColumns))
                {
                    matrixSizeColumns = 25;
                }
                vaDescriptionOptions.Columns = matrixSizeColumns;
                vaDescriptionOptions.MinimumBorderIntegrity = 90;
                vaDescriptionOptions.MirrorMode             = DataMatrixMirrorMode.AutoDetect;
                vaDescriptionOptions.Polarity  = dataMatrixConfigs[i].Polarity;
                vaDescriptionOptions.Rectangle = false;
                uint matrixSizeRows;
                if (!uint.TryParse(matrixSizeParts[0], out matrixSizeRows))
                {
                    matrixSizeRows = 25;
                }
                vaDescriptionOptions.Rows = matrixSizeRows;

                DataMatrixSizeOptions vaSizeOptions = new DataMatrixSizeOptions();
                vaSizeOptions.MaximumSize    = 250;
                vaSizeOptions.MinimumSize    = 50;
                vaSizeOptions.QuietZoneWidth = 0;

                DataMatrixSearchOptions vaSearchOptions = new DataMatrixSearchOptions();
                vaSearchOptions.CellFilterMode           = DataMatrixCellFilterMode.AutoDetect;
                vaSearchOptions.CellSampleSize           = dataMatrixConfigs[i].CellSize;
                vaSearchOptions.DemodulationMode         = DataMatrixDemodulationMode.AutoDetect;
                vaSearchOptions.EdgeThreshold            = 30;
                vaSearchOptions.InitialSearchVectorWidth = 5;
                vaSearchOptions.MaximumIterations        = 150;
                vaSearchOptions.RotationMode             = DataMatrixRotationMode.Unlimited;
                vaSearchOptions.SkewDegreesAllowed       = 5;
                vaSearchOptions.SkipLocation             = false;

                // Reads the data matrix from the image.
                vaDataMatrixReport = Algorithms.ReadDataMatrixBarcode(image, roiDM, DataMatrixGradingMode.None,
                                                                      vaDescriptionOptions, vaSizeOptions, vaSearchOptions);

                if (vaDataMatrixReport.Found)
                {
                    image.Overlays.Default.AddPolygon(new PolygonContour(vaDataMatrixReport.Corners),
                                                      Rgb32Value.RedColor, DrawingMode.DrawValue);
                }
                dataMatrixInfo += string.Format("{0},", vaDataMatrixReport.StringData);
                roiDM.Dispose();
            }
            // Trim the trailing comma (guarding against an empty configuration list).
            if (!string.IsNullOrEmpty(dataMatrixInfo))
            {
                dataMatrixInfo = dataMatrixInfo.Substring(0, dataMatrixInfo.Length - 1);
            }
            // Dispose the IVA_Data structure.
            ivaData.Dispose();

            // Return the concatenated Data Matrix strings.
            return(dataMatrixInfo);
        }
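
A minimal usage sketch for the method above. The owning class ("VisionProcessor") and the configuration loaders are assumptions, not part of the original code; only the VisionImage calls and the split of the comma-joined result mirror the listing.

        // Hedged usage sketch: VisionProcessor, LoadTemplateConfig and LoadDataMatrixConfigs
        // are hypothetical placeholders for however the application wires this up.
        VisionImage image = new VisionImage(ImageType.U8, 7);
        image.ReadVisionFile(@"./ImageTemp/sample.png");                 // any 8-bit source image (hypothetical path)

        VisionProcessor vision = new VisionProcessor();                  // hypothetical owner of ProcessDatamatrix
        TemplateConfig templateConfig = LoadTemplateConfig();            // hypothetical configuration source
        List<DataMatrixConfig> dmConfigs = LoadDataMatrixConfigs();      // hypothetical configuration source

        string joined = vision.ProcessDatamatrix(image, templateConfig, dmConfigs);
        string[] codes = joined.Split(',');                              // one entry per configured Data Matrix region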
Example #17
        /// <summary>
        /// Reads the QR codes configured in the user program, repositioning each search
        /// region relative to the coordinate system established by the template match.
        /// </summary>
        /// <param name="image">Image to process.</param>
        /// <param name="userProgram">Supplies the template configuration and the QR regions.</param>
        /// <returns>The decoded payloads, joined with ','.</returns>
        public string ProcessQRCoordinate(VisionImage image, UserProgram userProgram)
        {
            string          qRInfo         = string.Empty;
            TemplateConfig  templateConfig = userProgram.TemplateConfig;
            List <QRConfig> qRConfigs      = userProgram.QRConfigs;
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(3, 1);

            // Creates a new, empty region of interest.
            Roi roiFullRange = new Roi();
            //// Creates a new RotatedRectangleContour using the given values.
            //PointContour            vaCenter      = new PointContour(1405.5, 954);
            //RotatedRectangleContour vaRotatedRect = new RotatedRectangleContour(vaCenter, 1661, 1184, 0);
            // Creates a new RectangleContour using the given values.
            RectangleContour rectangle = new RectangleContour(templateConfig.Rectangle.Left, templateConfig.Rectangle.Top,
                                                              templateConfig.Rectangle.Width, templateConfig.Rectangle.Height);

            roiFullRange.Add(rectangle);
            // MatchPattern Grayscale
            string    vaTemplateFile = templateConfig.TemplatePath;
            MatchMode vaMode         = MatchMode.RotationInvariant;
            bool      vaSubpixelVal  = false;

            int[]  minAngleVals          = { -20, 0 };
            int[]  maxAngleVals          = { 20, 0 };
            int    vaNumMatchesRequested = 1;
            double vaMinMatchScore       = 800;
            double vaOffsetX             = 0;
            double vaOffsetY             = 0;

            pmResults = IVA_MatchPattern(image, ivaData, vaTemplateFile, vaMode, vaSubpixelVal,
                                         minAngleVals, maxAngleVals, vaNumMatchesRequested, vaMinMatchScore, roiFullRange, vaOffsetX, vaOffsetY, 0);
            if (pmResults.Count < 1)
            {
                return(string.Empty);
            }
            foreach (PatternMatch match in pmResults)
            {
                image.Overlays.Default.AddPolygon(new PolygonContour(match.Corners), Rgb32Value.RedColor);
            }
            roiFullRange.Dispose();

            // Set Coordinate System
            int             vaCoordSystemIndex    = 0;
            int             stepIndexOrigin       = 0;
            int             resultIndexOrigin     = 1;
            int             stepIndexAngle        = 0;
            int             resultIndexAngle      = 3;
            double          refSysOriginX         = templateConfig.Position.X;
            double          refSysOriginY         = templateConfig.Position.Y;
            double          refSysAngle           = 0;
            AxisOrientation refSysAxisOrientation = AxisOrientation.Direct;
            int             vaCoordSystemType     = 3;

            IVA_CoordSys(vaCoordSystemIndex, stepIndexOrigin, resultIndexOrigin, stepIndexAngle,
                         resultIndexAngle, refSysOriginX, refSysOriginY, refSysAngle, refSysAxisOrientation, vaCoordSystemType, ivaData);

            for (int i = 0; i < qRConfigs.Count; i++)
            {
                // Creates a new, empty region of interest.
                Roi roi = new Roi();
                // Creates a new RectangleContour using the given values.
                RectangleContour vaRect = new RectangleContour(qRConfigs[i].Rectangle.Left,
                                                               qRConfigs[i].Rectangle.Top, qRConfigs[i].Rectangle.Width, qRConfigs[i].Rectangle.Height);
                roi.Add(vaRect);
                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex = 0;
                Algorithms.TransformRoi(roi, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex],
                                                                     ivaData.MeasurementSystems[coordSystemIndex]));
                // Read QR Code
                QRDescriptionOptions vaQROptions = new QRDescriptionOptions();
                vaQROptions.Dimensions = qRConfigs[i].QRDimension;
                vaQROptions.MirrorMode = QRMirrorMode.AutoDetect;
                vaQROptions.ModelType  = QRModelType.AutoDetect;
                vaQROptions.Polarity   = qRConfigs[i].Polarity;
                QRSizeOptions   vaQRSizeOptions   = new QRSizeOptions(3, 15);
                QRSearchOptions vaQRSearchOptions = new QRSearchOptions();
                vaQRSearchOptions.CellFilterMode     = QRCellFilterMode.AutoDetect;
                vaQRSearchOptions.CellSampleSize     = qRConfigs[i].CellSize;
                vaQRSearchOptions.DemodulationMode   = QRDemodulationMode.AutoDetect;
                vaQRSearchOptions.EdgeThreshold      = 30;
                vaQRSearchOptions.RotationMode       = QRRotationMode.Unlimited;
                vaQRSearchOptions.SkewDegreesAllowed = 5;
                vaQRSearchOptions.SkipLocation       = false;
                vaQRCode = Algorithms.ReadQRCode(image, roi, vaQROptions, vaQRSizeOptions, vaQRSearchOptions);

                if (vaQRCode.Found)
                {
                    image.Overlays.Default.AddPolygon(new PolygonContour(vaQRCode.Corners), Rgb32Value.RedColor, DrawingMode.DrawValue);
                }

                System.Text.ASCIIEncoding vaASCIIEncoding = new System.Text.ASCIIEncoding();
                // Decode the payload only when a code was found; otherwise record an empty field.
                vaQRCodeData = vaQRCode.Found ? vaASCIIEncoding.GetString(vaQRCode.GetData()) : string.Empty;
                qRInfo      += string.Format("{0},", vaQRCodeData);
                roi.Dispose();
            }
            if (!string.IsNullOrEmpty(qRInfo))
            {
                qRInfo = qRInfo.Substring(0, qRInfo.Length - 1);
            }
            // Dispose the IVA_Data structure.
            ivaData.Dispose();
            // Return the concatenated QR code strings.
            return(qRInfo);
        }
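
ProcessQRCoordinate returns string.Empty when the template match fails, so a caller should treat an empty result as "template not located" before splitting. A short sketch, reusing the hypothetical vision and image objects from the previous note, with LoadUserProgram as another assumed configuration source:

        UserProgram userProgram = LoadUserProgram();                      // hypothetical configuration source
        string qrJoined = vision.ProcessQRCoordinate(image, userProgram);
        if (string.IsNullOrEmpty(qrJoined))
        {
            // The template was not located (or nothing was decoded); no payloads to split.
        }
        else
        {
            string[] payloads = qrJoined.Split(',');                      // one payload per QRConfig entry
        }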
Example #18
        private static Collection <GeometricEdgeBasedPatternMatch> IVA_MatchGeometricPattern2(VisionImage image,
                                                                                              string templatePath,
                                                                                              CurveOptions curveOptions,
                                                                                              MatchGeometricPatternEdgeBasedOptions matchOptions,
                                                                                              IVA_Data ivaData,
                                                                                              int stepIndex,
                                                                                              Roi roi)
        {
            // Geometric Matching (Edge Based)

            // Creates the image template.
            using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
            {
                // Read the image template.
                imageTemplate.ReadVisionFile(templatePath);

                Collection <GeometricEdgeBasedPatternMatch> gpmResults = Algorithms.MatchGeometricPatternEdgeBased(image, imageTemplate, curveOptions, matchOptions, roi);

                // Store the results in the data structure.

                // First, delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, stepIndex);

                ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# Matches", gpmResults.Count));

                for (int i = 0; i < gpmResults.Count; ++i)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", i + 1), gpmResults[i].Position.X));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", i + 1), gpmResults[i].Position.Y));

                    // If the image is calibrated, log the calibrated results.
                    if ((image.InfoTypes & InfoTypes.Calibration) != 0)
                    {
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", i + 1), gpmResults[i].CalibratedPosition.X));
                        ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", i + 1), gpmResults[i].CalibratedPosition.Y));
                    }

                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", i + 1), gpmResults[i].Rotation));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Scale", i + 1), gpmResults[i].Scale));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Score", i + 1), gpmResults[i].Score));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Occlusion", i + 1), gpmResults[i].Occlusion));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Template Target Curve Score", i + 1), gpmResults[i].TemplateMatchCurveScore));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Correlation Score", i + 1), gpmResults[i].CorrelationScore));
                }

                return(gpmResults);
            }
        }
Example #19
        public string Process1DBarcode(VisionImage image, TemplateConfig templateConfig, List <BarcodeConfig> barcodeConfigs)
        {
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(5, 1);

            // Creates a new, empty region of interest.
            Roi roi = new Roi();

            // Creates a new RotatedRectangleContour using the given values.
            //PointContour vaCenter = new PointContour(1294, 972);
            //RotatedRectangleContour vaRotatedRect = new RotatedRectangleContour(vaCenter, 2548, 1904, 0);

            // Creates a new RectangleContour using the given values.
            RectangleContour vaTemplateRect = new RectangleContour(templateConfig.Rectangle.Left,
                                                                   templateConfig.Rectangle.Top, templateConfig.Rectangle.Width, templateConfig.Rectangle.Height);

            roi.Add(vaTemplateRect);

            image.Overlays.Default.AddRoi(roi);

            // MatchPattern Grayscale
            MatchMode vaMode        = MatchMode.RotationInvariant;
            bool      vaSubpixelVal = false;

            int[]  minAngleVals          = { -30, 0 };
            int[]  maxAngleVals          = { 30, 0 };
            int    vaNumMatchesRequested = 1;
            double vaMinMatchScore       = 800;
            double vaOffsetX             = 0;
            double vaOffsetY             = 0;

            pmResults = IVA_MatchPattern(image, ivaData, templateConfig.TemplatePath, vaMode, vaSubpixelVal,
                                         minAngleVals, maxAngleVals, vaNumMatchesRequested, vaMinMatchScore, roi, vaOffsetX, vaOffsetY, 0);

            foreach (PatternMatch match in pmResults)
            {
                image.Overlays.Default.AddPolygon(new PolygonContour(match.Corners), Rgb32Value.RedColor);
            }
            roi.Dispose();
            // Set Coordinate System
            int             vaCoordSystemIndex    = 0;
            int             stepIndexOrigin       = 0;
            int             resultIndexOrigin     = 1;
            int             stepIndexAngle        = 0;
            int             resultIndexAngle      = 3;
            double          refSysOriginX         = templateConfig.Position.X;
            double          refSysOriginY         = templateConfig.Position.Y;
            double          refSysAngle           = 0;
            AxisOrientation refSysAxisOrientation = AxisOrientation.Direct;
            int             vaCoordSystemType     = 3;

            IVA_CoordSys(vaCoordSystemIndex, stepIndexOrigin, resultIndexOrigin, stepIndexAngle,
                         resultIndexAngle, refSysOriginX, refSysOriginY, refSysAngle, refSysAxisOrientation, vaCoordSystemType, ivaData);

            string barcodeInfo = "";

            for (int i = 0; i < barcodeConfigs.Count; i++)
            {
                Roi roiBarcode          = new Roi();
                RectangleContour vaRect = new RectangleContour(barcodeConfigs[i].Rectangle.Left,
                                                               barcodeConfigs[i].Rectangle.Top, barcodeConfigs[i].Rectangle.Width, barcodeConfigs[i].Rectangle.Height);
                roiBarcode.Add(vaRect);
                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex = 0;
                Algorithms.TransformRoi(roiBarcode, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex],
                                                                            ivaData.MeasurementSystems[coordSystemIndex]));
                image.Overlays.Default.AddRoi(roiBarcode);
                // Reads the barcode from the image.
                BarcodeReport vaBarcode = Algorithms.ReadBarcode(image, barcodeConfigs[i].Type, roiBarcode, false);

                barcodeInfo += string.Format("{0},", vaBarcode.Text);

                roiBarcode.Dispose();
            }
            // Trim the trailing comma (guarding against an empty configuration list).
            if (!string.IsNullOrEmpty(barcodeInfo))
            {
                barcodeInfo = barcodeInfo.Substring(0, barcodeInfo.Length - 1);
            }
            // Dispose the IVA_Data structure.
            ivaData.Dispose();
            return(barcodeInfo);
        }
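
Process1DBarcode joins the decoded values in configuration order, so pairing each value back with its region relies on that order. A hedged sketch, again reusing the hypothetical objects from the earlier notes and assuming BarcodeConfig exposes the Type member used above:

        List<BarcodeConfig> barcodeConfigs = LoadBarcodeConfigs();        // hypothetical configuration source
        string barcodesJoined = vision.Process1DBarcode(image, templateConfig, barcodeConfigs);
        string[] values = barcodesJoined.Split(',');
        for (int i = 0; i < values.Length && i < barcodeConfigs.Count; i++)
        {
            Console.WriteLine("{0}: {1}", barcodeConfigs[i].Type, values[i]);   // barcode type paired with its decoded text
        }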
Example #20
        public static PaletteType RectLeftPos(VisionImage image, Point pointoffset)
        {
            // Initialize the IVA_Data structure to pass results and coordinate systems.
            IVA_Data ivaData = new IVA_Data(12, 1);

            // Extract Color Plane
            using (VisionImage plane = new VisionImage(ImageType.U8, 7))
            {
                // Extract the red color plane and copy it to the main image.
                Algorithms.ExtractColorPlanes(image, ColorMode.Rgb, plane, null, null);
                Algorithms.Copy(plane, image);
            }

            // Creates a new, empty region of interest.
            Roi roi = new Roi();
            // Creates a new RectangleContour using the given values.
            RectangleContour vaRect = new RectangleContour(630, 1313, 1073, 416);

            roi.Add(vaRect);
            // MatchPattern Grayscale
            string            dicpath        = System.Windows.Forms.Application.StartupPath;
            string            vaTemplateFile = dicpath + @"/ImageConfig/LeftRectPos.png";
            MatchingAlgorithm matchAlgorithm = MatchingAlgorithm.MatchGrayValuePyramid;

            float[]  minAngleVals          = { -10, 0 };
            float[]  maxAngleVals          = { 10, 0 };
            int[]    advancedOptionsItems  = { 100, 102, 106, 107, 108, 109, 114, 116, 117, 118, 111, 112, 113, 103, 104, 105 };
            double[] advancedOptionsValues = { 5, 10, 300, 0, 6, 1, 25, 0, 0, 0, 20, 10, 20, 1, 20, 0 };
            int      numberAdvOptions      = 16;
            int      vaNumMatchesRequested = 1;
            float    vaMinMatchScore       = 700;

            pmResults = IVA_MatchPattern(image, ivaData, vaTemplateFile, matchAlgorithm, minAngleVals, maxAngleVals, advancedOptionsItems, advancedOptionsValues, numberAdvOptions, vaNumMatchesRequested, vaMinMatchScore, roi, 2);
            roi.Dispose();

            if (pmResults.Count == 1)
            {
                // Set Coordinate System
                int             vaCoordSystemIndex    = 0;
                int             stepIndexOrigin       = 2;
                int             resultIndexOrigin     = 1;
                int             stepIndexAngle        = 2;
                int             resultIndexAngle      = 3;
                double          refSysOriginX         = 1160.5;
                double          refSysOriginY         = 1500.5;
                double          refSysAngle           = 0;
                AxisOrientation refSysAxisOrientation = AxisOrientation.Direct;
                int             vaCoordSystemType     = 3;
                IVA_CoordSys(vaCoordSystemIndex, stepIndexOrigin, resultIndexOrigin, stepIndexAngle, resultIndexAngle, refSysOriginX, refSysOriginY, refSysAngle, refSysAxisOrientation, vaCoordSystemType, ivaData);

                // Creates a new, empty region of interest.
                Roi roi2 = new Roi();
                // Creates a new RotatedRectangleContour using the given values.
                PointContour            vaCenter      = new PointContour(789, 965.5);
                RotatedRectangleContour vaRotatedRect = new RotatedRectangleContour(vaCenter, 72, 1119, 0);
                roi2.Add(vaRotatedRect);
                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex = 0;
                Algorithms.TransformRoi(roi2, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex], ivaData.MeasurementSystems[coordSystemIndex]));
                // Find Straight Edge
                EdgeOptions vaOptions = new EdgeOptions();
                vaOptions.ColumnProcessingMode = ColumnProcessingMode.Average;
                vaOptions.InterpolationType    = InterpolationMethod.Bilinear;
                vaOptions.KernelSize           = 9;
                vaOptions.MinimumThreshold     = Position.Instance.EdgeThreshold_Left;
                vaOptions.Polarity             = EdgePolaritySearchMode.Falling;
                vaOptions.Width = 5;
                StraightEdgeOptions vaStraightEdgeOptions = new StraightEdgeOptions();
                vaStraightEdgeOptions.AngleRange                = 45;
                vaStraightEdgeOptions.AngleTolerance            = 1;
                vaStraightEdgeOptions.HoughIterations           = 5;
                vaStraightEdgeOptions.MinimumCoverage           = 25;
                vaStraightEdgeOptions.MinimumSignalToNoiseRatio = 0;
                vaStraightEdgeOptions.NumberOfLines             = 1;
                vaStraightEdgeOptions.Orientation               = 0;
                Range vaRange = new Range(0, 1000);
                vaStraightEdgeOptions.ScoreRange = vaRange;
                vaStraightEdgeOptions.StepSize   = 20;
                vaStraightEdgeOptions.SearchMode = StraightEdgeSearchMode.FirstRakeEdges;

                vaEdgeReport = IVA_FindEdge(image, roi2, RakeDirection.LeftToRight, vaOptions, vaStraightEdgeOptions, ivaData, 4);

                roi2.Dispose();

                // Creates a new, empty region of interest.
                Roi roi3 = new Roi();
                // Creates a new RotatedRectangleContour using the given values.
                PointContour            vaCenter2      = new PointContour(1162.5, 263);
                RotatedRectangleContour vaRotatedRect2 = new RotatedRectangleContour(vaCenter2, 595, 78, 0);
                roi3.Add(vaRotatedRect2);
                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex2 = 0;
                Algorithms.TransformRoi(roi3, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex2], ivaData.MeasurementSystems[coordSystemIndex2]));
                // Find Straight Edge
                EdgeOptions vaOptions2 = new EdgeOptions();
                vaOptions2.ColumnProcessingMode = ColumnProcessingMode.Average;
                vaOptions2.InterpolationType    = InterpolationMethod.Bilinear;
                vaOptions2.KernelSize           = 9;
                vaOptions2.MinimumThreshold     = Position.Instance.EdgeThreshold_Left;
                vaOptions2.Polarity             = EdgePolaritySearchMode.Falling;
                vaOptions2.Width = 9;
                StraightEdgeOptions vaStraightEdgeOptions2 = new StraightEdgeOptions();
                vaStraightEdgeOptions2.AngleRange                = 45;
                vaStraightEdgeOptions2.AngleTolerance            = 1;
                vaStraightEdgeOptions2.HoughIterations           = 5;
                vaStraightEdgeOptions2.MinimumCoverage           = 25;
                vaStraightEdgeOptions2.MinimumSignalToNoiseRatio = 0;
                vaStraightEdgeOptions2.NumberOfLines             = 1;
                vaStraightEdgeOptions2.Orientation               = 0;
                Range vaRange2 = new Range(0, 1000);
                vaStraightEdgeOptions2.ScoreRange = vaRange2;
                vaStraightEdgeOptions2.StepSize   = 20;
                vaStraightEdgeOptions2.SearchMode = StraightEdgeSearchMode.FirstRakeEdges;

                vaEdgeReport2 = IVA_FindEdge(image, roi3, RakeDirection.TopToBottom, vaOptions2, vaStraightEdgeOptions2, ivaData, 5);

                roi3.Dispose();

                // Creates a new, empty region of interest.
                Roi roi4 = new Roi();
                // Creates a new RotatedRectangleContour using the given values.
                PointContour            vaCenter3      = new PointContour(1530, 968.5);
                RotatedRectangleContour vaRotatedRect3 = new RotatedRectangleContour(vaCenter3, 78, 1137, 0);
                roi4.Add(vaRotatedRect3);
                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex3 = 0;
                Algorithms.TransformRoi(roi4, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex3], ivaData.MeasurementSystems[coordSystemIndex3]));
                // Find Straight Edge
                EdgeOptions vaOptions3 = new EdgeOptions();
                vaOptions3.ColumnProcessingMode = ColumnProcessingMode.Average;
                vaOptions3.InterpolationType    = InterpolationMethod.Bilinear;
                vaOptions3.KernelSize           = 9;
                vaOptions3.MinimumThreshold     = Position.Instance.EdgeThreshold_Left;
                vaOptions3.Polarity             = EdgePolaritySearchMode.Falling;
                vaOptions3.Width = 9;
                StraightEdgeOptions vaStraightEdgeOptions3 = new StraightEdgeOptions();
                vaStraightEdgeOptions3.AngleRange                = 45;
                vaStraightEdgeOptions3.AngleTolerance            = 1;
                vaStraightEdgeOptions3.HoughIterations           = 5;
                vaStraightEdgeOptions3.MinimumCoverage           = 25;
                vaStraightEdgeOptions3.MinimumSignalToNoiseRatio = 0;
                vaStraightEdgeOptions3.NumberOfLines             = 1;
                vaStraightEdgeOptions3.Orientation               = 0;
                Range vaRange3 = new Range(0, 1000);
                vaStraightEdgeOptions3.ScoreRange = vaRange3;
                vaStraightEdgeOptions3.StepSize   = 20;
                vaStraightEdgeOptions3.SearchMode = StraightEdgeSearchMode.FirstRakeEdges;

                vaEdgeReport3 = IVA_FindEdge(image, roi4, RakeDirection.RightToLeft, vaOptions3, vaStraightEdgeOptions3, ivaData, 6);

                roi4.Dispose();

                // Creates a new, empty region of interest.
                Roi roi5 = new Roi();
                // Creates a new RotatedRectangleContour using the given values.
                PointContour            vaCenter4      = new PointContour(1171.5, 1691.5);
                RotatedRectangleContour vaRotatedRect4 = new RotatedRectangleContour(vaCenter4, 543, 75, 0);
                roi5.Add(vaRotatedRect4);
                // Reposition the region of interest based on the coordinate system.
                int coordSystemIndex4 = 0;
                Algorithms.TransformRoi(roi5, new CoordinateTransform(ivaData.baseCoordinateSystems[coordSystemIndex4], ivaData.MeasurementSystems[coordSystemIndex4]));
                // Find Straight Edge
                EdgeOptions vaOptions4 = new EdgeOptions();
                vaOptions4.ColumnProcessingMode = ColumnProcessingMode.Average;
                vaOptions4.InterpolationType    = InterpolationMethod.Bilinear;
                vaOptions4.KernelSize           = 11;
                vaOptions4.MinimumThreshold     = Position.Instance.EdgeThreshold_Left;
                vaOptions4.Polarity             = EdgePolaritySearchMode.Falling;
                vaOptions4.Width = 9;
                StraightEdgeOptions vaStraightEdgeOptions4 = new StraightEdgeOptions();
                vaStraightEdgeOptions4.AngleRange                = 45;
                vaStraightEdgeOptions4.AngleTolerance            = 1;
                vaStraightEdgeOptions4.HoughIterations           = 5;
                vaStraightEdgeOptions4.MinimumCoverage           = 25;
                vaStraightEdgeOptions4.MinimumSignalToNoiseRatio = 0;
                vaStraightEdgeOptions4.NumberOfLines             = 1;
                vaStraightEdgeOptions4.Orientation               = 0;
                Range vaRange4 = new Range(0, 1000);
                vaStraightEdgeOptions4.ScoreRange = vaRange4;
                vaStraightEdgeOptions4.StepSize   = 20;
                vaStraightEdgeOptions4.SearchMode = StraightEdgeSearchMode.FirstRakeEdges;

                vaEdgeReport4 = IVA_FindEdge(image, roi5, RakeDirection.BottomToTop, vaOptions4, vaStraightEdgeOptions4, ivaData, 7);

                roi5.Dispose();

                // Caliper
                // Delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, 8);

                // Computes the vaIntersection point between two lines.
                Collection <PointContour> vaIntersection = IVA_GetIntersection(image, ivaData, 8, 5, 0, 5, 2, 6, 0, 6, 2);
                caliperIntersection = vaIntersection[0];

                // Caliper
                // Delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, 9);

                // Computes the vaIntersection point between two lines.
                Collection <PointContour> vaIntersection2 = IVA_GetIntersection(image, ivaData, 9, 6, 0, 6, 2, 7, 0, 7, 2);
                caliperIntersection2 = vaIntersection2[0];

                // Caliper
                // Delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, 10);

                // Computes the vaIntersection point between two lines.
                Collection <PointContour> vaIntersection3 = IVA_GetIntersection(image, ivaData, 10, 4, 0, 4, 2, 7, 0, 7, 2);
                caliperIntersection3 = vaIntersection3[0];

                // Caliper
                // Delete all the results of this step (from a previous iteration)
                Functions.IVA_DisposeStepResults(ivaData, 11);

                // Computes the vaIntersection point between two lines.
                Collection <PointContour> vaIntersection4 = IVA_GetIntersection(image, ivaData, 11, 4, 0, 4, 2, 5, 0, 5, 2);
                caliperIntersection4 = vaIntersection4[0];

                // Compute the deviation of each corner from its specified position.
                string str1 = Math.Round((-caliperIntersection.X - pointoffset.X + Position.Instance.SpecLeftPos_X[0]) / 96, 3).ToString() + ";" +
                              Math.Round((-caliperIntersection.Y - pointoffset.Y + Position.Instance.SpecLeftPos_Y[0]) / 96, 3).ToString() + ";";
                string str2 = Math.Round((-caliperIntersection2.X - pointoffset.X + Position.Instance.SpecLeftPos_X[1]) / 96, 3).ToString() + ";" +
                              Math.Round((-caliperIntersection2.Y - pointoffset.Y + Position.Instance.SpecLeftPos_Y[1]) / 96, 3).ToString() + ";";
                string str3 = Math.Round((-caliperIntersection3.X - pointoffset.X + Position.Instance.SpecLeftPos_X[2]) / 96, 3).ToString() + ";" +
                              Math.Round((-caliperIntersection3.Y - pointoffset.Y + Position.Instance.SpecLeftPos_Y[2]) / 96, 3).ToString() + ";";
                string str4 = Math.Round((-caliperIntersection4.X - pointoffset.X + Position.Instance.SpecLeftPos_X[3]) / 96, 3).ToString() + ";" +
                              Math.Round((-caliperIntersection4.Y - pointoffset.Y + Position.Instance.SpecLeftPos_Y[3]) / 96, 3).ToString();
                LeftCali       = str1 + str2 + str3 + str4;
                LeftCaliArrary = new string[] { str1, str2, str3, str4 };
            }
            else
            {
                LeftCali       = "0;0;0;0;0;0;0;0";
                LeftCaliArrary = new string[] { "0;0", "0;0", "0;0", "0;0" };
            }

            // Dispose the IVA_Data structure.
            ivaData.Dispose();

            // Return the palette type of the final image.
            return(PaletteType.Gray);
        }
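
The four Find Straight Edge blocks in RectLeftPos differ only in the ROI rectangle, rake direction, kernel size/width, and result step index. A sketch of a helper that could absorb that duplication, assuming (as the vaEdgeReport fields suggest) that IVA_FindEdge returns a StraightEdgeReport; the helper name is illustrative and not part of the original code:

        private static StraightEdgeReport FindEdgeInRotatedRect(VisionImage image,
                                                                IVA_Data ivaData,
                                                                PointContour center,
                                                                double width,
                                                                double height,
                                                                RakeDirection direction,
                                                                int stepIndex)
        {
            using (Roi roi = new Roi())
            {
                roi.Add(new RotatedRectangleContour(center, width, height, 0));
                // Reposition the region of interest based on coordinate system 0, as in the original blocks.
                Algorithms.TransformRoi(roi, new CoordinateTransform(ivaData.baseCoordinateSystems[0],
                                                                     ivaData.MeasurementSystems[0]));

                EdgeOptions edgeOptions = new EdgeOptions();
                edgeOptions.ColumnProcessingMode = ColumnProcessingMode.Average;
                edgeOptions.InterpolationType    = InterpolationMethod.Bilinear;
                edgeOptions.KernelSize           = 9;     // the bottom edge in the original uses 11
                edgeOptions.MinimumThreshold     = Position.Instance.EdgeThreshold_Left;
                edgeOptions.Polarity             = EdgePolaritySearchMode.Falling;
                edgeOptions.Width                = 9;     // the left edge in the original uses 5

                StraightEdgeOptions straightEdgeOptions = new StraightEdgeOptions();
                straightEdgeOptions.AngleRange                = 45;
                straightEdgeOptions.AngleTolerance            = 1;
                straightEdgeOptions.HoughIterations           = 5;
                straightEdgeOptions.MinimumCoverage           = 25;
                straightEdgeOptions.MinimumSignalToNoiseRatio = 0;
                straightEdgeOptions.NumberOfLines             = 1;
                straightEdgeOptions.Orientation               = 0;
                straightEdgeOptions.ScoreRange                = new Range(0, 1000);
                straightEdgeOptions.StepSize                  = 20;
                straightEdgeOptions.SearchMode                = StraightEdgeSearchMode.FirstRakeEdges;

                return IVA_FindEdge(image, roi, direction, edgeOptions, straightEdgeOptions, ivaData, stepIndex);
            }
        }

Each of the four original blocks would then reduce to a single call, for example: vaEdgeReport = FindEdgeInRotatedRect(image, ivaData, new PointContour(789, 965.5), 72, 1119, RakeDirection.LeftToRight, 4);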