/// <summary>
/// Runs the vision script on <paramref name="image"/>: extracts the green color
/// plane (leaving a single-plane grayscale image), then performs edge-based
/// geometric pattern matching inside a fixed rectangular ROI. The matches are
/// stored in the <c>gpm2Results</c> field (declared elsewhere in this class)
/// and logged into the IVA step-results structure.
/// </summary>
/// <param name="image">Image to process; modified in place.</param>
/// <returns>Palette type for displaying the final (grayscale) image.</returns>
public static PaletteType ProcessImage(VisionImage image)
{
    // IVA_Data passes step results and coordinate systems (3 steps, 0 systems).
    // Both disposables are now wrapped in using blocks so they are released
    // even if matching throws (the original leaked them on exception).
    // Dispose order (roi first, then ivaData) matches the original code.
    using (IVA_Data ivaData = new IVA_Data(3, 0))
    using (Roi roi = new Roi())
    {
        // Extract the green color plane and copy it back into the main image.
        using (VisionImage plane = new VisionImage(ImageType.U8, 7))
        {
            Algorithms.ExtractColorPlanes(image, ColorMode.Rgb, null, plane, null);
            Algorithms.Copy(plane, image);
        }

        // Search region: rectangle at (20, 20), 2552 x 1904 pixels.
        roi.Add(new RectangleContour(20, 20, 2552, 1904));

        // Template file. (The original wrapped this constant in a redundant
        // interpolated string; the runtime value is unchanged.)
        string vaTemplateFile = @"./VisionModel/Polygon_20.5M/Mode.png";

        // Curve-extraction settings for edge-based geometric matching.
        CurveOptions vaCurveOptions = new CurveOptions();
        vaCurveOptions.ColumnStepSize = 15;
        vaCurveOptions.ExtractionMode = ExtractionMode.NormalImage;
        vaCurveOptions.FilterSize = EdgeFilterSize.Normal;
        vaCurveOptions.MaximumEndPointGap = 10;
        vaCurveOptions.MinimumLength = 20;
        vaCurveOptions.RowStepSize = 15;
        vaCurveOptions.Threshold = 145;

        // Matcher settings: rotation-invariant, single best match, score >= 800.
        // Ranges below inline the values previously held in the opaque
        // vaRangesMin/vaRangesMax arrays (identical values, identical order).
        MatchGeometricPatternEdgeBasedOptions matchGPMOptions = new MatchGeometricPatternEdgeBasedOptions();
        matchGPMOptions.Advanced.ContrastMode = ContrastMode.Original;
        matchGPMOptions.Advanced.MatchStrategy = GeometricMatchingSearchStrategy.Balanced;
        matchGPMOptions.MinimumMatchScore = 800;
        matchGPMOptions.Mode = GeometricMatchModes.RotationInvariant;
        matchGPMOptions.NumberOfMatchesRequested = 1;
        matchGPMOptions.OcclusionRange = new Range(0, 50);           // allow up to 50% occlusion
        matchGPMOptions.RotationAngleRanges.Add(new Range(-20, 20)); // primary angle range
        matchGPMOptions.RotationAngleRanges.Add(new Range(0, 0));    // secondary angle range (degenerate)
        matchGPMOptions.ScaleRange = new Range(50, 200);             // scale 50%..200%
        matchGPMOptions.SubpixelAccuracy = true;

        // Step 2: run the match, log results into ivaData, and keep the
        // matches in the gpm2Results field for later consumers.
        gpm2Results = IVA_MatchGeometricPattern2(image, vaTemplateFile, vaCurveOptions,
            matchGPMOptions, ivaData, 2, roi);
    }

    // Palette type of the final image.
    return PaletteType.Gray;
}
/// <summary>
/// Lazily initializes the shared match and learn option objects used for
/// edge-based geometric pattern matching. Safe to call repeatedly: options
/// that are already set are left untouched.
/// </summary>
public static void Init()
{
    if (MatchOptions == null)
    {
        // Contrast mode: Original / Reversed / Both.
        // Search strategy: Balanced / Conservative / Aggressive.
        MatchOptions = new MatchGeometricPatternEdgeBasedOptions
        {
            MinimumMatchScore = 500,
            // Invariant to rotation, scale and occlusion:
            //   rotation range -20..20, scale 90..110, occlusion 0..25.
            Mode = GeometricMatchModes.RotationInvariant
                 | GeometricMatchModes.ScaleInvariant
                 | GeometricMatchModes.OcclusionInvariant,
            NumberOfMatchesRequested = 1,
            ScaleRange = new Range(90, 110),
            OcclusionRange = new Range(0, 25),
            // Subpixel accuracy.
            SubpixelAccuracy = true
        };
        MatchOptions.Advanced.ContrastMode = ContrastMode.Original;
        MatchOptions.Advanced.MatchStrategy = GeometricMatchingSearchStrategy.Balanced;
        MatchOptions.RotationAngleRanges.Add(new Range(-20, 20));
    }

    if (LearnOptions == null)
    {
        LearnOptions = new LearnGeometricPatternEdgeBasedAdvancedOptions
        {
            // Image sampling factor.
            ImageSamplingFactor = 2,
            // Rotation-angle range the template is learned over.
            RotationAngleRange = new Range(0, 360),
            // Scale range the template is learned over.
            ScaleRange = new Range(90, 110)
        };
    }
}
/// <summary>
/// Geometric matching (edge based): loads the template image from
/// <paramref name="templatePath"/>, runs
/// <see cref="Algorithms.MatchGeometricPatternEdgeBased"/> against
/// <paramref name="image"/> within <paramref name="roi"/>, and logs every
/// match's position, angle, scale and scores into
/// <paramref name="ivaData"/> at <paramref name="stepIndex"/>.
/// </summary>
/// <returns>The collection of matches found (possibly empty).</returns>
private static Collection<GeometricEdgeBasedPatternMatch> IVA_MatchGeometricPattern2(VisionImage image, string templatePath, CurveOptions curveOptions, MatchGeometricPatternEdgeBasedOptions matchOptions, IVA_Data ivaData, int stepIndex, Roi roi)
{
    // Creates the image template (disposed when matching completes).
    using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
    {
        // Read the image template from disk.
        imageTemplate.ReadVisionFile(templatePath);

        Collection<GeometricEdgeBasedPatternMatch> gpmResults =
            Algorithms.MatchGeometricPatternEdgeBased(image, imageTemplate, curveOptions, matchOptions, roi);

        // Delete all results of this step from a previous iteration before
        // logging the new ones.
        Functions.IVA_DisposeStepResults(ivaData, stepIndex);

        // Hoisted loop invariant: the original re-resolved
        // ivaData.stepResults[stepIndex].results on every Add call.
        var results = ivaData.stepResults[stepIndex].results;
        results.Add(new IVA_Result("# Matches", gpmResults.Count));

        for (int i = 0; i < gpmResults.Count; ++i)
        {
            int matchNumber = i + 1;
            GeometricEdgeBasedPatternMatch match = gpmResults[i];

            results.Add(new IVA_Result($"Match {matchNumber}.X Position (Pix.)", match.Position.X));
            results.Add(new IVA_Result($"Match {matchNumber}.Y Position (Pix.)", match.Position.Y));

            // If the image is calibrated, also log the calibrated (world) results.
            if ((image.InfoTypes & InfoTypes.Calibration) != 0)
            {
                results.Add(new IVA_Result($"Match {matchNumber}.X Position (World)", match.CalibratedPosition.X));
                results.Add(new IVA_Result($"Match {matchNumber}.Y Position (World)", match.CalibratedPosition.Y));
            }

            results.Add(new IVA_Result($"Match {matchNumber}.Angle (degrees)", match.Rotation));
            results.Add(new IVA_Result($"Match {matchNumber}.Scale", match.Scale));
            results.Add(new IVA_Result($"Match {matchNumber}.Score", match.Score));
            results.Add(new IVA_Result($"Match {matchNumber}.Occlusion", match.Occlusion));
            results.Add(new IVA_Result($"Match {matchNumber}.Template Target Curve Score", match.TemplateMatchCurveScore));
            results.Add(new IVA_Result($"Match {matchNumber}.Correlation Score", match.CorrelationScore));
        }

        return gpmResults;
    }
}