/// <summary>
/// Caliper: Distance. Computes the distance between two previously measured points
/// (looked up from the IVA data structure) and logs it as a step result.
/// If the image carries calibration information, the real-world distance is logged
/// and appended to the returned collection as well.
/// </summary>
/// <param name="image">Inspection image (checked for calibration info).</param>
/// <param name="ivaData">Step/result data structure to read points from and log into.</param>
/// <param name="stepIndex">Index of the step receiving the results.</param>
/// <param name="stepIndex1">Step index of the first point.</param>
/// <param name="resultIndex1">Result index of the first point.</param>
/// <param name="stepIndex2">Step index of the second point.</param>
/// <param name="resultIndex2">Result index of the second point.</param>
/// <returns>Pixel distance at index 0; calibrated distance at index 1 when available.</returns>
private static Collection <double> IVA_GetDistance(VisionImage image, IVA_Data ivaData, int stepIndex, int stepIndex1, int resultIndex1, int stepIndex2, int resultIndex2)
{
    // Look up the two endpoints from earlier step results.
    Collection <PointContour> endpoints = new Collection <PointContour>
    {
        Functions.IVA_GetPoint(ivaData, stepIndex1, resultIndex1),
        Functions.IVA_GetPoint(ivaData, stepIndex2, resultIndex2)
    };

    // Pixel-space distance between the two points.
    Collection <double> measuredDistances = Algorithms.FindPointDistances(endpoints);
    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Distance (Pix.)", measuredDistances[0]));

    // With calibration info present, also compute and log the real-world distance.
    bool isCalibrated = (image.InfoTypes & InfoTypes.Calibration) != 0;
    if (isCalibrated)
    {
        CoordinatesReport worldReport = Algorithms.ConvertPixelToRealWorldCoordinates(image, endpoints);
        Collection <double> worldDistances = Algorithms.FindPointDistances(worldReport.Points);
        ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Distance (Calibrated)", worldDistances[0]));
        measuredDistances.Add(worldDistances[0]);
    }

    return measuredDistances;
}
/// <summary>
/// Simple Edge: finds prominent edges along the pixel profile of the given ROI
/// and logs each edge position (pixel, plus real-world when calibrated) as step results.
/// Color images are reduced to their luminance plane before profiling.
/// </summary>
/// <param name="image">Source image (color or monochrome).</param>
/// <param name="roi">Region of interest whose contour profile is scanned.</param>
/// <param name="simpleEdgeOptions">Edge-detection parameters.</param>
/// <param name="ivaData">Step/result data structure to log into.</param>
/// <param name="stepIndex">Index of the step receiving the results.</param>
/// <returns>The detected edge points in pixel coordinates.</returns>
private static Collection <PointContour> IVA_SimpleEdge(VisionImage image, Roi roi, SimpleEdgeOptions simpleEdgeOptions, IVA_Data ivaData, int stepIndex)
{
    // Work on an 8-bit monochrome copy; the profile/edge algorithms need a single plane.
    using (VisionImage grayImage = new VisionImage(ImageType.U8, 7))
    {
        bool isColor = image.Type == ImageType.Rgb32 || image.Type == ImageType.Hsl32;
        if (isColor)
        {
            // Keep only the luminance plane of the color image.
            Algorithms.ExtractColorPlanes(image, ColorMode.Hsl, null, null, grayImage);
        }
        else
        {
            Algorithms.Copy(image, grayImage);
        }

        // Pixel profile along each contour of the ROI, then edge detection on it.
        RoiProfileReport profileReport = Algorithms.RoiProfile(grayImage, roi);
        Collection <PointContour> detectedEdges = Algorithms.SimpleEdge(grayImage, profileReport.Pixels, simpleEdgeOptions);

        // Clear results left over from a previous iteration before logging fresh ones.
        Functions.IVA_DisposeStepResults(ivaData, stepIndex);
        ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of Edges", detectedEdges.Count));

        bool isCalibrated = (image.InfoTypes & InfoTypes.Calibration) != 0;
        for (int edgeIndex = 0; edgeIndex < detectedEdges.Count; ++edgeIndex)
        {
            PointContour edge = detectedEdges[edgeIndex];
            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Edge {0}.X Position (Pix.)", edgeIndex + 1), edge.X));
            ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Edge {0}.Y Position (Pix.)", edgeIndex + 1), edge.Y));

            // When the image is calibrated, also log the real-world coordinates.
            if (isCalibrated)
            {
                Collection <PointContour> pixelPoint = new Collection <PointContour>(new PointContour[] { new PointContour(edge.X, edge.Y) });
                CoordinatesReport worldReport = Algorithms.ConvertPixelToRealWorldCoordinates(image, pixelPoint);
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Edge {0}.X Position (World)", edgeIndex + 1), worldReport.Points[0].X));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Edge {0}.Y Position (World)", edgeIndex + 1), worldReport.Points[0].Y));
            }
        }

        return detectedEdges;
    }
}
/// <summary>
/// Caliper: Lines Intersection. Computes the intersection point of the line through
/// (point1, point2) and the line through (point3, point4), where all four points are
/// looked up from earlier step results, then logs the intersection as step results.
/// If the image is calibrated, the real-world position is logged and appended to
/// the returned collection at index 1.
/// </summary>
/// <param name="image">Inspection image (checked for calibration info).</param>
/// <param name="ivaData">Step/result data structure to read points from and log into.</param>
/// <param name="stepIndex">Index of the step receiving the results.</param>
/// <returns>Intersection point in pixels at index 0; calibrated point at index 1 when available.</returns>
private static Collection <PointContour> IVA_GetIntersection(VisionImage image, IVA_Data ivaData, int stepIndex, int stepIndex1, int resultIndex1, int stepIndex2, int resultIndex2, int stepIndex3, int resultIndex3, int stepIndex4, int resultIndex4)
{
    // Caliper: Lines Intersection
    // Computes the intersection point between two lines.
    PointContour point1 = Functions.IVA_GetPoint(ivaData, stepIndex1, resultIndex1);
    PointContour point2 = Functions.IVA_GetPoint(ivaData, stepIndex2, resultIndex2);
    PointContour point3 = Functions.IVA_GetPoint(ivaData, stepIndex3, resultIndex3);
    PointContour point4 = Functions.IVA_GetPoint(ivaData, stepIndex4, resultIndex4);

    LineContour line1 = new LineContour(point1, point2);
    LineContour line2 = new LineContour(point3, point4);

    Collection <PointContour> intersectionPoint = new Collection <PointContour>();
    intersectionPoint.Add(Algorithms.FindIntersectionPoint(line1, line2));

    // Store the results in the data structure.
    // BUG FIX: the Y results were previously logged under an "X Position" label
    // (copy-paste error); they are now correctly labeled "Y Position".
    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Intersection Point X Position (Pix.)", intersectionPoint[0].X));
    ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Intersection Point Y Position (Pix.)", intersectionPoint[0].Y));

    // If the image is calibrated, compute the real world position.
    if ((image.InfoTypes & InfoTypes.Calibration) != 0)
    {
        CoordinatesReport realWorldPosition = Algorithms.ConvertPixelToRealWorldCoordinates(image, intersectionPoint);
        ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Intersection Point X Position (Calibrated)", realWorldPosition.Points[0].X));
        ivaData.stepResults[stepIndex].results.Add(new IVA_Result("Intersection Point Y Position (Calibrated)", realWorldPosition.Points[0].Y));
        intersectionPoint.Add(realWorldPosition.Points[0]);
    }

    return intersectionPoint;
}
/// <summary>
/// Pattern Matching: searches the image for occurrences of a template read from disk
/// and logs each match's position (pixel, plus real-world when calibrated), rotation
/// angle, and score as step results.
/// </summary>
/// <param name="image">Image to search.</param>
/// <param name="ivaData">Step/result data structure to log into.</param>
/// <param name="templatePath">Path of the template image file (Vision file format).</param>
/// <param name="matchMode">Shift-invariant or rotation-invariant matching.</param>
/// <param name="subpixel">Whether to enable subpixel accuracy.</param>
/// <param name="angleRangeMin">Lower bounds of the two rotation angle ranges (degrees).</param>
/// <param name="angleRangeMax">Upper bounds of the two rotation angle ranges (degrees).</param>
/// <param name="matchesRequested">Maximum number of matches to return.</param>
/// <param name="score">Minimum match score (0-1000).</param>
/// <param name="roi">Search region, or null for the whole image.</param>
/// <param name="matchOffset_x">X offset applied to each match position (rotated with the match).</param>
/// <param name="matchOffset_y">Y offset applied to each match position (rotated with the match).</param>
/// <param name="stepIndex">Index of the step receiving the results.</param>
/// <returns>The raw pattern-matching results.</returns>
private Collection <PatternMatch> IVA_MatchPattern(VisionImage image, IVA_Data ivaData, string templatePath, MatchMode matchMode, bool subpixel, int[] angleRangeMin, int[] angleRangeMax, int matchesRequested, double score, Roi roi, double matchOffset_x, double matchOffset_y, int stepIndex)
{
    using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
    {
        // Read the image template.
        imageTemplate.ReadVisionFile(templatePath);

        // If the image is calibrated, we also log the calibrated position (x and y)
        // -> 6 results per match instead of 4.
        bool isCalibrated = (image.InfoTypes & InfoTypes.Calibration) != 0;

        // Fill in the Pattern Matching options.
        MatchPatternOptions matchPatternOptions = new MatchPatternOptions(matchMode, matchesRequested);
        matchPatternOptions.MinimumContrast = 0;
        matchPatternOptions.SubpixelAccuracy = subpixel;
        // Two rotation angle ranges, as produced by the Vision Assistant step.
        for (int i = 0; i < 2; ++i)
        {
            matchPatternOptions.RotationAngleRanges.Add(new Range(angleRangeMin[i], angleRangeMax[i]));
        }
        matchPatternOptions.MinimumMatchScore = score;

        // Searches for areas in the image that match the given pattern.
        Collection <PatternMatch> patternMatchingResults = Algorithms.MatchPattern2(image, imageTemplate, matchPatternOptions, roi);

        // //////////////////////////////////////////
        // Store the results in the data structure.
        // //////////////////////////////////////////
        // First, delete all the results of this step (from a previous iteration).
        Functions.IVA_DisposeStepResults(ivaData, stepIndex);

        if (patternMatchingResults.Count > 0)
        {
            ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of objects", patternMatchingResults.Count));

            PointContour matchLocation = new PointContour();
            for (int i = 0; i < patternMatchingResults.Count; ++i)
            {
                // Adjust the match location using the specified offsets, rotated by the
                // match angle. Compute the rotated offset once per match (it was previously
                // recomputed for each coordinate).
                PointContour rotatedOffset = Functions.IVA_ComputePMOffset(matchOffset_x, matchOffset_y, patternMatchingResults[i].Rotation);
                matchLocation.X = patternMatchingResults[i].Position.X + rotatedOffset.X;
                matchLocation.Y = patternMatchingResults[i].Position.Y + rotatedOffset.Y;

                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", i + 1), matchLocation.X));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", i + 1), matchLocation.Y));

                // If the image is calibrated, convert the pixel values to real world coordinates.
                if (isCalibrated)
                {
                    CoordinatesReport realWorldPosition = Algorithms.ConvertPixelToRealWorldCoordinates(image, new Collection <PointContour>(new PointContour[] { matchLocation }));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", i + 1), realWorldPosition.Points[0].X));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", i + 1), realWorldPosition.Points[0].Y));
                }

                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", i + 1), patternMatchingResults[i].Rotation));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Score", i + 1), patternMatchingResults[i].Score));
            }
        }

        return patternMatchingResults;
    }
}