/// <summary>
/// Crop out the central region of the screen (convenience function)
/// </summary>
/// <param name="diameter">Width and height, in pixels, of the square region to crop</param>
/// <returns>The central square region of the screen as a Mat</returns>
public Mat ScreenCentre(int diameter)
{
    Bitmap s = bitmap;
    Rectangle screenCentre = new Rectangle(
        (int)((s.Width - diameter) * 0.5),
        (int)((s.Height - diameter) * 0.5),
        diameter,
        diameter);
    Bitmap image = CompassSensor.Crop(s, screenCentre);
    return BitmapConverter.ToMat(image);
}
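
// CompassSensor.Crop is defined elsewhere in the project. As a point of
// reference only, a minimal stand-in for a rectangle crop of a
// System.Drawing.Bitmap could look like this (an assumed equivalent, not the
// project's actual implementation):
private static Bitmap CropSketch(Bitmap source, Rectangle region)
{
    // Bitmap.Clone copies just the requested region without touching the source.
    return source.Clone(region, source.PixelFormat);
}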
public static void MatchCorona()
{
    Bitmap screen = new Bitmap("Screenshot_0028.bmp");
    Bitmap cropped = CompassSensor.Crop(screen,
        screen.Width * 1 / 3, screen.Height * 1 / 3,
        screen.Width * 2 / 3, screen.Height * 2 / 3);
    Mat screenwhole = BitmapConverter.ToMat(cropped);

    // erase the vivid areas, otherwise the blur subtraction turns yellow near red to green
    Mat brightHSV = screenwhole.CvtColor(ColorConversionCodes.BGR2HSV);
    Mat darkAreasMask = brightHSV.InRange(
        InputArray.Create(new int[] { 0, 0, 0 }),
        InputArray.Create(new int[] { 180, 255, 180 }));
    Mat darkAreas = new Mat();
    screenwhole.CopyTo(darkAreas, darkAreasMask);

    Mat screenblur = darkAreas - darkAreas.Blur(new OpenCvSharp.Size(10, 10));
    Window w3 = new Window(screenblur);
    //screenblur.SaveImage("sharplines.png");
    Mat sourceHSV = screenblur.CvtColor(ColorConversionCodes.BGR2HSV);

    /* Paint.NET uses HSV ranges [0..360], [0..100], [0..100].
     * OpenCV uses H: 0 - 180, S: 0 - 255, V: 0 - 255.
     * Paint.NET colors sampled from the corona edge:
     *   73 100 18   brightest part of green edge
     *   72  98  9   very dark green
     * suggested range [70..180], [80..100], [8..100] (Paint.NET)
     * suggested range [35..90], [204..255], [20..255] (OpenCV)
     */
    Mat mask = sourceHSV.InRange(
        InputArray.Create(new int[] { 35, 204, 20 }),
        InputArray.Create(new int[] { 90, 255, 255 }));
    Mat sourceHSVFiltered = new Mat();
    sourceHSV.CopyTo(sourceHSVFiltered, mask);
    Window w5 = new Window("yellowfilter", sourceHSVFiltered.CvtColor(ColorConversionCodes.HSV2BGR));

    Mat sourceGrey = sourceHSVFiltered.Split()[2].InRange(32, 256); // Value channel is pretty good as a greyscale conversion
    Window w6 = new Window("yellowFilterValue", sourceGrey);

    LineSegmentPoint[] result = sourceGrey.HoughLinesP(1, Math.PI / 180, 5, 10, 2);
    List<Point2d> points = new List<Point2d>();
    foreach (var line in result)
    {
        points.Add(line.P1);
        points.Add(line.P2);
        darkAreas.Line(line.P1, line.P2, new Scalar(255, 0, 255));
    }

    CircleSegment c = CruiseSensor.ComputeCircle(points);
    darkAreas.Circle(c.Center, (int)c.Radius, new Scalar(255, 255, 0));
    Window w9 = new Window("final", darkAreas);
}
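
// The Paint.NET -> OpenCV HSV conversion described in the comment above is a
// straight rescaling of each channel. A hypothetical helper (not part of the
// project) showing the same arithmetic:
private static int[] PaintNetHsvToOpenCv(double h, double s, double v)
{
    // Paint.NET: H in [0..360], S in [0..100], V in [0..100]
    // OpenCV:    H in [0..180], S in [0..255], V in [0..255]
    return new int[]
    {
        (int)Math.Round(h * 180.0 / 360.0),
        (int)Math.Round(s * 255.0 / 100.0),
        (int)Math.Round(v * 255.0 / 100.0),
    };
}
// e.g. PaintNetHsvToOpenCv(70, 80, 8) gives { 35, 204, 20 }, the lower bound
// passed to InRange in MatchCorona above.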
public bool MatchSafDisengag()
{
    // MatchTemplate doesn't allow for scaling / rotation. Allow more leeway by reducing resolution?
    Bitmap image = CompassSensor.Crop(screen.bitmap, new Rectangle(800, 650, 300, 200));
    Mat source = BitmapConverter.ToMat(image);
    Mat blues = source.Split()[0];
    Mat clean = blues.EmptyClone();
    clean.SetTo(0); // make sure the matrix is blank
    blues.CopyTo(clean, blues.InRange(250, 255));
    Mat matches = clean.MatchTemplate(templatesaf, TemplateMatchModes.CCoeffNormed);
    double minVal, maxVal;
    matches.MinMaxLoc(out minVal, out maxVal);
    return maxVal > 0.4; // see experiments, MatchSafDisengag2
}
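
// One way to act on the "reduce resolution" idea in the comment above: shrink
// both the source and the template by the same factor before matching, so small
// scale differences between the capture and the template matter less. A minimal
// sketch under that assumption (not what MatchSafDisengag currently does):
private static double MatchAtHalfResolution(Mat clean, Mat template)
{
    // Downscaling both images preserves their relative scale while blurring
    // away pixel-level differences before the correlation.
    Mat smallSource = clean.Resize(new OpenCvSharp.Size(), 0.5, 0.5, InterpolationFlags.Area);
    Mat smallTemplate = template.Resize(new OpenCvSharp.Size(), 0.5, 0.5, InterpolationFlags.Area);
    Mat matches = smallSource.MatchTemplate(smallTemplate, TemplateMatchModes.CCoeffNormed);
    double minVal, maxVal;
    matches.MinMaxLoc(out minVal, out maxVal);
    return maxVal;
}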
public static void MatchSafDisengag2()
{
    Bitmap screen = new Bitmap("Screenshot_0022.bmp");
    Bitmap image = CompassSensor.Crop(screen, new Rectangle(800, 700, 350, 150));
    Mat source = BitmapConverter.ToMat(image);
    Mat blues = source.Split()[0];
    Mat clean = blues.EmptyClone();
    clean.SetTo(0); // make sure the matrix is blank
    blues.CopyTo(clean, blues.InRange(250, 255));
    Mat matches = clean.MatchTemplate(new Mat("res3/safdisengag250.png", ImreadModes.GrayScale), TemplateMatchModes.CCoeffNormed);
    clean.ImWrite("safdisengag250.png");
    double minVal, maxVal;
    matches.MinMaxLoc(out minVal, out maxVal);
    Window w2 = new Window(clean);
    Window w3 = new Window(matches);
    Window w5 = new Window(matches.InRange(0.4, 1));
}
/// <summary>
/// See if the IMPACT warning is being displayed
/// </summary>
public bool MatchImpact()
{
    Bitmap cropped = CompassSensor.Crop(screen.bitmap, screen.bitmap.Width - 400, 0, screen.bitmap.Width - 100, 300);
    Mat screenarea = BitmapConverter.ToMat(cropped);
    Mat red = IsolateRed(screenarea);
    Mat template = new Mat("res3/impacttemplate.png", ImreadModes.GrayScale);
    Mat result = new Mat(red.Size(), red.Type());
    Cv2.MatchTemplate(red, template, result, TemplateMatchModes.CCoeffNormed);
    double minVal, maxVal;
    OpenCvSharp.Point minLoc, maxLoc;
    result.MinMaxLoc(out minVal, out maxVal, out minLoc, out maxLoc);
    if (maxVal > 0.4)
    {
        debugWindow.Image = CompassSensor.Crop(BitmapConverter.ToBitmap(red),
            maxLoc.X, maxLoc.Y, maxLoc.X + template.Width, maxLoc.Y + template.Height);
        return true;
    }
    return false;
}
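
// IsolateRed is defined elsewhere in the project. A plausible sketch of such a
// helper (an assumption, not the project's code) is to keep only strongly red
// pixels and return the masked red channel as a single-channel image:
private static Mat IsolateRedSketch(Mat screenarea)
{
    // Keep pixels whose red channel is high while blue and green stay low (BGR order).
    Mat mask = screenarea.InRange(new Scalar(0, 0, 200), new Scalar(100, 100, 255));
    Mat[] channels = screenarea.Split();
    Mat redOnly = channels[2].EmptyClone();
    redOnly.SetTo(0); // start from a blank single-channel image
    channels[2].CopyTo(redOnly, mask);
    return redOnly;
}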