Example #1
        public bool EmergencyDrop()
        {
            //Top Left: 814x298
            //Size: 295x104
            int start_x = 700;
            int start_y = 200;

            Bitmap cropped    = CompassSensor.Crop(screen.bitmap, start_x, start_y, start_x + 400, start_y + 300);
            Mat    screenarea = BitmapConverter.ToMat(cropped);
            Mat    yellow     = IsolateYellow(screenarea);

            Mat template = new Mat("res3/estop.png", ImreadModes.GrayScale);
            Mat result   = new Mat(yellow.Size(), yellow.Type());

            Cv2.MatchTemplate(yellow, template, result, TemplateMatchModes.CCoeffNormed);
            double minVal, maxVal;

            OpenCvSharp.Point minLoc, maxLoc;
            result.MinMaxLoc(out minVal, out maxVal, out minLoc, out maxLoc);
            if (maxVal > 0.7)
            {
                debugWindow.Image = CompassSensor.Crop(BitmapConverter.ToBitmap(yellow), maxLoc.X, maxLoc.Y, maxLoc.X + template.Width, maxLoc.Y + template.Height);
                return(true);
            }
            return(false);
        }
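
A minimal sketch of what the IsolateYellow helper used above (and in several of the examples below) might look like. The method is referenced but never shown here, so the HSV bounds and the choice of returning the value channel are assumptions, not the project's actual code:

        private static Mat IsolateYellow(Mat source)
        {
            // Convert to HSV and keep only strongly saturated, reasonably bright yellow pixels.
            Mat hsv    = source.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat mask   = hsv.InRange(InputArray.Create(new int[] { 20, 150, 100 }), InputArray.Create(new int[] { 40, 255, 255 }));
            Mat yellow = new Mat();

            hsv.CopyTo(yellow, mask);
            // Return the value channel as a single-channel image, matching the greyscale
            // templates loaded with ImreadModes.GrayScale in these examples.
            return(yellow.Split()[2]);
        }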
Example #2
        /// <summary>
        /// If we're fueling, see if our tank is full
        /// </summary>
        public bool FuelComplete()
        {
            //Top Left: 1034x254
            //Size: 115x40
            int start_x = 1034;
            int start_y = 254;

            Bitmap cropped    = CompassSensor.Crop(screen.bitmap, start_x, start_y, start_x + 300, start_y + 100);
            Mat    screenarea = BitmapConverter.ToMat(cropped);
            Mat    yellow     = IsolateYellow(screenarea);

            Mat template = new Mat("res3/fuel_full.png", ImreadModes.GrayScale);
            Mat result   = new Mat(yellow.Size(), yellow.Type());

            Cv2.MatchTemplate(yellow, template, result, TemplateMatchModes.CCoeffNormed);
            double minVal, maxVal;

            OpenCvSharp.Point minLoc, maxLoc;
            result.MinMaxLoc(out minVal, out maxVal, out minLoc, out maxLoc);

            //Console.WriteLine(string.Format("Match fuel capacity: minVal {0}, maxVal {1}", minVal, maxVal));
            //debugWindow.Image = CompassSensor.Crop(BitmapConverter.ToBitmap(yellow), maxLoc.X, maxLoc.Y, maxLoc.X + template.Width, maxLoc.Y + template.Height);
            //debugWindow2.Image = BitmapConverter.ToBitmap(template);
            if (maxVal > 0.8)
            {
                //Console.WriteLine(string.Format("Match fuel full is true: {0}", maxVal));
                debugWindow.Image = CompassSensor.Crop(BitmapConverter.ToBitmap(yellow), maxLoc.X, maxLoc.Y, maxLoc.X + template.Width, maxLoc.Y + template.Height);
                return(true);
            }

            return(false);
        }
Example #3
        //The current location is locked if the "locked target" icon is overriding the blue "current location" icon.
        public bool CurrentLocationLocked()
        {
            Bitmap cropped    = CompassSensor.Crop(screen.bitmap, 460, 220, 1300, 800);
            Mat    screenarea = BitmapConverter.ToMat(cropped);

            Mat[] channels = screenarea.Split();
            Mat   blue     = channels[0];

            Mat template = new Mat("res3/current_location.png", ImreadModes.GrayScale);
            Mat result   = new Mat(blue.Size(), blue.Type());

            Cv2.MatchTemplate(blue, template, result, TemplateMatchModes.CCoeffNormed);

            double minVal, maxVal;

            OpenCvSharp.Point minLoc, maxLoc;
            result.MinMaxLoc(out minVal, out maxVal, out minLoc, out maxLoc);
            Console.WriteLine(string.Format("Current location lock maxval: {0}", maxVal));

            if (maxVal > 0.9)
            {
                //It's still showing up and therefore not locked.
                return(false);
            }
            debugWindow.Image = CompassSensor.Crop(BitmapConverter.ToBitmap(blue), maxLoc.X, maxLoc.Y, maxLoc.X + template.Width, maxLoc.Y + template.Height);
            return(true);
        }
Example #4
        /// <summary>
        /// See if the FUEL SCOOPING notification is being displayed
        /// </summary>
        public bool MatchScooping()
        {
            //Top left corner: 843x73
            //Size: 196x17
            //Bitmap cropped = CompassSensor.Crop(screen.bitmap, screen.bitmap.Width - 400, 0, screen.bitmap.Width - 100, 300);
            int    start_x    = 740;
            int    start_y    = 50;
            Bitmap cropped    = CompassSensor.Crop(screen.bitmap, start_x, start_y, start_x + 400, start_y + 60);
            Mat    screenarea = BitmapConverter.ToMat(cropped);
            Mat    yellow     = IsolateYellow(screenarea);

            Mat template = new Mat("res3/scoop_active.png", ImreadModes.GrayScale);
            Mat result   = new Mat(yellow.Size(), yellow.Type());

            Cv2.MatchTemplate(yellow, template, result, TemplateMatchModes.CCoeffNormed);
            double minVal, maxVal;

            OpenCvSharp.Point minLoc, maxLoc;
            result.MinMaxLoc(out minVal, out maxVal, out minLoc, out maxLoc);

            Console.WriteLine(string.Format("Match scooping: minVal {0}, maxVal {1}", minVal, maxVal));
            //debugWindow.Image = CompassSensor.Crop(BitmapConverter.ToBitmap(yellow), maxLoc.X, maxLoc.Y, maxLoc.X + template.Width, maxLoc.Y + template.Height);
            //debugWindow2.Image = BitmapConverter.ToBitmap(template);
            if (maxVal > 0.4)
            {
                Console.WriteLine(string.Format("Match scooping is true: {0}", maxVal));
                debugWindow.Image = CompassSensor.Crop(BitmapConverter.ToBitmap(yellow), maxLoc.X, maxLoc.Y, maxLoc.X + template.Width, maxLoc.Y + template.Height);
                return(true);
            }

            Console.WriteLine("Match scooping is not true");
            return(false);
        }
Example #5
        /// <summary>
        /// Try to match (part of) a large green circle on the screen.
        /// </summary>
        public CircleSegment FindCorona()
        {
            // see the Experiments for how this works
            Bitmap cropped = CompassSensor.Crop(screen.bitmap,
                                                screen.bitmap.Width * 1 / 3,
                                                screen.bitmap.Height * 1 / 3,
                                                screen.bitmap.Width * 2 / 3,
                                                screen.bitmap.Height * 2 / 3);
            Mat screenwhole = BitmapConverter.ToMat(cropped);

            Point2f ShipPointerOffset = new Point2f(0, 0);

            try
            {
                ShipPointerOffset = FindShipPointer(IsolateYellow(screenwhole));
            }
            catch (Exception)
            {
                // If we can't find the ship pointer (it's hard to see it against the sun) then use the middle of the screen.
            }

            // erase the vivid areas, otherwise the blur subtraction turns yellow near red to green
            Mat brightHSV     = screenwhole.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat darkAreasMask = brightHSV.InRange(InputArray.Create(new int[] { 0, 0, 0 }), InputArray.Create(new int[] { 180, 255, 180 }));
            Mat darkAreas     = new Mat();

            screenwhole.CopyTo(darkAreas, darkAreasMask);

            Mat screenblur        = darkAreas - darkAreas.Blur(new OpenCvSharp.Size(10, 10));
            Mat sourceHSV         = screenblur.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat mask              = sourceHSV.InRange(InputArray.Create(new int[] { 35, 204, 20 }), InputArray.Create(new int[] { 90, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Mat sourceGrey = sourceHSVFiltered.Split()[2].InRange(32, 256);

            LineSegmentPoint[] result = sourceGrey.HoughLinesP(1, 3.1415 / 180, 5, 10, 2);
            List <Point2d>     points = new List <Point2d>();

            foreach (var line in result)
            {
                points.Add(line.P1);
                points.Add(line.P2);
            }
            if (points.Count < 8)
            {
                throw new ArgumentException("Not enough points in corona circle");
            }
            CircleSegment c = ComputeCircle(points);

            sourceGrey.Line(c.Center, ShipPointerOffset, 255);
            c.Center -= ShipPointerOffset; // adjust for camera movement by taking ship pointer offset
            sourceGrey.Circle(c.Center, (int)c.Radius, 255);
            debugWindow.Image = BitmapConverter.ToBitmap(sourceGrey);
            return(c);
        }
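
ComputeCircle is called above but not shown. A rough sketch of how it could be implemented as an algebraic (Kåsa) least-squares circle fit over the Hough line endpoints; the project's actual implementation may differ:

        private static CircleSegment ComputeCircle(List<Point2d> points)
        {
            // Fit x^2 + y^2 + D*x + E*y + F = 0 in a least-squares sense:
            // build [x y 1] * [D E F]^T = -(x^2 + y^2) for every point and solve with SVD.
            Mat A = new Mat(points.Count, 3, MatType.CV_64FC1);
            Mat b = new Mat(points.Count, 1, MatType.CV_64FC1);

            for (int i = 0; i < points.Count; i++)
            {
                A.Set(i, 0, points[i].X);
                A.Set(i, 1, points[i].Y);
                A.Set(i, 2, 1.0);
                b.Set(i, 0, -(points[i].X * points[i].X + points[i].Y * points[i].Y));
            }
            Mat solution = new Mat();

            Cv2.Solve(A, b, solution, DecompTypes.SVD);
            double D = solution.At<double>(0), E = solution.At<double>(1), F = solution.At<double>(2);

            Point2f centre = new Point2f((float)(-D / 2), (float)(-E / 2));
            float   radius = (float)Math.Sqrt(D * D / 4 + E * E / 4 - F);
            return(new CircleSegment(centre, radius));
        }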
Example #6
 public static void MatchJumpEnd()
 {
     Bitmap screen       = new Bitmap("Screenshot_0029.bmp");
     var    d            = 30;
     Bitmap image        = CompassSensor.Crop(screen, new Rectangle(screen.Width / 2 - d, screen.Height / 2 - d, d * 2, d * 2));
     Mat    screencentre = BitmapConverter.ToMat(image);
     Window w1           = new Window(screencentre);
     Mat    hsv          = screencentre.CvtColor(ColorConversionCodes.BGR2HSV);
     var    x            = hsv.Mean();
 }
Example #7
        /// <summary>
        /// Crop out the central region of the screen (convenience function)
        /// </summary>
        /// <param name="diameter"></param>
        /// <returns></returns>
        public Mat ScreenCentre(int diameter)
        {
            Bitmap s = bitmap;

            Rectangle         screenCentre = new Rectangle((int)((s.Width - diameter) * 0.5), (int)((s.Height - diameter) * 0.5), diameter, diameter);
            Bitmap            image        = CompassSensor.Crop(s, screenCentre);

            return(BitmapConverter.ToMat(image));
        }
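
CompassSensor.Crop is called throughout these examples both with opposite-corner coordinates and with a Rectangle. A plausible sketch of the two overloads, assuming they simply clone a sub-region of the Bitmap (the real helper may do more, such as clamping the region to the screen bounds):

        public static Bitmap Crop(Bitmap source, int x1, int y1, int x2, int y2)
        {
            // The four-argument form takes opposite corners rather than width/height.
            return(Crop(source, new Rectangle(x1, y1, x2 - x1, y2 - y1)));
        }

        public static Bitmap Crop(Bitmap source, Rectangle region)
        {
            // Bitmap.Clone copies the requested region into a new Bitmap, leaving the original untouched.
            return(source.Clone(region, source.PixelFormat));
        }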
Example #8
        public static void MatchCorona()
        {
            Bitmap screen      = new Bitmap("Screenshot_0028.bmp");
            Bitmap cropped     = CompassSensor.Crop(screen, screen.Width * 1 / 3, screen.Height * 1 / 3, screen.Width * 2 / 3, screen.Height * 2 / 3);
            Mat    screenwhole = BitmapConverter.ToMat(cropped);

            // erase the vivid areas, otherwise the blur subtraction turns yellow near red to green
            Mat brightHSV     = screenwhole.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat darkAreasMask = brightHSV.InRange(InputArray.Create(new int[] { 0, 0, 0 }), InputArray.Create(new int[] { 180, 255, 180 }));
            Mat darkAreas     = new Mat();

            screenwhole.CopyTo(darkAreas, darkAreasMask);

            Mat    screenblur = darkAreas - darkAreas.Blur(new OpenCvSharp.Size(10, 10));
            Window w3         = new Window(screenblur);

            //screenblur.SaveImage("sharplines.png");
            Mat sourceHSV = screenblur.CvtColor(ColorConversionCodes.BGR2HSV);

            /* Paint.Net uses HSV [0..360], [0..100], [0..100].
             * OpenCV uses H: 0 - 180, S: 0 - 255, V: 0 - 255
             * Paint.NET colors:
             * 73   100 18     brightest part of green edge
             * 72   98  9      very dark green
             * suggested range [70..180], [80..100], [8..100] (paint.net)
             * suggested range [35..90], [204..255], [20..255] (openCV)
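             * conversion: H_cv = H_pdn / 2, S_cv = S_pdn * 255 / 100, V_cv = V_pdn * 255 / 100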
             * */
            Mat mask = sourceHSV.InRange(InputArray.Create(new int[] { 35, 204, 20 }), InputArray.Create(new int[] { 90, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Window w5         = new Window("yellowfilter", sourceHSVFiltered.CvtColor(ColorConversionCodes.HSV2BGR));
            Mat    sourceGrey = sourceHSVFiltered.Split()[2].InRange(32, 256); // Value channel is pretty good as a greyscale conversion
            Window w6         = new Window("yellowFilterValue", sourceGrey);

            LineSegmentPoint[] result = sourceGrey.HoughLinesP(1, 3.1415 / 180, 5, 10, 2);
            List <Point2d>     points = new List <Point2d>();

            foreach (var line in result)
            {
                points.Add(line.P1);
                points.Add(line.P2);
                darkAreas.Line(line.P1, line.P2, new Scalar(255, 0, 255));
            }
            CircleSegment c = CruiseSensor.ComputeCircle(points);

            darkAreas.Circle(c.Center, (int)c.Radius, new Scalar(255, 255, 0));
            Window w9 = new Window("final", darkAreas);
        }
Example #9
        public bool MatchSafDisengag()
        {
            // MatchTemplate doesn't allow for scaling / rotation. Allow more leeway by reducing resolution?

            Bitmap image  = CompassSensor.Crop(screen.bitmap, new Rectangle(800, 650, 300, 200));
            Mat    source = BitmapConverter.ToMat(image);
            Mat    blues  = source.Split()[0];
            Mat    clean  = blues.EmptyClone();

            clean.SetTo(0); // make sure the matrix is blank.
            blues.CopyTo(clean, blues.InRange(250, 255));
            Mat    matches = clean.MatchTemplate(templatesaf, TemplateMatchModes.CCoeffNormed);
            double minVal, maxVal;

            matches.MinMaxLoc(out minVal, out maxVal);

            return(maxVal > 0.4); // see experiments, MatchSafDisengag2
        }
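
The comment above asks whether reducing resolution would give MatchTemplate more leeway against small scale differences. A rough sketch of that idea; the helper name and the 0.5 scale factor are illustrative, not taken from the project:

        public static double MatchAtReducedResolution(Mat source, Mat template, double factor = 0.5)
        {
            // Shrink both images by the same factor before matching; small scale or
            // position errors then cost less correlation than at full resolution.
            Mat smallSource   = source.Resize(new OpenCvSharp.Size(), factor, factor, InterpolationFlags.Area);
            Mat smallTemplate = template.Resize(new OpenCvSharp.Size(), factor, factor, InterpolationFlags.Area);
            Mat matches       = smallSource.MatchTemplate(smallTemplate, TemplateMatchModes.CCoeffNormed);
            double minVal, maxVal;

            matches.MinMaxLoc(out minVal, out maxVal);
            return(maxVal); // compare against a threshold such as the 0.4 used in MatchSafDisengag
        }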
Example #10
        public static void MatchSafDisengag2()
        {
            Bitmap screen = new Bitmap("Screenshot_0022.bmp");
            Bitmap image  = CompassSensor.Crop(screen, new Rectangle(800, 700, 350, 150));
            Mat    source = BitmapConverter.ToMat(image);
            Mat    blues  = source.Split()[0];
            Mat    clean  = blues.EmptyClone();

            clean.SetTo(0); // make sure the matrix is blank.
            blues.CopyTo(clean, blues.InRange(250, 255));
            Mat matches = clean.MatchTemplate(new Mat("res3/safdisengag250.png", ImreadModes.GrayScale), TemplateMatchModes.CCoeffNormed);

            clean.ImWrite("safdisengag250.png");
            double minVal, maxVal;

            matches.MinMaxLoc(out minVal, out maxVal);
            Window w2 = new Window(clean);
            Window w3 = new Window(matches);
            Window w5 = new Window(matches.InRange(0.4, 1));
        }
Example #11
        /// <summary>
        /// See if the IMPACT warning is being displayed
        /// </summary>
        public bool MatchImpact()
        {
            Bitmap cropped    = CompassSensor.Crop(screen.bitmap, screen.bitmap.Width - 400, 0, screen.bitmap.Width - 100, 300);
            Mat    screenarea = BitmapConverter.ToMat(cropped);
            Mat    red        = IsolateRed(screenarea);

            Mat template = new Mat("res3/impacttemplate.png", ImreadModes.GrayScale);
            Mat result   = new Mat(red.Size(), red.Type());

            Cv2.MatchTemplate(red, template, result, TemplateMatchModes.CCoeffNormed);
            double minVal, maxVal;

            OpenCvSharp.Point minLoc, maxLoc;
            result.MinMaxLoc(out minVal, out maxVal, out minLoc, out maxLoc);
            if (maxVal > 0.4)
            {
                debugWindow.Image = CompassSensor.Crop(BitmapConverter.ToBitmap(red), maxLoc.X, maxLoc.Y, maxLoc.X + template.Width, maxLoc.Y + template.Height);
                return(true);
            }
            return(false);
        }
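
IsolateRed, like IsolateYellow, is referenced but not shown. A minimal sketch under the assumption that it is also an HSV range filter; because red wraps around hue 0 on OpenCV's 0-180 hue scale, two ranges are combined (the exact bounds here are guesses, not the project's values):

        private static Mat IsolateRed(Mat source)
        {
            Mat hsv     = source.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat lowRed  = hsv.InRange(InputArray.Create(new int[] { 0, 150, 100 }), InputArray.Create(new int[] { 10, 255, 255 }));
            Mat highRed = hsv.InRange(InputArray.Create(new int[] { 170, 150, 100 }), InputArray.Create(new int[] { 180, 255, 255 }));
            Mat mask    = new Mat();

            Cv2.BitwiseOr(lowRed, highRed, mask); // red spans both ends of the hue wheel
            Mat red = new Mat();

            hsv.CopyTo(red, mask);
            return(red.Split()[2]); // value channel as the single-channel result
        }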