Code Example #1
        private ObjectLayer Execute1stLevelSegmentation(GrayscaleProcessor gp, GrayscaleProcessor gpSobel, GrayscaleProcessor gpH)
        {
            ContourBasedSegmentation cbs = new ContourBasedSegmentation();

            cbs.CreatePrimarySegmentation(gp, MAX_CONTOURLENGTH);

            cbs.EvaluateContours(c =>
            {
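                // Convexity below 0.95 scores -1; otherwise the score is the Sobel-based contour value.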
                if (ContourProperties.FromContour(c).Convexity < 0.95)
                {
                    return(-1);
                }

                return(ContourValue.GetValue(c, gpSobel));
            });

            ObjectLayer layer = cbs.CreateLayer(MIN_CONTOURLENGTH, int.MaxValue);

            layer = new ContourOptimizer().RemoveNonCompactPixels(layer, 3);

            layer = layer.CreateAbove(obj =>
            {
                return(this.GetContourGradient(obj, gp) < 0);
            });

            //layer=new ConcaveObjectSeparation().Execute(layer, 0.33, true);

            return(layer);
        }
Code Example #2
        private float GetTargetArea(ObjectLayer layer)
        {
            if (layer.Objects.Count == 0)
            {
                return(-1);
            }

            float targetArea = -1F;

            List <float> values = new List <float>();

            for (int i = 0; i < layer.Objects.Count; i++)
            {
                ImageObject obj = layer.Objects[i];

                float area = ContourProperties.FromContour(obj.Contour).Area;

                values.Add(area);
            }

            values.Sort();
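            // Use the area at the 95th percentile of the sorted list as the target area (with 20 or fewer objects this is simply the largest area).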

            int index = (int)(0.95 * values.Count);

            if (values.Count > 0)
            {
                targetArea = values[index];
            }

            return(targetArea);
        }
Code Example #3
File: Program.cs  Project: LarryLegend33/PullPara
        static ContourProperties FishContour(Mat image_raw, Mat background)
        {
            bool fishcont_found = false;
            Size frsize = new Size(image_raw.Width, image_raw.Height);
            Mat image = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
            ContourProperties contprops = new ContourProperties();
            ThresholdType ttype = 0;
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();
            CvInvoke.AbsDiff(image_raw, background, image);
            // The lower bound was originally 30; it has been lowered (here 10) to try to pick up paramecia as well.
            CvInvoke.Threshold(image, image, 10, 255, ttype);
            // The next four lines display the thresholded image; comment them out if the preview window is not needed.
            String camerawindow = "Camera Window";
            CvInvoke.NamedWindow(camerawindow);
            CvInvoke.Imshow(camerawindow, image);
            CvInvoke.WaitKey(1);
            CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
            int fish_contour_index = 0;
            int height = 0; // keep the candidate height local so contprops is only filled in when a fish contour is actually accepted
            Rectangle bounding_rect = new Rectangle();
            for (int ind = 0; ind < contours.Size; ind++)
            {
                bounding_rect = CvInvoke.BoundingRectangle(contours[ind]);
                // use the longer side of the bounding box as the contour "height"
                if (bounding_rect.Width > bounding_rect.Height)
                {
                    height = bounding_rect.Width;
                }
                else
                {
                    height = bounding_rect.Height;
                }
                if (height < 50 && height > 25)
                {
                    fish_contour_index = ind;
                    fishcont_found = true;
                    break;
                }
            }
            if (fishcont_found)
            {
                var contourCenter = new Point();
                var contourCOM = new Point();
                MCvMoments com = new MCvMoments();
                com = CvInvoke.Moments(contours[fish_contour_index]);
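                // Center of mass from the image moments: x = M10/M00, y = M01/M00.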
                contourCOM.X = (int)(com.M10 / com.M00);
                contourCOM.Y = (int) (com.M01 / com.M00);
                contourCenter.X = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
                contourCenter.Y = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
                contprops.center = contourCenter;
                contprops.com    = contourCOM;
                contprops.height = height;
            }
            else
            {
                Console.WriteLine(contprops.com);
                Console.WriteLine(contprops.height);
                Console.WriteLine("no contours");
            }
            return contprops;
        }
Code Example #4
File: Program.cs  Project: LarryLegend33/EscapeCode
 public CamData(Mat roi_input, Point fishXY, ContourProperties cont, uint buffer, uint j_, byte[] stmpix)
 {
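     // One frame's tracking output: requested/returned ring-buffer indices (j_ and buffer), fish position and contour, the cropped ROI image, and the pixel strip sampled at the stimulus light location.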
     jay          = j_;
     fishcoord    = fishXY;
     fishcont     = cont;
     roi          = roi_input;
     buffernumber = buffer;
     pix_for_stim = stmpix;
 }
Code Example #5
 private static void calculateFeatures(ObjectLayer objectLayer, Bitmap source)
 {
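     // WriteBack = false: the grayscale conversion is (presumably) only needed to measure mean intensities, so nothing is written back to the source bitmap.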
     using (var grayscaleProcessor = new GrayscaleProcessor(source, RgbToGrayscaleConversion.Mean)
     {
         WriteBack = false
     }) MeanIntensity.ProcessLayer(objectLayer, grayscaleProcessor);
     foreach (var imageObject in objectLayer.Objects)
     {
         imageObject.Features.Add(new AreaOfContour(ContourProperties.FromContour(imageObject.Contour)));
     }
 }
Code Example #6
File: Program.cs  Project: LarryLegend33/EscapeCode
        static ContourProperties LargestContour(Mat image_raw, Mat background, bool draw)
        {
            Size frsize = new Size(image_raw.Width, image_raw.Height);
            Mat  image  = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
            ContourProperties     contprops = new ContourProperties();
            ThresholdType         ttype     = 0;
            VectorOfVectorOfPoint contours  = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();

            CvInvoke.AbsDiff(image_raw, background, image);
            CvInvoke.Threshold(image, image, 35, 255, ttype);
            CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
            double largest_area       = 0;
            int    largest_area_index = 0;

            for (int ind = 0; ind < contours.Size; ind++)
            {
                double area = CvInvoke.ContourArea(contours[ind]);
                if (area > largest_area)
                {
                    if (image_raw.Width > 1000 && contours[ind][0].Y < 100) // prevents stim LED from being caught as a contour
                    {
                        continue;
                    }
                    largest_area       = area;
                    largest_area_index = ind;
                }
            }
            var contourCenter = new Point();

            if (contours.Size > 0)
            {
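                // Approximate the contour center as the center of the largest contour's bounding rectangle.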
                Rectangle bounding_rect = CvInvoke.BoundingRectangle(contours[largest_area_index]);
                contourCenter.X  = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
                contourCenter.Y  = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
                contprops.center = contourCenter;
                contprops.height = bounding_rect.Height;
                if (draw)
                {
                    CvInvoke.DrawContours(image_raw, contours, largest_area_index, new MCvScalar(255, 0, 0), 2); // these are correct.
                    CvInvoke.Rectangle(image_raw, bounding_rect, new MCvScalar(255, 0, 0));
                    CvInvoke.Circle(image_raw, contourCenter, 50, new MCvScalar(255, 0, 0));                     // THIS IS ABOUT 50 PIXELS TOO HIGH
                }
            }
            else
            {
                //    Console.WriteLine("no contours");
            }
            return(contprops);
        }
Code Example #7
        /// <summary>
        /// new Feature:
        /// FormFactorOfContour
        /// </summary>
        /// <param name="objectLayer"></param>
        /// <returns></returns>
        public static ObjectLayer AddFeatureFormFactor(ObjectLayer objectLayer)
        {
            var listImageObjects = new List <ImageObject>();

            foreach (var imageObject in objectLayer.Objects)
            {
                var item = CopyImageObject(imageObject.Id, imageObject);
                item.Features.Add(new FormFactorOfContour(ContourProperties.FromContour(imageObject.Contour)));

                listImageObjects.Add(item);
            }

            return(new ObjectLayer(objectLayer.Map, listImageObjects.ToArray(), objectLayer.Name));
        }
Code Example #8
        private double GetContourValue(Contour c, GrayscaleProcessor gpSobel, GrayscaleProcessor gpH, float targetArea)
        {
            double h = MeanIntensityOnContour.GetValue(c, gpH);

            ContourProperties cp = ContourProperties.FromContour(c);

            float area = Math.Min(cp.Area, targetArea) / targetArea;

            if (cp.Convexity < 0.9 && cp.Area < targetArea)
            {
                area = 1F / targetArea;
            }

            float convexity = cp.Convexity * cp.Convexity;

            double color = h / 255.0;
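            // Returned score: normalized mean gpH intensity x normalized area x squared convexity x the Sobel-based contour value.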

            return(color * area * convexity * ContourValue.GetValue(c, gpSobel));
        }
Code Example #9
        public static void ProcessLayer(ObjectLayer layer)
        {
            foreach (var io in layer.Objects)
            {
                var correspondingEllipse = CorrespondingEllipse.FromContour(io.Contour);
                io.SetFeature("MajorAxisOfCorrespondingEllipse", new MajorAxisOfCorrespondingEllipse(correspondingEllipse).Value);
                io.SetFeature("MinorAxisOfCorrespondingEllipse", new MinorAxisOfCorrespondingEllipse(correspondingEllipse).Value);

                ConvexHull           ch  = ConvexHull.FromContour(io.Contour);
                ConvexHullProperties chp = ConvexHullProperties.FromConvexHull(ch);
                ContourProperties    cp  = ContourProperties.FromContour(io.Contour);
                AreaOfContour        ac  = new AreaOfContour(cp);
                AreaOfConvexHull     ach = new AreaOfConvexHull(chp);
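                // AreaDiv (set below) is the convex-hull area divided by the contour area: close to 1 for convex objects, larger for concave ones.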
                io.SetFeature("FormFactor", cp.FormFactor);
                io.SetFeature("Area", cp.Area);
                io.SetFeature("Convexity", cp.Convexity);
                io.SetFeature("AreaHull", ach.Value);
                io.SetFeature("AreaDiv", ach.Value / cp.Area);
                io.SetFeature("FormFactorOfConvexHull", new FormFactorOfConvexHull(chp).Value);
            }
        }
Code Example #10
File: Program.cs  Project: LarryLegend33/EscapeCode
        static List <ContourProperties> BarrierLocations(Mat image_raw, Mat background)
        {
            int  minArea = 1000;
            int  maxArea = 600000;
            Size frsize  = new Size(image_raw.Width, image_raw.Height);
            Mat  image   = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1);

            ThresholdType            ttype    = 0;
            VectorOfVectorOfPoint    contours = new VectorOfVectorOfPoint();
            List <VectorOfPoint>     contlist = new List <VectorOfPoint>();
            List <ContourProperties> cp_list  = new List <ContourProperties>();
            Mat hierarchy = new Mat();

            CvInvoke.AbsDiff(image_raw, background, image);
            CvInvoke.Threshold(image, image, 50, 255, ttype);
            CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
            Point contourCenter = new Point();

            for (int ind = 0; ind < contours.Size; ind++)
            {
                double area = CvInvoke.ContourArea(contours[ind]);
                if (area > minArea && area < maxArea)
                {
                    contlist.Add(contours[ind]);
                }
            }
            for (int contind = 0; contind < contlist.Count; contind++)
            {
                ContourProperties contprops     = new ContourProperties();
                Rectangle         bounding_rect = CvInvoke.BoundingRectangle(contlist[contind]);
                contourCenter.X  = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
                contourCenter.Y  = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
                contprops.center = contourCenter;
                contprops.height = bounding_rect.Height;
                cp_list.Add(contprops);
            }

            return(cp_list);
        }
Code Example #11
File: Program.cs  Project: LarryLegend33/EscapeCode
// Goal of this function is to place barriers halfway between the edge of the tank and the center.
        static List <ContourProperties> GenerateVirtualBarriers(int tankwidth, int tc_x, int tc_y)
        {
            double vb_distance_from_center = tankwidth / 4;
            int    barrier_diam            = 110;
            int    vb_arm       = Convert.ToInt32(Math.Sqrt(Math.Pow(vb_distance_from_center, 2) / 2));
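            // vb_arm = vb_distance_from_center / sqrt(2): offsetting both X and Y by vb_arm puts each barrier at that radial distance along a diagonal.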
            int    how_many_vbs = 4;
            List <ContourProperties> vb_list = new List <ContourProperties>();
            Point virtualCenter = new Point();

            for (int vb_ind = 0; vb_ind < how_many_vbs; vb_ind++)
            {
                ContourProperties contprops = new ContourProperties();
                if (vb_ind == 0)
                {
                    virtualCenter.X = tc_x + vb_arm;
                    virtualCenter.Y = tc_y + vb_arm;
                }
                if (vb_ind == 1)
                {
                    virtualCenter.X = tc_x + vb_arm;
                    virtualCenter.Y = tc_y - vb_arm;
                }
                if (vb_ind == 2)
                {
                    virtualCenter.X = tc_x - vb_arm;
                    virtualCenter.Y = tc_y + vb_arm;
                }
                if (vb_ind == 3)
                {
                    virtualCenter.X = tc_x - vb_arm;
                    virtualCenter.Y = tc_y - vb_arm;
                }
                contprops.center = virtualCenter;
                contprops.height = barrier_diam;
                vb_list.Add(contprops);
            }
            return(vb_list);
        }
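A hypothetical call for illustration only (not part of the original project), using the 1280 x 1024 frame and the (640, 512) tank center that appear in Code Example #13:

            // tankwidth = 1280 gives vb_distance_from_center = 320 and vb_arm = sqrt(320^2 / 2) ≈ 226,
            // so the four virtual barriers are centered at (866, 738), (866, 286), (414, 738) and (414, 286),
            // each 110 px in diameter.
            List<ContourProperties> virtual_barriers = GenerateVirtualBarriers(1280, 640, 512);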
Code Example #12
File: Program.cs  Project: LarryLegend33/PullPara
        static void Main(string[] args)
        {
            SerialPort pyboard = new SerialPort("COM6", 115200);
            pyboard.Open();
            pyboard.WriteLine("import paramove\r");
            var options = new DataflowBlockOptions();
            options.BoundedCapacity = 10;
            var pipe_buffer = new BufferBlock<CamData>(options);
            bool foundfish = false;
            int l_or_r = 0; 
            MCvScalar gray = new MCvScalar(128, 128, 128);
            int roidim = 80;
            string camera_id = "img0"; //this is the ID of the NI-IMAQ board in NI MAX. 
            var _session = new ImaqSession(camera_id);

            String camerawindow = "Camera Window";
            CvInvoke.NamedWindow(camerawindow);
            int frameWidth = 1280;
            int frameHeight = 1024;
            uint bufferCount = 3;
            uint buff_out = 0;
            int numchannels = 1;
            ContourProperties fishcontour = new ContourProperties();
            System.Drawing.Size framesize = new System.Drawing.Size(frameWidth, frameHeight);
            System.Drawing.Size roi_size = new System.Drawing.Size(roidim, roidim);
            Mat cvimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat modeimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat modeimage_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            byte[,] data_2D = new byte[frameHeight, frameWidth];
            byte[,] data_2D_roi = new byte[roidim, roidim];
            byte[,] imagemode = new byte[frameHeight, frameWidth];
            ImaqBuffer image = null;
            List<byte[,]> imglist = new List<byte[,]>();
            ImaqBufferCollection buffcollection = _session.CreateBufferCollection((int)bufferCount, ImaqBufferCollectionType.VisionImage);
            _session.RingSetup(buffcollection, 0, false);
            _session.Acquisition.AcquireAsync();

            imglist = GetImageList(_session, 5000, 400);
            imagemode = FindMode(imglist);
            modeimage.SetTo(imagemode);
            imglist.Clear();
            CvInvoke.Imshow(camerawindow, modeimage);
            CvInvoke.WaitKey(0);
            Point f_center = new Point();
            Mat cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            image = _session.Acquisition.Extract((uint)0, out buff_out);
            uint j = buff_out;
            Console.WriteLine("j followed by buff_out");
            Console.WriteLine(j.ToString());
            Console.WriteLine(buff_out.ToString());
            while (true)
            {
                image = _session.Acquisition.Extract(j, out buff_out);
                data_2D = image.ToPixelArray().U8;
                cvimage.SetTo(data_2D);
        
                if (foundfish)
                {
                    modeimage_roi.SetTo(SliceROI(imagemode, f_center.X, f_center.Y, roidim));
                    data_2D_roi = SliceROI(data_2D, f_center.X, f_center.Y, roidim);
                    cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
                    cv_roi.SetTo(data_2D_roi);
                    fishcontour = FishContour(cv_roi, modeimage_roi);
                    if (fishcontour.height != 0)
                    {
                        f_center.X = (int)fishcontour.center.X + f_center.X - roidim / 2;  // puts ROI coords into full frame coords
                        f_center.Y = (int)fishcontour.center.Y + f_center.Y - roidim / 2;
                    }

                    else
                    {
                        foundfish = false;
                    }
                }
                if (!foundfish)                
                {
                    fishcontour = FishContour(cvimage, modeimage);
                    if (fishcontour.height != 0)
                    {
                        f_center.X = (int)fishcontour.center.X;
                        f_center.Y = (int)fishcontour.center.Y;
//                        foundfish = true;
                        data_2D_roi = SliceROI(data_2D, f_center.X, f_center.Y, roidim);
                        cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
                        cv_roi.SetTo(data_2D_roi);                        
                    }
                    else
                    {
                        foundfish = false;
                        cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
                        cv_roi.SetTo(gray); //in movie indicates that program lost the fish on this frame
                   
                        if (j % 25 == 0)
                        {
                            CvInvoke.Imshow(camerawindow, cvimage);
                            CvInvoke.WaitKey(1);
                            Console.WriteLine("Missed Fish");
                            Console.WriteLine(fishcontour.height);
                        }
                        j = buff_out + 1;
                        continue; 
                    }
                }

                if (fishcontour.com.Y > fishcontour.center.Y)
                {
//                   pyboard.WriteLine("paramove.pull_up()\r");
                    l_or_r = 1;

                }
                else if (fishcontour.com.Y < fishcontour.center.Y)
                {
// pyboard.WriteLine("paramove.pull_down()\r");
                    l_or_r = 0;
                }
                // PROBABLY MAKE THIS SO IT DOESNT DRAW DURING A STIMULUS
                if (j % 25 == 0)
                {
                    if (l_or_r == 0)
                    {
                        pyboard.WriteLine("paramove.pull_up()\r");
                        CvInvoke.Circle(cvimage, new Point(f_center.X, f_center.Y), 20, new MCvScalar(0, 0, 0));
//                        CvInvoke.Circle(cvimage, new Point(f_center.X - roidim / 2 + fish_head.X, f_center.Y - roidim / 2 + fish_head.Y), 4, new MCvScalar(255,0,0));
                        Console.WriteLine(fishcontour.height);
                    }
                    else if (l_or_r == 1)
                    {
                        pyboard.WriteLine("paramove.pull_down()\r");
                        CvInvoke.Circle(cvimage, new Point(f_center.X, f_center.Y), 20, new MCvScalar(255, 0, 0));
                        Console.WriteLine(fishcontour.height);
                    }
                  //  CvInvoke.Imshow(camerawindow, cvimage);
                  //  CvInvoke.WaitKey(1);
                }
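                // Request the buffer after the one just returned on the next pass through the acquisition ring.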
                j = buff_out + 1;
            }


        }
Code Example #13
File: Program.cs  Project: LarryLegend33/EscapeCode
        static void Main(string[] args)
        {
            // Note that if you want to do halfmoon or stonehenge trials, place halfmoon and stonehenge in the center of the tank.
            // Fill their center with a barrier for the first mode. Then take the barrier out and take the mode again. Use the smallest barrier possible (so the fish can get close to the center) and, like in nb trials, get rid of the tracking restriction on barriers

            var options = new DataflowBlockOptions();

            options.BoundedCapacity = 10;
            var   pipe_buffer = new BufferBlock <CamData>(options);
            Point tank_center = new Point
            {
                X = 640,
                Y = 512,
            };
            int roidim = 80;

            string camera_id         = "img0"; //this is the ID of the NI-IMAQ board in NI MAX.
            var    _session          = new ImaqSession(camera_id);
            bool   reuse_background  = false;
            bool   drew_barriers     = false;
            bool   halfmoon          = false;
            bool   stonehenge        = false;
            bool   minefield         = false;
            bool   minefield_control = false;

            Console.WriteLine("Enter FishID   ");
            String fishid         = Console.ReadLine();
            String home_directory = "C:/Users/Deadpool/Desktop/Results/";
            String exp_directory  = home_directory + fishid;
            bool   exists_already = System.IO.Directory.Exists(exp_directory);

            if (!exists_already)
            {
                System.IO.Directory.CreateDirectory(exp_directory);
            }
            else
            {
                Console.WriteLine("Directory Already Exists. Overrite?  ");
                String overwrite = Console.ReadLine();
                if (overwrite == "y")
                {
                    System.IO.Directory.CreateDirectory(exp_directory);
                }
                else if (overwrite == "c")
                {
                }
                else
                {
                    Environment.Exit(0);
                }
            }
            Console.WriteLine("Enter Light X Location  ");
            String lightloc_X = Console.ReadLine();

            Console.WriteLine("Enter Light Y Location  ");
            String lightloc_Y       = Console.ReadLine();
            int    light_location_X = Convert.ToInt32(lightloc_X) - 25;
            int    light_location_Y = Convert.ToInt32(lightloc_Y);

            Console.WriteLine("Enter Experiment Type  ");
            String exp_string = Console.ReadLine();

            Console.WriteLine("Use old background?  ");
            String reuse = Console.ReadLine();

            if (reuse == "y")
            {
                reuse_background = true;
            }
            if (exp_string == "n" || exp_string == "t" || exp_string == "v")
            {
                minefield_control = true;
            }
            else if (exp_string == "b")
            {
                minefield = true;
            }
            String camerawindow = "Camera Window";

            CvInvoke.NamedWindow(camerawindow);
            int  frameWidth  = 1280;
            int  frameHeight = 1024;
            uint bufferCount = 3;
            // Could try changing this to 2 or 100
            // Checked and there is no card memory. It makes a buffer on system mem. Tried increasing virtual memory so
            // HD can be used as RAM. Allocated an additional 32 GB to virtual mem.
            uint      buff_out    = 0;
            int       numchannels = 1;
            MCvScalar gray        = new MCvScalar(128, 128, 128);
            List <ContourProperties> barrierlist         = new List <ContourProperties>();
            ContourProperties        fishcontour         = new ContourProperties();
            ContourProperties        fishcontour_correct = new ContourProperties();
            ContourProperties        barrier             = new ContourProperties();

            System.Drawing.Size framesize = new System.Drawing.Size(frameWidth, frameHeight);
            System.Drawing.Size roi_size  = new System.Drawing.Size(roidim, roidim);
            Mat cvimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat modeimage_barrier_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat modeimage             = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            //            Mat modeimage_barrier = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat            maxproj_cv = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            AutoResetEvent event1     = new AutoResetEvent(true);
            AutoResetEvent event2     = new AutoResetEvent(false);
            MCvMoments     COM        = new MCvMoments();

            byte[,] data_2D             = new byte[frameHeight, frameWidth];
            byte[,] data_2D_roi         = new byte[roidim, roidim];
            byte[,] imagemode_nobarrier = new byte[frameHeight, frameWidth];
            byte[,] maxprojimage        = new byte[frameHeight, frameWidth];
            ImaqBuffer           image          = null;
            ImaqBufferCollection buffcollection = _session.CreateBufferCollection((int)bufferCount, ImaqBufferCollectionType.VisionImage);

            _session.RingSetup(buffcollection, 0, false);
            _session.Acquisition.AcquireAsync();
            RecordAndStim experiment = new RecordAndStim(event1, event2, pipe_buffer, exp_string);

            experiment.experiment_directory = exp_directory;
            var stimthread = new Thread(experiment.StartStim);

            stimthread.Start();

            // THIS GRABS THE MODE FOR THE TANK IN GENERAL BEFORE ALIGNMENT

            if (!experiment.alignment_complete)
            {
                CvInvoke.WaitKey(0);
                imglist      = GetImageList(_session, 500, 10);
                maxprojimage = FindMaxProjection(imglist);
                maxproj_cv.SetTo(maxprojimage);
                imglist.Clear();
                CvInvoke.Imshow(camerawindow, maxproj_cv);
                CvInvoke.WaitKey(0);
            }

            // IF CAMERA IS NOT YET ALIGNED TO THE PROJECTOR, THIS LOOP FINDS THE LOCATION OF THE CALIBRATION CONTOUR THE EXPERIMENT CLASS IS PLACING ON THE PROJECTOR.

            experiment.start_align = true;
            if (!experiment.alignment_complete)
            {
                while (!experiment.alignment_complete)
                {
                    imglist = GetImageList(_session, 500, 10);
                    data_2D = FindMaxProjection(imglist);
                    cvimage.SetTo(data_2D);
                    Console.WriteLine("Finding Largest Contour");
                    experiment.projcenter_camcoords = LargestContour(cvimage, maxproj_cv, true).center;
                    CvInvoke.Imshow(camerawindow, cvimage);
                    CvInvoke.WaitKey(1);
                    event2.Set();
                    event1.WaitOne();
                }
                imglist.Clear();
                CvInvoke.WaitKey(0);
                imglist = GetImageList(_session, 500, 10);
                data_2D = FindMaxProjection(imglist);
                cvimage.SetTo(data_2D);
                experiment.tankwidth = LargestContour(cvimage, maxproj_cv, true).height * 2;
                Console.WriteLine("Width Of Tank Contour");
                Console.WriteLine(experiment.tankwidth);
                CvInvoke.Imshow(camerawindow, cvimage);
                CvInvoke.WaitKey(0);
                imglist.Clear();
            }

            // Next, the opposite thread is going to display a black circle that is the same size as the tank. Do a max projection on this
            // contour in order to measure width of the tank in projector coordinates.


            // Now you've put the IR filter back over the camera and are ready to do an experiment.
            // Get mode of image with no barrier present so you can background subtract and find the barriers and fish.
            imglist.Clear();
            if (reuse_background)
            {
                modeimage = CvInvoke.Imread(home_directory + "/background_nobar" + exp_string + ".tif", 0);
            }
            else
            {
                imglist             = GetImageList(_session, 5000, 400);
                imagemode_nobarrier = FindMode(imglist);
                modeimage.SetTo(imagemode_nobarrier);
                imglist.Clear();
                CvInvoke.Imshow(camerawindow, modeimage);
                CvInvoke.WaitKey(0);
            }

            // Here you have just added barriers to the tank. Now get a new mode that contains the barriers for use in background subtraction to find fish
            // and for localizing barriers.

            if (halfmoon || stonehenge || minefield)
            {
                imglist = GetImageList(_session, 5000, 400);
                if (reuse_background)
                {
                    modeimage_barrier = CvInvoke.Imread(home_directory + "/background_" + exp_string + ".tif", 0);
                }
                else
                {
                    imagemode = FindMode(imglist);
                    modeimage_barrier.SetTo(imagemode);
                }

                modeimage_barrier.Save(exp_directory + "/background_" + exp_string + ".tif");
                imglist.Clear();
                barrierlist = BarrierLocations(modeimage_barrier, modeimage);
                for (int ind = 0; ind < barrierlist.Count; ind++)
                {
                    experiment.barrier_position_list.Add(barrierlist[ind].center);
                    experiment.barrier_radius_list.Add(barrierlist[ind].height / 2);
                }
            }
            else if (minefield_control)
            {
                modeimage_barrier.SetTo(imagemode_nobarrier);
                modeimage_barrier.Save(exp_directory + "/background_" + exp_string + ".tif");

                barrierlist = GenerateVirtualBarriers(experiment.tankwidth, tank_center.X, tank_center.Y);
                for (int ind = 0; ind < barrierlist.Count; ind++)
                {
                    experiment.barrier_position_list.Add(barrierlist[ind].center);
                    experiment.barrier_radius_list.Add(barrierlist[ind].height / 2);
                }
            }

            using (StreamWriter barrierfile = new StreamWriter(exp_directory + "/barrierstruct_" + exp_string + ".txt"))
            {
                for (int bar = 0; bar < barrierlist.Count; bar++)
                {
                    if (bar == 0)
                    {
                        barrierfile.WriteLine(experiment.templatewidth.ToString());
                        barrierfile.WriteLine(experiment.tankwidth.ToString());
                    }
                    barrierfile.WriteLine(barrierlist[bar].center.ToString());
                    barrierfile.WriteLine(barrierlist[bar].height.ToString());
                }
            }

            CvInvoke.Imshow(camerawindow, modeimage_barrier);
            CvInvoke.WaitKey(0);


            if (halfmoon) //THIS IS BECAUSE YOU TAKE THE BARRIER AWAY AFTER IT FINDS THE HOLE. IE FOR HALFMOON TRIALS, YOU FIRST KEEP THE HALFMOON THERE FOR MODEIMAGE, THEN ADD A BARRIER THE SIZE OF THE HOLE FOR FINDING OF THE HOLE OF THE BARRIER. IF YOU WANT TO RUN STONEHENGE OR HALFMOON, DECLARE MINEFIELD_CONTROL AS TRUE, but don't draw barriers.
            {
                modeimage_barrier = modeimage;
                imagemode         = imagemode_nobarrier;
            }


            // IMAGE ACQUISITION AND FISH FINDING.
            //            Idea is to first acquire the image and turn it into a cvimage matrix. find the fish by finding the largest contour on a background subtracted and thresholded image (LargestContour function).  Each time you find the fish, store its coords so you can just search within a small ROI on the next frame. If you lose the fish, go back out to full frame and find it again.
            Point f_center = new Point();
            Mat   cv_roi   = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);

            image = _session.Acquisition.Extract((uint)0, out buff_out);
            uint j = buff_out;
            int  experiment_phase = 0;
            int  xycounter        = 0;

            Console.WriteLine("j followed by buff_out");
            Console.WriteLine(j.ToString());
            Console.WriteLine(buff_out.ToString());
            List <Point> coordlist   = new List <Point>();
            List <int>   phasebounds = new List <int>();

            while (true)
            {
                if (mode_reset.WaitOne(0))
                {
                    Console.WriteLine("Clearing Imagelist");
                    imglist.Clear();
                    mode_reset.Reset();
                }
                image = _session.Acquisition.Extract(j, out buff_out);
                try
                {
                    data_2D = image.ToPixelArray().U8;
                }
                catch (NationalInstruments.Vision.VisionException e)
                {
                    Console.WriteLine(e);
                    continue;
                }

                byte[] stim_pixel_readout = new byte[100];
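                // Sample a 100-pixel strip at the stimulus light location; it is passed to the stimulus thread through CamData (pix_for_stim).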
                for (int pix = 0; pix < 100; pix++)
                {
                    stim_pixel_readout[pix] = data_2D[light_location_Y, light_location_X + pix];
                }
                cvimage.SetTo(data_2D);
                fishcontour = FishContour(cvimage, modeimage_barrier, tank_center, barrierlist, minefield_control);

                // com makes sure that the head is near the barrier.
                if (fishcontour.height != 0)
                {
                    fishcontour_correct = fishcontour;
                    f_center.X          = fishcontour.com.X;
                    f_center.Y          = fishcontour.com.Y;
                }
                if (!experiment.stim_in_progress)
                {
                    drew_barriers = false;
                }
                if (experiment.stim_in_progress && !drew_barriers)
                {
                    if (halfmoon || stonehenge || minefield || minefield_control)
                    {
                        for (int ind = 0; ind < barrierlist.Count; ind++)
                        {
                            CvInvoke.Circle(cvimage, barrierlist[ind].center, barrierlist[ind].height / 2, new MCvScalar(255, 0, 0), 1);
                        }
                    }
                    Image <Gray, Byte> d2d = cvimage.ToImage <Gray, Byte>();
                    data_2D_roi   = SliceROIImage(d2d, f_center.X, f_center.Y, roidim);
                    drew_barriers = true;
                }
                else
                {
                    data_2D_roi = SliceROI(data_2D, f_center.X, f_center.Y, roidim);
                }
                cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
                cv_roi.SetTo(data_2D_roi);

                CamData camdat = new CamData(cv_roi, f_center, fishcontour_correct, buff_out, j, stim_pixel_readout);
                pipe_buffer.Post(camdat);
                if (j % 10 == 0)
                {
                    xycounter++;
                    coordlist.Add(camdat.fishcoord);
                    if (experiment.experiment_phase > experiment_phase)
                    {
                        experiment_phase = experiment.experiment_phase;
                        phasebounds.Add(xycounter);
                    }
                }
                if (j % 100 == 0 && !experiment.stim_in_progress)
                {
                    //    CvInvoke.Circle(cvimage, fishcontour_correct.center, 2,new MCvScalar(255, 255, 0));
                    CvInvoke.Circle(cvimage, fishcontour_correct.com, 2, new MCvScalar(255, 255, 255));
                    if (halfmoon || stonehenge || minefield || minefield_control)
                    {
                        for (int ind = 0; ind < barrierlist.Count; ind++)
                        {
                            CvInvoke.Circle(cvimage, barrierlist[ind].center, barrierlist[ind].height / 2, new MCvScalar(255, 0, 0), 3);
                        }
                    }
                    else
                    {
                        CvInvoke.Circle(cvimage, experiment.barrier_center, barrier.height / 2, new MCvScalar(255, 0, 0), 3);
                    }
                    CvInvoke.Imshow(camerawindow, cvimage);
                    CvInvoke.WaitKey(1);
                    if (j % 1000 == 0)
                    {
                        byte[,] mode_frame = new byte[frameHeight, frameWidth];
                        Buffer.BlockCopy(data_2D, 0, mode_frame, 0, data_2D.Length);
                        imglist.Add(mode_frame);
                        if (imglist.LongCount() == 40)
                        {
                            var modethread = new Thread(() => ModeWrapper(imglist, mode_reset, experiment, exp_directory));
                            modethread.Start();
                        }
                    }
                }
                if (experiment.experiment_complete)
                {
                    break;
                }

                j = buff_out + 1;
            }
            modeimage_barrier.Save(home_directory + "/background_" + exp_string + ".tif");
            modeimage.Save(home_directory + "/background_nobar" + exp_string + ".tif");
            string experiment_string   = exp_directory + "/all_xycoords_" + exp_string + ".txt";
            string phasestring         = exp_directory + "/phase_" + exp_string + ".txt";
            string numframes_gray      = exp_directory + "/numframesgray_" + exp_string + ".txt";
            string numframes_gray_dark = exp_directory + "/numframesgray_dark.txt";

            using (StreamWriter sr = new StreamWriter(experiment_string))
            {
                foreach (Point fishpoint in coordlist)
                {
                    sr.WriteLine(fishpoint.ToString());
                }
            }
            using (StreamWriter sr = new StreamWriter(phasestring))
            {
                foreach (int phase in phasebounds)
                {
                    sr.WriteLine(phase.ToString());
                }
            }
            using (StreamWriter sr = new StreamWriter(numframes_gray))
            {
                foreach (int ng in experiment.num_grayframes)
                {
                    sr.WriteLine(ng.ToString());
                }
            }
            if (exp_string == "b")
            {
                using (StreamWriter sr = new StreamWriter(numframes_gray_dark))
                {
                    foreach (int ngd in experiment.num_grayframes_d)
                    {
                        sr.WriteLine(ngd.ToString());
                    }
                }
            }
        }
Code Example #14
File: Program.cs  Project: LarryLegend33/EscapeCode
        static ContourProperties FishContour(Mat image_raw, Mat background, Point tc, List <ContourProperties> blist, bool control)
        {
// BUG IN HERE IS THAT CONTPROPS HEIGHT GETS SET EVEN WHEN THERE IS NO CONTOUR FOUND. THIS OCCURS BEFORE ENTERING THE LOOP
// BASED ON CONTPROPS.HEIGHT SIZE. YOU RETURN SOMETHING WITH A HEIGHT BUT NO COORDINATE (0,0) AND THE MAIN LINE THINKS YOU HAVE A CONTOUR AT 0,0.
            bool fishcont_found             = false;
            Size frsize                     = new Size(image_raw.Width, image_raw.Height);
            Mat  image                      = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
            ContourProperties     contprops = new ContourProperties();
            ThresholdType         ttype     = 0;
            VectorOfVectorOfPoint contours  = new VectorOfVectorOfPoint();
            Mat hierarchy                   = new Mat();

            CvInvoke.AbsDiff(image_raw, background, image);
            // This should be 30 as the LB. Switched to 20 to see if i could pick up paramecia.
            CvInvoke.Threshold(image, image, 25, 255, ttype);
            // IF YOU NEED TO SHOW THE THRESHOLDED IMAGE, UNCOMMENT THESE LINES
//            String camerawindow2 = "Camera Window 2";
//          CvInvoke.NamedWindow(camerawindow2);
//        CvInvoke.Imshow(camerawindow2, image);
//      CvInvoke.WaitKey(1);
            CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
            int       fish_contour_index = 0;
            int       height             = 0;
            Point     contourCOM         = new Point();
            Point     contour_center     = new Point();
            Rectangle bounding_rect      = new Rectangle();

            for (int ind = 0; ind < contours.Size; ind++)
            {
                MCvMoments com = new MCvMoments();
                com              = CvInvoke.Moments(contours[ind]);
                contourCOM.X     = (int)(com.M10 / com.M00);
                contourCOM.Y     = (int)(com.M01 / com.M00);
                bounding_rect    = CvInvoke.BoundingRectangle(contours[ind]);
                contour_center.X = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
                contour_center.Y = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
                if (bounding_rect.Width > bounding_rect.Height)
                {
                    height = bounding_rect.Width;
                }
                else
                {
                    height = bounding_rect.Height;
                }
                if (height < 60 && height > 8)
                {
                    if (image_raw.Width > 1000)
                    {
                        if (!control)
                        {
                            bool tooclose = false;
                            for (int i = 0; i < blist.Count; i++)
                            {
// This allows 3, 4, or 5 to be recorded as a COM center, but would be rejected by Tap
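                                // Skip candidates whose center of mass lies within 3 px of a barrier's edge (distance to the barrier center minus its radius).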
                                if (VectorMag(blist[i].center, contourCOM) - (blist[i].height / 2) < 3)
                                {
                                    tooclose = true;
                                    break;
                                }
                            }
                            if (tooclose)
                            {
                                continue;
                            }
                        }
                        if (VectorMag(contourCOM, tc) > 460)
                        //this tells the algorithm not to look for fish outside the tank.
                        {
                            continue;
                        }
                        if (contourCOM.X < 0 || contourCOM.Y < 0)
                        {
                            continue;
                        }
                    }
                    fish_contour_index = ind;
                    fishcont_found     = true;
                    break;
                }
            }
            if (fishcont_found)
            {
// could also choose the contour center below using the bounding rect
                contprops.com    = contourCOM;
                contprops.height = height;
                contprops.center = contour_center;
            }
            return(contprops);
        }
Code Example #15
        private ObjectLayer Execute3rdLevelSegmentation(ObjectLayer l2ndLevel, GrayscaleProcessor gpSobel, GrayscaleProcessor gpH, float targetArea)
        {
            List <Contour> finalContours = new List <Contour>();

            for (int i = 0; i < l2ndLevel.Objects.Count; i++)
            {
                finalContours.Add(l2ndLevel.Objects[i].Contour);
            }

            double[] hBackground = this.GetBackgroundHistogram(l2ndLevel, gpH);
            double[] hForeground = this.GetForegroundHistogram(l2ndLevel, gpH);

            Parallel.For(0, l2ndLevel.Objects.Count, i =>
            {
                ImageObject obj = l2ndLevel.Objects[i];

                ContourProperties cp = ContourProperties.FromContour(obj.Contour);

                obj.Features.Add(new Feature("Area", cp.Area));
            });

            Map map = new Map(gpSobel.Width, gpSobel.Height);
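            // The map below marks pixels whose gpH value is more frequent in the foreground histogram than in the background one, skipping pixels that already belong to a sufficiently large 2nd-level object.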

            Parallel.For(0, gpH.Height, dy =>
            {
                for (int dx = 0; dx < gpH.Width; dx++)
                {
                    UInt32 h = gpH[dx, dy];

                    if (hForeground[h] <= hBackground[h])
                    {
                        continue;
                    }

                    UInt32 id = l2ndLevel.Map[dx, dy];

                    if (id != 0)
                    {
                        ImageObject obj = l2ndLevel.Objects.GetObjectById(id);

                        double area = obj.Features["Area"].Value;

                        if (area > 0.33 * targetArea)
                        {
                            continue;
                        }
                    }

                    map[dx, dy] = 0xffffffff;
                }
            });

            ObjectLayer layer = new ConnectedComponentCollector().Execute(map);

            layer = new ContourOptimizer().RemoveNonCompactPixels(layer, 3);

            for (int i = 0; i < layer.Objects.Count; i++)
            {
                finalContours.Add(layer.Objects[i].Contour);
            }

            Contour[] contours = this.Sort(finalContours.ToArray(), gpSobel, gpH, targetArea);

            layer = this.CreateLayer(gpSobel.Width, gpSobel.Height, contours);

            Map finalMap = new Map(layer.Map, false);

            for (int dy = 0; dy < gpH.Height; dy++)
            {
                for (int dx = 0; dx < gpH.Width; dx++)
                {
                    if (l2ndLevel.Map[dx, dy] != 0)
                    {
                        continue;
                    }

                    if (map[dx, dy] != 0)
                    {
                        continue;
                    }

                    finalMap[dx, dy] = 0;
                }
            }

            layer = new ConnectedComponentCollector().Execute(finalMap);

            layer = new ContourOptimizer().RemoveNonCompactPixels(layer, 3);

            //layer=new ConcaveObjectSeparation().Execute(layer, 0.33, true);

            double minArea = Math.Max(0.1 * targetArea, MIN_AREA);

            layer = layer.CreateAbove(obj =>
            {
                float area = ContourProperties.FromContour(obj.Contour).Area;

                return(area > minArea);
            });

            layer = this.RefillContours(layer);

            return(layer);
        }
Code Example #16
        private ObjectLayer Execute2ndLevelSegmentation(GrayscaleProcessor gp, GrayscaleProcessor gpSobel, GrayscaleProcessor gpH, float targetArea)
        {
            ContourBasedSegmentation cbs = new ContourBasedSegmentation();

            cbs.CreatePrimarySegmentation(gp, MAX_CONTOURLENGTH);

            cbs.EvaluateContours(c =>
            {
                return(this.GetContourValue(c, gpSobel, gpH, targetArea));
            });

            ObjectLayer layer = cbs.CreateLayer(MIN_CONTOURLENGTH, int.MaxValue);

            layer = layer.CreateAbove(obj =>
            {
                return(this.GetContourGradient(obj, gp) < 0);
            });

            layer = new ContourOptimizer().RemoveNonCompactPixels(layer, 3);

            //layer=new ConcaveObjectSeparation().Execute(layer, 0.33, true);

            double[] hBackground = this.GetBackgroundHistogram(layer, gpH);
            double[] hForeground = this.GetForegroundHistogram(layer, gpH);

            bool isFirst = true;

            ObjectLayer firstStep = null;
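            // The loop below repeatedly discards objects whose histogram looks more like background than foreground; each removed object's histogram is shifted into the background model, and the loop ends when nothing more is removed.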

            while (true)
            {
                bool removed = false;

                layer = layer.CreateAbove(obj =>
                {
                    int[] hHistogram = this.GetHistogram(obj, gpH);

                    double hRatioForeground = this.GetRatioForeground(hHistogram, hForeground, hBackground);

                    if (hRatioForeground > 0.5)
                    {
                        return(true);
                    }

                    for (int i = 0; i < 256; i++)
                    {
                        int val = hHistogram[i];

                        hForeground[i] -= val;
                        hBackground[i] += val;
                    }

                    removed = true;

                    return(false);
                });

                if (isFirst)
                {
                    firstStep = layer;
                    isFirst   = false;
                }

                if (!removed)
                {
                    break;
                }
            }

            if (layer.Objects.Count == 0)
            {
                layer = firstStep;
            }

            double minArea = Math.Max(0.1 * targetArea, MIN_AREA);

            layer = layer.CreateAbove(obj =>
            {
                float area = ContourProperties.FromContour(obj.Contour).Area;

                return(area >= minArea);
            });

            layer = this.RefillContours(layer);

            return(layer);
        }