Exemplo n.º 1
0
 /// <summary>
 /// Opens an IMAQ session for the Hamamatsu Orca ER camera and wires up the
 /// attribute accessors, binning-mode display labels, and acquisition delegates.
 /// </summary>
 /// <param name="sessionName">NI-IMAQ interface name (e.g. "img0") to open.</param>
 public ImaqHAL(string sessionName)
 {
     Trace.TraceInformation("Launching IMAQ camera driver...");

     // The session must exist before any of the attribute reads below.
     this.session = new ImaqSession(sessionName);

     // Fixed hardware description for this camera model.
     cameraAttributes.BitDepth = 12;
     cameraAttributes.FullHeight = 1024;
     cameraAttributes.FullWidth = 1344;
     cameraAttributes.Model = "Hamamatsu Orca ER";
     cameraAttributes.SerialNumber = session.Attributes[ImaqStandardAttribute.SerialNumber].GetValue().ToString();
     cameraAttributes.SupportedBinning = new[]
     {
         BinningMode.Binning1x1,
         BinningMode.Binning2x2,
         BinningMode.Binning4x4,
         BinningMode.Binning8x8
     };

     // Human-readable labels such as "2x2 Binning", one per supported mode.
     binningModeStrings = Array.ConvertAll(cameraAttributes.SupportedBinning,
                                           mode => string.Format("{0}x{0} Binning", (int)mode));

     // Wrappers that route reads/writes through the IMAQ attribute API.
     attrib_cam_string = new AttributeWrapper<string, string>(getAttribute_string,
                                                              session.Attributes.SetCameraAttribute);
     attrib_cam_num = new AttributeWrapper<string, double>(getAttribute_num,
                                                           session.Attributes.SetCameraAttribute);
     attrib_imaq_int = new AttributeWrapper<ImaqStandardAttribute, int>(
         key => (int)session.Attributes[key].GetValue(),
         (key, value) => session.Attributes[key].SetValue(value));

     // Start with neutral analog gain/offset.
     SettingsDynamic = new VideoSettingsDynamic
     {
         AnalogGain_dB = 0,
         AnalogOffset  = 0
     };

     streamingProcDelegate     = new ThreadStart(AcqProc);
     frameCopierDelegate       = new Action<NF, VideoFrame>(frameCopier);
     setContrastValuesDelegate = setContrastValues;
 }
Exemplo n.º 2
0
 /// <summary>
 /// Shuts down the IMAQ session if it is still open; safe to call repeatedly.
 /// </summary>
 public virtual void Close()
 {
     if (session == null)
     {
         return;
     }
     session.Close();
     session = null;
 }
Exemplo n.º 3
0
 /// <summary>
 /// Releases the IMAQ session; subsequent calls are no-ops.
 /// </summary>
 public virtual void Close()
 {
     var open = session;
     if (open != null)
     {
         // Close first so that session stays non-null if Close() throws.
         open.Close();
         session = null;
     }
 }
Exemplo n.º 4
0
        /// <summary>
        /// Returns the name of the first IMAQ interface found on this machine.
        /// </summary>
        /// <exception cref="ImaqException">Thrown when no camera interface is detected.</exception>
        static string GetDefaultSessionName()
        {
            var available = ImaqSession.EnumerateInterfaces();
            if (available.Length > 0)
            {
                return available[0];
            }
            throw new ImaqException("No IMAQ cameras detected.  Ensure cables are connected and power is on.");
        }
Exemplo n.º 5
0
        /// <summary>
        /// Builds the camera display window. Only component initialization runs
        /// here; the session parameter is currently not consumed.
        /// </summary>
        /// <param name="_sessionForDisplay">IMAQ session intended for display (presently unused).</param>
        public CamDisplay(ImaqSession _sessionForDisplay)
        {
            InitializeComponent();
            // NOTE(review): the original author had disabled (commented out) a
            // background-worker grab-acquisition setup here. It has been removed;
            // recover it from version control if live acquisition in this dialog
            // is ever needed again.
        }
Exemplo n.º 6
0
        /// <summary>
        /// Opens the IMAQ session named in <paramref name="settings"/>, builds a
        /// continuously looping buffer ring, and starts asynchronous acquisition.
        /// </summary>
        /// <param name="settings">Device name, framerate, and buffer count to use.</param>
        void Initialize(CameraSettings settings)
        {
            session = new ImaqSession(settings.deviceName);

            Framerate = settings.framerate;
            Trace.WriteLine("Framerate: " + Framerate);

            // Allocate the requested number of buffers; every buffer advances to
            // the next one except the last, which loops back so acquisition runs
            // continuously.
            bufList = session.CreateBufferCollection(settings.numBuffers);
            int last = bufList.Count - 1;
            for (int idx = 0; idx <= last; idx++)
            {
                bufList[idx].Command = (idx == last) ? ImaqBufferCommand.Loop : ImaqBufferCommand.Next;
            }

            // Configure and kick off the acquisition.
            session.Acquisition.Configure(bufList);
            session.Acquisition.AcquireAsync();
        }
Exemplo n.º 7
0
        /// <summary>
        /// Pulls <paramref name="numframes"/> frames from the session and keeps a
        /// copy of every <paramref name="mod"/>-th one as a 1024x1280 U8 array.
        /// </summary>
        /// <param name="ses">Open IMAQ session to extract frames from.</param>
        /// <param name="numframes">Total number of frames to consume.</param>
        /// <param name="mod">Sampling stride; frame i is kept when i % mod == 0.</param>
        /// <returns>Copies of the sampled frames, oldest first.</returns>
        static List<byte[,]> GetImageList(ImaqSession ses, int numframes, int mod)
        {
            const int frameHeight = 1024;
            const int frameWidth = 1280;
            var sampled = new List<byte[,]>();
            uint extracted = 0;
            for (uint frame = 0; frame < numframes; frame++)
            {
                ImaqBuffer grabbed = ses.Acquisition.Extract((uint)0, out extracted);
                byte[,] pixels = grabbed.ToPixelArray().U8;
                if (frame % mod == 0)
                {
                    // Copy out of the driver-owned buffer before it is recycled.
                    var snapshot = new byte[frameHeight, frameWidth];
                    Buffer.BlockCopy(pixels, 0, snapshot, 0, pixels.Length);
                    sampled.Add(snapshot);
                }
            }
            return sampled;
        }
Exemplo n.º 8
0
        /// <summary>
        /// Sets up the tracking dialog: one 800x600 ROI at the origin, a ring of
        /// 40 acquisition buffers on interface "img0", and asynchronous
        /// acquisition with a completion callback.
        /// </summary>
        public AutoTrackerDlg()
        {
            InitializeComponent();

            var config = new CameraConfig
            {
                ROIs    = new[] { new Point(0, 0) },
                ROISize = new Point(800, 600)
            };

            session = new ImaqSession("img0");
            SetROIs(config);

            buflist = session.CreateBufferCollection(40);
            session.Acquisition.Configure(buflist);

            bufNum = 0;
            // Hook the completion callback before starting the async acquire.
            session.Acquisition.AcquireCompleted += Acquisition_AcquireCompleted;
            session.Acquisition.AcquireAsync();

            session.Start();
        }
Exemplo n.º 9
0
        // Closed-loop fish tracker: pulls frames from an NI-IMAQ ring buffer,
        // background-subtracts a precomputed mode image to locate the fish
        // (ROI-first, full frame on a miss), and commands a pyboard over serial
        // ("paramove.pull_up()/pull_down()") based on where the fish contour's
        // center of mass sits relative to its geometric center.
        // NOTE(review): the acquisition loop never terminates, and neither the
        // SerialPort nor the ImaqSession is disposed — presumably the process is
        // killed externally to end a session; confirm before reuse.
        static void Main(string[] args)
        {
            // Pyboard running MicroPython; "paramove" exposes the motion commands.
            SerialPort pyboard = new SerialPort("COM6", 115200);
            pyboard.Open();
            pyboard.WriteLine("import paramove\r");
            var options = new DataflowBlockOptions();
            options.BoundedCapacity = 10;
            var pipe_buffer = new BufferBlock<CamData>(options);
            bool foundfish = false;
            int l_or_r = 0; 
            MCvScalar gray = new MCvScalar(128, 128, 128);
            int roidim = 80;
            string camera_id = "img0"; //this is the ID of the NI-IMAQ board in NI MAX. 
            var _session = new ImaqSession(camera_id);

            String camerawindow = "Camera Window";
            CvInvoke.NamedWindow(camerawindow);
            int frameWidth = 1280;
            int frameHeight = 1024;
            uint bufferCount = 3;
            uint buff_out = 0;
            int numchannels = 1;
            ContourProperties fishcontour = new ContourProperties();
            System.Drawing.Size framesize = new System.Drawing.Size(frameWidth, frameHeight);
            System.Drawing.Size roi_size = new System.Drawing.Size(roidim, roidim);
            Mat cvimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat modeimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat modeimage_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            byte[,] data_2D = new byte[frameHeight, frameWidth];
            byte[,] data_2D_roi = new byte[roidim, roidim];
            byte[,] imagemode = new byte[frameHeight, frameWidth];
            ImaqBuffer image = null;
            List<byte[,]> imglist = new List<byte[,]>();
            ImaqBufferCollection buffcollection = _session.CreateBufferCollection((int)bufferCount, ImaqBufferCollectionType.VisionImage);
            _session.RingSetup(buffcollection, 0, false);
            _session.Acquisition.AcquireAsync();

            // Build the background "mode" image from 5000 frames, sampling every
            // 400th; shown once so the operator can confirm it before tracking.
            imglist = GetImageList(_session, 5000, 400);
            imagemode = FindMode(imglist);
            modeimage.SetTo(imagemode);
            imglist.Clear();
            CvInvoke.Imshow(camerawindow, modeimage);
            CvInvoke.WaitKey(0);
            Point f_center = new Point();
            Mat cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            // Prime the ring: j tracks which buffer index to extract next.
            image = _session.Acquisition.Extract((uint)0, out buff_out);
            uint j = buff_out;
            Console.WriteLine("j followed by buff_out");
            Console.WriteLine(j.ToString());
            Console.WriteLine(buff_out.ToString());
            while (true)
            {
                image = _session.Acquisition.Extract(j, out buff_out);
                data_2D = image.ToPixelArray().U8;
                cvimage.SetTo(data_2D);
        
                // Fast path: search only an roidim x roidim window around the
                // last known fish position.
                if (foundfish)
                {
                    modeimage_roi.SetTo(SliceROI(imagemode, f_center.X, f_center.Y, roidim));
                    data_2D_roi = SliceROI(data_2D, f_center.X, f_center.Y, roidim);
                    cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
                    cv_roi.SetTo(data_2D_roi);
                    fishcontour = FishContour(cv_roi, modeimage_roi);
                    if (fishcontour.height != 0)
                    {
                        f_center.X = (int)fishcontour.center.X + f_center.X - roidim / 2;  // puts ROI coords into full frame coords
                        f_center.Y = (int)fishcontour.center.Y + f_center.Y - roidim / 2;
                    }

                    else
                    {
                        foundfish = false;
                    }
                }
                // Slow path: fish not found in the ROI, so scan the full frame.
                if (!foundfish)                
                {
                    fishcontour = FishContour(cvimage, modeimage);
                    if (fishcontour.height != 0)
                    {
                        f_center.X = (int)fishcontour.center.X;
                        f_center.Y = (int)fishcontour.center.Y;
//                        foundfish = true;
                        data_2D_roi = SliceROI(data_2D, f_center.X, f_center.Y, roidim);
                        cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
                        cv_roi.SetTo(data_2D_roi);                        
                    }
                    else
                    {
                        foundfish = false;
                        cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
                        cv_roi.SetTo(gray); //in movie indicates that program lost the fish on this frame
                   
                        if (j % 25 == 0)
                        {
                            CvInvoke.Imshow(camerawindow, cvimage);
                            CvInvoke.WaitKey(1);
                            Console.WriteLine("Missed Fish");
                            Console.WriteLine(fishcontour.height);
                        }
                        // Skip the command phase entirely when the fish is lost.
                        j = buff_out + 1;
                        continue; 
                    }
                }

                // Decide direction from contour center-of-mass vs. center.
                // NOTE(review): com.Y > center.Y sets l_or_r = 1, but below
                // l_or_r == 0 triggers pull_up() — the commented-out WriteLines
                // here suggest the opposite mapping. Confirm which sense is
                // intended before trusting the stimulus direction.
                if (fishcontour.com.Y > fishcontour.center.Y)
                {
//                   pyboard.WriteLine("paramove.pull_up()\r");
                    l_or_r = 1;

                }
                else if (fishcontour.com.Y < fishcontour.center.Y)
                {
// pyboard.WriteLine("paramove.pull_down()\r");
                    l_or_r = 0;
                }
                // PROBABLY MAKE THIS SO IT DOESNT DRAW DURING A STIMULUS
                // Commands and annotation are issued only every 25th buffer.
                if (j % 25 == 0)
                {
                    if (l_or_r == 0)
                    {
                        pyboard.WriteLine("paramove.pull_up()\r");
                        CvInvoke.Circle(cvimage, new Point(f_center.X, f_center.Y), 20, new MCvScalar(0, 0, 0));
//                        CvInvoke.Circle(cvimage, new Point(f_center.X - roidim / 2 + fish_head.X, f_center.Y - roidim / 2 + fish_head.Y), 4, new MCvScalar(255,0,0));
                        Console.WriteLine(fishcontour.height);
                    }
                    else if (l_or_r == 1)
                    {
                        pyboard.WriteLine("paramove.pull_down()\r");
                        CvInvoke.Circle(cvimage, new Point(f_center.X, f_center.Y), 20, new MCvScalar(255, 0, 0));
                        Console.WriteLine(fishcontour.height);
                    }
                  //  CvInvoke.Imshow(camerawindow, cvimage);
                  //  CvInvoke.WaitKey(1);
                }
                // Advance to the buffer after the one just delivered.
                j = buff_out + 1;
            }


        }
Exemplo n.º 10
0
        // Barrier-avoidance experiment entry point. Phases: (1) interactive
        // setup (fish ID, light location, experiment type, background reuse);
        // (2) camera/projector alignment via max-projection contour finding;
        // (3) background "mode" images with and without barriers; (4) barrier
        // localization (real contours or virtual positions); (5) the tracking
        // loop, which streams fish ROIs to the stimulus thread through a
        // bounded dataflow pipe and logs coordinates/phases to disk on exit.
        // NOTE(review): `imglist`, `modeimage_barrier`, and `mode_reset` are
        // not declared in this method — presumably static fields of the
        // enclosing class; verify before refactoring.
        static void Main(string[] args)
        {
            // Note that if you want to do halfmoon or stonehenge trials, place halfmoon and stonehenge in the center of the tank.
            // Fill their center with a barrier for the first mode. Then take the barrier out and take the mode again. Use the smallest barrier possible (so the fish can get close to the center) and, like in nb trials, get rid of the tracking restriction on barriers

            var options = new DataflowBlockOptions();

            options.BoundedCapacity = 10;
            var   pipe_buffer = new BufferBlock <CamData>(options);
            Point tank_center = new Point
            {
                X = 640,
                Y = 512,
            };
            int roidim = 80;

            string camera_id         = "img0"; //this is the ID of the NI-IMAQ board in NI MAX.
            var    _session          = new ImaqSession(camera_id);
            bool   reuse_background  = false;
            bool   drew_barriers     = false;
            bool   halfmoon          = false;
            bool   stonehenge        = false;
            bool   minefield         = false;
            bool   minefield_control = false;

            // Interactive session setup: per-fish results directory.
            Console.WriteLine("Enter FishID   ");
            String fishid         = Console.ReadLine();
            String home_directory = "C:/Users/Deadpool/Desktop/Results/";
            String exp_directory  = home_directory + fishid;
            bool   exists_already = System.IO.Directory.Exists(exp_directory);

            if (!exists_already)
            {
                System.IO.Directory.CreateDirectory(exp_directory);
            }
            else
            {
                // "y" overwrites, "c" continues into the existing directory,
                // anything else aborts the program.
                Console.WriteLine("Directory Already Exists. Overrite?  ");
                String overwrite = Console.ReadLine();
                if (overwrite == "y")
                {
                    System.IO.Directory.CreateDirectory(exp_directory);
                }
                else if (overwrite == "c")
                {
                }
                else
                {
                    Environment.Exit(0);
                }
            }
            Console.WriteLine("Enter Light X Location  ");
            String lightloc_X = Console.ReadLine();

            Console.WriteLine("Enter Light Y Location  ");
            String lightloc_Y       = Console.ReadLine();
            // X is shifted 25 px left: the stimulus readout below samples a
            // 100-px strip starting at this point.
            int    light_location_X = Convert.ToInt32(lightloc_X) - 25;
            int    light_location_Y = Convert.ToInt32(lightloc_Y);

            Console.WriteLine("Enter Experiment Type  ");
            String exp_string = Console.ReadLine();

            Console.WriteLine("Use old background?  ");
            String reuse = Console.ReadLine();

            if (reuse == "y")
            {
                reuse_background = true;
            }
            // "n"/"t"/"v" run with virtual barriers; "b" uses real barriers.
            if (exp_string == "n" || exp_string == "t" || exp_string == "v")
            {
                minefield_control = true;
            }
            else if (exp_string == "b")
            {
                minefield = true;
            }
            String camerawindow = "Camera Window";

            CvInvoke.NamedWindow(camerawindow);
            int  frameWidth  = 1280;
            int  frameHeight = 1024;
            uint bufferCount = 3;
            // Could try changing this to 2 or 100
            // Checked and there is no card memory. It makes a buffer on system mem. Tried increasing virtual memory so
            // HD can be used as RAM. Allocated an additional 32 GB to virtual mem.
            uint      buff_out    = 0;
            int       numchannels = 1;
            MCvScalar gray        = new MCvScalar(128, 128, 128);
            List <ContourProperties> barrierlist         = new List <ContourProperties>();
            ContourProperties        fishcontour         = new ContourProperties();
            ContourProperties        fishcontour_correct = new ContourProperties();
            ContourProperties        barrier             = new ContourProperties();

            System.Drawing.Size framesize = new System.Drawing.Size(frameWidth, frameHeight);
            System.Drawing.Size roi_size  = new System.Drawing.Size(roidim, roidim);
            Mat cvimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat modeimage_barrier_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat modeimage             = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            //            Mat modeimage_barrier = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            Mat            maxproj_cv = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
            // event1/event2 hand-shake with the stimulus thread during alignment.
            AutoResetEvent event1     = new AutoResetEvent(true);
            AutoResetEvent event2     = new AutoResetEvent(false);
            MCvMoments     COM        = new MCvMoments();

            byte[,] data_2D             = new byte[frameHeight, frameWidth];
            byte[,] data_2D_roi         = new byte[roidim, roidim];
            byte[,] imagemode_nobarrier = new byte[frameHeight, frameWidth];
            byte[,] maxprojimage        = new byte[frameHeight, frameWidth];
            ImaqBuffer           image          = null;
            ImaqBufferCollection buffcollection = _session.CreateBufferCollection((int)bufferCount, ImaqBufferCollectionType.VisionImage);

            _session.RingSetup(buffcollection, 0, false);
            _session.Acquisition.AcquireAsync();
            RecordAndStim experiment = new RecordAndStim(event1, event2, pipe_buffer, exp_string);

            experiment.experiment_directory = exp_directory;
            var stimthread = new Thread(experiment.StartStim);

            stimthread.Start();

            // THIS GRABS THE MODE FOR THE TANK IN GENERAL BEFORE ALIGNMENT

            if (!experiment.alignment_complete)
            {
                CvInvoke.WaitKey(0);
                imglist      = GetImageList(_session, 500, 10);
                maxprojimage = FindMaxProjection(imglist);
                maxproj_cv.SetTo(maxprojimage);
                imglist.Clear();
                CvInvoke.Imshow(camerawindow, maxproj_cv);
                CvInvoke.WaitKey(0);
            }

            // IF CAMERA IS NOT YET ALIGNED TO THE PROJECTOR, THIS LOOP FINDS THE LOCATION OF THE CALIBRATION CONTOUR THE EXPERIMENT CLASS IS PLACING ON THE PROJECTOR.

            experiment.start_align = true;
            if (!experiment.alignment_complete)
            {
                while (!experiment.alignment_complete)
                {
                    imglist = GetImageList(_session, 500, 10);
                    data_2D = FindMaxProjection(imglist);
                    cvimage.SetTo(data_2D);
                    Console.WriteLine("Finding Largest Contour");
                    experiment.projcenter_camcoords = LargestContour(cvimage, maxproj_cv, true).center;
                    CvInvoke.Imshow(camerawindow, cvimage);
                    CvInvoke.WaitKey(1);
                    // Signal the stimulus thread, then wait for its next step.
                    event2.Set();
                    event1.WaitOne();
                }
                imglist.Clear();
                CvInvoke.WaitKey(0);
                imglist = GetImageList(_session, 500, 10);
                data_2D = FindMaxProjection(imglist);
                cvimage.SetTo(data_2D);
                experiment.tankwidth = LargestContour(cvimage, maxproj_cv, true).height * 2;
                Console.WriteLine("Width Of Tank Contour");
                Console.WriteLine(experiment.tankwidth);
                CvInvoke.Imshow(camerawindow, cvimage);
                CvInvoke.WaitKey(0);
                imglist.Clear();
            }

            // Next, the opposite thread is going to display a black circle that is the same size as the tank. Do a max projection on this
            // contour in order to measure width of the tank in projector coordinates.


            // Now you've put the IR filter back over the camera and are ready to do an experiment.
            // Get mode of image with no barrier present so you can background subtract and find the barriers and fish.
            imglist.Clear();
            if (reuse_background)
            {
                modeimage = CvInvoke.Imread(home_directory + "/background_nobar" + exp_string + ".tif", 0);
            }
            else
            {
                imglist             = GetImageList(_session, 5000, 400);
                imagemode_nobarrier = FindMode(imglist);
                modeimage.SetTo(imagemode_nobarrier);
                imglist.Clear();
                CvInvoke.Imshow(camerawindow, modeimage);
                CvInvoke.WaitKey(0);
            }

            // Here you have just added barriers to the tank. Now get a new mode that contains the barriers for use in background subtraction to find fish
            // and for localizing barriers.

            if (halfmoon || stonehenge || minefield)
            {
                imglist = GetImageList(_session, 5000, 400);
                if (reuse_background)
                {
                    modeimage_barrier = CvInvoke.Imread(home_directory + "/background_" + exp_string + ".tif", 0);
                }
                else
                {
                    imagemode = FindMode(imglist);
                    modeimage_barrier.SetTo(imagemode);
                }

                modeimage_barrier.Save(exp_directory + "/background_" + exp_string + ".tif");
                imglist.Clear();
                // Real barriers: locate them by differencing the two modes.
                barrierlist = BarrierLocations(modeimage_barrier, modeimage);
                for (int ind = 0; ind < barrierlist.Count; ind++)
                {
                    experiment.barrier_position_list.Add(barrierlist[ind].center);
                    experiment.barrier_radius_list.Add(barrierlist[ind].height / 2);
                }
            }
            else if (minefield_control)
            {
                // Virtual barriers: no physical objects, positions are generated.
                modeimage_barrier.SetTo(imagemode_nobarrier);
                modeimage_barrier.Save(exp_directory + "/background_" + exp_string + ".tif");

                barrierlist = GenerateVirtualBarriers(experiment.tankwidth, tank_center.X, tank_center.Y);
                for (int ind = 0; ind < barrierlist.Count; ind++)
                {
                    experiment.barrier_position_list.Add(barrierlist[ind].center);
                    experiment.barrier_radius_list.Add(barrierlist[ind].height / 2);
                }
            }

            // Persist barrier geometry (template/tank widths on the first line).
            using (StreamWriter barrierfile = new StreamWriter(exp_directory + "/barrierstruct_" + exp_string + ".txt"))
            {
                for (int bar = 0; bar < barrierlist.Count; bar++)
                {
                    if (bar == 0)
                    {
                        barrierfile.WriteLine(experiment.templatewidth.ToString());
                        barrierfile.WriteLine(experiment.tankwidth.ToString());
                    }
                    barrierfile.WriteLine(barrierlist[bar].center.ToString());
                    barrierfile.WriteLine(barrierlist[bar].height.ToString());
                }
            }

            CvInvoke.Imshow(camerawindow, modeimage_barrier);
            CvInvoke.WaitKey(0);


            if (halfmoon) //THIS IS BECAUSE YOU TAKE THE BARRIER AWAY AFTER IT FINDS THE HOLE. IE FOR HALFMOON TRIALS, YOU FIRST KEEP THE HALFMOON THERE FOR MODEIMAGE, THEN ADD A BARRIER THE SIZE OF THE HOLE FOR FINDING OF THE HOLE OF THE BARRIER. IF YOU WANT TO RUN STONEHENGE OR HALFMOON, DECLARE MINEFIELD_CONTROL AS TRUE, but don't draw barriers.
            {
                modeimage_barrier = modeimage;
                imagemode         = imagemode_nobarrier;
            }


            // IMAGE ACQUISITION AND FISH FINDING.
            //            Idea is to first acquire the image and turn it into a cvimage matrix. find the fish by finding the largest contour on a background subtracted and thresholded image (LargestContour function).  Each time you find the fish, store its coords so you can just search within a small ROI on the next frame. If you lose the fish, go back out to full frame and find it again.
            Point f_center = new Point();
            Mat   cv_roi   = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);

            image = _session.Acquisition.Extract((uint)0, out buff_out);
            uint j = buff_out;
            int  experiment_phase = 0;
            int  xycounter        = 0;

            Console.WriteLine("j followed by buff_out");
            Console.WriteLine(j.ToString());
            Console.WriteLine(buff_out.ToString());
            List <Point> coordlist   = new List <Point>();
            List <int>   phasebounds = new List <int>();

            while (true)
            {
                // The background-mode thread signals when its image list should
                // be discarded so a fresh mode can accumulate.
                if (mode_reset.WaitOne(0))
                {
                    Console.WriteLine("Clearing Imagelist");
                    imglist.Clear();
                    mode_reset.Reset();
                }
                image = _session.Acquisition.Extract(j, out buff_out);
                try
                {
                    data_2D = image.ToPixelArray().U8;
                }
                catch (NationalInstruments.Vision.VisionException e)
                {
                    // Buffer extraction can fail transiently; skip this frame.
                    Console.WriteLine(e);
                    continue;
                }

                // Sample a 100-px strip at the light location so the stimulus
                // thread can detect projector output in camera frames.
                byte[] stim_pixel_readout = new byte[100];
                for (int pix = 0; pix < 100; pix++)
                {
                    stim_pixel_readout[pix] = data_2D[light_location_Y, light_location_X + pix];
                }
                cvimage.SetTo(data_2D);
                fishcontour = FishContour(cvimage, modeimage_barrier, tank_center, barrierlist, minefield_control);

                // com makes sure that the head is near the barrier.
                if (fishcontour.height != 0)
                {
                    fishcontour_correct = fishcontour;
                    f_center.X          = fishcontour.com.X;
                    f_center.Y          = fishcontour.com.Y;
                }
                if (!experiment.stim_in_progress)
                {
                    drew_barriers = false;
                }
                // Draw barrier overlays once per stimulus presentation.
                if (experiment.stim_in_progress && !drew_barriers)
                {
                    if (halfmoon || stonehenge || minefield || minefield_control)
                    {
                        for (int ind = 0; ind < barrierlist.Count; ind++)
                        {
                            CvInvoke.Circle(cvimage, barrierlist[ind].center, barrierlist[ind].height / 2, new MCvScalar(255, 0, 0), 1);
                        }
                    }
                    Image <Gray, Byte> d2d = cvimage.ToImage <Gray, Byte>();
                    data_2D_roi   = SliceROIImage(d2d, f_center.X, f_center.Y, roidim);
                    drew_barriers = true;
                }
                else
                {
                    data_2D_roi = SliceROI(data_2D, f_center.X, f_center.Y, roidim);
                }
                cv_roi = new Mat(roi_size, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);
                cv_roi.SetTo(data_2D_roi);

                // Ship the ROI and metadata to the stimulus thread.
                CamData camdat = new CamData(cv_roi, f_center, fishcontour_correct, buff_out, j, stim_pixel_readout);
                pipe_buffer.Post(camdat);
                // Log fish coordinates every 10th buffer, and record where each
                // experiment phase begins in that coordinate stream.
                if (j % 10 == 0)
                {
                    xycounter++;
                    coordlist.Add(camdat.fishcoord);
                    if (experiment.experiment_phase > experiment_phase)
                    {
                        experiment_phase = experiment.experiment_phase;
                        phasebounds.Add(xycounter);
                    }
                }
                if (j % 100 == 0 && !experiment.stim_in_progress)
                {
                    //    CvInvoke.Circle(cvimage, fishcontour_correct.center, 2,new MCvScalar(255, 255, 0));
                    CvInvoke.Circle(cvimage, fishcontour_correct.com, 2, new MCvScalar(255, 255, 255));
                    if (halfmoon || stonehenge || minefield || minefield_control)
                    {
                        for (int ind = 0; ind < barrierlist.Count; ind++)
                        {
                            CvInvoke.Circle(cvimage, barrierlist[ind].center, barrierlist[ind].height / 2, new MCvScalar(255, 0, 0), 3);
                        }
                    }
                    else
                    {
                        CvInvoke.Circle(cvimage, experiment.barrier_center, barrier.height / 2, new MCvScalar(255, 0, 0), 3);
                    }
                    CvInvoke.Imshow(camerawindow, cvimage);
                    CvInvoke.WaitKey(1);
                    // Every 1000th buffer, bank a frame toward a refreshed
                    // background mode; at 40 banked frames, recompute it on a
                    // worker thread.
                    if (j % 1000 == 0)
                    {
                        byte[,] mode_frame = new byte[frameHeight, frameWidth];
                        Buffer.BlockCopy(data_2D, 0, mode_frame, 0, data_2D.Length);
                        imglist.Add(mode_frame);
                        if (imglist.LongCount() == 40)
                        {
                            var modethread = new Thread(() => ModeWrapper(imglist, mode_reset, experiment, exp_directory));
                            modethread.Start();
                        }
                    }
                }
                if (experiment.experiment_complete)
                {
                    break;
                }

                j = buff_out + 1;
            }
            // Experiment finished: persist backgrounds and all per-frame logs.
            modeimage_barrier.Save(home_directory + "/background_" + exp_string + ".tif");
            modeimage.Save(home_directory + "/background_nobar" + exp_string + ".tif");
            string experiment_string   = exp_directory + "/all_xycoords_" + exp_string + ".txt";
            string phasestring         = exp_directory + "/phase_" + exp_string + ".txt";
            string numframes_gray      = exp_directory + "/numframesgray_" + exp_string + ".txt";
            string numframes_gray_dark = exp_directory + "/numframesgray_dark.txt";

            using (StreamWriter sr = new StreamWriter(experiment_string))
            {
                foreach (Point fishpoint in coordlist)
                {
                    sr.WriteLine(fishpoint.ToString());
                }
            }
            using (StreamWriter sr = new StreamWriter(phasestring))
            {
                foreach (int phase in phasebounds)
                {
                    sr.WriteLine(phase.ToString());
                }
            }
            using (StreamWriter sr = new StreamWriter(numframes_gray))
            {
                foreach (int ng in experiment.num_grayframes)
                {
                    sr.WriteLine(ng.ToString());
                }
            }
            if (exp_string == "b")
            {
                using (StreamWriter sr = new StreamWriter(numframes_gray_dark))
                {
                    foreach (int ngd in experiment.num_grayframes_d)
                    {
                        sr.WriteLine(ngd.ToString());
                    }
                }
            }
        }
Exemplo n.º 11
0
        //static int max_interval = 50000;

        /// <summary>
        /// Entry point for a tone(CS)/shock(US) conditioning experiment.
        /// Schedules CS and US onset times (exponentially distributed ITIs),
        /// fires them from two pyboards on background threads, and records
        /// camera frames to an AVI for the whole session. Prompts on the
        /// console for an experiment ID and a condition (1 = paired CS->US,
        /// 2 = unpaired control), and writes parameters, event times and the
        /// per-frame buffer log under E:/ParaBehaviorData/&lt;id&gt;/.
        /// </summary>
        static void Main(string[] args)
        {
            // Set up the pyboards that generate the tone (CS) and shock (US).
            pyboard.Open();
            pyboard.WriteLine("import para\r");
            pyboard2.Open();
            pyboard2.WriteLine("import shockpara\r");

            // Random source for the exponentially distributed inter-trial intervals.
            var rand = new Random();

            // Set up the NI-IMAQ camera ring acquisition.
            string    camera_id   = "img0"; //this is the ID of the NI-IMAQ board in NI MAX.
            var       _session    = new ImaqSession(camera_id);
            var       jlist       = new List <uint>(); // one entry per recorded frame: the buffer index used
            int       frameWidth  = 1280;
            int       frameHeight = 1024;
            uint      bufferCount = 3;  // ring size; Extract() cycles through these buffers
            uint      buff_out    = 0;
            int       numchannels = 1;  // 8-bit monochrome frames
            Size      framesize   = new Size(frameWidth, frameHeight);
            Mat       cvimage     = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, numchannels);

            byte[,] data_2D = new byte[frameHeight, frameWidth];
            Console.WriteLine("Please Enter Experiment ID");
            string exp_id = Console.ReadLine();
            Directory.CreateDirectory("E:/ParaBehaviorData/" + exp_id);

            Console.WriteLine("Please Enter Condition (1=experiment, 2=control)");
            int cond = Convert.ToInt32(Console.ReadLine());

            VideoWriter          camvid         = new VideoWriter("E:/ParaBehaviorData/" + exp_id + "/" + exp_id + ".AVI", 0, 100, framesize, false);
            string               logpath        = "E:/ParaBehaviorData/" + exp_id + "/" + exp_id + "_log.txt";
            ImaqBuffer           image          = null;
            ImaqBufferCollection buffcollection = _session.CreateBufferCollection((int)bufferCount, ImaqBufferCollectionType.VisionImage);

            _session.RingSetup(buffcollection, 0, false);
            _session.Acquisition.AcquireAsync();
            uint j = buff_out;

            // Experiment parameters (all times in msec).
            int    trials        = 10;    // number of trials
            int    ISI           = 2000;  // interstimulus interval (between CS onset and US onset)
            int    US_dur        = 2000;  // US (shock) duration (msec)
            int    CS_dur        = 4000;  // CS (tone) duration (msec)
            int    CS_intensity  = 80;    // duty cycle % for CS pulse width
            int    min_interval  = 7000;  // minimum ITI
            double mean_interval = 15000; // mean interval between CS onsets

            // Write experiment parameters to file so the run is reproducible.
            string[] lines   = { "ISI: " + Convert.ToString(ISI), "US_dur: " + Convert.ToString(US_dur), "CS_dur: " + Convert.ToString(CS_dur), "CS_intensity: " + Convert.ToString(CS_intensity), "mean_interval: " + Convert.ToString(mean_interval), "min_interval: " + Convert.ToString(min_interval), "cond: " + Convert.ToString(cond), "trials: " + Convert.ToString(trials) };
            string   docpath = "E:/ParaBehaviorData/" + exp_id + "/" + exp_id + "_param.txt";

            using (StreamWriter outputFile = new StreamWriter(docpath))
            {
                foreach (string line in lines)
                {
                    outputFile.WriteLine(line);
                }
            }

            // CS onsets: cumulative exponential ITIs with a fixed minimum gap.
            int[] CS_times = new int[trials];
            int[] US_times = new int[trials];
            CS_times[0] = ExpRnd(mean_interval, rand) + 1000;
            for (int i = 1; i < trials; i++)
            {
                CS_times[i] = CS_times[i - 1] + ExpRnd(mean_interval, rand) + min_interval;
            }
            int experiment_duration = CS_times[trials - 1] + 10000;

            Console.WriteLine(experiment_duration);
            switch (cond)
            {
            case 1:
                // Paired condition: each US follows its CS by exactly the ISI.
                for (int i = 0; i < trials; i++)
                {
                    US_times[i] = CS_times[i] + ISI;
                }
                break;

            case 2:
                // Unpaired control: US times drawn independently of CS times.
                US_times[0] = ExpRnd(mean_interval, rand) + 1000;
                for (int i = 1; i < trials; i++)
                {
                    US_times[i] = US_times[i - 1] + ExpRnd(mean_interval, rand) + min_interval;
                }
                break;
            }

            // Write event times to disk.
            File.WriteAllLines("E:/ParaBehaviorData/" + exp_id + "/" + exp_id + "_CS_times.txt", CS_times.Select(tb => tb.ToString()));
            File.WriteAllLines("E:/ParaBehaviorData/" + exp_id + "/" + exp_id + "_US_times.txt", US_times.Select(tb => tb.ToString()));

            // Fire the stimuli from background threads; these are foreground
            // threads, so the process stays alive until they finish.
            var tonethread = new Thread(() => PlayCS(CS_dur, CS_intensity, CS_times));

            tonethread.Start();
            var shockthread = new Thread(() => ShockPara(US_dur, US_times));

            shockthread.Start();

            experiment_timer.Start();

            // Main acquisition loop: pull each new frame out of the ring and
            // append it to the AVI until the experiment duration elapses.
            while (true)
            {
                if (experiment_timer.ElapsedMilliseconds > experiment_duration)
                {
                    Console.WriteLine(Convert.ToString(experiment_timer.ElapsedMilliseconds));
                    camvid.Dispose();
                    // Disconnect the camera
                    CvInvoke.DestroyAllWindows();
                    using (StreamWriter logfile = new StreamWriter(logpath))
                    {
                        for (int jind = 0; jind < jlist.Count; jind++)
                        {
                            logfile.WriteLine(jlist[jind].ToString());
                        }
                    }
                    break;
                }
                // Copy the next ring buffer into an OpenCV Mat and write it to the video.
                image   = _session.Acquisition.Extract(j, out buff_out);
                data_2D = image.ToPixelArray().U8;
                cvimage.SetTo(data_2D);
                camvid.Write(cvimage);
                // NOTE(review): logs the requested index j, not the buffer actually
                // returned (buff_out) — confirm that is intended.
                jlist.Add(j);

                j = buff_out + 1;
            }

            // Release the frame grabber so the board is usable by the next run
            // (previously the session was left acquiring on exit).
            _session.Close();
        }
Exemplo n.º 12
0
        // Tears down the acquisition session (when compiled with ACQDATA) and
        // returns the form controls to their idle, editable state.
        private void Cleanup()
        {
#if ACQDATA
            // Detach the session field first, then close the detached reference.
            var sessionToClose = _session;
            _session = null;
            if (sessionToClose != null)
            {
                sessionToClose.Close();
            }
#endif
            //  Re-enable the setup controls and disable Stop.
            startButton.Enabled = true;
            stopButton.Enabled = false;
            interfaceTextBox.Enabled = true;
            numImages.Enabled = true;
            volumeDepthTextBox.Enabled = true;
            thresholdDeltaVoltageTextBox.Enabled = true;
        }
Exemplo n.º 13
0
        /// <summary>
        /// Starts an acquisition run: locks down the setup controls, configures
        /// the DAQmx analog-input task and the IMAQ looping ring acquisition
        /// (when compiled with ACQDATA), then launches the acquisition and
        /// render background workers. On any NI-IMAQ or numeric-format error
        /// the error is reported and <c>Cleanup()</c> restores the idle UI state.
        /// </summary>
        private void startButton_Click(object sender, EventArgs e)
        {
            try
            {
                //  Update the UI: disable setup controls while running.
                startButton.Enabled = false;
                stopButton.Enabled = true;
                bufNumTextBox.Text = "";
                interfaceTextBox.Enabled = false;
                numImages.Enabled = false;
                volumeDepthTextBox.Enabled = false;
                thresholdDeltaVoltageTextBox.Enabled = false;
#if ACQDATA
                // TODO: Params from UI
                // Create a new DAQmx task reading one voltage channel.
                myTask = new Task();
                physicalChannelText = "Dev1/ai0";
                minimumValue = -10.00;
                maximumValue = 10.00;
                rateValue = 10000.00;
                samplesPerChannelValue = 1000;

                // Create a virtual channel. The min/max values are already
                // doubles, so no Convert.ToDouble round-trip is needed.
                myTask.AIChannels.CreateVoltageChannel(physicalChannelText, "",
                    (AITerminalConfiguration)(-1), minimumValue,
                    maximumValue, AIVoltageUnits.Volts);

                analogInReader = new AnalogMultiChannelReader(myTask.Stream);

                // Verify the Task
                myTask.Control(TaskAction.Verify);

                //  Create a session.
                _session = new ImaqSession(interfaceTextBox.Text);

                //  Configure the image viewer to match the session's image type.
                displayImage = new VisionImage((ImageType)_session.Attributes[ImaqStandardAttribute.ImageType].GetValue());
                imageViewer.Attach(displayImage);

                //  Create a buffer collection for the acquisition with the requested
                //  number of images, and configure the buffers to loop continuously.
                int numberOfImages = (int)numImages.Value;
                bufList = _session.CreateBufferCollection(numberOfImages, ImaqBufferCollectionType.PixelValue2D);
                for (int i = 0; i < bufList.Count; ++i)
                {
                    // Last buffer loops back to the first for continuous capture.
                    bufList[i].Command = (i == bufList.Count - 1) ? ImaqBufferCommand.Loop : ImaqBufferCommand.Next;
                }

                //  Configure and start the acquisition.
                _session.Acquisition.Configure(bufList);
                _session.Acquisition.AcquireAsync();

                // Numeric settings parsed from the UI; a bad value throws
                // FormatException, handled below.
                _thresholdDeltaVoltage = Convert.ToDouble(thresholdDeltaVoltageTextBox.Text);
                _volumeDepth = Convert.ToInt32(volumeDepthTextBox.Text);
#endif

                // Bundle the render-related controls for the render worker.
                RenderUIArgs renderUIArgs;
                renderUIArgs.rotxTextBox = rotxTextBox;
                renderUIArgs.rotyTextBox = rotyTextBox;
                renderUIArgs.rotzTextBox = rotzTextBox;
                renderUIArgs.transxTextBox = transxTextBox;
                renderUIArgs.transyTextBox = transyTextBox;
                renderUIArgs.transzTextBox = transzTextBox;
                renderUIArgs.densityTextBox = densityTextBox;
                renderUIArgs.brightnessTextBox = brightnessTextBox;
                renderUIArgs.transoffsetTextBox = transoffsetTextBox;
                renderUIArgs.transscaleTextBox = transscaleTextBox;
                renderUIArgs.linfilterCheckBox = linfilterCheckBox;

                //  Start the background worker threads
                acquisitionWorker.RunWorkerAsync(subCheckBox);
                renderWorker.RunWorkerAsync(renderUIArgs);
            }
            catch (ImaqException ex)
            {
                MessageBox.Show(ex.Message, "NI-IMAQ Error");
                Cleanup();
            }
            catch (FormatException ex)
            {
                MessageBox.Show(ex.Message, "Format Error");
                Cleanup();
            }
        }