private ManagedImage temp; // a temporary image for storage of a capture

        #endregion Fields

        #region Constructors

        // constructor
        public ManagedImageRollingBuffer(int maxlen)
        {
            int i;
            MAX_LEN = maxlen;
            Buffer = new ManagedImage[MAX_LEN];
            // pre-allocating buffers
            for (i = 0; i < MAX_LEN; i++)
                Buffer[i] = new ManagedImage();

            first = last = -1;
            BufSemaphore = new Semaphore(1, 1); // single access at a time

            temp = new ManagedImage(); // initialize the temporary image
        }
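The add()/isEmpty() members used later in this listing (FrameQueue.add, FrameQueue.isEmpty) are not shown. A minimal sketch, assuming the first/last indices wrap modulo MAX_LEN and every access is guarded by BufSemaphore as in the constructor; overflow handling is omitted:

        public void add(ManagedCamera cam)
        {
            BufSemaphore.WaitOne();
            last = (last + 1) % MAX_LEN;       // advance the tail index
            if (first == -1)
                first = last;                  // first element of a previously empty buffer
            cam.RetrieveBuffer(Buffer[last]);  // capture straight into the pre-allocated slot
            BufSemaphore.Release();
        }

        public bool isEmpty()
        {
            return first == -1;
        }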
Example #2
        void RunSingleCamera(ManagedPGRGuid guid)
        {
            const int k_numImages = 10;

            ManagedGigECamera cam = new ManagedGigECamera();

            // Connect to a camera
            cam.Connect(guid);

            // Get the camera information
            CameraInfo camInfo = cam.GetCameraInfo();
            PrintCameraInfo(camInfo);

            uint numStreamChannels = cam.GetNumStreamChannels();
            for (uint i=0; i < numStreamChannels; i++)
            {
                PrintStreamChannelInfo(cam.GetGigEStreamChannelInfo(i));
            }

            GigEImageSettingsInfo imageSettingsInfo = cam.GetGigEImageSettingsInfo();

            GigEImageSettings imageSettings = new GigEImageSettings();
            imageSettings.offsetX = 0;
            imageSettings.offsetY = 0;
            imageSettings.height = imageSettingsInfo.maxHeight;
            imageSettings.width = imageSettingsInfo.maxWidth;
            imageSettings.pixelFormat = PixelFormat.PixelFormatMono8;

            cam.SetGigEImageSettings(imageSettings);

            // Get embedded image info from camera
            EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

            // Enable timestamp collection
            if (embeddedInfo.timestamp.available == true)
            {
                embeddedInfo.timestamp.onOff = true;
            }

            // Set embedded image info to camera
            cam.SetEmbeddedImageInfo(embeddedInfo);

            // Start capturing images
            cam.StartCapture();

            ManagedImage rawImage = new ManagedImage();
            for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
            {
                // Retrieve an image
                cam.RetrieveBuffer(rawImage);

                // Get the timestamp
                TimeStamp timeStamp = rawImage.timeStamp;

                Console.WriteLine(
                   "Grabbed image {0} - {1} {2} {3}",
                   imageCnt,
                   timeStamp.cycleSeconds,
                   timeStamp.cycleCount,
                   timeStamp.cycleOffset);

                // Create a converted image
                ManagedImage convertedImage = new ManagedImage();

                // Convert the raw image
                rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

                // Create a unique filename
                string filename = String.Format(
                   "GigEGrabEx_CSharp-{0}-{1}.bmp",
                   camInfo.serialNumber,
                   imageCnt);

                // Get the Bitmap object. Bitmaps are only valid if the
                // pixel format of the ManagedImage is RGB or RGBU.
                System.Drawing.Bitmap bitmap = convertedImage.bitmap;

                // Save the image
                bitmap.Save(filename);
            }

            // Stop capturing images
            cam.StopCapture();

            // Disconnect the camera
            cam.Disconnect();
        }
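RunSingleCamera above is shown without its caller. A hedged sketch of a driver (RunAllCameras is an illustrative name), following the bus-manager enumeration pattern used by the other examples in this listing; it does not filter out non-GigE interfaces:

        void RunAllCameras()
        {
            ManagedBusManager busMgr = new ManagedBusManager();
            uint numCameras = busMgr.GetNumOfCameras();
            Console.WriteLine("Number of cameras detected: {0}", numCameras);

            for (uint i = 0; i < numCameras; i++)
            {
                ManagedPGRGuid guid = busMgr.GetCameraFromIndex(i);
                RunSingleCamera(guid);
            }
        }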
Example #3
        static void Main(string[] args)
        {
            PrintBuildInfo();

            const int k_numImages = 10;
            bool useSoftwareTrigger = true;

            ManagedBusManager busMgr = new ManagedBusManager();
            uint numCameras = busMgr.GetNumOfCameras();

            Console.WriteLine("Number of cameras detected: {0}", numCameras);

            ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);

            ManagedCamera cam = new ManagedCamera();

            cam.Connect(guid);

            // Get the camera information
            CameraInfo camInfo = cam.GetCameraInfo();

            PrintCameraInfo(camInfo);

            if (!useSoftwareTrigger)
            {
                // Check for external trigger support
                TriggerModeInfo triggerModeInfo = cam.GetTriggerModeInfo();
                if (triggerModeInfo.present != true)
                {
                    Console.WriteLine("Camera does not support external trigger! Exiting...\n");
                    return;
                }
            }

            // Get current trigger settings
            TriggerMode triggerMode = cam.GetTriggerMode();

            // Set camera to trigger mode 0
            // A source of 7 means software trigger
            triggerMode.onOff = true;
            triggerMode.mode = 0;
            triggerMode.parameter = 0;

            if (useSoftwareTrigger)
            {
                // A source of 7 means software trigger
                triggerMode.source = 7;
            }
            else
            {
                // Triggering the camera externally using source 0.
                triggerMode.source = 0;
            }

            // Set the trigger mode
            cam.SetTriggerMode(triggerMode);

            // Poll to ensure camera is ready
            bool retVal = PollForTriggerReady(cam);
            if (retVal != true)
            {
                return;
            }

            // Get the camera configuration
            FC2Config config = cam.GetConfiguration();

            // Set the grab timeout to 5 seconds
            config.grabTimeout = 5000;

            // Set the camera configuration
            cam.SetConfiguration(config);

            // Camera is ready, start capturing images
            cam.StartCapture();

            if (useSoftwareTrigger)
            {
                if (CheckSoftwareTriggerPresence(cam) == false)
                {
                    Console.WriteLine("SOFT_ASYNC_TRIGGER not implemented on this camera!  Stopping application\n");
                    return;
                }
            }
            else
            {
                Console.WriteLine("Trigger the camera by sending a trigger pulse to GPIO%d.\n",
                  triggerMode.source);
            }

            ManagedImage image = new ManagedImage();
            for (int iImageCount = 0; iImageCount < k_numImages; iImageCount++)
            {
                if (useSoftwareTrigger)
                {

                    // Check that the trigger is ready
                    retVal = PollForTriggerReady(cam);

                    Console.WriteLine("Press the Enter key to initiate a software trigger.\n");
                    Console.ReadLine();

                    // Fire software trigger
                    retVal = FireSoftwareTrigger(cam);
                    if (retVal != true)
                    {
                        Console.WriteLine("Error firing software trigger!");
                        return;
                    }
                }

                // Grab image
                cam.RetrieveBuffer(image);

                Console.WriteLine(".\n");
            }

            Console.WriteLine("Finished grabbing images");

            // Stop capturing images
            cam.StopCapture();

            // Turn off trigger mode
            triggerMode.onOff = false;
            cam.SetTriggerMode(triggerMode);

            // Disconnect the camera
            cam.Disconnect();

            Console.WriteLine("Done! Press any key to exit...");
            Console.ReadKey();
        }
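PollForTriggerReady, CheckSoftwareTriggerPresence and FireSoftwareTrigger are called above but not included in this listing. A hedged reconstruction in the style of the stock AsyncTriggerEx helpers, assuming ManagedCamera exposes ReadRegister/WriteRegister and using the conventional IIDC register addresses:

        static bool CheckSoftwareTriggerPresence(ManagedCamera cam)
        {
            const uint k_triggerInquiry = 0x530;           // TRIGGER_INQ register
            uint regVal = cam.ReadRegister(k_triggerInquiry);
            return (regVal & 0x10000) == 0x10000;          // software-trigger support bit
        }

        static bool PollForTriggerReady(ManagedCamera cam)
        {
            const uint k_softwareTrigger = 0x62C;          // SOFTWARE_TRIGGER register
            uint regVal;
            do
            {
                regVal = cam.ReadRegister(k_softwareTrigger);
            }
            while ((regVal >> 31) != 0);                   // busy while the top bit is set
            return true;
        }

        static bool FireSoftwareTrigger(ManagedCamera cam)
        {
            const uint k_softwareTrigger = 0x62C;
            const uint k_fireValue = 0x80000000;
            cam.WriteRegister(k_softwareTrigger, k_fireValue);
            return true;
        }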
        // start capturing
        public void startCapture(ManagedPGRGuid camGuid, int vidMode,System.Windows.Forms.PictureBox displayPicture, 
                                 String fileName,Boolean record2file)
        {
            int i;

            Flag_GravityFound_Y = false; // gravity is not known

            // CLEARING THE FRAME QUEUE NO MATTER WHAT...
            FrameQueue.clear();

            RecordToFile = record2file;

            // creating the GPS data list
            GpsCaptureData = new List<GPSDataInstance>();
            // creating the IMU data List
            IMUCapturedata = new List<IMUDataInstance>();

            // resetting frame index
            FrameIndex = 0;

            // 1. connect to the camera
            Cam.Connect(camGuid);

            int fps_i = 0;
            if (vidMode == 0)
            {
                Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Yuv422, FrameRate.FrameRate30);
                fps_i = 30;
            }
            else if (vidMode == 1) {
                Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Rgb, FrameRate.FrameRate15);
                fps_i = 15;
                }
            else if (vidMode == 2)
            {
                Format7ImageSettings fset = new Format7ImageSettings();
                fset.height = 540;
                fset.width = 960;
                fset.offsetX = 40;
                fset.offsetY = 118;
                fset.mode = Mode.Mode4;

                fset.pixelFormat = PixelFormat.PixelFormatRgb8;

                Cam.SetFormat7Configuration(fset, 40.0f); // this is equivalent to 24 fps

                fps_i = 24;
            }

            if (RecordToFile)
            {
                // 3. Creating the avi recorder object
                AviRecorder = new ManagedAVIRecorder();

                MJPGOption option = new MJPGOption();

                float fps = (float)fps_i;

                option.frameRate = fps;
                option.quality = 100;  // 100 for superb quality
                AviRecorder.AVIOpen(fileName, option);
            }

            // 4. setting the frame buffering option
            // leave it for now...

            // 5. start the capturing
            Cam.StartCapture();

            // MUST discard the first few frames!
            ManagedImage rawImage = new ManagedImage();
            for (i = 0; i < 10;  i++)
            {
                Cam.RetrieveBuffer(rawImage);
            }

            // 6. set the display bitmap
            DisplayPicture = displayPicture;

            // 7. starting sampling, recording and dumping threads

            // IMU sampling thread
            IMUSamplingTimer = new PrecisionTimer(.0075, this.IMUSamplingEvent); // sampling frequency at 150 Hz

            RecordingThreadActive = true;
            OutOfRecordingThread = true;

            IMUSamplingTimer.start();
            RecordingThread = new Thread(this.mainLoop);
            //RecordingThread.Priority = ThreadPriority.Highest;
            RecordingThread.Start();

            // creating the thread for the dumping
            DumpingThread = new System.Threading.Thread(this.dumpingLoop);

            while (OutOfRecordingThread); // must wait until the recording thread enters the loop, otherwise the dumping will never start!

            DumpingThread.Start();
        }
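No matching stop routine appears in this listing. A hedged sketch of what a stopCapture() could look like, reusing the flags and objects set up above; PrecisionTimer.stop() is an assumption mirroring start(), and AVIClose mirrors the commented-out call in dumpingLoop:

        public void stopCapture()
        {
            RecordingThreadActive = false;   // ask mainLoop to exit its while loop
            while (!OutOfDumpingThread) ;    // wait for dumpingLoop to drain FrameQueue

            IMUSamplingTimer.stop();         // assumed counterpart of IMUSamplingTimer.start()

            Cam.StopCapture();

            if (RecordToFile)
                AviRecorder.AVIClose();      // close the AVI file
        }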
        // the recording thread main loop
        public void mainLoop()
        {
            OutOfRecordingThread = false;
            ManagedImage rawImage = new ManagedImage();
            ManagedImage convertedImage = new ManagedImage();
            GPSDataInstance newGpsData;

            while (RecordingThreadActive)
            {

                // 1. retrieving a frame from the buffer

                if ((Flag_GravityFound_Y && (IMUcomms != null)) || (IMUcomms == null)) // record only if gravity is found or if there is no IMU
                {

                    FrameQueue.add(Cam);
                    //Cam.RetrieveBuffer(rawImage);

                    // increasing frame index
                    FrameIndex++;

                    //tempImage = new ManagedImage(rawImage);

                    //FrameQueue.add(tempImage);

                    //FrameQueue.Buffer[FrameQueue.last].Convert(PixelFormat.PixelFormatBgr, convertedImage);
                    //System.Drawing.Bitmap bitmap = convertedImage.bitmap;
                    //DisplayPicture.Image = (Image)convertedImage.bitmap;

                    // adding gps data in the GPS data list
                    if (GpsReceiver != null)
                    {

                        newGpsData = new GPSDataInstance(FrameIndex,
                                                         GpsReceiver.LatitudeAsDecimal,
                                                         GpsReceiver.LongitudeAsDecimal,
                                                         GpsReceiver.SpeedOverGround,
                                                         GpsReceiver.CourseOverGround,
                                                         GpsReceiver.MRCStatus);
                        GpsCaptureData.Add(newGpsData);
                    }

                }

            }

            DisplayPicture.Image = null;
            OutOfRecordingThread = true;
        }
        // the dumping thread loop
        public void dumpingLoop()
        {
            OutOfDumpingThread = false;
            ManagedImage convertedImage = new ManagedImage();
            ManagedImage mgdimg;

            while (!OutOfRecordingThread || (!FrameQueue.isEmpty()))
            {

                // recording the image straight into the file
                if (!FrameQueue.isEmpty())
                {
                     mgdimg = FrameQueue.remove();

                    // displaying the image (via the DisplayBitmap)
                    // Convert the raw image
                    mgdimg.Convert(PixelFormat.PixelFormatBgr, convertedImage);

                    DisplayPicture.Image = (Image)convertedImage.bitmap;

                    if (RecordToFile)
                        AviRecorder.AVIAppend(mgdimg);

                }

            }
            // displaying the image (via the DisplayBitmap)
            // Convert the raw image
            //ManagedImage convertedImage = new ManagedImage();
            //FrameQueue[0].Convert(PixelFormat.PixelFormatBgr, convertedImage);
            //System.Drawing.Bitmap bitmap = convertedImage.bitmap;

            //Image<Bgr, Byte> img = new Image<Bgr, byte>(bitmap);

            //DisplayPicture.Image = (Image)bitmap;

            //if (RecordToFile) AviRecorder.AVIClose();

            //DisplayPicture.Image = null;
            OutOfDumpingThread = true;
        }
        public void MainLoop()
        {
            // Managed Image MUST BE OUT OF THE LOOP! (For some reason...)
            ManagedImage rawImage = new ManagedImage();
            ManagedImage convertedImage = new ManagedImage();
            //System.Drawing.Bitmap bitmap;

            while (state == ST_CALIBRATING)
            {

                // retrieving an image using the Flea3 API
                Cam.RetrieveBuffer(rawImage);

                // Convert the raw image to a System.Drawing.Bitmap
                rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
                System.Drawing.Bitmap bitmap = convertedImage.bitmap;

                // Simply create a new OpenCV frame with the bitmap
                Image<Bgr, Byte> frame = new Image<Bgr, byte>(bitmap);

                Image<Gray, Byte> grayFrame = frame.Convert<Gray, Byte>();

                // and creating the drawImage frame

                DrawImage = grayFrame.Clone();

                // declaring an array of points for all RGB components
                PointF[][] corners = new PointF[3][];

                // finding corners in the frame
                // left frame

                // bool result = CameraCalibration.FindChessboardCorners(grayFrame,
                //                                                      new Size(ChessHorizCount, ChessVertCount), Emgu.CV.CvEnum.CALIB_CB_TYPE.FAST_CHECK, out corners[0]);

                corners[0] = CameraCalibration.FindChessboardCorners(grayFrame,
                                                                     new Size(ChessHorizCount, ChessVertCount), Emgu.CV.CvEnum.CALIB_CB_TYPE.FAST_CHECK);
                                                                      //Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH | Emgu.CV.CvEnum.CALIB_CB_TYPE.NORMALIZE_IMAGE
                                                                      //);

                bool result = !(corners[0] == null);

                if (result)
                {
                    FrameCounter++;
                    //finding corners with sub pixel accuracy
                    grayFrame.FindCornerSubPix(corners, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(0.01));
                    // now drawing the corners

                    /* CameraCalibration.DrawChessboardCorners(DrawImage,
                                                            new Size(ChessHorizCount, ChessVertCount),
                                                            corners[0],
                                                            true
                                                            ); */
                    CameraCalibration.DrawChessboardCorners(DrawImage,
                                                            new Size(ChessHorizCount, ChessVertCount),
                                                            corners[0]
                                                            );

                    // adding the detected points to the list
                    Points[FrameCounter - 1] = corners[0];

                }

                // assigning the image to the image viewer (so that it shows)
                //imageViewer.Image = DrawImage;
                DisplayBox.Image = (Image)DrawImage.Bitmap;

                if (FrameCounter >= FrameCount)
                {
                    state = ST_IDLE;

                    calibrate();

                    Console.WriteLine("Calibration now is complete. You may NOW kill the thread!");
                }
            }
        }
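calibrate() is not part of this listing; whatever it does, it needs world-coordinate chessboard points matching the Points[] image corners collected above. A hedged sketch of how that grid could be generated (BuildObjectPoints and squareSize are illustrative names, not part of the original class):

        MCvPoint3D32f[][] BuildObjectPoints(int frameCount, float squareSize)
        {
            MCvPoint3D32f[][] objectPoints = new MCvPoint3D32f[frameCount][];
            for (int f = 0; f < frameCount; f++)
            {
                objectPoints[f] = new MCvPoint3D32f[ChessHorizCount * ChessVertCount];
                for (int y = 0; y < ChessVertCount; y++)
                {
                    for (int x = 0; x < ChessHorizCount; x++)
                    {
                        // corners from FindChessboardCorners are ordered row by row
                        objectPoints[f][y * ChessHorizCount + x] =
                            new MCvPoint3D32f(x * squareSize, y * squareSize, 0);
                    }
                }
            }
            return objectPoints;
        }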
Example #8
        static void Main(string[] args)
        {
            PrintBuildInfo();

            const Mode k_fmt7Mode = Mode.Mode0;
            const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;
            const int k_numImages = 10;

            // Since this application saves images in the current folder
            // we must ensure that we have permission to write to this folder.
            // If we do not have permission, fail right away.
            FileStream fileStream;
            try
            {
                fileStream = new FileStream(@"test.txt", FileMode.Create);
                fileStream.Close();
                File.Delete("test.txt");
            }
            catch
            {
                Console.WriteLine("Failed to create file in current folder.  Please check permissions.\n");
                return;
            }

            ManagedBusManager busMgr = new ManagedBusManager();
            uint numCameras = busMgr.GetNumOfCameras();

            Console.WriteLine("Number of cameras detected: {0}", numCameras);

            ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);

            ManagedCamera cam = new ManagedCamera();

            cam.Connect(guid);

            // Get the camera information
            CameraInfo camInfo = cam.GetCameraInfo();

            PrintCameraInfo(camInfo);

            // Query for available Format 7 modes
            bool supported = false;
            Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);

            PrintFormat7Capabilities(fmt7Info);

            if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
            {
                // Pixel format not supported!
                return;
            }

            Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
            fmt7ImageSettings.mode = k_fmt7Mode;
            fmt7ImageSettings.offsetX = 0;
            fmt7ImageSettings.offsetY = 0;
            fmt7ImageSettings.width = fmt7Info.maxWidth;
            fmt7ImageSettings.height = fmt7Info.maxHeight;
            fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

            // Validate the settings to make sure that they are valid
            bool settingsValid = false;
            Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
                fmt7ImageSettings,
                ref settingsValid);

            if (settingsValid != true)
            {
                // Settings are not valid
                return;
            }

            // Set the settings to the camera
            cam.SetFormat7Configuration(
               fmt7ImageSettings,
               fmt7PacketInfo.recommendedBytesPerPacket);

            // Get embedded image info from camera
            EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

            // Enable timestamp collection
            if (embeddedInfo.timestamp.available == true)
            {
                embeddedInfo.timestamp.onOff = true;
            }

            // Set embedded image info to camera
            cam.SetEmbeddedImageInfo(embeddedInfo);

            // Start capturing images
            cam.StartCapture();

            // Retrieve frame rate property
            CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);

            Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);

            Console.WriteLine("Grabbing {0} images", k_numImages);

            ManagedImage rawImage = new ManagedImage();
            for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
            {
                // Retrieve an image
                cam.RetrieveBuffer(rawImage);

                // Get the timestamp
                TimeStamp timeStamp = rawImage.timeStamp;

                Console.WriteLine(
                   "Grabbed image {0} - {1} {2} {3}",
                   imageCnt,
                   timeStamp.cycleSeconds,
                   timeStamp.cycleCount,
                   timeStamp.cycleOffset);

                // Create a converted image
                ManagedImage convertedImage = new ManagedImage();

                // Convert the raw image
                rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

                // Create a unique filename
                string filename = String.Format(
                   "CustomImageEx_CSharp-{0}-{1}.bmp",
                   camInfo.serialNumber,
                   imageCnt);

                // Get the Bitmap object. Bitmaps are only valid if the
                // pixel format of the ManagedImage is RGB or RGBU.
                System.Drawing.Bitmap bitmap = convertedImage.bitmap;

                // Save the image
                bitmap.Save(filename);
            }

            // Stop capturing images
            cam.StopCapture();

            // Disconnect the camera
            cam.Disconnect();

            Console.WriteLine("Done! Press any key to exit...");
            Console.ReadKey();
        }
Example #9
 public FrameData(ManagedImage img, GPSDataInstance gpsData)
 {
     Img = img;
     GPSData = gpsData;
 }
Example #10
 void OnImageGrabbed(ManagedImage image)
 {
     Console.WriteLine("Grabbed image {0}", imageCnt++);
 }
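This callback is only useful once it is handed to the camera. A hedged sketch of the wiring, assuming StartCapture accepts the grab callback directly (the overload used by the grab-callback style of FlyCapture2 sample this snippet appears to come from); the two-second sleep is arbitrary:

 void RunCallbackCapture(ManagedPGRGuid guid)
 {
     ManagedCamera cam = new ManagedCamera();
     cam.Connect(guid);

     imageCnt = 0;                         // counter incremented by OnImageGrabbed above
     cam.StartCapture(OnImageGrabbed);     // assumed callback overload of StartCapture

     System.Threading.Thread.Sleep(2000);  // let a few frames arrive

     cam.StopCapture();
     cam.Disconnect();
 }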
Example #11
        void RunSingleCamera(ManagedPGRGuid guid)
        {
            const int NumImages = 10;

            ManagedCamera cam = new ManagedCamera();

            // Connect to a camera
            cam.Connect(guid);

            // Get the camera information
            CameraInfo camInfo = cam.GetCameraInfo();

            PrintCameraInfo(camInfo);

            // Get embedded image info from camera
            EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

            // Enable timestamp collection
            if (embeddedInfo.timestamp.available == true)
            {
                embeddedInfo.timestamp.onOff = true;
            }

            // Set embedded image info to camera
            cam.SetEmbeddedImageInfo(embeddedInfo);

            // Start capturing images
            cam.StartCapture();

            // Create a raw image
            ManagedImage rawImage = new ManagedImage();

            // Create a converted image
            ManagedImage convertedImage = new ManagedImage();

            for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
            {
                try
                {
                    // Retrieve an image
                    cam.RetrieveBuffer(rawImage);
                }
                catch (FC2Exception ex)
                {
                    Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
                    continue;
                }


                // Get the timestamp
                TimeStamp timeStamp = rawImage.timeStamp;

                Console.WriteLine(
                    "Grabbed image {0} - {1} {2} {3}",
                    imageCnt,
                    timeStamp.cycleSeconds,
                    timeStamp.cycleCount,
                    timeStamp.cycleOffset);

                // Convert the raw image
                rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

                // Create a unique filename
                string filename = String.Format(
                    "FlyCapture2Test_CSharp-{0}-{1}.bmp",
                    camInfo.serialNumber,
                    imageCnt);

                // Get the Bitmap object. Bitmaps are only valid if the
                // pixel format of the ManagedImage is RGB or RGBU.
                System.Drawing.Bitmap bitmap = convertedImage.bitmap;

                // Save the image
                bitmap.Save(filename);
            }

            // Stop capturing images
            cam.StopCapture();

            // Disconnect the camera
            cam.Disconnect();
        }
Example #12
        void RunSingleCamera(ManagedPGRGuid guid)
        {
            const int NumImages = 10;

            ManagedCamera cam = new ManagedCamera();

            // Connect to a camera
            cam.Connect(guid);

            // Get the camera information
            CameraInfo camInfo = cam.GetCameraInfo();

            PrintCameraInfo(camInfo);

            //
            // Register for End of Exposure (EoE) event. We simply create an instance of
            // the ManagedEventOptions, populate it, and register it with the camera.
            //
            ManagedEventOptions option = new ManagedEventOptions();

            option.EventCallbackFcn = OnEventReceived;
            option.EventName        = "EventExposureEnd";

            try
            {
                cam.RegisterEvent(option);

                Console.WriteLine("Successfully registered event: {0}", option.EventName);
            }
            catch (FC2Exception ex)
            {
                Console.WriteLine("Error registering EventExposureEnd : {0}", ex.Message);
                return;
            }

            //
            // Attempt to register all events. This will fail, since we only expect this
            // to be called if no events have yet been registered, but a fatal error
            // will not be generated. If the user wants to use this call, the user can
            // DeregisterAllEvents(), and then run RegisterAllEvents().
            //
            // If there are numerous different event types, and the user would like to
            // create a "default" callback and/or UserData struct, the user can run
            // RegisterAllEvents() with the default callback function, issue
            // DeregisterEvent() for the specific event that uses a custom callback, and
            // then issue RegisterEvent() with the specific callback function. This is
            // to ensure the user doesn't accidentally corrupt the callback function
            // list.
            //
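            // A hedged sketch of the "default callback plus one custom callback"
            // sequence described above (defaultOptions/customOptions are
            // illustrative names, not variables in this sample):
            //
            //     cam.RegisterAllEvents(defaultOptions);   // default handler for every event
            //     cam.DeregisterEvent(customOptions);      // drop the one event to override
            //     cam.RegisterEvent(customOptions);        // re-register it with its own callback
            //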
            try
            {
                cam.RegisterAllEvents(option);
            }
            catch (FC2Exception ex)
            {
                // Expected error
                Console.WriteLine("Error registering EventExposureEnd : {0}", ex.Message);
            }

            // Start capturing images
            cam.StartCapture();

            // Retrieve images from buffer
            ManagedImage rawImage = new ManagedImage();

            for (ImageCount = 0; ImageCount < NumImages; ImageCount++)
            {
                try
                {
                    // Retrieve an image
                    cam.RetrieveBuffer(rawImage);
                }
                catch (FC2Exception ex)
                {
                    Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
                    continue;
                }
            }

            // Stop capturing images
            cam.StopCapture();

            // Uncomment the following to deregister event handler for specific device event
            //cam.DeregisterEvent(option);

            // Deregister event handler for all events
            cam.DeregisterAllEvents();

            // Disconnect the camera
            cam.Disconnect();

            // Reset counter for next iteration
            ImageCount = 0;
        }
        public void MainLoop()
        {
            // Managed Image MUST BE OUT OF THE LOOP! (For some reason...)
            ManagedImage rawImage       = new ManagedImage();
            ManagedImage convertedImage = new ManagedImage();

            //System.Drawing.Bitmap bitmap;


            while (state == ST_CALIBRATING)
            {
                // retrieving an image using the Flea3 API
                Cam.RetrieveBuffer(rawImage);


                // Convert the raw image to a System.Drawing.Bitmap
                rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
                System.Drawing.Bitmap bitmap = convertedImage.bitmap;

                // Simply create a new OpenCV frame with the bitmap
                Image <Bgr, Byte> frame = new Image <Bgr, byte>(bitmap);

                Image <Gray, Byte> grayFrame = frame.Convert <Gray, Byte>();


                // and creating the drawImage frame

                DrawImage = grayFrame.Clone();


                // declaring an array of points for all RGB components
                PointF[][] corners = new PointF[3][];

                // finding corners in the frame
                // left frame

                // bool result = CameraCalibration.FindChessboardCorners(grayFrame,
                //                                                      new Size(ChessHorizCount, ChessVertCount), Emgu.CV.CvEnum.CALIB_CB_TYPE.FAST_CHECK, out corners[0]);

                corners[0] = CameraCalibration.FindChessboardCorners(grayFrame,
                                                                     new Size(ChessHorizCount, ChessVertCount), Emgu.CV.CvEnum.CALIB_CB_TYPE.FAST_CHECK);
                //Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH | Emgu.CV.CvEnum.CALIB_CB_TYPE.NORMALIZE_IMAGE
                //);

                bool result = !(corners[0] == null);



                if (result)
                {
                    FrameCounter++;
                    //finding corners with sub pixel accuracy
                    grayFrame.FindCornerSubPix(corners, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(0.01));
                    // now drawing the corners

                    /* CameraCalibration.DrawChessboardCorners(DrawImage,
                     *                                      new Size(ChessHorizCount, ChessVertCount),
                     *                                      corners[0],
                     *                                      true
                     *                                      ); */
                    CameraCalibration.DrawChessboardCorners(DrawImage,
                                                            new Size(ChessHorizCount, ChessVertCount),
                                                            corners[0]
                                                            );

                    // adding the detected points to the list
                    Points[FrameCounter - 1] = corners[0];
                }



                // assigning the image to the image viewer (so that it shows)
                //imageViewer.Image = DrawImage;
                DisplayBox.Image = (Image)DrawImage.Bitmap;


                if (FrameCounter >= FrameCount)
                {
                    state = ST_IDLE;

                    calibrate();

                    Console.WriteLine("Calibration now is complete. You may NOW kill the thread!");
                }
            }
        }
Example #14
        void RunCamera(ManagedPGRGuid guid)
        {
            const uint k_numImages = 100;

            try
            {
                using (ManagedCamera cam = new ManagedCamera())
                {
                    cam.Connect(guid);

                    CameraInfo camInfo = cam.GetCameraInfo();
                    PrintCameraInfo(camInfo);

                    // Start capturing images
                    Console.WriteLine("Starting capture...");
                    cam.StartCapture();

                    List <ManagedImage> imageList = new List <ManagedImage>();

                    ManagedImage rawImage = new ManagedImage();
                    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
                    {
                        cam.RetrieveBuffer(rawImage);
                        ManagedImage tempImage = new ManagedImage(rawImage);
                        imageList.Add(tempImage);

                        Console.WriteLine("Grabbed image {0}", imageCnt);
                    }

                    // Stop capturing images
                    Console.WriteLine("Stopping capture...");

                    // Check if the camera supports the FRAME_RATE property
                    CameraPropertyInfo propInfo = cam.GetPropertyInfo(PropertyType.FrameRate);

                    float frameRateToUse = 15.0F;
                    if (propInfo.present == true)
                    {
                        // Get the frame rate
                        CameraProperty prop = cam.GetProperty(PropertyType.FrameRate);
                        frameRateToUse = prop.absValue;
                    }

                    Console.WriteLine("Using frame rate of {0}", frameRateToUse);

                    string aviFileName;

                    aviFileName = String.Format("SaveImageToAviEx_CSharp-Uncompressed-{0}", camInfo.serialNumber);
                    SaveAviHelper(AviType.Uncompressed, ref imageList, aviFileName, frameRateToUse);

                    aviFileName = String.Format("SaveImageToAviEx_CSharp-Mjpg-{0}", camInfo.serialNumber);
                    SaveAviHelper(AviType.Mjpg, ref imageList, aviFileName, frameRateToUse);

                    aviFileName = String.Format("SaveImageToAviEx_CSharp-h264-{0}", camInfo.serialNumber);
                    SaveAviHelper(AviType.H264, ref imageList, aviFileName, frameRateToUse);
                }
            }
            catch (FC2Exception ex)
            {
                Console.WriteLine("There was an FC2 error: " + ex.Message);
            }
        }
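SaveAviHelper is referenced three times above but not included in this listing. A hedged sketch of the MJPG branch only, reusing the ManagedAVIRecorder/MJPGOption pattern shown in startCapture() earlier in this listing; the Uncompressed and H264 branches would use their own option types and are omitted here:

        static void SaveAviHelper(AviType aviType, ref List<ManagedImage> imageList,
                                  string aviFileName, float frameRate)
        {
            ManagedAVIRecorder aviRecorder = new ManagedAVIRecorder();

            if (aviType == AviType.Mjpg)
            {
                MJPGOption option = new MJPGOption();
                option.frameRate = frameRate;
                option.quality = 75;              // 0-100; an arbitrary middle-ground default
                aviRecorder.AVIOpen(aviFileName, option);
            }
            else
            {
                return;                           // Uncompressed/H264 omitted in this sketch
            }

            foreach (ManagedImage image in imageList)
            {
                aviRecorder.AVIAppend(image);
            }

            aviRecorder.AVIClose();
        }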