/// <summary>
/// Entry point for the dual-camera recording rig. Connects to the pyboard over
/// COM5, prompts the operator for an experiment ID and a frame count, then
/// records from one or two Point Grey cameras; with two cameras each runs on
/// its own thread while the pyboard executes the experiment protocol.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    Program program = new Program();

    // Serial link to the pyboard driving the experiment hardware.
    // The using block guarantees the COM port is released on exit
    // (the original leaked the open port).
    using (SerialPort pyboard = new SerialPort("COM5", 115200))
    {
        pyboard.Open();
        pyboard.WriteLine("import rig_load\r");

        ManagedBusManager busMgr = new ManagedBusManager();
        uint numCameras = busMgr.GetNumOfCameras();
        Console.WriteLine("Number of cameras detected: {0}", numCameras);

        // Output AVI paths derive from the operator-supplied experiment ID;
        // the two files sit on separate drives to spread the write load.
        Console.WriteLine("Please Enter Experiment ID: ");
        string idstring = Console.ReadLine();
        List<string> save_to = new List<string>
        {
            "D:/Movies/" + idstring + "_cam0.AVI",
            "E:/Movies/" + idstring + "_cam1.AVI"
        };

        Console.WriteLine("Please Enter Number of Frames: ");
        int frames;
        // Re-prompt on bad input instead of crashing: the original used
        // Convert.ToInt32, which throws FormatException on non-numeric text.
        while (!int.TryParse(Console.ReadLine(), out frames) || frames <= 0)
        {
            Console.WriteLine("Please enter a positive whole number of frames: ");
        }

        if (numCameras == 1)
        {
            ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
            program.RunSingleCamera(guid, "C:/Users/Deadpool/Desktop/flea3.AVI", frames);
        }
        else if (numCameras == 2)
        {
            ManagedPGRGuid camid1 = busMgr.GetCameraFromIndex(0);
            ManagedPGRGuid camid2 = busMgr.GetCameraFromIndex(1);

            // Lambdas are needed here: Thread's delegate returns void but we
            // must pass arguments to RunSingleCamera.
            Thread camthread1 = new Thread(() => program.RunSingleCamera(camid1, save_to[0], frames));
            camthread1.Start();
            Thread camthread2 = new Thread(() => program.RunSingleCamera(camid2, save_to[1], frames));
            camthread2.Start();

            // Kick off the experiment protocol on the pyboard while both
            // cameras are recording.
            pyboard.WriteLine("rig_load.full_experiment(True,True)\r");

            // Wait for both recordings to finish before the COM port closes.
            camthread1.Join();
            camthread2.Join();
        }
        else
        {
            // The original silently did nothing for 0 or >2 cameras.
            Console.WriteLine("Expected 1 or 2 cameras; found {0}. Exiting.", numCameras);
        }
    }
}
// Default calibrator constructor: wires up the camera objects, enumerates
// every camera on the bus, and seeds the calibration parameters from the
// compile-time defaults.
public Flea3Calibrator(PictureBox displaybox)
{
    DisplayBox = displaybox;

    // Camera object plus a bus manager so multiple attached cameras
    // can be handled.
    Cam = new ManagedCamera();
    BusMgr = new ManagedBusManager();

    // Collect the GUID of every camera currently on the bus.
    CamIDs = new List<ManagedPGRGuid>();
    uint detected = BusMgr.GetNumOfCameras();
    for (uint index = 0; index < detected; index++)
    {
        CamIDs.Add(BusMgr.GetCameraFromIndex(index));
    }

    // Calibration parameters fall back to the class-level defaults.
    FrameCount = FRAME_COUNT;
    ChessHorizCount = CHESS_HORIZ_CORNER_COUNT;
    ChessVertCount = CHESS_VERT_CORNER_COUNT;
    RectWidth = RECT_WIDTH;
    RectHeight = RECT_HEIGHT;

    // (image viewer for the calibration frame sequence is currently disabled)
    //imageViewer = new ImageViewer();
    state = ST_IDLE;
}
// Connects to the first detected Point Grey camera, enables the embedded
// hardware timestamp, and starts capture. Any failure is logged to the
// debug output and swallowed.
private void OpenCamera()
{
    try
    {
        System.Diagnostics.Debug.WriteLine("OpenCamera:" + DateTime.Now.ToString("HH:mm:ss.fff"));

        ManagedBusManager manager = new ManagedBusManager();
        uint cameraCount = manager.GetNumOfCameras();
        if (cameraCount == 0)
        {
            // No camera found on the bus.
            System.Diagnostics.Debug.WriteLine("没有发现相机!");
            return;
        }

        m_camera = new ManagedCamera();
        m_grabThreadExited = new AutoResetEvent(false);

        // Connect to the first camera on the bus.
        ManagedPGRGuid firstGuid = manager.GetCameraFromIndex(0);
        m_camera.Connect(firstGuid);

        // Turn on the in-image embedded timestamp.
        EmbeddedImageInfo info = m_camera.GetEmbeddedImageInfo();
        info.timestamp.onOff = true;
        m_camera.SetEmbeddedImageInfo(info);

        m_camera.StartCapture();

        // Second stamp brackets how long the whole open sequence took.
        System.Diagnostics.Debug.WriteLine("OpenCamera:" + DateTime.Now.ToString("HH:mm:ss.fff"));
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
}
/// <summary>
/// Enumerates all attached cameras and runs the single-camera routine on
/// each in turn, exiting early with a prompt when none are connected.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    Program program = new Program();

    ManagedBusManager busManager = new ManagedBusManager();
    uint cameraCount = busManager.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", cameraCount);

    // Nothing to do without at least one camera.
    if (cameraCount == 0)
    {
        Console.WriteLine("Not enough cameras!");
        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    for (uint index = 0; index < cameraCount; ++index)
    {
        program.RunSingleCamera(busManager.GetCameraFromIndex(index));
    }

    Console.WriteLine("Done! Press enter to exit...");
    Console.ReadLine();
}
/// <summary>
/// Discovers GigE cameras, prints their details, then runs the capture
/// routine on every enumerated GigE camera. Aborts immediately when the
/// current folder is not writable, since images are saved there.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    Program program = new Program();

    // Probe write permission up front by round-tripping a scratch file.
    try
    {
        FileStream probe = new FileStream(@"test.txt", FileMode.Create);
        probe.Close();
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.\n");
        return;
    }

    ManagedBusManager busManager = new ManagedBusManager();

    // Broadcast discovery reports GigE cameras even beyond the local bus view.
    CameraInfo[] discovered = ManagedBusManager.DiscoverGigECameras();
    Console.WriteLine("Number of cameras discovered: {0}", discovered.Length);
    foreach (CameraInfo info in discovered)
    {
        PrintCameraInfo(info);
    }

    uint enumeratedCount = busManager.GetNumOfCameras();
    Console.WriteLine("Number of cameras enumerated: {0}", enumeratedCount);

    for (uint index = 0; index < enumeratedCount; index++)
    {
        ManagedPGRGuid guid = busManager.GetCameraFromIndex(index);

        // Skip anything that is not a GigE camera (e.g. USB or FireWire).
        if (busManager.GetInterfaceTypeFromGuid(guid) != InterfaceType.GigE)
        {
            continue;
        }

        try
        {
            program.RunSingleCamera(guid);
        }
        catch (FC2Exception ex)
        {
            Console.WriteLine(String.Format("Error running camera {0}. Error: {1}", index, ex.Message));
        }
    }

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
// Parameterized calibrator constructor: identical bus setup to the default
// constructor, but the chessboard geometry and frame count come from the
// caller instead of the compile-time defaults.
public Flea3Calibrator(int horiz_corner_count, int vert_corner_count,
                       float rect_width, float rect_height,
                       int frame_count, PictureBox displaybox)
{
    DisplayBox = displaybox;

    // Camera object and bus manager (the bus may host several cameras).
    Cam = new ManagedCamera();
    BusMgr = new ManagedBusManager();

    // Snapshot the GUIDs of all cameras currently connected.
    CamIDs = new List<ManagedPGRGuid>();
    uint total = BusMgr.GetNumOfCameras();
    for (uint idx = 0; idx < total; idx++)
    {
        CamIDs.Add(BusMgr.GetCameraFromIndex(idx));
    }

    // Caller-supplied calibration parameters.
    FrameCount = frame_count;
    ChessHorizCount = horiz_corner_count;
    ChessVertCount = vert_corner_count;
    RectWidth = rect_width;
    RectHeight = rect_height;

    // (image viewer for the calibration frame sequence is currently disabled)
    //imageViewer = new ImageViewer();
    state = ST_IDLE;
}
/// <summary>
/// GigE camera example entry point: verifies the working folder is writable,
/// reports every discovered GigE camera, then runs each enumerated GigE
/// camera one at a time, logging per-camera failures without aborting.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    Program program = new Program();

    // Images are written to the current directory, so fail fast when we
    // cannot create files here.
    try
    {
        FileStream scratch = new FileStream(@"test.txt", FileMode.Create);
        scratch.Close();
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.\n");
        return;
    }

    ManagedBusManager bus = new ManagedBusManager();

    CameraInfo[] found = ManagedBusManager.DiscoverGigECameras();
    Console.WriteLine("Number of cameras discovered: {0}", found.Length);
    foreach (CameraInfo camInfo in found)
    {
        PrintCameraInfo(camInfo);
    }

    uint count = bus.GetNumOfCameras();
    Console.WriteLine("Number of cameras enumerated: {0}", count);

    for (uint i = 0; i < count; i++)
    {
        ManagedPGRGuid guid = bus.GetCameraFromIndex(i);

        // Only GigE interfaces participate in this example.
        if (bus.GetInterfaceTypeFromGuid(guid) != InterfaceType.GigE)
        {
            continue;
        }

        try
        {
            program.RunSingleCamera(guid);
        }
        catch (FC2Exception ex)
        {
            Console.WriteLine(String.Format("Error running camera {0}. Error: {1}", i, ex.Message));
        }
    }

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Checks that the working folder is writable, then runs every detected
/// camera through RunCamera. Exits with a prompt when the permission probe
/// fails or no cameras are attached.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    Program program = new Program();

    // The app saves images to the current folder; verify we may write
    // here before touching any hardware.
    try
    {
        FileStream probe = new FileStream(@"test.txt", FileMode.Create);
        probe.Close();
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.");
        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    ManagedBusManager busManager = new ManagedBusManager();
    uint detected = busManager.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", detected);

    // Finish if there are no cameras.
    if (detected == 0)
    {
        Console.WriteLine("Not enough cameras!");
        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    for (uint index = 0; index < detected; index++)
    {
        program.RunCamera(busManager.GetCameraFromIndex(index));
    }

    Console.WriteLine("Done! Press enter to exit...");
    Console.ReadLine();
}
/// <summary>
/// Initializes a Point Grey camera with the default settings. If more than
/// one camera is detected, the first one is used.
/// </summary>
/// <exception cref="NoCameraDetectedException">Thrown if no camera is detected.</exception>
public PtGreyCamera()
{
    // The bus manager is only needed for enumeration; dispose it when the
    // constructor finishes.
    using ManagedBusManager busMgr = new ManagedBusManager();
    setting = new PtGreyCameraSetting();
    uint numCameras = busMgr.GetNumOfCameras();
    if (numCameras == 0)
    {
        throw new NoCameraDetectedException { Source = "PointGrey" };
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0); // If there is more than 1 camera, we take the first one
    cam = new ManagedCamera();
    cam.Connect(guid);
    // Push the default property settings to the connected camera.
    SetProp();
}
/// <summary>
/// Runs the single-camera example on every camera found on the bus.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    Program program = new Program();

    ManagedBusManager busManager = new ManagedBusManager();
    uint cameraCount = busManager.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", cameraCount);

    for (uint index = 0; index < cameraCount; ++index)
    {
        ManagedPGRGuid guid = busManager.GetCameraFromIndex(index);
        program.RunSingleCamera(guid);
    }

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Initializes a Point Grey camera using the supplied settings. If more than
/// one camera is detected, the first one is used.
/// </summary>
/// <exception cref="NoCameraDetectedException">Thrown if no camera is detected.</exception>
/// <param name="setting">Settings used to configure the camera.</param>
public PtGreyCamera(PtGreyCameraSetting setting)
{
    // The bus manager is only needed for enumeration; dispose it when the
    // constructor finishes.
    using ManagedBusManager busMgr = new ManagedBusManager();
    this.setting = setting;

    uint numCameras = busMgr.GetNumOfCameras();
    // NOTE: a leftover debug Console.WriteLine(numCameras) was removed here —
    // library code should not print raw values to the console.
    // Finish if there are no cameras.
    if (numCameras == 0)
    {
        throw new NoCameraDetectedException();
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0); // If there is more than 1 camera, we take the first one
    cam = new ManagedCamera();
    cam.Connect(guid);
    // Apply the supplied settings to the connected camera.
    SetProp();
}
/// <summary>
/// Verifies the current folder is writable (images are saved there),
/// then runs the single-camera example on each detected camera.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    Program program = new Program();

    // Permission probe: create and delete a scratch file before doing
    // any camera work.
    try
    {
        FileStream probe = new FileStream(@"test.txt", FileMode.Create);
        probe.Close();
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.\n");
        return;
    }

    ManagedBusManager busManager = new ManagedBusManager();
    uint total = busManager.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", total);

    for (uint index = 0; index < total; index++)
    {
        program.RunSingleCamera(busManager.GetCameraFromIndex(index));
    }

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
// A timer used to sample the IMU during the intervals between frame captures
//private System.Timers.Timer IMUSamplingTimer;

/// <summary>
/// Builds a recorder bound to the given GPS receiver and IMU: creates the
/// camera and bus manager, enumerates all attached camera GUIDs, clears the
/// recording flags, and allocates the rolling frame buffer.
/// </summary>
public Flea3Recorder(GPSReceiver gpsReceiver, IMUCommsDevice imu)
{
    int i;
    IMUcomms = imu;
    // 1. creating the camera object
    Cam = new ManagedCamera();
    // 2. creating the bus manager in order to handle (potentially)
    // multiple cameras
    BusMgr = new ManagedBusManager();
    // 3. retrieving the List of all camera ids connected to the bus
    CamIDs = new List<ManagedPGRGuid>();
    int camCount = (int)BusMgr.GetNumOfCameras();
    for (i = 0; i < camCount; i++)
    {
        ManagedPGRGuid guid = BusMgr.GetCameraFromIndex((uint)i);
        CamIDs.Add(guid);
    }

    // 4. assigning values to properties
    GpsReceiver = gpsReceiver;
    // 5. init flags: nothing is recording until recording is started later
    RecordingThreadActive = false;
    RecordToFile = false;
    OutOfRecordingThread = OutOfDumpingThread = true;
    // 6. Creating the Frame data queue — a bounded rolling buffer so queued
    // frames cannot grow without limit
    FrameQueue = new ManagedImageRollingBuffer(MAX_FRAMEQUEUE_LEN);
}
/// <summary>
/// Triggered-capture example: configures trigger mode 0 using either a
/// software trigger (source 7) or an external hardware trigger (source 0 /
/// GPIO0), grabs k_numImages frames one trigger at a time, then restores the
/// camera to free-running mode and disconnects.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const int k_numImages = 10;
    bool useSoftwareTrigger = true;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    if (!useSoftwareTrigger)
    {
        // Check for external trigger support
        TriggerModeInfo triggerModeInfo = cam.GetTriggerModeInfo();
        if (triggerModeInfo.present != true)
        {
            Console.WriteLine("Camera does not support external trigger! Exiting...\n");
            return;
        }
    }

    // Get current trigger settings and switch to trigger mode 0.
    TriggerMode triggerMode = cam.GetTriggerMode();
    triggerMode.onOff = true;
    triggerMode.mode = 0;
    triggerMode.parameter = 0;

    if (useSoftwareTrigger)
    {
        // A source of 7 means software trigger
        triggerMode.source = 7;
    }
    else
    {
        // Triggering the camera externally using source 0.
        triggerMode.source = 0;
    }

    // Set the trigger mode
    cam.SetTriggerMode(triggerMode);

    // Poll to ensure camera is ready
    bool retVal = PollForTriggerReady(cam);
    if (retVal != true)
    {
        return;
    }

    // Set the grab timeout to 5 seconds so RetrieveBuffer cannot wait
    // forever for a trigger that never arrives.
    FC2Config config = cam.GetConfiguration();
    config.grabTimeout = 5000;
    cam.SetConfiguration(config);

    // Camera is ready, start capturing images
    cam.StartCapture();

    if (useSoftwareTrigger)
    {
        if (CheckSoftwareTriggerPresence(cam) == false)
        {
            Console.WriteLine("SOFT_ASYNC_TRIGGER not implemented on this camera! Stopping application\n");
            return;
        }
    }
    else
    {
        // BUG FIX: the original used a C-style "%d" placeholder, which
        // Console.WriteLine does not substitute; use a {0} placeholder.
        Console.WriteLine("Trigger the camera by sending a trigger pulse to GPIO{0}.\n", triggerMode.source);
    }

    ManagedImage image = new ManagedImage();
    for (int iImageCount = 0; iImageCount < k_numImages; iImageCount++)
    {
        if (useSoftwareTrigger)
        {
            // Check that the trigger is ready before firing.
            // BUG FIX: the original ignored this result inside the loop.
            retVal = PollForTriggerReady(cam);
            if (retVal != true)
            {
                Console.WriteLine("Error polling for trigger ready!");
                return;
            }

            Console.WriteLine("Press the Enter key to initiate a software trigger.\n");
            Console.ReadLine();

            // Fire software trigger
            retVal = FireSoftwareTrigger(cam);
            if (retVal != true)
            {
                Console.WriteLine("Error firing software trigger!");
                return;
            }
        }

        // Grab image
        cam.RetrieveBuffer(image);
        Console.WriteLine(".\n");
    }

    Console.WriteLine("Finished grabbing images");

    // Stop capturing and restore free-running (trigger off) mode.
    cam.StopCapture();
    triggerMode.onOff = false;
    cam.SetTriggerMode(triggerMode);

    // Disconnect the camera
    cam.Disconnect();

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Pseudo-simultaneous streaming example: connects to every detected camera
/// (at least two required), disables triggering, enables embedded timestamps,
/// starts capture on all of them, then round-robins RetrieveBuffer across the
/// cameras for NumImages frames before stopping and disconnecting each one.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const int NumImages = 50;

    Program program = new Program();

    //
    // Initialize BusManager and retrieve number of cameras detected
    //
    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    //
    // Check to make sure at least two cameras are connected before
    // running example
    //
    if (numCameras < 2)
    {
        Console.WriteLine("Insufficient number of cameras.");
        Console.WriteLine("Make sure at least two cameras are connected for example to run.");
        Console.WriteLine("Press Enter to exit.");
        Console.ReadLine();
        return;
    }

    //
    // Initialize an array of cameras
    //
    // *** NOTES ***
    // The size of the array is equal to the number of cameras detected.
    // The array of cameras will be used for connecting, configuring,
    // and capturing images.
    //
    ManagedCamera[] cameras = new ManagedCamera[numCameras];

    //
    // Prepare each camera to acquire images
    //
    // *** NOTES ***
    // For pseudo-simultaneous streaming, each camera is prepared as if it
    // were just one, but in a loop. Notice that cameras are selected with
    // an index. We demonstrate pseudo-simultaneous streaming because true
    // simultaneous streaming would require multiple processes or threads,
    // which is too complex for an example.
    //
    for (uint i = 0; i < numCameras; i++)
    {
        cameras[i] = new ManagedCamera();
        ManagedPGRGuid guid = busMgr.GetCameraFromIndex(i);

        // Connect to a camera
        cameras[i].Connect(guid);

        // Get the camera information
        CameraInfo camInfo = cameras[i].GetCameraInfo();
        PrintCameraInfo(camInfo);

        try
        {
            // Turn trigger mode off
            TriggerMode trigMode = new TriggerMode();
            trigMode.onOff = false;
            cameras[i].SetTriggerMode(trigMode);

            // Turn Timestamp on
            EmbeddedImageInfo imageInfo = new EmbeddedImageInfo();
            imageInfo.timestamp.onOff = true;
            cameras[i].SetEmbeddedImageInfo(imageInfo);
        }
        catch (System.Exception ex)
        {
            Console.WriteLine("Error configuring camera : {0}", ex.Message);
            Console.WriteLine("Press any key to exit...");
            Console.ReadLine();
            return;
        }

        try
        {
            // Start streaming on camera
            cameras[i].StartCapture();
        }
        catch (System.Exception ex)
        {
            Console.WriteLine("Error starting camera : {0}", ex.Message);
            Console.WriteLine("Press any key to exit...");
            Console.ReadLine();
            return;
        }
    }

    //
    // Retrieve images from all cameras
    //
    // *** NOTES ***
    // In order to work with simultaneous camera streams, nested loops are
    // needed. It is important that the inner loop be the one iterating
    // through the cameras; otherwise, all images will be grabbed from a
    // single camera before grabbing any images from another.
    //
    ManagedImage tempImage = new ManagedImage();
    for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
    {
        for (int camCount = 0; camCount < numCameras; camCount++)
        {
            try
            {
                // Retrieve an image
                cameras[camCount].RetrieveBuffer(tempImage);
            }
            catch (System.Exception ex)
            {
                Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
                Console.WriteLine("Press any key to exit...");
                Console.ReadLine();
                return;
            }

            // Display the timestamps of the images grabbed for each camera
            TimeStamp timeStamp = tempImage.timeStamp;
            Console.Out.WriteLine("Camera {0} - Frame {1} - TimeStamp {2} {3}", camCount, imageCnt, timeStamp.cycleSeconds, timeStamp.cycleCount);
        }
    }

    //
    // Stop streaming for each camera
    //
    for (uint i = 0; i < numCameras; i++)
    {
        try
        {
            cameras[i].StopCapture();
            cameras[i].Disconnect();
        }
        catch (System.Exception ex)
        {
            Console.WriteLine("Error cleaning up camera : {0}", ex.Message);
            Console.WriteLine("Press any key to exit...");
            Console.ReadLine();
            return;
        }
    }

    Console.WriteLine("Press enter to exit...");
    Console.ReadLine();
}
// A timer used to sample the IMU during the intervals between frame captures
//private System.Timers.Timer IMUSamplingTimer;

/// <summary>
/// Constructs a recorder tied to the supplied GPS receiver and IMU device.
/// Creates the camera and bus manager, collects the GUIDs of all attached
/// cameras, resets the recording flags, and allocates the frame queue.
/// </summary>
public Flea3Recorder(GPSReceiver gpsReceiver, IMUCommsDevice imu)
{
    int i;
    IMUcomms = imu;
    // 1. creating the camera object
    Cam = new ManagedCamera();
    // 2. creating the bus manager in order to handle (potentially)
    // multiple cameras
    BusMgr = new ManagedBusManager();
    // 3. retrieving the List of all camera ids connected to the bus
    CamIDs = new List<ManagedPGRGuid>();
    int camCount = (int)BusMgr.GetNumOfCameras();
    for (i = 0; i < camCount; i++)
    {
        ManagedPGRGuid guid = BusMgr.GetCameraFromIndex((uint)i);
        CamIDs.Add(guid);
    }

    // 4. assigning values to properties
    GpsReceiver = gpsReceiver;
    // 5. init flags: recording is inactive until explicitly started
    RecordingThreadActive = false;
    RecordToFile = false;
    OutOfRecordingThread = OutOfDumpingThread = true;
    // 6. Creating the Frame data queue — bounded rolling buffer of frames
    FrameQueue = new ManagedImageRollingBuffer(MAX_FRAMEQUEUE_LEN);
}
/// <summary>
/// Asynchronous-trigger example: powers up the first detected camera, puts it
/// in trigger mode 0 driven by either a software trigger (source 7) or an
/// external trigger on GPIO0, grabs NumImages frames one trigger at a time,
/// then restores free-running mode and disconnects.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const int NumImages = 10;
    bool useSoftwareTrigger = true;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // Finish if there are no cameras
    if (numCameras == 0)
    {
        Console.WriteLine("Not enough cameras!");
        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Power on the camera via the camera power register, then poll until
    // the power bit reads back set.
    const uint CameraPower = 0x610;
    const uint CameraPowerValue = 0x80000000;
    cam.WriteRegister(CameraPower, CameraPowerValue);

    const Int32 MillisecondsToSleep = 100;
    uint cameraPowerValueRead = 0;

    // Wait for camera to complete power-up
    do
    {
        System.Threading.Thread.Sleep(MillisecondsToSleep);
        cameraPowerValueRead = cam.ReadRegister(CameraPower);
    }
    while ((cameraPowerValueRead & CameraPowerValue) == 0);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    if (!useSoftwareTrigger)
    {
        // Check for external trigger support
        TriggerModeInfo triggerModeInfo = cam.GetTriggerModeInfo();
        if (triggerModeInfo.present != true)
        {
            Console.WriteLine("Camera does not support external trigger!");
            Console.WriteLine("Press enter to exit...");
            Console.ReadLine();
            return;
        }
    }

    // Get current trigger settings and switch to trigger mode 0.
    TriggerMode triggerMode = cam.GetTriggerMode();
    triggerMode.onOff = true;
    triggerMode.mode = 0;
    triggerMode.parameter = 0;

    if (useSoftwareTrigger)
    {
        // A source of 7 means software trigger
        triggerMode.source = 7;
    }
    else
    {
        // Triggering the camera externally using source 0.
        triggerMode.source = 0;
    }

    // Set the trigger mode
    cam.SetTriggerMode(triggerMode);

    // Poll to ensure camera is ready
    bool retVal = PollForTriggerReady(cam);
    if (retVal != true)
    {
        Console.WriteLine("Poll for trigger read failed!");
        Console.WriteLine("Press enter to exit...");
        Console.ReadLine();
        return;
    }

    // Set the grab timeout to 5 seconds so RetrieveBuffer cannot block
    // forever waiting for a trigger that never fires.
    FC2Config config = cam.GetConfiguration();
    config.grabTimeout = 5000;
    cam.SetConfiguration(config);

    // Camera is ready, start capturing images
    cam.StartCapture();

    if (useSoftwareTrigger)
    {
        if (CheckSoftwareTriggerPresence(cam) == false)
        {
            Console.WriteLine("SOFT_ASYNC_TRIGGER not implemented on this camera! Stopping application\n");
            Console.WriteLine("Press enter to exit...");
            Console.ReadLine();
            return;
        }
    }
    else
    {
        // BUG FIX: the original used a C-style "%d" placeholder, which
        // Console.WriteLine does not substitute; use {0} instead.
        Console.WriteLine("Trigger the camera by sending a trigger pulse to GPIO{0}.\n", triggerMode.source);
    }

    ManagedImage rawImage = new ManagedImage();
    for (int iImageCount = 0; iImageCount < NumImages; iImageCount++)
    {
        if (useSoftwareTrigger)
        {
            // Check that the trigger is ready before prompting the user.
            retVal = PollForTriggerReady(cam);

            Console.WriteLine("Press the Enter key to initiate a software trigger.\n");
            Console.ReadLine();

            // Fire software trigger
            retVal = FireSoftwareTrigger(cam);
            if (retVal != true)
            {
                Console.WriteLine("Error firing software trigger!");
                Console.WriteLine("Press enter to exit...");
                Console.ReadLine();
                return;
            }
        }

        try
        {
            // Retrieve an image
            cam.RetrieveBuffer(rawImage);
        }
        catch (FC2Exception ex)
        {
            // A failed grab (e.g. timeout) skips this frame but keeps going.
            Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
            continue;
        }

        Console.WriteLine(".\n");
    }

    Console.WriteLine("Finished grabbing images");

    // Stop capturing images
    cam.StopCapture();

    // Turn off trigger mode
    triggerMode.onOff = false;
    cam.SetTriggerMode(triggerMode);

    // Disconnect the camera
    cam.Disconnect();

    Console.WriteLine("Done! Press enter to exit...");
    Console.ReadLine();
}
/// <summary>
/// Connects every camera slot in <paramref name="cameras"/>, forces each to
/// 640x480 Y8 @ 15fps (required so they can be synchronized; this mode call
/// applies to FireWire and USB2 cameras only), starts a synchronized capture
/// across all of them, then grabs NumImages frames per camera, printing each
/// frame's timestamp to show the streams are in sync.
/// Returns 0 on success, -1 on a configuration or sync-start failure.
/// </summary>
static int StartSyncCaptureAndGrab(ref ManagedCamera[] cameras, uint numCameras)
{
    const int NumImages = 50;
    ManagedBusManager busMgr = new ManagedBusManager();

    // Connect to all detected cameras and attempt to set them to
    // a common video mode and frame rate
    for (uint i = 0; i < numCameras; i++)
    {
        cameras[i] = new ManagedCamera();
        ManagedPGRGuid guid = busMgr.GetCameraFromIndex(i);

        // Connect to a camera
        cameras[i].Connect(guid);

        // Get the camera information
        CameraInfo camInfo = cameras[i].GetCameraInfo();
        PrintCameraInfo(camInfo);

        // Set all cameras to a specific mode and frame rate so they
        // can be synchronized. This function is only used for firewire and usb2 cameras
        try
        {
            cameras[i].SetVideoModeAndFrameRate(VideoMode.VideoMode640x480Y8, FrameRate.FrameRate15);
        }
        catch (System.Exception /*ex*/)
        {
            Console.WriteLine("Error configuring cameras.");
            Console.WriteLine("This example requires cameras to be able to set to 640x480 Y8 at 15fps.");
            Console.WriteLine("If your camera does not support this mode, please edit the source code and recompile the application.");
            return(-1);
        }
    }

    // Put StartSyncCapture in a try-catch block in case
    // cameras failed to synchronize
    try
    {
        Console.WriteLine("Starting sync capture...");

        //sync firewire cameras
        ManagedCamera.StartSyncCapture(numCameras, cameras);
    }
    catch (System.Exception /*ex*/)
    {
        Console.WriteLine("Error starting cameras.");
        Console.WriteLine("This example requires cameras to be able to set to 640x480 Y8 at 15fps.");
        Console.WriteLine("If your camera does not support this mode, please edit the source code and recompile the application.");
        return(-1);
    }

    ManagedImage tempImage = new ManagedImage();

    // Retrieve images from attached cameras
    for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
    {
        for (int camCount = 0; camCount < numCameras; camCount++)
        {
            try
            {
                // Retrieve an image
                cameras[camCount].RetrieveBuffer(tempImage);
            }
            catch (FC2Exception ex)
            {
                // A failed grab skips this frame but continues the sweep.
                Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
                continue;
            }

            // Display the timestamps for all cameras to show that the
            // captured image is synchronized for each camera
            TimeStamp timeStamp = tempImage.timeStamp;
            Console.Out.WriteLine("Camera {0} - Frame {1} - TimeStamp {2} {3}", camCount, imageCnt, timeStamp.cycleSeconds, timeStamp.cycleCount);
        }
    }

    for (uint i = 0; i < numCameras; i++)
    {
        // Stop capturing images
        cameras[i].StopCapture();

        // Disconnect the camera
        cameras[i].Disconnect();
    }

    return(0);
}
/// <summary>
/// FireWire (1394) multi-camera synchronization example. Requires at least
/// two cameras, all on the 1394 interface. Starts the sync manager, polls the
/// sync status up to 10 times at one-second intervals, reports the timing-bus
/// state, performs a synchronized capture via StartSyncCaptureAndGrab, then
/// stops the sync process.
/// </summary>
static void Main(string[] args)
{
    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // Check to make sure enough cameras are connected
    if (numCameras < 2)
    {
        Console.WriteLine("Insufficient number of cameras...");
        Console.WriteLine("Please connect at least two 1394 cameras for example to run.");
        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    // Check to make sure only 1394 cameras are connected
    for (uint i = 0; i < numCameras; i++)
    {
        ManagedPGRGuid guid = busMgr.GetCameraFromIndex(i);
        if (busMgr.GetInterfaceTypeFromGuid(guid) != InterfaceType.Ieee1394)
        {
            Console.WriteLine("Please make sure ONLY 1394 cameras are connected for example to run.");
            Console.WriteLine("Press Enter to exit...");
            Console.ReadLine();
            return;
        }
    }

    Console.Out.WriteLine("Creating ManagedSyncManager Object...");
    ManagedSyncManager syncManager = new ManagedSyncManager();

    Console.Out.WriteLine("Starting Sync process...");
    PGRSyncError syncError;
    syncError = syncManager.Start();
    if (syncError != PGRSyncError.PGRSyncError_OK)
    {
        Console.Out.WriteLine("Error in sync start call: {0}", syncError.ToString());
        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    // Grab and check synchonization status, retrying up to 10 times.
    uint numChecks = 0;
    PGRSyncMessage message;
    do
    {
        // Short delay before checking sync status
        Thread.Sleep(1000);
        message = syncManager.GetSyncStatus();
        Console.WriteLine("Current sync status : {0}", SyncStatusToString(message));
    }
    while (message != PGRSyncMessage.PGRSyncMessage_OK && ++numChecks < 10);

    Console.Out.WriteLine("\nTiming bus is {0}connected.", syncManager.IsTimingBusConnected() ? "" : "not ");
    Console.Out.WriteLine("Time since sync started in seconds: {0:F2}.", syncManager.GetTimeSinceSynced());

    // Start sync capture and grab images
    ManagedCamera[] cameras = new ManagedCamera[numCameras];
    int grabStatus = StartSyncCaptureAndGrab(ref cameras, numCameras);
    if (grabStatus != 0)
    {
        // Cleanup if start sync capture fails intermittently
        for (uint i = 0; i < numCameras; i++)
        {
            if (cameras[i] != null)
            {
                cameras[i].StopCapture();
                cameras[i].Disconnect();
            }
        }

        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    Console.Out.WriteLine("\nTime since sync started in seconds: {0:F2}.", syncManager.GetTimeSinceSynced());

    // Stop sync
    Console.WriteLine("Stopping Sync Process...");
    syncError = syncManager.Stop();
    if (syncError != PGRSyncError.PGRSyncError_OK)
    {
        Console.Out.WriteLine("Error in sync stop call: {0}", syncError.ToString());
        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    Console.Out.WriteLine("Press any key to exit...");
    Console.ReadLine();
}
/// <summary>
/// Calibrator constructor #2: same bus setup as the default constructor, but
/// the chessboard corner counts, rectangle dimensions, and frame count are
/// supplied by the caller instead of the compile-time defaults.
/// </summary>
public Flea3Calibrator(int horiz_corner_count, int vert_corner_count, float rect_width, float rect_height, int frame_count, PictureBox displaybox)
{
    DisplayBox = displaybox;
    // 1. creating the camera object
    Cam = new ManagedCamera();
    // 2. creating the bus manager in order to handle (potentially)
    // multiple cameras
    BusMgr = new ManagedBusManager();
    // 3. retrieving the List of all camera ids connected to the bus
    CamIDs = new List<ManagedPGRGuid>();
    int camCount = (int)BusMgr.GetNumOfCameras();
    for (int i = 0; i < camCount; i++)
    {
        ManagedPGRGuid guid = BusMgr.GetCameraFromIndex((uint)i);
        CamIDs.Add(guid);
    }

    // calibration parameters supplied by the caller
    FrameCount = frame_count;
    ChessHorizCount = horiz_corner_count;
    ChessVertCount = vert_corner_count;
    RectWidth = rect_width;
    RectHeight = rect_height;

    // creating the imageViewer to display the calibration frame sequence
    // is currently disabled
    //imageViewer = new ImageViewer();
    state = ST_IDLE;
}
/// <summary>
/// Background-thread worker: configures the first detected camera for a
/// 200x200 Format7 Mono8 ROI, then loops forever grabbing frames, detecting
/// the circular feature with a Hough transform, smoothing the detection over
/// a 20-sample ring buffer, and reporting the pixel offset of the detected
/// circle from the ROI centre on the form (text boxes + annotated preview).
/// </summary>
private void CaptureCameraCallback()
{
    #region camsetup
    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // Connect to the first camera on the bus.
    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);

    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // Pixel format not supported!
        Console.WriteLine("Pixel format is not supported");
        return;
    }

    // 200x200 ROI; offsets (1124, 924) are rig-specific sensor coordinates.
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 1124;
    fmt7ImageSettings.offsetY = 924;
    fmt7ImageSettings.width = 200;
    fmt7ImageSettings.height = 200;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings, ref settingsValid);
    if (settingsValid != true)
    {
        // Settings are not valid
        return;
    }

    // Set the settings to the camera
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Enable embedded timestamp collection when the camera supports it.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    // Retrieve frame rate property
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);
    #endregion

    Mat image;
    Mat grey = new Mat();
    int differencex = 0;
    int differencey = 0;
    double differenceEucl = 0;
    OpenCvSharp.Point centre_im = new OpenCvSharp.Point();
    int centrex = Convert.ToInt16(fmt7ImageSettings.width) / 2;
    // BUGFIX: was computed from width on both axes; the vertical centre must
    // come from height. Harmless with the current square 200x200 ROI, but
    // wrong for any non-square ROI.
    int centrey = Convert.ToInt16(fmt7ImageSettings.height) / 2;

    // Hough-circle tuning parameters.
    double dp_ = 1;        // inverse ratio of accumulator array to image resolution
    double minDist_ = 100; // minimum distance between centres of detected circles
    double param1_ = 100;  // higher threshold of Canny edge detection
    double param2_ = 20;   // accumulator threshold; smaller => more false detections
    int minRad_ = 45;      // minimum radius
    int maxRad_ = 60;      // maximum radius

    centre_im.X = centrex;
    centre_im.Y = centrey;

    // Ring buffers for a sliding-window average of the detected circle.
    int windowsize = 20;
    int[] centresx = new int[windowsize];
    int[] centresy = new int[windowsize];
    int[] radi = new int[windowsize];
    OpenCvSharp.Point centre = new OpenCvSharp.Point();
    int radius = 1;
    int buffpos = 0;
    int flag = 0; // set once at least one circle has ever been detected

    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();

    // Grab / detect / display loop; runs until the hosting thread is torn down.
    while (true)
    {
        cam.RetrieveBuffer(rawImage);
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;
        image = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap);
        Cv2.CvtColor(image, grey, ColorConversionCodes.BGR2GRAY);

        // Inner circle detection.
        CircleSegment[] circles = Cv2.HoughCircles(grey, HoughMethods.Gradient,
            dp_, minDist_, param1_, param2_, minRad_, maxRad_);
        for (int i = 0; i < circles.Length; i++)
        {
            flag = 1;
            // BUGFIX: previously indexed circles[0] inside this loop, so the
            // first detection was pushed into the ring buffer once per
            // detected circle; each detection is now processed.
            centre.X = Convert.ToInt16(Math.Round(circles[i].Center.X));
            centre.Y = Convert.ToInt16(Math.Round(circles[i].Center.Y));
            radius = Convert.ToInt16(Math.Round(circles[i].Radius));

            buffpos = (buffpos + 1) % windowsize;
            centresx[buffpos] = centre.X;
            centresy[buffpos] = centre.Y;
            radi[buffpos] = radius;

            // Smooth over the window, then measure the offset from the ROI centre.
            centre.X = Convert.ToInt16(centresx.Average());
            centre.Y = Convert.ToInt16(centresy.Average());
            radius = Convert.ToInt16(radi.Average());
            differencex = centre.X - centrex;
            differencey = centre.Y - centrey;
            differenceEucl = Math.Round((Math.Sqrt(Math.Pow(differencex, 2) + Math.Pow(differencey, 2))), 2);
        }

        if (flag == 1)
        {
            // Draw the last smoothed detection.
            Cv2.Circle(image, centre, 3, Scalar.Red);
            Cv2.Circle(image, centre, radius, Scalar.Red, 3);
        }

        // Reference marker at the ROI centre.
        Cv2.Circle(image, centre_im, 3, Scalar.DeepSkyBlue);
        Cv2.Circle(image, centre_im, 50, Scalar.DeepSkyBlue, 3);

        // Report the offsets on the form's text boxes.
        string xdiff = differencex.ToString();
        string textxparse = "X Offset: " + xdiff + " [pixels]";
        string ydiff = differencey.ToString();
        string textyparse = "Y Offset: " + ydiff + " [pixels]";
        string eucl = differenceEucl.ToString();
        string texteucle = "Eucl. Dist: " + eucl + " [pixels]";
        AppendTextBoxX(textxparse);
        AppendTextBoxE(texteucle);
        AppendTextBoxY(textyparse);

        // Push the annotated frame to the preview control.
        Bitmap bm = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(image);
        bm.SetResolution(flydisp.Width, flydisp.Height);
        flydisp.Image = bm;
    }
}
/// <summary>
/// CustomImageEx entry point: verifies write permission to the working folder,
/// configures the first detected camera for full-sensor Format7 Mono8 capture,
/// grabs k_numImages frames, and saves each one as a BMP named after the
/// camera serial number and frame index.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;
    const int k_numImages = 10;

    // Since this application saves images in the current folder we must ensure
    // that we have permission to write to this folder. If we do not have
    // permission, fail right away.
    try
    {
        // `using` guarantees the probe file handle is released even if Delete throws.
        using (FileStream fileStream = new FileStream(@"test.txt", FileMode.Create))
        {
        }
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.\n");
        return;
    }

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // Connect to the first camera on the bus.
    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);

    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // Pixel format not supported! Report it instead of exiting silently.
        Console.WriteLine("Pixel format is not supported");
        return;
    }

    // Full-sensor image in the requested pixel format.
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 0;
    fmt7ImageSettings.offsetY = 0;
    fmt7ImageSettings.width = fmt7Info.maxWidth;
    fmt7ImageSettings.height = fmt7Info.maxHeight;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings, ref settingsValid);
    if (settingsValid != true)
    {
        // Settings are not valid
        return;
    }

    // Set the settings to the camera
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Enable embedded timestamp collection when the camera supports it.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    // Retrieve frame rate property
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);

    Console.WriteLine("Grabbing {0} images", k_numImages);
    ManagedImage rawImage = new ManagedImage();
    // Hoisted out of the loop: one conversion buffer is reused for every frame
    // instead of allocating a new ManagedImage per iteration.
    ManagedImage convertedImage = new ManagedImage();
    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // Get the embedded timestamp
        TimeStamp timeStamp = rawImage.timeStamp;
        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            imageCnt,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Convert the raw image to BGR so a Bitmap can be produced.
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        // Create a unique filename
        string filename = String.Format(
            "CustomImageEx_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            imageCnt);

        // Get the Bitmap object. Bitmaps are only valid if the pixel format
        // of the ManagedImage is RGB or RGBU.
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;

        // Save the image
        bitmap.Save(filename);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Initializes capture from a Point Grey camera identified by bus index.
/// Resolves the requested resolution to pixel dimensions, connects to the
/// camera (GigE or standard), attempts to apply the requested frame rate in
/// the current (or previously set) video mode, and starts asynchronous
/// capture via the OnImageGrabbed callback. No-op if already initialized.
/// </summary>
/// <param name="videoDeviceID">Zero-based camera index on the bus.</param>
/// <param name="framerate">Desired frame rate (throws GoblinException if unsupported).</param>
/// <param name="resolution">Desired capture resolution.</param>
/// <param name="format">Requested image format (stored; not applied here).</param>
/// <param name="grayscale">Whether frames should be treated as grayscale (stored; not applied here).</param>
/// <exception cref="GoblinException">videoDeviceID out of range, or framerate unsupported.</exception>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution, ImageFormat format, bool grayscale)
{
    // Guard: initialization is one-shot.
    if (cameraInitialized)
    {
        return;
    }

    this.resolution = resolution;
    this.grayscale = grayscale;
    this.frameRate = framerate;
    this.videoDeviceID = videoDeviceID;
    this.format = format;

    // Map the resolution enum to concrete pixel dimensions.
    switch (resolution)
    {
        case Resolution._160x120: cameraWidth = 160; cameraHeight = 120; break;
        case Resolution._320x240: cameraWidth = 320; cameraHeight = 240; break;
        case Resolution._640x480: cameraWidth = 640; cameraHeight = 480; break;
        case Resolution._800x600: cameraWidth = 800; cameraHeight = 600; break;
        case Resolution._1024x768: cameraWidth = 1024; cameraHeight = 768; break;
        // NOTE(review): height is 960 although the enum name says 1024. This
        // may be deliberate (many Point Grey sensors are 1280x960) — confirm
        // against the camera's actual video modes before "fixing".
        case Resolution._1280x1024: cameraWidth = 1280; cameraHeight = 960; break;
        case Resolution._1600x1200: cameraWidth = 1600; cameraHeight = 1200; break;
    }

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    if (videoDeviceID >= numCameras || videoDeviceID < 0)
    {
        throw new GoblinException("VideoDeviceID: " + videoDeviceID + " is out of range. There are only " + numCameras + " point grey camera(s) connected");
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex((uint)videoDeviceID);

    // GigE cameras need the specialized managed wrapper.
    InterfaceType ifType = busMgr.GetInterfaceTypeFromGuid(guid);
    if (ifType == InterfaceType.GigE)
    {
        camera = new ManagedGigECamera();
    }
    else
    {
        camera = new ManagedCamera();
    }

    camera.Connect(guid);

    processedImage = new ManagedImage();
    // 3 bytes per pixel: frames are delivered/processed as 24-bit color.
    pixelData = new byte[cameraWidth * cameraHeight * 3];

    // Sentinel enum values mean "not yet determined".
    VideoMode currentVideoMode = VideoMode.NumberOfVideoModes;
    FlyCapture2Managed.FrameRate currentFrameRate = FlyCapture2Managed.FrameRate.NumberOfFrameRates;
    FlyCapture2Managed.FrameRate desiredFrameRate = FlyCapture2Managed.FrameRate.NumberOfFrameRates;

    // Translate the local FrameRate enum to the FlyCapture2 SDK's enum.
    switch (frameRate)
    {
        case FrameRate._15Hz:
            desiredFrameRate = FlyCapture2Managed.FrameRate.FrameRate15;
            break;
        case FrameRate._30Hz:
            desiredFrameRate = FlyCapture2Managed.FrameRate.FrameRate30;
            break;
        case FrameRate._60Hz:
            desiredFrameRate = FlyCapture2Managed.FrameRate.FrameRate60;
            break;
        case FrameRate._120Hz:
            desiredFrameRate = FlyCapture2Managed.FrameRate.FrameRate120;
            break;
        case FrameRate._240Hz:
            desiredFrameRate = FlyCapture2Managed.FrameRate.FrameRate240;
            break;
        default:
            throw new GoblinException(framerate.ToString() + " is not supported");
    }

    // Apply the frame rate if supported in the selected video mode; failures
    // are logged rather than thrown, so capture still starts with whatever
    // mode/rate the camera already had.
    try
    {
        ((ManagedCamera)camera).GetVideoModeAndFrameRate(ref currentVideoMode, ref currentFrameRate);
        if (!isVideoModeSet)
        {
            // No explicit video mode was requested: keep the camera's current one.
            videoMode = currentVideoMode;
        }
        bool supported = ((ManagedCamera)camera).GetVideoModeAndFrameRateInfo(videoMode, desiredFrameRate);
        if (supported)
        {
            ((ManagedCamera)camera).SetVideoModeAndFrameRate(videoMode, desiredFrameRate);
        }
        else
        {
            Log.Write(desiredFrameRate.ToString() + " is not supported in " + videoMode.ToString() + " mode");
        }
    }
    catch (Exception exp)
    {
        Log.Write(exp.Message + ": " + exp.StackTrace);
    }

    // Begin asynchronous capture; frames arrive via the OnImageGrabbed callback.
    camera.StartCapture(OnImageGrabbed);
    cameraInitialized = true;
}
/// <summary>
/// Connects to every detected camera, forces a common video mode/frame rate
/// (640x480 Y8 at 15fps) so they can be synchronized, starts a synchronized
/// capture across all of them, grabs k_numImages frames per camera while
/// printing the embedded timestamps (which should match across cameras),
/// then stops and disconnects every camera.
/// </summary>
/// <param name="busMgr">Bus manager used to resolve camera GUIDs by index.</param>
/// <param name="numOfCameras">Number of cameras to drive.</param>
void RunAllCameras(ManagedBusManager busMgr, uint numOfCameras)
{
    const int k_numImages = 10;
    ManagedCamera[] cameras = new ManagedCamera[numOfCameras];

    // Connect to all detected cameras and attempt to set them to
    // a common video mode and frame rate
    for (uint i = 0; i < numOfCameras; i++)
    {
        cameras[i] = new ManagedCamera();
        ManagedPGRGuid guid = busMgr.GetCameraFromIndex(i);

        // Connect to a camera
        cameras[i].Connect(guid);

        // Get the camera information
        CameraInfo camInfo = cameras[i].GetCameraInfo();
        PrintCameraInfo(camInfo);

        // Set all cameras to a specific mode and frame rate so they
        // can be synchronized
        try
        {
            cameras[i].SetVideoModeAndFrameRate(VideoMode.VideoMode640x480Y8, FrameRate.FrameRate15);
        }
        catch (System.Exception)
        {
            Console.WriteLine("Error configuring cameras.");
            Console.WriteLine("This example requires cameras to be able to set to 640x480 Y8 at 15fps.");
            Console.WriteLine("If your camera does not support this mode, please edit the source code and recompile the application.\n");
            // FIX: previously returned without disconnecting, leaving every
            // camera connected so far claimed on the bus.
            for (uint c = 0; c <= i; c++)
            {
                cameras[c].Disconnect();
            }
            return;
        }
    }

    // Put StartSyncCapture in a try-catch block in case
    // cameras failed to synchronize
    try
    {
        ManagedCamera.StartSyncCapture(numOfCameras, cameras);
    }
    catch (System.Exception)
    {
        Console.WriteLine("Error starting cameras.");
        Console.WriteLine("This example requires cameras to be able to set to 640x480 Y8 at 15fps.");
        Console.WriteLine("If your camera does not support this mode, please edit the source code and recompile the application.");
        Console.WriteLine("Press any key to exit.");
        Console.ReadKey();
        // FIX: disconnect the cameras before bailing out so the bus is left clean.
        for (uint c = 0; c < numOfCameras; c++)
        {
            cameras[c].Disconnect();
        }
        return;
    }

    ManagedImage tempImage = new ManagedImage();

    // Retrieve images from attached cameras; the timestamps printed for each
    // camera should agree, demonstrating the capture is synchronized.
    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        for (int camCount = 0; camCount < numOfCameras; camCount++)
        {
            // Retrieve an image
            cameras[camCount].RetrieveBuffer(tempImage);

            // Display the timestamps for all cameras to show that the
            // captured image is synchronized for each camera
            TimeStamp timeStamp = tempImage.timeStamp;
            Console.Out.WriteLine("Cam {0} - Frame {1} - TimeStamp {2} {3}",
                camCount, imageCnt, timeStamp.cycleSeconds, timeStamp.cycleCount);
        }
    }

    for (uint i = 0; i < numOfCameras; i++)
    {
        // Stop capturing images
        cameras[i].StopCapture();

        // Disconnect the camera
        cameras[i].Disconnect();
    }
}
/// <summary>
/// Builds the frame source as a shared, reconnectable observable. On
/// subscription a long-running task powers up the camera at Index, applies
/// frame-rate / Blackfly pull-up / embedded-info register settings, then
/// loops: re-applies exposure/shutter/gain whenever the corresponding
/// properties change, grabs a frame, wraps or converts it into an IplImage,
/// and emits it as a FlyCaptureDataFrame. Cancellation stops capture; the
/// finally block powers the camera off and disconnects it.
/// </summary>
public FlyCapture()
{
    ColorProcessing = ColorProcessingAlgorithm.Default;
    source = Observable.Create<FlyCaptureDataFrame>((observer, cancellationToken) =>
    {
        return (Task.Factory.StartNew(() =>
        {
            // Only one acquisition loop may drive the camera at a time.
            lock (captureLock)
            {
                ManagedCamera camera;
                using (var manager = new ManagedBusManager())
                {
                    var guid = manager.GetCameraFromIndex((uint)Index);
                    camera = new ManagedCamera();
                    camera.Connect(guid);

                    // Power on the camera (register 0x610 per the original comment).
                    const uint CameraPower = 0x610;
                    const uint CameraPowerValue = 0x80000000;
                    camera.WriteRegister(CameraPower, CameraPowerValue);

                    // Wait for camera to complete power-up: poll until the
                    // power bit reads back set.
                    const Int32 MillisecondsToSleep = 100;
                    uint cameraPowerValueRead = 0;
                    do
                    {
                        Thread.Sleep(MillisecondsToSleep);
                        cameraPowerValueRead = camera.ReadRegister(CameraPower);
                    } while ((cameraPowerValueRead & CameraPowerValue) == 0);
                }

                // 1 while frames are being captured; the cancellation callback
                // clears it before stopping capture so RetrieveBuffer failures
                // after cancellation are treated as a normal shutdown.
                var capture = 0;
                try
                {
                    // Set frame rate
                    var prop = new CameraProperty(PropertyType.FrameRate);
                    prop.absControl = true;
                    prop.absValue = FramesPerSecond;
                    prop.autoManualMode = false;
                    prop.onOff = true;
                    camera.SetProperty(prop);

                    // Enable/disable blackfly pull up (vendor register 0x19D0).
                    const uint pullUp = 0x19D0;
                    if (EnableBlackflyOutputVoltage)
                    {
                        camera.WriteRegister(pullUp, 0x10000001);
                    }
                    else
                    {
                        camera.WriteRegister(pullUp, 0x10000000);
                    }

                    // Acquisition parameters. autoExposure starts inverted on
                    // purpose so the first loop iteration always applies the
                    // exposure settings ("horrible hack" per the original author).
                    var colorProcessing = ColorProcessing;
                    var autoExposure = !AutoExposure; // Horrible hack to trigger update initially
                    var shutter = Shutter;
                    var gain = Gain;

                    // Configure embedded info (register 0x12F8): bit 6 = frame
                    // counter, bit 0 = timestamp.
                    const uint embeddedInfo = 0x12F8;
                    uint embeddedInfoState = camera.ReadRegister(embeddedInfo);
                    if (EnableEmbeddedFrameCounter)
                    {
                        embeddedInfoState |= (uint)1 << 6;
                    }
                    else
                    {
                        embeddedInfoState &= ~((uint)1 << 6);
                    }
                    if (EnableEmbeddedFrameTimeStamp)
                    {
                        embeddedInfoState |= (uint)1 << 0;
                    }
                    else
                    {
                        embeddedInfoState &= ~((uint)1 << 0);
                    }
                    camera.WriteRegister(embeddedInfo, embeddedInfoState);

                    using (var image = new ManagedImage())
                    using (var notification = cancellationToken.Register(() =>
                    {
                        Interlocked.Exchange(ref capture, 0);
                        camera.StopCapture();
                    }))
                    {
                        camera.StartCapture();
                        Interlocked.Exchange(ref capture, 1);
                        while (!cancellationToken.IsCancellationRequested)
                        {
                            IplImage output;
                            BayerTileFormat bayerTileFormat;

                            // AutoExposure turned ON since last iteration:
                            // hand exposure, shutter and gain to the camera.
                            if (autoExposure != AutoExposure && AutoExposure)
                            {
                                prop = new CameraProperty(PropertyType.AutoExposure);
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                autoExposure = AutoExposure;

                                // Shutter
                                prop = new CameraProperty(PropertyType.Shutter);
                                prop.absControl = true;
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);

                                // Gain
                                prop = new CameraProperty(PropertyType.Gain);
                                prop.absControl = true;
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                autoExposure = AutoExposure;
                            }
                            else if (autoExposure != AutoExposure && !AutoExposure)
                            {
                                // AutoExposure turned OFF: poison the cached
                                // shutter/gain so the manual branches below re-apply them.
                                shutter = -0.1f; // Hack
                                gain = -0.1f;
                                autoExposure = AutoExposure;
                            }

                            // Manual shutter: Shutter is a 0..1 fraction mapped
                            // onto the camera's absolute shutter range.
                            if (shutter != Shutter && !AutoExposure)
                            {
                                // Figure out max shutter time given current frame rate
                                var info = camera.GetPropertyInfo(PropertyType.Shutter);
                                var delta = info.absMax - info.absMin;
                                prop = new CameraProperty(PropertyType.Shutter);
                                prop.absControl = true;
                                prop.absValue = Shutter * delta + info.absMin;
                                prop.autoManualMode = false;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                shutter = Shutter;
                            }

                            // Manual gain.
                            if (gain != Gain && !AutoExposure)
                            {
                                // NOTE(review): this queries the SHUTTER property's
                                // range but applies it to GAIN — looks like a
                                // copy-paste of the block above; confirm whether
                                // PropertyType.Gain was intended before changing.
                                var info = camera.GetPropertyInfo(PropertyType.Shutter);
                                var delta = info.absMax - info.absMin;
                                prop = new CameraProperty(PropertyType.Gain);
                                prop.absControl = true;
                                prop.absValue = Gain * delta + info.absMin;;
                                prop.autoManualMode = false;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                gain = Gain;
                            }

                            try
                            {
                                camera.RetrieveBuffer(image);
                            }
                            catch (FC2Exception ex)
                            {
                                if (capture == 0)
                                {
                                    // Capture was stopped by cancellation; exit quietly.
                                    break;
                                }
                                else if (IgnoreImageConsistencyError && ex.CauseType == ErrorType.ImageConsistencyError)
                                {
                                    // Optionally tolerate corrupt frames and keep grabbing.
                                    continue;
                                }
                                else
                                {
                                    throw;
                                }
                            }

                            // Mono frames (and raw frames that need no demosaicing)
                            // are wrapped and copied without conversion...
                            if (image.pixelFormat == PixelFormat.PixelFormatMono8 ||
                                image.pixelFormat == PixelFormat.PixelFormatMono16 ||
                                (image.pixelFormat == PixelFormat.PixelFormatRaw8 &&
                                 (image.bayerTileFormat == BayerTileFormat.None ||
                                  colorProcessing == ColorProcessingAlgorithm.NoColorProcessing)))
                            {
                                unsafe
                                {
                                    bayerTileFormat = image.bayerTileFormat;
                                    var depth = image.pixelFormat == PixelFormat.PixelFormatMono16 ? IplDepth.U16 : IplDepth.U8;
                                    // Zero-copy header over the camera buffer, then
                                    // a deep copy so the emitted frame outlives it.
                                    var bitmapHeader = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 1, new IntPtr(image.data));
                                    output = new IplImage(bitmapHeader.Size, bitmapHeader.Depth, bitmapHeader.Channels);
                                    CV.Copy(bitmapHeader, output);
                                }
                            }
                            else
                            {
                                // ...everything else is converted to BGR directly
                                // into the output image's backing store.
                                unsafe
                                {
                                    bayerTileFormat = BayerTileFormat.None;
                                    output = new IplImage(new Size((int)image.cols, (int)image.rows), IplDepth.U8, 3);
                                    using (var convertedImage = new ManagedImage(
                                        (uint)output.Height,
                                        (uint)output.Width,
                                        (uint)output.WidthStep,
                                        (byte *)output.ImageData.ToPointer(),
                                        (uint)(output.WidthStep * output.Height),
                                        PixelFormat.PixelFormatBgr))
                                    {
                                        convertedImage.colorProcessingAlgorithm = colorProcessing;
                                        image.Convert(PixelFormat.PixelFormatBgr, convertedImage);
                                    }
                                }
                            }
                            observer.OnNext(new FlyCaptureDataFrame(output, image.imageMetadata, bayerTileFormat));
                        }
                    }
                }
                finally
                {
                    // Power off the camera
                    const uint CameraPower = 0x610;
                    const uint CameraPowerValue = 0x00000000;
                    camera.WriteRegister(CameraPower, CameraPowerValue);
                    if (capture != 0)
                    {
                        camera.StopCapture();
                    }
                    camera.Disconnect();
                    camera.Dispose();
                }
            }
        }, cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Default));
    })
    // Share one camera session across subscribers; reconnectable so the
    // session can be restarted after all subscribers disconnect.
    .PublishReconnectable()
    .RefCount();
}
/// <summary>
/// Builds the frame source as a shared, reconnectable observable. On
/// subscription a long-running task connects to the camera at Index, applies
/// the buffering configuration (grab mode, buffer count, high-performance
/// retrieve), then loops grabbing frames, wrapping or converting each one
/// into an IplImage and emitting it as a FlyCaptureDataFrame. Unlike the
/// power-managed variant, this constructor does no register-level camera
/// setup. Cancellation stops capture; the finally block disconnects.
/// </summary>
public FlyCapture()
{
    NumBuffers = 10;
    GrabMode = GrabMode.BufferFrames;
    ColorProcessing = ColorProcessingAlgorithm.Default;
    source = Observable.Create<FlyCaptureDataFrame>((observer, cancellationToken) =>
    {
        return (Task.Factory.StartNew(() =>
        {
            // Only one acquisition loop may drive the camera at a time.
            lock (captureLock)
            {
                ManagedCamera camera;
                using (var manager = new ManagedBusManager())
                {
                    var guid = manager.GetCameraFromIndex((uint)Index);
                    camera = new ManagedCamera();
                    camera.Connect(guid);
                }

                // 1 while frames are being captured; the cancellation callback
                // clears it before stopping capture so RetrieveBuffer failures
                // after cancellation are treated as a normal shutdown.
                var capture = 0;
                var numBuffers = NumBuffers; // NOTE(review): unused local; config reads NumBuffers directly.

                // Apply the driver-side buffering configuration.
                var config = camera.GetConfiguration();
                config.grabMode = GrabMode;
                config.numBuffers = (uint)NumBuffers;
                config.highPerformanceRetrieveBuffer = true;
                camera.SetConfiguration(config);
                try
                {
                    var colorProcessing = ColorProcessing;
                    using (var image = new ManagedImage())
                    using (var notification = cancellationToken.Register(() =>
                    {
                        Interlocked.Exchange(ref capture, 0);
                        camera.StopCapture();
                    }))
                    {
                        camera.StartCapture();
                        Interlocked.Exchange(ref capture, 1);
                        while (!cancellationToken.IsCancellationRequested)
                        {
                            IplImage output;
                            BayerTileFormat bayerTileFormat;
                            try
                            {
                                camera.RetrieveBuffer(image);
                            }
                            catch (FC2Exception)
                            {
                                if (capture == 0)
                                {
                                    // Capture was stopped by cancellation; exit quietly.
                                    break;
                                }
                                else
                                {
                                    throw;
                                }
                            }

                            var raw16 = image.pixelFormat == PixelFormat.PixelFormatRaw16;

                            // Mono frames (and raw frames that need no demosaicing)
                            // are wrapped and copied without conversion...
                            if (image.pixelFormat == PixelFormat.PixelFormatMono8 ||
                                image.pixelFormat == PixelFormat.PixelFormatMono16 ||
                                ((image.pixelFormat == PixelFormat.PixelFormatRaw8 || raw16) &&
                                 (image.bayerTileFormat == BayerTileFormat.None ||
                                  colorProcessing == ColorProcessingAlgorithm.NoColorProcessing)))
                            {
                                unsafe
                                {
                                    bayerTileFormat = image.bayerTileFormat;
                                    var depth = image.pixelFormat == PixelFormat.PixelFormatMono16 || raw16 ? IplDepth.U16 : IplDepth.U8;
                                    // Zero-copy header over the camera buffer, then
                                    // a deep copy so the emitted frame outlives it.
                                    var bitmapHeader = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 1, new IntPtr(image.data));
                                    output = new IplImage(bitmapHeader.Size, bitmapHeader.Depth, bitmapHeader.Channels);
                                    CV.Copy(bitmapHeader, output);
                                }
                            }
                            else
                            {
                                // ...everything else is demosaiced/converted to BGR
                                // (16-bit BGR for Raw16 sources) directly into the
                                // output image's backing store.
                                unsafe
                                {
                                    bayerTileFormat = BayerTileFormat.None;
                                    var depth = raw16 ? IplDepth.U16 : IplDepth.U8;
                                    var format = raw16 ? PixelFormat.PixelFormatBgr16 : PixelFormat.PixelFormatBgr;
                                    output = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 3);
                                    using (var convertedImage = new ManagedImage(
                                        (uint)output.Height,
                                        (uint)output.Width,
                                        (uint)output.WidthStep,
                                        (byte *)output.ImageData.ToPointer(),
                                        (uint)(output.WidthStep * output.Height),
                                        format))
                                    {
                                        convertedImage.colorProcessingAlgorithm = colorProcessing;
                                        image.Convert(format, convertedImage);
                                    }
                                }
                            }
                            observer.OnNext(new FlyCaptureDataFrame(output, image.imageMetadata, bayerTileFormat));
                        }
                    }
                }
                finally
                {
                    if (capture != 0)
                    {
                        camera.StopCapture();
                    }
                    camera.Disconnect();
                    camera.Dispose();
                }
            }
        }, cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Default));
    })
    // Share one camera session across subscribers; reconnectable so the
    // session can be restarted after all subscribers disconnect.
    .PublishReconnectable()
    .RefCount();
}
/// <summary>
/// Rebuilds the camera-list grid. Enumerates cameras via the bus manager and
/// GigE discovery, classifies each as "good" (connectable), "incompatible
/// driver", or "discoverable but not controllable" (IP mis-configuration),
/// colors/bad-tooltips the problem rows, refreshes the list label, and shows
/// the first camera's details (or clears the panel when the list is empty).
/// </summary>
private void PopulateCameraList()
{
    uint numCameras = 0;
    CameraInfo[] discoveredCameras = new CameraInfo[0];
    try
    {
        numCameras = m_busMgr.GetNumOfCameras();
        // GigE discovery can return cameras that GetNumOfCameras misses
        // (e.g. wrong-subnet cameras that cannot be enumerated).
        discoveredCameras = ManagedBusManager.DiscoverGigECameras();
    }
    catch (FC2Exception ex)
    {
        BasePage.ShowErrorMessageDialog("Error getting number of cameras.", ex);
    }

    // Nothing found anywhere: reset the UI to its empty state and shrink the window.
    if (numCameras == 0 && discoveredCameras.Length == 0)
    {
        m_cameraListLabel.Text = string.Format("Camera List (No cameras detected)");
        m_cameraDataGridView.Rows.Clear();
        m_cameraInfoPanel.ClearInformation();
        HideGigEInformation();
        AdjustWindowMinimumSize();
        this.Height = this.MinimumSize.Height;
        m_needShrinkWindowHeight = false;
        return;
    }

    // Keyed by serial number; entries still present after the enumeration
    // loop below are cameras that were discovered but cannot be controlled.
    SortedDictionary<uint, CameraInfo> discoveredCameraInfo = new SortedDictionary<uint, CameraInfo>();
    m_badCameraInfo = new Dictionary<string, CameraInfo>();
    m_goodCameraInfo = new Dictionary<ManagedPGRGuid, CameraInfo>();

    for (uint currCamIdx = 0; currCamIdx < discoveredCameras.Length; currCamIdx++)
    {
        try
        {
            Debug.WriteLine(
                String.Format(
                    "Discovered camera: {0} ({1})",
                    discoveredCameras[currCamIdx].modelName,
                    discoveredCameras[currCamIdx].serialNumber));

            // Check if the camera already exists - we sometimes get duplicate cameras
            // returned from the discover call
            if (!discoveredCameraInfo.ContainsKey(discoveredCameras[currCamIdx].serialNumber))
            {
                discoveredCameraInfo.Add(
                    discoveredCameras[currCamIdx].serialNumber,
                    discoveredCameras[currCamIdx]);
            }
        }
        catch (ArgumentNullException ex)
        {
            Debug.WriteLine("A null key was specified for discovered camera lookup.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
            continue;
        }
        catch (ArgumentException ex)
        {
            Debug.WriteLine("An element with the same key already exists in the discovered camera dictionary.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
            continue;
        }
        catch (System.Exception ex)
        {
            Debug.WriteLine("An error occurred while updating the discovered GigE camera list.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
            continue;
        }
    }

    // Rows are staged in these lists and committed to the grid at the end,
    // good cameras first.
    List<DataGridViewRow> goodCameraList = new List<DataGridViewRow>();
    List<DataGridViewRow> badCameraList = new List<DataGridViewRow>();

    for (uint i = 0; i < numCameras; i++)
    {
        try
        {
            ManagedPGRGuid guid;
            guid = m_busMgr.GetCameraFromIndex(i);
            InterfaceType currInterface;
            currInterface = m_busMgr.GetInterfaceTypeFromGuid(guid);
            using (ManagedCamera camera = new ManagedCamera())
            {
                // A connect failure with IncompatibleDriver is expected for
                // devices bound to the wrong driver; any other FC2Exception
                // propagates to the outer catch.
                bool compatibleDriver = true;
                string errorMessage = string.Empty;
                try
                {
                    camera.Connect(guid);
                }
                catch (FC2Exception ex)
                {
                    if (ex.Type == ErrorType.IncompatibleDriver)
                    {
                        compatibleDriver = false;
                        errorMessage = ex.Message;
                    }
                }

                CameraInfo camInfo;
                if (compatibleDriver)
                {
                    camInfo = camera.GetCameraInfo();
                    if (discoveredCameraInfo.ContainsKey(camInfo.serialNumber) == true)
                    {
                        // Remove good camera from dictionary (whatever is left
                        // afterwards is discoverable-but-uncontrollable).
                        discoveredCameraInfo.Remove(camInfo.serialNumber);
                        m_goodCameraInfo.Add(guid, camInfo);
                    }

                    // Append the camera to the list
                    try
                    {
                        DataGridViewRow newCamera = new DataGridViewRow();
                        DataGridViewTextBoxCell[] cells = new DataGridViewTextBoxCell[4];
                        for (int ci = 0; ci < cells.Length; ci++)
                        {
                            cells[ci] = new DataGridViewTextBoxCell();
                        }
                        // Columns: serial, model, interface, IP (N/A for non-GigE).
                        cells[0].Value = camInfo.serialNumber.ToString();
                        cells[1].Value = camInfo.modelName;
                        cells[2].Value = InterfaceTranslator.GetInterfaceString(currInterface);
                        cells[3].Value = camInfo.ipAddress.Equals(new IPAddress(0)) ? "N/A" : camInfo.ipAddress.ToString();
                        newCamera.Cells.AddRange(cells);
                        goodCameraList.Add(newCamera);
                    }
                    catch (InvalidOperationException ex)
                    {
                        Debug.WriteLine("Error appending new row to camera list.");
                        Debug.WriteLine(ex.Message);
                        Debug.WriteLine(ex.StackTrace);
                        continue;
                    }
                    catch (ArgumentNullException ex)
                    {
                        Debug.WriteLine("The cell in camera list contains null value.");
                        Debug.WriteLine(ex.Message);
                        Debug.WriteLine(ex.StackTrace);
                        continue;
                    }
                }
                else
                {
                    // Incompatible-driver row: highlighted, with the driver
                    // error as tooltip.
                    camInfo = new CameraInfo();
                    DataGridViewRow newCamera = new DataGridViewRow();
                    newCamera.DefaultCellStyle.BackColor = IMCOMPATIBLE_DRIVER;
                    DataGridViewTextBoxCell[] cells = new DataGridViewTextBoxCell[4];
                    for (int ci = 0; ci < cells.Length; ci++)
                    {
                        cells[ci] = new DataGridViewTextBoxCell();
                    }
                    cells[0].Value = "N/A";
                    cells[1].Value = ManagedUtilities.GetDriverDeviceName(guid);
                    cells[2].Value = "Incompatible Driver";
                    cells[3].Value = "N/A";
                    // NOTE(review): this tooltip is immediately overwritten by
                    // the foreach below, which sets every cell's tooltip to
                    // errorMessage.
                    cells[0].ToolTipText = "An incompatible driver is installed on this device.";
                    foreach (DataGridViewTextBoxCell cell in cells)
                    {
                        cell.ToolTipText = errorMessage;
                    }
                    newCamera.Cells.AddRange(cells);
                    badCameraList.Add(newCamera);
                }
            }
        }
        catch (FC2Exception ex)
        {
            BasePage.ShowErrorMessageDialog("Error populating camera list.", ex);
            continue;
        }
    }

    // Whatever remains was discovered via GigE but never matched a
    // controllable camera: list it as a "bad" (IP problem / enumeration
    // disabled) row.
    foreach (KeyValuePair<uint, CameraInfo> pair in discoveredCameraInfo)
    {
        try
        {
            CameraInfo info = pair.Value;
            m_badCameraInfo.Add(info.serialNumber.ToString(), info);
            DataGridViewRow newCamera = new DataGridViewRow();
            newCamera.DefaultCellStyle.BackColor = IP_PROBLEM;
            DataGridViewTextBoxCell[] cells = new DataGridViewTextBoxCell[4];
            for (int ci = 0; ci < cells.Length; ci++)
            {
                cells[ci] = new DataGridViewTextBoxCell();
            }
            cells[0].Value = info.serialNumber.ToString();
            cells[1].Value = info.modelName;
            cells[2].Value = "GigE";
            cells[3].Value = info.ipAddress.Equals(new IPAddress(0)) ? "N/A" : info.ipAddress.ToString();
            // As above, cells[0]'s tooltip is overwritten by the loop below.
            cells[0].ToolTipText = "This camera is discoverable but can not be controlled";
            foreach (DataGridViewTextBoxCell cell in cells)
            {
                if (m_GigEEnumerationIsDisabled)
                {
                    // NOTE(review): the stray ")" after "disabled" is in the
                    // original runtime string and is preserved here.
                    cell.ToolTipText =
                        "This camera cannot be enumerated by FlyCapture2 because GigE camera enumeration \n" +
                        "has been disabled)";
                }
                else
                {
                    cell.ToolTipText =
                        "Camera IP settings or local interface is mis-configured. Use \"Force IP\" to \n" +
                        "correct it";
                }
            }
            newCamera.Cells.AddRange(cells);
            badCameraList.Add(newCamera);
        }
        catch (InvalidOperationException ex)
        {
            Debug.WriteLine("Error appending new row to camera list.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
            continue;
        }
        catch (ArgumentNullException ex)
        {
            Debug.WriteLine("The cell in camera list contains null value.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
            continue;
        }
    }

    // Commit the staged rows: clear the grid, update the header count, then
    // add good rows followed by bad rows.
    m_cameraDataGridView.Rows.Clear();
    m_cameraListLabel.Text = string.Format("Camera List ({0} cameras detected)", (goodCameraList.Count + badCameraList.Count));
    for (int i = 0; i < goodCameraList.Count; i++)
    {
        try
        {
            m_cameraDataGridView.Rows.Add(goodCameraList[i]);
        }
        catch (InvalidOperationException ex)
        {
            Debug.WriteLine("Error adding camera list to the view.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
        }
        catch (ArgumentNullException ex)
        {
            Debug.WriteLine("The camera list contains null value.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
        }
        catch (ArgumentException ex)
        {
            Debug.WriteLine("The camera list contains invalid value.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
        }
    }
    for (int i = 0; i < badCameraList.Count; i++)
    {
        try
        {
            m_cameraDataGridView.Rows.Add(badCameraList[i]);
        }
        catch (InvalidOperationException ex)
        {
            Debug.WriteLine("Error adding camera list to the view.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
        }
        catch (ArgumentNullException ex)
        {
            Debug.WriteLine("The camera list contains null value.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
        }
        catch (ArgumentException ex)
        {
            Debug.WriteLine("The camera list contains invalid value.");
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
        }
    }

    if (m_cameraDataGridView.Rows.Count > 0)
    {
        // display first camera information
        DisplayCameraInformationFromRowIndex(0);
    }
    else
    {
        // Nothing need to display
        m_cameraInfoPanel.ClearInformation();
    }
}
// calibrator deafult constructor public Flea3Calibrator(PictureBox displaybox) { DisplayBox = displaybox; // 1. creating the camera object Cam = new ManagedCamera(); // 2. creating the bus manager in order to handle (potentially) // multiple cameras BusMgr = new ManagedBusManager(); // 3. retrieving the List of all camera ids connected to the bus CamIDs = new List<ManagedPGRGuid>(); int camCount = (int)BusMgr.GetNumOfCameras(); for (int i = 0; i < camCount; i++) { ManagedPGRGuid guid = BusMgr.GetCameraFromIndex((uint)i); CamIDs.Add(guid); } FrameCount = FRAME_COUNT; ChessHorizCount = CHESS_HORIZ_CORNER_COUNT; ChessVertCount = CHESS_VERT_CORNER_COUNT; RectWidth = RECT_WIDTH; RectHeight = RECT_HEIGHT; // creatring the imageViewer to display the calibration frame sequence //imageViewer = new ImageViewer(); state = ST_IDLE; }