/// <summary>
/// Discovers the first attached camera, connects to it, enables the
/// embedded timestamp, and begins streaming. Any failure is logged to the
/// debug output only; no exception escapes this method.
/// </summary>
private void OpenCamera()
{
    try
    {
        System.Diagnostics.Debug.WriteLine("OpenCamera:" + DateTime.Now.ToString("HH:mm:ss.fff"));

        ManagedBusManager busMgr = new ManagedBusManager();
        uint cameraCount = busMgr.GetNumOfCameras();
        if (cameraCount == 0)
        {
            System.Diagnostics.Debug.WriteLine("没有发现相机!");
            return;
        }

        m_camera = new ManagedCamera();
        //m_processedImage = new ManagedImage();
        m_grabThreadExited = new AutoResetEvent(false);

        // Connect to the first selected GUID
        ManagedPGRGuid firstGuid = busMgr.GetCameraFromIndex(0);
        m_camera.Connect(firstGuid);

        // Set embedded timestamp to on
        EmbeddedImageInfo embeddedInfo = m_camera.GetEmbeddedImageInfo();
        embeddedInfo.timestamp.onOff = true;
        m_camera.SetEmbeddedImageInfo(embeddedInfo);

        m_camera.StartCapture();

        System.Diagnostics.Debug.WriteLine("OpenCamera:" + DateTime.Now.ToString("HH:mm:ss.fff"));
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
}
/// <summary>
/// Connects to the camera identified by FGuid and starts streaming frames
/// into CaptureCallback. Returns true on success; on any failure, records
/// the exception message in Status and returns false.
/// </summary>
public override bool Open()
{
    try
    {
        if (FGuid == null)
        {
            throw new Exception("No Camera GUID specified");
        }

        FCamera.Connect(FGuid);

        // Read back the video mode / frame rate actually negotiated.
        VideoMode mode = new VideoMode();
        FrameRate rate = new FrameRate();
        FCamera.GetVideoModeAndFrameRate(ref mode, ref rate);
        FMode = mode.ToString();
        FFramerate = Utils.GetFramerate(rate);

        FRunning = true;
        FCamera.StartCapture(CaptureCallback);
        ReAllocate();

        Status = "OK";
        return true;
    }
    catch (Exception e)
    {
        FRunning = false;
        Status = e.Message;
        return false;
    }
}
/// <summary>
/// Connects to the camera identified by <paramref name="guid"/>, enables
/// embedded timestamps when available, and captures frames via the
/// OnImageGrabbed callback until the shared imageCnt counter reaches
/// k_numImages; then stops capture, disconnects, and resets the counter.
/// </summary>
/// <param name="guid">GUID of the camera to run.</param>
void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedCamera cam = new ManagedCamera();

    // Connect to a camera
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images; frames are delivered to OnImageGrabbed,
    // which is expected to advance imageCnt.
    cam.StartCapture(OnImageGrabbed);

    CameraProperty frameRateProp = cam.GetProperty(PropertyType.FrameRate);

    // BUG FIX: the wait condition previously hard-coded 10 instead of
    // using k_numImages. Also guard against a zero/invalid frame rate:
    // (int)(1000 / 0f) casts +Infinity to int.MinValue, which would make
    // Thread.Sleep throw ArgumentOutOfRangeException.
    while (imageCnt < k_numImages)
    {
        int millisecondsToSleep = frameRateProp.absValue > 0.0f
            ? (int)(1000 / frameRateProp.absValue)
            : 100;
        Thread.Sleep(millisecondsToSleep);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();

    // Reset counter for next iteration
    imageCnt = 0;
}
/// <summary>
/// Builds the zoom-demo window: constructs the FlyCapture objects and the
/// background grab worker, then — if the user selects a camera in the
/// selection dialog — connects, starts capture, and launches the worker.
/// If no camera is selected, the application shuts down.
/// </summary>
public FullImageWindow()
{
    InitializeComponent();

    this.Title = string.Format(
        "Point Grey Research. Zoom Demo. Tier {0}",
        (RenderCapability.Tier >> 16).ToString());

    m_busmgr = new ManagedBusManager();
    m_cam = new ManagedCamera();
    m_ctldlg = new CameraControlDialog();
    m_selDlg = new CameraSelectionDialog();
    m_image = new ManagedImage();
    m_converted = new ManagedImage();
    m_bitmap = new BitmapImage();

    m_worker = new BackgroundWorker();
    m_worker.WorkerReportsProgress = true;
    m_worker.DoWork += new DoWorkEventHandler(m_worker_DoWork);
    m_worker.ProgressChanged += new ProgressChangedEventHandler(m_worker_ProgressChanged);

    m_Done = new AutoResetEvent(false);

    // Cheap scaling/edge modes keep the WPF render path fast for live video.
    RenderOptions.SetBitmapScalingMode(myImage, BitmapScalingMode.LowQuality);
    RenderOptions.SetEdgeMode(myImage, EdgeMode.Aliased);

    if (!m_selDlg.ShowModal())
    {
        // No camera chosen — nothing to show.
        Application.Current.Shutdown();
        return;
    }

    ManagedPGRGuid[] guids = m_selDlg.GetSelectedCameraGuids();
    m_cam.Connect(guids[0]);
    m_ctldlg.Connect(m_cam);
    m_cam.StartCapture();

    btn_nearfast.IsChecked = true;

    // Hand the shared buffers and camera to the background grab loop.
    WorkerHelper helper = new WorkerHelper();
    helper.converted = m_converted;
    helper.raw = m_image;
    helper.cam = m_cam;

    m_continue = true;
    m_worker.RunWorkerAsync(helper);
}
/// <summary>
/// (Re)opens the camera: closes any existing session first, then — if
/// enabled and a GUID is available — connects using FGuid and starts
/// streaming into CaptureCallback. Progress and errors are reported via
/// the Status property.
/// </summary>
protected override void Open()
{
    Close();

    if (!FEnabled)
    {
        return;
    }

    if (FGuid == null)
    {
        Status = "Awaiting camera guid";
        return;
    }

    try
    {
        FCamera.Connect(FGuid);

        // Read back the negotiated video mode and frame rate.
        VideoMode mode = new VideoMode();
        FrameRate rate = new FrameRate();
        FCamera.GetVideoModeAndFrameRate(ref mode, ref rate);
        FMode = mode.ToString();
        FFramerate = Utils.GetFramerate(rate);

        FRunning = true;
        FCamera.StartCapture(CaptureCallback);
        Status = "OK";
    }
    catch (Exception e)
    {
        FRunning = false;
        Status = e.Message;
    }
}
/// <summary>
/// Applies the packet size from the spin button to the camera: reads the
/// current Format7 configuration, stops capture, re-applies the same
/// settings with the new packet size, and restarts capture if it was
/// running beforehand. Errors at each stage are shown in a dialog.
/// </summary>
private void ApplyPacketSize()
{
    Format7ImageSettings fmt7Settings = new Format7ImageSettings();
    uint packetSize = 0;
    float speedPercentage = 0.0f;
    ManagedCamera cam = (ManagedCamera)m_camera;

    // Read the current Format7 configuration so it can be re-applied with
    // only the packet size changed.
    try
    {
        cam.GetFormat7Configuration(fmt7Settings, ref packetSize, ref speedPercentage);
    }
    catch (FC2Exception ex)
    {
        ShowErrorMessageDialog("Error getting current Format7 configuration", ex);
        ex.Dispose();
        return;
    }

    bool restartNeeded = true;

    // Stop the camera before changing the configuration.
    try
    {
        cam.StopCapture();
    }
    catch (FC2Exception ex)
    {
        if (ex.Type == ErrorType.IsochNotStarted)
        {
            // The camera was already stopped, so there is no need to
            // restart it afterwards.
            restartNeeded = false;
        }
        else
        {
            string error = string.Format("Error stopping capture. {0}", ex.Message);
            Debug.WriteLine(error);
            ShowErrorMessageDialog("Error stopping camera.", ex);
            ex.Dispose();
            return;
        }

        ex.Dispose();
    }

    // Apply the same Format7 settings with the new packet size.
    try
    {
        cam.SetFormat7Configuration(fmt7Settings, (uint)m_packetSizeSpinButton.Value);
    }
    catch (FC2Exception settingFormat7Exception)
    {
        ShowErrorMessageDialog("There was an error setting new packet size settings.", settingFormat7Exception);
        settingFormat7Exception.Dispose();
        return;
    }

    // Settings were applied (or reverted); either way the camera should be
    // able to restart successfully if it was streaming before.
    if (restartNeeded)
    {
        try
        {
            cam.StartCapture();
        }
        catch (FC2Exception ex)
        {
            ShowErrorMessageDialog("There was an error restarting the camera.", ex);
            ex.Dispose();
        }
    }
}
/// <summary>
/// AsyncTriggerEx-style sample: puts the first detected camera into trigger
/// mode 0 and grabs k_numImages frames, triggered either by software
/// (source 7, fired on an Enter key press) or by an external hardware
/// trigger on GPIO0 (source 0).
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const int k_numImages = 10;
    bool useSoftwareTrigger = true;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // BUG FIX: guard against zero cameras before indexing the bus —
    // GetCameraFromIndex(0) would otherwise fail. This matches the other
    // samples in this project.
    if (numCameras == 0)
    {
        Console.WriteLine("Not enough cameras!");
        return;
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);

    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    if (!useSoftwareTrigger)
    {
        // Check for external trigger support
        TriggerModeInfo triggerModeInfo = cam.GetTriggerModeInfo();
        if (triggerModeInfo.present != true)
        {
            Console.WriteLine("Camera does not support external trigger! Exiting...\n");
            return;
        }
    }

    // Get current trigger settings
    TriggerMode triggerMode = cam.GetTriggerMode();

    // Set camera to trigger mode 0
    triggerMode.onOff = true;
    triggerMode.mode = 0;
    triggerMode.parameter = 0;

    if (useSoftwareTrigger)
    {
        // A source of 7 means software trigger
        triggerMode.source = 7;
    }
    else
    {
        // Triggering the camera externally using source 0.
        triggerMode.source = 0;
    }

    // Set the trigger mode
    cam.SetTriggerMode(triggerMode);

    // Poll to ensure camera is ready
    bool retVal = PollForTriggerReady(cam);
    if (retVal != true)
    {
        return;
    }

    // Get the camera configuration
    FC2Config config = cam.GetConfiguration();

    // Set the grab timeout to 5 seconds
    config.grabTimeout = 5000;

    // Set the camera configuration
    cam.SetConfiguration(config);

    // Camera is ready, start capturing images
    cam.StartCapture();

    if (useSoftwareTrigger)
    {
        if (CheckSoftwareTriggerPresence(cam) == false)
        {
            Console.WriteLine("SOFT_ASYNC_TRIGGER not implemented on this camera! Stopping application\n");
            return;
        }
    }
    else
    {
        // BUG FIX: the original used the C-style "%d" placeholder, which
        // Console.WriteLine does not substitute; use "{0}" instead.
        Console.WriteLine("Trigger the camera by sending a trigger pulse to GPIO{0}.\n", triggerMode.source);
    }

    ManagedImage image = new ManagedImage();
    for (int iImageCount = 0; iImageCount < k_numImages; iImageCount++)
    {
        if (useSoftwareTrigger)
        {
            // Check that the trigger is ready
            retVal = PollForTriggerReady(cam);

            Console.WriteLine("Press the Enter key to initiate a software trigger.\n");
            Console.ReadLine();

            // Fire software trigger
            retVal = FireSoftwareTrigger(cam);
            if (retVal != true)
            {
                Console.WriteLine("Error firing software trigger!");
                return;
            }
        }

        // Grab image
        cam.RetrieveBuffer(image);
        Console.WriteLine(".\n");
    }

    Console.WriteLine("Finished grabbing images");

    // Stop capturing images
    cam.StopCapture();

    // Turn off trigger mode
    triggerMode.onOff = false;
    cam.SetTriggerMode(triggerMode);

    // Disconnect the camera
    cam.Disconnect();

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Switches the camera to <paramref name="newVideoMode"/> at
/// <paramref name="newFrameRate"/>: stops capture, applies the new mode,
/// and restarts capture if it was running beforehand. On success the
/// cached m_currentVideoMode / m_currentFrameRate fields and the
/// frame-rate radio buttons are updated. Errors are shown in message
/// boxes; a failed StopCapture lets the user choose whether to continue.
/// </summary>
private void SetCameraVideoModeAndFrameRate(VideoMode newVideoMode, FrameRate newFrameRate)
{
    if (m_isUpdatingRadioStatus)
    {
        // avoid update confliction — a radio-button update is in flight
        Debug.WriteLine("There is a update action in progress, setting video mode and frame rate failed. ");
        return;
    }

    if ((m_currentVideoMode == newVideoMode) && (m_currentFrameRate == newFrameRate))
    {
        // nothing changed
        return;
    }

    ManagedCamera camera = (ManagedCamera)m_camera;
    bool needToRestartCamera = true;
    bool noError = true;

    try
    {
        camera.StopCapture();
    }
    catch (FC2Exception ex)
    {
        if (ex.Type == ErrorType.IsochNotStarted)
        {
            // This means the camera was stopped and therefore we
            // do not need to restart it
            needToRestartCamera = false;
        }
        else
        {
            string error = string.Format("Error stopping capture. {0}", ex.Message);
            Console.WriteLine(error);

            // Let the user decide whether to push on despite the failure.
            DialogResult result = MessageBox.Show(
                string.Format(
                    "{0}\r\n Do you wish to continue change the mode? Click ok to continue", error),
                "FlyCapture2 Camera Control",
                MessageBoxButtons.OKCancel,
                MessageBoxIcon.Error);
            if (result == DialogResult.Cancel)
            {
                return;
            }
        }
    }

    try
    {
        camera.SetVideoModeAndFrameRate(newVideoMode, newFrameRate);
    }
    catch (FC2Exception ex)
    {
        string error = string.Format("Error setting video mode and frame rate. {0}", ex.Message);
        Console.WriteLine(error);
        MessageBox.Show(error, "FlyCapture2 Camera Control", MessageBoxButtons.OK, MessageBoxIcon.Error);
        // Remember the failure so the cached mode/rate are NOT updated below.
        noError = false;
    }

    if (needToRestartCamera == true)
    {
        try
        {
            camera.StartCapture();
        }
        catch (FC2Exception ex)
        {
            string error = string.Format("Error restarting image streaming. {0}", ex.Message);
            Console.WriteLine(error);
            MessageBox.Show(error, "FlyCapture2 Camera Control", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }

    if (noError == true)
    {
        // if everything ok, update current video mode and frame rate
        m_currentFrameRate = newFrameRate;
        m_currentVideoMode = newVideoMode;

        // update check status for frame rate radio buttons
        UpdateCheckStatusForFrameRateRadioButtons();
    }
}
/// <summary>
/// Trigger sample with explicit power-up: powers on the first detected
/// camera via register 0x610, configures trigger mode 0 (software source 7
/// or external source 0), and grabs NumImages frames, waiting for an Enter
/// key press before each software trigger.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const int NumImages = 10;
    bool useSoftwareTrigger = true;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // Finish if there are no cameras
    if (numCameras == 0)
    {
        Console.WriteLine("Not enough cameras!");
        Console.WriteLine("Press Enter to exit...");
        Console.ReadLine();
        return;
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);

    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Power on the camera
    const uint CameraPower = 0x610;
    const uint CameraPowerValue = 0x80000000;
    cam.WriteRegister(CameraPower, CameraPowerValue);

    const Int32 MillisecondsToSleep = 100;
    uint cameraPowerValueRead = 0;

    // Wait for camera to complete power-up.
    // NOTE(review): this loop has no upper bound — if the camera never sets
    // the power bit the program hangs here; confirm whether a timeout is
    // wanted before adding one.
    do
    {
        System.Threading.Thread.Sleep(MillisecondsToSleep);
        cameraPowerValueRead = cam.ReadRegister(CameraPower);
    }
    while ((cameraPowerValueRead & CameraPowerValue) == 0);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    if (!useSoftwareTrigger)
    {
        // Check for external trigger support
        TriggerModeInfo triggerModeInfo = cam.GetTriggerModeInfo();
        if (triggerModeInfo.present != true)
        {
            Console.WriteLine("Camera does not support external trigger!");
            Console.WriteLine("Press enter to exit...");
            Console.ReadLine();
            return;
        }
    }

    // Get current trigger settings
    TriggerMode triggerMode = cam.GetTriggerMode();

    // Set camera to trigger mode 0
    triggerMode.onOff = true;
    triggerMode.mode = 0;
    triggerMode.parameter = 0;

    if (useSoftwareTrigger)
    {
        // A source of 7 means software trigger
        triggerMode.source = 7;
    }
    else
    {
        // Triggering the camera externally using source 0.
        triggerMode.source = 0;
    }

    // Set the trigger mode
    cam.SetTriggerMode(triggerMode);

    // Poll to ensure camera is ready
    bool retVal = PollForTriggerReady(cam);
    if (retVal != true)
    {
        Console.WriteLine("Poll for trigger read failed!");
        Console.WriteLine("Press enter to exit...");
        Console.ReadLine();
        return;
    }

    // Get the camera configuration
    FC2Config config = cam.GetConfiguration();

    // Set the grab timeout to 5 seconds
    config.grabTimeout = 5000;

    // Set the camera configuration
    cam.SetConfiguration(config);

    // Camera is ready, start capturing images
    cam.StartCapture();

    if (useSoftwareTrigger)
    {
        if (CheckSoftwareTriggerPresence(cam) == false)
        {
            Console.WriteLine("SOFT_ASYNC_TRIGGER not implemented on this camera! Stopping application\n");
            Console.WriteLine("Press enter to exit...");
            Console.ReadLine();
            return;
        }
    }
    else
    {
        // BUG FIX: the original used the C-style "%d" placeholder, which
        // Console.WriteLine does not substitute; use "{0}" instead.
        Console.WriteLine("Trigger the camera by sending a trigger pulse to GPIO{0}.\n", triggerMode.source);
    }

    ManagedImage rawImage = new ManagedImage();
    for (int iImageCount = 0; iImageCount < NumImages; iImageCount++)
    {
        if (useSoftwareTrigger)
        {
            // Check that the trigger is ready
            retVal = PollForTriggerReady(cam);

            Console.WriteLine("Press the Enter key to initiate a software trigger.\n");
            Console.ReadLine();

            // Fire software trigger
            retVal = FireSoftwareTrigger(cam);
            if (retVal != true)
            {
                Console.WriteLine("Error firing software trigger!");
                Console.WriteLine("Press enter to exit...");
                Console.ReadLine();
                return;
            }
        }

        try
        {
            // Retrieve an image
            cam.RetrieveBuffer(rawImage);
        }
        catch (FC2Exception ex)
        {
            Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
            continue;
        }

        Console.WriteLine(".\n");
    }

    Console.WriteLine("Finished grabbing images");

    // Stop capturing images
    cam.StopCapture();

    // Turn off trigger mode
    triggerMode.onOff = false;
    cam.SetTriggerMode(triggerMode);

    // Disconnect the camera
    cam.Disconnect();

    Console.WriteLine("Done! Press enter to exit...");
    Console.ReadLine();
}
/// <summary>
/// Grabs k_numImages frames with embedded timestamps enabled, converts each
/// to BGR, and writes it out as a BMP named after the camera serial number
/// and the frame index.
/// </summary>
void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedCamera cam = new ManagedCamera();

    // Connect to a camera
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Turn on embedded timestamps when the camera supports them.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    ManagedImage frame = new ManagedImage();
    for (int frameIndex = 0; frameIndex < k_numImages; frameIndex++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(frame);

        // Report the embedded timestamp of the grabbed frame.
        TimeStamp timeStamp = frame.timeStamp;
        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            frameIndex,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Convert the raw frame to BGR so a Bitmap can be produced.
        ManagedImage bgrFrame = new ManagedImage();
        frame.Convert(PixelFormat.PixelFormatBgr, bgrFrame);

        // Create a unique filename
        string filename = String.Format(
            "FlyCapture2Test_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            frameIndex);

        // Bitmaps are only valid while the ManagedImage pixel format is
        // RGB or RGBU.
        System.Drawing.Bitmap bitmap = bgrFrame.bitmap;

        // Save the image
        bitmap.Save(filename);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();
}
/// <summary>
/// Takes a single software-triggered picture and saves it to
/// <paramref name="fileName"/>: powers on the camera, configures trigger
/// mode 0 with software source 7 and a 5-second grab timeout, fires one
/// trigger, and writes the resulting frame to disk.
/// </summary>
/// <param name="fileName">File name where the picture is going to be saved. Must be a correct path.</param>
/// <exception cref="SoftwareTriggerNotSupportedException">Thrown when the camera doesn't support software triggering.</exception>
/// <exception cref="ExternalTriggerNotSupportedException">Thrown when the camera doesn't support external triggering.</exception>
/// <exception cref="TriggerFailedException">Thrown when the triggering had failed and the picture hasn't been taken.</exception>
public void Snap(string fileName) // To think of a possible separation to avoid setting everything all the time
{
    // Power on the camera
    const uint CameraPower = 0x610;
    const uint CameraPowerValue = 0x80000000;
    cam.WriteRegister(CameraPower, CameraPowerValue);

    const Int32 MillisecondsToSleep = 100;
    uint cameraPowerValueRead;

    // Wait for camera to complete power-up
    do
    {
        System.Threading.Thread.Sleep(MillisecondsToSleep);
        cameraPowerValueRead = cam.ReadRegister(CameraPower);
    }
    while ((cameraPowerValueRead & CameraPowerValue) == 0);

    // Get current trigger settings
    TriggerMode triggerMode = cam.GetTriggerMode();

    // Set camera to trigger mode 0; a source of 7 means software trigger
    triggerMode.onOff = true;
    triggerMode.mode = 0;
    triggerMode.parameter = 0;
    triggerMode.source = 7;

    // Set the trigger mode
    cam.SetTriggerMode(triggerMode);

    // Get the camera configuration and set the grab timeout to 5 seconds
    FC2Config config = cam.GetConfiguration();
    config.grabTimeout = 5000;
    cam.SetConfiguration(config);

    #region Taking picture
    // Camera is ready, start capturing images
    cam.StartCapture();
    try
    {
        using (ManagedImage rawImage = new ManagedImage())
        {
            // Check that the trigger is ready
            bool triggerReady = PollForTriggerReady();

            // Fire software trigger
            bool triggerFired = FireSoftwareTrigger();

            if (!(triggerReady && triggerFired))
            {
                throw new TriggerFailedException();
            }

            try
            {
                // Retrieve an image
                cam.RetrieveBuffer(rawImage);
                rawImage.Save(fileName);
            }
            catch (FC2Exception ex)
            {
                Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
            }
        }
    }
    finally
    {
        // BUG FIX: the original left the camera capturing with trigger mode
        // still enabled when TriggerFailedException was thrown. Always stop
        // capture and disable the trigger so the camera stays usable.
        cam.StopCapture();
        triggerMode.onOff = false;
        cam.SetTriggerMode(triggerMode);
    }
    #endregion
}
/// <summary>
/// Connects the camera, configures the requested video mode, prepares the
/// calibration point buffers, and launches the calibration thread.
/// </summary>
/// <param name="vidmode">0 = 1600x1200 YUV422 @30fps, 1 = 1600x1200 RGB @15fps,
/// 2 = 960x540 Format7 Mode4 RGB8.</param>
/// <param name="camGuid">GUID of the camera to calibrate with.</param>
public void startCalibration(int vidmode, ManagedPGRGuid camGuid)
{
    VidMode = vidmode;

    // Starting the camera
    // 1. connect to the camera
    Cam.Connect(camGuid);

    // 2. setting up the video mode
    switch (VidMode)
    {
        case 0:
            Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Yuv422, FrameRate.FrameRate30);
            break;
        case 1:
            Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Rgb, FrameRate.FrameRate15);
            break;
        case 2:
        {
            Format7ImageSettings fset = new Format7ImageSettings();
            fset.height = 540;
            fset.width = 960;
            fset.offsetX = 40;
            fset.offsetY = 118;
            fset.mode = Mode.Mode4;
            fset.pixelFormat = PixelFormat.PixelFormatRgb8;
            Cam.SetFormat7Configuration(fset, 40.0f); // this equivalent to 24 fps
            break;
        }
    }

    // creating the thread
    CalibrationThread = new Thread(MainLoop);

    // zeroing the index of frame counter
    FrameCounter = 0;

    // creating the point detected storage array
    Points = new PointF[FrameCount][];
    ObjectPoints = new MCvPoint3D32f[FrameCount][];

    // showing the image viewer
    //imageViewer.Show();

    // clearing flag
    CalibrationDone = false;

    // Pre-compute the ideal chessboard corner positions on the z = 0 plane.
    for (int frame = 0; frame < FrameCount; frame++)
    {
        ObjectPoints[frame] = new MCvPoint3D32f[ChessHorizCount * ChessVertCount];
        for (int corner = 0; corner < ChessVertCount * ChessHorizCount; corner++)
        {
            ObjectPoints[frame][corner].x = (float)(RectWidth * (corner % ChessHorizCount));
            ObjectPoints[frame][corner].y = (float)(RectHeight * (corner / ChessHorizCount));
            ObjectPoints[frame][corner].z = 0;
        }
    }

    // starting the camera capture
    Cam.StartCapture();
    state = ST_CALIBRATING;
    CalibrationThread.Start();
}
/// <summary>
/// Connects and configures the camera, optionally opens an MJPG AVI
/// recorder, primes the capture stream by discarding the first frames, and
/// launches the IMU-sampling, recording and dumping threads.
/// </summary>
/// <param name="camGuid">GUID of the camera to capture from.</param>
/// <param name="vidMode">0 = 1600x1200 YUV422 @30fps, 1 = 1600x1200 RGB @15fps,
/// 2 = 960x540 Format7 Mode4 RGB8.</param>
/// <param name="displayPicture">PictureBox used to display live frames.</param>
/// <param name="fileName">Output AVI path (used only when record2file is true).</param>
/// <param name="record2file">Whether to record the stream to an AVI file.</param>
public void startCapture(ManagedPGRGuid camGuid, int vidMode, System.Windows.Forms.PictureBox displayPicture, String fileName, Boolean record2file)
{
    int i;

    Flag_GravityFound_Y = false; // gravity is not known yet

    // CLEARING THE FRAME QUEUE NO MATTER WHAT...
    FrameQueue.clear();

    RecordToFile = record2file;

    // creating the GPS data list
    GpsCaptureData = new List<GPSDataInstance>();

    // creating the IMU data List
    IMUCapturedata = new List<IMUDataInstance>();

    // resetting frame index
    FrameIndex = 0;

    // 1. connect to the camera
    Cam.Connect(camGuid);

    // 2. setting up the video mode (and remembering the nominal fps)
    int fps_i = 0;
    if (vidMode == 0)
    {
        Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Yuv422, FrameRate.FrameRate30);
        fps_i = 30;
    }
    else if (vidMode == 1)
    {
        Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Rgb, FrameRate.FrameRate15);
        fps_i = 15;
    }
    else if (vidMode == 2)
    {
        Format7ImageSettings fset = new Format7ImageSettings();
        fset.height = 540;
        fset.width = 960;
        fset.offsetX = 40;
        fset.offsetY = 118;
        fset.mode = Mode.Mode4;
        fset.pixelFormat = PixelFormat.PixelFormatRgb8;
        Cam.SetFormat7Configuration(fset, 40.0f); // this equivalent to 24 fps
        fps_i = 24;
    }

    if (RecordToFile)
    {
        // 3. Creating the avi recorder object
        AviRecorder = new ManagedAVIRecorder();
        MJPGOption option = new MJPGOption();
        float fps = (float)fps_i;
        option.frameRate = fps;
        option.quality = 100; // 100 for superb quality
        AviRecorder.AVIOpen(fileName, option);
    }

    // 4. setting the frame buffering option
    // leave it for now...

    // 5. start the capturing
    Cam.StartCapture();

    // MUST discard the first few frames!
    ManagedImage rawImage = new ManagedImage();
    for (i = 0; i < 10; i++)
    {
        Cam.RetrieveBuffer(rawImage);
    }

    // 6. set the display bitmap
    DisplayPicture = displayPicture;

    // 7. starting sampling, recording and dumping threads
    // IMU sampling thread
    // NOTE(review): a 0.0075 s period is ~133 Hz, not the 150 Hz the
    // original comment claimed — confirm the intended sampling rate.
    IMUSamplingTimer = new PrecisionTimer(.0075, this.IMUSamplingEvent);
    RecordingThreadActive = true;
    OutOfRecordingThread = true;
    IMUSamplingTimer.start();
    RecordingThread = new Thread(this.mainLoop);
    //RecordingThread.Priority = ThreadPriority.Highest;
    RecordingThread.Start();

    // creating the thread for the dumping
    DumpingThread = new System.Threading.Thread(this.dumpingLoop);

    // Must wait until the recording thread enters its loop, otherwise the
    // dumping will never start.
    // BUG FIX: the original spin-wait (`while (OutOfRecordingThread) ;`)
    // burned a full CPU core; sleeping inside the loop keeps the same
    // semantics without the busy spin.
    // NOTE(review): OutOfRecordingThread is written by another thread and
    // should probably be declared volatile — confirm at the field site.
    while (OutOfRecordingThread)
    {
        Thread.Sleep(1);
    }

    DumpingThread.Start();
}
/// <summary>
/// Demonstrates device-event registration: hooks the "EventExposureEnd"
/// event on the camera, grabs NumImages frames, then deregisters all
/// events, disconnects, and resets the shared ImageCount counter.
/// </summary>
void RunSingleCamera(ManagedPGRGuid guid)
{
    const int NumImages = 10;

    ManagedCamera cam = new ManagedCamera();

    // Connect and report which camera we are talking to.
    cam.Connect(guid);
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Register for End of Exposure (EoE) events: populate a
    // ManagedEventOptions instance and hand it to the camera.
    ManagedEventOptions option = new ManagedEventOptions();
    option.EventCallbackFcn = OnEventReceived;
    option.EventName = "EventExposureEnd";

    try
    {
        cam.RegisterEvent(option);
        Console.WriteLine("Successfully registered event: {0}", option.EventName);
    }
    catch (FC2Exception ex)
    {
        Console.WriteLine("Error registering EventExposureEnd : {0}", ex.Message);
        return;
    }

    // Attempt to register all events. This is EXPECTED to fail (an event is
    // already registered), but the failure is not fatal. To use
    // RegisterAllEvents for real, call DeregisterAllEvents first. With many
    // event types, a common pattern is: register a default callback for all
    // events, deregister the one event needing a custom callback, then
    // re-register just that event — this keeps the callback list intact.
    try
    {
        cam.RegisterAllEvents(option);
    }
    catch (FC2Exception ex)
    {
        // Expected error
        Console.WriteLine("Error registering EventExposureEnd : {0}", ex.Message);
    }

    // Start capturing images
    cam.StartCapture();

    // Drain NumImages frames from the buffer.
    ManagedImage frame = new ManagedImage();
    for (ImageCount = 0; ImageCount < NumImages; ImageCount++)
    {
        try
        {
            cam.RetrieveBuffer(frame);
        }
        catch (FC2Exception ex)
        {
            Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
            continue;
        }
    }

    // Stop capturing images
    cam.StopCapture();

    // Uncomment the following to deregister event handler for specific device event
    //cam.DeregisterEvent(option);

    // Deregister event handler for all events
    cam.DeregisterAllEvents();

    // Disconnect the camera
    cam.Disconnect();

    // Reset counter for next iteration
    ImageCount = 0;
}
/// <summary>
/// CustomImageEx: configures Format7 Mode0 / Mono8 at full sensor size on
/// the first detected camera, enables embedded timestamps, grabs
/// k_numImages frames and saves each as a BMP in the current folder.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;
    const int k_numImages = 10;

    // Since this application saves images in the current folder
    // we must ensure that we have permission to write to this folder.
    // If we do not have permission, fail right away.
    FileStream fileStream;
    try
    {
        fileStream = new FileStream(@"test.txt", FileMode.Create);
        fileStream.Close();
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.\n");
        return;
    }

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // BUG FIX: guard against zero cameras before indexing the bus.
    if (numCameras == 0)
    {
        Console.WriteLine("Not enough cameras!");
        return;
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);

    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);

    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // BUG FIX: the original returned silently here; tell the user why.
        Console.WriteLine("Pixel format is not supported\n");
        return;
    }

    // Request the full sensor at the chosen mode and pixel format.
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 0;
    fmt7ImageSettings.offsetY = 0;
    fmt7ImageSettings.width = fmt7Info.maxWidth;
    fmt7ImageSettings.height = fmt7Info.maxHeight;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings,
        ref settingsValid);
    if (settingsValid != true)
    {
        // BUG FIX: report invalid settings instead of exiting silently.
        Console.WriteLine("Format7 settings are not valid\n");
        return;
    }

    // Set the settings to the camera
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    // Retrieve frame rate property
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);

    Console.WriteLine("Grabbing {0} images", k_numImages);

    ManagedImage rawImage = new ManagedImage();

    // PERF: reuse one conversion buffer instead of allocating a fresh
    // ManagedImage on every loop iteration.
    ManagedImage convertedImage = new ManagedImage();

    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp
        TimeStamp timeStamp = rawImage.timeStamp;
        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            imageCnt,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Convert the raw image
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        // Create a unique filename
        string filename = String.Format(
            "CustomImageEx_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            imageCnt);

        // Get the Bitmap object. Bitmaps are only valid if the
        // pixel format of the ManagedImage is RGB or RGBU.
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;

        // Save the image
        bitmap.Save(filename);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Grabs NumImages frames from the given camera into memory, then saves the
/// sequence three times: as an uncompressed AVI, a Motion-JPEG AVI, and an
/// H.264 video. Note: roughly 100 full frames are buffered in RAM at once.
/// </summary>
void RunCamera(ManagedPGRGuid guid)
{
    const uint NumImages = 100;

    try
    {
        using (ManagedCamera cam = new ManagedCamera())
        {
            cam.Connect(guid);

            CameraInfo camInfo = cam.GetCameraInfo();
            PrintCameraInfo(camInfo);

            // Start capturing images
            Console.WriteLine("Starting capture...");
            cam.StartCapture();

            // Buffer every grabbed frame so they can be written out later.
            List<ManagedImage> imageList = new List<ManagedImage>();
            ManagedImage grabBuffer = new ManagedImage();

            for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
            {
                try
                {
                    cam.RetrieveBuffer(grabBuffer);
                }
                catch (FC2Exception ex)
                {
                    Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
                    continue;
                }

                // Deep-copy the frame; the grab buffer is reused next pass.
                ManagedImage storedFrame = new ManagedImage(grabBuffer);
                imageList.Add(storedFrame);
                Console.WriteLine("Grabbed image {0}", imageCnt);
            }

            // Stop capturing images
            Console.WriteLine("Stopping capture...");

            // Use the camera's FRAME_RATE property when present, otherwise
            // fall back to 15 fps.
            CameraPropertyInfo propInfo = cam.GetPropertyInfo(PropertyType.FrameRate);
            float frameRateToUse = 15.0F;
            if (propInfo.present == true)
            {
                CameraProperty prop = cam.GetProperty(PropertyType.FrameRate);
                frameRateToUse = prop.absValue;
            }

            Console.WriteLine("Using frame rate of {0}", frameRateToUse);

            string aviFileName;

            // Uncompressed videos are always saved with avi containers with or without
            // extensions specified in the filepath
            aviFileName = String.Format("SaveImageToAviEx_CSharp-Uncompressed-{0}", camInfo.serialNumber);
            SaveAviHelper(AviType.Uncompressed, ref imageList, aviFileName, frameRateToUse);

            // Motion JPEG videos are always saved with avi containers with or without
            // extensions specified in the filepath
            aviFileName = String.Format("SaveImageToAviEx_CSharp-Mjpg-{0}", camInfo.serialNumber);
            SaveAviHelper(AviType.Mjpg, ref imageList, aviFileName, frameRateToUse);

            // H.264 videos defaults to saving in mp4 containers if extensions are not
            // specified. Otherwise the extension specified by the user will be used.
            aviFileName = String.Format("SaveImageToAviEx_CSharp-H264-{0}", camInfo.serialNumber);
            SaveAviHelper(AviType.H264, ref imageList, aviFileName, frameRateToUse);
        }
    }
    catch (FC2Exception ex)
    {
        Console.WriteLine("There was an FC2 error: " + ex.Message);
    }
}
/// <summary>
/// Connects to the camera identified by <paramref name="guid"/>, enables
/// embedded timestamps when available, and captures frames via the
/// OnImageGrabbed callback until the shared imageCnt counter reaches
/// k_numImages, then stops and disconnects.
/// NOTE(review): near-duplicate of the other callback-based
/// RunSingleCamera in this file, except this variant does not reset
/// imageCnt on exit — confirm whether that difference is intentional.
/// </summary>
/// <param name="guid">GUID of the camera to run.</param>
void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedCamera cam = new ManagedCamera();

    // Connect to a camera
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images; frames are delivered to OnImageGrabbed,
    // which is expected to advance imageCnt.
    cam.StartCapture(OnImageGrabbed);

    CameraProperty frameRateProp = cam.GetProperty(PropertyType.FrameRate);

    // BUG FIX: the wait condition previously hard-coded 10 instead of
    // using k_numImages. Also guard against a zero/invalid frame rate:
    // (int)(1000 / 0f) casts +Infinity to int.MinValue, which would make
    // Thread.Sleep throw ArgumentOutOfRangeException.
    while (imageCnt < k_numImages)
    {
        int millisecondsToSleep = frameRateProp.absValue > 0.0f
            ? (int)(1000 / frameRateProp.absValue)
            : 100;
        Thread.Sleep(millisecondsToSleep);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();
}
private void CaptureCameraCallback()
{
    // Camera worker thread: configures a 200x200 Format7 ROI on the first
    // detected camera, then loops forever grabbing frames, detecting a circle
    // via Hough transform, smoothing the detection over a ring buffer, and
    // displaying the annotated frame plus X/Y/Euclidean offsets in the UI.
    //initialise camera here
    #region camsetup
    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);

    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // Pixel format not supported!
        Console.WriteLine("Pixel format is not supported");
        return;
    }

    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 1124;
    fmt7ImageSettings.offsetY = 924;
    fmt7ImageSettings.width = 200;
    fmt7ImageSettings.height = 200;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings, ref settingsValid);
    if (settingsValid != true)
    {
        // Settings are not valid
        return;
    }

    // Set the settings to the camera
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    // Retrieve frame rate property
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);
    #endregion

    Mat image;
    Mat grey = new Mat();
    int differencex = 0;
    int differencey = 0;
    double diffenceeucl = 0;

    OpenCvSharp.Point centre_im = new OpenCvSharp.Point();
    int centrex = Convert.ToInt16(fmt7ImageSettings.width) / 2;
    // Fixed: the image-centre Y must come from the ROI height, not its width.
    // (Harmless today because the ROI is square, but wrong for any other ROI.)
    int centrey = Convert.ToInt16(fmt7ImageSettings.height) / 2;

    double dp_ = 1;        //inverse ratio of array accumulator to image resolution
    double minDist_ = 100; // minimum distance between centre of detected circles
    double param1_ = 100;  // Higher threshold of Canny edge detection
    double param2_ = 20;   // Accumulator threshold, smaller value leads to higher false detection rates
    int minRad_ = 45;      // minimum radius
    int maxRad_ = 60;      // maximum radius

    centre_im.X = centrex;
    centre_im.Y = centrey;

    // Ring buffer used to smooth the detected circle centre and radius.
    int windowsize = 20;
    int[] centresx = new int[windowsize];
    int[] centresy = new int[windowsize];
    int[] radi = new int[windowsize];

    OpenCvSharp.Point centre = new OpenCvSharp.Point();
    int radius = 1;
    int buffpos = 0;
    int flag = 0; // set once at least one circle has ever been detected

    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();

    //do repeated actions here
    while (true)
    {
        cam.RetrieveBuffer(rawImage);
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;
        image = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap);
        Cv2.CvtColor(image, grey, ColorConversionCodes.BGR2GRAY);

        // Inner circle
        CircleSegment[] circles = Cv2.HoughCircles(grey, HoughMethods.Gradient,
            dp_, minDist_, param1_, param2_, minRad_, maxRad_);
        for (int i = 0; i < circles.Length; i++)
        {
            flag = 1;
            // Fixed: index with i. The original read circles[0] on every
            // iteration, flooding the smoothing buffer with duplicates of
            // the first detection instead of using each detected circle.
            centre.X = Convert.ToInt16(Math.Round(circles[i].Center.X));
            centre.Y = Convert.ToInt16(Math.Round(circles[i].Center.Y));
            radius = Convert.ToInt16(Math.Round(circles[i].Radius));

            // Push the detection into the ring buffer, then report the
            // window average as the smoothed centre/radius.
            buffpos = (buffpos + 1) % windowsize;
            centresx[buffpos] = centre.X;
            centresy[buffpos] = centre.Y;
            radi[buffpos] = radius;
            centre.X = Convert.ToInt16(centresx.Average());
            centre.Y = Convert.ToInt16(centresy.Average());
            radius = Convert.ToInt16(radi.Average());

            // Offset of the smoothed centre from the image centre.
            differencex = centre.X - centrex;
            differencey = centre.Y - centrey;
            diffenceeucl = Math.Round((Math.Sqrt(Math.Pow(differencex, 2) + Math.Pow(differencey, 2))), 2);
        }

        if (flag == 1)
        {
            // Draw the smoothed detection.
            Cv2.Circle(image, centre, 3, Scalar.Red);
            Cv2.Circle(image, centre, radius, Scalar.Red, 3);
        }

        // Draw the fixed reference at the image centre.
        Cv2.Circle(image, centre_im, 3, Scalar.DeepSkyBlue);
        Cv2.Circle(image, centre_im, 50, Scalar.DeepSkyBlue, 3);

        string xdiff = differencex.ToString();
        string textxparse = "X Offset: " + xdiff + " [pixels]";
        string ydiff = differencey.ToString();
        string textyparse = "Y Offset: " + ydiff + " [pixels]";
        string eucl = diffenceeucl.ToString();
        string texteucle = "Eucl. Dist: " + eucl + " [pixels]";
        AppendTextBoxX(textxparse);
        AppendTextBoxE(texteucle);
        AppendTextBoxY(textyparse);

        // Push the annotated frame to the display control.
        Bitmap bm = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(image);
        bm.SetResolution(flydisp.Width, flydisp.Height);
        flydisp.Image = bm;
    }
}
public FlyCapture()
{
    // Builds a cold observable that, on subscription, opens the camera at
    // Index, streams frames as FlyCaptureDataFrame items, and tears the
    // camera down when the subscription is cancelled. PublishReconnectable/
    // RefCount share a single camera session among concurrent subscribers.
    NumBuffers = 10;
    GrabMode = GrabMode.BufferFrames;
    ColorProcessing = ColorProcessingAlgorithm.Default;
    source = Observable.Create<FlyCaptureDataFrame>((observer, cancellationToken) =>
    {
        // LongRunning: the grab loop below blocks, so give it its own thread.
        return (Task.Factory.StartNew(() =>
        {
            // Serialize camera access across subscriptions.
            lock (captureLock)
            {
                ManagedCamera camera;
                using (var manager = new ManagedBusManager())
                {
                    var guid = manager.GetCameraFromIndex((uint)Index);
                    camera = new ManagedCamera();
                    camera.Connect(guid);
                }

                // capture flag: 1 while streaming; the cancellation callback
                // resets it to 0 BEFORE stopping capture so the grab loop can
                // distinguish a deliberate stop from a real driver error.
                var capture = 0;
                var numBuffers = NumBuffers; // NOTE(review): local never read
                var config = camera.GetConfiguration();
                config.grabMode = GrabMode;
                config.numBuffers = (uint)NumBuffers;
                config.highPerformanceRetrieveBuffer = true;
                camera.SetConfiguration(config);
                try
                {
                    var colorProcessing = ColorProcessing;
                    using (var image = new ManagedImage())
                    using (var notification = cancellationToken.Register(() =>
                    {
                        // Runs on the cancelling thread; StopCapture makes the
                        // blocked RetrieveBuffer below fail, breaking the loop.
                        Interlocked.Exchange(ref capture, 0);
                        camera.StopCapture();
                    }))
                    {
                        camera.StartCapture();
                        Interlocked.Exchange(ref capture, 1);
                        while (!cancellationToken.IsCancellationRequested)
                        {
                            IplImage output;
                            BayerTileFormat bayerTileFormat;
                            try
                            {
                                camera.RetrieveBuffer(image);
                            }
                            catch (FC2Exception)
                            {
                                // capture == 0 means cancellation stopped the
                                // stream: exit quietly. Otherwise it is a real
                                // error and must propagate to the observer.
                                if (capture == 0)
                                {
                                    break;
                                }
                                else
                                {
                                    throw;
                                }
                            }

                            var raw16 = image.pixelFormat == PixelFormat.PixelFormatRaw16;
                            // Mono frames — and raw frames that need no
                            // demosaicing — are copied out as one channel;
                            // everything else is converted to 3-channel BGR.
                            if (image.pixelFormat == PixelFormat.PixelFormatMono8 ||
                                image.pixelFormat == PixelFormat.PixelFormatMono16 ||
                                ((image.pixelFormat == PixelFormat.PixelFormatRaw8 || raw16) &&
                                 (image.bayerTileFormat == BayerTileFormat.None ||
                                  colorProcessing == ColorProcessingAlgorithm.NoColorProcessing)))
                            {
                                unsafe
                                {
                                    bayerTileFormat = image.bayerTileFormat;
                                    var depth = image.pixelFormat == PixelFormat.PixelFormatMono16 || raw16 ? IplDepth.U16 : IplDepth.U8;
                                    // Wrap the driver buffer without copying,
                                    // then deep-copy into an image we own
                                    // (the driver reuses its buffer).
                                    var bitmapHeader = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 1, new IntPtr(image.data));
                                    output = new IplImage(bitmapHeader.Size, bitmapHeader.Depth, bitmapHeader.Channels);
                                    CV.Copy(bitmapHeader, output);
                                }
                            }
                            else
                            {
                                unsafe
                                {
                                    bayerTileFormat = BayerTileFormat.None;
                                    var depth = raw16 ? IplDepth.U16 : IplDepth.U8;
                                    var format = raw16 ? PixelFormat.PixelFormatBgr16 : PixelFormat.PixelFormatBgr;
                                    output = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 3);
                                    // Convert directly into output's pixel
                                    // buffer via a zero-copy ManagedImage view.
                                    using (var convertedImage = new ManagedImage(
                                               (uint)output.Height,
                                               (uint)output.Width,
                                               (uint)output.WidthStep,
                                               (byte*)output.ImageData.ToPointer(),
                                               (uint)(output.WidthStep * output.Height),
                                               format))
                                    {
                                        convertedImage.colorProcessingAlgorithm = colorProcessing;
                                        image.Convert(format, convertedImage);
                                    }
                                }
                            }

                            observer.OnNext(new FlyCaptureDataFrame(output, image.imageMetadata, bayerTileFormat));
                        }
                    }
                }
                finally
                {
                    // Stop only if the cancellation callback has not already.
                    if (capture != 0)
                    {
                        camera.StopCapture();
                    }

                    camera.Disconnect();
                    camera.Dispose();
                }
            }
        }, cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Default));
    })
    .PublishReconnectable()
    .RefCount();
}
public FlyCapture()
{
    // Blackfly variant of the capture observable: powers the camera up via
    // direct register writes, applies frame-rate/shutter/gain/auto-exposure
    // settings (re-applied live when the bound properties change), streams
    // frames as FlyCaptureDataFrame items, and powers the camera down on
    // unsubscription. Register addresses follow the IIDC/FlyCapture2 map.
    ColorProcessing = ColorProcessingAlgorithm.Default;
    source = Observable.Create<FlyCaptureDataFrame>((observer, cancellationToken) =>
    {
        // LongRunning: the grab loop below blocks, so give it its own thread.
        return (Task.Factory.StartNew(() =>
        {
            // Serialize camera access across subscriptions.
            lock (captureLock)
            {
                ManagedCamera camera;
                using (var manager = new ManagedBusManager())
                {
                    var guid = manager.GetCameraFromIndex((uint)Index);
                    camera = new ManagedCamera();
                    camera.Connect(guid);

                    // Power on the camera
                    const uint CameraPower = 0x610;
                    const uint CameraPowerValue = 0x80000000;
                    camera.WriteRegister(CameraPower, CameraPowerValue);

                    // Wait for camera to complete power-up (poll the power bit)
                    const Int32 MillisecondsToSleep = 100;
                    uint cameraPowerValueRead = 0;
                    do
                    {
                        Thread.Sleep(MillisecondsToSleep);
                        cameraPowerValueRead = camera.ReadRegister(CameraPower);
                    }
                    while ((cameraPowerValueRead & CameraPowerValue) == 0);
                }

                // capture flag: 1 while streaming; the cancellation callback
                // resets it to 0 BEFORE stopping capture so the grab loop can
                // distinguish a deliberate stop from a real driver error.
                var capture = 0;
                try
                {
                    // Set frame rate
                    var prop = new CameraProperty(PropertyType.FrameRate);
                    prop.absControl = true;
                    prop.absValue = FramesPerSecond;
                    prop.autoManualMode = false;
                    prop.onOff = true;
                    camera.SetProperty(prop);

                    // Enable/disable blackfly pull up
                    const uint pullUp = 0x19D0;
                    if (EnableBlackflyOutputVoltage)
                    {
                        camera.WriteRegister(pullUp, 0x10000001);
                    }
                    else
                    {
                        camera.WriteRegister(pullUp, 0x10000000);
                    }

                    // Acquisition parameters. Locals shadow the bound
                    // properties so the loop can detect live changes.
                    var colorProcessing = ColorProcessing;
                    var autoExposure = !AutoExposure; // Horrible hack to trigger update initially
                    var shutter = Shutter;
                    var gain = Gain;

                    // Configure embedded info: bit 6 = frame counter,
                    // bit 0 = timestamp.
                    const uint embeddedInfo = 0x12F8;
                    uint embeddedInfoState = camera.ReadRegister(embeddedInfo);
                    if (EnableEmbeddedFrameCounter)
                    {
                        embeddedInfoState |= (uint)1 << 6;
                    }
                    else
                    {
                        embeddedInfoState &= ~((uint)1 << 6);
                    }

                    if (EnableEmbeddedFrameTimeStamp)
                    {
                        embeddedInfoState |= (uint)1 << 0;
                    }
                    else
                    {
                        embeddedInfoState &= ~((uint)1 << 0);
                    }

                    camera.WriteRegister(embeddedInfo, embeddedInfoState);

                    using (var image = new ManagedImage())
                    using (var notification = cancellationToken.Register(() =>
                    {
                        // Runs on the cancelling thread; StopCapture makes the
                        // blocked RetrieveBuffer below fail, breaking the loop.
                        Interlocked.Exchange(ref capture, 0);
                        camera.StopCapture();
                    }))
                    {
                        camera.StartCapture();
                        Interlocked.Exchange(ref capture, 1);
                        while (!cancellationToken.IsCancellationRequested)
                        {
                            IplImage output;
                            BayerTileFormat bayerTileFormat;
                            // AutoExposure just switched on: hand exposure,
                            // shutter and gain control back to the camera.
                            if (autoExposure != AutoExposure && AutoExposure)
                            {
                                prop = new CameraProperty(PropertyType.AutoExposure);
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                autoExposure = AutoExposure;
                                // Shutter
                                prop = new CameraProperty(PropertyType.Shutter);
                                prop.absControl = true;
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                // Gain
                                prop = new CameraProperty(PropertyType.Gain);
                                prop.absControl = true;
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                autoExposure = AutoExposure;
                            }
                            else if (autoExposure != AutoExposure && !AutoExposure)
                            {
                                // AutoExposure just switched off: poison the
                                // shadow values so the branches below re-apply
                                // the manual shutter/gain settings.
                                shutter = -0.1f; // Hack
                                gain = -0.1f;
                                autoExposure = AutoExposure;
                            }

                            if (shutter != Shutter && !AutoExposure)
                            {
                                // Map the normalized Shutter value onto the
                                // camera's absolute shutter range.
                                var info = camera.GetPropertyInfo(PropertyType.Shutter);
                                var delta = info.absMax - info.absMin;
                                prop = new CameraProperty(PropertyType.Shutter);
                                prop.absControl = true;
                                prop.absValue = Shutter * delta + info.absMin;
                                prop.autoManualMode = false;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                shutter = Shutter;
                            }

                            if (gain != Gain && !AutoExposure)
                            {
                                // NOTE(review): this queries the SHUTTER range
                                // to scale the GAIN value — looks like a
                                // copy-paste of the branch above; presumably it
                                // should use PropertyType.Gain. Confirm before
                                // changing, since callers may rely on it.
                                var info = camera.GetPropertyInfo(PropertyType.Shutter);
                                var delta = info.absMax - info.absMin;
                                prop = new CameraProperty(PropertyType.Gain);
                                prop.absControl = true;
                                prop.absValue = Gain * delta + info.absMin;;
                                prop.autoManualMode = false;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                gain = Gain;
                            }

                            try
                            {
                                camera.RetrieveBuffer(image);
                            }
                            catch (FC2Exception ex)
                            {
                                // capture == 0 means cancellation stopped the
                                // stream: exit quietly. Consistency errors may
                                // optionally be skipped; all else propagates.
                                if (capture == 0)
                                {
                                    break;
                                }
                                else if (IgnoreImageConsistencyError && ex.CauseType == ErrorType.ImageConsistencyError)
                                {
                                    continue;
                                }
                                else
                                {
                                    throw;
                                }
                            }

                            // Mono frames — and raw frames that need no
                            // demosaicing — are copied out as one channel;
                            // everything else is converted to 3-channel BGR.
                            if (image.pixelFormat == PixelFormat.PixelFormatMono8 ||
                                image.pixelFormat == PixelFormat.PixelFormatMono16 ||
                                (image.pixelFormat == PixelFormat.PixelFormatRaw8 &&
                                 (image.bayerTileFormat == BayerTileFormat.None ||
                                  colorProcessing == ColorProcessingAlgorithm.NoColorProcessing)))
                            {
                                unsafe
                                {
                                    bayerTileFormat = image.bayerTileFormat;
                                    var depth = image.pixelFormat == PixelFormat.PixelFormatMono16 ? IplDepth.U16 : IplDepth.U8;
                                    // Wrap the driver buffer without copying,
                                    // then deep-copy into an image we own
                                    // (the driver reuses its buffer).
                                    var bitmapHeader = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 1, new IntPtr(image.data));
                                    output = new IplImage(bitmapHeader.Size, bitmapHeader.Depth, bitmapHeader.Channels);
                                    CV.Copy(bitmapHeader, output);
                                }
                            }
                            else
                            {
                                unsafe
                                {
                                    bayerTileFormat = BayerTileFormat.None;
                                    output = new IplImage(new Size((int)image.cols, (int)image.rows), IplDepth.U8, 3);
                                    // Convert directly into output's pixel
                                    // buffer via a zero-copy ManagedImage view.
                                    using (var convertedImage = new ManagedImage(
                                               (uint)output.Height,
                                               (uint)output.Width,
                                               (uint)output.WidthStep,
                                               (byte*)output.ImageData.ToPointer(),
                                               (uint)(output.WidthStep * output.Height),
                                               PixelFormat.PixelFormatBgr))
                                    {
                                        convertedImage.colorProcessingAlgorithm = colorProcessing;
                                        image.Convert(PixelFormat.PixelFormatBgr, convertedImage);
                                    }
                                }
                            }

                            observer.OnNext(new FlyCaptureDataFrame(output, image.imageMetadata, bayerTileFormat));
                        }
                    }
                }
                finally
                {
                    // Power off the camera
                    const uint CameraPower = 0x610;
                    const uint CameraPowerValue = 0x00000000;
                    camera.WriteRegister(CameraPower, CameraPowerValue);

                    // Stop only if the cancellation callback has not already.
                    if (capture != 0)
                    {
                        camera.StopCapture();
                    }

                    camera.Disconnect();
                    camera.Dispose();
                }
            }
        }, cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Default));
    })
    .PublishReconnectable()
    .RefCount();
}
public void RunSingleCamera(ManagedPGRGuid guid) { const int k_numImages = 10; ManagedCamera cam = new ManagedCamera(); // Connect to a camera cam.Connect(guid); // Get the camera information CameraInfo camInfo = cam.GetCameraInfo(); PrintCameraInfo(camInfo); // Get embedded image info from camera EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo(); // Enable timestamp collection if (embeddedInfo.timestamp.available == true) { embeddedInfo.timestamp.onOff = true; } // Set embedded image info to camera cam.SetEmbeddedImageInfo(embeddedInfo); // Start capturing images cam.StartCapture(); // Create a raw image ManagedImage rawImage = new ManagedImage(); // Create a converted image ManagedImage convertedImage = new ManagedImage(); for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++) { // Retrieve an image cam.RetrieveBuffer(rawImage); // Get the timestamp TimeStamp timeStamp = rawImage.timeStamp; Console.WriteLine( "Grabbed image {0} - {1} {2} {3}", imageCnt, timeStamp.cycleSeconds, timeStamp.cycleCount, timeStamp.cycleOffset); // Convert the raw image rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage); // Create a unique filename string filename = String.Format( "FlyCapture2Test_CSharp-{0}-{1}.bmp", camInfo.serialNumber, imageCnt); // Get the Bitmap object. Bitmaps are only valid if the // pixel format of the ManagedImage is RGB or RGBU. System.Drawing.Bitmap bitmap = convertedImage.bitmap; // Save the image bitmap.Save(filename); } // Stop capturing images cam.StopCapture(); // Disconnect the camera cam.Disconnect(); }
unsafe void RunSingleCamera(ManagedPGRGuid guid, string save_location, int numImages)
{
    // Grabs numImages frames from the camera identified by <guid>, streams
    // them into a video file at save_location, and shows a live preview
    // every 200th frame.
    ManagedCamera cam = new ManagedCamera();

    // Connect to a camera
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Make a 300 Frame Buffer so short processing stalls do not drop frames
    FC2Config bufferFrame = cam.GetConfiguration();
    bufferFrame.grabMode = GrabMode.BufferFrames;
    bufferFrame.numBuffers = 300;
    cam.SetConfiguration(bufferFrame);

    // Start capturing images
    cam.StartCapture();

    // Raw grab buffer and converted buffer; both reused every frame
    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();

    // NOTE(review): assumes the camera delivers exactly 1888x1888 frames —
    // the Mat wrapper and VideoWriter below are built from this fixed size,
    // not from the actual image dimensions. Confirm against the camera setup.
    System.Drawing.Size framesize = new System.Drawing.Size(1888, 1888);
    CvInvoke.NamedWindow("Prey Capture" + save_location, NamedWindowType.Normal);
    VideoWriter camvid = new VideoWriter(save_location, 0, 60, framesize, false);

    for (int imageCnt = 0; imageCnt < numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // use raw8 for GH3s but flea3 can be color.
        rawImage.Convert(PixelFormat.PixelFormatRaw8, convertedImage);

        int cols = (int)convertedImage.cols;
        IntPtr point = (IntPtr)convertedImage.data;

        // Wrap the converted buffer without copying. Dispose the wrapper each
        // iteration — the original allocated a new Mat per frame and never
        // released it, leaking native memory until GC.
        using (Mat cvimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, 1, point, cols))
        {
            camvid.Write(cvimage);

            if (imageCnt % 200 == 0)
            {
                CvInvoke.Imshow("Prey Capture" + save_location, cvimage);
                CvInvoke.WaitKey(1);
                Console.WriteLine(imageCnt);
            }
        }
    }

    // Stop capturing images
    cam.StopCapture();
    camvid.Dispose();

    // Release the per-frame image buffers (previously leaked)
    rawImage.Dispose();
    convertedImage.Dispose();

    // Disconnect the camera
    Console.WriteLine("Done Brah");
    cam.Disconnect();
    CvInvoke.DestroyAllWindows();
}
void RunCamera(ManagedPGRGuid guid) { const uint k_numImages = 100; try { using (ManagedCamera cam = new ManagedCamera()) { cam.Connect(guid); CameraInfo camInfo = cam.GetCameraInfo(); PrintCameraInfo(camInfo); // Start capturing images Console.WriteLine("Starting capture..."); cam.StartCapture(); List <ManagedImage> imageList = new List <ManagedImage>(); ManagedImage rawImage = new ManagedImage(); for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++) { cam.RetrieveBuffer(rawImage); ManagedImage tempImage = new ManagedImage(rawImage); imageList.Add(tempImage); Console.WriteLine("Grabbed image {0}", imageCnt); } // Stop capturing images Console.WriteLine("Stopping capture..."); // Check if the camera supports the FRAME_RATE property CameraPropertyInfo propInfo = cam.GetPropertyInfo(PropertyType.FrameRate); float frameRateToUse = 15.0F; if (propInfo.present == true) { // Get the frame rate CameraProperty prop = cam.GetProperty(PropertyType.FrameRate); frameRateToUse = prop.absValue; } Console.WriteLine("Using frame rate of {0}", frameRateToUse); string aviFileName; aviFileName = String.Format("SaveImageToAviEx_CSharp-Uncompressed-{0}", camInfo.serialNumber); SaveAviHelper(AviType.Uncompressed, ref imageList, aviFileName, frameRateToUse); aviFileName = String.Format("SaveImageToAviEx_CSharp-Mjpg-{0}", camInfo.serialNumber); SaveAviHelper(AviType.Mjpg, ref imageList, aviFileName, frameRateToUse); aviFileName = String.Format("SaveImageToAviEx_CSharp-h264-{0}", camInfo.serialNumber); SaveAviHelper(AviType.H264, ref imageList, aviFileName, frameRateToUse); } } catch (FC2Exception ex) { Console.WriteLine("There was an FC2 error: " + ex.Message); } }