/// <summary>
/// Attempt to connect to the camera identified by <c>this.serial</c>, centre the
/// requested ROI on the sensor, enable embedded timestamps and start live capture.
/// </summary>
/// <returns>True if connection and configuration are successful; false otherwise.</returns>
public bool Initialize()
{
    // Sensor dimensions used to centre the ROI — TODO confirm against camera model.
    const int SensorWidth = 2448;
    const int SensorHeight = 2048;

    ManagedBusManager busMgr = new ManagedBusManager();
    try
    {
        ManagedPGRGuid guid = busMgr.GetCameraFromSerialNumber(this.serial);
        this.camera.Connect(guid);

        // Centre the requested ROI on the sensor.
        GigEImageSettings config = this.camera.GetGigEImageSettings();
        config.height = this.PixelHeight;
        config.width = this.PixelWidth;
        config.offsetX = (SensorWidth - this.PixelWidth) / 2;
        config.offsetY = (SensorHeight - this.PixelHeight) / 2;
        this.camera.SetGigEImageSettings(config);

        // Set embedded timestamp to on so each frame carries capture-time metadata.
        // (Previously outside the try block: a failure here would escape as an
        // exception instead of honoring the bool return contract.)
        EmbeddedImageInfo embeddedInfo = this.camera.GetEmbeddedImageInfo();
        embeddedInfo.timestamp.onOff = true;
        this.camera.SetEmbeddedImageInfo(embeddedInfo);

        // Start live capture.
        this.Connected = true;
        this.Start();
        return true;
    }
    catch (Exception ex)
    {
        // Connection or configuration unsuccessful.
        Logger.Out(ex.ToString());
        this.Connected = false;
        return false;
    }
}
/// <summary>
/// Connects to the first detected camera, enables embedded timestamps and starts
/// capture. Logs timing markers to the debug output; failures are logged, not thrown.
/// </summary>
private void OpenCamera()
{
    try
    {
        System.Diagnostics.Debug.WriteLine("OpenCamera:" + DateTime.Now.ToString("HH:mm:ss.fff"));

        ManagedBusManager busMgr = new ManagedBusManager();
        uint numCameras = busMgr.GetNumOfCameras();
        if (numCameras == 0)
        {
            System.Diagnostics.Debug.WriteLine("没有发现相机!"); // "No camera found!"
            return;
        }

        m_camera = new ManagedCamera();
        // Signaled by the grab thread on exit; created non-signaled here.
        m_grabThreadExited = new AutoResetEvent(false);

        // Connect to the first camera on the bus.
        ManagedPGRGuid m_guid = busMgr.GetCameraFromIndex(0);
        m_camera.Connect(m_guid);

        // Set embedded timestamp to on.
        EmbeddedImageInfo embeddedInfo = m_camera.GetEmbeddedImageInfo();
        embeddedInfo.timestamp.onOff = true;
        m_camera.SetEmbeddedImageInfo(embeddedInfo);

        m_camera.StartCapture();
        System.Diagnostics.Debug.WriteLine("OpenCamera:" + DateTime.Now.ToString("HH:mm:ss.fff"));
    }
    catch (Exception ex)
    {
        // Log the full exception (message + stack trace); ex.Message alone hid the origin.
        System.Diagnostics.Debug.WriteLine(ex.ToString());
    }
}
/// <summary>
/// Lets the user pick a camera, connects to it, enables embedded timestamp /
/// exposure / shutter / gain info, publishes the shutter range to the view model
/// and configures buffered grabbing. Re-shows the selection dialog until a camera
/// is picked, the dialog is cancelled, or the user declines a retry.
/// </summary>
public void ChooseCamera()
{
    StopStreaming();
    DeleteCamera();

    // Selection loop replaces the original goto-based retry.
    ManagedPGRGuid[] guids;
    while (true)
    {
        CameraSelectionDialog selectDialog = new CameraSelectionDialog();
        if (selectDialog.ShowModal() != true)
        {
            // Dialog cancelled: keep the previous (disconnected) state.
            return;
        }

        guids = selectDialog.GetSelectedCameraGuids();
        if (guids.Length >= 1)
        {
            break;
        }

        // Nothing selected: offer a retry, otherwise shut the application down.
        if (MessageBox.Show("You have not selected a camera. Do you want to restart camera selection dialog?", "No camera", MessageBoxButton.YesNo, MessageBoxImage.Question) != MessageBoxResult.Yes)
        {
            Application.Current.Shutdown();
            // Shutdown is queued, not immediate: return now so we do not fall
            // through and dereference guids[0] on an empty selection (old bug).
            return;
        }
    }

    // GigE cameras need the specialized camera class.
    m_busManager = new ManagedBusManager();
    var interfaceType = m_busManager.GetInterfaceTypeFromGuid(guids[0]);
    if (interfaceType == InterfaceType.GigE)
    {
        m_camera = new ManagedGigECamera();
    }
    else
    {
        m_camera = new ManagedCamera();
    }
    m_camera.Connect(guids[0]);

    // Embed timestamp, exposure, shutter and gain metadata in every frame.
    EmbeddedImageInfo embeddedInfo = m_camera.GetEmbeddedImageInfo();
    embeddedInfo.timestamp.onOff = true;
    embeddedInfo.exposure.onOff = true;
    embeddedInfo.shutter.onOff = true;
    embeddedInfo.gain.onOff = true;
    m_camera.SetEmbeddedImageInfo(embeddedInfo);

    // Publish the supported shutter range to the view model.
    float shutterMin = m_camera.GetPropertyInfo(PropertyType.Shutter).absMin,
          shutterMax = m_camera.GetPropertyInfo(PropertyType.Shutter).absMax;
    m_commonViewModel.CameraShutterRangeBegin = shutterMin;
    m_commonViewModel.CameraShutterRangeEnd = shutterMax;

    // Buffer frames so none are dropped between retrievals.
    FC2Config config = m_camera.GetConfiguration();
    config.grabMode = GrabMode.BufferFrames;
    m_camera.SetConfiguration(config);

    m_cameraCtrlDialog.Connect(m_camera);
    m_commonViewModel.IsCameraChosen = true;
    m_commonViewModel.IsStreamingWasStarted = false;
}
/// <summary>
/// Shows (and immediately hides) the camera selection dialog, connects to the first
/// selected camera, wires it to the control dialog and enables embedded
/// timestamp/shutter info.
/// </summary>
/// <returns>True if a camera was connected successfully.</returns>
/// <exception cref="IndexOutOfRangeException">Rethrown when no camera was selected.</exception>
public bool Connect()
{
    bool flag = false;
    CameraSelectionDialog camSlnDlg = new CameraSelectionDialog();
    // NOTE(review): the dialog is flashed (Show/Hide) rather than awaited with
    // ShowModal(); the selection therefore relies on the dialog's default state —
    // confirm this is intended.
    camSlnDlg.Show();
    camSlnDlg.Hide();
    try
    {
        ManagedPGRGuid[] selectedGuids = camSlnDlg.GetSelectedCameraGuids();
        ManagedPGRGuid guidToUse = selectedGuids[0];
        ManagedBusManager busMgr = new ManagedBusManager();
        m_camera = new ManagedCamera();

        // Connect to the first selected GUID.
        m_camera.Connect(guidToUse);
        m_camCtlDlg.Connect(m_camera);

        CameraInfo camInfo = m_camera.GetCameraInfo();
        camInfo.vendorName = "MicroTest";
        camInfo.modelName = "v1";

        // Set embedded timestamp and shutter info to on.
        EmbeddedImageInfo embeddedInfo = m_camera.GetEmbeddedImageInfo();
        embeddedInfo.timestamp.onOff = true;
        embeddedInfo.shutter.onOff = true;
        m_camera.SetEmbeddedImageInfo(embeddedInfo);

        flag = true;
    }
    catch (IndexOutOfRangeException)
    {
        // No camera selected: roll back any partial connection state.
        m_camCtlDlg.Disconnect();
        if (m_camera != null)
        {
            m_camera.Disconnect();
        }
        flag = false;
        // Fixed: was "throw e;", which resets the stack trace. "throw;" preserves it.
        throw;
    }
    return flag;
}
/// <summary>
/// Connects to each GigE camera in <paramref name="serialList"/>, disables hardware
/// triggering and enables embedded timestamps.
/// </summary>
/// <param name="serialList">Camera serial numbers to connect; 0 means "skip this slot".</param>
/// <returns>True if every listed camera connected; false on the first failure.</returns>
public bool CamConnection(uint[] serialList)
{
    NumCameras = serialList.Length;
    ManagedBusManager busMgr = new ManagedBusManager();
    mCameras = new ManagedGigECamera[NumCameras];

    for (uint i = 0; i < NumCameras; i++)
    {
        if (serialList[i] == 0)
        {
            continue; // empty slot
        }

        mCameras[i] = new ManagedGigECamera();
        try
        {
            ManagedPGRGuid guid = busMgr.GetCameraFromSerialNumber(serialList[i]);

            // Connect to a camera.
            mCameras[i].Connect(guid);

            // Turn trigger mode off (free-running acquisition).
            TriggerMode trigMode = new TriggerMode();
            trigMode.onOff = false;
            mCameras[i].SetTriggerMode(trigMode);

            // Turn timestamp embedding on.
            EmbeddedImageInfo imageInfo = new EmbeddedImageInfo();
            imageInfo.timestamp.onOff = true;
            mCameras[i].SetEmbeddedImageInfo(imageInfo);

            GV.IsCameraConnected = true;
        }
        catch (Exception ex)
        {
            // Fixed: the exception was caught and silently discarded; record which
            // serial failed and why before reporting failure.
            System.Diagnostics.Debug.WriteLine(
                "CamConnection failed for serial " + serialList[i] + ": " + ex);
            GV.IsCameraConnected = false;
            return false;
        }
    }
    return true;
}
/// <summary>
/// Connects to a camera, enables embedded timestamps (when available) and captures
/// images via the asynchronous OnImageGrabbed callback until the shared
/// <c>imageCnt</c> counter reaches k_numImages, then stops and disconnects.
/// </summary>
/// <param name="guid">GUID of the camera to run.</param>
void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedCamera cam = new ManagedCamera();

    // Connect to a camera.
    cam.Connect(guid);

    // Get the camera information.
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Get embedded image info from camera.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection if the camera supports it.
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera.
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images; OnImageGrabbed increments imageCnt.
    cam.StartCapture(OnImageGrabbed);

    CameraProperty frameRateProp = cam.GetProperty(PropertyType.FrameRate);
    while (imageCnt < k_numImages) // was a literal 10, shadowing the named constant
    {
        // Sleep roughly one frame period; fall back to 100 ms if the reported frame
        // rate is zero (avoids a negative/overflowed Thread.Sleep argument).
        int millisecondsToSleep = frameRateProp.absValue > 0
            ? (int)(1000 / frameRateProp.absValue)
            : 100;
        Thread.Sleep(millisecondsToSleep);
    }

    // Stop capturing images.
    cam.StopCapture();

    // Disconnect the camera.
    cam.Disconnect();

    // Reset counter for next iteration.
    imageCnt = 0;
}
/// <summary>
/// Form initialization: restores persisted settings, prepares save directories,
/// populates the serial-port picker, connects to the first detected camera and
/// starts the grab loop, then configures the chart appearance.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    // Restore persisted user settings.
    textBox1.Text = Properties.Settings.Default.mirrorAngleStep;
    textBox2.Text = Properties.Settings.Default.mirrorAngle;
    textBoxDCMotorTime.Text = Properties.Settings.Default.motorTime;
    textBoxDCMotor2Time.Text = Properties.Settings.Default.motor2Time;

    Directory.CreateDirectory(savepath);
    Directory.CreateDirectory(savepath + "\\SavedMaxima\\");
    PulseStopWatch.Start();

    // Populate the serial-port picker and select the first port if any exist.
    comboBox1.Items.Clear();
    foreach (string item in System.IO.Ports.SerialPort.GetPortNames())
    {
        comboBox1.Items.Add(item);
    }
    try
    {
        comboBox1.SelectedItem = comboBox1.Items[0];
    }
    catch
    {
        // No serial ports available; leave the selection empty (best-effort).
    }

    My_count = 0;
    Hide();

    // Removed: dead "if (true)" wrapper and its unreachable else branch, plus
    // commented-out camera-selection-dialog code.
    try
    {
        // Connect to the first camera on the bus by serial number.
        ManagedBusManager busMgr = new ManagedBusManager();
        m_camera = new ManagedCamera();
        uint serial1 = busMgr.GetCameraSerialNumberFromIndex(0);
        ManagedPGRGuid guid = busMgr.GetCameraFromSerialNumber(serial1);
        m_camera.Connect(guid);
        m_camCtlDlg.Connect(m_camera);

        CameraInfo camInfo = m_camera.GetCameraInfo();
        UpdateFormCaption(camInfo);

        // Set embedded timestamp to on.
        EmbeddedImageInfo embeddedInfo = m_camera.GetEmbeddedImageInfo();
        embeddedInfo.timestamp.onOff = true;
        m_camera.SetEmbeddedImageInfo(embeddedInfo);

        m_camera.StartCapture();
        m_grabImages = true;
        StartGrabLoop();
    }
    catch (FC2Exception ex)
    {
        // Camera missing or unreachable: show the warning label but keep the form usable.
        label14.Show();
        Debug.WriteLine("Failed to load form successfully: " + ex.Message);
    }
    toolStripButtonStart.Enabled = false;
    toolStripButtonStop.Enabled = true;

    Show();

    // Chart cosmetics: hide grid lines, thicken the trace.
    chart1.ChartAreas["ChartArea1"].AxisX.MajorGrid.Enabled = false;
    chart1.ChartAreas["ChartArea1"].AxisY.MajorGrid.Enabled = false;
    chart1.Series[0].BorderWidth = 3;
}
/// <summary>
/// CustomImageEx entry point: verifies write permission in the current folder,
/// connects to the first camera, configures full-frame Format7 Mono8 capture,
/// grabs k_numImages frames and saves each as a BMP.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;
    const int k_numImages = 10;

    // Since this application saves images in the current folder,
    // ensure we have permission to write to it; fail right away if not.
    FileStream fileStream;
    try
    {
        fileStream = new FileStream(@"test.txt", FileMode.Create);
        fileStream.Close();
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.\n");
        return;
    }

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);
    if (numCameras == 0)
    {
        // Guard added: GetCameraFromIndex(0) on an empty bus would otherwise fail.
        Console.WriteLine("No cameras detected.");
        return;
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information.
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes.
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);

    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // Pixel format not supported!
        return;
    }

    // Use the full sensor at the requested pixel format.
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 0;
    fmt7ImageSettings.offsetY = 0;
    fmt7ImageSettings.width = fmt7Info.maxWidth;
    fmt7ImageSettings.height = fmt7Info.maxHeight;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid.
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings,
        ref settingsValid);
    if (settingsValid != true)
    {
        // Settings are not valid.
        return;
    }

    // Set the settings to the camera.
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Get embedded image info from camera and enable timestamp collection.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images.
    cam.StartCapture();

    // Retrieve frame rate property.
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);

    Console.WriteLine("Grabbing {0} images", k_numImages);

    ManagedImage rawImage = new ManagedImage();
    // Hoisted out of the loop: one conversion buffer is reused for every frame.
    ManagedImage convertedImage = new ManagedImage();

    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        // Retrieve an image.
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp.
        TimeStamp timeStamp = rawImage.timeStamp;
        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            imageCnt,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Convert the raw image.
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        // Create a unique filename.
        string filename = String.Format(
            "CustomImageEx_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            imageCnt);

        // Bitmaps are only valid if the pixel format of the ManagedImage is RGB or RGBU.
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;

        // Save the image.
        bitmap.Save(filename);
    }

    // Stop capturing images.
    cam.StopCapture();

    // Disconnect the camera.
    cam.Disconnect();

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Grabs a fixed number of frames from the given camera, printing each frame's
/// embedded timestamp and saving it as a BMP named after the camera serial number.
/// </summary>
/// <param name="guid">GUID of the camera to capture from.</param>
public void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Report camera details before configuring it.
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Turn on embedded timestamps when the camera supports them.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();
    if (embeddedInfo.timestamp.available)
    {
        embeddedInfo.timestamp.onOff = true;
    }
    cam.SetEmbeddedImageInfo(embeddedInfo);

    cam.StartCapture();

    // One raw and one converted buffer, reused across all frames.
    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();

    for (int frameIndex = 0; frameIndex < k_numImages; frameIndex++)
    {
        cam.RetrieveBuffer(rawImage);

        TimeStamp stamp = rawImage.timeStamp;
        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            frameIndex,
            stamp.cycleSeconds,
            stamp.cycleCount,
            stamp.cycleOffset);

        // BGR conversion so the bitmap accessor below is valid (RGB/RGBU only).
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        string filename = String.Format(
            "FlyCapture2Test_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            frameIndex);

        System.Drawing.Bitmap bitmap = convertedImage.bitmap;
        bitmap.Save(filename);
    }

    cam.StopCapture();
    cam.Disconnect();
}
/// <summary>
/// Form initialization: prompts for a camera via the selection dialog, connects to
/// it (GigE-aware), enables embedded timestamps and starts the grab loop. The form
/// is closed if no camera is chosen or the connection fails.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    Hide();

    CameraSelectionDialog selectionDialog = new CameraSelectionDialog();
    bool userConfirmed = selectionDialog.ShowModal();

    if (!userConfirmed)
    {
        Close();
    }
    else
    {
        try
        {
            ManagedPGRGuid[] chosenGuids = selectionDialog.GetSelectedCameraGuids();
            if (chosenGuids.Length == 0)
            {
                Debug.WriteLine("No cameras selected!");
                Close();
                return;
            }

            ManagedPGRGuid camGuid = chosenGuids[0];

            // GigE cameras require the specialized camera class.
            ManagedBusManager busMgr = new ManagedBusManager();
            InterfaceType busInterface = busMgr.GetInterfaceTypeFromGuid(camGuid);
            m_camera = busInterface == InterfaceType.GigE
                ? (ManagedCameraBase)new ManagedGigECamera()
                : new ManagedCamera();

            // Connect to the first selected GUID and attach the control dialog.
            m_camera.Connect(camGuid);
            m_camCtlDlg.Connect(m_camera);

            CameraInfo camInfo = m_camera.GetCameraInfo();
            UpdateFormCaption(camInfo);

            // Set embedded timestamp to on.
            EmbeddedImageInfo embeddedInfo = m_camera.GetEmbeddedImageInfo();
            embeddedInfo.timestamp.onOff = true;
            m_camera.SetEmbeddedImageInfo(embeddedInfo);

            m_camera.StartCapture();
            m_grabImages = true;
            StartGrabLoop();
        }
        catch (FC2Exception ex)
        {
            Debug.WriteLine("Failed to load form successfully: " + ex.Message);
            Close();
        }
    }

    Show();
}
/// <summary>
/// MultipleCameraEx entry point: connects every detected camera, disables hardware
/// triggering, enables embedded timestamps, then grabs NumImages frames round-robin
/// across all cameras, printing each frame's timestamp, and finally shuts them down.
/// Requires at least two connected cameras.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const int NumImages = 50;

    Program program = new Program();

    // Discover how many cameras are on the bus.
    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // This example needs at least two cameras to demonstrate multi-stream grabbing.
    if (numCameras < 2)
    {
        Console.WriteLine("Insufficient number of cameras.");
        Console.WriteLine("Make sure at least two cameras are connected for example to run.");
        Console.WriteLine("Press Enter to exit.");
        Console.ReadLine();
        return;
    }

    // One camera object per detected device; used for connect, configure, capture.
    ManagedCamera[] cameras = new ManagedCamera[numCameras];

    // Prepare each camera in turn. This is pseudo-simultaneous streaming: one
    // process addresses the cameras by index rather than using threads.
    for (uint camIndex = 0; camIndex < numCameras; camIndex++)
    {
        cameras[camIndex] = new ManagedCamera();
        ManagedPGRGuid guid = busMgr.GetCameraFromIndex(camIndex);

        cameras[camIndex].Connect(guid);

        CameraInfo camInfo = cameras[camIndex].GetCameraInfo();
        PrintCameraInfo(camInfo);

        try
        {
            // Free-running acquisition: hardware trigger off, timestamps on.
            TriggerMode trigger = new TriggerMode();
            trigger.onOff = false;
            cameras[camIndex].SetTriggerMode(trigger);

            EmbeddedImageInfo embedded = new EmbeddedImageInfo();
            embedded.timestamp.onOff = true;
            cameras[camIndex].SetEmbeddedImageInfo(embedded);
        }
        catch (System.Exception ex)
        {
            Console.WriteLine("Error configuring camera : {0}", ex.Message);
            Console.WriteLine("Press any key to exit...");
            Console.ReadLine();
            return;
        }

        try
        {
            cameras[camIndex].StartCapture();
        }
        catch (System.Exception ex)
        {
            Console.WriteLine("Error starting camera : {0}", ex.Message);
            Console.WriteLine("Press any key to exit...");
            Console.ReadLine();
            return;
        }
    }

    // Grab frames round-robin. The inner loop walks the cameras so no single
    // camera is drained before the others get a turn.
    ManagedImage tempImage = new ManagedImage();
    for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
    {
        for (int camCount = 0; camCount < numCameras; camCount++)
        {
            try
            {
                cameras[camCount].RetrieveBuffer(tempImage);
            }
            catch (System.Exception ex)
            {
                Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
                Console.WriteLine("Press any key to exit...");
                Console.ReadLine();
                return;
            }

            // Display the timestamp of the image grabbed from this camera.
            TimeStamp timeStamp = tempImage.timeStamp;
            Console.Out.WriteLine("Camera {0} - Frame {1} - TimeStamp {2} {3}",
                                  camCount, imageCnt, timeStamp.cycleSeconds, timeStamp.cycleCount);
        }
    }

    // Shut every camera down cleanly.
    for (uint camIndex = 0; camIndex < numCameras; camIndex++)
    {
        try
        {
            cameras[camIndex].StopCapture();
            cameras[camIndex].Disconnect();
        }
        catch (System.Exception ex)
        {
            Console.WriteLine("Error cleaning up camera : {0}", ex.Message);
            Console.WriteLine("Press any key to exit...");
            Console.ReadLine();
            return;
        }
    }

    Console.WriteLine("Press enter to exit...");
    Console.ReadLine();
}
/// <summary>
/// Connects to a GigE camera, prints its stream channel info, configures full-frame
/// Mono8 capture, grabs k_numImages frames and saves each as a BMP named after the
/// camera serial number.
/// </summary>
/// <param name="guid">GUID of the GigE camera to run.</param>
void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedGigECamera cam = new ManagedGigECamera();

    // Connect to a camera.
    cam.Connect(guid);

    // Get the camera information.
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Report every stream channel configured on the camera.
    uint numStreamChannels = cam.GetNumStreamChannels();
    for (uint i = 0; i < numStreamChannels; i++)
    {
        PrintStreamChannelInfo(cam.GetGigEStreamChannelInfo(i));
    }

    // Use the full sensor in Mono8.
    GigEImageSettingsInfo imageSettingsInfo = cam.GetGigEImageSettingsInfo();
    GigEImageSettings imageSettings = new GigEImageSettings();
    imageSettings.offsetX = 0;
    imageSettings.offsetY = 0;
    imageSettings.height = imageSettingsInfo.maxHeight;
    imageSettings.width = imageSettingsInfo.maxWidth;
    imageSettings.pixelFormat = PixelFormat.PixelFormatMono8;
    cam.SetGigEImageSettings(imageSettings);

    // Enable timestamp collection when the camera supports it.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images.
    cam.StartCapture();

    ManagedImage rawImage = new ManagedImage();
    // Hoisted out of the loop (was allocated per iteration): one conversion
    // buffer is reused for every frame, matching the other grab examples.
    ManagedImage convertedImage = new ManagedImage();

    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        // Retrieve an image.
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp.
        TimeStamp timeStamp = rawImage.timeStamp;
        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            imageCnt,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Convert the raw image; the bitmap accessor is only valid for RGB/RGBU.
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        // Create a unique filename.
        string filename = String.Format(
            "GigEGrabEx_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            imageCnt);

        System.Drawing.Bitmap bitmap = convertedImage.bitmap;

        // Save the image.
        bitmap.Save(filename);
    }

    // Stop capturing images.
    cam.StopCapture();

    // Disconnect the camera.
    cam.Disconnect();
}
/// <summary>
/// Captures <paramref name="numImages"/> frames from the given camera, streams each
/// into a video file via Emgu CV's VideoWriter, and previews every 200th frame in a
/// named OpenCV window. Uses a 300-frame driver buffer so frames are not dropped.
/// </summary>
/// <param name="guid">GUID of the camera to capture from.</param>
/// <param name="save_location">Output video path; also used to name the preview window.</param>
/// <param name="numImages">Number of frames to record.</param>
unsafe void RunSingleCamera(ManagedPGRGuid guid, string save_location, int numImages)
{
    ManagedCamera cam = new ManagedCamera();

    // Connect to a camera
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Make a 300 Frame Buffer so slow disk writes don't drop frames.
    FC2Config bufferFrame = cam.GetConfiguration();
    bufferFrame.grabMode = GrabMode.BufferFrames;
    bufferFrame.numBuffers = 300;
    cam.SetConfiguration(bufferFrame);

    // Start capturing images
    cam.StartCapture();

    // Create a raw image
    ManagedImage rawImage = new ManagedImage();

    // Create a converted image
    ManagedImage convertedImage = new ManagedImage();

    // Assumes frames are 1888x1888 single-channel — TODO confirm against camera mode;
    // the Mat below is built with this size regardless of the actual image dimensions.
    System.Drawing.Size framesize = new System.Drawing.Size(1888, 1888);
    CvInvoke.NamedWindow("Prey Capture" + save_location, NamedWindowType.Normal);
    // fourcc 0, 60 fps, grayscale (isColor: false).
    VideoWriter camvid = new VideoWriter(save_location, 0, 60, framesize, false);

    for (int imageCnt = 0; imageCnt < numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp (currently unused beyond retrieval)
        TimeStamp timeStamp = rawImage.timeStamp;

        // Convert the raw image
        // rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage); // m

        // Use Raw8 for GH3s, but Flea3 can be color.
        rawImage.Convert(PixelFormat.PixelFormatRaw8, convertedImage);

        // Wrap the converted buffer in a Mat without copying: the Mat borrows
        // convertedImage's pixel data via the raw pointer below.
        int rws = (int)convertedImage.rows; // NOTE(review): unused; cols is used as the Mat step
        int cols = (int)convertedImage.cols;
        IntPtr point = (IntPtr)convertedImage.data;
        Mat cvimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, 1, point, cols);
        camvid.Write(cvimage);

        // Preview every 200th frame and report progress.
        if (imageCnt % 200 == 0)
        {
            CvInvoke.Imshow("Prey Capture" + save_location, cvimage);
            CvInvoke.WaitKey(1);
            Console.WriteLine(imageCnt);
        }
    }

    // Stop capturing images
    cam.StopCapture();
    camvid.Dispose();

    // Disconnect the camera
    Console.WriteLine("Done Brah");
    cam.Disconnect();
    CvInvoke.DestroyAllWindows();
}
/// <summary>
/// Camera worker: configures a 200x200 Format7 Mono8 ROI, then continuously grabs
/// frames, detects the inner circle with a Hough transform, smooths the detection
/// over a rolling window, draws the result and reports the pixel offset from the
/// image centre via the UI text boxes. Runs until the hosting thread is stopped.
/// </summary>
private void CaptureCameraCallback()
{
    #region camsetup
    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information.
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes.
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);

    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        Console.WriteLine("Pixel format is not supported");
        return;
    }

    // 200x200 ROI positioned near the sensor centre.
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 1124;
    fmt7ImageSettings.offsetY = 924;
    fmt7ImageSettings.width = 200;
    fmt7ImageSettings.height = 200;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid.
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings,
        ref settingsValid);
    if (settingsValid != true)
    {
        // Settings are not valid.
        return;
    }

    // Set the settings to the camera.
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Enable embedded timestamps when available.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images.
    cam.StartCapture();

    // Retrieve frame rate property.
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);
    #endregion

    Mat image;
    Mat grey = new Mat();
    int differencex = 0;
    int differencey = 0;
    double diffenceeucl = 0;

    OpenCvSharp.Point centre_im = new OpenCvSharp.Point();
    int centrex = Convert.ToInt16(fmt7ImageSettings.width) / 2;
    // Fixed: was derived from width; the centre's Y coordinate must come from the
    // height (same value here since the ROI is square, but the formula was wrong).
    int centrey = Convert.ToInt16(fmt7ImageSettings.height) / 2;

    double dp_ = 1;        // inverse ratio of accumulator resolution to image resolution
    double minDist_ = 100; // minimum distance between centres of detected circles
    double param1_ = 100;  // higher threshold of Canny edge detection
    double param2_ = 20;   // accumulator threshold; smaller -> more false detections
    int minRad_ = 45;      // minimum radius
    int maxRad_ = 60;      // maximum radius

    centre_im.X = centrex;
    centre_im.Y = centrey;

    // Rolling window used to smooth the detected circle position and radius.
    int windowsize = 20;
    int[] centresx = new int[windowsize];
    int[] centresy = new int[windowsize];
    int[] radi = new int[windowsize];
    OpenCvSharp.Point centre = new OpenCvSharp.Point();
    int radius = 1;
    int buffpos = 0;
    int flag = 0;

    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();

    // Capture/processing loop; runs until the hosting thread is terminated.
    while (true)
    {
        cam.RetrieveBuffer(rawImage);
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;
        image = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap);
        Cv2.CvtColor(image, grey, ColorConversionCodes.BGR2GRAY);

        // Inner circle detection.
        CircleSegment[] circles = Cv2.HoughCircles(grey, HoughMethods.Gradient, dp_, minDist_, param1_, param2_, minRad_, maxRad_);
        for (int i = 0; i < circles.Length; i++)
        {
            // NOTE(review): only circles[0] is consumed, so every extra detection
            // re-inserts the first circle into the smoothing window — confirm intended.
            flag = 1;
            centre.X = Convert.ToInt16(Math.Round(circles[0].Center.X));
            centre.Y = Convert.ToInt16(Math.Round(circles[0].Center.Y));
            radius = Convert.ToInt16(Math.Round(circles[0].Radius));

            // Push the raw detection into the rolling window, then replace the
            // current values with the windowed averages.
            buffpos = (buffpos + 1) % windowsize;
            centresx[buffpos] = centre.X;
            centresy[buffpos] = centre.Y;
            radi[buffpos] = radius;
            centre.X = Convert.ToInt16(centresx.Average());
            centre.Y = Convert.ToInt16(centresy.Average());
            radius = Convert.ToInt16(radi.Average());

            // Offset of the smoothed circle from the image centre.
            differencex = centre.X - centrex;
            differencey = centre.Y - centrey;
            diffenceeucl = Math.Round((Math.Sqrt(Math.Pow(differencex, 2) + Math.Pow(differencey, 2))), 2);
        }

        // Draw the last-known circle (red) once any detection has occurred,
        // and always mark the image centre (blue).
        if (flag == 1)
        {
            Cv2.Circle(image, centre, 3, Scalar.Red);
            Cv2.Circle(image, centre, radius, Scalar.Red, 3);
        }
        Cv2.Circle(image, centre_im, 3, Scalar.DeepSkyBlue);
        Cv2.Circle(image, centre_im, 50, Scalar.DeepSkyBlue, 3);

        string xdiff = differencex.ToString();
        string textxparse = "X Offset: " + xdiff + " [pixels]";
        string ydiff = differencey.ToString();
        string textyparse = "Y Offset: " + ydiff + " [pixels]";
        string eucl = diffenceeucl.ToString();
        string texteucle = "Eucl. Dist: " + eucl + " [pixels]";
        AppendTextBoxX(textxparse);
        AppendTextBoxE(texteucle);
        AppendTextBoxY(textyparse);

        // NOTE(review): a new Bitmap is assigned each frame without disposing the
        // previous flydisp.Image (GDI handle leak), and this runs off the UI
        // thread — confirm flydisp access is marshalled appropriately.
        Bitmap bm = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(image);
        bm.SetResolution(flydisp.Width, flydisp.Height);
        flydisp.Image = bm;
    }
}