/// <summary>
/// Entry point: grabs a fixed number of Format7 images from the first
/// detected camera and saves them as BMP files in the current folder.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();

    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;
    const int k_numImages = 10;

    // Since this application saves images in the current folder
    // we must ensure that we have permission to write to this folder.
    // If we do not have permission, fail right away.
    try
    {
        FileStream fileStream = new FileStream(@"test.txt", FileMode.Create);
        fileStream.Close();
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.\n");
        return;
    }

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // BUG FIX: the original indexed camera 0 unconditionally, which
    // throws when no camera is attached.
    if (numCameras == 0)
    {
        Console.WriteLine("No cameras detected.");
        return;
    }

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);

    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // Pixel format not supported!
        // BUG FIX: originally returned silently and left the camera connected.
        Console.WriteLine("Pixel format is not supported");
        cam.Disconnect();
        return;
    }

    // Use the full image size supported by the selected mode.
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 0;
    fmt7ImageSettings.offsetY = 0;
    fmt7ImageSettings.width = fmt7Info.maxWidth;
    fmt7ImageSettings.height = fmt7Info.maxHeight;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings, ref settingsValid);
    if (!settingsValid)
    {
        // Settings are not valid
        // BUG FIX: originally returned silently and left the camera connected.
        Console.WriteLine("Format7 settings are not valid");
        cam.Disconnect();
        return;
    }

    // Set the settings to the camera
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    // Retrieve frame rate property
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);
    Console.WriteLine("Grabbing {0} images", k_numImages);

    ManagedImage rawImage = new ManagedImage();
    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp
        TimeStamp timeStamp = rawImage.timeStamp;
        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            imageCnt,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Convert the raw image. Bitmaps are only valid if the
        // pixel format of the ManagedImage is RGB or RGBU.
        ManagedImage convertedImage = new ManagedImage();
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        // Create a unique filename
        string filename = String.Format(
            "CustomImageEx_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            imageCnt);

        // BUG FIX: Bitmap.Save(string) ignores the ".bmp" extension and
        // writes PNG for in-memory bitmaps; pass the format explicitly.
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;
        bitmap.Save(filename, System.Drawing.Imaging.ImageFormat.Bmp);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Connects to the camera, configures the requested video mode, optionally
/// opens an MJPG AVI recorder, then launches the IMU sampling timer and the
/// recording and dumping threads.
/// </summary>
/// <param name="camGuid">GUID of the camera to connect to.</param>
/// <param name="vidMode">0: 1600x1200 YUV422 @ 30 fps; 1: 1600x1200 RGB @ 15 fps;
/// 2: 960x540 Format7 Mode4 RGB8 (~24 fps).</param>
/// <param name="displayPicture">PictureBox used for live display.</param>
/// <param name="fileName">Output AVI file name (used when record2file is true).</param>
/// <param name="record2file">Whether to record captured frames to an AVI file.</param>
public void startCapture(ManagedPGRGuid camGuid, int vidMode, System.Windows.Forms.PictureBox displayPicture, String fileName, Boolean record2file)
{
    int i;

    Flag_GravityFound_Y = false; // gravity is not known

    // CLEARING THE FRAME QUEUE NO MATTER WHAT...
    FrameQueue.clear();

    RecordToFile = record2file;

    // creating the GPS data list
    GpsCaptureData = new List<GPSDataInstance>();

    // creating the IMU data List
    IMUCapturedata = new List<IMUDataInstance>();

    // resetting frame index
    FrameIndex = 0;

    // 1. connect to the camera
    Cam.Connect(camGuid);

    int fps_i = 0;
    if (vidMode == 0)
    {
        Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Yuv422, FrameRate.FrameRate30);
        fps_i = 30;
    }
    else if (vidMode == 1)
    {
        Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Rgb, FrameRate.FrameRate15);
        fps_i = 15;
    }
    else if (vidMode == 2)
    {
        Format7ImageSettings fset = new Format7ImageSettings();
        fset.height = 540;
        fset.width = 960;
        fset.offsetX = 40;
        fset.offsetY = 118;
        fset.mode = Mode.Mode4;
        fset.pixelFormat = PixelFormat.PixelFormatRgb8;
        Cam.SetFormat7Configuration(fset, 40.0f); // this equivalent to 24 fps
        fps_i = 24;
    }

    if (RecordToFile)
    {
        // 3. Creating the avi recorder object
        AviRecorder = new ManagedAVIRecorder();
        MJPGOption option = new MJPGOption();
        float fps = (float)fps_i;
        option.frameRate = fps;
        option.quality = 100; // 100 for superb quality
        AviRecorder.AVIOpen(fileName, option);
    }

    // 4. setting the frame buffering option
    // leave it for now...

    // 5. start the capturing
    Cam.StartCapture();

    // MUST discard the first few frames!
    ManagedImage rawImage = new ManagedImage();
    for (i = 0; i < 10; i++)
    {
        Cam.RetrieveBuffer(rawImage);
    }

    // 6. set the display bitmap
    DisplayPicture = displayPicture;

    // 7. starting sampling, recording and dumping threads
    // IMU sampling thread
    IMUSamplingTimer = new PrecisionTimer(.0075, this.IMUSamplingEvent); // sampling frequency at 150 Hz
    RecordingThreadActive = true;
    OutOfRecordingThread = true;
    IMUSamplingTimer.start();
    RecordingThread = new Thread(this.mainLoop);
    //RecordingThread.Priority = ThreadPriority.Highest;
    RecordingThread.Start();

    // creating the thread for the dumping
    DumpingThread = new System.Threading.Thread(this.dumpingLoop);
    while (OutOfRecordingThread)
    {
        // must wait until the recording thread enters the loop, otherwise
        // the dumping will never start!
        // BUG FIX: the original empty busy-wait pinned a full CPU core;
        // sleeping briefly yields the processor while polling the flag.
        // NOTE(review): OutOfRecordingThread is written by another thread —
        // confirm it is declared volatile (or use a signaling primitive).
        Thread.Sleep(1);
    }
    DumpingThread.Start();
}
/// <summary>
/// Set new packet size to camera
/// </summary>
private void ApplyPacketSize()
{
    ManagedCamera camera = (ManagedCamera)m_camera;

    // Read back the active Format7 configuration so it can be re-applied
    // with only the packet size changed.
    Format7ImageSettings activeSettings = new Format7ImageSettings();
    uint activePacketSize = 0;
    float activePercentage = 0.0f;
    try
    {
        camera.GetFormat7Configuration(activeSettings, ref activePacketSize, ref activePercentage);
    }
    catch (FC2Exception ex)
    {
        ShowErrorMessageDialog("Error getting current Format7 configuration", ex);
        ex.Dispose();
        return;
    }

    // Stop the camera before changing settings. If it was not running,
    // remember that so we do not restart it afterwards.
    bool restartRequired = true;
    try
    {
        camera.StopCapture();
    }
    catch (FC2Exception ex)
    {
        if (ex.Type != ErrorType.IsochNotStarted)
        {
            string error = string.Format("Error stopping capture. {0}", ex.Message);
            Debug.WriteLine(error);
            ShowErrorMessageDialog("Error stopping camera.", ex);
            ex.Dispose();
            return;
        }

        // The camera was already stopped; nothing to restart later.
        restartRequired = false;
        ex.Dispose();
    }

    try
    {
        // Apply same F7 settings and new packet size
        camera.SetFormat7Configuration(activeSettings, (uint)m_packetSizeSpinButton.Value);
    }
    catch (FC2Exception settingFormat7Exception)
    {
        ShowErrorMessageDialog("There was an error setting new packet size settings.", settingFormat7Exception);
        settingFormat7Exception.Dispose();
        return;
    }

    // Settings were applied, or reverted to previous mode.
    // Either way, the camera should be able to be restarted successfully.
    if (!restartRequired)
    {
        return;
    }

    try
    {
        // Restart the camera if it was running beforehand.
        camera.StartCapture();
    }
    catch (FC2Exception ex)
    {
        ShowErrorMessageDialog("There was an error restarting the camera.", ex);
        ex.Dispose();
    }
}
/// <summary>
/// Camera worker loop: configures the first detected camera for a 200x200
/// Format7 Mono8 region, then continuously grabs frames, detects a circle
/// with a Hough transform, smooths the detection over a rolling window and
/// displays the annotated frame together with its offset from the image
/// centre.
/// </summary>
private void CaptureCameraCallback()
{
    //initialise camera here
    #region camsetup
    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);

    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // Pixel format not supported!
        Console.WriteLine("Pixel format is not supported");
        return;
    }

    // 200x200 region of interest at sensor offset (1124, 924).
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 1124;
    fmt7ImageSettings.offsetY = 924;
    fmt7ImageSettings.width = 200;
    fmt7ImageSettings.height = 200;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings, ref settingsValid);
    if (!settingsValid)
    {
        // Settings are not valid
        return;
    }

    // Set the settings to the camera
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    // Retrieve frame rate property
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);
    #endregion

    Mat image;
    Mat grey = new Mat();
    int differencex = 0;
    int differencey = 0;
    double diffenceeucl = 0;
    OpenCvSharp.Point centre_im = new OpenCvSharp.Point();
    int centrex = Convert.ToInt16(fmt7ImageSettings.width) / 2;
    // BUG FIX: the vertical centre was computed from width; use height.
    int centrey = Convert.ToInt16(fmt7ImageSettings.height) / 2;

    double dp_ = 1;        // inverse ratio of array accumulator to image resolution
    double minDist_ = 100; // minimum distance between centre of detected circles
    double param1_ = 100;  // Higher threshold of Canny edge detection
    double param2_ = 20;   // Accumulator threshold, smaller value leads to higher false detection rates
    int minRad_ = 45;      // minimum radius
    int maxRad_ = 60;      // maximum radius

    centre_im.X = centrex;
    centre_im.Y = centrey;

    // Rolling window used to smooth the detected centre and radius.
    int windowsize = 20;
    int[] centresx = new int[windowsize];
    int[] centresy = new int[windowsize];
    int[] radi = new int[windowsize];
    OpenCvSharp.Point centre = new OpenCvSharp.Point();
    int radius = 1;
    int buffpos = 0;
    int flag = 0;

    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();

    // do repeated actions here
    // NOTE(review): this loop never terminates; consider a cancellation
    // flag so the thread can be shut down cleanly.
    while (true)
    {
        cam.RetrieveBuffer(rawImage);
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;
        image = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap);
        Cv2.CvtColor(image, grey, ColorConversionCodes.BGR2GRAY);

        // Inner circle
        CircleSegment[] circles = Cv2.HoughCircles(grey, HoughMethods.Gradient, dp_, minDist_, param1_, param2_, minRad_, maxRad_);
        if (circles.Length > 0)
        {
            // BUG FIX: the original looped over every detection but always
            // read circles[0], pushing duplicate samples into the smoothing
            // window. Take the first detection once per frame instead
            // (presumably the strongest candidate — confirm against
            // OpenCV's accumulator ordering).
            flag = 1;
            centre.X = Convert.ToInt16(Math.Round(circles[0].Center.X));
            centre.Y = Convert.ToInt16(Math.Round(circles[0].Center.Y));
            radius = Convert.ToInt16(Math.Round(circles[0].Radius));

            // Push into the ring buffer, then smooth over the window.
            buffpos = (buffpos + 1) % windowsize;
            centresx[buffpos] = centre.X;
            centresy[buffpos] = centre.Y;
            radi[buffpos] = radius;
            centre.X = Convert.ToInt16(centresx.Average());
            centre.Y = Convert.ToInt16(centresy.Average());
            radius = Convert.ToInt16(radi.Average());

            // Offset of the detected circle from the image centre.
            differencex = centre.X - centrex;
            differencey = centre.Y - centrey;
            diffenceeucl = Math.Round((Math.Sqrt(Math.Pow(differencex, 2) + Math.Pow(differencey, 2))), 2);
        }

        if (flag == 1)
        {
            Cv2.Circle(image, centre, 3, Scalar.Red);
            Cv2.Circle(image, centre, radius, Scalar.Red, 3);
        }
        Cv2.Circle(image, centre_im, 3, Scalar.DeepSkyBlue);
        Cv2.Circle(image, centre_im, 50, Scalar.DeepSkyBlue, 3);

        string xdiff = differencex.ToString();
        string textxparse = "X Offset: " + xdiff + " [pixels]";
        string ydiff = differencey.ToString();
        string textyparse = "Y Offset: " + ydiff + " [pixels]";
        string eucl = diffenceeucl.ToString();
        string texteucle = "Eucl. Dist: " + eucl + " [pixels]";
        AppendTextBoxX(textxparse);
        AppendTextBoxE(texteucle);
        AppendTextBoxY(textyparse);

        Bitmap bm = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(image);
        bm.SetResolution(flydisp.Width, flydisp.Height);

        // BUG FIX: dispose the previously displayed bitmap; the original
        // leaked one Bitmap per frame inside an endless loop.
        // NOTE(review): flydisp is touched from this worker thread; WinForms
        // controls are not thread-safe — confirm this runs on the UI thread
        // or marshal the assignment via Invoke.
        System.Drawing.Image previous = flydisp.Image;
        flydisp.Image = bm;
        if (previous != null)
        {
            previous.Dispose();
        }
    }
}
/// <summary>
/// Prepares the camera and calibration state, then launches the
/// calibration thread.
/// </summary>
/// <param name="vidmode">Video mode index (0, 1 or 2).</param>
/// <param name="camGuid">GUID of the camera to calibrate.</param>
public void startCalibration(int vidmode, ManagedPGRGuid camGuid)
{
    VidMode = vidmode;

    // Starting the camera
    // 1. connect to the camera
    Cam.Connect(camGuid);

    // 2. setting up the video mode
    switch (VidMode)
    {
        case 0:
            Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Yuv422, FrameRate.FrameRate30);
            break;
        case 1:
            Cam.SetVideoModeAndFrameRate(VideoMode.VideoMode1600x1200Rgb, FrameRate.FrameRate15);
            break;
        case 2:
            Format7ImageSettings fset = new Format7ImageSettings();
            fset.height = 540;
            fset.width = 960;
            fset.offsetX = 40;
            fset.offsetY = 118;
            fset.mode = Mode.Mode4;
            fset.pixelFormat = PixelFormat.PixelFormatRgb8;
            Cam.SetFormat7Configuration(fset, 40.0f); // this equivalent to 24 fps
            break;
    }

    // worker thread that will run the calibration loop
    CalibrationThread = new Thread(MainLoop);

    // reset capture state
    FrameCounter = 0;
    CalibrationDone = false;

    // storage for the detected image points and their known 3-D positions
    Points = new PointF[FrameCount][];
    ObjectPoints = new MCvPoint3D32f[FrameCount][];

    // The chessboard lies in the z = 0 plane: corner (col, row) sits at
    // (RectWidth * col, RectHeight * row).
    for (int frame = 0; frame < FrameCount; frame++)
    {
        ObjectPoints[frame] = new MCvPoint3D32f[ChessHorizCount * ChessVertCount];
        for (int corner = 0; corner < ChessVertCount * ChessHorizCount; corner++)
        {
            ObjectPoints[frame][corner].x = (float)(RectWidth * (corner % ChessHorizCount));
            ObjectPoints[frame][corner].y = (float)(RectHeight * (corner / ChessHorizCount));
            ObjectPoints[frame][corner].z = 0;
        }
    }

    // start streaming, flag the state, and run the calibration thread
    Cam.StartCapture();
    state = ST_CALIBRATING;
    CalibrationThread.Start();
}