/// <summary>
/// Main version of capture: grabs a single frame from the camera, converts it
/// to BGR and wraps it in an Emgu image, optionally rotating it.
/// </summary>
/// <param name="camera">Camera to grab from; may be null, in which case null is returned.</param>
/// <param name="Angle">Rotation to apply in degrees; 0 means no rotation.</param>
/// <returns>The converted (and optionally rotated) frame, or null when the camera
/// is missing or the grab failed.</returns>
public Image<Bgr, byte> capture(ManagedGigECamera camera, int Angle) //, bool cropOn)
{
    Image<Bgr, byte> bmp = null;
    if (camera == null)
    {
        return bmp;
    }

    try
    {
        camera.RetrieveBuffer(m_rawImage);
    }
    catch (FC2Exception ex)
    {
        // Fix: the original fell through here and converted whatever stale data
        // was left in m_rawImage; bail out instead so callers see a failed grab.
        Debug.WriteLine("Error: " + ex.Message);
        return null;
    }

    // NOTE(review): lock (this) is fragile (external code can lock the same
    // object); kept as-is because sibling members synchronize on it too.
    lock (this)
    {
        m_rawImage.Convert(FlyCapture2Managed.PixelFormat.PixelFormatBgr, m_processedImage);
        bmp = new Image<Bgr, byte>(m_processedImage.bitmap);
    }

    if (Angle != 0)
    {
        // bmp is already Image<Bgr, byte>; the redundant Convert<Bgr, Byte>() was dropped.
        bmp = bmp.Rotate(Angle, new Bgr(0, 0, 0), false);
    }

    return bmp;
}
/// <summary>
/// BackgroundWorker grab loop: keeps pulling frames from the camera until
/// m_grabImages is cleared, then signals m_grabThreadExited.
/// </summary>
/// <param name="sender">The BackgroundWorker that runs this loop.</param>
/// <param name="e">Unused DoWork arguments.</param>
private void GrabLoop(object sender, DoWorkEventArgs e)
{
    BackgroundWorker worker = sender as BackgroundWorker;
    if (worker == null)
    {
        // Fix: the original dereferenced the cast result unconditionally and
        // would throw NullReferenceException if wired to a non-worker sender.
        m_grabThreadExited.Set();
        return;
    }

    while (m_grabImages)
    {
        try
        {
            m_camera.RetrieveBuffer(m_rawImage);
        }
        catch (FC2Exception ex)
        {
            // A failed grab is not fatal; log it and try again.
            Debug.WriteLine("Error: " + ex.Message);
            continue;
        }

        // m_processedImage is shared with the UI thread, hence the lock.
        lock (this)
        {
            m_rawImage.Convert(PixelFormat.PixelFormatBgr, m_processedImage);
        }

        // Notify the UI that a fresh frame is available.
        worker.ReportProgress(0);
    }

    m_grabThreadExited.Set();
}
/// <summary>
/// Processes one incoming frame: converts it to BGR, then either triggers a
/// gain/shutter correction pass or publishes the frame as the new result image.
/// </summary>
/// <param name="managedImage">The raw frame just received from the camera.</param>
private void ActionWork(ManagedImage managedImage)
{
    // Keep a copy of the previous processed frame before overwriting it.
    m_oldManagedIMage = new ManagedImage(m_processedManagedImage);
    m_processedManagedImage = new ManagedImage();
    managedImage.Convert(FlyCapture2Managed.PixelFormat.PixelFormatBgr, m_processedManagedImage);

    // Embedded metadata reports the gain/shutter the camera actually used.
    uint gain = managedImage.imageMetadata.embeddedGain, shutter = managedImage.imageMetadata.embeddedShutter;

    // NOTE(review): correction only runs when BOTH gain AND shutter changed;
    // if a change in either one alone should trigger it, this needs || — confirm.
    bool isNeedCorrect = (m_oldGain != gain) && (m_oldShutter != shutter);
    if (isNeedCorrect)
    {
        CalcIMage();
    }
    else
    {
        // No correction needed: publish the frame as a frozen WriteableBitmap
        // so it can be handed across to the UI thread.
        ManagedImage temp = new ManagedImage();
        m_processedManagedImage.ConvertToWriteAbleBitmap(temp);
        temp.writeableBitmap.Freeze();
        m_newResultImage = temp.writeableBitmap;
        OnNewResultImageIsReady();
    }

    m_oldGain = gain;
    m_oldShutter = shutter;

    if (this.IsNeedSaveAllImages)
    {
        // Fire-and-forget save so the grab path is not blocked by disk I/O.
        Task.Run(() => SaveImage());
    }
}
/// <summary>
/// Frame callback: converts the grabbed image to RGB and copies the raw pixel
/// bytes into the shared pixelData buffer.
/// </summary>
/// <param name="image">The frame delivered by the camera driver.</param>
private void OnImageGrabbed(ManagedImage image)
{
    // NOTE(review): busy-wait spin on the 'accessing' flag — burns a CPU core
    // and, without volatile/lock semantics, is not a safe synchronization
    // primitive. Consider a lock or SemaphoreSlim — confirm how 'accessing'
    // is written by the consumer side.
    while (accessing)
    {
    }
    image.Convert(PixelFormat.PixelFormatRgb, processedImage);
    unsafe
    {
        // processedImage.data is an unmanaged pointer; copy its contents into
        // the managed byte array for downstream consumers.
        Marshal.Copy((IntPtr)processedImage.data, pixelData, 0, pixelData.Length);
    }
}
/// <summary>
/// Fires a software trigger, waits for the resulting frame and returns it
/// converted to the Raw8 pixel format.
/// </summary>
/// <returns>A newly allocated Raw8 copy of the triggered frame.</returns>
private ManagedImage GrabImage()
{
    // Trigger the camera, then block until the frame arrives.
    var rawFrame = new ManagedImage();
    FireSoftwareTrigger(m_camera);
    m_camera.RetrieveBuffer(rawFrame);

    // Hand back a Raw8 conversion of that frame.
    var raw8Frame = new ManagedImage();
    rawFrame.Convert(PixelFormat.PixelFormatRaw8, raw8Frame);
    return raw8Frame;
}
/// <summary>
/// Spinnaker image-event callback: converts each complete frame to BGR8 and
/// enqueues it (bounded at 10 entries) for downstream processing.
/// </summary>
/// <param name="image">The frame delivered by the camera stream.</param>
override protected void OnImageEvent(ManagedImage image)
{
    try
    {
        if (!image.IsIncomplete)
        {
            // Convert image
            using (var temp = image.Convert(PixelFormatEnums.BGR8))
            {
                if (_imageQueue.Count <= 10)
                {
                    _imageQueue.Enqueue(
                        new PtGreyCameraImage
                    {
                        FrameId = image.FrameID,
                        TimeStamp = image.TimeStamp,
                        Image = new System.Drawing.Bitmap(temp.bitmap)
                    });
                }
                else
                {
                    Debug.WriteLine("Dropped frame");
                }
            }
        }
    }
    catch (SpinnakerException ex)
    {
        // Fix: Debug.WriteLine("Error: {0}", ex.Message) resolved to the
        // (message, category) overload, so the literal "Error: {0}" was
        // printed and the message became the category.
        Debug.WriteLine("Error: " + ex.Message);
    }
    catch (Exception ex1)
    {
        Debug.WriteLine("Error: " + ex1.Message);
    }
    finally
    {
        // Must manually release the image to prevent buffers on the camera
        // stream from filling up — moved into finally so it also runs when
        // the conversion or enqueue throws.
        image.Release();
    }
}
/// <summary>
/// Single-shot grab: starts capture, retrieves one frame, converts it to BGR,
/// records its size and timestamp, and stops capture again.
/// </summary>
/// <returns>The bitmap of the last successfully processed frame; on failure the
/// previous contents of m_processedImage are returned unchanged.</returns>
public Bitmap GrabImage()
{
    try
    {
        if (m_camera.IsConnected())
        {
            m_camera.StartCapture();
            try
            {
                m_camera.RetrieveBuffer(m_rawImage);
                m_rawImage.Convert(PixelFormat.PixelFormatBgr, m_processedImage);
                imageSize.Width = (int)m_rawImage.cols;
                imageSize.Height = (int)m_rawImage.rows;
                timeStamp = m_rawImage.timeStamp;
            }
            finally
            {
                // Fix: the original skipped StopCapture when RetrieveBuffer or
                // Convert threw, leaving the camera streaming.
                m_camera.StopCapture();
            }
        }
    }
    catch (FC2Exception ex)
    {
        Debug.WriteLine("Error: " + ex.Message);
    }
    return m_processedImage.bitmap;
}
/// <summary>
/// Builds the shared observable acquisition source: on subscription it connects
/// to the camera at Index, configures buffered grabbing, then streams frames as
/// FlyCaptureDataFrame items until the subscription is cancelled.
/// </summary>
public FlyCapture()
{
    NumBuffers = 10;
    GrabMode = GrabMode.BufferFrames;
    ColorProcessing = ColorProcessingAlgorithm.Default;
    source = Observable.Create <FlyCaptureDataFrame>((observer, cancellationToken) =>
    {
        // Long-running dedicated thread; captureLock serializes camera access
        // across re-subscriptions.
        return (Task.Factory.StartNew(() =>
        {
            lock (captureLock)
            {
                // Resolve the camera GUID and connect.
                ManagedCamera camera;
                using (var manager = new ManagedBusManager())
                {
                    var guid = manager.GetCameraFromIndex((uint)Index);
                    camera = new ManagedCamera();
                    camera.Connect(guid);
                }

                // capture: 0 = stopped, 1 = streaming (flipped via Interlocked
                // from the cancellation callback below).
                var capture = 0;
                // NOTE(review): numBuffers local is never read afterwards.
                var numBuffers = NumBuffers;
                var config = camera.GetConfiguration();
                config.grabMode = GrabMode;
                config.numBuffers = (uint)NumBuffers;
                config.highPerformanceRetrieveBuffer = true;
                camera.SetConfiguration(config);
                try
                {
                    var colorProcessing = ColorProcessing;
                    using (var image = new ManagedImage())
                    using (var notification = cancellationToken.Register(() =>
                    {
                        // Cancellation path: mark stopped first so the grab
                        // loop interprets the resulting FC2Exception as a stop.
                        Interlocked.Exchange(ref capture, 0);
                        camera.StopCapture();
                    }))
                    {
                        camera.StartCapture();
                        Interlocked.Exchange(ref capture, 1);
                        while (!cancellationToken.IsCancellationRequested)
                        {
                            IplImage output;
                            BayerTileFormat bayerTileFormat;
                            try
                            {
                                camera.RetrieveBuffer(image);
                            }
                            catch (FC2Exception)
                            {
                                // StopCapture from the cancel callback makes the
                                // pending RetrieveBuffer throw; that is expected.
                                if (capture == 0)
                                {
                                    break;
                                }
                                else
                                {
                                    throw;
                                }
                            }

                            var raw16 = image.pixelFormat == PixelFormat.PixelFormatRaw16;
                            // Mono frames, and raw frames that need no demosaic,
                            // are copied straight through as single-channel.
                            if (image.pixelFormat == PixelFormat.PixelFormatMono8 ||
                                image.pixelFormat == PixelFormat.PixelFormatMono16 ||
                                ((image.pixelFormat == PixelFormat.PixelFormatRaw8 || raw16) &&
                                 (image.bayerTileFormat == BayerTileFormat.None ||
                                  colorProcessing == ColorProcessingAlgorithm.NoColorProcessing)))
                            {
                                unsafe
                                {
                                    bayerTileFormat = image.bayerTileFormat;
                                    var depth = image.pixelFormat == PixelFormat.PixelFormatMono16 || raw16 ? IplDepth.U16 : IplDepth.U8;
                                    // Wrap the driver buffer, then deep-copy it so
                                    // the emitted frame outlives the next grab.
                                    var bitmapHeader = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 1, new IntPtr(image.data));
                                    output = new IplImage(bitmapHeader.Size, bitmapHeader.Depth, bitmapHeader.Channels);
                                    CV.Copy(bitmapHeader, output);
                                }
                            }
                            else
                            {
                                unsafe
                                {
                                    // Color path: demosaic/convert directly into the
                                    // output IplImage's pixel buffer.
                                    bayerTileFormat = BayerTileFormat.None;
                                    var depth = raw16 ? IplDepth.U16 : IplDepth.U8;
                                    var format = raw16 ? PixelFormat.PixelFormatBgr16 : PixelFormat.PixelFormatBgr;
                                    output = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 3);
                                    using (var convertedImage = new ManagedImage(
                                               (uint)output.Height, (uint)output.Width, (uint)output.WidthStep,
                                               (byte *)output.ImageData.ToPointer(),
                                               (uint)(output.WidthStep * output.Height), format))
                                    {
                                        convertedImage.colorProcessingAlgorithm = colorProcessing;
                                        image.Convert(format, convertedImage);
                                    }
                                }
                            }
                            observer.OnNext(new FlyCaptureDataFrame(output, image.imageMetadata, bayerTileFormat));
                        }
                    }
                }
                finally
                {
                    // Stop only if we got as far as StartCapture, then tear down.
                    if (capture != 0)
                    {
                        camera.StopCapture();
                    }
                    camera.Disconnect();
                    camera.Dispose();
                }
            }
        }, cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Default));
    })
    .PublishReconnectable()
    .RefCount();
}
/// <summary>
/// Connects to the GigE camera identified by <paramref name="guid"/>, configures
/// full-frame Mono8 streaming with embedded timestamps, grabs k_numImages frames
/// and saves each one as a BMP in the current directory.
/// </summary>
/// <param name="guid">Bus identifier of the camera to run.</param>
void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedGigECamera cam = new ManagedGigECamera();

    // Connect to a camera
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    uint numStreamChannels = cam.GetNumStreamChannels();
    for (uint i = 0; i < numStreamChannels; i++)
    {
        PrintStreamChannelInfo(cam.GetGigEStreamChannelInfo(i));
    }

    GigEImageSettingsInfo imageSettingsInfo = cam.GetGigEImageSettingsInfo();

    // Stream the full sensor area in Mono8.
    GigEImageSettings imageSettings = new GigEImageSettings();
    imageSettings.offsetX = 0;
    imageSettings.offsetY = 0;
    imageSettings.height = imageSettingsInfo.maxHeight;
    imageSettings.width = imageSettingsInfo.maxWidth;
    imageSettings.pixelFormat = PixelFormat.PixelFormatMono8;
    cam.SetGigEImageSettings(imageSettings);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    ManagedImage rawImage = new ManagedImage();

    // Fix: allocate the conversion target once instead of once per frame
    // (the original created a new ManagedImage on every iteration without
    // disposing it).
    ManagedImage convertedImage = new ManagedImage();

    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp
        TimeStamp timeStamp = rawImage.timeStamp;

        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            imageCnt,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Convert the raw image to BGR so the .bitmap accessor below is valid.
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        // Create a unique filename
        string filename = String.Format(
            "GigEGrabEx_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            imageCnt);

        // Bitmaps are only valid if the pixel format of the ManagedImage is
        // RGB or RGBU.
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;

        // Save the image
        bitmap.Save(filename);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();
}
/// <summary>
/// Connects to the GigE camera identified by <paramref name="guid"/>, configures
/// full-frame Mono8 streaming with embedded timestamps, grabs k_numImages frames
/// and saves each one as a BMP in the current directory.
/// </summary>
/// <param name="guid">Bus identifier of the camera to run.</param>
void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedGigECamera cam = new ManagedGigECamera();

    // Connect to a camera
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    uint numStreamChannels = cam.GetNumStreamChannels();
    for (uint i = 0; i < numStreamChannels; i++)
    {
        PrintStreamChannelInfo(cam.GetGigEStreamChannelInfo(i));
    }

    GigEImageSettingsInfo imageSettingsInfo = cam.GetGigEImageSettingsInfo();

    // Stream the full sensor area in Mono8.
    GigEImageSettings imageSettings = new GigEImageSettings();
    imageSettings.offsetX = 0;
    imageSettings.offsetY = 0;
    imageSettings.height = imageSettingsInfo.maxHeight;
    imageSettings.width = imageSettingsInfo.maxWidth;
    imageSettings.pixelFormat = PixelFormat.PixelFormatMono8;
    cam.SetGigEImageSettings(imageSettings);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    ManagedImage rawImage = new ManagedImage();

    // Fix: allocate the conversion target once instead of once per frame
    // (the original created a new ManagedImage on every iteration without
    // disposing it).
    ManagedImage convertedImage = new ManagedImage();

    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp
        TimeStamp timeStamp = rawImage.timeStamp;

        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            imageCnt,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Convert the raw image to BGR so the .bitmap accessor below is valid.
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        // Create a unique filename
        string filename = String.Format(
            "GigEGrabEx_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            imageCnt);

        // Bitmaps are only valid if the pixel format of the ManagedImage is
        // RGB or RGBU.
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;

        // Save the image
        bitmap.Save(filename);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();
}
/// <summary>
/// Calibration loop: while in ST_CALIBRATING, grabs frames, searches each for a
/// chessboard pattern, refines and records the detected corners, and switches to
/// ST_IDLE and runs calibrate() once FrameCount good frames were collected.
/// </summary>
public void MainLoop()
{
    // Managed Image MUST BE OUT OF THE LOOP! (For some reason...)
    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();
    while (state == ST_CALIBRATING)
    {
        // retrieving an image using the Flea3 API
        Cam.RetrieveBuffer(rawImage);

        // Convert the raw image to a System.Drawing.Bitmap
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;

        // Wrap the bitmap as an OpenCV frame, then grayscale it for detection.
        Image<Bgr, Byte> frame = new Image<Bgr, byte>(bitmap);
        Image<Gray, Byte> grayFrame = frame.Convert<Gray, Byte>();

        // DrawImage is the copy the corner overlay gets painted onto.
        DrawImage = grayFrame.Clone();

        // Only corners[0] is used; the other two slots stay null.
        PointF[][] corners = new PointF[3][];

        // FAST_CHECK makes frames with no visible board fail quickly.
        corners[0] = CameraCalibration.FindChessboardCorners(grayFrame,
                                                             new Size(ChessHorizCount, ChessVertCount),
                                                             Emgu.CV.CvEnum.CALIB_CB_TYPE.FAST_CHECK);
        // null result means no board was found in this frame.
        bool result = !(corners[0] == null);
        if (result)
        {
            FrameCounter++;

            // finding corners with sub pixel accuracy
            grayFrame.FindCornerSubPix(corners, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(0.01));

            // Overlay the detected corner pattern for visual feedback.
            CameraCalibration.DrawChessboardCorners(DrawImage,
                                                    new Size(ChessHorizCount, ChessVertCount),
                                                    corners[0]);

            // adding the detected points to the list
            Points[FrameCounter - 1] = corners[0];
        }

        // assiging the image to the display box (so that it shows)
        DisplayBox.Image = (Image)DrawImage.Bitmap;

        if (FrameCounter >= FrameCount)
        {
            state = ST_IDLE;
            calibrate();
            Console.WriteLine("Calibration now is complete. You may NOW kill the thread!");
        }
    }
}
/// <summary>
/// Spinnaker image-event callback: publishes every complete frame to the UI
/// and, while measuring, also enqueues a BGR8 copy (bounded at 100 entries)
/// for background processing.
/// </summary>
/// <param name="image">The frame delivered by the camera stream.</param>
override protected void OnImageEvent(ManagedImage image)
{
    //Debug.WriteLine("OnImageEvent");
    try
    {
        // NOTE(review): requesting dispatcher shutdown and then calling
        // Dispatcher.Run() on every image event looks like leftover debris —
        // confirm it is intentional before removing.
        Dispatcher dispatcher = Dispatcher.CurrentDispatcher;
        dispatcher.BeginInvokeShutdown(DispatcherPriority.Normal);
        Dispatcher.Run();
        if (!image.IsIncomplete)
        {
            // Convert to a frozen BitmapSource clone so it can cross threads.
            image.ConvertToBitmapSource(PixelFormatEnums.BGR8, managedImageConverted);
            BitmapSource img = managedImageConverted.bitmapsource.Clone();
            img.Freeze();
            RaiseImageChangedEvent(new ImageEventArgs(img));
            if (_measuring && _imageQueue.Count <= 100)
            {
                imgCounter++;
                lock (_locker)
                {
                    var temp = image.Convert(PixelFormatEnums.BGR8);
                    // FrameId is our own counter; TimeStamp is milliseconds
                    // since the Unix epoch (UTC).
                    _imageQueue.Enqueue(new PtGreyCameraImage
                    {
                        FrameId = (ulong)imgCounter,
                        TimeStamp = (ulong)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalMilliseconds,
                        Image = new System.Drawing.Bitmap(temp.bitmap)
                    }
                                        );
                }
                RaiseImageEnqueuedEvent(new ImageEnqueuedEventArgs(imgCounter));
                // Wake the consumer waiting on the queue.
                _wh.Set();
                Debug.WriteLine("enqueue frame: {0}", imgCounter);
            }
            else
            {
                //Debug.WriteLine("Dropped frame");
            }
        }
        // Release so buffers on the camera stream do not fill up.
        image.Release();
    }
    catch (SpinnakerException ex)
    {
        // NOTE(review): this binds to the (message, category) overload of
        // Debug.WriteLine, so it prints the literal "Error: {0}" with the
        // message as category — confirm and switch to concatenation.
        Debug.WriteLine("Error: {0}", ex.Message);
    }
    catch (Exception ex1)
    {
        Debug.WriteLine("Error: {0}", ex1.Message);
    }
    finally
    {
        // NOTE(review): forcing a GC per frame is an anti-pattern; presumably
        // added to cope with Bitmap churn — confirm before removing.
        GC.Collect();
    }
}
/// <summary>
/// Format7 sample entry point: verifies write permission in the working folder,
/// connects to the first camera, configures full-frame Mono8 Format7 mode with
/// embedded timestamps, grabs ten frames and saves each as a BMP.
/// </summary>
static void Main(string[] args)
{
    PrintBuildInfo();
    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;
    const int k_numImages = 10;

    // Since this application saves images in the current folder
    // we must ensure that we have permission to write to this folder.
    // If we do not have permission, fail right away.
    FileStream fileStream;
    try
    {
        fileStream = new FileStream(@"test.txt", FileMode.Create);
        fileStream.Close();
        File.Delete("test.txt");
    }
    catch
    {
        Console.WriteLine("Failed to create file in current folder. Please check permissions.\n");
        return;
    }

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);

    // Use the first camera on the bus.
    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);
    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // Pixel format not supported!
        return;
    }

    // Full-frame region in the requested pixel format.
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 0;
    fmt7ImageSettings.offsetY = 0;
    fmt7ImageSettings.width = fmt7Info.maxWidth;
    fmt7ImageSettings.height = fmt7Info.maxHeight;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings,
        ref settingsValid);
    if (settingsValid != true)
    {
        // Settings are not valid
        return;
    }

    // Set the settings to the camera
    cam.SetFormat7Configuration(
        fmt7ImageSettings,
        fmt7PacketInfo.recommendedBytesPerPacket);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    // Retrieve frame rate property
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);
    Console.WriteLine("Grabbing {0} images", k_numImages);

    ManagedImage rawImage = new ManagedImage();
    for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp
        TimeStamp timeStamp = rawImage.timeStamp;

        Console.WriteLine(
            "Grabbed image {0} - {1} {2} {3}",
            imageCnt,
            timeStamp.cycleSeconds,
            timeStamp.cycleCount,
            timeStamp.cycleOffset);

        // Create a converted image
        ManagedImage convertedImage = new ManagedImage();

        // Convert the raw image
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

        // Create a unique filename
        string filename = String.Format(
            "CustomImageEx_CSharp-{0}-{1}.bmp",
            camInfo.serialNumber,
            imageCnt);

        // Get the Bitmap object. Bitmaps are only valid if the pixel format
        // of the ManagedImage is RGB or RGBU.
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;

        // Save the image
        bitmap.Save(filename);
    }

    // Stop capturing images
    cam.StopCapture();

    // Disconnect the camera
    cam.Disconnect();

    Console.WriteLine("Done! Press any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Builds the shared observable acquisition source: on subscription it powers
/// up the camera at Index, configures frame rate, output voltage, embedded
/// frame info and exposure, then streams frames as FlyCaptureDataFrame items
/// until the subscription is cancelled, powering the camera down on exit.
/// </summary>
public FlyCapture()
{
    ColorProcessing = ColorProcessingAlgorithm.Default;
    source = Observable.Create <FlyCaptureDataFrame>((observer, cancellationToken) =>
    {
        // Long-running dedicated thread; captureLock serializes camera access
        // across re-subscriptions.
        return (Task.Factory.StartNew(() =>
        {
            lock (captureLock)
            {
                ManagedCamera camera;
                using (var manager = new ManagedBusManager())
                {
                    var guid = manager.GetCameraFromIndex((uint)Index);
                    camera = new ManagedCamera();
                    camera.Connect(guid);

                    // Power on the camera
                    const uint CameraPower = 0x610;
                    const uint CameraPowerValue = 0x80000000;
                    camera.WriteRegister(CameraPower, CameraPowerValue);

                    // Wait for camera to complete power-up (poll the register
                    // until the power bit reads back set).
                    const Int32 MillisecondsToSleep = 100;
                    uint cameraPowerValueRead = 0;
                    do
                    {
                        Thread.Sleep(MillisecondsToSleep);
                        cameraPowerValueRead = camera.ReadRegister(CameraPower);
                    }while ((cameraPowerValueRead & CameraPowerValue) == 0);
                }

                // capture: 0 = stopped, 1 = streaming (flipped via Interlocked
                // from the cancellation callback below).
                var capture = 0;
                try
                {
                    // Set frame rate
                    var prop = new CameraProperty(PropertyType.FrameRate);
                    prop.absControl = true;
                    prop.absValue = FramesPerSecond;
                    prop.autoManualMode = false;
                    prop.onOff = true;
                    camera.SetProperty(prop);

                    // Enable/disable blackfly pull up
                    const uint pullUp = 0x19D0;
                    if (EnableBlackflyOutputVoltage)
                    {
                        camera.WriteRegister(pullUp, 0x10000001);
                    }
                    else
                    {
                        camera.WriteRegister(pullUp, 0x10000000);
                    }

                    // Acquisition parameters; the negations/copies seed the
                    // change-detection below so the first loop pass applies
                    // the current settings.
                    var colorProcessing = ColorProcessing;
                    var autoExposure = !AutoExposure; // Horrible hack to trigger update inititally
                    var shutter = Shutter;
                    var gain = Gain;

                    // Configure embedded info: bit 6 = frame counter, bit 0 =
                    // timestamp.
                    const uint embeddedInfo = 0x12F8;
                    uint embeddedInfoState = camera.ReadRegister(embeddedInfo);
                    if (EnableEmbeddedFrameCounter)
                    {
                        embeddedInfoState |= (uint)1 << 6;
                    }
                    else
                    {
                        embeddedInfoState &= ~((uint)1 << 6);
                    }
                    if (EnableEmbeddedFrameTimeStamp)
                    {
                        embeddedInfoState |= (uint)1 << 0;
                    }
                    else
                    {
                        embeddedInfoState &= ~((uint)1 << 0);
                    }
                    camera.WriteRegister(embeddedInfo, embeddedInfoState);

                    using (var image = new ManagedImage())
                    using (var notification = cancellationToken.Register(() =>
                    {
                        // Cancellation path: mark stopped first so the grab
                        // loop interprets the resulting FC2Exception as a stop.
                        Interlocked.Exchange(ref capture, 0);
                        camera.StopCapture();
                    }))
                    {
                        camera.StartCapture();
                        Interlocked.Exchange(ref capture, 1);
                        while (!cancellationToken.IsCancellationRequested)
                        {
                            IplImage output;
                            BayerTileFormat bayerTileFormat;

                            // Exposure switched to automatic: hand shutter and
                            // gain control back to the camera.
                            if (autoExposure != AutoExposure && AutoExposure)
                            {
                                prop = new CameraProperty(PropertyType.AutoExposure);
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                autoExposure = AutoExposure;

                                // Shutter
                                prop = new CameraProperty(PropertyType.Shutter);
                                prop.absControl = true;
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);

                                // Gain
                                prop = new CameraProperty(PropertyType.Gain);
                                prop.absControl = true;
                                prop.autoManualMode = true;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                autoExposure = AutoExposure;
                            }
                            else if (autoExposure != AutoExposure && !AutoExposure)
                            {
                                // Switched to manual: poison the cached values so
                                // the blocks below re-apply Shutter and Gain.
                                shutter = -0.1f; // Hack
                                gain = -0.1f;
                                autoExposure = AutoExposure;
                            }

                            if (shutter != Shutter && !AutoExposure)
                            {
                                // Map the normalized Shutter setting onto the
                                // camera's absolute shutter range.
                                var info = camera.GetPropertyInfo(PropertyType.Shutter);
                                var delta = info.absMax - info.absMin;
                                prop = new CameraProperty(PropertyType.Shutter);
                                prop.absControl = true;
                                prop.absValue = Shutter * delta + info.absMin;
                                prop.autoManualMode = false;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                shutter = Shutter;
                            }

                            if (gain != Gain && !AutoExposure)
                            {
                                // NOTE(review): this queries the SHUTTER range to
                                // scale Gain — presumably a copy/paste slip for
                                // PropertyType.Gain; confirm before changing.
                                var info = camera.GetPropertyInfo(PropertyType.Shutter);
                                var delta = info.absMax - info.absMin;
                                prop = new CameraProperty(PropertyType.Gain);
                                prop.absControl = true;
                                prop.absValue = Gain * delta + info.absMin;;
                                prop.autoManualMode = false;
                                prop.onOff = true;
                                camera.SetProperty(prop);
                                gain = Gain;
                            }

                            try
                            {
                                camera.RetrieveBuffer(image);
                            }
                            catch (FC2Exception ex)
                            {
                                // StopCapture from the cancel callback makes the
                                // pending RetrieveBuffer throw; that is expected.
                                if (capture == 0)
                                {
                                    break;
                                }
                                else if (IgnoreImageConsistencyError && ex.CauseType == ErrorType.ImageConsistencyError)
                                {
                                    continue;
                                }
                                else
                                {
                                    throw;
                                }
                            }

                            // Mono frames, and raw frames that need no demosaic,
                            // are copied straight through as single-channel.
                            if (image.pixelFormat == PixelFormat.PixelFormatMono8 ||
                                image.pixelFormat == PixelFormat.PixelFormatMono16 ||
                                (image.pixelFormat == PixelFormat.PixelFormatRaw8 &&
                                 (image.bayerTileFormat == BayerTileFormat.None ||
                                  colorProcessing == ColorProcessingAlgorithm.NoColorProcessing)))
                            {
                                unsafe
                                {
                                    bayerTileFormat = image.bayerTileFormat;
                                    var depth = image.pixelFormat == PixelFormat.PixelFormatMono16 ? IplDepth.U16 : IplDepth.U8;
                                    // Wrap the driver buffer, then deep-copy it so
                                    // the emitted frame outlives the next grab.
                                    var bitmapHeader = new IplImage(new Size((int)image.cols, (int)image.rows), depth, 1, new IntPtr(image.data));
                                    output = new IplImage(bitmapHeader.Size, bitmapHeader.Depth, bitmapHeader.Channels);
                                    CV.Copy(bitmapHeader, output);
                                }
                            }
                            else
                            {
                                unsafe
                                {
                                    // Color path: demosaic/convert directly into
                                    // the output IplImage's pixel buffer.
                                    bayerTileFormat = BayerTileFormat.None;
                                    output = new IplImage(new Size((int)image.cols, (int)image.rows), IplDepth.U8, 3);
                                    using (var convertedImage = new ManagedImage(
                                               (uint)output.Height, (uint)output.Width, (uint)output.WidthStep,
                                               (byte *)output.ImageData.ToPointer(),
                                               (uint)(output.WidthStep * output.Height),
                                               PixelFormat.PixelFormatBgr))
                                    {
                                        convertedImage.colorProcessingAlgorithm = colorProcessing;
                                        image.Convert(PixelFormat.PixelFormatBgr, convertedImage);
                                    }
                                }
                            }
                            observer.OnNext(new FlyCaptureDataFrame(output, image.imageMetadata, bayerTileFormat));
                        }
                    }
                }
                finally
                {
                    // Power off the camera
                    const uint CameraPower = 0x610;
                    const uint CameraPowerValue = 0x00000000;
                    camera.WriteRegister(CameraPower, CameraPowerValue);
                    if (capture != 0)
                    {
                        camera.StopCapture();
                    }
                    camera.Disconnect();
                    camera.Dispose();
                }
            }
        }, cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Default));
    })
    .PublishReconnectable()
    .RefCount();
}
/// <summary>
/// Builds the result image for the current frame: normalizes it to Mono8 or BGR,
/// copies the pixels under the cursor-selection rectangle for analysis, paints a
/// black border around that rectangle, and publishes the frame as a frozen
/// BitmapSource.
/// </summary>
/// <param name="mImg">The frame to process.</param>
private unsafe void InitializeNewResultImage(ManagedImage mImg)
{
    //byte* managedImageDataPtr = mImg.data;
    // NOTE(review): managedImageDataPtr is presumably a field assigned elsewhere
    // (the local declaration above is commented out); dataPtr itself is never
    // used below — confirm whether both can be removed.
    IntPtr dataPtr = new IntPtr(managedImageDataPtr);
    m_pixelsFormat = mImg.pixelFormat;
    ManagedImage manImg = new ManagedImage();
    // Collapse all mono variants to Mono8; everything else becomes BGR.
    if (m_pixelsFormat == PixelFormat.PixelFormatMono12 || m_pixelsFormat == PixelFormat.PixelFormatMono16 ||
        m_pixelsFormat == PixelFormat.PixelFormatMono8 || m_pixelsFormat == PixelFormat.PixelFormatSignedMono16)
    {
        mImg.Convert(PixelFormat.PixelFormatMono8, manImg);
    }
    else
    {
        mImg.Convert(PixelFormat.PixelFormatBgr, manImg);
    }
    int height = (int)manImg.rows, width = (int)manImg.cols, stride = (int)manImg.stride,
        dataSize = height * stride, bpp = (int)manImg.bitsPerPixel / 8;
    byte *imageData = manImg.data;

    // Map the cursor position (in control coordinates) to pixel coordinates.
    Point cursorPosition = this.CurrentCursorPosition;
    int xRec = (int)(cursorPosition.X / this.VideoStreamAreaActualWidth * width),
        yRec = (int)(cursorPosition.Y / this.VideoStreamAreaActualHeight * height);
    Int32Rect rec = GetValidRectangle(xRec, yRec, width, height);
    m_currentRec = rec;

    // Copy the pixels under the selection rectangle for downstream analysis.
    byte[] currBytes = new byte[rec.Height * rec.Width * bpp];
    ManagedImage managImgForWrbmp = new ManagedImage();
    manImg.ConvertToWriteAbleBitmap(managImgForWrbmp);
    WriteableBitmap bmpTemp = managImgForWrbmp.writeableBitmap;
    bmpTemp.CopyPixels(rec, currBytes, rec.Width * bpp, 0);
    m_currentBytes = currBytes;
    OnSetNeededCameraProperty();
    OnCurrentBytesUpdated();

    // Paint a black border around the selection rectangle directly into the
    // image buffer (all three channels for BGR, single byte otherwise).
    // NOTE(review): the loops run through xEnd/yEnd INCLUSIVE, i.e. one pixel
    // past the rectangle; if the rectangle touches the image edge this writes
    // out of bounds — confirm GetValidRectangle guarantees a margin.
    int xBegin = rec.X, xEnd = rec.X + rec.Width, yBegin = rec.Y, yEnd = rec.Y + rec.Height;
    for (int xPix = xBegin; xPix <= xEnd; xPix++)
    {
        for (int yPix = yBegin; yPix <= yEnd; yPix++)
        {
            if (yPix == yBegin || yPix == yEnd || xPix == xBegin || xPix == xEnd)
            {
                int pixelPosition = yPix * stride + xPix * bpp;
                if (manImg.pixelFormat == PixelFormat.PixelFormatBgr)
                {
                    imageData[pixelPosition] = 0;
                    imageData[pixelPosition + 1] = 0;
                    imageData[pixelPosition + 2] = 0;
                }
                else
                {
                    imageData[pixelPosition] = 0;
                }
            }
        }
    }

    // Publish the annotated frame as a frozen BitmapSource for the UI thread.
    ManagedImage temp = new ManagedImage();
    manImg.ConvertToBitmapSource(temp);
    temp.bitmapsource.Freeze();
    this.NewResultImage = temp.bitmapsource;
    OnNewResultImageIsReady();
}
/// <summary>
/// Camera worker thread: configures a 200x200 Format7 ROI, then continuously
/// grabs frames, detects the inner circle via a Hough transform, smooths its
/// centre and radius over a sliding window, and displays the annotated frame
/// together with the offset from the image centre.
/// </summary>
private void CaptureCameraCallback()
{
    //initialise camera here
    #region camsetup
    const Mode k_fmt7Mode = Mode.Mode0;
    const PixelFormat k_fmt7PixelFormat = PixelFormat.PixelFormatMono8;

    ManagedBusManager busMgr = new ManagedBusManager();
    uint numCameras = busMgr.GetNumOfCameras();
    Console.WriteLine("Number of cameras detected: {0}", numCameras);
    ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);
    ManagedCamera cam = new ManagedCamera();
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Query for available Format 7 modes
    bool supported = false;
    Format7Info fmt7Info = cam.GetFormat7Info(k_fmt7Mode, ref supported);
    PrintFormat7Capabilities(fmt7Info);
    if ((k_fmt7PixelFormat & (PixelFormat)fmt7Info.pixelFormatBitField) == 0)
    {
        // Pixel format not supported!
        Console.WriteLine("Pixel format is not supported");
        return;
    }

    // 200x200 region of interest.
    Format7ImageSettings fmt7ImageSettings = new Format7ImageSettings();
    fmt7ImageSettings.mode = k_fmt7Mode;
    fmt7ImageSettings.offsetX = 1124;
    fmt7ImageSettings.offsetY = 924;
    fmt7ImageSettings.width = 200;
    fmt7ImageSettings.height = 200;
    fmt7ImageSettings.pixelFormat = k_fmt7PixelFormat;

    // Validate the settings to make sure that they are valid
    bool settingsValid = false;
    Format7PacketInfo fmt7PacketInfo = cam.ValidateFormat7Settings(
        fmt7ImageSettings, ref settingsValid);
    if (settingsValid != true)
    {
        // Settings are not valid
        return;
    }

    // Set the settings to the camera
    cam.SetFormat7Configuration(
        fmt7ImageSettings, fmt7PacketInfo.recommendedBytesPerPacket);

    // Enable embedded timestamps when available.
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Start capturing images
    cam.StartCapture();

    // Retrieve frame rate property
    CameraProperty frmRate = cam.GetProperty(PropertyType.FrameRate);
    Console.WriteLine("Frame rate is {0:F2} fps", frmRate.absValue);
    #endregion

    Mat image;
    Mat grey = new Mat();
    int differencex = 0;
    int differencey = 0;
    double diffenceeucl = 0;
    OpenCvSharp.Point centre_im = new OpenCvSharp.Point();
    int centrex = Convert.ToInt16(fmt7ImageSettings.width) / 2;
    // Fix: the original computed the vertical centre from width as well;
    // use height (same value here since the ROI is square, but correct in general).
    int centrey = Convert.ToInt16(fmt7ImageSettings.height) / 2;

    double dp_ = 1;        // inverse ratio of array accumulator to image resolution
    double minDist_ = 100; // minimum distance between centre of detected circles
    double param1_ = 100;  // Higher threshold of Canny edge detection
    double param2_ = 20;   // Accumulator threshold, smaller value leads to higher false detection rates
    int minRad_ = 45;      // minimum radius
    int maxRad_ = 60;      // maximum radius
    centre_im.X = centrex;
    centre_im.Y = centrey;

    // Sliding-window buffers used to smooth the detected circle.
    int windowsize = 20;
    int[] centresx = new int[windowsize];
    int[] centresy = new int[windowsize];
    int[] radi = new int[windowsize];
    OpenCvSharp.Point centre = new OpenCvSharp.Point();
    int radius = 1;
    int buffpos = 0;
    int flag = 0;

    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();

    //do repeated actions here
    while (true)
    {
        cam.RetrieveBuffer(rawImage);
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;
        image = OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap);
        Cv2.CvtColor(image, grey, ColorConversionCodes.BGR2GRAY);

        // Inner circle
        CircleSegment[] circles = Cv2.HoughCircles(grey, HoughMethods.Gradient, dp_, minDist_, param1_, param2_, minRad_, maxRad_);
        for (int i = 0; i < circles.Length; i++)
        {
            flag = 1;
            // Fix: the original indexed circles[0] inside this loop, so every
            // detection past the first was ignored.
            centre.X = Convert.ToInt16(Math.Round(circles[i].Center.X));
            centre.Y = Convert.ToInt16(Math.Round(circles[i].Center.Y));
            radius = Convert.ToInt16(Math.Round(circles[i].Radius));
            buffpos = (buffpos + 1) % windowsize;
            centresx[buffpos] = centre.X;
            centresy[buffpos] = centre.Y;
            radi[buffpos] = radius;

            // Smooth over the window, then compute the offset from the centre.
            centre.X = Convert.ToInt16(centresx.Average());
            centre.Y = Convert.ToInt16(centresy.Average());
            radius = Convert.ToInt16(radi.Average());
            differencex = centre.X - centrex;
            differencey = centre.Y - centrey;
            diffenceeucl = Math.Round((Math.Sqrt(Math.Pow(differencex, 2) + Math.Pow(differencey, 2))), 2);
        }

        if (flag == 1)
        {
            // Draw the (smoothed) detected circle once at least one was found.
            Cv2.Circle(image, centre, 3, Scalar.Red);
            Cv2.Circle(image, centre, radius, Scalar.Red, 3);
        }

        // Reference marks at the image centre.
        Cv2.Circle(image, centre_im, 3, Scalar.DeepSkyBlue);
        Cv2.Circle(image, centre_im, 50, Scalar.DeepSkyBlue, 3);

        string xdiff = differencex.ToString();
        string textxparse = "X Offset: " + xdiff + " [pixels]";
        string ydiff = differencey.ToString();
        string textyparse = "Y Offset: " + ydiff + " [pixels]";
        string eucl = diffenceeucl.ToString();
        string texteucle = "Eucl. Dist: " + eucl + " [pixels]";
        AppendTextBoxX(textxparse);
        AppendTextBoxE(texteucle);
        AppendTextBoxY(textyparse);

        Bitmap bm = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(image);
        bm.SetResolution(flydisp.Width, flydisp.Height);
        flydisp.Image = bm;
    }
}
/// <summary>
/// Connects to the camera identified by <paramref name="guid"/>, enables the
/// embedded timestamp, grabs <c>k_numImages</c> frames, converts each to BGR
/// and saves it as a BMP named after the camera serial number and frame index.
/// </summary>
/// <param name="guid">PGR GUID of the camera to run.</param>
public void RunSingleCamera(ManagedPGRGuid guid)
{
    const int k_numImages = 10;

    ManagedCamera cam = new ManagedCamera();

    // Connect to a camera
    cam.Connect(guid);

    try
    {
        // Get the camera information
        CameraInfo camInfo = cam.GetCameraInfo();
        PrintCameraInfo(camInfo);

        // Get embedded image info from camera
        EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

        // Enable timestamp collection so each grabbed frame carries a capture time
        if (embeddedInfo.timestamp.available)
        {
            embeddedInfo.timestamp.onOff = true;
        }

        // Set embedded image info to camera
        cam.SetEmbeddedImageInfo(embeddedInfo);

        // Start capturing images
        cam.StartCapture();

        try
        {
            // One raw and one converted buffer are reused for the whole run
            ManagedImage rawImage = new ManagedImage();
            ManagedImage convertedImage = new ManagedImage();

            for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
            {
                // Retrieve an image
                cam.RetrieveBuffer(rawImage);

                // Get the timestamp
                TimeStamp timeStamp = rawImage.timeStamp;

                Console.WriteLine(
                    "Grabbed image {0} - {1} {2} {3}",
                    imageCnt,
                    timeStamp.cycleSeconds,
                    timeStamp.cycleCount,
                    timeStamp.cycleOffset);

                // Convert the raw image to BGR; the .bitmap property below is
                // only valid when the pixel format is RGB or RGBU.
                rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

                // Create a unique filename per camera serial number and frame
                string filename = String.Format(
                    "FlyCapture2Test_CSharp-{0}-{1}.bmp",
                    camInfo.serialNumber,
                    imageCnt);

                // Save the image
                System.Drawing.Bitmap bitmap = convertedImage.bitmap;
                bitmap.Save(filename);
            }
        }
        finally
        {
            // Fix: previously an exception mid-run left the camera streaming.
            cam.StopCapture();
        }
    }
    finally
    {
        // Fix: always release the camera connection, even on failure.
        cam.Disconnect();
    }
}
/// <summary>
/// Grabs <paramref name="numImages"/> frames from the camera identified by
/// <paramref name="guid"/> in buffered-frame mode (300 buffers), writes each
/// frame as single-channel 8-bit into a video file at
/// <paramref name="save_location"/>, and previews every 200th frame in an
/// OpenCV window. Assumes frames are 1888x1888 Raw8 — TODO confirm against
/// the camera's Format7 configuration.
/// </summary>
/// <param name="guid">PGR GUID of the camera to run.</param>
/// <param name="save_location">Output video file path (also used in the window title).</param>
/// <param name="numImages">Number of frames to record.</param>
unsafe void RunSingleCamera(ManagedPGRGuid guid, string save_location, int numImages)
{
    ManagedCamera cam = new ManagedCamera();

    // Connect to a camera
    cam.Connect(guid);

    // Get the camera information
    CameraInfo camInfo = cam.GetCameraInfo();
    PrintCameraInfo(camInfo);

    // Get embedded image info from camera
    EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

    // Enable timestamp collection
    if (embeddedInfo.timestamp.available == true)
    {
        embeddedInfo.timestamp.onOff = true;
    }

    // Set embedded image info to camera
    cam.SetEmbeddedImageInfo(embeddedInfo);

    // Make a 300 Frame Buffer so transient processing stalls don't drop frames
    FC2Config bufferFrame = cam.GetConfiguration();
    bufferFrame.grabMode = GrabMode.BufferFrames;
    bufferFrame.numBuffers = 300;
    cam.SetConfiguration(bufferFrame);

    // Start capturing images
    cam.StartCapture();

    // Create a raw image
    ManagedImage rawImage = new ManagedImage();

    // Create a converted image
    ManagedImage convertedImage = new ManagedImage();

    // Fixed frame geometry; the VideoWriter is opened with fourcc 0
    // (uncompressed), 60 fps, grayscale (isColor: false).
    System.Drawing.Size framesize = new System.Drawing.Size(1888, 1888);
    CvInvoke.NamedWindow("Prey Capture" + save_location, NamedWindowType.Normal);
    VideoWriter camvid = new VideoWriter(save_location, 0, 60, framesize, false);

    for (int imageCnt = 0; imageCnt < numImages; imageCnt++)
    {
        // Retrieve an image
        cam.RetrieveBuffer(rawImage);

        // Get the timestamp
        TimeStamp timeStamp = rawImage.timeStamp;

        // Convert the raw image
        // rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage); // m
        // NOTE(review): the Bgr conversion above appears commented out and the
        // Raw8 one below active — confirm against the original formatting.
        rawImage.Convert(PixelFormat.PixelFormatRaw8, convertedImage); // use raw8 for GH3s but flea3 can be color.

        int rws = (int)convertedImage.rows; // NOTE(review): unused
        int cols = (int)convertedImage.cols;
        IntPtr point = (IntPtr)convertedImage.data;
        // Wrap the converted buffer in a Mat without copying; 'cols' is used as
        // the row step, which presumes a stride of 1 byte/pixel — TODO confirm.
        Mat cvimage = new Mat(framesize, Emgu.CV.CvEnum.DepthType.Cv8U, 1, point, cols);
        camvid.Write(cvimage);
        // Preview and log progress every 200th frame only
        if (imageCnt % 200 == 0)
        {
            CvInvoke.Imshow("Prey Capture" + save_location, cvimage);
            CvInvoke.WaitKey(1);
            Console.WriteLine(imageCnt);
        }
    }

    // Stop capturing images
    cam.StopCapture();
    camvid.Dispose();

    // Disconnect the camera
    Console.WriteLine("Done Brah");
    cam.Disconnect();
    CvInvoke.DestroyAllWindows();
}
/// <summary>
/// BackgroundWorker loop: while m_grabImages is set, grabs a frame from
/// m_camera, converts it to BGR, selects the bitmap to analyse (live camera,
/// numbered test images, or previously saved maxima, per the checkboxes), sums
/// the R+G pixel components over the ROI [xa,xb) x [ya,yb), optionally
/// normalises by a fixed background region, runs peak/trough detection over a
/// sliding window, optionally saves maxima and appends values to
/// "values.txt", then throttles and reports progress to the UI thread.
/// Signals m_grabThreadExited when the loop ends.
/// NOTE(review): this touches WinForms controls (label6, checkboxes) from the
/// worker thread — confirm that is intentional/safe in this app.
/// </summary>
/// <param name="sender">The BackgroundWorker running this loop.</param>
/// <param name="e">Unused DoWork event args.</param>
private void GrabLoop(object sender, DoWorkEventArgs e)
{
    BackgroundWorker worker = sender as BackgroundWorker;
    while (m_grabImages)
    {
        // Grab one frame; on camera error, log and retry rather than exit.
        try
        {
            m_camera.RetrieveBuffer(m_rawImage);
        }
        catch (FC2Exception ex)
        {
            Debug.WriteLine("Error: " + ex.Message);
            continue;
        }
        // NOTE(review): lock(this) is shared with other members — a private
        // lock object would be preferable, but is kept for consistency here.
        lock (this)
        {
            m_rawImage.Convert(PixelFormat.PixelFormatBgr, m_processedImage);
            //sum = sum + 1;
        }
        System.Drawing.Imaging.PixelFormat format = m_processedImage.bitmap.PixelFormat;

        // Test mode: replay numbered images from disk instead of the camera.
        if (checkBoxTestMode.Checked && checkBoxShow.Checked == false)
        {
            label6.Text = (testCount.ToString("000"));
            try
            {
                // NOTE(review): unlike the SavedMaxima paths below, there is no
                // leading "\\" before "TestImages" — confirm savepath ends with
                // a separator.
                newimageTEST = new Bitmap(savepath + "TestImages\\trimmed" + testCount.ToString("000") + ".bmp");
                newimage = newimageTEST.Clone(cloneRect, format);
            }
            catch
            {
                stopCaptureLoop();
                MessageBox.Show("Images not found");
            }
            testCount++;
            // Wrap after image 435 and stop recording.
            if (testCount > 435)
            {
                testCount = 0;
                CheckBoxRecordValues.Checked = false;
            }
        }
        // Show mode: cycle through previously saved maxima images.
        if (checkBoxShow.Checked)
        {
            numSaved = Directory.GetFiles(savepath + "\\SavedMaxima", "*", SearchOption.TopDirectoryOnly).Length;
            if (showCount < numSaved)
            {
                try
                {
                    newimageTEST = new Bitmap(savepath + "\\SavedMaxima\\" + showCount.ToString("000") + ".bmp");
                    newimage = newimageTEST.Clone(cloneRect, format);
                }
                catch
                {
                    stopCaptureLoop();
                    MessageBox.Show("Images not found: " + savepath + "\\SavedMaxima\\" + showCount.ToString("000") + ".bmp");
                }
                showCount++;
            }
            else
            {
                showCount = 0;
            }
        }
        // Normal mode: analyse the live camera frame.
        if (checkBoxShow.Checked == false && checkBoxTestMode.Checked == false)
        {
            newimage = m_processedImage.bitmap.Clone(cloneRect, format);
        }

        // Lock pixel data for fast GetPixel access during the sums below.
        lockBitmap = new LockBitmap(newimage);
        lockBitmap.LockBits();
        sum = 0;
        sumW = 0;
        aveSumPrevious = aveSum;
        aveSum = 0;
        if (CheckBoxAlgorithm2.Checked)
        {
            // NOTE(review): algorithm 2 is currently a no-op — the code below is
            // commented out, and lockBitmap is left locked on this branch.
            /* //this was the previous alternative algorithm
             * for (int y = ya; y < yb; y = y + 1)
             * {
             * for (int x = xa; x < xb; x = x + 1)
             * {
             * sumW = sumW + (x - xa) * (lockBitmap.GetPixel(x, y).G + lockBitmap.GetPixel(x, y).R); //the .R selects the red component of the RGB color
             * sum = sum + (lockBitmap.GetPixel(x, y).G + lockBitmap.GetPixel(x, y).R);
             * }
             * aveSum = aveSum + Convert.ToInt32(1000*sumW / sum);
             * }
             * aveSum = aveSum / (yb - ya);
             * lockBitmap.UnlockBits();*/
        }
        else
        {
            // Sum R+G over the region of interest.
            for (int y = ya; y < yb; y = y + 1)
            {
                for (int x = xa; x < xb; x = x + 1)
                {
                    sum = sum + (lockBitmap.GetPixel(x, y).G + lockBitmap.GetPixel(x, y).R); //the .R selects the red component of the RGB color
                }
            }
            sumBackground = 0;
            // Optional background sample: fixed region, every 3rd pixel.
            if (checkBoxBackgroundCorrection.Checked)
            {
                for (int y = 160; y < 320; y = y + 3)
                {
                    //TESTint = 0;
                    for (int x = 214; x < 426; x = x + 3)
                    {
                        // TESTint = TESTint + 1;
                        sumBackground = sumBackground + (lockBitmap.GetPixel(x, y).G + lockBitmap.GetPixel(x, y).R); //the .R selects the red component of the RGB color
                    }
                }
            }
            lockBitmap.UnlockBits();
            // Guard against a degenerate (empty) ROI before averaging.
            if (((yb - ya) * (xb - xa)) != 0)
            {
                ave = Convert.ToDouble(sum) / ((yb - ya) * (xb - xa));
            }
            if (checkBoxBackgroundCorrection.Checked)
            {
                // 54 * 71 = sample count of the strided background loops above.
                aveBackground = Convert.ToDouble(sumBackground) / (54 * 71);
            }
            else
            {
                aveBackground = 1;
            }
            // Scaled, background-normalised intensity for this frame.
            aveSum = Convert.ToInt32(((10000 * ave / aveBackground)));
        }
        // Peak/trough detection on the intensity series.
        if (checkBoxMax.Checked)
        {
            if (UseSensitiveAlgorithmCheckbox.Checked)
            {
                // Sensitive algorithm: compare the current value against the
                // running mean of a longer sliding window (longarray).
                for (int i = 1; i < longarray.Length; i++)
                {
                    longarray[i - 1] = longarray[i];
                }
                longarray[longarray.Length - 1] = aveSum;
                pastAverage = Convert.ToInt32(longarray.Average());
                if (radioButtonUp.Checked)
                {
                    // Rising crossing of the window mean. NOTE(review):
                    // non-short-circuit '&' — both sides always evaluate.
                    if ((aveSum > pastAverage) & (aveSumPrevious < pastAverage))
                    {
                        ismax = true;
                        if (checkBoxSaveMax.Checked)
                        {
                            newimage.Save(savepath + "\\SavedMaxima\\" + saveCount.ToString("000") + ".bmp");
                            saveCount++;
                        }
                    }
                    else
                    {
                        ismax = false;
                    }
                }
                else if (radioButtonDown.Checked)
                {
                    // Falling crossing of the window mean.
                    if ((aveSum < pastAverage) & (aveSumPrevious > pastAverage))
                    {
                        ismax = true;
                        if (checkBoxSaveMax.Checked)
                        {
                            newimage.Save(savepath + "\\SavedMaxima\\" + saveCount.ToString("000") + ".bmp");
                            saveCount++;
                        }
                    }
                    else
                    {
                        ismax = false;
                    }
                }
            }
            else
            {
                // Default algorithm: 5-sample shift register; the middle sample
                // (array[2]) must be a strict local extremum.
                array[0] = array[1];
                array[1] = array[2];
                array[2] = array[3];
                array[3] = array[4];
                array[4] = aveSum;
                if (checkBoxTriggerOnMinimum.Checked)
                {
                    if ((array[1] < array[0]) && (array[2] < array[1]) && (array[2] < array[3]) && (array[3] < array[4]))
                    {
                        ismax = true;
                        if (checkBoxSaveMax.Checked)
                        {
                            newimage.Save(savepath + "\\SavedMaxima\\" + saveCount.ToString("000") + ".bmp");
                            saveCount++;
                        }
                    }
                    else
                    {
                        ismax = false;
                    }
                }
                else
                {
                    if ((array[1] > array[0]) && (array[2] > array[1]) && (array[2] > array[3]) && (array[3] > array[4]))
                    {
                        ismax = true;
                        if (checkBoxSaveMax.Checked)
                        {
                            newimage.Save(savepath + "\\SavedMaxima\\" + saveCount.ToString("000") + ".bmp");
                            saveCount++;
                        }
                    }
                    else
                    {
                        ismax = false;
                    }
                }
            }
        }
        else
        {
            ismax = false;
        }
        // Append "value <tab> ismax-flag <tab> fake-pulse-flag" to values.txt.
        if (readyToRecordValues)
        {
            if (ismax)
            {
                if (CheckBoxAlgorithm2.Checked)
                {
                    aveSumCopy = aveSum + 0; //+1500; to highlight the max value in a graph.
                }
                else
                {
                    aveSumCopy = aveSum + 0; // 700;
                }
            }
            else
            {
                aveSumCopy = aveSum;
            }
            using (System.IO.StreamWriter file = new System.IO.StreamWriter(@savepath + "values.txt", true))
            {
                file.Write(aveSumCopy.ToString());
                if (ismax)
                {
                    file.Write("\t1");
                }
                else
                {
                    file.Write("\t0");
                }
                // FakePulse is a one-shot marker; consumed once written.
                if (FakePulse)
                {
                    file.WriteLine("\t1");
                    FakePulse = false;
                }
                else
                {
                    file.WriteLine("\t0");
                }
            }
            /*using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\tmp\values2.txt", true))
             * {
             * file.WriteLine(array[0]+" " +array[1]+" " +array[2]+" " +array[3]+" " +array[4]);
             * }*/
        }
        // Measure the effective frame rate over batches of 10 iterations.
        if (My_count == 0)
        {
            stopWatch.Start();
        }
        My_count = My_count + 1;
        if (My_count > 10)
        {
            My_count = 0;
            stopWatch.Stop();
            ts = stopWatch.Elapsed;
            elapsedTime = String.Format("{0:0.0}", (10 / ts.TotalSeconds)); //hertz (10 images over their duration)
            //elapsedTime = String.Format("{0:00}.{1:00}",ts.Seconds,ts.Milliseconds / 10);
            stopWatch.Reset();
        }
        Thread.Sleep(10); //this sleep is neccessary, otherwise it gets close to 60fps and UpdateUI can't go that fast.
        worker.ReportProgress(0);
    }
    // Tell the stopper that this worker has fully exited.
    m_grabThreadExited.Set();
}
/// <summary>
/// Camera-calibration loop: while in ST_CALIBRATING, grabs frames from Cam,
/// converts them to grayscale, looks for a chessboard of
/// ChessHorizCount x ChessVertCount corners, refines and draws any corners
/// found, stores them in Points, and shows the annotated frame in DisplayBox.
/// Once FrameCount frames with corners have been collected, switches to
/// ST_IDLE and runs calibrate().
/// </summary>
public void MainLoop()
{
    // Managed Image MUST BE OUT OF THE LOOP! (For some reason...)
    ManagedImage rawImage = new ManagedImage();
    ManagedImage convertedImage = new ManagedImage();
    //System.Drawing.Bitmap bitmap;
    while (state == ST_CALIBRATING)
    {
        // retrieving an image using the Flea3 API
        Cam.RetrieveBuffer(rawImage);
        // Convert the raw image to a System.Drawing.Bitmap
        rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);
        System.Drawing.Bitmap bitmap = convertedImage.bitmap;
        // Simply create a new OpenCV frame with the bitmap
        Image <Bgr, Byte> frame = new Image <Bgr, byte>(bitmap);
        Image <Gray, Byte> grayFrame = frame.Convert <Gray, Byte>();
        // and creating the drawImage frame
        DrawImage = grayFrame.Clone();
        // declaring array of points for all RGB components
        PointF[][] corners = new PointF[3][];
        // finding corners in the frame
        // left frame
        // bool result = CameraCalibration.FindChessboardCorners(grayFrame,
        //     new Size(ChessHorizCount, ChessVertCount), Emgu.CV.CvEnum.CALIB_CB_TYPE.FAST_CHECK, out corners[0]);
        corners[0] = CameraCalibration.FindChessboardCorners(grayFrame,
            new Size(ChessHorizCount, ChessVertCount), Emgu.CV.CvEnum.CALIB_CB_TYPE.FAST_CHECK);
            //Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH | Emgu.CV.CvEnum.CALIB_CB_TYPE.NORMALIZE_IMAGE
            //);
        // FindChessboardCorners returns null when no board was detected.
        bool result = !(corners[0] == null);
        if (result)
        {
            FrameCounter++;
            //finding corners with sub pixel accuracy
            // NOTE(review): 'corners' is a 3-element jagged array but only
            // corners[0] is populated — confirm FindCornerSubPix tolerates the
            // null entries in channels 1 and 2.
            grayFrame.FindCornerSubPix(corners, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(0.01));
            // now drawing the corners
            /* CameraCalibration.DrawChessboardCorners(DrawImage,
             * new Size(ChessHorizCount, ChessVertCount),
             * corners[0],
             * true
             * ); */
            CameraCalibration.DrawChessboardCorners(DrawImage,
                new Size(ChessHorizCount, ChessVertCount),
                corners[0]
                );
            // adding the detected points to the list
            Points[FrameCounter - 1] = corners[0];
        }
        // assigning the image to the imageviewer (so that it shows)
        //imageViewer.Image = DrawImage;
        DisplayBox.Image = (Image)DrawImage.Bitmap;
        // Enough frames collected: leave the loop state and run calibration.
        if (FrameCounter >= FrameCount)
        {
            state = ST_IDLE;
            calibrate();
            Console.WriteLine("Calibration now is complete. You may NOW kill the thread!");
        }
    }
}