private void PopulateStreamFormat()
{
    bool readable = Pylon.DeviceFeatureIsReadable(deviceHandle, "PixelFormat");
    if (!readable)
    {
        cmbFormat.Enabled = false;
        return;
    }

    string currentValue = Pylon.DeviceFeatureToString(deviceHandle, "PixelFormat");

    List<StreamFormat> streamFormats = PylonHelper.GetSupportedStreamFormats(deviceHandle);
    if (streamFormats == null)
    {
        cmbFormat.Enabled = false;
        return;
    }

    foreach (StreamFormat streamFormat in streamFormats)
    {
        cmbFormat.Items.Add(streamFormat);

        if (currentValue == streamFormat.Symbol)
        {
            selectedStreamFormat = streamFormat;
            cmbFormat.SelectedIndex = cmbFormat.Items.Count - 1;
        }
    }
}
private void PopulateStreamFormat()
{
    lblColorSpace.Text = CameraLang.FormConfiguration_Properties_StreamFormat;

    bool readable = Pylon.DeviceFeatureIsReadable(deviceHandle, "PixelFormat");
    if (!readable)
    {
        cmbFormat.Enabled = false;
        return;
    }

    string currentValue = Pylon.DeviceFeatureToString(deviceHandle, "PixelFormat");

    List<GenApiEnum> streamFormats = PylonHelper.ReadEnum(deviceHandle, "PixelFormat");
    if (streamFormats == null)
    {
        cmbFormat.Enabled = false;
        return;
    }

    foreach (GenApiEnum streamFormat in streamFormats)
    {
        cmbFormat.Items.Add(streamFormat);

        if (currentValue == streamFormat.Symbol)
        {
            selectedStreamFormat = streamFormat;
            cmbFormat.SelectedIndex = cmbFormat.Items.Count - 1;
        }
    }
}
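/* The two variants above only read the PixelFormat enumeration to fill the combo box.
 * A hedged sketch of the companion step, writing the user's choice back to the camera, is shown
 * below. "ApplySelectedStreamFormat" is a made-up name; "selectedStreamFormat" and its Symbol
 * property are taken from the snippets above. */
private void ApplySelectedStreamFormat()
{
    if (selectedStreamFormat == null)
        return;

    /* Only write the feature if it is currently writable (e.g. the camera isn't grabbing). */
    if (!Pylon.DeviceFeatureIsWritable(deviceHandle, "PixelFormat"))
        return;

    /* Enumeration features are set by their symbolic name, the same string that
     * Pylon.DeviceFeatureToString() returns. */
    Pylon.DeviceFeatureFromString(deviceHandle, "PixelFormat", selectedStreamFormat.Symbol);
}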
/*
 * Regardless of the parameter's type, any parameter value can be retrieved as a string.
 * Likewise, each parameter can be set by passing in a string. This function illustrates
 * how to set and get the Width parameter as a string. As demonstrated above, the Width
 * parameter is of the integer type.
 */
private static void demonstrateFromStringToString(PYLON_DEVICE_HANDLE hDev)
{
    string featureName = "Width"; /* The name of the feature. */
    string value;

    /* Get the value of a feature as a string. */
    value = Pylon.DeviceFeatureToString(hDev, featureName);
    Console.WriteLine("{0}: {1}", featureName, value);

    /* A feature can be set as a string using the PylonDeviceFeatureFromString() function.
     * If the content of a string can not be converted to the type of the feature, an
     * error is returned. */
    try
    {
        Pylon.DeviceFeatureFromString(hDev, featureName, "fourty-two"); /* Cannot be converted to an integer. */
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.WriteLine("Exception caught:");
        Console.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.WriteLine("Last error message:");
            Console.WriteLine(msg);
        }
    }
}
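/* The sample above deliberately provokes a conversion error. For completeness, a minimal sketch
 * of the successful path follows: writing back the string value we just read is always type- and
 * range-compatible. The method name is invented for illustration. */
private static void demonstrateFromStringRoundTrip(PYLON_DEVICE_HANDLE hDev)
{
    string featureName = "Width";

    /* Read the current value as a string. */
    string value = Pylon.DeviceFeatureToString(hDev, featureName);

    /* Only write if the feature is currently writable. */
    if (Pylon.DeviceFeatureIsWritable(hDev, featureName))
    {
        Pylon.DeviceFeatureFromString(hDev, featureName, value);
        Console.WriteLine("{0} set back to {1}.", featureName, value);
    }
}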
/// <summary>
/// Configure device and report frame format that will be used during streaming.
/// This method must return a proper ImageDescriptor so we can pre-allocate buffers.
/// </summary>
public ImageDescriptor Prepare()
{
    Open();

    if (deviceHandle == null || !deviceHandle.IsValid)
    {
        return ImageDescriptor.Invalid;
    }

    firstOpen = false;

    // Get the configured framerate for recording support.
    if (Pylon.DeviceFeatureIsReadable(deviceHandle, "ResultingFrameRateAbs"))
    {
        resultingFramerate = (float)Pylon.DeviceGetFloatFeature(deviceHandle, "ResultingFrameRateAbs");
    }
    else if (Pylon.DeviceFeatureIsReadable(deviceHandle, "ResultingFrameRate"))
    {
        resultingFramerate = (float)Pylon.DeviceGetFloatFeature(deviceHandle, "ResultingFrameRate");
    }

    SpecificInfo specific = summary.Specific as SpecificInfo;
    string streamFormatSymbol = specific.StreamFormat;

    bool hasWidth = Pylon.DeviceFeatureIsReadable(deviceHandle, "Width");
    bool hasHeight = Pylon.DeviceFeatureIsReadable(deviceHandle, "Height");
    bool hasPixelFormat = Pylon.DeviceFeatureIsReadable(deviceHandle, "PixelFormat");
    bool canComputeImageDescriptor = hasWidth && hasHeight && hasPixelFormat;

    if (!canComputeImageDescriptor)
    {
        return ImageDescriptor.Invalid;
    }

    int width = (int)Pylon.DeviceGetIntegerFeature(deviceHandle, "Width");
    int height = (int)Pylon.DeviceGetIntegerFeature(deviceHandle, "Height");
    string pixelFormat = Pylon.DeviceFeatureToString(deviceHandle, "PixelFormat");

    // Note: the image provider will perform the Bayer conversion itself and only output two formats.
    // - Y800 for anything monochrome.
    // - RGB32 for anything color.
    EPylonPixelType pixelType = Pylon.PixelTypeFromString(pixelFormat);
    if (pixelType == EPylonPixelType.PixelType_Undefined)
    {
        return ImageDescriptor.Invalid;
    }

    bool monochrome = Pylon.IsMono(pixelType) && !Pylon.IsBayer(pixelType);
    ImageFormat format = monochrome ? ImageFormat.Y800 : ImageFormat.RGB32;

    int bufferSize = ImageFormatHelper.ComputeBufferSize(width, height, format);
    bool topDown = true;

    return new ImageDescriptor(format, width, height, topDown, bufferSize);
}
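/* Prepare() above relies on ImageFormatHelper.ComputeBufferSize(), whose body is not part of this
 * snippet. A plausible sketch, assuming the two output formats described in the comments
 * (Y800 = 1 byte per pixel, RGB32 = 4 bytes per pixel), could look like this; the real helper may differ. */
public static int ComputeBufferSize(int width, int height, ImageFormat format)
{
    switch (format)
    {
        case ImageFormat.Y800:
            /* 8-bit monochrome: one byte per pixel. */
            return width * height;
        case ImageFormat.RGB32:
            /* 32-bit color: four bytes per pixel. */
            return width * height * 4;
        default:
            throw new ArgumentException("Unsupported image format.");
    }
}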
/// <summary>
/// Get a camera feature value as a string.
/// </summary>
/// <param name="device">camera device</param>
/// <param name="featureName">feature name</param>
/// <returns>feature value</returns>
private string GetConfig(PYLON_DEVICE_HANDLE device, string featureName)
{
    string value = string.Empty;

    try
    {
        value = Pylon.DeviceFeatureToString(device, featureName);
    }
    catch (Exception)
    {
        /* Rethrow without resetting the stack trace ("throw ex;" would truncate it). */
        throw;
    }

    return value;
}
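/* A hypothetical call site for GetConfig(). Since the helper lets exceptions propagate, it is
 * safer to check readability first; "DeviceModelName" is used here only as an example feature. */
if (Pylon.DeviceFeatureIsReadable(device, "DeviceModelName"))
{
    string modelName = GetConfig(device, "DeviceModelName");
    Console.WriteLine("Camera model: {0}", modelName);
}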
const uint GIGE_PROTOCOL_OVERHEAD = 36; /* Total number of bytes of protocol overhead. */ static void Main(string[] args) { PYLON_DEVICE_HANDLE[] hDev = new PYLON_DEVICE_HANDLE[NUM_DEVICES]; /* Handles for the pylon devices. */ for (int deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex) { hDev[deviceIndex] = new PYLON_DEVICE_HANDLE(); } try { uint numDevicesAvail; /* Number of the available devices. */ bool isAvail; /* Used for checking feature availability. */ bool isReady; /* Used as an output parameter. */ int i; /* Counter. */ int deviceIndex; /* Index of device used in the following variables. */ PYLON_WAITOBJECTS_HANDLE wos; /* Wait objects. */ int nGrabs; /* Counts the number of grab iterations. */ PYLON_WAITOBJECT_HANDLE woTimer; /* Timer wait object. */ /* These are camera specific variables: */ PYLON_STREAMGRABBER_HANDLE[] hGrabber = new PYLON_STREAMGRABBER_HANDLE[NUM_DEVICES]; /* Handle for the pylon stream grabber. */ PYLON_WAITOBJECT_HANDLE[] hWait = new PYLON_WAITOBJECT_HANDLE[NUM_DEVICES]; /* Handle used for waiting for a grab to be finished. */ uint[] payloadSize = new uint[NUM_DEVICES]; /* Size of an image frame in bytes. */ PylonGrabResult_t[] grabResult = new PylonGrabResult_t[NUM_DEVICES]; /* Stores the result of a grab operation. */ uint[] nStreams = new uint[NUM_DEVICES]; /* The number of streams provided by the device. */ Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >[] buffers = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > [NUM_DEVICES]; /* Holds handles and buffers used for grabbing. */ #if DEBUG /* This is a special debug setting needed only for GigE cameras. * See 'Building Applications with pylon' in the Programmer's Guide. */ Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/); #endif /* Before using any pylon methods, the pylon runtime must be initialized. */ Pylon.Initialize(); /* Enumerate all camera devices. You must call * PylonEnumerateDevices() before creating a device. */ numDevicesAvail = Pylon.EnumerateDevices(); if (numDevicesAvail < NUM_DEVICES) { Console.Error.WriteLine("Found {0} devices. At least {1} devices needed to run this sample.", numDevicesAvail, NUM_DEVICES); throw new Exception("Not enough devices found."); } /* Create wait objects. This must be done outside of the loop. */ wos = Pylon.WaitObjectsCreate(); /* In this sample, we want to grab for a given amount of time, then stop. * Create a timer that tiggers an AutoResetEvent, wrap the AutoResetEvent in a pylon C.NET wait object, and add it to * the wait object set. */ AutoResetEvent timoutEvent = new AutoResetEvent(false); /* The timeout event to wait for. */ TimerCallbackWrapper timerCallbackWrapper = new TimerCallbackWrapper(timoutEvent); /* Receives the timer callback and sets the timeout event. */ Timer timer = new Timer(timerCallbackWrapper.TimerCallback); /* The timeout timer. */ woTimer = Pylon.WaitObjectFromW32(timoutEvent.SafeWaitHandle, true); Pylon.WaitObjectsAdd(wos, woTimer); /* Open cameras and set parameters. */ for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex) { /* Get handles for the devices. */ hDev[deviceIndex] = Pylon.CreateDeviceByIndex((uint)deviceIndex); /* Before using the device, it must be opened. Open it for configuring * parameters and for grabbing images. */ Pylon.DeviceOpen(hDev[deviceIndex], Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream); /* Print out the name of the camera we are using. 
*/ { bool isReadable = Pylon.DeviceFeatureIsReadable(hDev[deviceIndex], "DeviceModelName"); if (isReadable) { string name = Pylon.DeviceFeatureToString(hDev[deviceIndex], "DeviceModelName"); Console.WriteLine("Using camera '{0}'", name); } } /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */ /* ... Check first to see if the device supports the Mono8 format. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_PixelFormat_Mono8"); if (!isAvail) { /* Feature is not available. */ throw new Exception("Device doesn't support the Mono8 pixel format."); } /* ... Set the pixel format to Mono8. */ Pylon.DeviceFeatureFromString(hDev[deviceIndex], "PixelFormat", "Mono8"); /* Disable acquisition start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_AcquisitionStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "AcquisitionStart"); Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off"); } /* Disable frame burst start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_FrameBurstStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "FrameBurstStart"); Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off"); } /* Disable frame start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_FrameStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "FrameStart"); Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off"); } /* We will use the Continuous frame mode, i.e., the camera delivers * images continuously. */ Pylon.DeviceFeatureFromString(hDev[deviceIndex], "AcquisitionMode", "Continuous"); PYLON_DEVICE_INFO_HANDLE hDi = Pylon.GetDeviceInfoHandle((uint)deviceIndex); string deviceClass = Pylon.DeviceInfoGetPropertyValueByName(hDi, Pylon.cPylonDeviceInfoDeviceClassKey); if (deviceClass == "BaslerGigE") { /* For GigE cameras, we recommend increasing the packet size for better * performance. When the network adapter supports jumbo frames, set the packet * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size * to 1500. * * We also set the Inter-Packet and the Frame Transmission delay * so the switch can line up packets better. */ Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPSPacketSize", GIGE_PACKET_SIZE); Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * (NUM_DEVICES - 1)); Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCFTD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * deviceIndex); } else if (deviceClass == "Basler1394") { /* For FireWire we just set the PacketSize node to limit the bandwidth we're using. */ /* We first divide the available bandwidth (4915 for FW400, 9830 for FW800) * by the number of devices we are using. */ long newPacketSize = 4915 / NUM_DEVICES; long recommendedPacketSize = 0; /* Get the recommended packet size from the camera. */ recommendedPacketSize = Pylon.DeviceGetIntegerFeature(hDev[deviceIndex], "RecommendedPacketSize"); if (newPacketSize < recommendedPacketSize) { /* Get the increment value for the packet size. * We must make sure that the new value we're setting is divisible by the increment of that feature. 
*/ long packetSizeInc = 0; packetSizeInc = Pylon.DeviceGetIntegerFeatureInc(hDev[deviceIndex], "PacketSize"); /* Adjust the new packet size so is divisible by its increment. */ newPacketSize -= newPacketSize % packetSizeInc; } else { /* The recommended packet size should always be valid. No need to check against the increment. */ newPacketSize = recommendedPacketSize; } /* Set the new packet size. */ Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "PacketSize", newPacketSize); Console.WriteLine("Using packetsize: {0}", newPacketSize); } } /* Allocate and register buffers for grab. */ for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex) { /* Determine the required size for the grab buffer. */ payloadSize[deviceIndex] = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev[deviceIndex], "PayloadSize")); /* Image grabbing is done using a stream grabber. * A device may be able to provide different streams. A separate stream grabber must * be used for each stream. In this sample, we create a stream grabber for the default * stream, i.e., the first stream ( index == 0 ). */ /* Get the number of streams supported by the device and the transport layer. */ nStreams[deviceIndex] = Pylon.DeviceGetNumStreamGrabberChannels(hDev[deviceIndex]); if (nStreams[deviceIndex] < 1) { throw new Exception("The transport layer doesn't support image streams."); } /* Create and open a stream grabber for the first channel. */ hGrabber[deviceIndex] = Pylon.DeviceGetStreamGrabber(hDev[deviceIndex], 0); Pylon.StreamGrabberOpen(hGrabber[deviceIndex]); /* Get a handle for the stream grabber's wait object. The wait object * allows waiting for buffers to be filled with grabbed data. */ hWait[deviceIndex] = Pylon.StreamGrabberGetWaitObject(hGrabber[deviceIndex]); /* Add the stream grabber's wait object to our wait objects. * This is needed to be able to wait until all cameras have * grabbed an image in our grab loop below. */ Pylon.WaitObjectsAdd(wos, hWait[deviceIndex]); /* We must tell the stream grabber the number and size of the buffers * we are using. */ /* .. We will not use more than NUM_BUFFERS for grabbing. */ Pylon.StreamGrabberSetMaxNumBuffer(hGrabber[deviceIndex], NUM_BUFFERS); /* .. We will not use buffers bigger than payloadSize bytes. */ Pylon.StreamGrabberSetMaxBufferSize(hGrabber[deviceIndex], payloadSize[deviceIndex]); /* Allocate the resources required for grabbing. After this, critical parameters * that impact the payload size must not be changed until FinishGrab() is called. */ Pylon.StreamGrabberPrepareGrab(hGrabber[deviceIndex]); /* Before using the buffers for grabbing, they must be registered at * the stream grabber. For each registered buffer, a buffer handle * is returned. After registering, these handles are used instead of the * buffer objects pointers. The buffer objects are held in a dictionary, * that provides access to the buffer using a handle as key. */ buffers[deviceIndex] = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >(); for (i = 0; i < NUM_BUFFERS; ++i) { PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize[deviceIndex], true); PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber[deviceIndex], ref buffer); buffers[deviceIndex].Add(handle, buffer); } /* Feed the buffers into the stream grabber's input queue. For each buffer, the API * allows passing in an integer as additional context information. This integer * will be returned unchanged when the grab is finished. 
In our example, we use the index of the * buffer as context information. */ i = 0; foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers[deviceIndex]) { Pylon.StreamGrabberQueueBuffer(hGrabber[deviceIndex], pair.Key, i++); } } /* The stream grabber is now prepared. As soon the camera starts acquiring images, * the image data will be grabbed into the provided buffers. */ for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex) { /* Let the camera acquire images. */ Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStart"); } /* Set the timer to 5 s and start it. */ timer.Change(5000, Timeout.Infinite); /* Counts the number of grabbed images. */ nGrabs = 0; /* Grab until the timer expires. */ for (;;) { int bufferIndex; /* Index of the buffer. */ Byte min, max; uint woIndex; /* Wait for the next buffer to be filled. Wait up to 1000 ms. */ isReady = Pylon.WaitObjectsWaitForAny(wos, 1000, out woIndex); if (!isReady) { /* Timeout occurred. */ throw new Exception("Grab timeout occurred."); } /* If the timer has expired, exit the grab loop. */ if (woIndex == 0) { Console.Error.WriteLine("Grabbing completed successfully."); break; /* Timer expired. */ } /* Account for the timer. */ --woIndex; /* Since the wait operation was successful, the result of at least one grab * operation is available. Retrieve it. */ isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[woIndex], out grabResult[woIndex]); if (!isReady) { /* Oops. No grab result available? We should never have reached this point. * Since the wait operation above returned without a timeout, a grab result * should be available. */ throw new Exception("Failed to retrieve a grab result."); } /* Get the buffer index from the context information. */ bufferIndex = grabResult[woIndex].Context; /* Check to see if the image was grabbed successfully. */ if (grabResult[woIndex].Status == EPylonGrabStatus.Grabbed) { /* Success. Perform image processing. Since we passed more than one buffer * to the stream grabber, the remaining buffers are filled in the background while * we do the image processing. The processed buffer won't be touched by * the stream grabber until we pass it back to the stream grabber. */ PylonBuffer <Byte> buffer; /* Reference to the buffer attached to the grab result. */ /* Get the buffer from the dictionary. Since we also got the buffer index, * we could alternatively use an array, e.g. buffers[bufferIndex]. */ if (!buffers[woIndex].TryGetValue(grabResult[woIndex].hBuffer, out buffer)) { /* Oops. No buffer available? We should never have reached this point. Since all buffers are * in the dictionary. */ throw new Exception("Failed to find the buffer associated with the handle returned in grab result."); } /* Perform processing. */ getMinMax(buffer.Array, grabResult[woIndex].SizeX, grabResult[woIndex].SizeY, out min, out max); Console.WriteLine("Grabbed frame {0} from camera {1} into buffer {2}. Min. val={3}, Max. val={4}", nGrabs, woIndex, bufferIndex, min, max); /* Display image */ Pylon.ImageWindowDisplayImage <Byte>(woIndex, buffer, grabResult[woIndex]); } else if (grabResult[woIndex].Status == EPylonGrabStatus.Failed) { Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}", nGrabs, grabResult[woIndex].ErrorCode); } /* Once finished with the processing, requeue the buffer to be filled again. */ Pylon.StreamGrabberQueueBuffer(hGrabber[woIndex], grabResult[woIndex].hBuffer, bufferIndex); nGrabs++; } /* Clean up. 
*/ /* Stop the image aquisition on the cameras. */ for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex) { /* ... Stop the camera. */ Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStop"); } // Remove all wait objects from WaitObjects. Pylon.WaitObjectsRemoveAll(wos); Pylon.WaitObjectDestroy(woTimer); Pylon.WaitObjectsDestroy(wos); for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex) { /* ... We must issue a cancel call to ensure that all pending buffers are put into the * stream grabber's output queue. */ Pylon.StreamGrabberCancelGrab(hGrabber[deviceIndex]); /* ... The buffers can now be retrieved from the stream grabber. */ do { isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[deviceIndex], out grabResult[deviceIndex]); } while (isReady); /* ... When all buffers are retrieved from the stream grabber, they can be deregistered. * After deregistering the buffers, it is safe to free the memory. */ foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers[deviceIndex]) { Pylon.StreamGrabberDeregisterBuffer(hGrabber[deviceIndex], pair.Key); pair.Value.Dispose(); } buffers[deviceIndex] = null; /* ... Release grabbing related resources. */ Pylon.StreamGrabberFinishGrab(hGrabber[deviceIndex]); /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g., * the AOI width and height parameters) are unlocked and can be modified again. */ /* ... Close the stream grabber. */ Pylon.StreamGrabberClose(hGrabber[deviceIndex]); /* ... Close and release the pylon device. The stream grabber becomes invalid * after closing the pylon device. Don't call stream grabber related methods after * closing or releasing the device. */ Pylon.DeviceClose(hDev[deviceIndex]); Pylon.DestroyDevice(hDev[deviceIndex]); } /* Dispose timer and event. */ timer.Dispose(); timoutEvent.Close(); Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); /* ... Shut down the pylon runtime system. Don't call any pylon function after * calling PylonTerminate(). */ Pylon.Terminate(); } catch (Exception e) { /* Retrieve the error message. */ string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail(); Console.Error.WriteLine("Exception caught:"); Console.Error.WriteLine(e.Message); if (msg != "\n") { Console.Error.WriteLine("Last error message:"); Console.Error.WriteLine(msg); } for (uint deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex) { try { if (hDev[deviceIndex].IsValid) { /* ... Close and release the pylon device. */ if (Pylon.DeviceIsOpen(hDev[deviceIndex])) { Pylon.DeviceClose(hDev[deviceIndex]); } Pylon.DestroyDevice(hDev[deviceIndex]); } } catch (Exception) { /*No further handling here.*/ } } Pylon.Terminate(); /* Releases all pylon resources. */ Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); Environment.Exit(1); } }
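/* The grab loop above calls getMinMax() on the Mono8 buffers, but its body is not included in
 * this excerpt. A plausible implementation is sketched below; the parameter types are assumed to
 * match the grab result's SizeX/SizeY fields. */
static void getMinMax(Byte[] imageBuffer, int sizeX, int sizeY, out Byte min, out Byte max)
{
    min = 255;
    max = 0;

    /* Mono8: one byte per pixel, scan the whole frame. */
    long numPixels = (long)sizeX * sizeY;
    for (long p = 0; p < numPixels; ++p)
    {
        Byte value = imageBuffer[p];
        if (value < min) min = value;
        if (value > max) max = value;
    }
}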
int i; /* Counter. */ public void PylonC_Open() { #if Local_Variables PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */ #endif try { #if Local_Variables uint numDevices; /* Number of available devices. */ PYLON_STREAMGRABBER_HANDLE hGrabber; /* Handle for the pylon stream grabber. */ PYLON_WAITOBJECT_HANDLE hWait; /* Handle used for waiting for a grab to be finished. */ uint payloadSize; /* Size of an image frame in bytes. */ Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > buffers; /* Holds handles and buffers used for grabbing. */ PylonGrabResult_t grabResult; /* Stores the result of a grab operation. */ int nGrabs; /* Counts the number of buffers grabbed. */ uint nStreams; /* The number of streams provides by the device. */ bool isAvail; /* Used for checking feature availability. */ bool isReady; /* Used as an output parameter. */ int i; /* Counter. */ #endif #if DEBUG /* This is a special debug setting needed only for GigE cameras. * See 'Building Applications with pylon' in the programmer's guide. */ Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/); #endif /* Before using any pylon methods, the pylon runtime must be initialized. */ Pylon.Initialize(); /* Enumerate all camera devices. You must call * PylonEnumerateDevices() before creating a device. */ numDevices = Pylon.EnumerateDevices(); if (0 == numDevices) { throw new Exception("No devices found."); } /* Get a handle for the first device found. */ hDev = Pylon.CreateDeviceByIndex(0); /* Before using the device, it must be opened. Open it for configuring * parameters and for grabbing images. */ Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream); /* Print out the name of the camera we are using. */ { bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName"); if (isReadable) { string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName"); Console.WriteLine("Using camera {0}.", name); } } /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */ /* ... Check first to see if the device supports the Mono8 format. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_YUV422Packed"); if (!isAvail) { /* Feature is not available. */ throw new Exception("Device doesn't support the Mono8/YUV422Packed pixel format."); } /* ... Set the pixel format to Mono8. */ Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "YUV422Packed"); /* Disable acquisition start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* Disable frame burst start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* Disable frame start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* We will use the Continuous frame mode, i.e., the camera delivers * images continuously. 
*/ Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous"); /* For GigE cameras, we recommend increasing the packet size for better * performance. When the network adapter supports jumbo frames, set the packet * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size * to 1500. */ /* ... Check first to see if the GigE camera packet size parameter is supported and if it is writable. */ isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize"); if (isAvail) { /* ... The device supports the packet size feature. Set a value. */ Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500); } /* Determine the required size of the grab buffer. */ payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize")); /* Image grabbing is done using a stream grabber. * A device may be able to provide different streams. A separate stream grabber must * be used for each stream. In this sample, we create a stream grabber for the default * stream, i.e., the first stream ( index == 0 ). */ /* Get the number of streams supported by the device and the transport layer. */ nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev); if (nStreams < 1) { throw new Exception("The transport layer doesn't support image streams."); } /* Create and open a stream grabber for the first channel. */ hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0); Pylon.StreamGrabberOpen(hGrabber); /* Get a handle for the stream grabber's wait object. The wait object * allows waiting for buffers to be filled with grabbed data. */ hWait = Pylon.StreamGrabberGetWaitObject(hGrabber); /* We must tell the stream grabber the number and size of the buffers * we are using. */ /* .. We will not use more than NUM_BUFFERS for grabbing. */ Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS); /* .. We will not use buffers bigger than payloadSize bytes. */ Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize); /* Allocate the resources required for grabbing. After this, critical parameters * that impact the payload size must not be changed until FinishGrab() is called. */ Pylon.StreamGrabberPrepareGrab(hGrabber); /* Before using the buffers for grabbing, they must be registered at * the stream grabber. For each registered buffer, a buffer handle * is returned. After registering, these handles are used instead of the * buffer objects pointers. The buffer objects are held in a dictionary, * that provides access to the buffer using a handle as key. */ buffers = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >(); for (i = 0; i < NUM_BUFFERS; ++i) { PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true); PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer); buffers.Add(handle, buffer); } /* Feed the buffers into the stream grabber's input queue. For each buffer, the API * allows passing in an integer as additional context information. This integer * will be returned unchanged when the grab is finished. In our example, we use the index of the * buffer as context information. */ i = 0; foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers) { Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++); } /* The stream grabber is now prepared. As soon the camera starts acquiring images, * the image data will be grabbed into the provided buffers. */ /* Let the camera acquire images. 
*/ Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart"); /* Grab NUM_GRABS images */ nGrabs = 0; /* Counts the number of grabbed images. */ //while (nGrabs < NUM_GRABS) while (nGrabs < NUM_GRABS) { int bufferIndex; /* Index of the buffer. */ Byte min, max; /* Wait for the next buffer to be filled. Wait up to 1000 ms. */ isReady = Pylon.WaitObjectWait(hWait, 1000); if (!isReady) { /* Timeout occurred. */ throw new Exception("Grab timeout occurred."); } /* Since the wait operation was successful, the result of at least one grab * operation is available. Retrieve it. */ isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult); if (!isReady) { /* Oops. No grab result available? We should never have reached this point. * Since the wait operation above returned without a timeout, a grab result * should be available. */ throw new Exception("Failed to retrieve a grab result"); } nGrabs++; /* Get the buffer index from the context information. */ bufferIndex = (int)grabResult.Context; /* Check to see if the image was grabbed successfully. */ if (grabResult.Status == EPylonGrabStatus.Grabbed) { /* Success. Perform image processing. Since we passed more than one buffer * to the stream grabber, the remaining buffers are filled in the background while * we do the image processing. The processed buffer won't be touched by * the stream grabber until we pass it back to the stream grabber. */ PylonBuffer <Byte> buffer; /* Reference to the buffer attached to the grab result. */ /* Get the buffer from the dictionary. Since we also got the buffer index, * we could alternatively use an array, e.g. buffers[bufferIndex]. */ if (!buffers.TryGetValue(grabResult.hBuffer, out buffer)) { /* Oops. No buffer available? We should never have reached this point. Since all buffers are * in the dictionary. */ throw new Exception("Failed to find the buffer associated with the handle returned in grab result."); } /* Perform processing. */ getMinMax(buffer.Array, grabResult.SizeX, grabResult.SizeY, out min, out max); Console.WriteLine("Grabbed frame {0} into buffer {1}. Min. gray value = {2}, Max. gray value = {3}", nGrabs, bufferIndex, min, max); /* Display image */ ////////////////////////////////////////////////////////////////////////////////////////////////////////////////// WpfApp5.MainWindow.Pylon_Buffer = buffer; WpfApp5.MainWindow.GrabResult = grabResult; Console.WriteLine("the contents of buffer: [{0, 10}] in OverlappedGrab Class.", buffer.Array[0]); Console.WriteLine("the contents of buffer: [{0, 10}] in WpfApp3.MainWindow.Pylon_Buffer.", WpfApp5.MainWindow.Pylon_Buffer.Array[0]); Pylon.ImageWindowDisplayImage <Byte>(0, buffer, grabResult); ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// } else if (grabResult.Status == EPylonGrabStatus.Failed) { Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}", nGrabs, grabResult.ErrorCode); } /* Once finished with the processing, requeue the buffer to be filled again. */ Pylon.StreamGrabberQueueBuffer(hGrabber, grabResult.hBuffer, bufferIndex); } /* Clean up. */ #if DONT_COMPILE_THESE_CODE /* ... Stop the camera. */ Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStop"); /* ... We must issue a cancel call to ensure that all pending buffers are put into the * stream grabber's output queue. */ Pylon.StreamGrabberCancelGrab(hGrabber); /* ... The buffers can now be retrieved from the stream grabber. 
*/ do { isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult); } while (isReady); /* ... When all buffers are retrieved from the stream grabber, they can be deregistered. * After deregistering the buffers, it is safe to free the memory. */ foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers) { Pylon.StreamGrabberDeregisterBuffer(hGrabber, pair.Key); pair.Value.Dispose(); } buffers = null; /* ... Release grabbing related resources. */ Pylon.StreamGrabberFinishGrab(hGrabber); /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g., * the AOI width and height parameters) are unlocked and can be modified again. */ /* ... Close the stream grabber. */ Pylon.StreamGrabberClose(hGrabber); /* ... Close and release the pylon device. The stream grabber becomes invalid * after closing the pylon device. Don't call stream grabber related methods after * closing or releasing the device. */ Pylon.DeviceClose(hDev); Pylon.DestroyDevice(hDev); Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); /* ... Shut down the pylon runtime system. Don't call any pylon method after * calling Pylon.Terminate(). */ Pylon.Terminate(); #endif } catch (Exception e) { /* Retrieve the error message. */ string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail(); Console.Error.WriteLine("Exception caught:"); Console.Error.WriteLine(e.Message); if (msg != "\n") { Console.Error.WriteLine("Last error message:"); Console.Error.WriteLine(msg); } try { if (hDev.IsValid) { /* ... Close and release the pylon device. */ if (Pylon.DeviceIsOpen(hDev)) { Pylon.DeviceClose(hDev); } Pylon.DestroyDevice(hDev); } } catch (Exception) { /*No further handling here.*/ } Pylon.Terminate(); /* Releases all pylon resources. */ Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); Environment.Exit(1); } }
/// <summary> /// Configure device and report frame format that will be used during streaming. /// This method must return a proper ImageDescriptor so we can pre-allocate buffers. /// </summary> public ImageDescriptor Prepare() { Open(); if (deviceHandle == null || !deviceHandle.IsValid) { return(ImageDescriptor.Invalid); } firstOpen = false; // Get the configured framerate for recording support. resultingFramerate = PylonHelper.GetResultingFramerate(deviceHandle); SpecificInfo specific = summary.Specific as SpecificInfo; string streamFormatSymbol = specific.StreamFormat; bool hasWidth = Pylon.DeviceFeatureIsReadable(deviceHandle, "Width"); bool hasHeight = Pylon.DeviceFeatureIsReadable(deviceHandle, "Height"); bool hasPixelFormat = Pylon.DeviceFeatureIsReadable(deviceHandle, "PixelFormat"); bool canComputeImageDescriptor = hasWidth && hasHeight && hasPixelFormat; if (!canComputeImageDescriptor) { return(ImageDescriptor.Invalid); } int width = (int)Pylon.DeviceGetIntegerFeature(deviceHandle, "Width"); int height = (int)Pylon.DeviceGetIntegerFeature(deviceHandle, "Height"); string pixelFormat = Pylon.DeviceFeatureToString(deviceHandle, "PixelFormat"); EPylonPixelType pixelType = Pylon.PixelTypeFromString(pixelFormat); if (pixelType == EPylonPixelType.PixelType_Undefined) { return(ImageDescriptor.Invalid); } // Note: the image provider will perform the Bayer conversion itself and only output two formats. // - Y800 for anything monochrome. // - RGB32 for anything color. imageProvider.SetDebayering(specific.Bayer8Conversion); bool isBayer = Pylon.IsBayer(pixelType); bool isBayer8 = PylonHelper.IsBayer8(pixelType); bool bayerColor = (isBayer && !isBayer8) || (isBayer8 && specific.Bayer8Conversion == Bayer8Conversion.Color); bool color = !Pylon.IsMono(pixelType) || bayerColor; ImageFormat format = color ? ImageFormat.RGB32 : ImageFormat.Y800; finishline.Prepare(width, height, format, resultingFramerate); if (finishline.Enabled) { height = finishline.Height; resultingFramerate = finishline.ResultingFramerate; } int bufferSize = ImageFormatHelper.ComputeBufferSize(width, height, format); bool topDown = true; return(new ImageDescriptor(format, width, height, topDown, bufferSize)); }
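/* This Prepare() variant delegates framerate retrieval to PylonHelper.GetResultingFramerate().
 * Based on the inline logic in the earlier Prepare() variant, the helper presumably tries the
 * older "ResultingFrameRateAbs" feature name first and falls back to "ResultingFrameRate"; the
 * exact contents of the helper are an assumption. */
public static float GetResultingFramerate(PYLON_DEVICE_HANDLE deviceHandle)
{
    float framerate = 0;

    if (Pylon.DeviceFeatureIsReadable(deviceHandle, "ResultingFrameRateAbs"))
        framerate = (float)Pylon.DeviceGetFloatFeature(deviceHandle, "ResultingFrameRateAbs");
    else if (Pylon.DeviceFeatureIsReadable(deviceHandle, "ResultingFrameRate"))
        framerate = (float)Pylon.DeviceGetFloatFeature(deviceHandle, "ResultingFrameRate");

    return framerate;
}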
static void mmMain(string[] args) { PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */ try { uint numDevices; /* Number of devices available. */ #if DEBUG /* This is a special debug setting needed only for GigE cameras. * See 'Building Applications with pylon' in the programmer's guide. */ Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/); #endif /* Before using any pylon methods, the pylon runtime must be initialized. */ Pylon.Initialize(); /* Enumerate all camera devices. You must call * Pylon.EnumerateDevices() before creating a device. */ numDevices = Pylon.EnumerateDevices(); if (0 == numDevices) { throw new Exception("No devices found."); } /* Get a handle for the first device found. */ hDev = Pylon.CreateDeviceByIndex(0); /* Before using the device, it must be opened. Open it for configuring * parameters and for grabbing images. */ Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream); /* Print out the name of the camera we are using. */ { bool isReadable; isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName"); if (isReadable) { string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName"); Console.WriteLine("Using camera {0}", name); } } /* Demonstrate how to check the accessibility of a feature. */ demonstrateAccessibilityCheck(hDev); Console.WriteLine(""); /* Demonstrate how to handle integer camera parameters. */ demonstrateIntFeature(hDev); Console.WriteLine(""); /* Demonstrate how to handle floating point camera parameters. */ demonstrateFloatFeature(hDev); Console.WriteLine(""); /* Demonstrate how to handle boolean camera parameters. */ demonstrateBooleanFeature(hDev); Console.WriteLine(""); /* Each feature can be read as a string and also set as a string. */ demonstrateFromStringToString(hDev); Console.WriteLine(""); /* Demonstrate how to handle enumeration camera parameters. */ demonstrateEnumFeature(hDev); Console.WriteLine(""); /* Demonstrate how to iterate enumeration entries. */ demonstrateEnumIteration(hDev); Console.WriteLine(""); /* Demonstrate how to execute actions. */ demonstrateCommandFeature(hDev); Console.WriteLine(""); /* Display category nodes. */ demonstrateCategory(hDev); /* Clean up. Close and release the pylon device. */ Pylon.DeviceClose(hDev); Pylon.DestroyDevice(hDev); /* Shut down the pylon runtime system. Don't call any pylon method after * calling Pylon.Terminate(). */ Pylon.Terminate(); Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); } catch (Exception e) { /* Retrieve the error message. */ string msg = GenApi.GetLastErrorMessage() + "" + GenApi.GetLastErrorDetail(); Console.Error.WriteLine("Exception caught:"); Console.Error.WriteLine(e.Message); if (msg.Length > 0) { Console.Error.WriteLine("Last error message:"); Console.Error.WriteLine(msg); } try { if (hDev.IsValid) { /* ... Close and release the pylon device. */ if (Pylon.DeviceIsOpen(hDev)) { Pylon.DeviceClose(hDev); } Pylon.DestroyDevice(hDev); } } catch (Exception) { /*No further handling here.*/ } Pylon.Terminate(); /* Releases all pylon resources. */ Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); Environment.Exit(1); } }
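/* The method above calls several demonstrate*() helpers that are not reproduced in this excerpt.
 * As one example, a minimal stand-in for demonstrateIntFeature() could look like the sketch below,
 * using only calls that appear elsewhere in these samples; the original sample's body may differ
 * (e.g. it may also query the minimum, maximum and increment). */
private static void demonstrateIntFeature(PYLON_DEVICE_HANDLE hDev)
{
    string featureName = "Width";

    if (Pylon.DeviceFeatureIsReadable(hDev, featureName))
    {
        long value = Pylon.DeviceGetIntegerFeature(hDev, featureName);
        Console.WriteLine("{0}: {1}", featureName, value);

        if (Pylon.DeviceFeatureIsWritable(hDev, featureName))
        {
            /* Writing back the current value always respects the feature's limits and increment. */
            Pylon.DeviceSetIntegerFeature(hDev, featureName, value);
        }
    }
}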
static int callbackCounter = 0; /* Will be incremented by the callback function. */ static void Main(string[] args) { PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */ try { uint numDevices; /* Number of available devices. */ PYLON_DEVICECALLBACK_HANDLE hCb; /* Required for deregistering the callback. */ int loopCount; /* Counter. */ #if DEBUG bool isGigECamera; /* 1 if the device is a GigE device. */ #endif DeviceCallbackHandler callbackHandler = new DeviceCallbackHandler(); /* Handles callbacks from a device. */ /* Before using any pylon methods, the pylon runtime must be initialized. */ Pylon.Initialize(); /* Enumerate all camera devices. You must call * PylonEnumerateDevices() before creating a device. */ numDevices = Pylon.EnumerateDevices(); if (0 == numDevices) { throw new Exception("No devices found."); } /* Get a handle for the first device found. */ hDev = Pylon.CreateDeviceByIndex(0); /* Before using the device, it must be opened. Open it for configuring * parameters, for grabbing images, and for grabbing events. */ Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream); /* Print out the name of the camera we are using. */ { bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName"); if (isReadable) { string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName"); Console.WriteLine("Using camera {0}", name); } } /* Register the callback function. */ callbackHandler.CallbackEvent += new DeviceCallbackHandler.DeviceCallback(removalCallbackFunction); hCb = Pylon.DeviceRegisterRemovalCallback(hDev, callbackHandler); #if DEBUG /* For GigE cameras, the application periodically sends heartbeat signals to the camera to keep the * connection to the camera alive. If the camera doesn't receive heartbeat signals within the time * period specified by the heartbeat timeout counter, the camera resets the connection. * When the application is stopped by the debugger, the application cannot create the heartbeat signals. * For that reason, the pylon runtime extends the heartbeat timeout in debug mode to 5 minutes to allow * debugging. For GigE cameras, we will set the heartbeat timeout to a shorter period before testing the * callbacks. * The heartbeat mechanism is also used for detection of device removal. When the pylon runtime doesn't * receive an acknowledge for the heartbeat signal, it is assumed that the device has been removed. A * removal callback will be fired in that case. * By decreasing the heartbeat timeout in debug mode for GigE cameras, the surprise removal will be noticed sooner than set by the pylon runtime. */ { /* Find out if we are using a GigE camera. */ PYLON_DEVICE_INFO_HANDLE hDi = Pylon.DeviceGetDeviceInfoHandle(hDev); string deviceClass = Pylon.DeviceInfoGetPropertyValueByName(hDi, Pylon.cPylonDeviceInfoDeviceClassKey); isGigECamera = deviceClass == "BaslerGigE"; /* Adjust the heartbeat timeout. */ if (isGigECamera) { setHeartbeatTimeout(hDev, 1000); /* 1000 ms */ } } #endif /* Ask the user to disconnect a device. */ loopCount = 20 * 4; Console.WriteLine("Please disconnect the device (timeout {0} s) ", loopCount / 4); /* Wait until the removal has been noticed and the callback function has been fired. */ do { /* Print a . every few seconds to tell the user we're waiting for the callback. */ if (--loopCount % 4 == 0) { Console.Write("."); } System.Threading.Thread.Sleep(250); }while (callbackCounter < 1 && loopCount >= 0); /* Check loopCount so we won't wait forever. 
*/ if (callbackCounter < 1) { Console.WriteLine("\nTimeout expired. Device hasn't been removed."); } /* Clean up. */ /* ... Deregister the removal callback. */ Pylon.DeviceDeregisterRemovalCallback(hDev, hCb); /* ....Close and release the pylon device. */ Pylon.DeviceClose(hDev); Pylon.DestroyDevice(hDev); /* Shut down the pylon runtime system. Don't call any pylon method after * calling Pylon.Terminate(). */ Pylon.Terminate(); Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); } catch (Exception e) { /* Retrieve the error message. */ string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail(); Console.Error.WriteLine("Exception caught:"); Console.Error.WriteLine(e.Message); if (msg != "\n") { Console.Error.WriteLine("Last error message:"); Console.Error.WriteLine(msg); } try { if (hDev.IsValid) { /* ... Close and release the pylon device. */ if (Pylon.DeviceIsOpen(hDev)) { Pylon.DeviceClose(hDev); } Pylon.DestroyDevice(hDev); } } catch (Exception) { /*No further handling here.*/ } Pylon.Terminate(); /* Releases all pylon resources. */ Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); Environment.Exit(1); } }
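/* The removal sample registers removalCallbackFunction(), which is not shown in this excerpt.
 * A plausible body, assuming the DeviceCallbackHandler.DeviceCallback delegate supplies the handle
 * of the removed device, only needs to report the event and increment callbackCounter so the
 * waiting loop above can finish. */
static void removalCallbackFunction(PYLON_DEVICE_HANDLE hDev)
{
    Console.WriteLine("\nCallback function for removal of a device has been fired.");
    callbackCounter++;
}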
public bool Initialize(int _ID, string _DeviceID) { bool _Result = false; if (0 == AvailableDeviceCount) { return(false); } if (_ID >= AvailableDeviceCount) { return(false); } CameraNumber = _ID; for (int iLoopCount = 0; iLoopCount < AvailableDeviceCount; ++iLoopCount) { try { DeviceHandle = new PYLON_DEVICE_HANDLE(); DeviceHandle = Pylon.CreateDeviceByIndex((uint)iLoopCount); Pylon.DeviceOpen(DeviceHandle, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream); IsAvailable = Pylon.DeviceFeatureIsAvailable(DeviceHandle, "EnumEntry_PixelFormat_Mono8"); if (false == IsAvailable) { DestroyDeviceHandle(); return(false); } string _DeviceIDTemp = Pylon.DeviceFeatureToString(DeviceHandle, "DeviceID"); if (_DeviceID != _DeviceIDTemp) { DestroyDeviceHandle(); continue; } Pylon.DeviceFeatureFromString(DeviceHandle, "PixelFormat", "Mono8"); IsAvailable = Pylon.DeviceFeatureIsAvailable(DeviceHandle, "EnumEntry_TriggerSelector_AcquisitionStart"); if (false == IsAvailable) { DestroyDeviceHandle(); return(false); } Pylon.DeviceFeatureFromString(DeviceHandle, "TriggerSelector", "AcquisitionStart"); Pylon.DeviceFeatureFromString(DeviceHandle, "TriggerMode", "Off"); _Result = true; break; } catch { CLogManager.AddInspectionLog(CLogManager.LOG_TYPE.ERR, "CBaslerManager Initialize Exception!!", CLogManager.LOG_LEVEL.LOW); _Result = false; } } if (false == _Result) { return(false); } ThreadContinuousGrab = new Thread(ThreadContinuousGrabFunc); ThreadContinuousGrab.IsBackground = true; IsThreadContinuousGrabExit = false; IsThreadContinuousGrabTrigger = false; ThreadContinuousGrab.Start(); PauseEvent.Reset(); return(true); }
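/* Initialize() above calls DestroyDeviceHandle(), which is not shown in this excerpt. A plausible
 * sketch, using only the device lifetime calls seen in the other samples, is given below; the
 * actual helper in this codebase may do more (e.g. logging or nulling the handle). */
private void DestroyDeviceHandle()
{
    if (DeviceHandle != null && DeviceHandle.IsValid)
    {
        if (Pylon.DeviceIsOpen(DeviceHandle))
        {
            Pylon.DeviceClose(DeviceHandle);
        }
        Pylon.DestroyDevice(DeviceHandle);
    }
}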
/* There are camera features that behave like enumerations. These features can take a value from a fixed * set of possible values. One example is the pixel format feature. This function illustrates how to deal with * enumeration features. * */ private static void demonstrateEnumFeature(PYLON_DEVICE_HANDLE hDev) { string value; /* The current value of the feature. */ bool isWritable, supportsMono8, supportsYUV422Packed, supportsMono16; /* The allowed values for an enumeration feature are represented as strings. Use the * PylonDeviceFeatureFromString() and PylonDeviceFeatureToString() methods for setting and getting * the value of an enumeration feature. */ /* Get the current value of the enumeration feature. */ value = Pylon.DeviceFeatureToString(hDev, "PixelFormat"); Console.WriteLine("PixelFormat: {0}", value); /* * For an enumeration feature, the pylon Viewer's "Feature Documentation" window lists the * names of the possible values. Some of the values might not be supported by the device. * To check if a certain "SomeValue" value for a "SomeFeature" feature can be set, call the * PylonDeviceFeatureIsAvailable() function with "EnumEntry_SomeFeature_SomeValue" as an argument. */ /* Check to see if the Mono8 pixel format can be set. */ supportsMono8 = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8"); Console.WriteLine("Mono8 {0} a supported value for the PixelFormat feature.", supportsMono8 ? "is" : "isn't"); /* Check to see if the YUV422Packed pixel format can be set. */ supportsYUV422Packed = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_YUV422Packed"); Console.WriteLine("YUV422Packed {0} a supported value for the PixelFormat feature.", supportsYUV422Packed ? "is" : "isn't"); /* Check to see if the Mono16 pixel format can be set. */ supportsMono16 = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono16"); Console.WriteLine("Mono16 {0} a supported value for the PixelFormat feature.", supportsMono16 ? "is" : "isn't"); /* Before writing a value, we recommend checking to see if the enumeration feature itself is * currently writable. */ isWritable = Pylon.DeviceFeatureIsWritable(hDev, "PixelFormat"); if (isWritable) { /* The PixelFormat feature is writable. Set it to one of the supported values. */ if (supportsMono16) { Console.WriteLine("Setting PixelFormat to Mono16."); Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono16"); } else if (supportsYUV422Packed) { Console.WriteLine("Setting PixelFormat to YUV422Packed."); Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "YUV422Packed"); } else if (supportsMono8) { Console.WriteLine("Setting PixelFormat to Mono8."); Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8"); } /* Reset the PixelFormat feature to its previous value. */ Pylon.DeviceFeatureFromString(hDev, "PixelFormat", value); } }
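/* The function above checks individual "EnumEntry_PixelFormat_<Value>" entries one by one. The
 * same pattern can be used to list which formats from a known candidate set a camera supports;
 * the candidate list below is just an illustrative subset and the method name is invented. */
private static void listSupportedPixelFormats(PYLON_DEVICE_HANDLE hDev)
{
    string[] candidates = { "Mono8", "Mono16", "YUV422Packed", "BayerRG8", "RGB8Packed" };

    foreach (string entry in candidates)
    {
        bool supported = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_" + entry);
        Console.WriteLine("{0} {1} a supported value for the PixelFormat feature.", entry, supported ? "is" : "isn't");
    }
}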
private void initStreamMode() { _width = (int)Pylon.DeviceGetIntegerFeature(hDev, "Width"); _height = (int)Pylon.DeviceGetIntegerFeature(hDev, "Height"); /* Print out the name of the camera we are using. */ bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName"); if (isReadable) { string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName"); Console.WriteLine("Using camera {0}.", name); } /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */ /* ... Check first to see if the device supports the Mono8 format. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8"); if (!isAvail) { /* Feature is not available. */ throw new Exception("Device doesn't support the Mono8 pixel format."); } /* ... Set the pixel format to Mono8. */ bool isWritable = Pylon.DeviceFeatureIsWritable(hDev, "PixelFormat"); if (isWritable) { Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8"); } /* Disable acquisition start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart"); if (isAvail) { isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerSelector"); if (isWritable) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart"); } isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerMode"); if (isWritable) { Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } } /* Disable frame start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart"); if (isAvail) { isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerSelector"); if (isWritable) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart"); } isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerMode"); if (isWritable) { Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } } /* We will use the Continuous frame mode, i.e., the camera delivers * images continuously. */ isWritable = Pylon.DeviceFeatureIsWritable(hDev, "AcquisitionMode"); if (isWritable) { Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous"); } /* For GigE cameras, we recommend increasing the packet size for better * performance. When the network adapter supports jumbo frames, set the packet * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size * to 1500. */ /* ... Check first to see if the GigE camera packet size parameter is supported and if it is writable. */ isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize"); if (isAvail) { /* ... The device supports the packet size feature. Set a value. */ isWritable = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize"); if (isWritable) { Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", GevSCPSPacketSize); } } _isModelValid = isAvail; /* Determine the required size of the grab buffer. */ payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize")); /* Image grabbing is done using a stream grabber. * A device may be able to provide different streams. A separate stream grabber must * be used for each stream. In this sample, we create a stream grabber for the default * stream, i.e., the first stream ( index == 0 ). */ /* Get the number of streams supported by the device and the transport layer. 
*/ nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev); if (nStreams < 1) { throw new Exception("The transport layer doesn't support image streams."); } /* Create and open a stream grabber for the first channel. */ hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0); Pylon.StreamGrabberOpen(hGrabber); /* Get a handle for the stream grabber's wait object. The wait object * allows waiting for buffers to be filled with grabbed data. */ hWait = Pylon.StreamGrabberGetWaitObject(hGrabber); /* We must tell the stream grabber the number and size of the buffers * we are using. */ /* .. We will not use more than NUM_BUFFERS for grabbing. */ Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS); /* .. We will not use buffers bigger than payloadSize bytes. */ Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize); /* Allocate the resources required for grabbing. After this, critical parameters * that impact the payload size must not be changed until FinishGrab() is called. */ Pylon.StreamGrabberPrepareGrab(hGrabber); /* Before using the buffers for grabbing, they must be registered at * the stream grabber. For each registered buffer, a buffer handle * is returned. After registering, these handles are used instead of the * buffer objects pointers. The buffer objects are held in a dictionary, * that provides access to the buffer using a handle as key. */ buffers = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >(); int i; for (i = 0; i < NUM_BUFFERS; ++i) { PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true); PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer); buffers.Add(handle, buffer); } /* Feed the buffers into the stream grabber's input queue. For each buffer, the API * allows passing in an integer as additional context information. This integer * will be returned unchanged when the grab is finished. In our example, we use the index of the * buffer as context information. */ i = 0; foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers) { Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++); } }
const uint NUM_EVENT_BUFFERS = 20; /* Number of buffers used for grabbing. */ static void Main(string[] args) { PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */ try { uint numDevices; /* Number of available devices. */ PYLON_STREAMGRABBER_HANDLE hStreamGrabber; /* Handle for the pylon stream grabber. */ PYLON_EVENTGRABBER_HANDLE hEventGrabber; /* Handle for the event grabber used for receiving events. */ PYLON_EVENTADAPTER_HANDLE hEventAdapter; /* Handle for the event adapter used for dispatching events. */ PYLON_WAITOBJECT_HANDLE hWaitStream; /* Handle used for waiting for a grab to be finished. */ PYLON_WAITOBJECT_HANDLE hWaitEvent; /* Handle used for waiting for an event message. */ PYLON_WAITOBJECTS_HANDLE hWaitObjects; /* Container allowing waiting for multiple wait objects. */ NODEMAP_HANDLE hNodeMap; /* Handle for the node map containing the * camera parameters. */ NODE_CALLBACK_HANDLE hCallback; /* Used for deregistering a callback function. */ NODE_HANDLE hNode; /* Handle for a camera parameter. */ uint payloadSize; /* Size of an image frame in bytes. */ Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > buffers; /* Holds handles and buffers used for grabbing. */ PylonGrabResult_t grabResult; /* Stores the result of a grab operation. */ NodeCallbackHandler callbackHandler = new NodeCallbackHandler(); /* Handles incoming callbacks. */ int nGrabs; /* Counts the number of buffers grabbed. */ uint nStreams; /* The number of streams the device provides. */ bool isAvail; /* Used for checking feature availability. */ bool isReady; /* Used as an output parameter. */ int i; /* Counter. */ PylonEventResult_t eventMsg = new PylonEventResult_t(); /* Event data container. */ long sfncVersionMajor; /* The major number of the Standard Feature Naming Convention (SFNC) * version used by the camera device. */ #if DEBUG /* This is a special debug setting needed only for GigE cameras. * See 'Building Applications with pylon' in the programmer's guide */ Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/); #endif /* Before using any pylon methods, the pylon runtime must be initialized. */ Pylon.Initialize(); /* Enumerate all camera devices. You must call * PylonEnumerateDevices() before creating a device. */ numDevices = Pylon.EnumerateDevices(); if (0 == numDevices) { throw new Exception("No devices found."); } /* Get a handle for the first device found. */ hDev = Pylon.CreateDeviceByIndex(0); /* Before using the device, it must be opened. Open it for configuring * parameters, for grabbing images, and for grabbing events. */ Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream | Pylon.cPylonAccessModeEvent); /* Print out the name of the camera we are using. */ { bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName"); if (isReadable) { string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName"); Console.WriteLine("Using camera {0}", name); } } /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */ /* ... Check first to see if the device supports the Mono8 format. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8"); if (!isAvail) { /* Feature is not available. */ throw new Exception("Device doesn't support the Mono8 pixel format."); } /* ... Set the pixel format to Mono8. */ Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8"); /* Disable acquisition start trigger if available. 
*/ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* Disable frame burst start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* Disable frame start trigger if available. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* We will use the Continuous frame mode, i.e., the camera delivers * images continuously. */ Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous"); /* For GigE cameras, we recommend increasing the packet size for better * performance. If the network adapter supports jumbo frames, set the packet * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size * to 1500. */ /* ... Check first to see if the GigE camera packet size parameter is supported and if it is writable. */ isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize"); if (isAvail) { /* ... The device supports the packet size feature. Set a value. */ Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500); } /* Determine the required size of the grab buffer. */ payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize")); /* Determine the major number of the SFNC version used by the camera device. */ if (Pylon.DeviceFeatureIsAvailable(hDev, "DeviceSFNCVersionMajor")) { sfncVersionMajor = Pylon.DeviceGetIntegerFeature(hDev, "DeviceSFNCVersionMajor"); } else { /* No SFNC version information is provided by the camera device. */ sfncVersionMajor = 0; } /* Enable camera events. */ /* Select the end-of-exposure event.*/ Pylon.DeviceFeatureFromString(hDev, "EventSelector", "ExposureEnd"); /* Enable the event. Select the enumeration entry name depending on the SFNC version used by the camera device. */ if (sfncVersionMajor >= 2) { Pylon.DeviceFeatureFromString(hDev, "EventNotification", "On"); } else { Pylon.DeviceFeatureFromString(hDev, "EventNotification", "GenICamEvent"); } /* Image grabbing is done using a stream grabber. * A device may be able to provide different streams. A separate stream grabber must * be used for each stream. In this sample, we create a stream grabber for the default * stream, i.e., the first stream ( index == 0 ). */ /* Get the number of streams supported by the device and the transport layer. */ nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev); if (nStreams < 1) { throw new Exception("The transport layer doesn't support image streams"); } /* Create and open a stream grabber for the first channel. */ hStreamGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0); Pylon.StreamGrabberOpen(hStreamGrabber); /* Get a handle for the stream grabber's wait object. The wait object * allows waiting for buffers to be grabbed. */ hWaitStream = Pylon.StreamGrabberGetWaitObject(hStreamGrabber); /* We must tell the stream grabber the number and size of the buffers * we are using. */ /* .. We will not use more than NUM_BUFFERS for grabbing. */ Pylon.StreamGrabberSetMaxNumBuffer(hStreamGrabber, NUM_IMAGE_BUFFERS); /* .. 
We will not use buffers bigger than payloadSize bytes. */ Pylon.StreamGrabberSetMaxBufferSize(hStreamGrabber, payloadSize); /* Allocate the resources required for grabbing. After this, critical parameters * that impact the payload size must not be changed until FinishGrab() is called. */ Pylon.StreamGrabberPrepareGrab(hStreamGrabber); /* Before using the buffers for grabbing, they must be registered at * the stream grabber. For each registered buffer, a buffer handle * is returned. After registering, these handles are used instead of the * buffer object pointers. The buffer objects are held in a dictionary * that provides access to the buffer using a handle as key. */ buffers = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >(); for (i = 0; i < NUM_IMAGE_BUFFERS; ++i) { PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true); PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hStreamGrabber, ref buffer); buffers.Add(handle, buffer); } /* Feed the buffers into the stream grabber's input queue. For each buffer, the API * allows passing in an integer as additional context information. This integer * will be returned unchanged when the grab is finished. In our example, we use the index of the * buffer as context information. */ i = 0; foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers) { Pylon.StreamGrabberQueueBuffer(hStreamGrabber, pair.Key, i++); } /* The stream grabber is now prepared. As soon as the camera starts to acquire images, * the image data will be grabbed into the provided buffers. */ /* Create and prepare an event grabber. */ /* ... Get a handle for the event grabber. */ hEventGrabber = Pylon.DeviceGetEventGrabber(hDev); if (!hEventGrabber.IsValid) { /* The transport layer doesn't support event grabbers. */ throw new Exception("No event grabber supported."); } /* ... Tell the grabber how many buffers to use. */ Pylon.EventGrabberSetNumBuffers(hEventGrabber, NUM_EVENT_BUFFERS); /* ... Open the event grabber. */ Pylon.EventGrabberOpen(hEventGrabber); /* The event grabber is now ready * for receiving events. */ /* Retrieve the wait object that is associated with the event grabber. The event * will be signaled when an event message has been received. */ hWaitEvent = Pylon.EventGrabberGetWaitObject(hEventGrabber); /* For extracting the event data from an event message, an event adapter is used. */ hEventAdapter = Pylon.DeviceCreateEventAdapter(hDev); if (!hEventAdapter.IsValid) { /* The transport layer doesn't support event adapters. */ throw new Exception("No event adapter supported."); } /* Register the callback function for the ExposureEndEventFrameID parameter. */ /* ... Get the node map containing all parameters. */ hNodeMap = Pylon.DeviceGetNodeMap(hDev); /* Get the ExposureEndEventFrameID parameter. * Select the parameter name depending on the SFNC version used by the camera device. */ if (sfncVersionMajor >= 2) { hNode = GenApi.NodeMapGetNode(hNodeMap, "EventExposureEndFrameID"); } else { hNode = GenApi.NodeMapGetNode(hNodeMap, "ExposureEndEventFrameID"); } if (!hNode.IsValid) { /* There is no ExposureEndEventFrameID parameter. */ throw new Exception("There is no ExposureEndEventFrameID or EventExposureEndFrameID parameter!"); } /* ... Register the callback function. */ callbackHandler.CallbackEvent += new NodeCallbackHandler.NodeCallback(endOfExposureCallback); hCallback = GenApi.NodeRegisterCallback(hNode, callbackHandler); /* Put the wait objects into a container. */ /* ... Create the container.
*/ hWaitObjects = Pylon.WaitObjectsCreate(); /* ... Add the wait objects' handles. */ Pylon.WaitObjectsAdd(hWaitObjects, hWaitEvent); Pylon.WaitObjectsAdd(hWaitObjects, hWaitStream); /* Let the camera acquire images. */ Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart"); /* Grab NUM_GRABS images. */ nGrabs = 0; /* Counts the number of images grabbed. */ while (nGrabs < NUM_GRABS) { int bufferIndex; /* Index of the buffer. */ uint waitObjectIndex; /* Index of the wait object that is signalled. */ Byte min, max; /* Wait for either an image buffer to be grabbed or an event message to be received. Wait up to 1000 ms. */ isReady = Pylon.WaitObjectsWaitForAny(hWaitObjects, 1000, out waitObjectIndex); if (!isReady) { /* Timeout occurred. */ throw new Exception("Timeout. Neither grabbed an image nor received an event."); } if (0 == waitObjectIndex) { /* hWaitEvent has been signalled. At least one event message is available. Retrieve it. */ isReady = Pylon.EventGrabberRetrieveEvent(hEventGrabber, ref eventMsg); if (!isReady) { /* Oops. No event message available? We should never have reached this point. * Since the wait operation above returned without a timeout, an event message * should be available. */ throw new Exception("Failed to retrieve an event."); } /* Check to see if the event was successfully received. */ if (0 == eventMsg.ErrorCode) { /* Successfully received an event message. */ /* Pass the event message to the event adapter. The event adapter will * update the parameters related to events and will fire the callbacks * registered to event related parameters. */ Pylon.EventAdapterDeliverMessage(hEventAdapter, eventMsg); } else { Console.Error.WriteLine("Error when receiving an event: {0}", eventMsg.ErrorCode); } } else if (1 == waitObjectIndex) { /* hWaitStream has been signalled. The result of at least one grab * operation is available. Retrieve it. */ isReady = Pylon.StreamGrabberRetrieveResult(hStreamGrabber, out grabResult); if (!isReady) { /* Oops. No grab result available? We should never have reached this point. * Since the wait operation above returned without a timeout, a grab result * should be available. */ throw new Exception("Failed to retrieve a grab result."); } nGrabs++; /* Get the buffer index from the context information. */ bufferIndex = (int)grabResult.Context; /* Check to see if the image was grabbed successfully. */ if (grabResult.Status == EPylonGrabStatus.Grabbed) { /* Success. Perform image processing. Since we passed more than one buffer * to the stream grabber, the remaining buffers are filled while * we do the image processing. The processed buffer won't be touched by * the stream grabber until we pass it back to the stream grabber. */ PylonBuffer <Byte> buffer; /* Reference to the buffer attached to the grab result. */ /* Get the buffer from the dictionary. Since we also got the buffer index, * we could alternatively use an array, e.g. buffers[bufferIndex]. */ if (!buffers.TryGetValue(grabResult.hBuffer, out buffer)) { /* Oops. No buffer available? We should never have reached this point, * since all buffers are in the dictionary. */ throw new Exception("Failed to find the buffer associated with the handle returned in grab result."); } getMinMax(buffer.Array, out min, out max); Console.WriteLine("Grabbed frame {0} into buffer {1}. Min. gray value = {2}, Max. gray value = {3}", nGrabs, bufferIndex, min, max); } else if (grabResult.Status == EPylonGrabStatus.Failed) { Console.Error.WriteLine("Frame {0} wasn't grabbed successfully.
Error code = {1}", nGrabs, grabResult.ErrorCode); } /* Once finished with the processing, requeue the buffer to be filled again. */ Pylon.StreamGrabberQueueBuffer(hStreamGrabber, grabResult.hBuffer, bufferIndex); } } /* Clean up. */ /* ... Stop the camera. */ Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStop"); /* ... Switch off the events. */ Pylon.DeviceFeatureFromString(hDev, "EventSelector", "ExposureEnd"); Pylon.DeviceFeatureFromString(hDev, "EventNotification", "Off"); /* ... We must issue a cancel call to ensure that all pending buffers are put into the * stream grabber's output queue. */ Pylon.StreamGrabberCancelGrab(hStreamGrabber); /* ... The buffers can now be retrieved from the stream grabber. */ do { isReady = Pylon.StreamGrabberRetrieveResult(hStreamGrabber, out grabResult); } while (isReady); /* ... When all buffers are retrieved from the stream grabber, they can be deregistered. * After deregistering the buffers, it is safe to free the memory. */ foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers) { Pylon.StreamGrabberDeregisterBuffer(hStreamGrabber, pair.Key); pair.Value.Dispose(); } buffers = null; /* ... Release grabbing related resources. */ Pylon.StreamGrabberFinishGrab(hStreamGrabber); /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g., * the AOI width and height parameters) are unlocked and can be modified again. */ /* ... Close the stream grabber. */ Pylon.StreamGrabberClose(hStreamGrabber); /* ... Deregister the callback. */ GenApi.NodeDeregisterCallback(hNode, hCallback); /* ... Close the event grabber.*/ Pylon.EventGrabberClose(hEventGrabber); /* ... Release the event adapter. */ Pylon.DeviceDestroyEventAdapter(hDev, hEventAdapter); /* ... Release the wait object container. */ Pylon.WaitObjectsDestroy(hWaitObjects); /* ... Close and release the pylon device. The stream grabber becomes invalid * after closing the pylon device. Don't call stream grabber related methods after * closing or releasing the device. */ Pylon.DeviceClose(hDev); Pylon.DestroyDevice(hDev); /* ... Shut down the pylon runtime system. Don't call any pylon method after * calling PylonTerminate(). */ Pylon.Terminate(); Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); } catch (Exception e) { /* Retrieve the error message. */ string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail(); Console.Error.WriteLine("Exception caught:"); Console.Error.WriteLine(e.Message); if (msg != "\n") { Console.Error.WriteLine("Last error message:"); Console.Error.WriteLine(msg); } try { if (hDev.IsValid) { /* Disable the software trigger. */ Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); /* ... Close and release the pylon device. */ if (Pylon.DeviceIsOpen(hDev)) { Pylon.DeviceClose(hDev); } Pylon.DestroyDevice(hDev); } } catch (Exception) { /*No further handling here.*/ } Pylon.Terminate(); /* Releases all pylon resources. */ Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); Environment.Exit(1); } }