/* Any parameter value, regardless of its underlying type, can be read and written
 * as a string. This function round-trips the integer "Width" parameter through its
 * string representation and then shows the error raised when a string cannot be
 * converted to the feature's native type. */
private static void demonstrateFromStringToString(PYLON_DEVICE_HANDLE hDev)
{
    const string featureName = "Width"; /* Feature accessed via its string form. */

    /* Read the current value of the feature as a string. */
    string currentValue = Pylon.DeviceFeatureToString(hDev, featureName);
    Console.WriteLine("{0}: {1}", featureName, currentValue);

    /* Writing a feature from a string uses PylonDeviceFeatureFromString(). When
     * the string cannot be converted to the feature's type, an error results. */
    try
    {
        Pylon.DeviceFeatureFromString(hDev, featureName, "fourty-two"); /* Not convertible to an integer. */
    }
    catch (Exception e)
    {
        /* Collect the low-level error information reported by the runtime. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.WriteLine("Exception caught:");
        Console.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.WriteLine("Last error message:");
            Console.WriteLine(msg);
        }
    }
}
/* BackgroundWorker entry point: switches the camera to continuous acquisition,
 * starts it, and grabs in a loop until cancellation is requested.
 * Fix: the original used an unchecked 'as' cast on sender, which would raise a
 * NullReferenceException later if sender were not a BackgroundWorker. */
private void _bgWorker_DoWork(object sender, DoWorkEventArgs e)
{
    var worker = sender as BackgroundWorker;
    if (worker == null)
    {
        /* Fail fast with a meaningful exception instead of an NRE deep in the loop. */
        throw new ArgumentException("Expected sender to be a BackgroundWorker.", "sender");
    }

    Pylon.DeviceFeatureFromString(_hDev, "AcquisitionMode", "Continuous");
    Pylon.DeviceExecuteCommandFeature(_hDev, "AcquisitionStart");
    notifyRunningMessage("Start and Wait....");

    while (true)
    {
        if (worker.CancellationPending)
        {
            e.Cancel = true;
            break;
        }
        else
        {
            try
            {
                start(worker);
            }
            catch (Exception ex)
            {
                /* Best-effort: report the error and keep grabbing. */
                notifyError(ex.Message);
            }
        }
    }
}
/* Activates the chunk mode and enables the frame counter and CRC checksum chunks
 * when the connected device offers them.
 * Improvements: the duplicated select-and-enable sequence is factored into a
 * helper, and the unsupported-device error now throws NotSupportedException
 * (still caught by existing catch(Exception) handlers). */
private void setChunkModeFeatures()
{
    /* Before enabling individual chunks, the chunk mode in general must be activated. */
    bool isAvail = Pylon.DeviceFeatureIsWritable(_pylonDevHandle, "ChunkModeActive");
    if (!isAvail)
    {
        throw new NotSupportedException("The device doesn't support the chunk mode.");
    }

    /* Activate the chunk mode. */
    Pylon.DeviceSetBooleanFeature(_pylonDevHandle, "ChunkModeActive", true);

    /* Enable the frame counter chunk feature. */
    enableChunkIfAvailable("Framecounter");

    /* Enable the CRC checksum chunk feature.
     * Note: enabling the CRC checksum chunk is not a prerequisite for using
     * chunks; chunks can also be handled with it disabled. */
    enableChunkIfAvailable("PayloadCRC16");
}

/* Selects the named chunk and activates it when the device supports it;
 * does nothing otherwise. */
private void enableChunkIfAvailable(string chunkName)
{
    /* Is the chunk feature available on this device? */
    if (!Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, "EnumEntry_ChunkSelector_" + chunkName))
    {
        return;
    }

    /* Select the chunk feature. */
    Pylon.DeviceFeatureFromString(_pylonDevHandle, "ChunkSelector", chunkName);

    /* Can the chunk feature be activated? */
    if (Pylon.DeviceFeatureIsWritable(_pylonDevHandle, "ChunkEnable"))
    {
        /* Activate the chunk feature. */
        Pylon.DeviceSetBooleanFeature(_pylonDevHandle, "ChunkEnable", true);
    }
}
/* Writes an enumeration feature (e.g. a line selector/source) to the given value.
 * Returns true when the corresponding enum entry exists on the device and the
 * value was written; false when the device does not offer that entry. */
public bool SetLineParameter( string strName, string strValue )
{
    string entryName = "EnumEntry_" + strName + "_" + strValue;

    /* The device does not expose this enum value. */
    if (!Pylon.DeviceFeatureIsAvailable(m_hDevice, entryName))
    {
        return false;
    }

    Pylon.DeviceFeatureFromString(m_hDevice, strName, strValue);
    return true;
}
/* Some camera features, such as AcquisitionStart, represent commands. This
 * function shows how to execute a command feature by loading the default
 * user set. */
private static void demonstrateCommandFeature(PYLON_DEVICE_HANDLE hDev)
{
    /* The user set selector must point at the default set before the load
     * command is executed. Because the focus here is the command feature,
     * the recommended availability checks for the user-set features are
     * skipped. */

    /* Choose the default set (which includes one of the factory setups). */
    Pylon.DeviceFeatureFromString(hDev, "UserSetSelector", "Default");

    /* Execute the user set load command. */
    Console.WriteLine("Loading the default settings.");
    Pylon.DeviceExecuteCommandFeature(hDev, "UserSetLoad");
}
/* Configures the device for free-running Mono8 streaming: pixel format Mono8,
 * all start triggers switched off, continuous acquisition and - on GigE
 * devices - a larger packet size.
 * NOTE(review): the return value reflects only whether the GigE packet size
 * feature was writable, so it is false for non-GigE cameras even when the rest
 * of the setup succeeded - confirm callers expect this. */
private bool setStreamModeFeature()
{
    /* The camera must support 8-bit mono output. */
    bool supported = Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, "EnumEntry_PixelFormat_Mono8");
    if (!supported)
    {
        /* Feature is not available. */
        throw new Exception("Device doesn't support the Mono8 pixel format.");
    }

    /* Select the Mono8 pixel format. */
    Pylon.DeviceFeatureFromString(_pylonDevHandle, "PixelFormat", "Mono8");

    /* Switch off the acquisition start trigger when the device offers it. */
    supported = Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, "EnumEntry_TriggerSelector_AcquisitionStart");
    if (supported)
    {
        Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerSelector", "AcquisitionStart");
        Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerMode", "Off");
    }

    /* Switch off the frame start trigger when the device offers it. */
    supported = Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, "EnumEntry_TriggerSelector_FrameStart");
    if (supported)
    {
        Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerSelector", "FrameStart");
        Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerMode", "Off");
    }

    /* Deliver images continuously rather than one frame per trigger. */
    Pylon.DeviceFeatureFromString(_pylonDevHandle, "AcquisitionMode", "Continuous");

    /* For GigE cameras, a larger packet size improves performance. 8192 bytes
     * requires a network adapter with jumbo-frame support. Only attempt the
     * write when the parameter exists and is writable. */
    supported = Pylon.DeviceFeatureIsWritable(_pylonDevHandle, "GevSCPSPacketSize");
    if (supported)
    {
        Pylon.DeviceSetIntegerFeature(_pylonDevHandle, "GevSCPSPacketSize", 8192);
    }

    return supported;
}
/// <summary>
/// Writes a list of features to the open device; the runtime type of each
/// feature's value selects the matching Pylon setter.
/// </summary>
/// <param name="features">Features to apply to the device.</param>
/// <returns>true when every feature was available and writable; otherwise false
/// (details are accumulated in _latestMessage).</returns>
public bool SetPylonDeviceHandleFeatures(List <PylonFeature> features)
{
    var success = true;
    _latestMessage = "";
    foreach (var feature in features)
    {
        /* NOTE(review): availability is checked with feature.Name but writability
         * and the write itself use feature.Key - confirm this mismatch is
         * intentional. */
        bool isAvail = Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, feature.Name);
        bool isWritable = Pylon.DeviceFeatureIsWritable(_pylonDevHandle, feature.Key);

        if (!isAvail)
        {
            _latestMessage += "Device doesn't support the " + feature.Name + ";";
            success = false;
        }
        else if (!isWritable)
        {
            _latestMessage += "Writable doesn't support the " + feature.Name + ";";
            success = false;
        }
        else
        {
            /* Dispatch on the value's runtime type instead of comparing
             * Type.Name strings. A null or otherwise-unsupported value is
             * ignored (the original crashed with an NRE on null). */
            object value = feature.Value;
            if (value is bool)
            {
                Pylon.DeviceSetBooleanFeature(_pylonDevHandle, feature.Key, (bool)value);
            }
            else if (value is string)
            {
                Pylon.DeviceFeatureFromString(_pylonDevHandle, feature.Key, (string)value);
            }
            else if (value is int)
            {
                Pylon.DeviceSetIntegerFeature(_pylonDevHandle, feature.Key, (int)value);
            }
            else if (value is double)
            {
                Pylon.DeviceSetFloatFeature(_pylonDevHandle, feature.Key, (double)value);
            }
        }
    }
    return success;
}
/// <summary>
/// Sets a single camera configuration feature when it is implemented and
/// currently writable; the value's runtime type selects the Pylon setter.
/// </summary>
/// <param name="device">camera device</param>
/// <param name="featureName">name of config</param>
/// <param name="value">value of config (int, float or string; null is ignored)</param>
private void SetConfig(PYLON_DEVICE_HANDLE device, string featureName, object value)
{
    /* The useless catch (Exception ex) { throw ex; } wrapper was removed: it
     * destroyed the original stack trace and performed no handling. Exceptions
     * now propagate unchanged. */
    if (value == null)
    {
        return;
    }

    /* Only write the feature when the device implements it and it is writable. */
    bool isAvailable = Pylon.DeviceFeatureIsImplemented(device, featureName);
    bool isWritable = Pylon.DeviceFeatureIsWritable(device, featureName);
    if (!isAvailable || !isWritable)
    {
        return;
    }

    /* Dispatch on the value's runtime type; other types are ignored, as before. */
    if (value is int)
    {
        Pylon.DeviceSetIntegerFeature(device, featureName, (int)value);
    }
    else if (value is float)
    {
        Pylon.DeviceSetFloatFeature(device, featureName, (float)value);
    }
    else if (value is string)
    {
        Pylon.DeviceFeatureFromString(device, featureName, (string)value);
    }
}
// 打开相机: opens the first camera found and configures it for free-running grabbing.
// Fix: when the device does not support Mono8, the original showed the error box
// but then set "PixelFormat" to Mono8 anyway, which would throw uncaught.
// The method now returns early instead. (NOTE(review): the DCF_Name parameter is
// unused here, and on the early-return path the device is left open - confirm.)
public override void OpenCameraSoftTrigger(string DCF_Name)
{
    bool isAvail;

    Pylon.Initialize();

    /* Enumerate all camera devices. You must call EnumerateDevices() before
     * creating a device. */
    numDevices = Pylon.EnumerateDevices();
    if (0 == numDevices)
    {
        MessageBox.Show("没有找到相机");
        return;
    }
    else
    {
        /* Get a handle for the first device found. */
        hDev = Pylon.CreateDeviceByIndex(0);
    }

    /* Before using the device, it must be opened. Open it for configuring
     * parameters and for grabbing images. */
    Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

    /* Check first to see if the device supports the Mono8 format. */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
    if (!isAvail)
    {
        /* Bail out instead of attempting to set an unsupported pixel format. */
        MessageBox.Show("设备不支持8位灰度图像");
        return;
    }

    /* Set the pixel format to Mono8, where gray values are output as 8-bit
     * values for each pixel. */
    Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");

    /* Disable acquisition start trigger if available. */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
    if (isAvail)
    {
        Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
        Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
    }

    /* Disable frame burst start trigger if available. */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart");
    if (isAvail)
    {
        Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart");
        Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
    }

    /* Disable frame start trigger if available. */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
    if (isAvail)
    {
        Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
        Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
    }

    /* For GigE cameras, increasing the packet size improves performance. Values
     * above 1500 (e.g. 8192) require a network adapter with jumbo-frame support;
     * this sample only sets 1500. Check first that the parameter exists and is
     * writable. */
    isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
    if (isAvail)
    {
        /* The device supports the packet size feature. Set a value. */
        Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500);
    }
}
const uint GIGE_PROTOCOL_OVERHEAD = 36; /* Total number of bytes of protocol overhead per packet. */

/* Multi-camera grab sample: opens NUM_DEVICES cameras, configures them for
 * continuous Mono8 acquisition, grabs from all of them for ~5 seconds using a
 * shared wait-object set (index 0 = timeout timer, 1..NUM_DEVICES = per-camera
 * stream grabbers), then tears everything down in reverse order. */
static void Main(string[] args)
{
    PYLON_DEVICE_HANDLE[] hDev = new PYLON_DEVICE_HANDLE[NUM_DEVICES]; /* Handles for the pylon devices. */
    for (int deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
    {
        hDev[deviceIndex] = new PYLON_DEVICE_HANDLE();
    }

    try
    {
        uint numDevicesAvail;       /* Number of the available devices. */
        bool isAvail;               /* Used for checking feature availability. */
        bool isReady;               /* Used as an output parameter. */
        int i;                      /* Counter. */
        int deviceIndex;            /* Index of device used in the following variables. */
        PYLON_WAITOBJECTS_HANDLE wos;   /* Wait objects. */
        int nGrabs;                 /* Counts the number of grab iterations. */
        PYLON_WAITOBJECT_HANDLE woTimer; /* Timer wait object. */

        /* These are camera specific variables: */
        PYLON_STREAMGRABBER_HANDLE[] hGrabber = new PYLON_STREAMGRABBER_HANDLE[NUM_DEVICES]; /* Handle for the pylon stream grabber. */
        PYLON_WAITOBJECT_HANDLE[] hWait = new PYLON_WAITOBJECT_HANDLE[NUM_DEVICES]; /* Handle used for waiting for a grab to be finished. */
        uint[] payloadSize = new uint[NUM_DEVICES];           /* Size of an image frame in bytes. */
        PylonGrabResult_t[] grabResult = new PylonGrabResult_t[NUM_DEVICES]; /* Stores the result of a grab operation. */
        uint[] nStreams = new uint[NUM_DEVICES];              /* The number of streams provided by the device. */
        Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >[] buffers =
            new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > [NUM_DEVICES]; /* Holds handles and buffers used for grabbing. */

#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the Programmer's Guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call PylonEnumerateDevices()
         * before creating a device. */
        numDevicesAvail = Pylon.EnumerateDevices();
        if (numDevicesAvail < NUM_DEVICES)
        {
            Console.Error.WriteLine("Found {0} devices. At least {1} devices needed to run this sample.", numDevicesAvail, NUM_DEVICES);
            throw new Exception("Not enough devices found.");
        }

        /* Create wait objects. This must be done outside of the loop. */
        wos = Pylon.WaitObjectsCreate();

        /* In this sample, we want to grab for a given amount of time, then stop.
         * Create a timer that triggers an AutoResetEvent, wrap the AutoResetEvent
         * in a pylon C.NET wait object, and add it to the wait object set. */
        AutoResetEvent timoutEvent = new AutoResetEvent(false); /* The timeout event to wait for. */
        TimerCallbackWrapper timerCallbackWrapper = new TimerCallbackWrapper(timoutEvent); /* Receives the timer callback and sets the timeout event. */
        Timer timer = new Timer(timerCallbackWrapper.TimerCallback); /* The timeout timer. */
        woTimer = Pylon.WaitObjectFromW32(timoutEvent.SafeWaitHandle, true);
        Pylon.WaitObjectsAdd(wos, woTimer);

        /* Open cameras and set parameters. */
        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* Get handles for the devices. */
            hDev[deviceIndex] = Pylon.CreateDeviceByIndex((uint)deviceIndex);

            /* Before using the device, it must be opened. Open it for configuring
             * parameters and for grabbing images. */
            Pylon.DeviceOpen(hDev[deviceIndex], Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

            /* Print out the name of the camera we are using. */
            {
                bool isReadable = Pylon.DeviceFeatureIsReadable(hDev[deviceIndex], "DeviceModelName");
                if (isReadable)
                {
                    string name = Pylon.DeviceFeatureToString(hDev[deviceIndex], "DeviceModelName");
                    Console.WriteLine("Using camera '{0}'", name);
                }
            }

            /* Set the pixel format to Mono8, where gray values will be output as
             * 8 bit values for each pixel. Check first to see if the device
             * supports the Mono8 format. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_PixelFormat_Mono8");
            if (!isAvail)
            {
                /* Feature is not available. */
                throw new Exception("Device doesn't support the Mono8 pixel format.");
            }
            /* ... Set the pixel format to Mono8. */
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "PixelFormat", "Mono8");

            /* Disable acquisition start trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_AcquisitionStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "AcquisitionStart");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* Disable frame burst start trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_FrameBurstStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "FrameBurstStart");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* Disable frame start trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_FrameStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "FrameStart");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* We will use the Continuous frame mode, i.e., the camera delivers
             * images continuously. */
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "AcquisitionMode", "Continuous");

            PYLON_DEVICE_INFO_HANDLE hDi = Pylon.GetDeviceInfoHandle((uint)deviceIndex);
            string deviceClass = Pylon.DeviceInfoGetPropertyValueByName(hDi, Pylon.cPylonDeviceInfoDeviceClassKey);
            if (deviceClass == "BaslerGigE")
            {
                /* For GigE cameras, we recommend increasing the packet size for
                 * better performance. When the network adapter supports jumbo
                 * frames, set the packet size to a value > 1500, e.g., to 8192.
                 *
                 * We also set the inter-packet and the frame transmission delay
                 * so the switch can line up packets better when several cameras
                 * share the link. */
                Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPSPacketSize", GIGE_PACKET_SIZE);
                Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * (NUM_DEVICES - 1));
                Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCFTD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * deviceIndex);
            }
            else if (deviceClass == "Basler1394")
            {
                /* For FireWire we just set the PacketSize node to limit the
                 * bandwidth we're using: divide the available bandwidth
                 * (4915 for FW400, 9830 for FW800) by the number of devices. */
                long newPacketSize = 4915 / NUM_DEVICES;
                long recommendedPacketSize = 0;

                /* Get the recommended packet size from the camera. */
                recommendedPacketSize = Pylon.DeviceGetIntegerFeature(hDev[deviceIndex], "RecommendedPacketSize");
                if (newPacketSize < recommendedPacketSize)
                {
                    /* Get the increment value for the packet size. The new value
                     * must be divisible by the increment of that feature. */
                    long packetSizeInc = 0;
                    packetSizeInc = Pylon.DeviceGetIntegerFeatureInc(hDev[deviceIndex], "PacketSize");

                    /* Adjust the new packet size so it is divisible by its increment. */
                    newPacketSize -= newPacketSize % packetSizeInc;
                }
                else
                {
                    /* The recommended packet size should always be valid. No need
                     * to check against the increment. */
                    newPacketSize = recommendedPacketSize;
                }

                /* Set the new packet size. */
                Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "PacketSize", newPacketSize);
                Console.WriteLine("Using packetsize: {0}", newPacketSize);
            }
        }

        /* Allocate and register buffers for grab. */
        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* Determine the required size for the grab buffer. */
            payloadSize[deviceIndex] = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev[deviceIndex], "PayloadSize"));

            /* Image grabbing is done using a stream grabber. A device may be able
             * to provide different streams; a separate stream grabber must be
             * used for each stream. Here we use the default stream (index 0). */

            /* Get the number of streams supported by the device and the transport layer. */
            nStreams[deviceIndex] = Pylon.DeviceGetNumStreamGrabberChannels(hDev[deviceIndex]);
            if (nStreams[deviceIndex] < 1)
            {
                throw new Exception("The transport layer doesn't support image streams.");
            }

            /* Create and open a stream grabber for the first channel. */
            hGrabber[deviceIndex] = Pylon.DeviceGetStreamGrabber(hDev[deviceIndex], 0);
            Pylon.StreamGrabberOpen(hGrabber[deviceIndex]);

            /* Get a handle for the stream grabber's wait object. The wait object
             * allows waiting for buffers to be filled with grabbed data. */
            hWait[deviceIndex] = Pylon.StreamGrabberGetWaitObject(hGrabber[deviceIndex]);

            /* Add the stream grabber's wait object to our wait objects. This is
             * needed to be able to wait until at least one camera has grabbed an
             * image in the grab loop below. */
            Pylon.WaitObjectsAdd(wos, hWait[deviceIndex]);

            /* Tell the stream grabber the number and size of the buffers we are
             * using. */
            /* .. We will not use more than NUM_BUFFERS for grabbing. */
            Pylon.StreamGrabberSetMaxNumBuffer(hGrabber[deviceIndex], NUM_BUFFERS);
            /* .. We will not use buffers bigger than payloadSize bytes. */
            Pylon.StreamGrabberSetMaxBufferSize(hGrabber[deviceIndex], payloadSize[deviceIndex]);

            /* Allocate the resources required for grabbing. After this, critical
             * parameters that impact the payload size must not be changed until
             * FinishGrab() is called. */
            Pylon.StreamGrabberPrepareGrab(hGrabber[deviceIndex]);

            /* Before using the buffers for grabbing, they must be registered at
             * the stream grabber. For each registered buffer, a buffer handle is
             * returned; after registering, these handles are used instead of the
             * buffer object pointers. The buffer objects are held in a dictionary
             * that provides access to the buffer using a handle as key. */
            buffers[deviceIndex] = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >();
            for (i = 0; i < NUM_BUFFERS; ++i)
            {
                PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize[deviceIndex], true);
                PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber[deviceIndex], ref buffer);
                buffers[deviceIndex].Add(handle, buffer);
            }

            /* Feed the buffers into the stream grabber's input queue. For each
             * buffer, the API allows passing in an integer as additional context
             * information; it is returned unchanged when the grab finishes. Here
             * we use the index of the buffer as context information. */
            i = 0;
            foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers[deviceIndex])
            {
                Pylon.StreamGrabberQueueBuffer(hGrabber[deviceIndex], pair.Key, i++);
            }
        }

        /* The stream grabbers are now prepared. As soon as the cameras start
         * acquiring images, the image data will be grabbed into the provided
         * buffers. */
        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* Let the camera acquire images. */
            Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStart");
        }

        /* Set the timer to 5 s and start it. */
        timer.Change(5000, Timeout.Infinite);

        /* Counts the number of grabbed images. */
        nGrabs = 0;

        /* Grab until the timer expires. */
        for (;;)
        {
            int bufferIndex;    /* Index of the buffer. */
            Byte min, max;
            uint woIndex;       /* Index of the signaled wait object (0 = timer). */

            /* Wait for the next buffer to be filled. Wait up to 1000 ms. */
            isReady = Pylon.WaitObjectsWaitForAny(wos, 1000, out woIndex);
            if (!isReady)
            {
                /* Timeout occurred. */
                throw new Exception("Grab timeout occurred.");
            }

            /* If the timer has expired, exit the grab loop. */
            if (woIndex == 0)
            {
                Console.Error.WriteLine("Grabbing completed successfully.");
                break; /* Timer expired. */
            }

            /* Account for the timer: wait object 1..N maps to camera 0..N-1. */
            --woIndex;

            /* Since the wait operation was successful, the result of at least one
             * grab operation is available. Retrieve it. */
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[woIndex], out grabResult[woIndex]);
            if (!isReady)
            {
                /* Oops. No grab result available? We should never have reached
                 * this point: the wait above returned without a timeout, so a
                 * grab result should be available. */
                throw new Exception("Failed to retrieve a grab result.");
            }

            /* Get the buffer index from the context information. */
            bufferIndex = grabResult[woIndex].Context;

            /* Check to see if the image was grabbed successfully. */
            if (grabResult[woIndex].Status == EPylonGrabStatus.Grabbed)
            {
                /* Success. Perform image processing. Since we passed more than
                 * one buffer to the stream grabber, the remaining buffers are
                 * filled in the background while we process; the processed buffer
                 * won't be touched by the stream grabber until we requeue it. */
                PylonBuffer <Byte> buffer; /* Reference to the buffer attached to the grab result. */

                /* Get the buffer from the dictionary. Since we also got the
                 * buffer index, we could alternatively use an array, e.g.
                 * buffers[bufferIndex]. */
                if (!buffers[woIndex].TryGetValue(grabResult[woIndex].hBuffer, out buffer))
                {
                    /* Oops. No buffer available? We should never have reached
                     * this point, since all buffers are in the dictionary. */
                    throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
                }

                /* Perform processing. */
                getMinMax(buffer.Array, grabResult[woIndex].SizeX, grabResult[woIndex].SizeY, out min, out max);
                Console.WriteLine("Grabbed frame {0} from camera {1} into buffer {2}. Min. val={3}, Max. val={4}",
                                  nGrabs, woIndex, bufferIndex, min, max);

                /* Display image. */
                Pylon.ImageWindowDisplayImage <Byte>(woIndex, buffer, grabResult[woIndex]);
            }
            else if (grabResult[woIndex].Status == EPylonGrabStatus.Failed)
            {
                Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}",
                                        nGrabs, grabResult[woIndex].ErrorCode);
            }

            /* Once finished with the processing, requeue the buffer to be filled again. */
            Pylon.StreamGrabberQueueBuffer(hGrabber[woIndex], grabResult[woIndex].hBuffer, bufferIndex);
            nGrabs++;
        }

        /* Clean up. */

        /* Stop the image acquisition on the cameras. */
        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* ... Stop the camera. */
            Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStop");
        }

        // Remove all wait objects from WaitObjects.
        Pylon.WaitObjectsRemoveAll(wos);
        Pylon.WaitObjectDestroy(woTimer);
        Pylon.WaitObjectsDestroy(wos);

        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* ... We must issue a cancel call to ensure that all pending buffers
             * are put into the stream grabber's output queue. */
            Pylon.StreamGrabberCancelGrab(hGrabber[deviceIndex]);

            /* ... The buffers can now be retrieved from the stream grabber. */
            do
            {
                isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[deviceIndex], out grabResult[deviceIndex]);
            }
            while (isReady);

            /* ... When all buffers are retrieved from the stream grabber, they
             * can be deregistered. After deregistering the buffers, it is safe to
             * free the memory. */
            foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers[deviceIndex])
            {
                Pylon.StreamGrabberDeregisterBuffer(hGrabber[deviceIndex], pair.Key);
                pair.Value.Dispose();
            }
            buffers[deviceIndex] = null;

            /* ... Release grabbing related resources. After calling
             * PylonStreamGrabberFinishGrab(), parameters that impact the payload
             * size (e.g., the AOI width and height parameters) are unlocked and
             * can be modified again. */
            Pylon.StreamGrabberFinishGrab(hGrabber[deviceIndex]);

            /* ... Close the stream grabber. */
            Pylon.StreamGrabberClose(hGrabber[deviceIndex]);

            /* ... Close and release the pylon device. The stream grabber becomes
             * invalid after closing the pylon device. Don't call stream grabber
             * related methods after closing or releasing the device. */
            Pylon.DeviceClose(hDev[deviceIndex]);
            Pylon.DestroyDevice(hDev[deviceIndex]);
        }

        /* Dispose timer and event. */
        timer.Dispose();
        timoutEvent.Close();

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();

        /* ... Shut down the pylon runtime system. Don't call any pylon function
         * after calling PylonTerminate(). */
        Pylon.Terminate();
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        /* Best-effort cleanup of any device that was opened before the failure. */
        for (uint deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            try
            {
                if (hDev[deviceIndex].IsValid)
                {
                    /* ... Close and release the pylon device. */
                    if (Pylon.DeviceIsOpen(hDev[deviceIndex]))
                    {
                        Pylon.DeviceClose(hDev[deviceIndex]);
                    }
                    Pylon.DestroyDevice(hDev[deviceIndex]);
                }
            }
            catch (Exception)
            {
                /* No further handling here. */
            }
        }

        Pylon.Terminate(); /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/* Single-camera grab sample: opens the first camera found, configures Mono8
 * free-running acquisition, grabs numGrabs single frames and reports min/max
 * gray values.
 * Fixes: after a grab timeout the loop now continues instead of inspecting the
 * default-initialized grab result, and the final buffer disposal is null-guarded
 * (imgBuf stays null when every grab timed out). */
static void Main(string[] args)
{
    PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */
    try
    {
        uint numDevices;                 /* Number of available devices. */
        const int numGrabs = 10;         /* Number of images to grab. */
        PylonBuffer <Byte> imgBuf = null; /* Buffer used for grabbing; allocated by the first successful grab. */
        bool isAvail;
        int i;

#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the Programmer's Guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call PylonEnumerateDevices()
         * before creating a device. */
        numDevices = Pylon.EnumerateDevices();
        if (0 == numDevices)
        {
            throw new Exception("No devices found.");
        }

        /* Get a handle for the first device found. */
        hDev = Pylon.CreateDeviceByIndex(0);

        /* Before using the device, it must be opened. Open it for configuring
         * parameters and for grabbing images. */
        Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

        /* Set the pixel format to Mono8, where gray values will be output as
         * 8 bit values for each pixel. Check first to see if the device supports
         * the Mono8 format. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
        if (!isAvail)
        {
            /* Feature is not available. */
            throw new Exception("Device doesn't support the Mono8 pixel format.");
        }
        /* ... Set the pixel format to Mono8. */
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");

        /* Disable acquisition start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Disable frame burst start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Disable frame start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* For GigE cameras, we recommend increasing the packet size for better
         * performance. If the network adapter supports jumbo frames, set the
         * packet size to a value > 1500, e.g., to 8192. In this sample, we only
         * set the packet size to 1500. Check first to see if the parameter is
         * supported and writable. */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
        if (isAvail)
        {
            /* ... The device supports the packet size feature. Set a value. */
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500);
        }

        /* Grab some images in a loop. */
        for (i = 0; i < numGrabs; ++i)
        {
            Byte min, max;
            PylonGrabResult_t grabResult;

            /* Grab one single frame from stream channel 0. The camera is set to
             * "single frame" acquisition mode. Wait up to 500 ms for the image to
             * be grabbed. If imgBuf is null a buffer is automatically created
             * with the right size. */
            if (!Pylon.DeviceGrabSingleFrame(hDev, 0, ref imgBuf, out grabResult, 500))
            {
                /* Timeout occurred: grabResult carries no valid data, so skip
                 * the status inspection below. */
                Console.WriteLine("Frame {0}: timeout.", i + 1);
                continue;
            }

            /* Check to see if the image was grabbed successfully. */
            if (grabResult.Status == EPylonGrabStatus.Grabbed)
            {
                /* Success. Perform image processing. */
                getMinMax(imgBuf.Array, grabResult.SizeX, grabResult.SizeY, out min, out max);
                Console.WriteLine("Grabbed frame {0}. Min. gray value = {1}, Max. gray value = {2}", i + 1, min, max);

                /* Display image. */
                Pylon.ImageWindowDisplayImage <Byte>(0, imgBuf, grabResult);
            }
            else if (grabResult.Status == EPylonGrabStatus.Failed)
            {
                Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}",
                                        i + 1, grabResult.ErrorCode);
            }
        }

        /* Release the buffer; it stays null when no grab ever succeeded. */
        if (imgBuf != null)
        {
            imgBuf.Dispose();
            imgBuf = null;
        }

        /* Clean up. Close and release the pylon device. */
        Pylon.DeviceClose(hDev);
        Pylon.DestroyDevice(hDev);

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();

        /* Shut down the pylon runtime system. Don't call any pylon method after
         * calling Pylon.Terminate(). */
        Pylon.Terminate();
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        try
        {
            if (hDev.IsValid)
            {
                /* ... Close and release the pylon device. */
                if (Pylon.DeviceIsOpen(hDev))
                {
                    Pylon.DeviceClose(hDev);
                }
                Pylon.DestroyDevice(hDev);
            }
        }
        catch (Exception)
        {
            /* No further handling here. */
        }

        Pylon.Terminate(); /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/// <summary>
/// Command line entry point. Parses the arguments, opens the camera identified by
/// its serial number, configures pixel format / triggers / GigE transport / exposure,
/// grabs one frame (retrying up to the requested number of attempts) and saves it to
/// a file in the requested format. Exits with code 1 if a pylon call throws.
/// </summary>
static void Main(string[] args)
{
    /* Parse the command line. Every recognized option is a flag immediately
     * followed by its value, so two tokens are consumed per option. */
    for (uint i = 0; i < args.Length; i++)
    {
        /* BUGFIX: stop when a flag is the last token - there is no value to read.
         * The original indexed args[i + 1] unguarded and could throw
         * IndexOutOfRangeException. */
        if (i + 1 >= args.Length)
        {
            break;
        }
        switch (args[i])
        {
            case par_cameraSerialNumber:
                arg_cameraSerialNumber = args[i + 1];
                i++;
                break;
            case par_pathToFile:
                arg_pathToFile = args[i + 1];
                i++;
                break;
            case par_imageFormat:
                arg_imageFormat = args[i + 1];
                i++;
                break;
            case par_packageSize:
                arg_packageSize = args[i + 1];
                i++;
                break;
            case par_interPackageDelay:
                arg_interPackageDelay = args[i + 1];
                i++;
                break;
            case par_attemptsToGrab:
                arg_attemptsToGrab = args[i + 1];
                i++;
                break;
            case par_exposureTime:
                arg_exposureTime = args[i + 1];
                i++;
                break;
            default:
                break;
        }
    }

    /* Validate the collected arguments; report every problem before exiting. */
    bool error = false;
    if (arg_pathToFile.Length == 0)
    {
        Console.WriteLine("Path to file is empty");
        error = true;
    }
    /* BUGFIX: the original mixed negated and non-negated Equals() calls with ||,
     * so the whole condition was always "valid" and an unknown format was never
     * rejected. The format is valid when it equals any one of the five names. */
    if (!(arg_imageFormat.Equals("BMP") || arg_imageFormat.Equals("PNG") ||
          arg_imageFormat.Equals("JPG") || arg_imageFormat.Equals("RAW") ||
          arg_imageFormat.Equals("TIFF")))
    {
        Console.WriteLine("File format should be [BMP|PNG|JPG|RAW|TIFF]");
        error = true;
    }
    if (arg_cameraSerialNumber.Length == 0)
    {
        Console.WriteLine("Camera serial number is empty");
        error = true;
    }
    int exposureTime = 0;
    try
    {
        exposureTime = Int32.Parse(arg_exposureTime);
    }
    catch (Exception)
    {
        Console.WriteLine("Wrong exposure time value");
        error = true;
    }
    int interPackageDelay = 0;
    try
    {
        interPackageDelay = Int32.Parse(arg_interPackageDelay);
    }
    catch (Exception)
    {
        Console.WriteLine("Wrong interPackageDelay value");
        error = true;
    }
    int attemptsToGrab = 0;
    try
    {
        /* Int16 range is more than enough for a retry count; kept from the original. */
        attemptsToGrab = Int16.Parse(arg_attemptsToGrab);
    }
    catch (Exception)
    {
        Console.WriteLine("Wrong attempts to grab value");
        error = true;
    }
    if (error)
    {
        Console.WriteLine("Parameters usage:");
        Console.WriteLine("-s Camera serial number");
        Console.WriteLine("-o Path to file");
        Console.WriteLine("-d Inter package delay in ticks (default 1000)");
        Console.WriteLine("-a Attempts to grab image (default 1)");
        Console.WriteLine("-p Package size (default 1500)");
        Console.WriteLine("-e Exposure time (default 35000)");
        Console.WriteLine("-f Image format [BMP|PNG|JPG|RAW|TIFF]");
        return;
    }

    PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */
    try
    {
        uint numDevices;                 /* Number of available devices. */
        PylonBuffer<Byte> imgBuf = null; /* Buffer used for grabbing; allocated by the first grab. */
        bool isAvail;

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        numDevices = Pylon.EnumerateDevices();
        if (0 == numDevices)
        {
            Console.WriteLine("Error: No devices found");
            return;
        }

        /* Look the device up by its serial number. */
        bool deviceFound = false;
        uint deviceNum = 0;
        for (uint di = 0; di < numDevices; di++)
        {
            PYLON_DEVICE_INFO_HANDLE hDi = Pylon.GetDeviceInfoHandle(di);
            string serial = Pylon.DeviceInfoGetPropertyValueByName(hDi, Pylon.cPylonDeviceInfoSerialNumberKey);
            deviceNum = di;
            if (serial.Equals(arg_cameraSerialNumber))
            {
                deviceFound = true;
                break;
            }
        }
        if (!deviceFound)
        {
            Console.WriteLine("Error: No devices found by serial number");
            return;
        }

        hDev = Pylon.CreateDeviceByIndex(deviceNum);

        /* Open the device exclusively for configuring parameters and grabbing. */
        Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream | Pylon.cPylonAccessModeExclusive);

        /* Set the pixel format to Mono8, if supported. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
        if (!isAvail)
        {
            Console.WriteLine("Error: Device doesn't support the Mono8 pixel format");
            return;
        }
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");

        /* Disable all start triggers so the camera runs free. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* GigE only: apply the requested packet size (best effort, 1500 default). */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
        if (isAvail)
        {
            int packageSize = 1500;
            try
            {
                packageSize = Int32.Parse(arg_packageSize);
            }
            catch (Exception)
            {
                /* Deliberate best effort: keep the 1500 default when unparsable. */
            }
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", packageSize);
        }

        /* GigE only: apply the inter-packet delay. */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPD");
        if (isAvail)
        {
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPD", interPackageDelay);
        }

        /* Use a fixed exposure time instead of auto exposure. */
        Pylon.DeviceFeatureFromString(hDev, "ExposureAuto", "Off");
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "ExposureTimeAbs");
        if (isAvail)
        {
            /* DeviceSetFloatFeature takes a floating point value; cast straight to
             * double instead of the original's pointless detour through long. */
            Pylon.DeviceSetFloatFeature(hDev, "ExposureTimeAbs", (double)exposureTime);
        }

        Byte min, max;
        PylonGrabResult_t grabResult;
        for (int attempt = 0; attempt < attemptsToGrab; attempt++)
        {
            /* Grab one single frame from stream channel 0, waiting up to 5000 ms.
             * If imgBuf is null, a buffer of the right size is created automatically. */
            if (!Pylon.DeviceGrabSingleFrame(hDev, 0, ref imgBuf, out grabResult, 5000))
            {
                /* Timeout occurred; grabResult.Status stays non-Grabbed, so the
                 * loop simply retries. */
                Console.WriteLine("Error: timeout");
            }

            if (grabResult.Status == EPylonGrabStatus.Grabbed)
            {
                getMinMax(imgBuf.Array, grabResult.SizeX, grabResult.SizeY, out min, out max);
                Console.WriteLine("Frame grabbed success");

                /* Map the requested format name to the pylon file format. The final
                 * else (unreachable after validation) keeps the original BMP fallback. */
                EPylonImageFileFormat fileFormat;
                if (arg_imageFormat.Equals("PNG"))
                {
                    fileFormat = EPylonImageFileFormat.ImageFileFormat_Png;
                }
                else if (arg_imageFormat.Equals("JPG"))
                {
                    fileFormat = EPylonImageFileFormat.ImageFileFormat_Jpeg;
                }
                else if (arg_imageFormat.Equals("RAW"))
                {
                    fileFormat = EPylonImageFileFormat.ImageFileFormat_Raw;
                }
                else if (arg_imageFormat.Equals("TIFF"))
                {
                    fileFormat = EPylonImageFileFormat.ImageFileFormat_Tiff;
                }
                else
                {
                    fileFormat = EPylonImageFileFormat.ImageFileFormat_Bmp;
                }
                Pylon.ImagePersistenceSave<Byte>(fileFormat, arg_pathToFile, imgBuf,
                                                 grabResult.PixelType,
                                                 (uint)grabResult.SizeX, (uint)grabResult.SizeY,
                                                 0, EPylonImageOrientation.ImageOrientation_TopDown);
                break; /* Success - no further attempts needed. */
            }
            else if (grabResult.Status == EPylonGrabStatus.Failed)
            {
                Console.WriteLine("Error: failed");
            }
        }

        /* BUGFIX: imgBuf is still null when every attempt timed out before a buffer
         * was allocated; the original called Dispose() unconditionally and threw
         * NullReferenceException in that case. */
        if (imgBuf != null)
        {
            imgBuf.Dispose();
            imgBuf = null;
        }

        /* Clean up: close and release the pylon device, then shut down the runtime. */
        Pylon.DeviceClose(hDev);
        Pylon.DestroyDevice(hDev);
        Pylon.Terminate();
    }
    catch (Exception e)
    {
        /* Report the failure instead of exiting silently as before. */
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        try
        {
            if (hDev.IsValid)
            {
                /* ... Close and release the pylon device. */
                if (Pylon.DeviceIsOpen(hDev))
                {
                    Pylon.DeviceClose(hDev);
                }
                Pylon.DestroyDevice(hDev);
            }
        }
        catch (Exception)
        {
            /* No further handling here. */
        }
        Pylon.Terminate(); /* Releases all pylon resources. */
        Environment.Exit(1);
    }
}
int i; /* Counter. */

/* Opens the first camera found, configures it for continuous YUV422Packed
 * acquisition, and grabs NUM_GRABS images through a stream grabber, handing each
 * grabbed buffer to WpfApp5.MainWindow for display.
 * NOTE(review): the cleanup section at the end is compiled out via
 * DONT_COMPILE_THESE_CODE, so on normal completion the device stays open and the
 * buffers stay registered - presumably released elsewhere; confirm.
 * NOTE(review): with Local_Variables undefined, hDev/hGrabber/buffers/etc. must be
 * fields of the enclosing class (not visible in this chunk). */
public void PylonC_Open()
{
#if Local_Variables
    PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */
#endif
    try
    {
#if Local_Variables
        uint numDevices;                     /* Number of available devices. */
        PYLON_STREAMGRABBER_HANDLE hGrabber; /* Handle for the pylon stream grabber. */
        PYLON_WAITOBJECT_HANDLE hWait;       /* Handle used for waiting for a grab to be finished. */
        uint payloadSize;                    /* Size of an image frame in bytes. */
        Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> buffers; /* Holds handles and buffers used for grabbing. */
        PylonGrabResult_t grabResult;        /* Stores the result of a grab operation. */
        int nGrabs;                          /* Counts the number of buffers grabbed. */
        uint nStreams;                       /* The number of streams provided by the device. */
        bool isAvail;                        /* Used for checking feature availability. */
        bool isReady;                        /* Used as an output parameter. */
        int i;                               /* Counter. */
#endif
#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the programmer's guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call
         * PylonEnumerateDevices() before creating a device. */
        numDevices = Pylon.EnumerateDevices();
        if (0 == numDevices)
        {
            throw new Exception("No devices found.");
        }

        /* Get a handle for the first device found. */
        hDev = Pylon.CreateDeviceByIndex(0);

        /* Before using the device, it must be opened. Open it for configuring
         * parameters and for grabbing images. */
        Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

        /* Print out the name of the camera we are using. */
        {
            bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName");
            if (isReadable)
            {
                string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName");
                Console.WriteLine("Using camera {0}.", name);
            }
        }

        /* Set the pixel format to YUV422Packed. (The sample this code derives from
         * used Mono8; the availability check and the value below were changed to
         * YUV422Packed.) */
        /* ... Check first to see if the device supports the YUV422Packed format. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_YUV422Packed");
        if (!isAvail)
        {
            /* Feature is not available. */
            throw new Exception("Device doesn't support the Mono8/YUV422Packed pixel format.");
        }
        /* ... Set the pixel format to YUV422Packed. */
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "YUV422Packed");

        /* Disable acquisition start trigger if available */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Disable frame burst start trigger if available */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Disable frame start trigger if available */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* We will use the Continuous frame mode, i.e., the camera delivers
         * images continuously. */
        Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous");

        /* For GigE cameras, we recommend increasing the packet size for better
         * performance. When the network adapter supports jumbo frames, set the packet
         * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet
         * size to 1500. */
        /* ... Check first to see if the GigE camera packet size parameter is supported
         * and if it is writable. */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
        if (isAvail)
        {
            /* ... The device supports the packet size feature. Set a value. */
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500);
        }

        /* Determine the required size of the grab buffer. */
        payloadSize = checked((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize"));

        /* Image grabbing is done using a stream grabber.
         * A device may be able to provide different streams. A separate stream grabber
         * must be used for each stream. In this sample, we create a stream grabber for
         * the default stream, i.e., the first stream ( index == 0 ). */

        /* Get the number of streams supported by the device and the transport layer. */
        nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev);
        if (nStreams < 1)
        {
            throw new Exception("The transport layer doesn't support image streams.");
        }

        /* Create and open a stream grabber for the first channel. */
        hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0);
        Pylon.StreamGrabberOpen(hGrabber);

        /* Get a handle for the stream grabber's wait object. The wait object
         * allows waiting for buffers to be filled with grabbed data. */
        hWait = Pylon.StreamGrabberGetWaitObject(hGrabber);

        /* We must tell the stream grabber the number and size of the buffers
         * we are using. */
        /* .. We will not use more than NUM_BUFFERS for grabbing. */
        Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS);
        /* .. We will not use buffers bigger than payloadSize bytes. */
        Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize);

        /* Allocate the resources required for grabbing. After this, critical parameters
         * that impact the payload size must not be changed until FinishGrab() is called. */
        Pylon.StreamGrabberPrepareGrab(hGrabber);

        /* Before using the buffers for grabbing, they must be registered at
         * the stream grabber. For each registered buffer, a buffer handle
         * is returned. After registering, these handles are used instead of the
         * buffer objects pointers. The buffer objects are held in a dictionary,
         * that provides access to the buffer using a handle as key. */
        buffers = new Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>>();
        for (i = 0; i < NUM_BUFFERS; ++i)
        {
            PylonBuffer<Byte> buffer = new PylonBuffer<byte>(payloadSize, true);
            PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer);
            buffers.Add(handle, buffer);
        }

        /* Feed the buffers into the stream grabber's input queue. For each buffer, the
         * API allows passing in an integer as additional context information. This
         * integer will be returned unchanged when the grab is finished. In our example,
         * we use the index of the buffer as context information. */
        i = 0;
        foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers)
        {
            Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++);
        }

        /* The stream grabber is now prepared. As soon as the camera starts acquiring
         * images, the image data will be grabbed into the provided buffers. */

        /* Let the camera acquire images. */
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart");

        /* Grab NUM_GRABS images */
        nGrabs = 0; /* Counts the number of grabbed images. */
        while (nGrabs < NUM_GRABS)
        {
            int bufferIndex; /* Index of the buffer. */
            Byte min, max;

            /* Wait for the next buffer to be filled. Wait up to 1000 ms. */
            isReady = Pylon.WaitObjectWait(hWait, 1000);
            if (!isReady)
            {
                /* Timeout occurred. */
                throw new Exception("Grab timeout occurred.");
            }

            /* Since the wait operation was successful, the result of at least one grab
             * operation is available. Retrieve it. */
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
            if (!isReady)
            {
                /* Oops. No grab result available? We should never have reached this
                 * point. Since the wait operation above returned without a timeout, a
                 * grab result should be available. */
                throw new Exception("Failed to retrieve a grab result");
            }

            nGrabs++;

            /* Get the buffer index from the context information. */
            bufferIndex = (int)grabResult.Context;

            /* Check to see if the image was grabbed successfully. */
            if (grabResult.Status == EPylonGrabStatus.Grabbed)
            {
                /* Success. Perform image processing. Since we passed more than one
                 * buffer to the stream grabber, the remaining buffers are filled in
                 * the background while we do the image processing. The processed buffer
                 * won't be touched by the stream grabber until we pass it back. */
                PylonBuffer<Byte> buffer; /* Reference to the buffer attached to the grab result. */

                /* Get the buffer from the dictionary. Since we also got the buffer
                 * index, we could alternatively use an array, e.g. buffers[bufferIndex]. */
                if (!buffers.TryGetValue(grabResult.hBuffer, out buffer))
                {
                    /* Oops. No buffer available? We should never have reached this
                     * point, since all buffers are in the dictionary. */
                    throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
                }

                /* Perform processing. */
                getMinMax(buffer.Array, grabResult.SizeX, grabResult.SizeY, out min, out max);
                Console.WriteLine("Grabbed frame {0} into buffer {1}. Min. gray value = {2}, Max. gray value = {3}", nGrabs, bufferIndex, min, max);

                /* Display image: publish the buffer and grab result to the WPF main
                 * window before showing it in the pylon image window.
                 * NOTE(review): WpfApp5.MainWindow.Pylon_Buffer is written from this
                 * grab loop and read by the UI thread - confirm synchronization. */
                WpfApp5.MainWindow.Pylon_Buffer = buffer;
                WpfApp5.MainWindow.GrabResult = grabResult;
                Console.WriteLine("the contents of buffer: [{0, 10}] in OverlappedGrab Class.", buffer.Array[0]);
                Console.WriteLine("the contents of buffer: [{0, 10}] in WpfApp3.MainWindow.Pylon_Buffer.", WpfApp5.MainWindow.Pylon_Buffer.Array[0]);
                Pylon.ImageWindowDisplayImage<Byte>(0, buffer, grabResult);
            }
            else if (grabResult.Status == EPylonGrabStatus.Failed)
            {
                Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}", nGrabs, grabResult.ErrorCode);
            }

            /* Once finished with the processing, requeue the buffer to be filled again. */
            Pylon.StreamGrabberQueueBuffer(hGrabber, grabResult.hBuffer, bufferIndex);
        }

        /* Clean up. (Compiled out - see the NOTE at the top of this method.) */
#if DONT_COMPILE_THESE_CODE
        /* ... Stop the camera. */
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStop");

        /* ... We must issue a cancel call to ensure that all pending buffers are put
         * into the stream grabber's output queue. */
        Pylon.StreamGrabberCancelGrab(hGrabber);

        /* ... The buffers can now be retrieved from the stream grabber. */
        do
        {
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
        } while (isReady);

        /* ... When all buffers are retrieved from the stream grabber, they can be
         * deregistered. After deregistering the buffers, it is safe to free the memory. */
        foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers)
        {
            Pylon.StreamGrabberDeregisterBuffer(hGrabber, pair.Key);
            pair.Value.Dispose();
        }
        buffers = null;

        /* ... Release grabbing related resources. */
        Pylon.StreamGrabberFinishGrab(hGrabber);

        /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the
         * payload size (e.g., the AOI width and height parameters) are unlocked and
         * can be modified again. */

        /* ... Close the stream grabber. */
        Pylon.StreamGrabberClose(hGrabber);

        /* ... Close and release the pylon device. The stream grabber becomes invalid
         * after closing the pylon device. Don't call stream grabber related methods
         * after closing or releasing the device. */
        Pylon.DeviceClose(hDev);
        Pylon.DestroyDevice(hDev);

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();

        /* ... Shut down the pylon runtime system. Don't call any pylon method after
         * calling Pylon.Terminate(). */
        Pylon.Terminate();
#endif
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        try
        {
            if (hDev.IsValid)
            {
                /* ... Close and release the pylon device. */
                if (Pylon.DeviceIsOpen(hDev))
                {
                    Pylon.DeviceClose(hDev);
                }
                Pylon.DestroyDevice(hDev);
            }
        }
        catch (Exception)
        {
            /*No further handling here.*/
        }

        Pylon.Terminate(); /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/// <summary>
/// Configures the already opened camera (field <c>hDev</c>) for continuous Mono8
/// streaming and prepares stream grabber 0: reads Width/Height into _width/_height,
/// disables the acquisition start and frame start triggers, applies the GigE packet
/// size, determines the payload size, opens the grabber, registers NUM_BUFFERS grab
/// buffers and feeds them into the grabber's input queue.
/// Side effects: sets the class fields isAvail, _isModelValid, payloadSize, nStreams,
/// hGrabber, hWait and buffers.
/// Throws Exception when Mono8 is unsupported or no image stream is available.
/// </summary>
private void initStreamMode()
{
    _width = (int)Pylon.DeviceGetIntegerFeature(hDev, "Width");
    _height = (int)Pylon.DeviceGetIntegerFeature(hDev, "Height");

    /* Print out the name of the camera we are using. */
    bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName");
    if (isReadable)
    {
        string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName");
        Console.WriteLine("Using camera {0}.", name);
    }

    /* Set the pixel format to Mono8, where gray values will be output as 8 bit
     * values for each pixel. */
    /* ... Check first to see if the device supports the Mono8 format. */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
    if (!isAvail)
    {
        /* Feature is not available. */
        throw new Exception("Device doesn't support the Mono8 pixel format.");
    }
    /* ... Set the pixel format to Mono8. */
    bool isWritable = Pylon.DeviceFeatureIsWritable(hDev, "PixelFormat");
    if (isWritable)
    {
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");
    }

    /* Disable acquisition start trigger if available */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
    if (isAvail)
    {
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerSelector");
        if (isWritable)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
        }
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerMode");
        if (isWritable)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }
    }

    /* Disable frame start trigger if available */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
    if (isAvail)
    {
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerSelector");
        if (isWritable)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
        }
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerMode");
        if (isWritable)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }
    }

    /* We will use the Continuous frame mode, i.e., the camera delivers
     * images continuously. */
    isWritable = Pylon.DeviceFeatureIsWritable(hDev, "AcquisitionMode");
    if (isWritable)
    {
        Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous");
    }

    /* For GigE cameras, we recommend increasing the packet size for better
     * performance. When the network adapter supports jumbo frames, set the packet
     * size to a value > 1500, e.g., to 8192. */
    isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
    if (isAvail)
    {
        /* FIX: the original queried DeviceFeatureIsWritable("GevSCPSPacketSize")
         * a second time inside this branch; the redundant duplicate check has been
         * removed - the result is already known to be true here. */
        Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", GevSCPSPacketSize);
    }
    /* NOTE(review): _isModelValid mirrors the GigE packet-size writability check,
     * so a non-GigE camera would be flagged invalid here - confirm that is intended. */
    _isModelValid = isAvail;

    /* Determine the required size of the grab buffer. */
    payloadSize = checked((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize"));

    /* Image grabbing is done using a stream grabber. A separate stream grabber must
     * be used for each stream; we use the default stream ( index == 0 ). */

    /* Get the number of streams supported by the device and the transport layer. */
    nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev);
    if (nStreams < 1)
    {
        throw new Exception("The transport layer doesn't support image streams.");
    }

    /* Create and open a stream grabber for the first channel. */
    hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0);
    Pylon.StreamGrabberOpen(hGrabber);

    /* Get a handle for the stream grabber's wait object. The wait object
     * allows waiting for buffers to be filled with grabbed data. */
    hWait = Pylon.StreamGrabberGetWaitObject(hGrabber);

    /* Tell the stream grabber the number and maximum size of the buffers we use. */
    Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS);
    Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize);

    /* Allocate the resources required for grabbing. After this, critical parameters
     * that impact the payload size must not be changed until FinishGrab() is called. */
    Pylon.StreamGrabberPrepareGrab(hGrabber);

    /* Register the buffers at the stream grabber. For each registered buffer, a
     * handle is returned and used from then on instead of the buffer object; the
     * buffers are kept in a dictionary keyed by that handle. */
    buffers = new Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>>();
    int i;
    for (i = 0; i < NUM_BUFFERS; ++i)
    {
        PylonBuffer<Byte> buffer = new PylonBuffer<byte>(payloadSize, true);
        PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer);
        buffers.Add(handle, buffer);
    }

    /* Feed the buffers into the stream grabber's input queue, passing the buffer
     * index as the context information that is returned with each grab result. */
    i = 0;
    foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers)
    {
        Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++);
    }
}
const uint NUM_BUFFERS = 2; /* Number of buffers used for grabbing. */ static void Main(string[] args) { PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */ try { uint numDevices; /* Number of available devices. */ PYLON_STREAMGRABBER_HANDLE hGrabber; /* Handle for the pylon stream grabber. */ PYLON_CHUNKPARSER_HANDLE hChunkParser; /* Handle for the parser extracting the chunk data. */ PYLON_WAITOBJECT_HANDLE hWait; /* Handle used for waiting for a grab to be finished. */ uint payloadSize; /* Size of an image frame in bytes. */ Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > buffers; /* Holds handles and buffers used for grabbing. */ PylonGrabResult_t grabResult; /* Stores the result of a grab operation. */ int nGrabs; /* Counts the number of buffers grabbed. */ uint nStreams; /* The number of streams the device provides. */ bool isAvail; /* Used for checking feature availability */ bool isReady; /* Used as an output parameter */ int i; /* Counter. */ string triggerSelectorValue = "FrameStart"; /* Preselect the trigger for image acquisition */ bool isAvailFrameStart; /* Used for checking feature availability */ bool isAvailAcquisitionStart; /* Used for checking feature availability */ #if DEBUG /* This is a special debug setting needed only for GigE cameras. * See 'Building Applications with pylon' in the programmers guide */ Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/); #endif /* Before using any pylon methods, the pylon runtime must be initialized. */ Pylon.Initialize(); /* Enumerate all camera devices. You must call * PylonEnumerateDevices() before creating a device. */ numDevices = Pylon.EnumerateDevices(); if (0 == numDevices) { throw new Exception("No devices found!"); } /* Get a handle for the first device found. */ hDev = Pylon.CreateDeviceByIndex(0); /* Before using the device, it must be opened. Open it for configuring * parameters and for grabbing images. 
*/ Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream); /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */ /* ... Check first to see if the device supports the Mono8 format. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8"); if (!isAvail) { /* Feature is not available. */ throw new Exception("Device doesn't support the Mono8 pixel format."); } /* ... Set the pixel format to Mono8. */ Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8"); /* Check the available camera trigger mode(s) to select the appropriate one: acquisition start trigger mode (used by previous cameras; * do not confuse with acquisition start command) or frame start trigger mode (equivalent to previous acquisition start trigger mode). */ isAvailAcquisitionStart = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart"); isAvailFrameStart = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart"); /* Check to see if the camera implements the acquisition start trigger mode only. */ if (isAvailAcquisitionStart && !isAvailFrameStart) { /* Camera uses the acquisition start trigger as the only trigger mode. */ Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "On"); triggerSelectorValue = "AcquisitionStart"; } else { /* Camera may have the acquisition start trigger mode and the frame start trigger mode implemented. * In this case, the acquisition trigger mode must be switched off. 
*/ if (isAvailAcquisitionStart) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* Disable frame burst start trigger if available */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart"); if (isAvail) { Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* To trigger each single frame by software or external hardware trigger: Enable the frame start trigger mode. */ Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart"); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "On"); } /* Note: the trigger selector must be set to the appropriate trigger mode * before setting the trigger source or issuing software triggers. * Frame start trigger mode for newer cameras, acquisition start trigger mode for previous cameras. */ Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", triggerSelectorValue); /* Enable software triggering. */ /* ... Select the software trigger as the trigger source. */ Pylon.DeviceFeatureFromString(hDev, "TriggerSource", "Software"); /* When using software triggering, the Continuous frame mode should be used. Once * acquisition is started, the camera sends one image each time a software trigger is * issued. */ Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous"); /* For GigE cameras, we recommend increasing the packet size for better * performance. If the network adapter supports jumbo frames, set the packet * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size * to 1500. */ /* ... Check first to see if the GigE camera packet size parameter is supported and if it is writable. */ isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize"); if (isAvail) { /* ... The device supports the packet size feature. Set a value. 
*/ Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500); } /* Before enabling individual chunks, the chunk mode in general must be activated. */ isAvail = Pylon.DeviceFeatureIsWritable(hDev, "ChunkModeActive"); if (!isAvail) { throw new Exception("The device doesn't support the chunk mode."); } /* Activate the chunk mode. */ Pylon.DeviceSetBooleanFeature(hDev, "ChunkModeActive", true); /* Enable some individual chunks... */ /* ... The frame counter chunk feature. */ /* Is the chunk feature available? */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_ChunkSelector_Framecounter"); if (isAvail) { /* Select the frame counter chunk feature. */ Pylon.DeviceFeatureFromString(hDev, "ChunkSelector", "Framecounter"); /* Can the chunk feature be activated? */ isAvail = Pylon.DeviceFeatureIsWritable(hDev, "ChunkEnable"); if (isAvail) { /* Activate the chunk feature. */ Pylon.DeviceSetBooleanFeature(hDev, "ChunkEnable", true); } } /* ... The CRC checksum chunk feature. */ /* Note: Enabling the CRC checksum chunk feature is not a prerequisite for using * chunks. Chunks can also be handled when the CRC checksum chunk feature is disabled. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_ChunkSelector_PayloadCRC16"); if (isAvail) { /* Select the CRC checksum chunk feature. */ Pylon.DeviceFeatureFromString(hDev, "ChunkSelector", "PayloadCRC16"); /* Can the chunk feature be activated? */ isAvail = Pylon.DeviceFeatureIsWritable(hDev, "ChunkEnable"); if (isAvail) { /* Activate the chunk feature. */ Pylon.DeviceSetBooleanFeature(hDev, "ChunkEnable", true); } } /* The data block containing the image chunk and the other chunks has a self-descriptive layout. * A chunk parser is used to extract the appended chunk data from the grabbed image frame. * Create a chunk parser. */ hChunkParser = Pylon.DeviceCreateChunkParser(hDev); if (!hChunkParser.IsValid) { /* The transport layer doesn't provide a chunk parser. 
*/ throw new Exception("No chunk parser available."); } /* Image grabbing is done using a stream grabber. * A device may be able to provide different streams. A separate stream grabber must * be used for each stream. In this sample, we create a stream grabber for the default * stream, i.e., the first stream ( index == 0 ). */ /* Get the number of streams supported by the device and the transport layer. */ nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev); if (nStreams < 1) { throw new Exception("The transport layer doesn't support image streams."); } /* Create and open a stream grabber for the first channel. */ hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0); Pylon.StreamGrabberOpen(hGrabber); /* Get a handle for the stream grabber's wait object. The wait object * allows waiting for buffers to be filled with grabbed data. */ hWait = Pylon.StreamGrabberGetWaitObject(hGrabber); /* Determine the required size of the grab buffer. Since activating chunks will increase the * payload size and thus the required buffer size, do this after enabling the chunks. */ payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize")); /* We must tell the stream grabber the number and size of the buffers * we are using. */ /* .. We will not use more than NUM_BUFFERS for grabbing. */ Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS); /* .. We will not use buffers bigger than payloadSize bytes. */ Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize); /* Allocate the resources required for grabbing. After this, critical parameters * that impact the payload size must not be changed until FinishGrab() is called. */ Pylon.StreamGrabberPrepareGrab(hGrabber); /* Before using the buffers for grabbing, they must be registered at * the stream grabber. For each registered buffer, a buffer handle * is returned. After registering, these handles are used instead of the * buffer objects pointers. 
The buffer objects are held in a dictionary, * that provides access to the buffer using a handle as key. */ buffers = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >(); for (i = 0; i < NUM_BUFFERS; ++i) { PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true); PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer); buffers.Add(handle, buffer); } /* Feed the buffers into the stream grabber's input queue. For each buffer, the API * allows passing in an integer as additional context information. This integer * will be returned unchanged when the grab is finished. In our example, we use the index of the * buffer as context information. */ i = 0; foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers) { Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++); } /* Issue an acquisition start command. Because the trigger mode is enabled, issuing the start command * itself will not trigger any image acquisitions. Issuing the start command simply prepares the camera. * Once the camera is prepared it will acquire one image for every trigger it receives. */ Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart"); /* Trigger the first image. */ Pylon.DeviceExecuteCommandFeature(hDev, "TriggerSoftware"); /* Grab NUM_GRABS images. */ nGrabs = 0; /* Counts the number of images grabbed. */ while (nGrabs < NUM_GRABS) { int bufferIndex; /* Index of the buffer. */ Byte min = 255, max = 0; long chunkWidth = 0; /* data retrieved from the chunk parser */ long chunkHeight = 0; /* data retrieved from the chunk parser */ /* Wait for the next buffer to be filled. Wait up to 1000 ms. */ isReady = Pylon.WaitObjectWait(hWait, 1000); if (!isReady) { /* Timeout occurred. */ throw new Exception("Grab timeout occurred.\n"); } /* Since the wait operation was successful, the result of at least one grab * operation is available. Retrieve it. 
*/ isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult); if (!isReady) { /* Oops. No grab result available? We should never have reached this point. * Since the wait operation above returned without a timeout, a grab result * should be available. */ throw new Exception("Failed to retrieve a grab result.\n"); } nGrabs++; /* Trigger the next image. Since we passed more than one buffer to the stream grabber, * the triggered image will be grabbed while the image processing is performed. */ Pylon.DeviceExecuteCommandFeature(hDev, "TriggerSoftware"); /* Get the buffer index from the context information. */ bufferIndex = (int)grabResult.Context; /* Check to see if the image was grabbed successfully. */ if (grabResult.Status == EPylonGrabStatus.Grabbed) { /* The grab is successful. */ PylonBuffer <Byte> buffer; /* Reference to the buffer attached to the grab result. */ /* Get the buffer from the dictionary. Since we also got the buffer index, * we could alternatively use an array, e.g. buffers[bufferIndex]. */ if (!buffers.TryGetValue(grabResult.hBuffer, out buffer)) { /* Oops. No buffer available? We should never have reached this point. Since all buffers are * in the dictionary. */ throw new Exception("Failed to find the buffer associated with the handle returned in grab result."); } Console.WriteLine("Grabbed frame {0} into buffer {1}.", nGrabs, bufferIndex); /* Check to see if we really got image data plus chunk data. */ if (grabResult.PayloadType != EPylonPayloadType.PayloadType_ChunkData) { Console.WriteLine("Received a buffer not containing chunk data?"); } else { /* Process the chunk data. This is done by passing the grabbed image buffer * to the chunk parser. When the chunk parser has processed the buffer, the chunk * data can be accessed in the same manner as "normal" camera parameters. * The only exception is the CRC checksum feature. There are dedicated functions for * checking the CRC checksum. 
*/ bool hasCRC; /* Let the parser extract the data. */ Pylon.ChunkParserAttachBuffer(hChunkParser, buffer); /* Check the CRC checksum. */ hasCRC = Pylon.ChunkParserHasCRC(hChunkParser); if (hasCRC) { bool isOk = Pylon.ChunkParserCheckCRC(hChunkParser); Console.WriteLine("Frame {0} contains a CRC checksum. The checksum {1} ok.", nGrabs, isOk ? "is" : "is not"); } /* Retrieve the frame counter value. */ /* ... Check the availability. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "ChunkFramecounter"); Console.WriteLine("Frame {0} {1} a frame counter chunk.", nGrabs, isAvail ? "contains" : "doesn't contain"); if (isAvail) { /* ... Get the value. */ long counter; counter = Pylon.DeviceGetIntegerFeature(hDev, "ChunkFramecounter"); Console.WriteLine("Frame counter of frame {0}: {1}.", nGrabs, counter); } /* Retrieve the chunk width value. */ /* ... Check the availability. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "ChunkWidth"); Console.WriteLine("Frame {0} {1} a frame width chunk.", nGrabs, isAvail ? "contains" : "doesn't contain"); if (isAvail) { /* ... Get the value. */ chunkWidth = Pylon.DeviceGetIntegerFeature(hDev, "ChunkWidth"); Console.WriteLine("Width of frame {0}: {1}.", nGrabs, chunkWidth); } /* Retrieve the chunk height value. */ /* ... Check the availability. */ isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "ChunkHeight"); Console.WriteLine("Frame {0} {1} a frame height chunk.", nGrabs, isAvail ? "contains" : "doesn't contain"); if (isAvail) { /* ... Get the value. */ chunkHeight = Pylon.DeviceGetIntegerFeature(hDev, "ChunkHeight"); Console.WriteLine("Height of frame {0}: {1}.", nGrabs, chunkHeight); } } /* Perform the image processing. */ getMinMax(buffer.Array, chunkWidth, chunkHeight, out min, out max); Console.WriteLine("Min. gray value = {0}, Max. gray value = {1}", min, max); /* Before requeueing the buffer, you should detach it from the chunk parser. 
*/ Pylon.ChunkParserDetachBuffer(hChunkParser); /* Now the chunk data in the buffer is no longer accessible. */ } else if (grabResult.Status == EPylonGrabStatus.Failed) { Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}", nGrabs, grabResult.ErrorCode); } /* Once finished with the processing, requeue the buffer to be filled again. */ Pylon.StreamGrabberQueueBuffer(hGrabber, grabResult.hBuffer, bufferIndex); } /* Clean up. */ /* ... Stop the camera. */ Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStop"); /* ... We must issue a cancel call to ensure that all pending buffers are put into the * stream grabber's output queue. */ Pylon.StreamGrabberCancelGrab(hGrabber); /* ... The buffers can now be retrieved from the stream grabber. */ do { isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult); } while (isReady); /* ... When all buffers are retrieved from the stream grabber, they can be deregistered. * After deregistering the buffers, it is safe to free the memory */ foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers) { Pylon.StreamGrabberDeregisterBuffer(hGrabber, pair.Key); pair.Value.Dispose(); } buffers = null; /* ... Release grabbing related resources. */ Pylon.StreamGrabberFinishGrab(hGrabber); /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g., * the AOI width and height parameters) are unlocked and can be modified again. */ /* ... Close the stream grabber. */ Pylon.StreamGrabberClose(hGrabber); /* ... Release the chunk parser. */ Pylon.DeviceDestroyChunkParser(hDev, hChunkParser); /* Disable the software trigger and chunk mode. */ if (hDev.IsValid) { Pylon.DeviceSetBooleanFeature(hDev, "ChunkModeActive", false); Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); } /* ... Close and release the pylon device. The stream grabber becomes invalid * after closing the pylon device. 
Don't call stream grabber related methods after * closing or releasing the device. */ Pylon.DeviceClose(hDev); Pylon.DestroyDevice(hDev); /* ... Shut down the pylon runtime system. Don't call any pylon method after * calling PylonTerminate(). */ Pylon.Terminate(); Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); } catch (Exception e) { /* Retrieve more details about the error. * /* Retrieve the error message. */ string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail(); Console.Error.WriteLine("Exception caught:"); Console.Error.WriteLine(e.Message); if (msg != "\n") { Console.Error.WriteLine("Last error message:"); Console.Error.WriteLine(msg); } try { if (hDev.IsValid) { /* Disable the software trigger. */ Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off"); /* ... Close and release the pylon device. */ if (Pylon.DeviceIsOpen(hDev)) { Pylon.DeviceClose(hDev); } Pylon.DestroyDevice(hDev); } } catch (Exception) { /*No further handling here.*/ } Pylon.Terminate(); /* Releases all pylon resources. */ Console.Error.WriteLine("\nPress enter to exit."); Console.ReadLine(); Environment.Exit(1); } }
/// <summary>
/// The main entry point for the application.
/// Opens the connected Basler GigE cameras, configures each of them to be triggered
/// by a single broadcast action command, grabs one frame per camera, and releases
/// all pylon resources afterwards.
/// </summary>
static void Main()
{
    /* Use a random number as the device key. */
    uint DeviceKey = (uint)(new Random()).Next(int.MaxValue);
    /* In this sample all cameras belong to the same group. */
    const uint GroupKey = 0x24;

    PYLON_DEVICE_HANDLE[] hDev = new PYLON_DEVICE_HANDLE[MAX_NUM_DEVICES]; /* Handles for the pylon devices. */
    for (int deviceIndex = 0; deviceIndex < MAX_NUM_DEVICES; ++deviceIndex)
    {
        hDev[deviceIndex] = new PYLON_DEVICE_HANDLE();
    }

    try
    {
        uint numDevicesEnumerated;  /* Number of the devices connected to this PC. */
        uint numDevicesToUse;       /* Number of the devices to use in this sample. */
        bool isAvail;               /* Used for checking feature availability. */
        bool isReady;               /* Used as an output parameter. */
        int i;                      /* Counter. */
        uint deviceIndex;           /* Index of device used in the following variables. */
        PYLON_WAITOBJECTS_HANDLE wos; /* Wait objects. */

        /* These are camera specific variables: */
        PYLON_STREAMGRABBER_HANDLE[] hGrabber = new PYLON_STREAMGRABBER_HANDLE[MAX_NUM_DEVICES]; /* Handle for the pylon stream grabber. */
        PYLON_WAITOBJECT_HANDLE[] hWait = new PYLON_WAITOBJECT_HANDLE[MAX_NUM_DEVICES];          /* Handle used for waiting for a grab to be finished. */
        uint[] payloadSize = new uint[MAX_NUM_DEVICES];                                          /* Size of an image frame in bytes. */
        uint[] nStreams = new uint[MAX_NUM_DEVICES];                                             /* The number of streams provided by the device. */
        PYLON_STREAMBUFFER_HANDLE[] hBuffer = new PYLON_STREAMBUFFER_HANDLE[MAX_NUM_DEVICES];
        PylonBuffer <Byte>[] buffer = new PylonBuffer <Byte> [MAX_NUM_DEVICES];

#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the Programmer's Guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call
         * PylonEnumerateDevices() before creating a device. */
        numDevicesEnumerated = Pylon.EnumerateDevices();
        if (numDevicesEnumerated == 0)
        {
            Pylon.Terminate();
            Console.Error.WriteLine("No devices found!");
            Console.Error.WriteLine("\nPress enter to exit.");
            Console.ReadLine();
            return;
        }

        /* Create wait objects. This must be done outside of the loop. */
        wos = Pylon.WaitObjectsCreate();

        /* Open cameras and set parameter */
        deviceIndex = 0;
        for (uint enumeratedDeviceIndex = 0; enumeratedDeviceIndex < numDevicesEnumerated; ++enumeratedDeviceIndex)
        {
            /* Stop opening devices once all handle slots are used. Without this
             * check, more enumerated GigE cameras than MAX_NUM_DEVICES would
             * overflow the hDev / hGrabber / buffer arrays. */
            if (deviceIndex >= MAX_NUM_DEVICES)
            {
                break;
            }

            /* only open GigE devices */
            PYLON_DEVICE_INFO_HANDLE hDI = Pylon.GetDeviceInfoHandle(enumeratedDeviceIndex);
            if (Pylon.DeviceInfoGetPropertyValueByName(hDI, Pylon.cPylonDeviceInfoDeviceClassKey) != "BaslerGigE")
            {
                continue;
            }

            /* Get handles for the devices. */
            hDev[deviceIndex] = Pylon.CreateDeviceByIndex(enumeratedDeviceIndex);

            /* Before using the device, it must be opened. Open it for configuring
             * parameters and for grabbing images. */
            Pylon.DeviceOpen(hDev[deviceIndex], Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

            /* Print out the name of the camera we are using. */
            Console.WriteLine("Using camera '{0}'", Pylon.DeviceInfoGetPropertyValueByName(hDI, Pylon.cPylonDeviceInfoModelNameKey));

            /* Action commands require the ActionControl feature group. */
            isAvail = Pylon.DeviceFeatureIsReadable(hDev[deviceIndex], "ActionControl");
            if (!isAvail)
            {
                throw new Exception("Device doesn't support the Action Command");
            }

            /* Configure the first action */
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "ActionSelector", 1);
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "ActionDeviceKey", DeviceKey);
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "ActionGroupKey", GroupKey);
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "ActionGroupMask", AllGroupMask);

            /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */
            /* ... Check first to see if the device supports the Mono8 format. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_PixelFormat_Mono8");
            if (!isAvail)
            {
                /* Feature is not available. */
                throw new Exception("Device doesn't support the Mono8 pixel format.");
            }
            /* ... Set the pixel format to Mono8. */
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "PixelFormat", "Mono8");

            /* Disable acquisition start trigger if available */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_AcquisitionStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "AcquisitionStart");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* Disable line1 trigger if available */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_Line1");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "Line1");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* Enable frame start trigger with first action */
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "FrameStart");
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "On");
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSource", "Action1");

            /* For GigE cameras, we recommend increasing the packet size for better
             * performance. When the network adapter supports jumbo frames, set the packet
             * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size
             * to 1500.
             *
             * We also set the Inter-Packet and the Frame Transmission delay
             * so the switch can line up packets better. */
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPSPacketSize", GIGE_PACKET_SIZE);
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * (MAX_NUM_DEVICES - 1));
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCFTD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * deviceIndex);

            /* one device opened */
            ++deviceIndex;
        }

        /* Remember the number of devices actually created.
         * (The original sample assigned this twice; once is enough.) */
        numDevicesToUse = deviceIndex;

        if (numDevicesToUse == 0)
        {
            Console.Error.WriteLine("No suitable cameras found!");
            Pylon.Terminate();  /* Releases all pylon resources. */
            Console.Error.WriteLine("\nPress enter to exit.");
            Console.ReadLine();
            Environment.Exit(0);
        }

        if (numDevicesToUse < 2)
        {
            Console.Error.WriteLine("WARNING: This sample works best with two or more GigE cameras supporting action commands.");
        }

        /* Allocate and register buffers for grab. */
        for (deviceIndex = 0; deviceIndex < numDevicesToUse; ++deviceIndex)
        {
            /* Determine the required size for the grab buffer. */
            payloadSize[deviceIndex] = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev[deviceIndex], "PayloadSize"));

            /* Image grabbing is done using a stream grabber.
             * A device may be able to provide different streams. A separate stream grabber must
             * be used for each stream. In this sample, we create a stream grabber for the default
             * stream, i.e., the first stream ( index == 0 ). */

            /* Get the number of streams supported by the device and the transport layer. */
            nStreams[deviceIndex] = Pylon.DeviceGetNumStreamGrabberChannels(hDev[deviceIndex]);
            if (nStreams[deviceIndex] < 1)
            {
                throw new Exception("The transport layer doesn't support image streams.");
            }

            /* Create and open a stream grabber for the first channel. */
            hGrabber[deviceIndex] = Pylon.DeviceGetStreamGrabber(hDev[deviceIndex], 0);
            Pylon.StreamGrabberOpen(hGrabber[deviceIndex]);

            /* Get a handle for the stream grabber's wait object. The wait object
             * allows waiting for buffers to be filled with grabbed data. */
            hWait[deviceIndex] = Pylon.StreamGrabberGetWaitObject(hGrabber[deviceIndex]);

            /* Add the stream grabber's wait object to our wait objects.
             * This is needed to be able to wait until all cameras have
             * grabbed an image in our grab loop below. */
            Pylon.WaitObjectsAdd(wos, hWait[deviceIndex]);

            /* We must tell the stream grabber the number and size of the buffers
             * we are using. */
            /* .. We will not use more than NUM_BUFFERS for grabbing. */
            Pylon.StreamGrabberSetMaxNumBuffer(hGrabber[deviceIndex], NUM_BUFFERS);
            /* .. We will not use buffers bigger than payloadSize bytes. */
            Pylon.StreamGrabberSetMaxBufferSize(hGrabber[deviceIndex], payloadSize[deviceIndex]);

            /* Allocate the resources required for grabbing. After this, critical parameters
             * that impact the payload size must not be changed until FinishGrab() is called. */
            Pylon.StreamGrabberPrepareGrab(hGrabber[deviceIndex]);

            /* Before using the buffers for grabbing, they must be registered at
             * the stream grabber. For each registered buffer, a buffer handle
             * is returned. After registering, these handles are used instead of the
             * buffer objects pointers. */
            buffer[deviceIndex] = new PylonBuffer <byte>(payloadSize[deviceIndex], true);
            hBuffer[deviceIndex] = Pylon.StreamGrabberRegisterBuffer(hGrabber[deviceIndex], ref buffer[deviceIndex]);

            /* Feed the buffers into the stream grabber's input queue. */
            Pylon.StreamGrabberQueueBuffer(hGrabber[deviceIndex], hBuffer[deviceIndex], 0);
        }

        /* The stream grabber is now prepared. Start the image acquisition.
         * The camera won't send any image data, since it's configured to wait
         * for the action to trigger the acquisition */
        for (deviceIndex = 0; deviceIndex < numDevicesToUse; ++deviceIndex)
        {
            Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStart");
        }

        /* ======================================================================
         * Issue an ActionCommand and retrieve the images.
         * ====================================================================== */
        Console.WriteLine("*** Issuing action command ***");

        /* Trigger the camera using an action command (w/o waiting for results).
         * If your setup supports PTP, you could use a scheduled action command:
         * Pylon.GigEIssueScheduledActionCommand(subnet, DefaultDeviceKey, DefaultGroupKey, 1, triggertime, 0) */
        string subnet = Pylon.DeviceInfoGetPropertyValueByName(Pylon.DeviceGetDeviceInfoHandle(hDev[0]), "SubnetAddress");
        Pylon.GigEIssueActionCommand(DeviceKey, GroupKey, 1, subnet);

        /* Grab one image from each camera. */
        for (i = 0; i < numDevicesToUse; ++i)
        {
            uint woIndex; /* this corresponds to the index in hDev and hGrabber */

            /* Wait for the next buffer to be filled. Wait up to 5000 ms. */
            isReady = Pylon.WaitObjectsWaitForAny(wos, 5000, out woIndex);
            if (!isReady)
            {
                /* Grab Timeout occurred. */
                throw new Exception("Grab timeout occurred.");
            }

            PylonGrabResult_t grabResult;
            /* Since the wait operation was successful, the result of at least one grab
             * operation is available. Retrieve it. */
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[woIndex], out grabResult);
            if (!isReady)
            {
                /* Oops. No grab result available? We should never have reached this point.
                 * Since the wait operation above returned without a timeout, a grab result
                 * should be available. */
                throw new Exception("Failed to retrieve a grab result.");
            }

            /* Check to see if the image was grabbed successfully. */
            if (grabResult.Status == EPylonGrabStatus.Grabbed)
            {
                /* Success. Perform image processing. The processed buffer won't be touched by
                 * the stream grabber until we pass it back to the stream grabber. */

                /* We only use one buffer per camera */
                System.Diagnostics.Debug.Assert(grabResult.hBuffer == hBuffer[woIndex]);
                byte pixel = buffer[woIndex].Array[0];

                /* Perform processing. */
                Console.WriteLine("Grabbed a frame from camera {0}.", woIndex);

                /* Display image */
                if (woIndex < 32)
                {
                    Pylon.ImageWindowDisplayImage <Byte>(woIndex, buffer[woIndex], grabResult);
                }
            }
            else if (grabResult.Status == EPylonGrabStatus.Failed)
            {
                /* If a buffer has been incompletely grabbed the network bandwidth is possibly insufficient for transferring
                 * multiple images simultaneously. See note above MAX_NUM_DEVICES. */
                Console.Error.WriteLine("Frame from camera {0} wasn't grabbed successfully. Error code = {1}", woIndex, grabResult.ErrorCode);
            }
        }

        /* Stop the image acquisition on the cameras. */
        for (deviceIndex = 0; deviceIndex < numDevicesToUse; ++deviceIndex)
        {
            /* Stop the camera. */
            Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStop");
        }

        // Remove all wait objects from WaitObjects.
        Pylon.WaitObjectsRemoveAll(wos);
        Pylon.WaitObjectsDestroy(wos);

        for (deviceIndex = 0; deviceIndex < numDevicesToUse; ++deviceIndex)
        {
            /* We must issue a cancel call to ensure that all pending buffers are put into the
             * stream grabber's output queue. */
            Pylon.StreamGrabberCancelGrab(hGrabber[deviceIndex]);

            /* The buffers can now be retrieved from the stream grabber. */
            do
            {
                PylonGrabResult_t grabResult;
                isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[deviceIndex], out grabResult);
            }
            while (isReady);

            /* When all buffers are retrieved from the stream grabber, they can be de-registered.
             * After de-registering the buffers, it is safe to free the memory. */
            Pylon.StreamGrabberDeregisterBuffer(hGrabber[deviceIndex], hBuffer[deviceIndex]);
            buffer[deviceIndex].Dispose();
            buffer[deviceIndex] = null;

            /* Release grabbing related resources. */
            Pylon.StreamGrabberFinishGrab(hGrabber[deviceIndex]);

            /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g.,
             * the AOI width and height parameters) are unlocked and can be modified again. */

            /* Close the stream grabber. */
            Pylon.StreamGrabberClose(hGrabber[deviceIndex]);

            /* Close and release the pylon device. The stream grabber becomes invalid
             * after closing the pylon device. Don't call stream grabber related methods after
             * closing or releasing the device. */
            Pylon.DeviceClose(hDev[deviceIndex]);
            Pylon.DestroyDevice(hDev[deviceIndex]);
        }

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();

        /* Shut down the pylon runtime system. Don't call any pylon function after
         * calling PylonTerminate(). */
        Pylon.Terminate();
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        /* Best-effort cleanup: close and destroy every device that was created. */
        for (uint deviceIndex = 0; deviceIndex < MAX_NUM_DEVICES; ++deviceIndex)
        {
            try
            {
                if (hDev[deviceIndex].IsValid)
                {
                    /* Close and release the pylon device. */
                    if (Pylon.DeviceIsOpen(hDev[deviceIndex]))
                    {
                        Pylon.DeviceClose(hDev[deviceIndex]);
                    }
                    Pylon.DestroyDevice(hDev[deviceIndex]);
                }
            }
            catch (Exception)
            {
                /* No further handling here.*/
            }
        }

        Pylon.Terminate();  /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/* Some camera features behave like enumerations: they accept one value out of a fixed
 * set of named entries. The pixel format feature is one example. This function shows
 * how such features are read, probed for supported entries, and written.
 */
private static void demonstrateEnumFeature(PYLON_DEVICE_HANDLE hDev)
{
    /* Enumeration values are exchanged as strings via PylonDeviceFeatureToString()
     * and PylonDeviceFeatureFromString(). Read the currently selected entry first,
     * so it can be restored at the end. */
    string originalFormat = Pylon.DeviceFeatureToString(hDev, "PixelFormat");
    Console.WriteLine("PixelFormat: {0}", originalFormat);

    /* The pylon Viewer's "Feature Documentation" window lists the possible entry
     * names for an enumeration feature, but a given device may support only a
     * subset of them. To probe whether "SomeValue" can be set on "SomeFeature",
     * query PylonDeviceFeatureIsAvailable() with "EnumEntry_SomeFeature_SomeValue". */
    bool hasMono8 = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
    Console.WriteLine("Mono8 {0} a supported value for the PixelFormat feature.", hasMono8 ? "is" : "isn't");

    bool hasYuv422Packed = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_YUV422Packed");
    Console.WriteLine("YUV422Packed {0} a supported value for the PixelFormat feature.", hasYuv422Packed ? "is" : "isn't");

    bool hasMono16 = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono16");
    Console.WriteLine("Mono16 {0} a supported value for the PixelFormat feature.", hasMono16 ? "is" : "isn't");

    /* Before writing, check whether the enumeration feature itself is currently
     * writable; skip the demonstration entirely when it is not. */
    if (!Pylon.DeviceFeatureIsWritable(hDev, "PixelFormat"))
    {
        return;
    }

    /* The feature is writable — set it to one of the supported entries. */
    if (hasMono16)
    {
        Console.WriteLine("Setting PixelFormat to Mono16.");
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono16");
    }
    else if (hasYuv422Packed)
    {
        Console.WriteLine("Setting PixelFormat to YUV422Packed.");
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "YUV422Packed");
    }
    else if (hasMono8)
    {
        Console.WriteLine("Setting PixelFormat to Mono8.");
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");
    }

    /* Restore the pixel format that was active on entry. */
    Pylon.DeviceFeatureFromString(hDev, "PixelFormat", originalFormat);
}
/* Configures software triggering and chunk mode on the device, grabs NUM_GRABS (currently 1)
 * software-triggered frames, extracts the ChunkWidth/ChunkHeight chunk data from each frame,
 * and — if the optional CRC check passes — publishes the frame as _latestImage.
 *
 * NOTE(review): the chunk parser, stream grabber, and registered buffers created here are
 *               never released (no DeviceDestroyChunkParser / StreamGrabberFinishGrab /
 *               StreamGrabberClose, no AcquisitionStop) — presumably cleanup happens
 *               elsewhere; verify against the caller.
 * NOTE(review): _latestImage wraps buffer.Pointer, but the buffer is requeued right after —
 *               the HImage may reference memory the grabber refills; confirm HImage copies
 *               the pixel data.
 */
private void chunkGrabImageMethod()
{
    string triggerSelectorValue = "FrameStart";

    /* Preselect the trigger for image acquisition. */
    /* Check the available camera trigger mode(s) to select the appropriate one:
     * acquisition start trigger mode (used by previous cameras;
     * do not confuse with acquisition start command) or
     * frame start trigger mode (equivalent to previous acquisition start trigger mode). */
    bool isAvailAcquisitionStart = Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, "EnumEntry_TriggerSelector_AcquisitionStart");
    bool isAvailFrameStart = Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, "EnumEntry_TriggerSelector_FrameStart");

    /* Check to see if the camera implements the acquisition start trigger mode only. */
    bool isAcqStartTriggerModeOnly = (isAvailAcquisitionStart && !isAvailFrameStart);

    if (isAcqStartTriggerModeOnly)
    {
        /* Camera uses the acquisition start trigger as the only trigger mode. */
        Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerSelector", "AcquisitionStart");
        Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerMode", "On");
        triggerSelectorValue = "AcquisitionStart";
    }
    else
    {
        /* Camera may have the acquisition start trigger mode and the frame start trigger mode implemented.
         * In this case, the acquisition trigger mode must be switched off. */
        if (isAvailAcquisitionStart)
        {
            Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerSelector", "AcquisitionStart");
            Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerMode", "Off");
        }

        /* To trigger each single frame by software or external hardware trigger: Enable the frame start trigger mode. */
        Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerSelector", "FrameStart");
        Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerMode", "On");
    }

    /* Note: the trigger selector must be set to the appropriate trigger mode
     * before setting the trigger source or issuing software triggers.
     * Frame start trigger mode for newer cameras, acquisition start trigger mode for previous cameras. */
    Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerSelector", triggerSelectorValue);

    /* Enable software triggering. */
    /* ... Select the software trigger as the trigger source. */
    Pylon.DeviceFeatureFromString(_pylonDevHandle, "TriggerSource", "Software");

    /* When using software triggering, the Continuous frame mode should be used. Once
     * acquisition is started, the camera sends one image each time a software trigger is
     * issued. */
    Pylon.DeviceFeatureFromString(_pylonDevHandle, "AcquisitionMode", "Continuous");

    /* Enable chunk mode and the individual chunks (frame counter, CRC) on the device. */
    setChunkModeFeatures();

    /* The data block containing the image chunk and the other chunks has a self-descriptive layout.
     * A chunk parser is used to extract the appended chunk data from the grabbed image frame.
     * Create a chunk parser. */
    PYLON_CHUNKPARSER_HANDLE hChunkParser = Pylon.DeviceCreateChunkParser(_pylonDevHandle);
    if (!hChunkParser.IsValid)
    {
        /* The transport layer doesn't provide a chunk parser. */
        throw new Exception("No chunk parser available.");
    }

    /* Image grabbing is done using a stream grabber.
     * A device may be able to provide different streams. A separate stream grabber must
     * be used for each stream. In this sample, we create a stream grabber for the default
     * stream, i.e., the first stream ( index == 0 ). */

    /* Get the number of streams supported by the device and the transport layer. */
    var nStreams = Pylon.DeviceGetNumStreamGrabberChannels(_pylonDevHandle);
    if (nStreams < 1)
    {
        throw new Exception("The transport layer doesn't support image streams.");
    }

    /* Create and open a stream grabber for the first channel. */
    uint firstChannel = 0;
    PYLON_STREAMGRABBER_HANDLE hGrabber = Pylon.DeviceGetStreamGrabber(_pylonDevHandle, firstChannel);
    Pylon.StreamGrabberOpen(hGrabber);

    /* Get a handle for the stream grabber's wait object. The wait object
     * allows waiting for buffers to be filled with grabbed data. */
    PYLON_WAITOBJECT_HANDLE hWait = Pylon.StreamGrabberGetWaitObject(hGrabber);

    /* Determine the required size of the grab buffer. Since activating chunks will increase the
     * payload size and thus the required buffer size, do this after enabling the chunks. */
    uint payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(_pylonDevHandle, "PayloadSize"));

    /* We must tell the stream grabber the number and size of the buffers
     * we are using. */
    /* .. We will not use more than NUM_BUFFERS for grabbing. */
    uint NUM_BUFFERS = 2;
    Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS);

    /* .. We will not use buffers bigger than payloadSize bytes. */
    Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize);

    /* Allocate the resources required for grabbing. After this, critical parameters
     * that impact the payload size must not be changed until FinishGrab() is called. */
    Pylon.StreamGrabberPrepareGrab(hGrabber);

    /* Before using the buffers for grabbing, they must be registered at
     * the stream grabber. For each registered buffer, a buffer handle
     * is returned. After registering, these handles are used instead of the
     * buffer objects pointers. The buffer objects are held in a dictionary,
     * that provides access to the buffer using a handle as key. */
    var buffers = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >();
    for (int i = 0; i < NUM_BUFFERS; ++i)
    {
        PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true);
        PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer);
        buffers.Add(handle, buffer);
    }

    /* Feed the buffers into the stream grabber's input queue. For each buffer, the API
     * allows passing in an integer as additional context information. This integer
     * will be returned unchanged when the grab is finished. In our example, we use the index of the
     * buffer as context information. */
    var index = 0;
    foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers)
    {
        Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, index++);
    }

    /* Issue an acquisition start command. Because the trigger mode is enabled, issuing the start command
     * itself will not trigger any image acquisitions. Issuing the start command simply prepares the camera.
     * Once the camera is prepared it will acquire one image for every trigger it receives. */
    Pylon.DeviceExecuteCommandFeature(_pylonDevHandle, "AcquisitionStart");

    /* Trigger the first image. */
    Pylon.DeviceExecuteCommandFeature(_pylonDevHandle, "TriggerSoftware");

    /* Grab NUM_GRABS images. */
    PylonGrabResult_t grabResult;
    int NUM_GRABS = 1;
    int nGrabs = 0; /* Counts the number of images grabbed. */
    while (nGrabs < NUM_GRABS)
    {
        int bufferIndex; /* Index of the buffer. */

        /* Wait for the next buffer to be filled. Wait up to 1000 ms. */
        bool isReady = Pylon.WaitObjectWait(hWait, 1000);
        if (!isReady)
        {
            /* Timeout occurred. */
            throw new Exception("Grab timeout occurred.\n");
        }

        /* Since the wait operation was successful, the result of at least one grab
         * operation is available. Retrieve it. */
        isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
        if (!isReady)
        {
            /* Oops. No grab result available? We should never have reached this point.
             * Since the wait operation above returned without a timeout, a grab result
             * should be available. */
            throw new Exception("Failed to retrieve a grab result.\n");
        }
        nGrabs++;

        /* Trigger the next image. Since we passed more than one buffer to the stream grabber,
         * the triggered image will be grabbed while the image processing is performed. */
        Pylon.DeviceExecuteCommandFeature(_pylonDevHandle, "TriggerSoftware");

        /* Get the buffer index from the context information. */
        bufferIndex = (int)grabResult.Context;

        /* Check to see if the image was grabbed successfully. */
        if (grabResult.Status == EPylonGrabStatus.Grabbed)
        {
            /* The grab is successful. */
            PylonBuffer <Byte> buffer; /* Reference to the buffer attached to the grab result. */

            /* Get the buffer from the dictionary. Since we also got the buffer index,
             * we could alternatively use an array, e.g. buffers[bufferIndex]. */
            if (!buffers.TryGetValue(grabResult.hBuffer, out buffer))
            {
                /* Oops. No buffer available? We should never have reached this point. Since all buffers are
                 * in the dictionary. */
                throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
            }
            //Console.WriteLine("Grabbed frame {0} into buffer {1}.", nGrabs, bufferIndex);

            /* Check to see if we really got image data plus chunk data. */
            if (grabResult.PayloadType != EPylonPayloadType.PayloadType_ChunkData)
            {
                Console.WriteLine("Received a buffer not containing chunk data?");
            }
            else
            {
                /* Process the chunk data. This is done by passing the grabbed image buffer
                 * to the chunk parser. When the chunk parser has processed the buffer, the chunk
                 * data can be accessed in the same manner as "normal" camera parameters.
                 * The only exception is the CRC checksum feature. There are dedicated functions for
                 * checking the CRC checksum. */
                bool hasCRC;

                /* Let the parser extract the data. */
                Pylon.ChunkParserAttachBuffer(hChunkParser, buffer);

                /* Check the CRC checksum. */
                hasCRC = Pylon.ChunkParserHasCRC(hChunkParser);
                /* If no CRC chunk is present, treat the frame as valid. */
                bool isOk = (hasCRC) ? Pylon.ChunkParserCheckCRC(hChunkParser) : true;

                /* Retrieve the chunk width value (defaults to 0 if the chunk is absent). */
                bool isAvail = Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, "ChunkWidth");
                long chunkWidth = 0, chunkHeight = 0;
                //Console.WriteLine("Frame {0} {1} a frame width chunk.", nGrabs, isAvail ? "contains" : "doesn't contain");
                if (isAvail)
                {
                    /* ... Get the value. */
                    chunkWidth = Pylon.DeviceGetIntegerFeature(_pylonDevHandle, "ChunkWidth");
                    //Console.WriteLine("Width of frame {0}: {1}.", nGrabs, chunkWidth);
                }

                /* Retrieve the chunk height value. */
                /* ... Check the availability. */
                isAvail = Pylon.DeviceFeatureIsAvailable(_pylonDevHandle, "ChunkHeight");
                Console.WriteLine("Frame {0} {1} a frame height chunk.", nGrabs, isAvail ? "contains" : "doesn't contain");
                if (isAvail)
                {
                    /* ... Get the value. */
                    chunkHeight = Pylon.DeviceGetIntegerFeature(_pylonDevHandle, "ChunkHeight");
                    //Console.WriteLine("Height of frame {0}: {1}.", nGrabs, chunkHeight);
                }

                /* Publish the frame only when the (optional) CRC check passed. */
                if (isOk)
                {
                    _latestImage = new HImage("byte", (int)chunkWidth, (int)chunkHeight, buffer.Pointer);
                }
            }

            /* Before requeueing the buffer, you should detach it from the chunk parser. */
            /* NOTE(review): this detach also runs when the payload was not chunk data and no
             * buffer was attached above — presumably harmless; confirm with the pylon API docs. */
            Pylon.ChunkParserDetachBuffer(hChunkParser); /* Now the chunk data in the buffer is no longer accessible. */
        }
        else if (grabResult.Status == EPylonGrabStatus.Failed)
        {
            Console.Error.WriteLine("Frame {0} wasn't grabbed successfully.  Error code = {1}", nGrabs, grabResult.ErrorCode);
        }

        /* Once finished with the processing, requeue the buffer to be filled again. */
        Pylon.StreamGrabberQueueBuffer(hGrabber, grabResult.hBuffer, bufferIndex);
    }
}
const uint NUM_EVENT_BUFFERS = 20; /* Number of buffers used for grabbing. */

/* Entry point: configures the first camera found for continuous grabbing with end-of-exposure
 * events, grabs NUM_GRABS images while dispatching event messages to a registered callback,
 * then tears everything down. Exits with code 1 on any error. */
static void Main(string[] args)
{
    PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */
    try
    {
        uint numDevices;                           /* Number of available devices. */
        PYLON_STREAMGRABBER_HANDLE hStreamGrabber; /* Handle for the pylon stream grabber. */
        PYLON_EVENTGRABBER_HANDLE hEventGrabber;   /* Handle for the event grabber used for receiving events. */
        PYLON_EVENTADAPTER_HANDLE hEventAdapter;   /* Handle for the event adapter used for dispatching events. */
        PYLON_WAITOBJECT_HANDLE hWaitStream;       /* Handle used for waiting for a grab to be finished. */
        PYLON_WAITOBJECT_HANDLE hWaitEvent;        /* Handle used for waiting for an event message. */
        PYLON_WAITOBJECTS_HANDLE hWaitObjects;     /* Container allowing waiting for multiple wait objects. */
        NODEMAP_HANDLE hNodeMap;                   /* Handle for the node map containing the camera parameters. */
        NODE_CALLBACK_HANDLE hCallback;            /* Used for deregistering a callback function. */
        NODE_HANDLE hNode;                         /* Handle for a camera parameter. */
        uint payloadSize;                          /* Size of an image frame in bytes. */
        Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > buffers; /* Holds handles and buffers used for grabbing. */
        PylonGrabResult_t grabResult;              /* Stores the result of a grab operation. */
        NodeCallbackHandler callbackHandler = new NodeCallbackHandler(); /* Handles incoming callbacks. */
        int nGrabs;                                /* Counts the number of buffers grabbed. */
        uint nStreams;                             /* The number of streams the device provides. */
        bool isAvail;                              /* Used for checking feature availability. */
        bool isReady;                              /* Used as an output parameter. */
        int i;                                     /* Counter. */
        PylonEventResult_t eventMsg = new PylonEventResult_t(); /* Event data container. */
        long sfncVersionMajor;                     /* The major number of the Standard Feature Naming Convention (SFNC)
                                                    * version used by the camera device. */

#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the programmer's guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call
         * PylonEnumerateDevices() before creating a device. */
        numDevices = Pylon.EnumerateDevices();
        if (0 == numDevices)
        {
            throw new Exception("No devices found.");
        }

        /* Get a handle for the first device found. */
        hDev = Pylon.CreateDeviceByIndex(0);

        /* Before using the device, it must be opened. Open it for configuring
         * parameters, for grabbing images, and for grabbing events. */
        Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream | Pylon.cPylonAccessModeEvent);

        /* Print out the name of the camera we are using. */
        {
            bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName");
            if (isReadable)
            {
                string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName");
                Console.WriteLine("Using camera {0}", name);
            }
        }

        /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */
        /* ... Check first to see if the device supports the Mono8 format. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
        if (!isAvail)
        {
            /* Feature is not available. */
            throw new Exception("Device doesn't support the Mono8 pixel format.");
        }
        /* ... Set the pixel format to Mono8. */
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");

        /* Disable acquisition start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Disable frame burst start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Disable frame start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* We will use the Continuous frame mode, i.e., the camera delivers
         * images continuously. */
        Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous");

        /* For GigE cameras, we recommend increasing the packet size for better
         * performance. If the network adapter supports jumbo frames, set the packet
         * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size
         * to 1500. */
        /* ... Check first to see if the GigE camera packet size parameter is supported and if it is writable. */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
        if (isAvail)
        {
            /* ... The device supports the packet size feature. Set a value. */
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500);
        }

        /* Determine the required size of the grab buffer. */
        payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize"));

        /* Determine the major number of the SFNC version used by the camera device. */
        if (Pylon.DeviceFeatureIsAvailable(hDev, "DeviceSFNCVersionMajor"))
        {
            sfncVersionMajor = Pylon.DeviceGetIntegerFeature(hDev, "DeviceSFNCVersionMajor");
        }
        else
        {
            /* No SFNC version information is provided by the camera device. */
            sfncVersionMajor = 0;
        }

        /* Enable camera events. */
        /* Select the end-of-exposure event. */
        Pylon.DeviceFeatureFromString(hDev, "EventSelector", "ExposureEnd");
        /* Enable the event. Select the enumeration entry name depending on the SFNC version used by the camera device. */
        if (sfncVersionMajor >= 2)
        {
            Pylon.DeviceFeatureFromString(hDev, "EventNotification", "On");
        }
        else
        {
            Pylon.DeviceFeatureFromString(hDev, "EventNotification", "GenICamEvent");
        }

        /* Image grabbing is done using a stream grabber.
         * A device may be able to provide different streams. A separate stream grabber must
         * be used for each stream. In this sample, we create a stream grabber for the default
         * stream, i.e., the first stream ( index == 0 ). */

        /* Get the number of streams supported by the device and the transport layer. */
        nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev);
        if (nStreams < 1)
        {
            throw new Exception("The transport layer doesn't support image streams");
        }

        /* Create and open a stream grabber for the first channel. */
        hStreamGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0);
        Pylon.StreamGrabberOpen(hStreamGrabber);

        /* Get a handle for the stream grabber's wait object. The wait object
         * allows waiting for buffers to be grabbed. */
        hWaitStream = Pylon.StreamGrabberGetWaitObject(hStreamGrabber);

        /* We must tell the stream grabber the number and size of the buffers we are using. */
        /* .. We will not use more than NUM_BUFFERS for grabbing. */
        Pylon.StreamGrabberSetMaxNumBuffer(hStreamGrabber, NUM_IMAGE_BUFFERS);
        /* .. We will not use buffers bigger than payloadSize bytes. */
        Pylon.StreamGrabberSetMaxBufferSize(hStreamGrabber, payloadSize);

        /* Allocate the resources required for grabbing. After this, critical parameters
         * that impact the payload size must not be changed until FinishGrab() is called. */
        Pylon.StreamGrabberPrepareGrab(hStreamGrabber);

        /* Before using the buffers for grabbing, they must be registered at
         * the stream grabber. For each registered buffer, a buffer handle
         * is returned. After registering, these handles are used instead of the
         * buffer object pointers. The buffer objects are held in a dictionary,
         * that provides access to the buffer using a handle as key. */
        buffers = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >();
        for (i = 0; i < NUM_IMAGE_BUFFERS; ++i)
        {
            PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true);
            PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hStreamGrabber, ref buffer);
            buffers.Add(handle, buffer);
        }

        /* Feed the buffers into the stream grabber's input queue. For each buffer, the API
         * allows passing in an integer as additional context information. This integer
         * will be returned unchanged when the grab is finished. In our example, we use the index of the
         * buffer as context information. */
        i = 0;
        foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers)
        {
            Pylon.StreamGrabberQueueBuffer(hStreamGrabber, pair.Key, i++);
        }

        /* The stream grabber is now prepared. As soon the camera starts to acquire images,
         * the image data will be grabbed into the provided buffers. */

        /* Create and prepare an event grabber. */
        /* ... Get a handle for the event grabber. */
        hEventGrabber = Pylon.DeviceGetEventGrabber(hDev);
        if (!hEventGrabber.IsValid)
        {
            /* The transport layer doesn't support event grabbers. */
            throw new Exception("No event grabber supported.");
        }

        /* ... Tell the grabber how many buffers to use. */
        Pylon.EventGrabberSetNumBuffers(hEventGrabber, NUM_EVENT_BUFFERS);

        /* ... Open the event grabber. */
        Pylon.EventGrabberOpen(hEventGrabber); /* The event grabber is now ready for receiving events. */

        /* Retrieve the wait object that is associated with the event grabber. The event
         * will be signaled when an event message has been received. */
        hWaitEvent = Pylon.EventGrabberGetWaitObject(hEventGrabber);

        /* For extracting the event data from an event message, an event adapter is used. */
        hEventAdapter = Pylon.DeviceCreateEventAdapter(hDev);
        if (!hEventAdapter.IsValid)
        {
            /* The transport layer doesn't support event adapters. */
            throw new Exception("No event adapter supported.");
        }

        /* Register the callback function for the ExposureEndEventFrameID parameter. */
        /* ... Get the node map containing all parameters. */
        hNodeMap = Pylon.DeviceGetNodeMap(hDev);
        /* Get the ExposureEndEventFrameID parameter.
         * Select the parameter name depending on the SFNC version used by the camera device. */
        if (sfncVersionMajor >= 2)
        {
            hNode = GenApi.NodeMapGetNode(hNodeMap, "EventExposureEndFrameID");
        }
        else
        {
            hNode = GenApi.NodeMapGetNode(hNodeMap, "ExposureEndEventFrameID");
        }
        if (!hNode.IsValid)
        {
            /* There is no ExposureEndEventFrameID parameter. */
            throw new Exception("There is no ExposureEndEventFrameID or EventExposureEndFrameID parameter!");
        }

        /* ... Register the callback function. */
        callbackHandler.CallbackEvent += new NodeCallbackHandler.NodeCallback(endOfExposureCallback);
        hCallback = GenApi.NodeRegisterCallback(hNode, callbackHandler);

        /* Put the wait objects into a container. */
        /* ... Create the container. */
        hWaitObjects = Pylon.WaitObjectsCreate();
        /* ... Add the wait objects' handles. */
        Pylon.WaitObjectsAdd(hWaitObjects, hWaitEvent);
        Pylon.WaitObjectsAdd(hWaitObjects, hWaitStream);

        /* Let the camera acquire images. */
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart");

        /* Grab NUM_GRABS images. */
        nGrabs = 0; /* Counts the number of images grabbed. */
        while (nGrabs < NUM_GRABS)
        {
            int bufferIndex;      /* Index of the buffer. */
            uint waitObjectIndex; /* Index of the wait object that is signalled. */
            Byte min, max;

            /* Wait for either an image buffer grabbed or an event received. Wait up to 1000 ms. */
            isReady = Pylon.WaitObjectsWaitForAny(hWaitObjects, 1000, out waitObjectIndex);
            if (!isReady)
            {
                /* Timeout occurred. */
                throw new Exception("Timeout. Neither grabbed an image nor received an event.");
            }

            if (0 == waitObjectIndex)
            {
                /* hWaitEvent has been signalled. At least one event message is available. Retrieve it. */
                isReady = Pylon.EventGrabberRetrieveEvent(hEventGrabber, ref eventMsg);
                if (!isReady)
                {
                    /* Oops. No event message available? We should never have reached this point.
                     * Since the wait operation above returned without a timeout, an event message
                     * should be available. */
                    throw new Exception("Failed to retrieve an event.");
                }
                /* Check to see if the event was successfully received. */
                if (0 == eventMsg.ErrorCode)
                {
                    /* Successfully received an event message. */
                    /* Pass the event message to the event adapter. The event adapter will
                     * update the parameters related to events and will fire the callbacks
                     * registered to event related parameters. */
                    Pylon.EventAdapterDeliverMessage(hEventAdapter, eventMsg);
                }
                else
                {
                    /* FIX: the format index was {1}, which throws a FormatException because only
                     * one argument is supplied. Use {0} so the error code is actually printed. */
                    Console.Error.WriteLine("Error when receiving an event: {0}", eventMsg.ErrorCode);
                }
            }
            else if (1 == waitObjectIndex)
            {
                /* hWaitStream has been signalled. The result of at least one grab
                 * operation is available. Retrieve it. */
                isReady = Pylon.StreamGrabberRetrieveResult(hStreamGrabber, out grabResult);
                if (!isReady)
                {
                    /* Oops. No grab result available? We should never have reached this point.
                     * Since the wait operation above returned without a timeout, a grab result
                     * should be available. */
                    throw new Exception("Failed to retrieve a grab result.");
                }
                nGrabs++;

                /* Get the buffer index from the context information. */
                bufferIndex = (int)grabResult.Context;

                /* Check to see if the image was grabbed successfully. */
                if (grabResult.Status == EPylonGrabStatus.Grabbed)
                {
                    /* Success. Perform image processing. Since we passed more than one buffer
                     * to the stream grabber, the remaining buffers are filled while
                     * we do the image processing. The processed buffer won't be touched by
                     * the stream grabber until we pass it back to the stream grabber. */
                    PylonBuffer <Byte> buffer; /* Reference to the buffer attached to the grab result. */

                    /* Get the buffer from the dictionary. Since we also got the buffer index,
                     * we could alternatively use an array, e.g. buffers[bufferIndex]. */
                    if (!buffers.TryGetValue(grabResult.hBuffer, out buffer))
                    {
                        /* Oops. No buffer available? We should never have reached this point. Since all buffers are
                         * in the dictionary. */
                        throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
                    }

                    getMinMax(buffer.Array, out min, out max);
                    Console.WriteLine("Grabbed frame {0} into buffer {1}. Min. gray value = {2}, Max. gray value = {3}",
                                      nGrabs, bufferIndex, min, max);
                }
                else if (grabResult.Status == EPylonGrabStatus.Failed)
                {
                    Console.Error.WriteLine("Frame {0} wasn't grabbed successfully.  Error code = {1}",
                                            nGrabs, grabResult.ErrorCode);
                }

                /* Once finished with the processing, requeue the buffer to be filled again. */
                Pylon.StreamGrabberQueueBuffer(hStreamGrabber, grabResult.hBuffer, bufferIndex);
            }
        }

        /* Clean up. */
        /* ... Stop the camera. */
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStop");

        /* ... Switch off the events. */
        Pylon.DeviceFeatureFromString(hDev, "EventSelector", "ExposureEnd");
        Pylon.DeviceFeatureFromString(hDev, "EventNotification", "Off");

        /* ... We must issue a cancel call to ensure that all pending buffers are put into the
         * stream grabber's output queue. */
        Pylon.StreamGrabberCancelGrab(hStreamGrabber);

        /* ... The buffers can now be retrieved from the stream grabber. */
        do
        {
            isReady = Pylon.StreamGrabberRetrieveResult(hStreamGrabber, out grabResult);
        } while (isReady);

        /* ... When all buffers are retrieved from the stream grabber, they can be deregistered.
         * After deregistering the buffers, it is safe to free the memory. */
        foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers)
        {
            Pylon.StreamGrabberDeregisterBuffer(hStreamGrabber, pair.Key);
            pair.Value.Dispose();
        }
        buffers = null;

        /* ... Release grabbing related resources. */
        Pylon.StreamGrabberFinishGrab(hStreamGrabber);

        /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g.,
         * the AOI width and height parameters) are unlocked and can be modified again. */

        /* ... Close the stream grabber. */
        Pylon.StreamGrabberClose(hStreamGrabber);

        /* ... Deregister the callback. */
        GenApi.NodeDeregisterCallback(hNode, hCallback);

        /* ... Close the event grabber. */
        Pylon.EventGrabberClose(hEventGrabber);

        /* ... Release the event adapter. */
        Pylon.DeviceDestroyEventAdapter(hDev, hEventAdapter);

        /* ... Release the wait object container. */
        Pylon.WaitObjectsDestroy(hWaitObjects);

        /* ... Close and release the pylon device. The stream grabber becomes invalid
         * after closing the pylon device. Don't call stream grabber related methods after
         * closing or releasing the device. */
        Pylon.DeviceClose(hDev);
        Pylon.DestroyDevice(hDev);

        /* ... Shut down the pylon runtime system. Don't call any pylon method after
         * calling PylonTerminate(). */
        Pylon.Terminate();

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        try
        {
            if (hDev.IsValid)
            {
                /* Disable the software trigger. */
                Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
                /* ... Close and release the pylon device. */
                if (Pylon.DeviceIsOpen(hDev))
                {
                    Pylon.DeviceClose(hDev);
                }
                Pylon.DestroyDevice(hDev);
            }
        }
        catch (Exception)
        {
            /* No further handling here. */
        }

        Pylon.Terminate(); /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/* Opens the provided pylon device for control and streaming, registers the removal callback,
 * applies the sample's default configuration (packet size, chunk mode off, all triggers off),
 * and creates/opens a stream grabber for the first channel. On any failure the last error is
 * captured, handles are closed best-effort, and the exception is rethrown. */
public void Open(PYLON_DEVICE_HANDLE device)
{
    /* Switches off the trigger identified by 'selector' if the device implements it. */
    void DisableTriggerIfPresent(string selector)
    {
        if (Pylon.DeviceFeatureIsAvailable(m_hDevice, "EnumEntry_TriggerSelector_" + selector))
        {
            Pylon.DeviceFeatureFromString(m_hDevice, "TriggerSelector", selector);
            Pylon.DeviceFeatureFromString(m_hDevice, "TriggerMode", "Off");
        }
    }

    try
    {
        /* Use the provided device. */
        m_hDevice = device;

        /* Before using the device, it must be opened. Open it for configuring
         * parameters and for grabbing images. */
        Pylon.DeviceOpen(m_hDevice, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

        /* Register the removal callback so we are notified if the device disappears. */
        m_hRemovalCallback = Pylon.DeviceRegisterRemovalCallback(m_hDevice, m_callbackHandler);

        /* For GigE cameras, a larger packet size improves performance. With jumbo-frame
         * capable adapters a value > 1500 (e.g. 8192) is possible; this sample uses 1500.
         * Only set it if the parameter exists and is writable. */
        if (Pylon.DeviceFeatureIsWritable(m_hDevice, "GevSCPSPacketSize"))
        {
            Pylon.DeviceSetIntegerFeature(m_hDevice, "GevSCPSPacketSize", 1500);
        }

        /* The sample does not work in chunk mode, so it must be disabled if writable. */
        if (Pylon.DeviceFeatureIsWritable(m_hDevice, "ChunkModeActive"))
        {
            Pylon.DeviceSetBooleanFeature(m_hDevice, "ChunkModeActive", false);
        }

        /* Switch off every trigger mode the device offers so the camera runs free. */
        DisableTriggerIfPresent("AcquisitionStart");
        DisableTriggerIfPresent("FrameBurstStart");
        DisableTriggerIfPresent("FrameStart");

        /* Image grabbing is done using a stream grabber. A device may provide several
         * streams, each needing its own grabber; this sample uses the default stream
         * (index == 0). First make sure the transport layer offers at least one. */
        if (Pylon.DeviceGetNumStreamGrabberChannels(m_hDevice) < 1)
        {
            throw new Exception("The transport layer doesn't support image streams.");
        }

        /* Create and open a stream grabber for the first channel. */
        m_hGrabber = Pylon.DeviceGetStreamGrabber(m_hDevice, 0);
        Pylon.StreamGrabberOpen(m_hGrabber);

        /* The grabber's wait object lets us wait for m_buffers to be filled with grabbed data. */
        m_hWait = Pylon.StreamGrabberGetWaitObject(m_hGrabber);
    }
    catch
    {
        /* Capture the last error now — cleaning up below could overwrite it. */
        UpdateLastError();
        try
        {
            Close(); /* Best-effort: close any handles that were opened. */
        }
        catch
        {
            /* A secondary exception during cleanup cannot be handled. */
        }
        throw;
    }

    /* Notify that the ImageProvider is open and ready for grabbing and configuration. */
    OnDeviceOpenedEvent();
}
/* Initializes the pylon runtime, opens the first camera found, configures it
 * (Mono8, GigE packet size), sets up the stream grabber and trigger/PEG
 * parameters, and publishes the new grab status. On failure, clears
 * _initSuccess and reports the problem via notifyError(). */
private void initialize()
{
    _initSuccess = true;

    /* Before using any pylon methods, the pylon runtime must be initialized. */
    Pylon.Initialize();

    /* Enumerate all camera devices. You must call
     * PylonEnumerateDevices() before creating a device. */
    _numDevices = Pylon.EnumerateDevices();
    if (0 == _numDevices)
    {
        _initSuccess = false;
        notifyError("No devices found!");
        /* BUGFIX: abort here. The original fell through and still called
         * CreateDeviceByIndex(0) with no device present, which cannot succeed. */
        return;
    }

    /* Get a handle for the first device found. */
    _hDev = Pylon.CreateDeviceByIndex(0);

    /* Before using the device, it must be opened. Open it for configuring
     * parameters and for grabbing images. */
    Pylon.DeviceOpen(_hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

    /* Set the pixel format to Mono8, where gray values will be output as
     * 8 bit values for each pixel. Check availability first. */
    isAvail = Pylon.DeviceFeatureIsAvailable(_hDev, "EnumEntry_PixelFormat_Mono8");
    if (!isAvail)
    {
        /* Feature is not available; flag the failure but keep configuring,
         * matching the original behavior for this branch. */
        _initSuccess = false;
        notifyError("Device doesn't support the Mono8 pixel format.");
    }
    else
    {
        /* ... Set the pixel format to Mono8. */
        Pylon.DeviceFeatureFromString(_hDev, "PixelFormat", "Mono8");
    }

    /* For GigE cameras, we recommend increasing the packet size for better
     * performance. If the network adapter supports jumbo frames, set the
     * packet size to a value > 1500, e.g., to 8192. In this sample, we only
     * set the packet size to 1500, and only if the feature is writable. */
    isAvail = Pylon.DeviceFeatureIsWritable(_hDev, "GevSCPSPacketSize");
    if (isAvail)
    {
        Pylon.DeviceSetIntegerFeature(_hDev, "GevSCPSPacketSize", 1500);
    }

    setStreamGrabber();

    //Free Run Settings
    //setFreeRunParams();
    setPEGParams(_PEGX, _PEGY);

    prepareDevice();

    /* Publish the new state to listeners. */
    setStatus(GrabInstruction.Initialize, GrabStage.Connected, GrabState.Idle);
    notifyStateChange(_bgworker, new GrabImageStatusChangedEventArgs() { Status = this.Status });
}
/* Runs a continuous grab on 'dev' until the 'grabbing' flag is cleared.
 * Opens the device, prepares the stream grabber with NUM_BUFFERS registered
 * buffers, starts acquisition, dispatches each grabbed image through
 * OnGrabAndDispose(), then stops acquisition and releases every resource.
 *
 * NOTE(review): if an exception is thrown after DeviceOpen/StreamGrabberOpen
 * (e.g. the "Grab timeout" below), the catch block only logs — the buffers
 * stay registered and the grabber/device stay open. Consider moving the
 * teardown into a finally block; left unchanged here because the teardown
 * order is intricate. */
private void ContinuousGrab()
{
    try
    {
        /* Serialize access to the device across threads. */
        lock (dev)
        {
            // prepare grabbing, see pylon C.NET docs
            Pylon.DeviceOpen(dev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);
            Pylon.DeviceFeatureFromString(dev, "AcquisitionMode", "Continuous");
            /* Buffer size required for one frame, as reported by the camera. */
            uint payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(dev, "PayloadSize"));
            /* NOTE(review): nStreams is queried but never used. */
            uint nStreams = Pylon.DeviceGetNumStreamGrabberChannels(dev);
            hGrabber = Pylon.DeviceGetStreamGrabber(dev, 0);
            Pylon.StreamGrabberOpen(hGrabber);
            /* The wait object signals when a buffer has been filled. */
            hWait = Pylon.StreamGrabberGetWaitObject(hGrabber);
            Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS);
            Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize);
            Pylon.StreamGrabberPrepareGrab(hGrabber);

            /* Allocate, register and queue NUM_BUFFERS grab buffers. The loop
             * index i is passed as context so the retrieve path below can map
             * a grab result back to its buffer. */
            var buffers = new PylonBuffer <Byte> [NUM_BUFFERS];
            var handles = new PYLON_STREAMBUFFER_HANDLE[NUM_BUFFERS];
            for (int i = 0; i < NUM_BUFFERS; i++)
            {
                PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true);
                PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer);
                Pylon.StreamGrabberQueueBuffer(hGrabber, handle, i);
                handles[i] = handle;
                buffers[i] = buffer;
            }

            Pylon.DeviceExecuteCommandFeature(dev, "AcquisitionStart");

            PylonGrabResult_t grabResult;
            bool isReady;
            while (grabbing)
            {
                int bufferIndex;
                /* Wait up to 1000 ms for a filled buffer. */
                isReady = Pylon.WaitObjectWait(hWait, 1000);
                if (!isReady)
                {
                    throw new Exception("Grab timeout");
                }
                isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
                if (!isReady)
                {
                    throw new Exception("Failed to retrieve a grab result");
                }
                /* Context carries the buffer index queued above. */
                bufferIndex = (int)grabResult.Context;
                if (grabResult.Status == EPylonGrabStatus.Grabbed)
                {
                    PylonBuffer <Byte> buffer = buffers[bufferIndex];
                    OnGrabAndDispose(GrabResultToImage(grabResult, buffer));
                }
                else if (grabResult.Status == EPylonGrabStatus.Failed)
                {
                    Console.Error.WriteLine("Error grabbing. Error Code = {0}", grabResult.ErrorCode);
                }
                /* Requeue the buffer regardless of the grab status. */
                Pylon.StreamGrabberQueueBuffer(hGrabber, grabResult.hBuffer, bufferIndex);
            }

            /* Stop acquisition, drain pending results, then release everything. */
            Pylon.DeviceExecuteCommandFeature(dev, "AcquisitionStop");
            Pylon.StreamGrabberCancelGrab(hGrabber);
            do
            {
                isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
            } while (isReady);
            for (int i = 0; i < NUM_BUFFERS; i++)
            {
                Pylon.StreamGrabberDeregisterBuffer(hGrabber, handles[i]);
                buffers[i].Dispose();
            }
            Pylon.StreamGrabberFinishGrab(hGrabber);
            Pylon.StreamGrabberClose(hGrabber);
            Pylon.DeviceClose(dev);
        }
    }
    catch
    {
        /* Best effort only: log the last pylon error detail and swallow. */
        Console.WriteLine(GenApi.GetLastErrorDetail());
    }
}
/* Initializes the camera to run in 12 bit mode (Mono12) with continuous
 * acquisition, allocates and queues NUM_BUFFERS grab buffers, and starts
 * acquisition. Only the "basler" camera type is handled; any other value is
 * silently ignored. If an "ipAddress" app setting is present, the device list
 * is searched for that address; otherwise the first device is used. */
public void cameraInit(string cameraType)
{
    int i = 0;
    uint NUM_BUFFERS = 10;

    if (cameraType == "basler")
    {
        Pylon.Initialize();
        numDevices = Pylon.EnumerateDevices();

        if (ConfigurationManager.AppSettings.Get("ipAddress").Length > 0)
        {
            /* Walk the device list until the configured IP address is found. */
            while (ip != ConfigurationManager.AppSettings.Get("ipAddress"))
            {
                hDev = Pylon.CreateDeviceByIndex(j);
                prop = Pylon.DeviceGetDeviceInfoHandle(hDev);
                /* Property index 8 is assumed to be the IP address — TODO confirm. */
                ip = Pylon.DeviceInfoGetPropertyValueByIndex(prop, 8);
                j++;
                /* BUGFIX: was 'j > numDevices', which let the loop call
                 * CreateDeviceByIndex(numDevices) — one past the last valid
                 * index (valid indices are 0 .. numDevices-1). */
                if (j >= numDevices)
                {
                    break;
                }
            }
        }
        else
        {
            numDevices = Pylon.EnumerateDevices();
            hDev = Pylon.CreateDeviceByIndex(0);
        }

        try
        {
            Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);
        }
        catch (System.Threading.ThreadAbortException)
        {
            /* Deliberately ignored: the hosting thread is being torn down. */
        }
        catch (Exception ex)
        {
            EmailError.emailAlert(ex);
            /* BUGFIX: 'throw (ex)' reset the stack trace; bare 'throw' preserves it. */
            throw;
        }

        /* BUGFIX: the availability result was queried but ignored, and Mono12
         * was set unconditionally — which fails on cameras without that format. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono12");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono12");
        }
        Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous");
        /* Height of 1: the camera appears to be used as a line sensor — TODO confirm. */
        Pylon.DeviceSetIntegerFeature(hDev, "Height", 1);
        /* Exposure time is in microseconds and rounded to the closest 100 ns. */
        Pylon.DeviceSetFloatFeature(hDev, "ExposureTimeAbs", exp);

        /* GigE only: keep the packet size at the standard 1500 bytes. */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
        if (isAvail)
        {
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500);
        }

        /* Prepare the stream grabber on channel 0. */
        payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize"));
        nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev);
        hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0);
        Pylon.StreamGrabberOpen(hGrabber);
        hWait = Pylon.StreamGrabberGetWaitObject(hGrabber);
        Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS);
        Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize);
        Pylon.StreamGrabberPrepareGrab(hGrabber);

        /* Allocate and register the grab buffers, keyed by their stream-buffer handle. */
        buffers = new Dictionary <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> >();
        for (i = 0; i < NUM_BUFFERS; ++i)
        {
            PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true);
            PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer);
            buffers.Add(handle, buffer);
        }

        /* Queue every registered buffer; the running index is passed as context. */
        i = 0;
        foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in buffers)
        {
            Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++);
        }

        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart");
    }
}
/* Prepares everything for grabbing: sets the acquisition mode, allocates and
 * registers the grab buffers, queues them into the stream grabber, and starts
 * acquisition on the camera. */
protected void SetupGrab()
{
    /* Clear the grab result queue. This is not done when cleaning up to still
     * be able to provide the images, e.g. in single frame mode. */
    lock (m_lockObject) /* Lock the grab result queue to avoid two threads modifying the same data. */
    {
        m_grabbedBuffers.Clear();
    }

    /* Set the acquisition mode. */
    if (m_grabOnce)
    {
        /* We will use the single frame mode to take one image. */
        Pylon.DeviceFeatureFromString(m_hDevice, "AcquisitionMode", "SingleFrame");
    }
    else
    {
        /* We will use the continuous frame mode, i.e., the camera delivers
         * images continuously. */
        Pylon.DeviceFeatureFromString(m_hDevice, "AcquisitionMode", "Continuous");
    }

    /* Clear the grab buffers to assure proper operation (because they may
     * still be filled if the last grab has thrown an exception). */
    foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in m_buffers)
    {
        pair.Value.Dispose();
    }
    m_buffers.Clear();

    /* Determine the required size of the grab buffer. */
    uint payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(m_hDevice, "PayloadSize"));

    /* Tell the stream grabber the number and size of the m_buffers we use. */
    Pylon.StreamGrabberSetMaxNumBuffer(m_hGrabber, m_numberOfBuffersUsed);
    Pylon.StreamGrabberSetMaxBufferSize(m_hGrabber, payloadSize);

    /* Allocate the resources required for grabbing. After this, critical
     * parameters that impact the payload size must not be changed until
     * FinishGrab() is called. */
    Pylon.StreamGrabberPrepareGrab(m_hGrabber);

    /* Register each buffer at the stream grabber; the returned handle is used
     * instead of the buffer object from here on. The buffers are held in a
     * dictionary keyed by that handle. */
    for (uint i = 0; i < m_numberOfBuffersUsed; ++i)
    {
        PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true);
        PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(m_hGrabber, ref buffer);
        m_buffers.Add(handle, buffer);
    }

    /* Feed the m_buffers into the stream grabber's input queue. The integer
     * context passed with each buffer is returned unchanged with the grab
     * result; we use the buffer's index.
     * BUGFIX: the context was hard-coded to 0 for every buffer although the
     * original comment promised the buffer index — the other grab paths in
     * this file pass a running index and read it back via grabResult.Context. */
    int bufferIndex = 0;
    foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in m_buffers)
    {
        Pylon.StreamGrabberQueueBuffer(m_hGrabber, pair.Key, bufferIndex++);
    }

    /* The stream grabber is now prepared. As soon as the camera starts
     * acquiring, the image data is grabbed into the provided m_buffers. */

    /* Invalidate the image converter handle to assure proper operation
     * (it may still be valid if the last grab has thrown an exception). */
    m_hConverter.SetInvalid();

    /* Let the camera acquire images. */
    Pylon.DeviceExecuteCommandFeature(m_hDevice, "AcquisitionStart");
}
/* Searches the attached cameras for the one whose "DeviceID" feature matches
 * _DeviceID, opens and configures it (Mono8 pixel format, AcquisitionStart
 * trigger off), then starts the continuous-grab worker thread in a paused
 * state. Returns true on success; false when no matching or usable device is
 * found, or the requested _ID is out of range. */
public bool Initialize(int _ID, string _DeviceID)
{
    bool _Result = false;

    if (0 == AvailableDeviceCount)
    {
        return (false);
    }
    if (_ID >= AvailableDeviceCount)
    {
        return (false);
    }

    CameraNumber = _ID;

    for (int iLoopCount = 0; iLoopCount < AvailableDeviceCount; ++iLoopCount)
    {
        try
        {
            /* BUGFIX: removed the pointless 'new PYLON_DEVICE_HANDLE()' that
             * was immediately overwritten by CreateDeviceByIndex(). */
            DeviceHandle = Pylon.CreateDeviceByIndex((uint)iLoopCount);
            Pylon.DeviceOpen(DeviceHandle, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

            /* The device must support Mono8; bail out entirely otherwise
             * (original behavior — not just skipping this device). */
            IsAvailable = Pylon.DeviceFeatureIsAvailable(DeviceHandle, "EnumEntry_PixelFormat_Mono8");
            if (false == IsAvailable)
            {
                DestroyDeviceHandle();
                return (false);
            }

            /* Skip devices whose ID does not match the one requested. */
            string _DeviceIDTemp = Pylon.DeviceFeatureToString(DeviceHandle, "DeviceID");
            if (_DeviceID != _DeviceIDTemp)
            {
                DestroyDeviceHandle();
                continue;
            }

            Pylon.DeviceFeatureFromString(DeviceHandle, "PixelFormat", "Mono8");

            /* Disable the acquisition start trigger so the camera runs free. */
            IsAvailable = Pylon.DeviceFeatureIsAvailable(DeviceHandle, "EnumEntry_TriggerSelector_AcquisitionStart");
            if (false == IsAvailable)
            {
                DestroyDeviceHandle();
                return (false);
            }
            Pylon.DeviceFeatureFromString(DeviceHandle, "TriggerSelector", "AcquisitionStart");
            Pylon.DeviceFeatureFromString(DeviceHandle, "TriggerMode", "Off");

            _Result = true;
            break;
        }
        catch
        {
            CLogManager.AddInspectionLog(CLogManager.LOG_TYPE.ERR, "CBaslerManager Initialize Exception!!", CLogManager.LOG_LEVEL.LOW);
            _Result = false;
            /* BUGFIX: the original leaked a possibly opened device handle when
             * an exception occurred; release it before trying the next device. */
            try
            {
                DestroyDeviceHandle();
            }
            catch
            {
                /* A failure during cleanup cannot be handled here. */
            }
        }
    }

    if (false == _Result)
    {
        return (false);
    }

    /* Start the background continuous-grab thread in a paused/idle state. */
    ThreadContinuousGrab = new Thread(ThreadContinuousGrabFunc);
    ThreadContinuousGrab.IsBackground = true;
    IsThreadContinuousGrabExit = false;
    IsThreadContinuousGrabTrigger = false;
    ThreadContinuousGrab.Start();
    PauseEvent.Reset();

    return (true);
}