/// <summary>
/// Configure the device and report the frame format that will be used during streaming.
/// This method must return a proper ImageDescriptor so buffers can be pre-allocated.
/// </summary>
public ImageDescriptor Prepare()
{
    Open();

    if (deviceHandle == null || !deviceHandle.IsValid)
        return ImageDescriptor.Invalid;

    firstOpen = false;

    // Grab the configured framerate for recording support.
    resultingFramerate = PylonHelper.GetResultingFramerate(deviceHandle);

    SpecificInfo specific = summary.Specific as SpecificInfo;
    string streamFormatSymbol = specific.StreamFormat;

    // All three features must be readable before the descriptor can be computed.
    bool hasWidth = Pylon.DeviceFeatureIsReadable(deviceHandle, "Width");
    bool hasHeight = Pylon.DeviceFeatureIsReadable(deviceHandle, "Height");
    bool hasPixelFormat = Pylon.DeviceFeatureIsReadable(deviceHandle, "PixelFormat");
    if (!(hasWidth && hasHeight && hasPixelFormat))
        return ImageDescriptor.Invalid;

    int width = (int)Pylon.DeviceGetIntegerFeature(deviceHandle, "Width");
    int height = (int)Pylon.DeviceGetIntegerFeature(deviceHandle, "Height");
    string pixelFormat = Pylon.DeviceFeatureToString(deviceHandle, "PixelFormat");
    EPylonPixelType pixelType = Pylon.PixelTypeFromString(pixelFormat);
    if (pixelType == EPylonPixelType.PixelType_Undefined)
        return ImageDescriptor.Invalid;

    // The image provider performs the Bayer conversion itself and only outputs two formats:
    // - Y800 for anything monochrome.
    // - RGB32 for anything color.
    imageProvider.SetDebayering(specific.Bayer8Conversion);
    bool isBayer = Pylon.IsBayer(pixelType);
    bool isBayer8 = PylonHelper.IsBayer8(pixelType);
    bool bayerColor = (isBayer && !isBayer8) || (isBayer8 && specific.Bayer8Conversion == Bayer8Conversion.Color);
    bool isColorOutput = !Pylon.IsMono(pixelType) || bayerColor;
    ImageFormat format = isColorOutput ? ImageFormat.RGB32 : ImageFormat.Y800;

    // The finish line may reduce the effective height and raise the framerate.
    finishline.Prepare(width, height, format, resultingFramerate);
    if (finishline.Enabled)
    {
        height = finishline.Height;
        resultingFramerate = finishline.ResultingFramerate;
    }

    int bufferSize = ImageFormatHelper.ComputeBufferSize(width, height, format);
    bool topDown = true;
    return new ImageDescriptor(format, width, height, topDown, bufferSize);
}
/// <summary>
/// Closes the stream grabber and the pylon device, deregistering the removal
/// callback first. Cleanup is attempted for every resource even when individual
/// steps throw; the last exception encountered is rethrown once teardown is done.
/// </summary>
public void Close()
{
    /* Notify that the ImageProvider is about to close the device to give other
     * objects the chance to do clean up operations. */
    OnDeviceClosingEvent();

    /* Try to close everything even if exceptions occur. Keep the last
     * exception to throw when it is done. */
    Exception lastException = null;

    /* Reset the removed flag. */
    m_removed = false;

    if (m_hGrabber.IsValid)
    {
        /* Try to close the stream grabber. */
        try
        {
            Pylon.StreamGrabberClose(m_hGrabber);
        }
        catch (Exception e)
        {
            lastException = e;
            UpdateLastError();
        }
    }

    if (m_hDevice.IsValid)
    {
        /* Try to deregister the removal callback. */
        try
        {
            if (m_hRemovalCallback.IsValid)
            {
                Pylon.DeviceDeregisterRemovalCallback(m_hDevice, m_hRemovalCallback);
            }
        }
        catch (Exception e)
        {
            lastException = e;
            UpdateLastError();
        }

        /* Try to close the device. */
        try
        {
            if (Pylon.DeviceIsOpen(m_hDevice))
            {
                Pylon.DeviceClose(m_hDevice);
            }
        }
        catch (Exception e)
        {
            lastException = e;
            UpdateLastError();
        }

        /* Try to destroy the device. */
        try
        {
            Pylon.DestroyDevice(m_hDevice);
        }
        catch (Exception e)
        {
            lastException = e;
            UpdateLastError();
        }
    }

    m_hGrabber.SetInvalid();
    m_hRemovalCallback.SetInvalid();
    m_hDevice.SetInvalid();

    /* Notify that the ImageProvider is now closed. */
    OnDeviceClosedEvent();

    /* If an exception occurred, rethrow it while preserving its original stack
     * trace. A bare `throw lastException;` would reset the trace to this frame,
     * hiding where the failure actually happened. */
    if (lastException != null)
    {
        System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(lastException).Throw();
    }
}
/* Prepares everything for grabbing: resets the result queue and buffers,
 * configures the acquisition mode, registers and queues the grab buffers at
 * the stream grabber, and finally starts acquisition on the device. */
protected void SetupGrab()
{
    /* Clear the grab result queue. This is not done when cleaning up to still be able
     * to provide the images, e.g. in single frame mode. */
    lock (m_lockObject) /* Lock the grab result queue to avoid that two threads modify the same data. */
    {
        m_grabbedBuffers.Clear();
    }

    /* Set the acquisition mode. */
    if (m_grabOnce)
    {
        /* We will use the single frame mode to take one image. */
        Pylon.DeviceFeatureFromString(m_hDevice, "AcquisitionMode", "SingleFrame");
    }
    else
    {
        /* We will use the continuous frame mode, i.e., the camera delivers
         * images continuously. */
        Pylon.DeviceFeatureFromString(m_hDevice, "AcquisitionMode", "Continuous");
    }

    /* Dispose and clear the grab buffers to assure proper operation (they may
     * still be filled if the last grab has thrown an exception). */
    foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in m_buffers)
    {
        pair.Value.Dispose();
    }
    m_buffers.Clear();

    /* Determine the required size of the grab buffer. 'checked' guards against a
     * payload size that does not fit into a uint. */
    uint payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(m_hDevice, "PayloadSize"));

    /* We must tell the stream grabber the number and size of the buffers
     * we are using. */
    /* .. We will not use more than m_numberOfBuffersUsed buffers for grabbing. */
    Pylon.StreamGrabberSetMaxNumBuffer(m_hGrabber, m_numberOfBuffersUsed);
    /* .. We will not use buffers bigger than payloadSize bytes. */
    Pylon.StreamGrabberSetMaxBufferSize(m_hGrabber, payloadSize);

    /* Allocate the resources required for grabbing. After this, critical parameters
     * that impact the payload size must not be changed until FinishGrab() is called. */
    Pylon.StreamGrabberPrepareGrab(m_hGrabber);

    /* Before using the buffers for grabbing, they must be registered at the
     * stream grabber. For each buffer registered, a buffer handle is returned;
     * after registering, these handles are used instead of the buffer object
     * pointers. The buffer objects are held in a dictionary that provides access
     * to a buffer using its handle as key. */
    for (uint i = 0; i < m_numberOfBuffersUsed; ++i)
    {
        PylonBuffer <Byte> buffer = new PylonBuffer <byte>(payloadSize, true);
        PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(m_hGrabber, ref buffer);
        m_buffers.Add(handle, buffer);
    }

    /* Feed the buffers into the stream grabber's input queue. For each buffer, the
     * API allows passing in an integer as additional context information; it is
     * returned unchanged when the grab is finished. Here it is unused (0). */
    foreach (KeyValuePair <PYLON_STREAMBUFFER_HANDLE, PylonBuffer <Byte> > pair in m_buffers)
    {
        Pylon.StreamGrabberQueueBuffer(m_hGrabber, pair.Key, 0);
    }

    /* The stream grabber is now prepared. As soon as the camera starts acquiring
     * images, the image data will be grabbed into the provided buffers. */

    /* Set the handle of the image converter invalid to assure proper operation
     * (it may still be valid if the last grab has thrown an exception). */
    m_hConverter.SetInvalid();

    /* Start the image acquisition engine. */
    Pylon.StreamGrabberStartStreamingIfMandatory(m_hGrabber);

    /* Let the camera acquire images. */
    Pylon.DeviceExecuteCommandFeature(m_hDevice, "AcquisitionStart");
}
/* Enumerate all possible entries of an enumerated feature and print a selection
 * of properties for each one. A loop similar to the one below could be used by
 * a GUI program to populate the entries of a menu. */
private static void demonstrateEnumIteration(PYLON_DEVICE_HANDLE hDev)
{
    string featureName = "PixelFormat";

    /* Look up the feature node in the device's node map. */
    NODEMAP_HANDLE hNodeMap = Pylon.DeviceGetNodeMap(hDev);
    NODE_HANDLE hNode = GenApi.NodeMapGetNode(hNodeMap, featureName);
    if (!hNode.IsValid)
    {
        Console.WriteLine("There is no feature named '" + featureName + "'.");
        return;
    }

    /* Only an enumeration feature node can be iterated. */
    EGenApiNodeType nodeType = GenApi.NodeGetType(hNode);
    if (nodeType != EGenApiNodeType.EnumerationNode)
    {
        Console.WriteLine("'" + featureName + "' is not an enumeration feature.");
        return;
    }

    /* The entries can only be listed when the feature is readable. */
    if (!GenApi.NodeIsReadable(hNode))
    {
        Console.WriteLine("Cannot read feature '{0}' - node not readable.", featureName);
        return;
    }

    uint entryCount = GenApi.EnumerationGetNumEntries(hNode);

    /* Write out the header. */
    Console.WriteLine("Allowed values for feature '{0}':\n" + "--------------", featureName);

    /* Visit every enumeration entry node once. */
    for (uint index = 0; index < entryCount; index++)
    {
        /* Get a handle for the enumeration entry node, then query its properties. */
        NODE_HANDLE hEntry = GenApi.EnumerationGetEntryByIndex(hNode, index);
        string entryName = GenApi.NodeGetName(hEntry);
        string entryDisplayName = GenApi.NodeGetDisplayName(hEntry);
        string entryDescription = GenApi.NodeGetDescription(hEntry);
        bool entryAvailable = GenApi.NodeIsAvailable(hEntry);

        /* Write out the results. */
        Console.WriteLine("Node name: {0}\n" +
                          "Display name: {1}\n" +
                          "Description: {2}\n" +
                          "Available: {3}\n" +
                          "--------------",
                          entryName, entryDisplayName, entryDescription,
                          entryAvailable ? "yes" : "no");
    }
}
/// <summary>
/// Entry point of the parameter demo: opens the first camera found, runs each
/// feature demonstration in turn, then closes the device and shuts down pylon.
/// On failure, the device is released and the process exits with code 1.
/// </summary>
static void Main(string[] args)
{
    PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */
    try
    {
        uint numDevices; /* Number of devices available. */

#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the programmer's guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call
         * Pylon.EnumerateDevices() before creating a device. */
        numDevices = Pylon.EnumerateDevices();
        if (0 == numDevices)
        {
            throw new Exception("No devices found.");
        }

        /* Get a handle for the first device found. */
        hDev = Pylon.CreateDeviceByIndex(0);

        /* Before using the device, it must be opened. Open it for configuring
         * parameters and for grabbing images. */
        Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

        /* Print out the name of the camera we are using. */
        {
            bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName");
            if (isReadable)
            {
                string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName");
                Console.WriteLine("Using camera {0}", name);
            }
        }

        /* Demonstrate how to check the accessibility of a feature. */
        demonstrateAccessibilityCheck(hDev);
        Console.WriteLine("");

        /* Demonstrate how to handle integer camera parameters. */
        demonstrateIntFeature(hDev);
        Console.WriteLine("");

        /* Demonstrate how to handle floating point camera parameters. */
        demonstrateFloatFeature(hDev);
        Console.WriteLine("");

        /* Demonstrate how to handle boolean camera parameters. */
        demonstrateBooleanFeature(hDev);
        Console.WriteLine("");

        /* Each feature can be read as a string and also set as a string. */
        demonstrateFromStringToString(hDev);
        Console.WriteLine("");

        /* Demonstrate how to handle enumeration camera parameters. */
        demonstrateEnumFeature(hDev);
        Console.WriteLine("");

        /* Demonstrate how to iterate enumeration entries. */
        demonstrateEnumIteration(hDev);
        Console.WriteLine("");

        /* Demonstrate how to execute actions. */
        demonstrateCommandFeature(hDev);
        Console.WriteLine("");

        /* Display category nodes. */
        demonstrateCategory(hDev);

        /* Clean up. Close and release the pylon device. */
        Pylon.DeviceClose(hDev);
        Pylon.DestroyDevice(hDev);

        /* Shut down the pylon runtime system. Don't call any pylon method after
         * calling Pylon.Terminate(). */
        Pylon.Terminate();

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
    }
    catch (Exception e)
    {
        /* Retrieve the error message. Separate message and detail with a
         * newline so they do not run together (consistent with the other
         * catch blocks in this project, which also test against "\n"). */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        try
        {
            if (hDev.IsValid)
            {
                /* ... Close and release the pylon device. */
                if (Pylon.DeviceIsOpen(hDev))
                {
                    Pylon.DeviceClose(hDev);
                }
                Pylon.DestroyDevice(hDev);
            }
        }
        catch (Exception)
        {
            /* No further handling here. */
        }

        Pylon.Terminate(); /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/* This function demonstrates how to handle integer camera parameters. */
private static void demonstrateIntFeature(PYLON_DEVICE_HANDLE hDev)
{
    string featureName = "Width"; /* Name of the feature used in this sample: AOI Width. */

    /* Look up the feature node in the device's node map. */
    NODEMAP_HANDLE hNodeMap = Pylon.DeviceGetNodeMap(hDev);
    NODE_HANDLE hNode = GenApi.NodeMapGetNode(hNodeMap, featureName);
    if (!hNode.IsValid)
    {
        Console.WriteLine("There is no feature named '" + featureName + "'.");
        return;
    }

    /* We want an integer feature node. */
    EGenApiNodeType nodeType = GenApi.NodeGetType(hNode);
    if (nodeType != EGenApiNodeType.IntegerNode)
    {
        Console.WriteLine("'" + featureName + "' is not an integer feature.");
        return;
    }

    /* Query the current value, the range of allowed values, and the increment of
     * the feature. For some integer features, you are not allowed to set every
     * value within the value range; e.g., for some cameras the Width parameter
     * must be a multiple of 2. These constraints are expressed by the increment
     * value. Valid values follow the rule:
     * val >= min && val <= max && val == min + n * inc. */
    if (!GenApi.NodeIsReadable(hNode))
    {
        Console.WriteLine("Cannot read feature '{0}' - node not readable.", featureName);
        return;
    }

    long min = GenApi.IntegerGetMin(hNode);   /* Minimum value. */
    long max = GenApi.IntegerGetMax(hNode);   /* Maximum value. */
    long incr = GenApi.IntegerGetInc(hNode);  /* Increment value. */
    long val = GenApi.IntegerGetValue(hNode); /* Current value. */
    Console.WriteLine("{0}: min= {1} max= {2} incr={3} Value={4}", featureName, min, max, incr, val);

    if (!GenApi.NodeIsWritable(hNode))
    {
        Console.WriteLine("Cannot set value for feature '{0}' - node not writable.", featureName);
        return;
    }

    /* Set the Width parameter half-way between minimum and maximum, snapped to
     * a valid multiple of the increment. */
    val = min + (max - min) / incr / 2 * incr;
    Console.WriteLine("Setting {0} to {1}", featureName, val);
    GenApi.IntegerSetValue(hNode, val);
}
/* Regardless of the parameter's type, any parameter value can be retrieved as a
 * string, and each parameter can likewise be set by passing in a string. This
 * function illustrates how to set and get the Width parameter as a string.
 * As demonstrated above, the Width parameter is of the integer type. */
private static void demonstrateFromStringToString(PYLON_DEVICE_HANDLE hDev)
{
    string featureName = "Width"; /* The name of the feature. */

    /* Look up the feature node in the device's node map. */
    NODEMAP_HANDLE hNodeMap = Pylon.DeviceGetNodeMap(hDev);
    NODE_HANDLE hNode = GenApi.NodeMapGetNode(hNodeMap, featureName);
    if (!hNode.IsValid)
    {
        Console.WriteLine("There is no feature named '" + featureName + "'.");
        return;
    }

    /* We want an integer feature node. */
    EGenApiNodeType nodeType = GenApi.NodeGetType(hNode);
    if (nodeType != EGenApiNodeType.IntegerNode)
    {
        Console.WriteLine("'" + featureName + "' is not an integer feature.");
        return;
    }

    /* The value can only be shown when the feature is readable. */
    if (!GenApi.NodeIsReadable(hNode))
    {
        Console.WriteLine("Cannot read feature '{0}' - node not readable.", featureName);
        return;
    }

    /* Get the value of a feature as a string. */
    string valueString = GenApi.NodeToString(hNode);
    Console.WriteLine("{0}: value string = {1}", featureName, valueString);

    if (!GenApi.NodeIsWritable(hNode))
    {
        Console.WriteLine("Cannot set value for feature '{0}' - node not writable.", featureName);
        return;
    }

    /* A feature can be set as a string using the GenApi.NodeFromString()
     * function. If the content of the string can not be converted to the type
     * of the feature, an error is returned. */
    try
    {
        GenApi.NodeFromString(hNode, "fourty-two"); /* Can not be converted to an integer. */
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.WriteLine("Exception caught:");
        Console.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.WriteLine("Last error message:");
            Console.WriteLine(msg);
        }
    }
}
/// <summary>
/// The main entry point for the application. Opens all GigE cameras found,
/// configures them to be triggered by a broadcast action command, grabs one
/// image from each camera, and releases all resources again.
/// </summary>
static void Main()
{
    /* Use a random number as the device key. */
    uint DeviceKey = (uint)(new Random()).Next(int.MaxValue);
    /* In this sample all cameras belong to the same group. */
    const uint GroupKey = 0x24;

    PYLON_DEVICE_HANDLE[] hDev = new PYLON_DEVICE_HANDLE[MAX_NUM_DEVICES]; /* Handles for the pylon devices. */
    for (int deviceIndex = 0; deviceIndex < MAX_NUM_DEVICES; ++deviceIndex)
    {
        hDev[deviceIndex] = new PYLON_DEVICE_HANDLE();
    }

    try
    {
        uint numDevicesEnumerated;    /* Number of the devices connected to this PC. */
        uint numDevicesToUse;         /* Number of the devices to use in this sample. */
        bool isAvail;                 /* Used for checking feature availability. */
        bool isReady;                 /* Used as an output parameter. */
        int i;                        /* Counter. */
        uint deviceIndex;             /* Index of device used in the following variables. */
        PYLON_WAITOBJECTS_HANDLE wos; /* Wait objects. */

        /* These are camera specific variables: */
        PYLON_STREAMGRABBER_HANDLE[] hGrabber = new PYLON_STREAMGRABBER_HANDLE[MAX_NUM_DEVICES]; /* Handles for the pylon stream grabbers. */
        PYLON_WAITOBJECT_HANDLE[] hWait = new PYLON_WAITOBJECT_HANDLE[MAX_NUM_DEVICES]; /* Handles used for waiting for a grab to be finished. */
        uint[] payloadSize = new uint[MAX_NUM_DEVICES]; /* Size of an image frame in bytes. */
        uint[] nStreams = new uint[MAX_NUM_DEVICES]; /* The number of streams provided by each device. */
        PYLON_STREAMBUFFER_HANDLE[] hBuffer = new PYLON_STREAMBUFFER_HANDLE[MAX_NUM_DEVICES];
        PylonBuffer<Byte>[] buffer = new PylonBuffer<Byte>[MAX_NUM_DEVICES];

#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the Programmer's Guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call Pylon.EnumerateDevices()
         * before creating a device. */
        numDevicesEnumerated = Pylon.EnumerateDevices();
        if (numDevicesEnumerated == 0)
        {
            Pylon.Terminate();
            Console.Error.WriteLine("No devices found!");
            Console.Error.WriteLine("\nPress enter to exit.");
            Console.ReadLine();
            return;
        }

        /* Create wait objects. This must be done outside of the loop. */
        wos = Pylon.WaitObjectsCreate();

        /* Open cameras and set parameters. */
        deviceIndex = 0;
        for (uint enumeratedDeviceIndex = 0; enumeratedDeviceIndex < numDevicesEnumerated; ++enumeratedDeviceIndex)
        {
            /* Only open GigE devices; action commands are a GigE feature. */
            PYLON_DEVICE_INFO_HANDLE hDI = Pylon.GetDeviceInfoHandle(enumeratedDeviceIndex);
            if (Pylon.DeviceInfoGetPropertyValueByName(hDI, Pylon.cPylonDeviceInfoDeviceClassKey) != "BaslerGigE")
            {
                continue;
            }

            /* Get a handle for the device. */
            hDev[deviceIndex] = Pylon.CreateDeviceByIndex(enumeratedDeviceIndex);

            /* Before using the device, it must be opened. Open it for configuring
             * parameters and for grabbing images. */
            Pylon.DeviceOpen(hDev[deviceIndex], Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

            /* Print out the name of the camera we are using. */
            Console.WriteLine("Using camera '{0}'", Pylon.DeviceInfoGetPropertyValueByName(hDI, Pylon.cPylonDeviceInfoModelNameKey));

            isAvail = Pylon.DeviceFeatureIsReadable(hDev[deviceIndex], "ActionControl");
            if (!isAvail)
            {
                throw new Exception("Device doesn't support the Action Command");
            }

            /* Configure the first action. */
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "ActionSelector", 1);
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "ActionDeviceKey", DeviceKey);
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "ActionGroupKey", GroupKey);
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "ActionGroupMask", AllGroupMask);

            /* Set the pixel format to Mono8, where gray values will be output as
             * 8 bit values for each pixel. */
            /* ... Check first to see if the device supports the Mono8 format. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_PixelFormat_Mono8");
            if (!isAvail)
            {
                /* Feature is not available. */
                throw new Exception("Device doesn't support the Mono8 pixel format.");
            }
            /* ... Set the pixel format to Mono8. */
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "PixelFormat", "Mono8");

            /* Disable acquisition start trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_AcquisitionStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "AcquisitionStart");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* Disable line1 trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_Line1");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "Line1");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* Enable frame start trigger with the first action. */
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "FrameStart");
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "On");
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSource", "Action1");

            /* For GigE cameras, we recommend increasing the packet size for better
             * performance. When the network adapter supports jumbo frames, set the
             * packet size to a value > 1500, e.g., to 8192. In this sample, we only
             * set the packet size to 1500.
             *
             * We also set the Inter-Packet and the Frame Transmission delay
             * so the switch can line up packets better. */
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPSPacketSize", GIGE_PACKET_SIZE);
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * (MAX_NUM_DEVICES - 1));
            Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCFTD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * deviceIndex);

            /* One device opened. */
            ++deviceIndex;
        }

        /* Remember the number of devices actually created. (The original sample
         * performed this assignment twice in a row; once is enough.) */
        numDevicesToUse = deviceIndex;

        if (numDevicesToUse == 0)
        {
            Console.Error.WriteLine("No suitable cameras found!");
            Pylon.Terminate(); /* Releases all pylon resources. */
            Console.Error.WriteLine("\nPress enter to exit.");
            Console.ReadLine();
            Environment.Exit(0);
        }

        if (numDevicesToUse < 2)
        {
            Console.Error.WriteLine("WARNING: This sample works best with two or more GigE cameras supporting action commands.");
        }

        /* Allocate and register buffers for grab. */
        for (deviceIndex = 0; deviceIndex < numDevicesToUse; ++deviceIndex)
        {
            /* Determine the required size for the grab buffer. */
            payloadSize[deviceIndex] = checked((uint)Pylon.DeviceGetIntegerFeature(hDev[deviceIndex], "PayloadSize"));

            /* Image grabbing is done using a stream grabber. A device may be able
             * to provide different streams; a separate stream grabber must be used
             * for each stream. In this sample, we create a stream grabber for the
             * default stream, i.e., the first stream (index == 0). */

            /* Get the number of streams supported by the device and the transport layer. */
            nStreams[deviceIndex] = Pylon.DeviceGetNumStreamGrabberChannels(hDev[deviceIndex]);
            if (nStreams[deviceIndex] < 1)
            {
                throw new Exception("The transport layer doesn't support image streams.");
            }

            /* Create and open a stream grabber for the first channel. */
            hGrabber[deviceIndex] = Pylon.DeviceGetStreamGrabber(hDev[deviceIndex], 0);
            Pylon.StreamGrabberOpen(hGrabber[deviceIndex]);

            /* Get a handle for the stream grabber's wait object. The wait object
             * allows waiting for buffers to be filled with grabbed data. */
            hWait[deviceIndex] = Pylon.StreamGrabberGetWaitObject(hGrabber[deviceIndex]);

            /* Add the stream grabber's wait object to our wait objects. This is
             * needed to be able to wait until all cameras have grabbed an image
             * in the grab loop below. */
            Pylon.WaitObjectsAdd(wos, hWait[deviceIndex]);

            /* We must tell the stream grabber the number and size of the buffers
             * we are using. */
            /* .. We will not use more than NUM_BUFFERS for grabbing. */
            Pylon.StreamGrabberSetMaxNumBuffer(hGrabber[deviceIndex], NUM_BUFFERS);
            /* .. We will not use buffers bigger than payloadSize bytes. */
            Pylon.StreamGrabberSetMaxBufferSize(hGrabber[deviceIndex], payloadSize[deviceIndex]);

            /* Allocate the resources required for grabbing. After this, critical
             * parameters that impact the payload size must not be changed until
             * FinishGrab() is called. */
            Pylon.StreamGrabberPrepareGrab(hGrabber[deviceIndex]);

            /* Before using the buffers for grabbing, they must be registered at
             * the stream grabber. For each registered buffer, a buffer handle is
             * returned; after registering, these handles are used instead of the
             * buffer objects pointers. */
            buffer[deviceIndex] = new PylonBuffer<byte>(payloadSize[deviceIndex], true);
            hBuffer[deviceIndex] = Pylon.StreamGrabberRegisterBuffer(hGrabber[deviceIndex], ref buffer[deviceIndex]);

            /* Feed the buffer into the stream grabber's input queue. */
            Pylon.StreamGrabberQueueBuffer(hGrabber[deviceIndex], hBuffer[deviceIndex], 0);
        }

        /* The stream grabbers are now prepared. Start the image acquisition.
         * The cameras won't send any image data yet, since they are configured to
         * wait for the action command to trigger the acquisition. */
        for (deviceIndex = 0; deviceIndex < numDevicesToUse; ++deviceIndex)
        {
            Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStart");
        }

        /* ======================================================================
         * Issue an ActionCommand and retrieve the images.
         * ====================================================================== */
        Console.WriteLine("*** Issuing action command ***");

        /* Trigger the cameras using an action command (without waiting for results).
         * If your setup supports PTP, you could use a scheduled action command:
         * Pylon.GigEIssueScheduledActionCommand(subnet, DefaultDeviceKey, DefaultGroupKey, 1, triggertime, 0) */
        string subnet = Pylon.DeviceInfoGetPropertyValueByName(Pylon.DeviceGetDeviceInfoHandle(hDev[0]), "SubnetAddress");
        Pylon.GigEIssueActionCommand(DeviceKey, GroupKey, 1, subnet);

        /* Grab one image from each camera. */
        for (i = 0; i < numDevicesToUse; ++i)
        {
            uint woIndex; /* This corresponds to the index in hDev and hGrabber. */

            /* Wait for the next buffer to be filled. Wait up to 5000 ms. */
            isReady = Pylon.WaitObjectsWaitForAny(wos, 5000, out woIndex);
            if (!isReady)
            {
                /* Grab timeout occurred. */
                throw new Exception("Grab timeout occurred.");
            }

            PylonGrabResult_t grabResult;
            /* Since the wait operation was successful, the result of at least one
             * grab operation is available. Retrieve it. */
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[woIndex], out grabResult);
            if (!isReady)
            {
                /* Oops. No grab result available? We should never reach this point,
                 * since the wait operation above returned without a timeout. */
                throw new Exception("Failed to retrieve a grab result.");
            }

            /* Check to see if the image was grabbed successfully. */
            if (grabResult.Status == EPylonGrabStatus.Grabbed)
            {
                /* Success. We only use one buffer per camera. */
                System.Diagnostics.Debug.Assert(grabResult.hBuffer == hBuffer[woIndex]);

                byte pixel = buffer[woIndex].Array[0];

                /* Perform processing. */
                Console.WriteLine("Grabbed a frame from camera {0}.", woIndex);

                /* Display image (the image window API supports window indices 0..31). */
                if (woIndex < 32)
                {
                    Pylon.ImageWindowDisplayImage<Byte>(woIndex, buffer[woIndex], grabResult);
                }
            }
            else if (grabResult.Status == EPylonGrabStatus.Failed)
            {
                /* If a buffer was grabbed incompletely, the network bandwidth is
                 * possibly insufficient for transferring multiple images
                 * simultaneously. See note above MAX_NUM_DEVICES. */
                Console.Error.WriteLine("Frame from camera {0} wasn't grabbed successfully. Error code = {1}",
                                        woIndex, grabResult.ErrorCode);
            }
        }

        /* Stop the image acquisition on the cameras. */
        for (deviceIndex = 0; deviceIndex < numDevicesToUse; ++deviceIndex)
        {
            Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStop");
        }

        // Remove all wait objects from WaitObjects.
        Pylon.WaitObjectsRemoveAll(wos);
        Pylon.WaitObjectsDestroy(wos);

        for (deviceIndex = 0; deviceIndex < numDevicesToUse; ++deviceIndex)
        {
            /* We must issue a cancel call to ensure that all pending buffers are
             * put into the stream grabber's output queue. */
            Pylon.StreamGrabberCancelGrab(hGrabber[deviceIndex]);

            /* The buffers can now be retrieved from the stream grabber. */
            do
            {
                PylonGrabResult_t grabResult;
                isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[deviceIndex], out grabResult);
            } while (isReady);

            /* When all buffers are retrieved from the stream grabber, they can be
             * deregistered. After deregistering, it is safe to free the memory. */
            Pylon.StreamGrabberDeregisterBuffer(hGrabber[deviceIndex], hBuffer[deviceIndex]);
            buffer[deviceIndex].Dispose();
            buffer[deviceIndex] = null;

            /* Release grabbing related resources. */
            Pylon.StreamGrabberFinishGrab(hGrabber[deviceIndex]);

            /* After calling StreamGrabberFinishGrab(), parameters that impact the
             * payload size (e.g., the AOI width and height parameters) are
             * unlocked and can be modified again. */

            /* Close the stream grabber. */
            Pylon.StreamGrabberClose(hGrabber[deviceIndex]);

            /* Close and release the pylon device. The stream grabber becomes
             * invalid after closing the pylon device; don't call stream grabber
             * related methods after closing or releasing the device. */
            Pylon.DeviceClose(hDev[deviceIndex]);
            Pylon.DestroyDevice(hDev[deviceIndex]);
        }

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();

        /* Shut down the pylon runtime system. Don't call any pylon function after
         * calling Pylon.Terminate(). */
        Pylon.Terminate();
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        for (uint deviceIndex = 0; deviceIndex < MAX_NUM_DEVICES; ++deviceIndex)
        {
            try
            {
                if (hDev[deviceIndex].IsValid)
                {
                    /* Close and release the pylon device. */
                    if (Pylon.DeviceIsOpen(hDev[deviceIndex]))
                    {
                        Pylon.DeviceClose(hDev[deviceIndex]);
                    }
                    Pylon.DestroyDevice(hDev[deviceIndex]);
                }
            }
            catch (Exception)
            {
                /* No further handling here. */
            }
        }

        Pylon.Terminate(); /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/* Total number of bytes of protocol overhead added to each GigE packet.
 * Used below to compute the inter-packet (GevSCPD) and frame-transmission (GevSCFTD) delays. */
const uint GIGE_PROTOCOL_OVERHEAD = 36;

/// <summary>
/// Sample entry point: grabs images from NUM_DEVICES cameras in parallel for five seconds.
/// Each camera streams into its own set of NUM_BUFFERS buffers; a pylon wait-object set
/// multiplexes the per-camera grab events plus a timer event (index 0) that ends the grab loop.
/// </summary>
static void Main(string[] args)
{
    PYLON_DEVICE_HANDLE[] hDev = new PYLON_DEVICE_HANDLE[NUM_DEVICES]; /* Handles for the pylon devices. */

    /* Pre-fill the handle array so the catch block below can safely test IsValid
     * even when an exception is thrown before all devices were created. */
    for (int deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
    {
        hDev[deviceIndex] = new PYLON_DEVICE_HANDLE();
    }

    try
    {
        uint numDevicesAvail;            /* Number of the available devices. */
        bool isAvail;                    /* Used for checking feature availability. */
        bool isReady;                    /* Used as an output parameter. */
        int i;                           /* Counter. */
        int deviceIndex;                 /* Index of device used in the following variables. */
        PYLON_WAITOBJECTS_HANDLE wos;    /* Wait objects. */
        int nGrabs;                      /* Counts the number of grab iterations. */
        PYLON_WAITOBJECT_HANDLE woTimer; /* Timer wait object. */

        /* These are camera specific variables: */
        PYLON_STREAMGRABBER_HANDLE[] hGrabber = new PYLON_STREAMGRABBER_HANDLE[NUM_DEVICES]; /* Handle for the pylon stream grabber. */
        PYLON_WAITOBJECT_HANDLE[] hWait = new PYLON_WAITOBJECT_HANDLE[NUM_DEVICES]; /* Handle used for waiting for a grab to be finished. */
        uint[] payloadSize = new uint[NUM_DEVICES]; /* Size of an image frame in bytes. */
        PylonGrabResult_t[] grabResult = new PylonGrabResult_t[NUM_DEVICES]; /* Stores the result of a grab operation. */
        uint[] nStreams = new uint[NUM_DEVICES]; /* The number of streams provided by the device. */
        Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>>[] buffers =
            new Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>>[NUM_DEVICES]; /* Holds handles and buffers used for grabbing. */

#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the Programmer's Guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call
         * PylonEnumerateDevices() before creating a device. */
        numDevicesAvail = Pylon.EnumerateDevices();
        if (numDevicesAvail < NUM_DEVICES)
        {
            Console.Error.WriteLine("Found {0} devices. At least {1} devices needed to run this sample.", numDevicesAvail, NUM_DEVICES);
            throw new Exception("Not enough devices found.");
        }

        /* Create wait objects. This must be done outside of the loop. */
        wos = Pylon.WaitObjectsCreate();

        /* In this sample, we want to grab for a given amount of time, then stop.
         * Create a timer that triggers an AutoResetEvent, wrap the AutoResetEvent in a
         * pylon C.NET wait object, and add it to the wait object set. */
        AutoResetEvent timoutEvent = new AutoResetEvent(false); /* The timeout event to wait for. */
        TimerCallbackWrapper timerCallbackWrapper = new TimerCallbackWrapper(timoutEvent); /* Receives the timer callback and sets the timeout event. */
        Timer timer = new Timer(timerCallbackWrapper.TimerCallback); /* The timeout timer. */
        woTimer = Pylon.WaitObjectFromW32(timoutEvent.SafeWaitHandle, true);
        Pylon.WaitObjectsAdd(wos, woTimer);

        /* Open cameras and set parameters. */
        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* Get handles for the devices. */
            hDev[deviceIndex] = Pylon.CreateDeviceByIndex((uint)deviceIndex);

            /* Before using the device, it must be opened. Open it for configuring
             * parameters and for grabbing images. */
            Pylon.DeviceOpen(hDev[deviceIndex], Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

            /* Print out the name of the camera we are using. */
            {
                bool isReadable = Pylon.DeviceFeatureIsReadable(hDev[deviceIndex], "DeviceModelName");
                if (isReadable)
                {
                    string name = Pylon.DeviceFeatureToString(hDev[deviceIndex], "DeviceModelName");
                    Console.WriteLine("Using camera '{0}'", name);
                }
            }

            /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */
            /* ... Check first to see if the device supports the Mono8 format. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_PixelFormat_Mono8");
            if (!isAvail)
            {
                /* Feature is not available. */
                throw new Exception("Device doesn't support the Mono8 pixel format.");
            }
            /* ... Set the pixel format to Mono8. */
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "PixelFormat", "Mono8");

            /* Disable acquisition start trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_AcquisitionStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "AcquisitionStart");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* Disable frame burst start trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_FrameBurstStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "FrameBurstStart");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* Disable frame start trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev[deviceIndex], "EnumEntry_TriggerSelector_FrameStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerSelector", "FrameStart");
                Pylon.DeviceFeatureFromString(hDev[deviceIndex], "TriggerMode", "Off");
            }

            /* We will use the Continuous frame mode, i.e., the camera delivers
             * images continuously. */
            Pylon.DeviceFeatureFromString(hDev[deviceIndex], "AcquisitionMode", "Continuous");

            PYLON_DEVICE_INFO_HANDLE hDi = Pylon.GetDeviceInfoHandle((uint)deviceIndex);
            string deviceClass = Pylon.DeviceInfoGetPropertyValueByName(hDi, Pylon.cPylonDeviceInfoDeviceClassKey);
            if (deviceClass == "BaslerGigE")
            {
                /* For GigE cameras, we recommend increasing the packet size for better
                 * performance. When the network adapter supports jumbo frames, set the packet
                 * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size
                 * to 1500.
                 *
                 * We also set the Inter-Packet and the Frame Transmission delay
                 * so the switch can line up packets better. */
                Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPSPacketSize", GIGE_PACKET_SIZE);
                Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCPD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * (NUM_DEVICES - 1));
                Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "GevSCFTD", (GIGE_PACKET_SIZE + GIGE_PROTOCOL_OVERHEAD) * deviceIndex);
            }
            else if (deviceClass == "Basler1394")
            {
                /* For FireWire we just set the PacketSize node to limit the bandwidth we're using. */
                /* We first divide the available bandwidth (4915 for FW400, 9830 for FW800)
                 * by the number of devices we are using. */
                long newPacketSize = 4915 / NUM_DEVICES;
                long recommendedPacketSize = 0;

                /* Get the recommended packet size from the camera. */
                recommendedPacketSize = Pylon.DeviceGetIntegerFeature(hDev[deviceIndex], "RecommendedPacketSize");

                if (newPacketSize < recommendedPacketSize)
                {
                    /* Get the increment value for the packet size.
                     * We must make sure that the new value we're setting is divisible by the increment of that feature. */
                    long packetSizeInc = 0;
                    packetSizeInc = Pylon.DeviceGetIntegerFeatureInc(hDev[deviceIndex], "PacketSize");

                    /* Adjust the new packet size so it is divisible by its increment. */
                    newPacketSize -= newPacketSize % packetSizeInc;
                }
                else
                {
                    /* The recommended packet size should always be valid. No need to check against the increment. */
                    newPacketSize = recommendedPacketSize;
                }

                /* Set the new packet size. */
                Pylon.DeviceSetIntegerFeature(hDev[deviceIndex], "PacketSize", newPacketSize);
                Console.WriteLine("Using packetsize: {0}", newPacketSize);
            }
        }

        /* Allocate and register buffers for grab. */
        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* Determine the required size for the grab buffer. */
            payloadSize[deviceIndex] = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev[deviceIndex], "PayloadSize"));

            /* Image grabbing is done using a stream grabber.
             * A device may be able to provide different streams. A separate stream grabber must
             * be used for each stream. In this sample, we create a stream grabber for the default
             * stream, i.e., the first stream (index == 0). */

            /* Get the number of streams supported by the device and the transport layer. */
            nStreams[deviceIndex] = Pylon.DeviceGetNumStreamGrabberChannels(hDev[deviceIndex]);
            if (nStreams[deviceIndex] < 1)
            {
                throw new Exception("The transport layer doesn't support image streams.");
            }

            /* Create and open a stream grabber for the first channel. */
            hGrabber[deviceIndex] = Pylon.DeviceGetStreamGrabber(hDev[deviceIndex], 0);
            Pylon.StreamGrabberOpen(hGrabber[deviceIndex]);

            /* Get a handle for the stream grabber's wait object. The wait object
             * allows waiting for buffers to be filled with grabbed data. */
            hWait[deviceIndex] = Pylon.StreamGrabberGetWaitObject(hGrabber[deviceIndex]);

            /* Add the stream grabber's wait object to our wait objects.
             * This is needed to be able to wait until all cameras have
             * grabbed an image in our grab loop below. */
            Pylon.WaitObjectsAdd(wos, hWait[deviceIndex]);

            /* We must tell the stream grabber the number and size of the buffers we are using. */
            /* .. We will not use more than NUM_BUFFERS for grabbing. */
            Pylon.StreamGrabberSetMaxNumBuffer(hGrabber[deviceIndex], NUM_BUFFERS);

            /* .. We will not use buffers bigger than payloadSize bytes. */
            Pylon.StreamGrabberSetMaxBufferSize(hGrabber[deviceIndex], payloadSize[deviceIndex]);

            /* Allocate the resources required for grabbing. After this, critical parameters
             * that impact the payload size must not be changed until FinishGrab() is called. */
            Pylon.StreamGrabberPrepareGrab(hGrabber[deviceIndex]);

            /* Before using the buffers for grabbing, they must be registered at
             * the stream grabber. For each registered buffer, a buffer handle
             * is returned. After registering, these handles are used instead of the
             * buffer object pointers. The buffer objects are held in a dictionary
             * that provides access to the buffer using a handle as key. */
            buffers[deviceIndex] = new Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>>();
            for (i = 0; i < NUM_BUFFERS; ++i)
            {
                PylonBuffer<Byte> buffer = new PylonBuffer<byte>(payloadSize[deviceIndex], true);
                PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber[deviceIndex], ref buffer);
                buffers[deviceIndex].Add(handle, buffer);
            }

            /* Feed the buffers into the stream grabber's input queue. For each buffer, the API
             * allows passing in an integer as additional context information. This integer
             * will be returned unchanged when the grab is finished. In our example, we use the
             * index of the buffer as context information. */
            i = 0;
            foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers[deviceIndex])
            {
                Pylon.StreamGrabberQueueBuffer(hGrabber[deviceIndex], pair.Key, i++);
            }
        }

        /* The stream grabber is now prepared. As soon as the camera starts acquiring images,
         * the image data will be grabbed into the provided buffers. */
        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* Let the camera acquire images. */
            Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStart");
        }

        /* Set the timer to 5 s and start it. */
        timer.Change(5000, Timeout.Infinite);

        /* Counts the number of grabbed images. */
        nGrabs = 0;

        /* Grab until the timer expires. */
        for (;;)
        {
            int bufferIndex; /* Index of the buffer. */
            Byte min, max;
            uint woIndex;

            /* Wait for the next buffer to be filled. Wait up to 1000 ms. */
            isReady = Pylon.WaitObjectsWaitForAny(wos, 1000, out woIndex);
            if (!isReady)
            {
                /* Timeout occurred. */
                throw new Exception("Grab timeout occurred.");
            }

            /* Wait object 0 is the timer; if it signaled, exit the grab loop. */
            if (woIndex == 0)
            {
                Console.Error.WriteLine("Game over.");
                break; /* Timer expired. */
            }

            /* Account for the timer: wait object 1..N maps to camera 0..N-1. */
            --woIndex;

            /* Since the wait operation was successful, the result of at least one grab
             * operation is available. Retrieve it. */
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[woIndex], out grabResult[woIndex]);
            if (!isReady)
            {
                /* Oops. No grab result available? We should never have reached this point.
                 * Since the wait operation above returned without a timeout, a grab result
                 * should be available. */
                throw new Exception("Failed to retrieve a grab result.");
            }

            /* Get the buffer index from the context information. */
            bufferIndex = grabResult[woIndex].Context;

            /* Check to see if the image was grabbed successfully. */
            if (grabResult[woIndex].Status == EPylonGrabStatus.Grabbed)
            {
                /* Success. Perform image processing. Since we passed more than one buffer
                 * to the stream grabber, the remaining buffers are filled in the background while
                 * we do the image processing. The processed buffer won't be touched by
                 * the stream grabber until we pass it back to the stream grabber. */
                PylonBuffer<Byte> buffer; /* Reference to the buffer attached to the grab result. */

                /* Get the buffer from the dictionary. Since we also got the buffer index,
                 * we could alternatively use an array, e.g. buffers[bufferIndex]. */
                if (!buffers[woIndex].TryGetValue(grabResult[woIndex].hBuffer, out buffer))
                {
                    /* Oops. No buffer available? We should never have reached this point,
                     * since all registered buffers are in the dictionary. */
                    throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
                }

                /* Perform processing. */
                getMinMax(buffer.Array, grabResult[woIndex].SizeX, grabResult[woIndex].SizeY, out min, out max);
                Console.WriteLine("Grabbed frame {0} from camera {1} into buffer {2}. Min. val={3}, Max. val={4}", nGrabs, woIndex, bufferIndex, min, max);

                /* Display image. */
                Pylon.ImageWindowDisplayImage<Byte>(woIndex, buffer, grabResult[woIndex]);
            }
            else if (grabResult[woIndex].Status == EPylonGrabStatus.Failed)
            {
                Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}", nGrabs, grabResult[woIndex].ErrorCode);
            }

            /* Once finished with the processing, requeue the buffer to be filled again. */
            Pylon.StreamGrabberQueueBuffer(hGrabber[woIndex], grabResult[woIndex].hBuffer, bufferIndex);

            nGrabs++;
        }

        /* Clean up. */

        /* ... Stop the image acquisition on the cameras. */
        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* ... Stop the camera. */
            Pylon.DeviceExecuteCommandFeature(hDev[deviceIndex], "AcquisitionStop");
        }

        // Remove all wait objects from WaitObjects, then destroy the timer wait object and the set.
        Pylon.WaitObjectsRemoveAll(wos);
        Pylon.WaitObjectDestroy(woTimer);
        Pylon.WaitObjectsDestroy(wos);

        for (deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            /* ... We must issue a cancel call to ensure that all pending buffers are put into the
             * stream grabber's output queue. */
            Pylon.StreamGrabberCancelGrab(hGrabber[deviceIndex]);

            /* ... The buffers can now be retrieved from the stream grabber. */
            do
            {
                isReady = Pylon.StreamGrabberRetrieveResult(hGrabber[deviceIndex], out grabResult[deviceIndex]);
            }
            while (isReady);

            /* ... When all buffers are retrieved from the stream grabber, they can be deregistered.
             * After deregistering the buffers, it is safe to free the memory. */
            foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers[deviceIndex])
            {
                Pylon.StreamGrabberDeregisterBuffer(hGrabber[deviceIndex], pair.Key);
                pair.Value.Dispose();
            }
            buffers[deviceIndex] = null;

            /* ... Release grabbing related resources. */
            Pylon.StreamGrabberFinishGrab(hGrabber[deviceIndex]);

            /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g.,
             * the AOI width and height parameters) are unlocked and can be modified again. */

            /* ... Close the stream grabber. */
            Pylon.StreamGrabberClose(hGrabber[deviceIndex]);

            /* ... Close and release the pylon device. The stream grabber becomes invalid
             * after closing the pylon device. Don't call stream grabber related methods after
             * closing or releasing the device. */
            Pylon.DeviceClose(hDev[deviceIndex]);
            Pylon.DestroyDevice(hDev[deviceIndex]);
        }

        /* Dispose timer and event. */
        timer.Dispose();
        timoutEvent.Close();

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();

        /* ... Shut down the pylon runtime system. Don't call any pylon function after
         * calling PylonTerminate(). */
        Pylon.Terminate();
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        /* Best-effort cleanup: close and destroy every device that was created.
         * NOTE(review): the timer, the AutoResetEvent, and the wait objects are not
         * released on this path; the process exits below, so this is acceptable for
         * a sample, but worth confirming if this code is reused in a long-lived app. */
        for (uint deviceIndex = 0; deviceIndex < NUM_DEVICES; ++deviceIndex)
        {
            try
            {
                if (hDev[deviceIndex].IsValid)
                {
                    /* ... Close and release the pylon device. */
                    if (Pylon.DeviceIsOpen(hDev[deviceIndex]))
                    {
                        Pylon.DeviceClose(hDev[deviceIndex]);
                    }
                    Pylon.DestroyDevice(hDev[deviceIndex]);
                }
            }
            catch (Exception)
            {
                /* No further handling here. */
            }
        }

        Pylon.Terminate(); /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
private bool isReady; /* Used as an output parameter of the pylon wait/retrieve calls. */

#endregion

#region 私有方法

/// <summary>
/// Initializes the pylon runtime system. Must be called before any other
/// pylon method; paired with the Pylon.Terminate() call in Dispose().
/// </summary>
private void init()
{
    Pylon.Initialize();
}
/// <summary>
/// Shuts down the pylon runtime system. Don't call any pylon method after
/// calling Pylon.Terminate().
/// </summary>
public void Dispose()
{
    Pylon.Terminate();
}
/// <summary>
/// Stops continuous image acquisition on the camera by executing the
/// "AcquisitionStop" command feature on the open device handle.
/// </summary>
private void streamModeStopMethod()
{
    Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStop");
}
/// <summary>
/// Performs one streaming iteration: on the first call issues "AcquisitionStart",
/// then waits (up to 1000 ms) for a filled buffer, retrieves the grab result,
/// stores a successful frame in _lastestImage as an HImage, and finally requeues
/// the buffer so the stream grabber can fill it again.
/// </summary>
/// <exception cref="Exception">Thrown on a wait timeout, when no grab result is
/// available, or when the result's buffer handle is unknown.</exception>
private void streamModeStartMethod()
{
    /* Start continuous acquisition only once; subsequent calls just pump the grab queue. */
    if (_startFlag)
    {
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart");
        _startFlag = false;
    }

    int bufferIndex; /* Index of the buffer. */

    /* Wait for the next buffer to be filled. Wait up to 1000 ms. */
    isReady = Pylon.WaitObjectWait(hWait, 1000);
    if (!isReady)
    {
        /* Timeout occurred. */
        throw new Exception("Grab timeout occurred.");
    }

    /* Since the wait operation was successful, the result of at least one grab
     * operation is available. Retrieve it. */
    isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
    if (!isReady)
    {
        /* The wait above returned without a timeout, so a grab result should be available. */
        throw new Exception("Failed to retrieve a grab result");
    }

    nGrabs++;

    /* Get the buffer index from the context information. */
    bufferIndex = (int)grabResult.Context;

    /* Check to see if the image was grabbed successfully. */
    if (grabResult.Status == EPylonGrabStatus.Grabbed)
    {
        /* Success. The stream grabber won't touch this buffer until it is
         * requeued below, so it is safe to read from it here. */
        PylonBuffer<Byte> buffer; /* Reference to the buffer attached to the grab result. */

        /* Get the buffer from the dictionary via the handle returned in the grab result. */
        if (!buffers.TryGetValue(grabResult.hBuffer, out buffer))
        {
            /* All registered buffers are in the dictionary, so this should be unreachable. */
            throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
        }

        /* Wrap the raw frame data in an HImage.
         * NOTE(review): the buffer is requeued below and may be overwritten by the
         * next grab — confirm HImage copies (rather than aliases) buffer.Pointer. */
        _lastestImage = new HImage("byte", _width, _height, buffer.Pointer);
    }
    else if (grabResult.Status == EPylonGrabStatus.Failed)
    {
        Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}", nGrabs, grabResult.ErrorCode);
    }

    /* Once finished with the processing, requeue the buffer to be filled again. */
    Pylon.StreamGrabberQueueBuffer(hGrabber, grabResult.hBuffer, bufferIndex);
}
/// <summary>
/// Configures the camera for continuous Mono8 streaming and prepares the stream
/// grabber: caches the image dimensions, disables the available trigger modes,
/// optionally sets the GigE packet size, then allocates, registers, and queues
/// NUM_BUFFERS grab buffers in the stream grabber's input queue.
/// </summary>
/// <exception cref="Exception">Thrown when the device lacks Mono8 support or
/// the transport layer provides no image stream.</exception>
private void initStreamMode()
{
    /* Cache the frame dimensions; used later when wrapping grabbed buffers in an HImage. */
    _width = (int)Pylon.DeviceGetIntegerFeature(hDev, "Width");
    _height = (int)Pylon.DeviceGetIntegerFeature(hDev, "Height");

    /* Print out the name of the camera we are using. */
    bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName");
    if (isReadable)
    {
        string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName");
        Console.WriteLine("Using camera {0}.", name);
    }

    /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */
    /* ... Check first to see if the device supports the Mono8 format. */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
    if (!isAvail)
    {
        /* Feature is not available. */
        throw new Exception("Device doesn't support the Mono8 pixel format.");
    }

    /* ... Set the pixel format to Mono8. */
    bool isWritable = Pylon.DeviceFeatureIsWritable(hDev, "PixelFormat");
    if (isWritable)
    {
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");
    }

    /* Disable acquisition start trigger if available. */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
    if (isAvail)
    {
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerSelector");
        if (isWritable)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
        }
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerMode");
        if (isWritable)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }
    }

    /* Disable frame start trigger if available. */
    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
    if (isAvail)
    {
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerSelector");
        if (isWritable)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
        }
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "TriggerMode");
        if (isWritable)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }
    }

    /* We will use the Continuous frame mode, i.e., the camera delivers images continuously. */
    isWritable = Pylon.DeviceFeatureIsWritable(hDev, "AcquisitionMode");
    if (isWritable)
    {
        Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous");
    }

    /* For GigE cameras, we recommend increasing the packet size for better
     * performance. When the network adapter supports jumbo frames, set the packet
     * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size
     * to 1500. */
    /* ... Check first to see if the GigE camera packet size parameter is supported and writable.
     * NOTE(review): this probe uses IsWritable, which makes the inner IsWritable check below
     * redundant, and it means _isModelValid records the writability of GevSCPSPacketSize
     * (effectively "is a GigE camera") rather than availability — confirm this is intended. */
    isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
    if (isAvail)
    {
        /* ... The device supports the packet size feature. Set a value. */
        isWritable = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
        if (isWritable)
        {
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", GevSCPSPacketSize);
        }
    }
    _isModelValid = isAvail;

    /* Determine the required size of the grab buffer. */
    payloadSize = checked ((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize"));

    /* Image grabbing is done using a stream grabber.
     * A device may be able to provide different streams. A separate stream grabber must
     * be used for each stream. In this sample, we create a stream grabber for the default
     * stream, i.e., the first stream (index == 0). */

    /* Get the number of streams supported by the device and the transport layer. */
    nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev);
    if (nStreams < 1)
    {
        throw new Exception("The transport layer doesn't support image streams.");
    }

    /* Create and open a stream grabber for the first channel. */
    hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0);
    Pylon.StreamGrabberOpen(hGrabber);

    /* Get a handle for the stream grabber's wait object. The wait object
     * allows waiting for buffers to be filled with grabbed data. */
    hWait = Pylon.StreamGrabberGetWaitObject(hGrabber);

    /* We must tell the stream grabber the number and size of the buffers we are using. */
    /* .. We will not use more than NUM_BUFFERS for grabbing. */
    Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS);

    /* .. We will not use buffers bigger than payloadSize bytes. */
    Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize);

    /* Allocate the resources required for grabbing. After this, critical parameters
     * that impact the payload size must not be changed until FinishGrab() is called. */
    Pylon.StreamGrabberPrepareGrab(hGrabber);

    /* Before using the buffers for grabbing, they must be registered at
     * the stream grabber. For each registered buffer, a buffer handle
     * is returned. After registering, these handles are used instead of the
     * buffer object pointers. The buffer objects are held in a dictionary
     * that provides access to the buffer using a handle as key. */
    buffers = new Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>>();
    int i;
    for (i = 0; i < NUM_BUFFERS; ++i)
    {
        PylonBuffer<Byte> buffer = new PylonBuffer<byte>(payloadSize, true);
        PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer);
        buffers.Add(handle, buffer);
    }

    /* Feed the buffers into the stream grabber's input queue. For each buffer, the API
     * allows passing in an integer as additional context information. This integer
     * will be returned unchanged when the grab is finished. In our example, we use the
     * index of the buffer as context information. */
    i = 0;
    foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers)
    {
        Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++);
    }
}
/// <summary>
/// Converts a finished grab into a GrabResult and appends it to the shared list
/// of grabbed buffers. Mono8 and RGBA8packed frames are wrapped directly; any
/// other pixel format is first converted to Mono8 or BGRA8packed, lazily
/// creating the format converter and a per-handle cache of conversion targets.
/// </summary>
/// <param name="grabResult">The raw grab result reported by the stream grabber.</param>
/// <exception cref="Exception">Thrown when the result's buffer handle is not
/// found among the registered buffers.</exception>
protected void EnqueueTakenImage(PylonGrabResult_t grabResult)
{
    /* Resolve the raw buffer attached to the grab result handle. */
    PylonBuffer<Byte> rawBuffer;
    if (!m_buffers.TryGetValue(grabResult.hBuffer, out rawBuffer))
    {
        /* Every registered buffer is in the dictionary, so this should be unreachable. */
        throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
    }

    /* Keep the handle so the buffer can be requeued in the stream grabber queue later. */
    GrabResult queuedResult = new GrabResult();
    queuedResult.Handle = grabResult.hBuffer;

    /* Frames already in one of the two output formats need no conversion. */
    bool alreadyOutputFormat = grabResult.PixelType == EPylonPixelType.PixelType_Mono8
                               || grabResult.PixelType == EPylonPixelType.PixelType_RGBA8packed;

    if (alreadyOutputFormat)
    {
        queuedResult.ImageData = new Image(grabResult.SizeX, grabResult.SizeY, rawBuffer.Array,
                                           grabResult.PixelType == EPylonPixelType.PixelType_RGBA8packed);
    }
    else
    {
        /* Lazily create the format converter and the cache of converted buffers on first use. */
        if (!m_hConverter.IsValid)
        {
            m_convertedBuffers = new Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<byte>>();
            m_hConverter = Pylon.ImageFormatConverterCreate();
            m_converterOutputFormatIsColor = !Pylon.IsMono(grabResult.PixelType) || Pylon.IsBayer(grabResult.PixelType);
        }

        /* Reuse a converted buffer previously attached to this handle, if one exists. */
        PylonBuffer<Byte> targetBuffer = null;
        bool targetWasCached = m_convertedBuffers.TryGetValue(grabResult.hBuffer, out targetBuffer);

        /* Select the converter's output format to match the color decision made above. */
        EPylonPixelType outputFormat;
        if (m_converterOutputFormatIsColor)
        {
            outputFormat = EPylonPixelType.PixelType_BGRA8packed;
        }
        else
        {
            outputFormat = EPylonPixelType.PixelType_Mono8;
        }
        Pylon.ImageFormatConverterSetOutputPixelFormat(m_hConverter, outputFormat);

        /* Perform the conversion. If targetBuffer is null, a new one is created automatically. */
        Pylon.ImageFormatConverterConvert(m_hConverter, ref targetBuffer, rawBuffer,
                                          grabResult.PixelType,
                                          (uint)grabResult.SizeX, (uint)grabResult.SizeY,
                                          (uint)grabResult.PaddingX,
                                          EPylonImageOrientation.ImageOrientation_TopDown);

        if (!targetWasCached)
        {
            /* A new buffer was created; remember it for subsequent frames with this handle. */
            m_convertedBuffers.Add(grabResult.hBuffer, targetBuffer);
        }

        queuedResult.ImageData = new Image(grabResult.SizeX, grabResult.SizeY,
                                           targetBuffer.Array, m_converterOutputFormatIsColor);
    }

    /* Serialize access to the grab result list shared with the consumer thread. */
    lock (m_lockObject)
    {
        m_grabbedBuffers.Add(queuedResult);
    }
}
const uint NUM_BUFFERS = 2; /* Number of buffers used for grabbing. */

/// <summary>
/// Basler pylon chunk-data sample: opens the first camera found, enables software
/// triggering and chunk mode (frame counter + CRC), grabs NUM_GRABS frames with a
/// stream grabber, extracts chunk data with a chunk parser, then shuts everything
/// down. NOTE(review): NUM_GRABS and getMinMax are defined elsewhere in this file.
/// </summary>
static void Main(string[] args)
{
    PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */
    try
    {
        uint numDevices;                        /* Number of available devices. */
        PYLON_STREAMGRABBER_HANDLE hGrabber;    /* Handle for the pylon stream grabber. */
        PYLON_CHUNKPARSER_HANDLE hChunkParser;  /* Handle for the parser extracting the chunk data. */
        PYLON_WAITOBJECT_HANDLE hWait;          /* Handle used for waiting for a grab to be finished. */
        uint payloadSize;                       /* Size of an image frame in bytes. */
        Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> buffers; /* Handles and buffers used for grabbing. */
        PylonGrabResult_t grabResult;           /* Stores the result of a grab operation. */
        int nGrabs;                             /* Counts the number of buffers grabbed. */
        uint nStreams;                          /* The number of streams the device provides. */
        bool isAvail;                           /* Used for checking feature availability. */
        bool isReady;                           /* Used as an output parameter. */
        int i;                                  /* Counter. */
        string triggerSelectorValue = "FrameStart"; /* Preselect the trigger for image acquisition. */
        bool isAvailFrameStart;                 /* Used for checking feature availability. */
        bool isAvailAcquisitionStart;           /* Used for checking feature availability. */

#if DEBUG
        /* Special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the programmer's guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices; required before creating a device. */
        numDevices = Pylon.EnumerateDevices();
        if (0 == numDevices)
        {
            throw new Exception("No devices found!");
        }

        /* Get a handle for the first device found and open it for both
         * configuring parameters and grabbing images. */
        hDev = Pylon.CreateDeviceByIndex(0);
        Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

        /* Set the pixel format to Mono8 (8-bit gray values), after checking support. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
        if (!isAvail)
        {
            throw new Exception("Device doesn't support the Mono8 pixel format.");
        }
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");

        /* Choose the trigger mode: older cameras only implement the acquisition start
         * trigger; newer cameras use the frame start trigger instead. */
        isAvailAcquisitionStart = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
        isAvailFrameStart = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
        if (isAvailAcquisitionStart && !isAvailFrameStart)
        {
            /* Acquisition start trigger is the only available trigger mode. */
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "On");
            triggerSelectorValue = "AcquisitionStart";
        }
        else
        {
            /* Both modes may be implemented; switch the acquisition start trigger off
             * before enabling the frame start trigger. */
            if (isAvailAcquisitionStart)
            {
                Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
                Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
            }

            /* Disable the frame burst start trigger if available. */
            isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart");
            if (isAvail)
            {
                Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart");
                Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
            }

            /* Enable the frame start trigger so each frame is triggered individually. */
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "On");
        }

        /* The trigger selector must point at the active trigger mode before setting
         * the trigger source or issuing software triggers. */
        Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", triggerSelectorValue);

        /* Use software triggering with continuous acquisition: after AcquisitionStart,
         * the camera sends one image per software trigger. */
        Pylon.DeviceFeatureFromString(hDev, "TriggerSource", "Software");
        Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous");

        /* For GigE cameras, set the packet size (1500 here; >1500 needs jumbo frames). */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
        if (isAvail)
        {
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500);
        }

        /* Chunk mode must be activated before enabling individual chunks. */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "ChunkModeActive");
        if (!isAvail)
        {
            throw new Exception("The device doesn't support the chunk mode.");
        }
        Pylon.DeviceSetBooleanFeature(hDev, "ChunkModeActive", true);

        /* Enable the frame counter chunk if the device offers it. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_ChunkSelector_Framecounter");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "ChunkSelector", "Framecounter");
            isAvail = Pylon.DeviceFeatureIsWritable(hDev, "ChunkEnable");
            if (isAvail)
            {
                Pylon.DeviceSetBooleanFeature(hDev, "ChunkEnable", true);
            }
        }

        /* Enable the CRC checksum chunk if offered. Not a prerequisite for using
         * chunks — chunks also work with the CRC feature disabled. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_ChunkSelector_PayloadCRC16");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "ChunkSelector", "PayloadCRC16");
            isAvail = Pylon.DeviceFeatureIsWritable(hDev, "ChunkEnable");
            if (isAvail)
            {
                Pylon.DeviceSetBooleanFeature(hDev, "ChunkEnable", true);
            }
        }

        /* Create a chunk parser to extract the appended chunk data from grabbed frames. */
        hChunkParser = Pylon.DeviceCreateChunkParser(hDev);
        if (!hChunkParser.IsValid)
        {
            /* The transport layer doesn't provide a chunk parser. */
            throw new Exception("No chunk parser available.");
        }

        /* Grabbing is done with a stream grabber; use the default (first) stream. */
        nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev);
        if (nStreams < 1)
        {
            throw new Exception("The transport layer doesn't support image streams.");
        }
        hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0);
        Pylon.StreamGrabberOpen(hGrabber);

        /* The wait object allows waiting for buffers to be filled with grabbed data. */
        hWait = Pylon.StreamGrabberGetWaitObject(hGrabber);

        /* Determine the required buffer size AFTER enabling chunks, since chunks
         * increase the payload size. */
        payloadSize = checked((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize"));

        /* Tell the stream grabber the number and size of the buffers we use. */
        Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS);
        Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize);

        /* Allocate grabbing resources. Payload-relevant parameters are locked
         * until FinishGrab() is called. */
        Pylon.StreamGrabberPrepareGrab(hGrabber);

        /* Register the buffers; the returned handles are used from here on.
         * The buffers are held in a dictionary keyed by handle. */
        buffers = new Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>>();
        for (i = 0; i < NUM_BUFFERS; ++i)
        {
            PylonBuffer<Byte> buffer = new PylonBuffer<byte>(payloadSize, true);
            PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer);
            buffers.Add(handle, buffer);
        }

        /* Feed the buffers into the grabber's input queue; the integer context
         * (here: buffer index) is returned unchanged with the grab result. */
        i = 0;
        foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers)
        {
            Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++);
        }

        /* With trigger mode on, AcquisitionStart only arms the camera; each image
         * is then produced by one trigger. */
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart");

        /* Trigger the first image. */
        Pylon.DeviceExecuteCommandFeature(hDev, "TriggerSoftware");

        /* Grab NUM_GRABS images. */
        nGrabs = 0;
        while (nGrabs < NUM_GRABS)
        {
            int bufferIndex;            /* Index of the buffer. */
            Byte min = 255, max = 0;
            long chunkWidth = 0;        /* Data retrieved from the chunk parser. */
            long chunkHeight = 0;       /* Data retrieved from the chunk parser. */

            /* Wait up to 1000 ms for the next buffer to be filled. */
            isReady = Pylon.WaitObjectWait(hWait, 1000);
            if (!isReady)
            {
                /* Timeout occurred. */
                throw new Exception("Grab timeout occurred.\n");
            }

            /* The wait succeeded, so at least one grab result must be available. */
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
            if (!isReady)
            {
                throw new Exception("Failed to retrieve a grab result.\n");
            }
            nGrabs++;

            /* Trigger the next image now: with more than one buffer queued, it is
             * grabbed while we process the current one. */
            Pylon.DeviceExecuteCommandFeature(hDev, "TriggerSoftware");

            /* Get the buffer index from the context information. */
            bufferIndex = (int)grabResult.Context;

            if (grabResult.Status == EPylonGrabStatus.Grabbed)
            {
                /* The grab is successful. */
                PylonBuffer<Byte> buffer; /* Reference to the buffer attached to the grab result. */

                /* Get the buffer from the dictionary (buffers[bufferIndex] would also work). */
                if (!buffers.TryGetValue(grabResult.hBuffer, out buffer))
                {
                    /* All buffers are in the dictionary, so this should never happen. */
                    throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
                }

                Console.WriteLine("Grabbed frame {0} into buffer {1}.", nGrabs, bufferIndex);

                /* Check to see if we really got image data plus chunk data. */
                if (grabResult.PayloadType != EPylonPayloadType.PayloadType_ChunkData)
                {
                    Console.WriteLine("Received a buffer not containing chunk data?");
                }
                else
                {
                    /* Attach the buffer to the chunk parser; chunk values are then
                     * read like normal camera parameters (CRC has dedicated calls). */
                    bool hasCRC;
                    Pylon.ChunkParserAttachBuffer(hChunkParser, buffer);

                    /* Check the CRC checksum. */
                    hasCRC = Pylon.ChunkParserHasCRC(hChunkParser);
                    if (hasCRC)
                    {
                        bool isOk = Pylon.ChunkParserCheckCRC(hChunkParser);
                        Console.WriteLine("Frame {0} contains a CRC checksum. The checksum {1} ok.", nGrabs, isOk ? "is" : "is not");
                    }

                    /* Retrieve the frame counter value, if present. */
                    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "ChunkFramecounter");
                    Console.WriteLine("Frame {0} {1} a frame counter chunk.", nGrabs, isAvail ? "contains" : "doesn't contain");
                    if (isAvail)
                    {
                        long counter;
                        counter = Pylon.DeviceGetIntegerFeature(hDev, "ChunkFramecounter");
                        Console.WriteLine("Frame counter of frame {0}: {1}.", nGrabs, counter);
                    }

                    /* Retrieve the chunk width value, if present. */
                    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "ChunkWidth");
                    Console.WriteLine("Frame {0} {1} a frame width chunk.", nGrabs, isAvail ? "contains" : "doesn't contain");
                    if (isAvail)
                    {
                        chunkWidth = Pylon.DeviceGetIntegerFeature(hDev, "ChunkWidth");
                        Console.WriteLine("Width of frame {0}: {1}.", nGrabs, chunkWidth);
                    }

                    /* Retrieve the chunk height value, if present. */
                    isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "ChunkHeight");
                    Console.WriteLine("Frame {0} {1} a frame height chunk.", nGrabs, isAvail ? "contains" : "doesn't contain");
                    if (isAvail)
                    {
                        chunkHeight = Pylon.DeviceGetIntegerFeature(hDev, "ChunkHeight");
                        Console.WriteLine("Height of frame {0}: {1}.", nGrabs, chunkHeight);
                    }
                }

                /* Perform the image processing. */
                getMinMax(buffer.Array, chunkWidth, chunkHeight, out min, out max);
                Console.WriteLine("Min. gray value = {0}, Max. gray value = {1}", min, max);

                /* Detach the buffer from the chunk parser before requeueing it;
                 * afterwards the chunk data in the buffer is no longer accessible. */
                Pylon.ChunkParserDetachBuffer(hChunkParser);
            }
            else if (grabResult.Status == EPylonGrabStatus.Failed)
            {
                Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}", nGrabs, grabResult.ErrorCode);
            }

            /* Requeue the buffer to be filled again. */
            Pylon.StreamGrabberQueueBuffer(hGrabber, grabResult.hBuffer, bufferIndex);
        }

        /* Clean up: stop the camera. */
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStop");

        /* Cancel so all pending buffers land in the grabber's output queue,
         * then drain that queue. */
        Pylon.StreamGrabberCancelGrab(hGrabber);
        do
        {
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
        } while (isReady);

        /* With all buffers retrieved, deregister them; then the memory can be freed. */
        foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers)
        {
            Pylon.StreamGrabberDeregisterBuffer(hGrabber, pair.Key);
            pair.Value.Dispose();
        }
        buffers = null;

        /* Release grabbing resources; payload-relevant parameters (e.g. AOI width
         * and height) become writable again after this call. */
        Pylon.StreamGrabberFinishGrab(hGrabber);

        /* Close the stream grabber and release the chunk parser. */
        Pylon.StreamGrabberClose(hGrabber);
        Pylon.DeviceDestroyChunkParser(hDev, hChunkParser);

        /* Disable the software trigger and chunk mode. */
        if (hDev.IsValid)
        {
            Pylon.DeviceSetBooleanFeature(hDev, "ChunkModeActive", false);
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Close and release the device. The stream grabber becomes invalid after
         * closing the device — don't call grabber methods past this point. */
        Pylon.DeviceClose(hDev);
        Pylon.DestroyDevice(hDev);

        /* Shut down the pylon runtime. No pylon calls are allowed after Terminate(). */
        Pylon.Terminate();

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
    }
    catch (Exception e)
    {
        /* Retrieve more details about the error from the GenApi error state. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        /* Best-effort cleanup: disable the trigger, close and destroy the device. */
        try
        {
            if (hDev.IsValid)
            {
                Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
                if (Pylon.DeviceIsOpen(hDev))
                {
                    Pylon.DeviceClose(hDev);
                }
                Pylon.DestroyDevice(hDev);
            }
        }
        catch (Exception)
        {
            /* No further handling here. */
        }

        Pylon.Terminate(); /* Releases all pylon resources. */
        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/// <summary>
/// Writes a camera property to the device through the GenICam node map.
/// Integer values are snapped down to the nearest multiple of the property's
/// increment; float values are clamped to the node's advertised [min, max] range.
/// No-op when the property is unsupported, has no identifier, or its node is
/// missing or not read/write.
/// </summary>
/// <param name="deviceHandle">Open pylon device to write to.</param>
/// <param name="property">Property descriptor carrying identifier, type, value and step.</param>
public static void Write(PYLON_DEVICE_HANDLE deviceHandle, CameraProperty property)
{
    if (!property.Supported || string.IsNullOrEmpty(property.Identifier))
        return;

    // If the "auto" flag is OFF we should write it first: on some cameras the value
    // is not writable until the corresponding auto feature is off. If it's ON
    // (continuous) the order doesn't matter, as our value will be overwritten soon anyway.
    if (!string.IsNullOrEmpty(property.AutomaticIdentifier))
    {
        string enumValue = property.Automatic ? "Continuous" : "Off";
        PylonHelper.WriteEnum(deviceHandle, property.AutomaticIdentifier, enumValue);
    }

    NODEMAP_HANDLE nodeMapHandle = Pylon.DeviceGetNodeMap(deviceHandle);
    NODE_HANDLE nodeHandle = GenApi.NodeMapGetNode(nodeMapHandle, property.Identifier);
    if (!nodeHandle.IsValid)
        return;

    EGenApiAccessMode accessMode = GenApi.NodeGetAccessMode(nodeHandle);
    if (accessMode != EGenApiAccessMode.RW)
    {
        // Only report an error when we actually expected the node to be writable,
        // i.e. when we just turned the auto mode off above.
        if (!string.IsNullOrEmpty(property.AutomaticIdentifier) && !property.Automatic)
        {
            log.ErrorFormat("Error while writing Basler Pylon GenICam property {0}", property.Identifier);
            log.ErrorFormat("The property is not writable.");
        }

        return;
    }

    try
    {
        switch (property.Type)
        {
            case CameraPropertyType.Integer:
            {
                long value = long.Parse(property.CurrentValue, CultureInfo.InvariantCulture);
                long step = long.Parse(property.Step, CultureInfo.InvariantCulture);

                // Snap down to the nearest multiple of the increment.
                // Guard against a zero (or negative) step: the original code divided
                // unconditionally and threw DivideByZeroException for Step == "0".
                if (step > 0)
                {
                    long remainder = value % step;
                    if (remainder > 0)
                        value = value - remainder;
                }

                GenApi.IntegerSetValue(nodeHandle, value);
                break;
            }
            case CameraPropertyType.Float:
            {
                // Clamp to the valid range advertised by the node.
                double max = GenApi.FloatGetMax(nodeHandle);
                double min = GenApi.FloatGetMin(nodeHandle);
                double value = double.Parse(property.CurrentValue, CultureInfo.InvariantCulture);
                value = Math.Min(Math.Max(value, min), max);
                GenApi.FloatSetValue(nodeHandle, value);
                break;
            }
            case CameraPropertyType.Boolean:
            {
                bool value = bool.Parse(property.CurrentValue);
                GenApi.BooleanSetValue(nodeHandle, value);
                break;
            }
            default:
                break;
        }
    }
    catch (Exception e)
    {
        // Include the exception detail: the previous bare catch discarded the
        // reason, making write failures impossible to diagnose from the log.
        log.ErrorFormat("Error while writing Basler Pylon GenICam property {0}: {1}", property.Identifier, e.Message);
    }
}
/* Demonstrates handling of enumeration features, i.e. camera features that take a
 * value from a fixed set of possibilities, using the pixel format feature as the
 * example. Enumeration values are exchanged as strings via
 * PylonDeviceFeatureFromString() / PylonDeviceFeatureToString(). */
private static void demonstrateEnumFeature(PYLON_DEVICE_HANDLE hDev)
{
    /* Read and report the current pixel format. */
    string originalFormat = Pylon.DeviceFeatureToString(hDev, "PixelFormat");
    Console.WriteLine("PixelFormat: {0}", originalFormat);

    /* The pylon Viewer's "Feature Documentation" window lists the possible values
     * of an enumeration feature, but a given device may not support all of them.
     * Whether "SomeValue" can be set on "SomeFeature" is probed by asking for the
     * availability of "EnumEntry_SomeFeature_SomeValue". */
    bool hasMono8 = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
    Console.WriteLine("Mono8 {0} a supported value for the PixelFormat feature.", hasMono8 ? "is" : "isn't");

    bool hasYUV422Packed = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_YUV422Packed");
    Console.WriteLine("YUV422Packed {0} a supported value for the PixelFormat feature.", hasYUV422Packed ? "is" : "isn't");

    bool hasMono16 = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono16");
    Console.WriteLine("Mono16 {0} a supported value for the PixelFormat feature.", hasMono16 ? "is" : "isn't");

    /* Before writing, check that the enumeration feature itself is currently writable. */
    bool canWrite = Pylon.DeviceFeatureIsWritable(hDev, "PixelFormat");
    if (!canWrite)
        return;

    /* The feature is writable: set it to one of the supported values,
     * preferring Mono16, then YUV422Packed, then Mono8. */
    if (hasMono16)
    {
        Console.WriteLine("Setting PixelFormat to Mono16.");
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono16");
    }
    else if (hasYUV422Packed)
    {
        Console.WriteLine("Setting PixelFormat to YUV422Packed.");
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "YUV422Packed");
    }
    else if (hasMono8)
    {
        Console.WriteLine("Setting PixelFormat to Mono8.");
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");
    }

    /* Restore the pixel format that was active on entry. */
    Pylon.DeviceFeatureFromString(hDev, "PixelFormat", originalFormat);
}
/* This function demonstrates how to check the presence, readability, and writability
 * of a feature through the GenApi node map. */
private static void demonstrateAccessibilityCheck(PYLON_DEVICE_HANDLE hDev)
{
    NODEMAP_HANDLE hNodeMap;
    NODE_HANDLE hNode;
    string featureName;
    bool val, val_read, val_write;

    /* Get a handle for the device's node map. */
    hNodeMap = Pylon.DeviceGetNodeMap(hDev);

    /* Check to see if a feature is implemented at all. The 'Width' feature is likely to
     * be implemented by just about every existing camera. */
    featureName = "Width";
    hNode = GenApi.NodeMapGetNode(hNodeMap, featureName);
    if (hNode.IsValid)
    {
        /* Node exists. Check whether the feature is implemented. */
        val = GenApi.NodeIsImplemented(hNode);
    }
    else
    {
        /* Node does not exist. Feature is not implemented. */
        val = false;
    }
    Console.WriteLine("The '{0}' feature {1} implemented", featureName, val ? "is" : "is not");

    /* This feature most likely does not exist. */
    featureName = "Weirdness";
    hNode = GenApi.NodeMapGetNode(hNodeMap, featureName);
    if (hNode.IsValid)
    {
        val = GenApi.NodeIsImplemented(hNode);
    }
    else
    {
        val = false;
    }
    Console.WriteLine("The '{0}' feature {1} implemented.", featureName, val ? "is" : "is not");

    /* Although a feature is implemented by the device, it may not be available
     * with the device in its current state. GenApi.NodeIsAvailable returns false
     * if the feature is either not implemented or currently not available. */
    featureName = "BinningVertical";
    hNode = GenApi.NodeMapGetNode(hNodeMap, featureName);
    if (hNode.IsValid)
    {
        /* Node exists. Check whether the feature is available. */
        val = GenApi.NodeIsAvailable(hNode);
    }
    else
    {
        /* Node does not exist. Feature is not implemented, and hence not available. */
        val = false;
    }
    /* BUG FIX: this line previously said "implemented" although availability is
     * what was checked (and what the surrounding comment describes). */
    Console.WriteLine("The '{0}' feature {1} available.", featureName, val ? "is" : "is not");

    /* An available feature can be read-only, write-only, or both readable and
     * writable, and this can change with device state (e.g. Width may not be
     * writable while the camera is acquiring images). */
    featureName = "Width";
    hNode = GenApi.NodeMapGetNode(hNodeMap, featureName);
    if (hNode.IsValid)
    {
        val_read = GenApi.NodeIsReadable(hNode);
        /* BUG FIX: writability was previously queried with NodeIsReadable
         * (copy-paste error), so the "writable" line always mirrored the
         * "readable" line. */
        val_write = GenApi.NodeIsWritable(hNode);
    }
    else
    {
        /* Node does not exist. Feature is neither readable nor writable. */
        val_read = val_write = false;
    }
    Console.WriteLine("The '{0}' feature {1} readable.", featureName, val_read ? "is" : "is not");
    Console.WriteLine("The '{0}' feature {1} writable.", featureName, val_write ? "is" : "is not");
    Console.WriteLine("");
}
void OnTriggerEnter2D(Collider2D other) { if (other.gameObject.tag == "NPC") { textPanel.SetActive(true); npcScript = other.gameObject.GetComponent <Character>(); textPanel.GetComponentInChildren <Text>().text = npcScript.approach; if (Input.GetKeyDown("space")) { dialoguePanel.SetActive(true); for (int i = 0; i < dialogueText.Length; i++) { dialogueText[i].text = npcScript.buttonText[i]; } textPanel.GetComponentInChildren <Text>().text = npcScript.greeting; anim.SetTrigger("Talk"); npcScript.anim.SetTrigger("Talk"); } } else if (other.gameObject.tag == "Pylon") { textPanel.SetActive(true); pylonScript = other.gameObject.GetComponent <Pylon>(); textPanel.GetComponentInChildren <Text>().text = pylonScript.approach; if (pylonScript.visited == false) { if (puzzleOrder == pylonScript.swordPuzzleNumber) { puzzleOrder += 1; pylonScript.anim.SetTrigger("Glow"); pylonScript.visited = true; if (pylonScript.swordPuzzleNumber == 3) { UnlockSword(); } } else if (puzzleOrder != pylonScript.swordPuzzleNumber) { puzzleOrder = 0; for (int i = 0; i < pylons.Length; i++) { pylons[i].anim.SetTrigger("Idle"); pylons[i].visited = false; } } } } else if (other.gameObject.tag == "Sword") { textPanel.SetActive(true); textPanel.GetComponentInChildren <Text>().text = other.gameObject.GetComponent <Sword>().approach; if (Input.GetKeyDown("space")) { textPanel.GetComponentInChildren <Text>().text = other.gameObject.GetComponent <Sword>().taken; hasSword = true; other.gameObject.GetComponent <SpriteRenderer>().enabled = false; } } }
/* There are camera features that behave like enumerations: they take a value from a
 * fixed set of possible values. This function illustrates how to deal with such
 * features through the GenApi node map, using the pixel format feature as example. */
private static void demonstrateEnumFeature(PYLON_DEVICE_HANDLE hDev)
{
    string featureName = "PixelFormat";
    NODEMAP_HANDLE hNodeMap;
    NODE_HANDLE hNode;
    EGenApiNodeType nodeType;
    bool bval;

    /* Get a handle for the device's node map. */
    hNodeMap = Pylon.DeviceGetNodeMap(hDev);

    /* Look up the feature node. */
    hNode = GenApi.NodeMapGetNode(hNodeMap, featureName);
    if (!hNode.IsValid)
    {
        Console.WriteLine("There is no feature named '" + featureName + "'.");
        return;
    }

    /* We want an enumeration feature node. */
    nodeType = GenApi.NodeGetType(hNode);
    if (EGenApiNodeType.EnumerationNode != nodeType)
    {
        Console.WriteLine("'" + featureName + "' is not an enumeration feature.");
        return;
    }

    /* Check to see if the feature is readable. */
    bval = GenApi.NodeIsReadable(hNode);

    /* The allowed values of an enumeration feature are represented as strings;
     * GenApi.NodeFromString / GenApi.NodeToString set and get the value. */
    if (bval)
    {
        /* Symbolic names of pixel formats. */
        string symMono8 = "Mono8", symMono16 = "Mono16", symYUV422Packed = "YUV422Packed";
        string value; /* The current value of the feature. */
        bool supportsMono8, supportsYUV422Packed, supportsMono16;
        NODE_HANDLE hEntry;

        /* Get the current value of the enumeration feature. */
        value = GenApi.NodeToString(hNode);
        Console.WriteLine("PixelFormat: {0}", value);

        /* The pylon Viewer's "Feature Documentation" window lists the possible
         * values, but a device may not support all of them. Whether a value can be
         * set is probed via GenApi.NodeIsAvailable() on the entry's node. */

        /* Check to see if the Mono8 pixel format can be set. */
        hEntry = GenApi.EnumerationGetEntryByName(hNode, symMono8);
        supportsMono8 = hEntry.IsValid && GenApi.NodeIsAvailable(hEntry);
        Console.WriteLine("{0} {1} a supported value for the pixel format feature.", symMono8, supportsMono8 ? "is" : "is not");

        /* Check to see if the YUV422Packed pixel format can be set. */
        hEntry = GenApi.EnumerationGetEntryByName(hNode, symYUV422Packed);
        supportsYUV422Packed = hEntry.IsValid && GenApi.NodeIsAvailable(hEntry);
        Console.WriteLine("{0} {1} a supported value for the pixel format feature.", symYUV422Packed, supportsYUV422Packed ? "is" : "is not");

        /* Check to see if the Mono16 pixel format can be set. */
        hEntry = GenApi.EnumerationGetEntryByName(hNode, symMono16);
        supportsMono16 = hEntry.IsValid && GenApi.NodeIsAvailable(hEntry);
        Console.WriteLine("{0} {1} a supported value for the pixel format feature.", symMono16, supportsMono16 ? "is" : "is not");

        /* Before writing, check that the enumeration feature is currently writable. */
        bval = GenApi.NodeIsWritable(hNode);
        if (bval)
        {
            /* The PixelFormat feature is writable. Set it to one of the supported
             * values, preferring Mono16, then YUV422Packed, then Mono8. */
            if (supportsMono16)
            {
                Console.WriteLine("Setting PixelFormat to Mono16.");
                GenApi.NodeFromString(hNode, symMono16);
            }
            else if (supportsYUV422Packed)
            {
                Console.WriteLine("Setting PixelFormat to YUV422Packed.");
                GenApi.NodeFromString(hNode, symYUV422Packed);
            }
            else if (supportsMono8)
            {
                Console.WriteLine("Setting PixelFormat to Mono8.");
                GenApi.NodeFromString(hNode, symMono8);
            }

            /* Reset the PixelFormat feature to its previous value. */
            GenApi.NodeFromString(hNode, value);
        }
        else
        {
            Console.WriteLine("Cannot set value for feature '{0}' - node not writable.", featureName);
        }
    }
    else
    {
        Console.WriteLine("Cannot read feature '{0}' - node not readable.", featureName);
    }
}
/// <summary>
/// Create and open the Basler device at deviceIndex and, when this is not the
/// first open, push back the stream format and selected camera properties that
/// were configured in Kinovea (e.g. via FormConfiguration).
/// On failure the error is logged and the method returns with an invalid handle.
/// </summary>
private void Open()
{
    // Unlike in the DirectShow module, we do not backup and restore camera configuration.
    // If the user configured the camera outside of Kinovea we respect the new settings.
    // Two reasons:
    // 1. In DirectShow we must do the backup/restore to work around drivers that inadvertently reset the camera properties.
    // 2. Industrial cameras have many properties that won't be configurable in Kinovea
    // so the user is more likely to configure the camera from the outside.

    // A device cannot be re-opened while streaming.
    if (grabbing)
    {
        Stop();
    }

    try
    {
        deviceHandle = Pylon.CreateDeviceByIndex(deviceIndex);
        imageProvider.Open(deviceHandle);
    }
    catch (Exception e)
    {
        log.Error("Could not open Basler device.");
        LogError(e, imageProvider.GetLastErrorMessage());
        return;
    }

    if (deviceHandle.IsValid)
    {
        SpecificInfo specific = summary.Specific as SpecificInfo;
        if (specific == null)
        {
            return;
        }

        // Expose the live handle to the rest of the module (e.g. configuration dialogs).
        specific.Handle = deviceHandle;

        StreamFormat currentStreamFormat = PylonHelper.GetCurrentStreamFormat(deviceHandle);

        // Some properties can only be changed when the camera is opened but not streaming.
        // We store them in the summary when coming back from FormConfiguration, and we write them to the camera here.
        // Only do this if it's not the first time we open the camera, to respect any change that could have been done outside Kinovea.
        if (!firstOpen)
        {
            // Re-apply the stream format chosen in Kinovea if it differs from the camera's current one.
            if (specific.StreamFormat != currentStreamFormat.Symbol)
            {
                PylonHelper.WriteStreamFormat(deviceHandle, specific.StreamFormat);
            }

            if (specific.CameraProperties != null && specific.CameraProperties.ContainsKey("framerate"))
            {
                // The framerate value is only honored by the camera when the
                // "enable framerate" switch is on, so force it on first if needed.
                if (specific.CameraProperties.ContainsKey("enableFramerate"))
                {
                    bool enabled = bool.Parse(specific.CameraProperties["enableFramerate"].CurrentValue);
                    if (!enabled && !specific.CameraProperties["enableFramerate"].ReadOnly)
                    {
                        specific.CameraProperties["enableFramerate"].CurrentValue = "true";
                        CameraPropertyManager.Write(deviceHandle, specific.CameraProperties["enableFramerate"]);
                    }
                }

                CameraPropertyManager.Write(deviceHandle, specific.CameraProperties["framerate"]);
            }

            // Image size can also only be written while not streaming.
            if (specific.CameraProperties != null && specific.CameraProperties.ContainsKey("width") && specific.CameraProperties.ContainsKey("height"))
            {
                CameraPropertyManager.Write(deviceHandle, specific.CameraProperties["width"]);
                CameraPropertyManager.Write(deviceHandle, specific.CameraProperties["height"]);
            }
        }
        else
        {
            // First open: adopt whatever format the camera currently uses.
            specific.StreamFormat = currentStreamFormat.Symbol;
        }
    }
}
/* There are camera features, such as AcquisitionStart, that represent a command.
 * This function, which loads the default user set, illustrates how to execute a
 * command feature: the UserSetSelector is first pointed at the default set, then
 * the UserSetLoad command is executed.
 *
 * Fix: the "is not a command feature" message previously printed the selector
 * name although the type check is performed on the command node. */
private static void demonstrateCommandFeature(PYLON_DEVICE_HANDLE hDev)
{
    string selectorName = "UserSetSelector", commandName = "UserSetLoad";
    NODEMAP_HANDLE hNodeMap;
    NODE_HANDLE hCommand, hSelector;
    EGenApiNodeType nodeType;
    bool bval;

    /* Get a handle for the device's node map. */
    hNodeMap = Pylon.DeviceGetNodeMap(hDev);

    /* Look up the command node. */
    hCommand = GenApi.NodeMapGetNode(hNodeMap, commandName);
    if (!hCommand.IsValid)
    {
        Console.WriteLine("There is no node named '" + commandName + "'.");
        return;
    }

    /* Look up the selector node. */
    hSelector = GenApi.NodeMapGetNode(hNodeMap, selectorName);
    if (!hSelector.IsValid)
    {
        Console.WriteLine("There is no node named '" + selectorName + "'.");
        return;
    }

    /* We want a command feature node. */
    nodeType = GenApi.NodeGetType(hCommand);
    if (EGenApiNodeType.CommandNode != nodeType)
    {
        /* The check is on the command node, so report the command name. */
        Console.WriteLine("'" + commandName + "' is not a command feature.");
        return;
    }

    /* Before executing the user set load command, the user set selector must be
     * set to the default set. */

    /* Check to see if the selector is writable. */
    bval = GenApi.NodeIsWritable(hSelector);
    if (bval)
    {
        /* Choose the default set (which includes one of the factory setups). */
        GenApi.NodeFromString(hSelector, "Default");
    }
    else
    {
        Console.WriteLine("Cannot set selector '{0}' - node not writable.", selectorName);
    }

    /* Check to see if the command is writable. */
    bval = GenApi.NodeIsWritable(hCommand);
    if (bval)
    {
        /* Execute the user set load command. */
        Console.WriteLine("Loading the default set.");
        GenApi.CommandExecute(hCommand);
    }
    else
    {
        Console.WriteLine("Cannot execute command '{0}' - node not writable.", commandName);
    }
}
/* This method is executed using the grab thread and is responsible for grabbing,
 * conversion of the image to a Halcon image (ho_Image) and notifying listeners
 * via OnImageReadyEvent. It runs until m_grabThreadRun is cleared or an error occurs.
 *
 * Fixes over the previous version:
 * - The local `Bitmap m_bitmap = null;` was always null when passed to
 *   BitmapFactory.IsCompatible, so a brand-new Bitmap was created for every
 *   frame and never disposed: a GDI handle leak. The dead IsCompatible branch
 *   is removed and the per-frame bitmap is disposed after conversion.
 *   NOTE(review): this assumes BitmapToHalcon copies the pixel data into
 *   ho_Image rather than keeping a reference to the bitmap — confirm.
 * - Commented-out debug code removed. */
protected void Grab()
{
    /* Notify that grabbing has started. This event can be used to update the state of the GUI. */
    OnGrabbingStartedEvent();
    try
    {
        /* Set up everything needed for grabbing. */
        SetupGrab();

        while (m_grabThreadRun) /* Is set to false when stopping to end the grab thread. */
        {
            /* Wait for the next buffer to be filled. Wait up to 1000 ms. */
            if (!Pylon.WaitObjectWait(m_hWait, 1000))
            {
                lock (m_lockObject)
                {
                    if (m_grabbedBuffers.Count != m_numberOfBuffersUsed)
                    {
                        /* Timeout occurred. Deliberately not fatal in this variant
                         * (the original throw was commented out, e.g. for external
                         * trigger or long exposure setups); keep waiting. */
                    }
                    continue;
                }
            }

            PylonGrabResult_t grabResult; /* Stores the result of a grab operation. */

            /* Since the wait operation was successful, the result of at least one grab
             * operation is available. Retrieve it. */
            if (!Pylon.StreamGrabberRetrieveResult(m_hGrabber, out grabResult))
            {
                /* The wait returned without a timeout, so a grab result should be available. */
                throw new Exception("Failed to retrieve a grab result.");
            }

            /* Check to see if the image was grabbed successfully. */
            if (grabResult.Status == EPylonGrabStatus.Grabbed)
            {
                /* Add result to the ready list. */
                EnqueueTakenImage(grabResult);

                /* Convert the latest queued image to a Halcon image and notify listeners. */
                ImageProvider.Image image = GetLatestImage();
                if (image != null)
                {
                    Bitmap bitmap = null;
                    try
                    {
                        /* Build a temporary GDI bitmap from the grabbed buffer. */
                        BitmapFactory.CreateBitmap(out bitmap, image.Width, image.Height, image.Color);
                        BitmapFactory.UpdateBitmap(bitmap, image.Buffer, image.Width, image.Height, image.Color);

                        /* Replace the previous Halcon image with the new frame. */
                        ho_Image.Dispose();
                        BitmapToHalcon(bitmap, out ho_Image);
                    }
                    finally
                    {
                        /* Release the temporary bitmap (previously leaked once per frame). */
                        if (bitmap != null)
                        {
                            bitmap.Dispose();
                        }
                    }

                    OnImageReadyEvent();
                }

                /* Exit here for single frame mode. */
                if (m_grabOnce)
                {
                    m_grabThreadRun = false;
                    break;
                }
            }
            else if (grabResult.Status == EPylonGrabStatus.Failed)
            {
                /*
                 * Grabbing an image can fail if the used network hardware, i.e. network adapter,
                 * switch or Ethernet cable, experiences performance problems.
                 * Increase the Inter-Packet Delay to reduce the required bandwidth.
                 * It is recommended to enable Jumbo Frames on the network adapter and switch.
                 * Adjust the Packet Size on the camera to the highest supported frame size.
                 * If this did not resolve the problem, check if the recommended hardware is used.
                 * Aggressive power saving settings for the CPU can also cause the image grab to fail.
                 */
                throw new Exception(string.Format("A grab failure occurred. The error code is {0}.", grabResult.ErrorCode));
            }
        }

        /* Tear down everything needed for grabbing. */
        CleanUpGrab();
    }
    catch (Exception e)
    {
        MyDebug.ShowMessage(e, "抓图异常:=Pylon");

        /* The grabbing stops due to an error. Set m_grabThreadRun to false to avoid
         * that any more buffers are queued for grabbing. */
        m_grabThreadRun = false;

        /* Get the last error message here, because it could be overwritten by cleaning up. */
        string lastErrorMessage = GetLastErrorText();

        try
        {
            /* Try to tear down everything needed for grabbing. */
            CleanUpGrab();
        }
        catch
        {
            /* Another exception cannot be handled. */
        }

        /* Notify that grabbing has stopped. This event could be used to update the state of the GUI. */
        OnGrabbingStoppedEvent();

        if (!m_removed) /* In case the device was removed from the PC suppress the notification. */
        {
            /* Notify that the grabbing had errors and deliver the information. */
            OnGrabErrorEvent(e, lastErrorMessage);
        }
        return;
    }

    /* Notify that grabbing has stopped. This event could be used to update the state of the GUI. */
    OnGrabbingStoppedEvent();
}
/* Open the image provider by enumeration index.
 * Pylon.EnumerateDevices() must have been called before using this overload. */
public void Open(uint index)
{
    /* Create the device handle for the given index and delegate to the handle-based overload. */
    PYLON_DEVICE_HANDLE device = Pylon.CreateDeviceByIndex(index);
    Open(device);
}
/* Tear down everything set up for grabbing: destroy the optional format converter,
 * cancel the grab, drain and deregister all buffers, release their memory, and
 * finish the grab on the stream grabber.
 *
 * This is best-effort cleanup: any exception is intentionally suppressed so that
 * teardown never crashes the caller (it typically runs from error paths).
 * Fix: the catch no longer declares an unused exception variable and the
 * suppression is documented. */
protected void CleanUpGrab()
{
    try
    {
        /* ... Stop the camera. */
        //Pylon.DeviceExecuteCommandFeature(m_hDevice, "AcquisitionStop");

        /* Destroy the format converter if one was used. */
        if (m_hConverter.IsValid)
        {
            /* Destroy the converter. */
            Pylon.PixelFormatConverterDestroy(m_hConverter);

            /* Set the handle invalid. The next grab cycle may not need a converter. */
            m_hConverter.SetInvalid();

            /* Release the converted image buffers. */
            foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in m_convertedBuffers)
            {
                pair.Value.Dispose();
            }
            m_convertedBuffers = null;
        }

        /* ... We must issue a cancel call to ensure that all pending m_buffers are put into the
         * stream grabber's output queue. */
        Pylon.StreamGrabberCancelGrab(m_hGrabber);

        /* ... The m_buffers can now be retrieved from the stream grabber. */
        {
            bool isReady; /* Used as an output parameter. */
            do
            {
                PylonGrabResult_t grabResult; /* Stores the result of a grab operation. */
                isReady = Pylon.StreamGrabberRetrieveResult(m_hGrabber, out grabResult);
            }
            while (isReady);
        }

        /* ... When all m_buffers are retrieved from the stream grabber, they can be deregistered.
         * After deregistering the m_buffers, it is safe to free the memory. */
        foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in m_buffers)
        {
            Pylon.StreamGrabberDeregisterBuffer(m_hGrabber, pair.Key);
        }

        /* The buffers can now be released. */
        foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in m_buffers)
        {
            pair.Value.Dispose();
        }
        m_buffers.Clear();

        /* ... Release grabbing related resources. */
        Pylon.StreamGrabberFinishGrab(m_hGrabber);

        /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g.,
         * the AOI width and height parameters) are unlocked and can be modified again. */
    }
    catch
    {
        /* Best-effort cleanup: swallow any teardown failure so error paths can proceed. */
    }
}
/* Open the image provider using an already-created device handle.
 * Opens the device for control and streaming, registers the removal callback,
 * applies a baseline configuration (packet size, chunk mode off, all known
 * trigger selectors off) and opens a stream grabber on the first channel.
 * On any failure the last error is captured, handles are closed and the
 * exception is rethrown. */
public void Open(PYLON_DEVICE_HANDLE device)
{
    try
    {
        /* Use the provided device and open it for configuring parameters
         * and for grabbing images. */
        m_hDevice = device;
        Pylon.DeviceOpen(m_hDevice, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

        /* Register the removal callback function. */
        m_hRemovalCallback = Pylon.DeviceRegisterRemovalCallback(m_hDevice, m_callbackHandler);

        /* For GigE cameras, increasing the packet size improves performance.
         * With jumbo-frame capable adapters a value > 1500 (e.g. 8192) could be
         * used; this code only sets 1500. Only applied when the feature exists
         * and is writable. */
        if (Pylon.DeviceFeatureIsWritable(m_hDevice, "GevSCPSPacketSize"))
        {
            Pylon.DeviceSetIntegerFeature(m_hDevice, "GevSCPSPacketSize", 1500);
        }

        /* Chunk mode is not supported by this code path; disable it when present. */
        if (Pylon.DeviceFeatureIsWritable(m_hDevice, "ChunkModeActive"))
        {
            Pylon.DeviceSetBooleanFeature(m_hDevice, "ChunkModeActive", false);
        }

        /* Turn off a trigger for a given selector when the device exposes it. */
        Action<string, string> disableTrigger = (entryName, selector) =>
        {
            if (Pylon.DeviceFeatureIsAvailable(m_hDevice, entryName))
            {
                Pylon.DeviceFeatureFromString(m_hDevice, "TriggerSelector", selector);
                Pylon.DeviceFeatureFromString(m_hDevice, "TriggerMode", "Off");
            }
        };

        disableTrigger("EnumEntry_TriggerSelector_AcquisitionStart", "AcquisitionStart");
        disableTrigger("EnumEntry_TriggerSelector_FrameBurstStart", "FrameBurstStart");
        disableTrigger("EnumEntry_TriggerSelector_FrameStart", "FrameStart");

        /* Image grabbing is done through a stream grabber; one grabber per stream.
         * This code uses the default stream (index == 0). Make sure the transport
         * layer exposes at least one stream. */
        if (Pylon.DeviceGetNumStreamGrabberChannels(m_hDevice) < 1)
        {
            throw new Exception("The transport layer doesn't support image streams.");
        }

        /* Create and open a stream grabber for the first channel. */
        m_hGrabber = Pylon.DeviceGetStreamGrabber(m_hDevice, 0);
        Pylon.StreamGrabberOpen(m_hGrabber);

        /* The wait object allows waiting for m_buffers to be filled with grabbed data. */
        m_hWait = Pylon.StreamGrabberGetWaitObject(m_hGrabber);
    }
    catch
    {
        /* Capture the last error first: cleaning up may overwrite it. */
        UpdateLastError();
        try
        {
            Close(); /* Try to close any open handles. */
        }
        catch
        {
            /* Another exception cannot be handled. */
        }
        throw;
    }

    /* Notify that the ImageProvider is open and ready for grabbing and configuration. */
    OnDeviceOpenedEvent();
}
const uint NUM_BUFFERS = 5; /* Number of buffers used for grabbing. */

/* Sample entry point: opens the first pylon camera found, configures it for
 * continuous Mono8 acquisition with triggers disabled, grabs frames using
 * NUM_BUFFERS rotating buffers, prints the min/max gray value of each frame,
 * then tears everything down in the required order.
 * NOTE(review): NUM_GRABS is defined elsewhere in this file. */
static void Main(string[] args)
{
    PYLON_DEVICE_HANDLE hDev = new PYLON_DEVICE_HANDLE(); /* Handle for the pylon device. */
    try
    {
        uint numDevices;                     /* Number of available devices. */
        PYLON_STREAMGRABBER_HANDLE hGrabber; /* Handle for the pylon stream grabber. */
        PYLON_WAITOBJECT_HANDLE hWait;       /* Handle used for waiting for a grab to be finished. */
        uint payloadSize;                    /* Size of an image frame in bytes. */
        Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> buffers; /* Holds handles and buffers used for grabbing. */
        PylonGrabResult_t grabResult;        /* Stores the result of a grab operation. */
        int nGrabs;                          /* Counts the number of buffers grabbed. */
        uint nStreams;                       /* The number of streams provided by the device. */
        bool isAvail;                        /* Used for checking feature availability. */
        bool isReady;                        /* Used as an output parameter. */
        int i;                               /* Counter. */

#if DEBUG
        /* This is a special debug setting needed only for GigE cameras.
         * See 'Building Applications with pylon' in the programmer's guide. */
        Environment.SetEnvironmentVariable("PYLON_GIGE_HEARTBEAT", "300000" /*ms*/);
#endif

        /* Before using any pylon methods, the pylon runtime must be initialized. */
        Pylon.Initialize();

        /* Enumerate all camera devices. You must call
         * PylonEnumerateDevices() before creating a device. */
        numDevices = Pylon.EnumerateDevices();
        if (0 == numDevices)
        {
            throw new Exception("No devices found.");
        }

        /* Get a handle for the first device found. */
        hDev = Pylon.CreateDeviceByIndex(0);

        /* Before using the device, it must be opened. Open it for configuring
         * parameters and for grabbing images. */
        Pylon.DeviceOpen(hDev, Pylon.cPylonAccessModeControl | Pylon.cPylonAccessModeStream);

        /* Print out the name of the camera we are using. */
        {
            bool isReadable = Pylon.DeviceFeatureIsReadable(hDev, "DeviceModelName");
            if (isReadable)
            {
                string name = Pylon.DeviceFeatureToString(hDev, "DeviceModelName");
                Console.WriteLine("Using camera {0}.", name);
            }
        }

        /* Set the pixel format to Mono8, where gray values will be output as 8 bit values for each pixel. */
        /* ... Check first to see if the device supports the Mono8 format. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_PixelFormat_Mono8");
        if (!isAvail)
        {
            /* Feature is not available. */
            throw new Exception("Device doesn't support the Mono8 pixel format.");
        }

        /* ... Set the pixel format to Mono8. */
        Pylon.DeviceFeatureFromString(hDev, "PixelFormat", "Mono8");

        /* Disable acquisition start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_AcquisitionStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "AcquisitionStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Disable frame burst start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameBurstStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameBurstStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* Disable frame start trigger if available. */
        isAvail = Pylon.DeviceFeatureIsAvailable(hDev, "EnumEntry_TriggerSelector_FrameStart");
        if (isAvail)
        {
            Pylon.DeviceFeatureFromString(hDev, "TriggerSelector", "FrameStart");
            Pylon.DeviceFeatureFromString(hDev, "TriggerMode", "Off");
        }

        /* We will use the Continuous frame mode, i.e., the camera delivers
         * images continuously. */
        Pylon.DeviceFeatureFromString(hDev, "AcquisitionMode", "Continuous");

        /* For GigE cameras, we recommend increasing the packet size for better
         * performance. When the network adapter supports jumbo frames, set the packet
         * size to a value > 1500, e.g., to 8192. In this sample, we only set the packet size
         * to 1500. */
        /* ... Check first to see if the GigE camera packet size parameter is supported and if it is writable. */
        isAvail = Pylon.DeviceFeatureIsWritable(hDev, "GevSCPSPacketSize");
        if (isAvail)
        {
            /* ... The device supports the packet size feature. Set a value. */
            Pylon.DeviceSetIntegerFeature(hDev, "GevSCPSPacketSize", 1500);
        }

        /* Determine the required size of the grab buffer. */
        payloadSize = checked((uint)Pylon.DeviceGetIntegerFeature(hDev, "PayloadSize"));

        /* Image grabbing is done using a stream grabber.
         * A device may be able to provide different streams. A separate stream grabber must
         * be used for each stream. In this sample, we create a stream grabber for the default
         * stream, i.e., the first stream ( index == 0 ). */

        /* Get the number of streams supported by the device and the transport layer. */
        nStreams = Pylon.DeviceGetNumStreamGrabberChannels(hDev);
        if (nStreams < 1)
        {
            throw new Exception("The transport layer doesn't support image streams.");
        }

        /* Create and open a stream grabber for the first channel. */
        hGrabber = Pylon.DeviceGetStreamGrabber(hDev, 0);
        Pylon.StreamGrabberOpen(hGrabber);

        /* Get a handle for the stream grabber's wait object. The wait object
         * allows waiting for buffers to be filled with grabbed data. */
        hWait = Pylon.StreamGrabberGetWaitObject(hGrabber);

        /* We must tell the stream grabber the number and size of the buffers
         * we are using. */
        /* .. We will not use more than NUM_BUFFERS for grabbing. */
        Pylon.StreamGrabberSetMaxNumBuffer(hGrabber, NUM_BUFFERS);

        /* .. We will not use buffers bigger than payloadSize bytes. */
        Pylon.StreamGrabberSetMaxBufferSize(hGrabber, payloadSize);

        /* Allocate the resources required for grabbing. After this, critical parameters
         * that impact the payload size must not be changed until FinishGrab() is called. */
        Pylon.StreamGrabberPrepareGrab(hGrabber);

        /* Before using the buffers for grabbing, they must be registered at
         * the stream grabber. For each registered buffer, a buffer handle
         * is returned. After registering, these handles are used instead of the
         * buffer objects pointers. The buffer objects are held in a dictionary,
         * that provides access to the buffer using a handle as key. */
        buffers = new Dictionary<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>>();
        for (i = 0; i < NUM_BUFFERS; ++i)
        {
            PylonBuffer<Byte> buffer = new PylonBuffer<byte>(payloadSize, true);
            PYLON_STREAMBUFFER_HANDLE handle = Pylon.StreamGrabberRegisterBuffer(hGrabber, ref buffer);
            buffers.Add(handle, buffer);
        }

        /* Feed the buffers into the stream grabber's input queue. For each buffer, the API
         * allows passing in an integer as additional context information. This integer
         * will be returned unchanged when the grab is finished. In our example, we use the index of the
         * buffer as context information. */
        i = 0;
        foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers)
        {
            Pylon.StreamGrabberQueueBuffer(hGrabber, pair.Key, i++);
        }

        /* The stream grabber is now prepared. As soon the camera starts acquiring images,
         * the image data will be grabbed into the provided buffers. */

        /* Let the camera acquire images. */
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStart");

        /* Grab NUM_GRABS images. */
        nGrabs = 0; /* Counts the number of grabbed images. */
        while (nGrabs < NUM_GRABS)
        {
            int bufferIndex; /* Index of the buffer. */
            Byte min, max;

            /* Wait for the next buffer to be filled. Wait up to 1000 ms. */
            isReady = Pylon.WaitObjectWait(hWait, 1000);
            if (!isReady)
            {
                /* Timeout occurred. */
                throw new Exception("Grab timeout occurred.");
            }

            /* Since the wait operation was successful, the result of at least one grab
             * operation is available. Retrieve it. */
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
            if (!isReady)
            {
                /* Oops. No grab result available? We should never have reached this point.
                 * Since the wait operation above returned without a timeout, a grab result
                 * should be available. */
                throw new Exception("Failed to retrieve a grab result");
            }

            nGrabs++;

            /* Get the buffer index from the context information. */
            bufferIndex = (int)grabResult.Context;

            /* Check to see if the image was grabbed successfully. */
            if (grabResult.Status == EPylonGrabStatus.Grabbed)
            {
                /* Success. Perform image processing. Since we passed more than one buffer
                 * to the stream grabber, the remaining buffers are filled in the background while
                 * we do the image processing. The processed buffer won't be touched by
                 * the stream grabber until we pass it back to the stream grabber. */
                PylonBuffer<Byte> buffer; /* Reference to the buffer attached to the grab result. */

                /* Get the buffer from the dictionary. Since we also got the buffer index,
                 * we could alternatively use an array, e.g. buffers[bufferIndex]. */
                if (!buffers.TryGetValue(grabResult.hBuffer, out buffer))
                {
                    /* Oops. No buffer available? We should never have reached this point. Since all buffers are
                     * in the dictionary. */
                    throw new Exception("Failed to find the buffer associated with the handle returned in grab result.");
                }

                /* Perform processing. */
                getMinMax(buffer.Array, grabResult.SizeX, grabResult.SizeY, out min, out max);
                Console.WriteLine("Grabbed frame {0} into buffer {1}. Min. gray value = {2}, Max. gray value = {3}",
                    nGrabs, bufferIndex, min, max);

                /* Display image. */
                Pylon.ImageWindowDisplayImage<Byte>(0, buffer, grabResult);
            }
            else if (grabResult.Status == EPylonGrabStatus.Failed)
            {
                Console.Error.WriteLine("Frame {0} wasn't grabbed successfully. Error code = {1}",
                    nGrabs, grabResult.ErrorCode);
            }

            /* Once finished with the processing, requeue the buffer to be filled again. */
            Pylon.StreamGrabberQueueBuffer(hGrabber, grabResult.hBuffer, bufferIndex);
        }

        /* Clean up. */

        /* ... Stop the camera. */
        Pylon.DeviceExecuteCommandFeature(hDev, "AcquisitionStop");

        /* ... We must issue a cancel call to ensure that all pending buffers are put into the
         * stream grabber's output queue. */
        Pylon.StreamGrabberCancelGrab(hGrabber);

        /* ... The buffers can now be retrieved from the stream grabber. */
        do
        {
            isReady = Pylon.StreamGrabberRetrieveResult(hGrabber, out grabResult);
        }
        while (isReady);

        /* ... When all buffers are retrieved from the stream grabber, they can be deregistered.
         * After deregistering the buffers, it is safe to free the memory. */
        foreach (KeyValuePair<PYLON_STREAMBUFFER_HANDLE, PylonBuffer<Byte>> pair in buffers)
        {
            Pylon.StreamGrabberDeregisterBuffer(hGrabber, pair.Key);
            pair.Value.Dispose();
        }
        buffers = null;

        /* ... Release grabbing related resources. */
        Pylon.StreamGrabberFinishGrab(hGrabber);

        /* After calling PylonStreamGrabberFinishGrab(), parameters that impact the payload size (e.g.,
         * the AOI width and height parameters) are unlocked and can be modified again. */

        /* ... Close the stream grabber. */
        Pylon.StreamGrabberClose(hGrabber);

        /* ... Close and release the pylon device. The stream grabber becomes invalid
         * after closing the pylon device. Don't call stream grabber related methods after
         * closing or releasing the device. */
        Pylon.DeviceClose(hDev);
        Pylon.DestroyDevice(hDev);

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();

        /* ... Shut down the pylon runtime system. Don't call any pylon method after
         * calling Pylon.Terminate(). */
        Pylon.Terminate();
    }
    catch (Exception e)
    {
        /* Retrieve the error message. */
        string msg = GenApi.GetLastErrorMessage() + "\n" + GenApi.GetLastErrorDetail();
        Console.Error.WriteLine("Exception caught:");
        Console.Error.WriteLine(e.Message);
        if (msg != "\n")
        {
            Console.Error.WriteLine("Last error message:");
            Console.Error.WriteLine(msg);
        }

        try
        {
            if (hDev.IsValid)
            {
                /* ... Close and release the pylon device. */
                if (Pylon.DeviceIsOpen(hDev))
                {
                    Pylon.DeviceClose(hDev);
                }
                Pylon.DestroyDevice(hDev);
            }
        }
        catch (Exception)
        {
            /* No further handling here. */
        }

        Pylon.Terminate(); /* Releases all pylon resources. */

        Console.Error.WriteLine("\nPress enter to exit.");
        Console.ReadLine();
        Environment.Exit(1);
    }
}
/* Grab thread body: waits for filled buffers, enqueues successfully grabbed
 * images to the result queue and raises OnImageReadyEvent so a consumer can
 * fetch them via GetCurrentImage()/ReleaseImage(). Runs until m_grabThreadRun
 * is cleared, single-frame mode completes, or an error is raised. */
protected void Grab()
{
    /* Signal that grabbing has started (e.g. so the GUI can update its state). */
    OnGrabbingStartedEvent();
    try
    {
        /* Prepare the stream grabber, buffers and wait object. */
        SetupGrab();

        while (m_grabThreadRun) /* Cleared by Stop() to end the grab thread. */
        {
            /* Block until a buffer is filled, at most 15000 ms. */
            bool bufferFilled = Pylon.WaitObjectWait(m_hWait, 15000);
            if (!bufferFilled)
            {
                lock (m_lockObject)
                {
                    /* A timeout with buffers still outstanding is an error: this can
                     * happen with an external trigger or when the programmed exposure
                     * time exceeds the grab timeout. Otherwise, just keep waiting. */
                    if (m_grabbedBuffers.Count != m_numberOfBuffersUsed)
                    {
                        throw new Exception("A grab timeout occurred.");
                    }
                    continue;
                }
            }

            /* The wait succeeded, so at least one grab result must be retrievable. */
            PylonGrabResult_t result;
            bool retrieved = Pylon.StreamGrabberRetrieveResult(m_hGrabber, out result);
            if (!retrieved)
            {
                throw new Exception("Failed to retrieve a grab result.");
            }

            switch (result.Status)
            {
                case EPylonGrabStatus.Grabbed:
                    /* Hand the image to the output queue and notify listeners.
                     * Consumers use GetCurrentImage() to process it and ReleaseImage()
                     * to return the buffer to the stream grabber. */
                    EnqueueTakenImage(result);
                    OnImageReadyEvent();

                    /* In single-frame mode, one successful grab ends the thread. */
                    if (m_grabOnce)
                    {
                        m_grabThreadRun = false;
                    }
                    break;

                case EPylonGrabStatus.Failed:
                    /*
                     * Grabbing an image can fail if the used network hardware, i.e. network adapter,
                     * switch or Ethernet cable, experiences performance problems.
                     * Increase the Inter-Packet Delay to reduce the required bandwidth.
                     * It is recommended to enable Jumbo Frames on the network adapter and switch.
                     * Adjust the Packet Size on the camera to the highest supported frame size.
                     * If this did not resolve the problem, check if the recommended hardware is used.
                     * Aggressive power saving settings for the CPU can also cause the image grab to fail.
                     */
                    throw new Exception(string.Format("A grab failure occurred. See the method ImageProvider::Grab for more information. The error code is {0:X08}.", result.ErrorCode));
            }
        }

        /* Tear down everything needed for grabbing. */
        CleanUpGrab();
    }
    catch (Exception e)
    {
        /* Stop queuing further buffers: the grab ends because of this error. */
        m_grabThreadRun = false;

        /* Capture the last error before cleanup can overwrite it. */
        string lastErrorMessage = GetLastErrorText();

        try
        {
            CleanUpGrab();
        }
        catch
        {
            /* Another exception cannot be handled. */
        }

        /* Signal that grabbing has stopped (e.g. so the GUI can update its state). */
        OnGrabbingStoppedEvent();

        /* Suppress the error notification when the device was simply unplugged. */
        if (!m_removed)
        {
            OnGrabErrorEvent(e, lastErrorMessage);
        }
        return;
    }

    /* Signal that grabbing has stopped (e.g. so the GUI can update its state). */
    OnGrabbingStoppedEvent();
}
/// <summary>
/// Write a generic camera property with an optional "auto" companion flag.
/// If the property is currently driven automatically but the user asked for a
/// manual value, the auto flag is switched off first so the main node becomes
/// writable; after the write, the auto flag is switched back on when requested.
/// Values are clamped to the node's min/max (and increment for integers) via FixValue.
/// Fix: write failures now log the exception message instead of discarding it.
/// </summary>
/// <param name="deviceHandle">Open pylon device to write to.</param>
/// <param name="property">Property descriptor holding identifier, type, value and auto state.</param>
private static void WriteProperty(PYLON_DEVICE_HANDLE deviceHandle, CameraProperty property)
{
    if (property.ReadOnly)
    {
        return;
    }

    NODEMAP_HANDLE nodeMapHandle = Pylon.DeviceGetNodeMap(deviceHandle);

    // Switch OFF the auto flag if needed, to be able to write the main property.
    if (!string.IsNullOrEmpty(property.AutomaticIdentifier))
    {
        NODE_HANDLE nodeHandleAuto = GenApi.NodeMapGetNode(nodeMapHandle, property.AutomaticIdentifier);
        if (nodeHandleAuto.IsValid)
        {
            bool writeable = GenApi.NodeIsWritable(nodeHandleAuto);
            bool currentAuto = ReadAuto(nodeHandleAuto, property.AutomaticIdentifier);
            if (writeable && property.CanBeAutomatic && currentAuto && !property.Automatic)
            {
                WriteAuto(nodeHandleAuto, property.AutomaticIdentifier, false);
            }
        }
    }

    // At this point the auto flag is off. Write the main property.
    NODE_HANDLE nodeHandle = GenApi.NodeMapGetNode(nodeMapHandle, property.Identifier);
    if (!nodeHandle.IsValid)
    {
        return;
    }

    // The node must be fully read-write to accept a new value.
    EGenApiAccessMode accessMode = GenApi.NodeGetAccessMode(nodeHandle);
    if (accessMode != EGenApiAccessMode.RW)
    {
        return;
    }

    try
    {
        switch (property.Type)
        {
            case CameraPropertyType.Integer:
                {
                    // Clamp to the node's range and snap to its increment before writing.
                    long value = long.Parse(property.CurrentValue, CultureInfo.InvariantCulture);
                    long min = GenApi.IntegerGetMin(nodeHandle);
                    long max = GenApi.IntegerGetMax(nodeHandle);
                    long step = GenApi.IntegerGetInc(nodeHandle);
                    value = FixValue(value, min, max, step);
                    GenApi.IntegerSetValue(nodeHandle, value);
                    break;
                }
            case CameraPropertyType.Float:
                {
                    // Floats are only clamped to min/max (no increment).
                    double value = double.Parse(property.CurrentValue, CultureInfo.InvariantCulture);
                    double min = GenApi.FloatGetMin(nodeHandle);
                    double max = GenApi.FloatGetMax(nodeHandle);
                    value = FixValue(value, min, max);
                    GenApi.FloatSetValue(nodeHandle, value);
                    break;
                }
            case CameraPropertyType.Boolean:
                {
                    bool value = bool.Parse(property.CurrentValue);
                    GenApi.BooleanSetValue(nodeHandle, value);
                    break;
                }
            default:
                // Other property types are not written here.
                break;
        }
    }
    catch (Exception e)
    {
        // Include the failure reason; the previous version swallowed it entirely.
        log.ErrorFormat("Error while writing Basler Pylon GenICam property {0}. {1}", property.Identifier, e.Message);
    }

    // Finally, switch ON the auto flag if needed.
    if (!string.IsNullOrEmpty(property.AutomaticIdentifier))
    {
        NODE_HANDLE nodeHandleAuto = GenApi.NodeMapGetNode(nodeMapHandle, property.AutomaticIdentifier);
        if (nodeHandleAuto.IsValid && GenApi.NodeIsWritable(nodeHandleAuto) && property.CanBeAutomatic && property.Automatic)
        {
            WriteAuto(nodeHandleAuto, property.AutomaticIdentifier, true);
        }
    }
}