public bool SetStreamBufferCount(long count)
{
    try
    {
        // set to manual
        INodeMap sNodeMap = managedCamera.GetTLStreamNodeMap();
        IEnum sBufferCountSelector = sNodeMap.GetNode<IEnum>("StreamBufferCountMode");
        if (sBufferCountSelector == null || !sBufferCountSelector.IsWritable)
        {
            return false;
        }

        IEnumEntry iBufferCountManual = sBufferCountSelector.GetEntryByName("Manual");
        if (iBufferCountManual == null || !iBufferCountManual.IsReadable)
        {
            return false;
        }

        sBufferCountSelector.Value = iBufferCountManual.Symbolic;

        // set the value
        IInteger streamNode = sNodeMap.GetNode<IInteger>("StreamDefaultBufferCount");
        if (streamNode == null || !streamNode.IsWritable)
        {
            return false;
        }

        streamNode.Value = count;
    }
    catch
    {
        return false;
    }

    return true;
}
public static void CloseOryxCamera(IManagedCamera managedCamera, INodeMap nodeMap, int camNumber, CloseCameraMethod closeMethod)
{
    if (!managedCamera.IsInitialized())
    {
        Console.WriteLine("Camera number {0} not initialized. Cannot execute DeviceReset or FactoryReset command", camNumber.ToString());
        return;
    }

    if (managedCamera.IsStreaming())
    {
        managedCamera.EndAcquisition();
        Console.WriteLine("EndAcquisition executed from CloseOryxCamera block on camera {0}", camNumber.ToString());
    }

    if (closeMethod == CloseCameraMethod.DeInit)
    {
        managedCamera.DeInit();
        Console.WriteLine("Camera number {0} deinitialized.", camNumber.ToString());
    }
    else if (closeMethod == CloseCameraMethod.DeInitAndDeviceReset)
    {
        nodeMap.GetNode<ICommand>("DeviceReset").Execute();
        Console.WriteLine("DeviceReset command executed on camera number {0}.", camNumber.ToString());
    }
    else if (closeMethod == CloseCameraMethod.DeInitAndFactoryReset)
    {
        nodeMap.GetNode<ICommand>("FactoryReset").Execute();
        Console.WriteLine("FactoryReset command executed on camera number {0}.", camNumber.ToString());
    }
}
/// <summary>
/// OldestFirst = 0,
/// OldestFirstOverwrite = 1,
/// NewestFirst = 2,
/// NewestFirstOverwrite = 3,
/// NUMSTREAMBUFFERHANDLINGMODE = 4
/// </summary>
/// <param name="strMode"></param>
public override bool SetStreamBufferHandlingMode(string strMode)
{
    bool ret = false;
    try
    {
        INodeMap sNodeMap = m_Camera.GetTLStreamNodeMap();
        IEnum iStreamBufferHandlingMode = sNodeMap.GetNode<IEnum>("StreamBufferHandlingMode");
        if (iStreamBufferHandlingMode == null || !iStreamBufferHandlingMode.IsWritable)
        {
            return false;
        }

        IEnumEntry iMode = iStreamBufferHandlingMode.GetEntryByName(strMode);
        if (iMode == null || !iMode.IsReadable)
        {
            return false;
        }

        iStreamBufferHandlingMode.Value = iMode.Symbolic;
        ret = true;
    }
    catch (Exception ex)
    {
        LogHelper.AppLoger.Error(ex);
    }
    return ret;
}
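A minimal usage sketch (added for illustration, not part of the original source): inside the same wrapper class, the buffer handling mode and the buffer count would typically be configured together before acquisition starts. "NewestFirst" is one of the modes listed in the summary above, the count of 10 matches the SDK default mentioned in the SetStreamBufferCount example further down, and the helper name ConfigureStreamBuffers is hypothetical.

// Hypothetical convenience wrapper; both helpers return false instead of throwing on failure.
public bool ConfigureStreamBuffers()
{
    return SetStreamBufferHandlingMode("NewestFirst") && SetStreamBufferCount(10);
}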
// Disables heartbeat on GEV cameras so debugging does not incur timeout errors
static int DisableHeartbeat(IManagedCamera cam, INodeMap nodeMap, INodeMap nodeMapTLDevice)
{
    Console.WriteLine("Checking device type to see if we need to disable the camera's heartbeat...\n\n");

    //
    // Write to boolean node controlling the camera's heartbeat
    //
    // *** NOTES ***
    // This applies only to GEV cameras and only applies when in DEBUG mode.
    // GEV cameras have a heartbeat built in, but when debugging applications the
    // camera may time out due to its heartbeat. Disabling the heartbeat prevents
    // this timeout from occurring, enabling us to continue with any necessary debugging.
    // This procedure does not affect other types of cameras and will prematurely exit
    // if it determines the device in question is not a GEV camera.
    //
    // *** LATER ***
    // Since we only disable the heartbeat on GEV cameras during debug mode, it is better
    // to power cycle the camera after debugging. A power cycle will reset the camera
    // to its default settings.
    //
    IEnum iDeviceType = nodeMapTLDevice.GetNode<IEnum>("DeviceType");

    // We first need to confirm that we're working with a GEV camera
    if (iDeviceType != null && iDeviceType.IsReadable)
    {
        // Retrieve the GigEVision entry only after the DeviceType node has been validated
        IEnumEntry iDeviceTypeGEV = iDeviceType.GetEntryByName("GigEVision");
        if (iDeviceTypeGEV != null && iDeviceType.Value == iDeviceTypeGEV.Value)
        {
            Console.WriteLine("Working with a GigE camera. Attempting to disable heartbeat before continuing...\n\n");
            IBool iGEVHeartbeatDisable = nodeMap.GetNode<IBool>("GevGVCPHeartbeatDisable");
            if (iGEVHeartbeatDisable == null || !iGEVHeartbeatDisable.IsWritable)
            {
                Console.WriteLine("Unable to disable heartbeat on camera. Continuing with execution as this may be non-fatal...");
            }
            else
            {
                iGEVHeartbeatDisable.Value = true;
                Console.WriteLine("WARNING: Heartbeat on GigE camera disabled for the rest of Debug Mode.");
                Console.WriteLine("         Power cycle camera when done debugging to re-enable the heartbeat...");
            }
        }
        else
        {
            Console.WriteLine("Camera does not use GigE interface. Resuming normal execution...\n\n");
        }
    }
    else
    {
        Console.WriteLine("Unable to access TL device nodemap. Aborting...");
        return -1;
    }

    return 0;
}
bool RestoreDefaultSettings()
{
    bool result = false;
    try
    {
        for (int i = 0; i < 3; i++)
        {
            try
            {
                managedCamera.UserSetSelector.Value = UserSetSelectorEnums.Default.ToString();
                managedCamera.UserSetLoad.Execute();
                result = true;
                break;
            }
            catch (SpinnakerException)
            {
                // If UserSetLoad fails, cycle acquisition once and retry
                managedCamera.AcquisitionMode.Value = AcquisitionModeEnums.Continuous.ToString();
                managedCamera.BeginAcquisition();
                System.Threading.Thread.Sleep(500);
                managedCamera.EndAcquisition();
            }
        }

        //TODO: stream buffer default count mode to manual

        // Set stream buffer count mode to manual
        // Retrieve Stream Parameters device nodemap
        INodeMap sNodeMap = managedCamera.GetTLStreamNodeMap();

        IEnum streamBufferCountMode = sNodeMap.GetNode<IEnum>("StreamBufferCountMode");
        if (streamBufferCountMode == null || !streamBufferCountMode.IsWritable)
        {
            return false;
        }

        IEnumEntry streamBufferCountModeManual = streamBufferCountMode.GetEntryByName("Manual");
        if (streamBufferCountModeManual == null || !streamBufferCountModeManual.IsReadable)
        {
            return false;
        }

        streamBufferCountMode.Value = streamBufferCountModeManual.Value;
    }
    catch (Exception)
    {
        result = false;
    }
    return result;
}
public override bool Init(int index = 0)
{
    bool ret = false;
    try
    {
        // Retrieve singleton reference to system object
        ManagedSystem system = new ManagedSystem();

        // Retrieve list of cameras from the system
        IList<IManagedCamera> camList = system.GetCameras();
        LogHelper.AppLoger.DebugFormat("Number of cameras detected: {0}", camList.Count);
        if (camList.Count == 0)
        {
            LogHelper.AppLoger.Error("No camera was found!");
            return ret;
        }

        m_Camera = camList[index];

        // Retrieve TL device nodemap and print device information
        INodeMap nodeMapTLDevice = m_Camera.GetTLDeviceNodeMap();

        // Initialize camera
        m_Camera.Init();

        // Retrieve GenICam nodemap
        m_NodeMap = m_Camera.GetNodeMap();

        //if (!m_camera.DeviceConnectionStatus.IsRegister)
        //{
        //    Dialogs.Show("Failed to connect to the camera!");
        //    return ret;
        //}
        //CameraInfo camInfo = m_camera.GetCameraInfo();

        IString iDeviceSerialNumber = nodeMapTLDevice.GetNode<IString>("DeviceSerialNumber");
        LogHelper.AppLoger.DebugFormat("camera serial number:{0}", iDeviceSerialNumber);

        //Set embedded timestamp to on
        //EmbeddedImageInfo embeddedInfo = m_camera.GetEmbeddedImageInfo();
        //embeddedInfo.timestamp.onOff = true;
        //m_camera.SetEmbeddedImageInfo(embeddedInfo);

        SetAcquisitionMode("Continuous");

        ret = true;
    }
    catch (Exception ex)
    {
        LogHelper.AppLoger.Error(ex);
    }
    return ret;
}
public override bool SetStreamBufferCount(int bufCount)
{
    bool ret = false;
    try
    {
        INodeMap sNodeMap = m_Camera.GetTLStreamNodeMap();
        IInteger streamNode = sNodeMap.GetNode<IInteger>("StreamDefaultBufferCount");
        streamNode.Value = bufCount;
        ret = true;
    }
    catch (Exception ex)
    {
        LogHelper.AppLoger.Error(ex);
    }
    return ret;
}
public static bool SetEnumValue(this INodeMap nodeMap, string nodeName, string value)
{
    var node = nodeMap.GetNode<IEnum>(nodeName);
    if (node == null || !node.IsWritable)
    {
        return false;
    }

    var entry = node.GetEntryNode(value);
    if (entry == null)
    {
        return false;
    }

    node.IntValue = entry.Value;
    return true;
}
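A short usage sketch for the extension method above (illustrative, not from the original source): the node map is assumed to come from an already-initialized camera, as in the other examples in this list, and the node and entry names follow the GenICam SFNC.

INodeMap nodeMap = cam.GetNodeMap();   // cam: an initialized IManagedCamera (assumed)
if (!nodeMap.SetEnumValue("AcquisitionMode", "Continuous"))
{
    Console.WriteLine("Unable to set AcquisitionMode to Continuous.");
}
if (!nodeMap.SetEnumValue("ExposureAuto", "Off"))
{
    Console.WriteLine("Unable to set ExposureAuto to Off.");
}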
public bool SetStreamBufferCount(long count)
{
    try
    {
        // StreamDefaultBufferCount is the number of images to buffer on the PC
        // default is 10
        INodeMap sNodeMap = managedCamera.GetTLStreamNodeMap();
        IInteger streamNode = sNodeMap.GetNode<IInteger>("StreamDefaultBufferCount");
        if (streamNode == null || !streamNode.IsWritable)
        {
            return false;
        }
        streamNode.Value = count;
    }
    catch
    {
        return false;
    }

    return true;
}
private int SetNodeMapItem(INodeMap nodeMap, String nodeName, String entryName)
{
    try
    {
        // Retrieve enumeration node from nodemap
        IEnum iAcquisitionMode = nodeMap.GetNode<IEnum>(nodeName);
        if (iAcquisitionMode == null || !iAcquisitionMode.IsWritable)
        {
            writeLog(String.Format("Unable to set {0} to {1} (node retrieval). Aborting...\n\n", nodeName, entryName));
            return -1;
        }

        // Retrieve entry node from enumeration node
        IEnumEntry iAcquisitionModeContinuous = iAcquisitionMode.GetEntryByName(entryName);
        if (iAcquisitionModeContinuous == null || !iAcquisitionModeContinuous.IsReadable)
        {
            writeLog(String.Format("Unable to set {0} to {1} (enum entry retrieval). Aborting...\n\n", nodeName, entryName));
            return -1;
        }

        // Set symbolic from entry node as new value for enumeration node
        iAcquisitionMode.Value = iAcquisitionModeContinuous.Symbolic;
    }
    catch (SpinnakerException ex)
    {
        writeLog(String.Format("Error: {0}\n", ex.Message));
        return -1;
    }

    writeLog(String.Format("{0} set to {1}...\n", nodeName, entryName));
    return 0;
}
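A hedged usage sketch for the helper above (not in the original source): SetNodeMapItem returns 0 on success and -1 on failure, so calls can be chained with short-circuit checks. The trigger node and entry names below follow the GenICam SFNC and are given only as an example.

// Example: configure software triggering through the generic helper.
if (SetNodeMapItem(nodeMap, "TriggerMode", "Off") != 0 ||
    SetNodeMapItem(nodeMap, "TriggerSource", "Software") != 0 ||
    SetNodeMapItem(nodeMap, "TriggerMode", "On") != 0)
{
    writeLog("Trigger configuration failed.\n");
}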
void PrintDeviceInfo(INodeMap nodeMap)
{
    try
    {
        Console.WriteLine("\n*** DEVICE INFORMATION ***\n");

        ICategory category = nodeMap.GetNode<ICategory>("DeviceInformation");
        if (category != null && category.IsReadable)
        {
            for (int i = 0; i < category.Children.Length; i++)
            {
                Console.WriteLine("{0}: {1}",
                    category.Children[i].Name,
                    (category.Children[i].IsReadable ? category.Children[i].ToString() : "Node not available"));
            }
            Console.WriteLine();
        }
        else
        {
            Console.WriteLine("Device control information not available.");
        }
    }
    catch (SpinnakerException ex)
    {
        Console.WriteLine("Error: {0}", ex.Message);
    }
}
public bool SetSequencerMode(bool enable)
{
    bool result = false;
    try
    {
        IEnum iSequencerMode = nodeMap.GetNode<IEnum>("SequencerMode");
        IEnumEntry iSequencerModeOn = iSequencerMode.GetEntryByName("On");
        IEnumEntry iSequencerModeOff = iSequencerMode.GetEntryByName("Off");

        if (enable)
        {
            //
            // Turn sequencer mode on
            //
            // *** NOTES ***
            // After sequencer mode has been turned on, the camera will
            // begin using the saved states in the order that they were set.
            //
            // *** LATER ***
            // Once all images have been captured, disable the sequencer
            // in order to restore the camera to its initial state.
            //
            if (iSequencerMode.Value.Int != iSequencerModeOn.Value)
            {
                if (!iSequencerMode.IsWritable)
                {
                    throw new Exception("Unable to set SequencerMode to 'On': node is not writable.");
                }
                iSequencerMode.Value = iSequencerModeOn.Value;
            }
        }
        else
        {
            //
            // Turn sequencer mode back off
            //
            // *** NOTES ***
            // Between uses, it is best to disable the sequencer until it
            // is once again required.
            //
            if (iSequencerMode.Value.Int != iSequencerModeOff.Value)
            {
                if (!iSequencerMode.IsWritable)
                {
                    throw new Exception("Unable to set SequencerMode to 'Off': node is not writable.");
                }
                iSequencerMode.Value = iSequencerModeOff.Value;
            }
        }
        result = true;
    }
    catch (Exception)
    {
        result = false;
    }
    return result;
}
static void Main(string[] args)
{
    try
    {
        using (CStApiAutoInit api = new CStApiAutoInit())
        // =============================================================================================
        // Note: Creating the system will generate a broadcast device scan.
        using (CStSystem system = new CStSystem(eStSystemVendor.Sentech, eStInterfaceType.GigEVision))
        {
            // Input target IP address for scanning.
            Console.Write("Please input camera IP address for unicast camera scanning: ");
            string value = Console.ReadLine();
            IPAddress ipaddr;

            // Try to parse the input address; if parsing fails, exit the program.
            if (!IPAddress.TryParse(value.Trim(), out ipaddr))
            {
                Console.WriteLine("IP address is not valid.");
                Console.WriteLine("\r\nPress Enter to exit.");
                Console.ReadLine();
                Environment.Exit(0);
            }

            // Convert the IP address string to a 32-bit number.
            byte[] bytes = ipaddr.GetAddressBytes();
            uint uiTgtDevIPAddress = (uint)(IPAddress.NetworkToHostOrder(BitConverter.ToUInt32(bytes, 0)) >> 32);

            // Acquire the interface count.
            uint uiCntInterface = system.InterfaceCount;
            for (uint i = 0; i < uiCntInterface; i++)
            {
                IStInterface pInterface = system.GetIStInterface(i);

                // Set the Discovery Command Destination IP Address to enable unicast camera scanning.
                // *This is 255.255.255.255 by default for broadcast device scanning.
                INodeMap nodeMapInterface = pInterface.GetIStPort().GetINodeMap();
                IInteger intDestinationIPAddress = nodeMapInterface.GetNode<IInteger>("GevDeviceDiscoveryCommandDestinationIPAddress");
                intDestinationIPAddress.SetValue(uiTgtDevIPAddress, false);

                // After setting the destination IP, call update to run the actual scanning process.
                // The unicast scanning packet is sent by this function.
                pInterface.UpdateDeviceList();
            }

            using (CStDevice device = system.CreateFirstStDevice())
            {
                // Print out information about the device if it is found and connected.
                // Otherwise execution goes into the exception handler.
                Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName);
                Console.WriteLine("Device found and connected.");
            }
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("An exception occurred. \r\n" + e.Message);
    }
    finally
    {
        Console.WriteLine("\r\nPress Enter to exit.");
        Console.ReadLine();
    }
}
// This function prepares, saves, and cleans up an video from a list of images. int SaveListToVideo(INodeMap nodeMap, INodeMap nodeMapTLDevice, ref List <IManagedImage> images) { int result = 0; Console.WriteLine("\n\n*** CREATING VIDEO ***\n"); try { // Retrieve device serial number for filename String deviceSerialNumber = ""; IString iDeviceSerialNumber = nodeMapTLDevice.GetNode <IString>("DeviceSerialNumber"); if (iDeviceSerialNumber != null && iDeviceSerialNumber.IsReadable) { deviceSerialNumber = iDeviceSerialNumber.Value; Console.WriteLine("Device serial number retrieved as {0}...", deviceSerialNumber); } // // Retrieve the current frame rate; acquisition frame rate recorded in hertz // // *** NOTES *** // The video frame rate can be set to anything; however, in // order to have videos play in real-time, the acquisition frame // rate can be retrieved from the camera. // IFloat iAcquisitionFrameRate = nodeMap.GetNode <IFloat>("AcquisitionFrameRate"); if (iAcquisitionFrameRate == null || !iAcquisitionFrameRate.IsReadable) { Console.WriteLine("Unable to retrieve frame rate. Aborting...\n"); return(-1); } float frameRateToSet = (float)iAcquisitionFrameRate.Value; Console.WriteLine("Frame rate to be set to {0}", frameRateToSet); // // Create a unique filename // // *** NOTES *** // This example creates filenames according to the type of // video being created. Notice that '.avi' does not need to be // appended to the name of the file. This is because the video // recorder object takes care of the file extension // automatically. // string videoFilename; switch (chosenFileType) { case VideoType.Uncompressed: videoFilename = "SaveToAvi-CSharp-Uncompressed"; if (deviceSerialNumber != "") { videoFilename = videoFilename + "-" + deviceSerialNumber; } break; case VideoType.Mjpg: videoFilename = "SaveToAvi-CSharp-MJPG"; if (deviceSerialNumber != "") { videoFilename = videoFilename + "-" + deviceSerialNumber; } break; case VideoType.H264: videoFilename = "SaveToAvi-CSharp-H264"; if (deviceSerialNumber != "") { videoFilename = videoFilename + "-" + deviceSerialNumber; } break; default: videoFilename = "SaveToAvi-CSharp"; break; } // // Select option and open video file type // // *** NOTES *** // Depending on the filetype, a number of settings need to be // set in an object called an option. An uncompressed option // only needs to have the video frame rate set whereas videos // with MJPG or H264 compressions should have more values set. // // Once the desired option object is configured, open the video // file with the option in order to create the image file. // // *** LATER *** // Once all images have been added, it is important to close the // file - this is similar to many other standard file streams. // using (IManagedSpinVideo video = new ManagedSpinVideo()) { // Set maximum video file size to 2GB. A new video file is generated when 2GB // limit is reached. Setting maximum file size to 0 indicates no limit. 
const uint FileMaxSize = 2048; video.SetMaximumFileSize(FileMaxSize); switch (chosenFileType) { case VideoType.Uncompressed: AviOption uncompressedOption = new AviOption(); uncompressedOption.frameRate = frameRateToSet; video.Open(videoFilename, uncompressedOption); break; case VideoType.Mjpg: MJPGOption mjpgOption = new MJPGOption(); mjpgOption.frameRate = frameRateToSet; mjpgOption.quality = 75; video.Open(videoFilename, mjpgOption); break; case VideoType.H264: H264Option h264Option = new H264Option(); h264Option.frameRate = frameRateToSet; h264Option.bitrate = 1000000; h264Option.height = Convert.ToInt32(images[0].Height); h264Option.width = Convert.ToInt32(images[0].Width); video.Open(videoFilename, h264Option); break; } // // Construct and save video // // *** NOTES *** // Although the video file has been opened, images must be // individually appended in order to construct the video. // Console.WriteLine("Appending {0} images to video file {1}.avi...", images.Count, videoFilename); for (int imageCnt = 0; imageCnt < images.Count; imageCnt++) { video.Append(images[imageCnt]); Console.WriteLine("Appended image {0}...", imageCnt); } Console.WriteLine(); // // Close video file // // *** NOTES *** // Once all images have been appended, it is important to // close the video file. Notice that once an video file has // been closed, no more images can be added. // video.Close(); } } catch (SpinnakerException ex) { Console.WriteLine("Error: {0}", ex.Message); result = -1; } return(result); }
static void Main(string[] args) { try { using (CStApiAutoInit api = new CStApiAutoInit()) using (CStSystem system = new CStSystem(eStSystemVendor.Sentech)) using (CStDevice device = system.CreateFirstStDevice()) using (CStImageDisplayWnd wnd = new CStImageDisplayWnd()) using (CStDataStream dataStream = device.CreateStDataStream(0)) { Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName); // ============================================================================================================== // Demostration of Setting Line2 as Strobe Out // Create NodeMap pointer for accessing parameters INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap(); // Set Line2 to output IEnum enumLineSelector = nodeMap.GetNode <IEnum>("LineSelector"); enumLineSelector.FromString("Line2"); IEnum enumLineMode = nodeMap.GetNode <IEnum>("LineMode"); enumLineMode.FromString("Output"); // Switch Line2 output source to Timer 0 Active IEnum enumLineSource = nodeMap.GetNode <IEnum>("LineSource"); enumLineSource.FromString("Timer0Active"); // Set Timer 0 trigger source to Exposure Start, which means Timer 0 will output signal when camera start exposure IEnum enumTimerSelector = nodeMap.GetNode <IEnum>("TimerSelector"); enumTimerSelector.FromString("Timer0"); // Set exposure start as the trigger source of Timer0 IEnum enumTimerTriggerSource = nodeMap.GetNode <IEnum>("TimerTriggerSource"); enumTimerTriggerSource.FromString("ExposureStart"); // Set Timer0 output duration to 1000us IFloat floatTimerDuration = nodeMap.GetNode <IFloat>("TimerDuration"); floatTimerDuration.Value = 1000; // ============================================================================================================== dataStream.StartAcquisition(nCountOfImagesToGrab); device.AcquisitionStart(); while (dataStream.IsGrabbing) { using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000)) { if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent) { IStImage stImage = streamBuffer.GetIStImage(); string strText = device.GetIStDeviceInfo().DisplayName + " "; strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " "; strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS); wnd.SetUserStatusBarText(strText); if (!wnd.IsVisible) { wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight); wnd.Show(eStWindowMode.ModalessOnNewThread); } wnd.RegisterIStImage(stImage); } else { Console.WriteLine("Image data does not exist."); } } } device.AcquisitionStop(); dataStream.StopAcquisition(); } } catch (Exception e) { Console.Error.WriteLine("An exception occurred. \r\n" + e.Message); } finally { Console.WriteLine("\r\nPress Enter to exit."); Console.ReadLine(); } }
static void Main(string[] args) { try { using (CStApiAutoInit api = new CStApiAutoInit()) using (CStSystem system = new CStSystem(eStSystemVendor.Sentech)) using (CStDevice device = system.CreateFirstStDevice()) using (CStImageDisplayWnd wnd = new CStImageDisplayWnd()) using (CStDataStream dataStream = device.CreateStDataStream(0)) { Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName); // ============================================================================================================== // Demostration of changing exposure time(us) of camera. // Create NodeMap pointer for accessing parameters INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap(); // Switch off exposure auto. If exposure auto is on, exposure time cannot be set. IEnum enumExposureAuto = nodeMap.GetNode <IEnum>("ExposureAuto"); enumExposureAuto.FromString("Off"); // For setting camera exposure time, exposure mode must set to Timed to enable value input IEnum enumExpoMode = nodeMap.GetNode <IEnum>("ExposureMode"); enumExpoMode.FromString("Timed"); // Get Node for ExposureTime IFloat floatExpoTime = nodeMap.GetNode <IFloat>("ExposureTime"); // Set Exposure time to 100,000 usec floatExpoTime.Value = 100000; // ============================================================================================================== dataStream.StartAcquisition(nCountOfImagesToGrab); device.AcquisitionStart(); while (dataStream.IsGrabbing) { using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000)) { if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent) { IStImage stImage = streamBuffer.GetIStImage(); string strText = device.GetIStDeviceInfo().DisplayName + " "; strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " "; strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS); wnd.SetUserStatusBarText(strText); if (!wnd.IsVisible) { wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight); wnd.Show(eStWindowMode.ModalessOnNewThread); } wnd.RegisterIStImage(stImage); } else { Console.WriteLine("Image data does not exist."); } } } device.AcquisitionStop(); dataStream.StopAcquisition(); } } catch (Exception e) { Console.Error.WriteLine("An exception occurred. \r\n" + e.Message); } finally { Console.WriteLine("\r\nPress Enter to exit."); Console.ReadLine(); } }
// This function queries an interface for its cameras and then prints // out device information. int QueryInterface(IManagedInterface managedInterface) { int result = 0; try { // // Retrieve TL nodemap from interface // // *** NOTES *** // Each interface has a nodemap that can be retrieved in order // to access information about the interface itself, any devices // connected, or addressing information if applicable. // INodeMap nodeMapInterface = managedInterface.GetTLNodeMap(); // // Print interface display name // // *** NOTES *** // Grabbing node information requires first retrieving the node // and then retrieving its information. There are two things to // keep in mind. First, a node is distinguished by type, which // is related to its value's data type. Second, nodes should be // checked for availability and readability/writability prior to // making an attempt to read from or write to them. // IString iInterfaceDisplayName = nodeMapInterface.GetNode <IString>("InterfaceDisplayName"); if (iInterfaceDisplayName != null && iInterfaceDisplayName.IsReadable) { string interfaceDisplayName = iInterfaceDisplayName.Value; Console.WriteLine("{0}", interfaceDisplayName); } else { Console.WriteLine("Interface display name not readable"); } // // Update list of cameras on the interface // // *** NOTES *** // Updating the cameras on each interface is especially important // if there has been any device arrivals or removals since the // last time UpdateCameras() was called. // managedInterface.UpdateCameras(); // // Retrieve list of cameras from the interface // // *** NOTES *** // Camera lists can be retrieved from an interface or the system // object. Camera lists retrieved from an interface, such as this // one, only return cameras attached on that specific interface // while camera lists retrieved from system returns all cameras // on all interfaces. // // *** LATER *** // Camera lists must be cleared manually. This must be done // prior to releasing the system and while the camera list is // still in scope. // List <IManagedCamera> camList = managedInterface.GetCameras(); // Return if no cameras detected if (camList.Count == 0) { Console.WriteLine("\tNo devices detected.\n"); return(0); } // Print device vendor and model name for each camera on the // interface for (int i = 0; i < camList.Count; i++) { // // Select camera // // *** NOTES *** // Each camera is retrieved from a camera list with an index. // If the index is out of range, an exception is thrown. // IManagedCamera cam = camList[i]; // Retrieve TL device nodemap; please see NodeMapInfo_CSharp // example for additional information on TL device nodemaps INodeMap nodeMapTLDevice = cam.GetTLDeviceNodeMap(); Console.Write("\tDevice {0} ", i); // Print device vendor name and device model name IString iDeviceVendorName = nodeMapTLDevice.GetNode <IString>("DeviceVendorName"); if (iDeviceVendorName != null && iDeviceVendorName.IsReadable) { String deviceVendorName = iDeviceVendorName.Value; Console.Write("{0} ", deviceVendorName); } IString iDeviceModelName = nodeMapTLDevice.GetNode <IString>("DeviceModelName"); if (iDeviceModelName != null && iDeviceModelName.IsReadable) { String deviceModelName = iDeviceModelName.Value; Console.WriteLine("{0}\n", deviceModelName); } // Dispose of managed camera cam.Dispose(); // // Clear camera list before losing scope // // *** NOTES *** // If a camera list (or an interface list) is not cleaned up // manually, the system will do so when the system is // released. 
// camList.Clear(); } } catch (SpinnakerException ex) { Console.WriteLine("Error " + ex.Message); result = -1; } return(result); }
static void Main(string[] args) { try { using (CStApiAutoInit api = new CStApiAutoInit()) using (CStSystem system = new CStSystem(eStSystemVendor.Sentech)) using (CStDevice device = system.CreateFirstStDevice()) using (CStImageDisplayWnd wnd = new CStImageDisplayWnd()) using (CStDataStream dataStream = device.CreateStDataStream(0)) { Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName); // ============================================================================================================== // Demostration of setting ROI to 640 x 480 with offset (100, 200). // Create NodeMap pointer for accessing parameters INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap(); // Get Node for Width IInteger intWidth = nodeMap.GetNode <IInteger>("Width"); // Set Width to 640 intWidth.Value = 640; // Get Node for Height IInteger intHeight = nodeMap.GetNode <IInteger>("Height"); // Set Height to 480 intHeight.Value = 480; // Get Node for Offset X IInteger intOffsetX = nodeMap.GetNode <IInteger>("OffsetX"); // Set Offset X to 100 intOffsetX.Value = 100; // Get Node for Offset Y IInteger intOffsetY = nodeMap.GetNode <IInteger>("OffsetY"); // Set Offset Y to 200 intOffsetY.Value = 200; // ============================================================================================================== dataStream.StartAcquisition(nCountOfImagesToGrab); device.AcquisitionStart(); while (dataStream.IsGrabbing) { using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000)) { if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent) { IStImage stImage = streamBuffer.GetIStImage(); string strText = device.GetIStDeviceInfo().DisplayName + " "; strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " "; strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS); wnd.SetUserStatusBarText(strText); if (!wnd.IsVisible) { wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight); wnd.Show(eStWindowMode.ModalessOnNewThread); } wnd.RegisterIStImage(stImage); } else { Console.WriteLine("Image data does not exist."); } } } device.AcquisitionStop(); dataStream.StopAcquisition(); } } catch (Exception e) { Console.Error.WriteLine("An exception occurred. \r\n" + e.Message); } finally { Console.WriteLine("\r\nPress Enter to exit."); Console.ReadLine(); } }
static void Main(string[] args) { try { using (CStApiAutoInit api = new CStApiAutoInit()) using (CStSystem system = new CStSystem(eStSystemVendor.Sentech)) using (CStDevice device = system.CreateFirstStDevice()) using (CStImageDisplayWnd wnd = new CStImageDisplayWnd()) using (CStDataStream dataStream = device.CreateStDataStream(0)) { Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName); // ============================================================================================================== // Demostration of changing acquisition frame rate (FPS) // Create NodeMap pointer for accessing parameters INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap(); // Switch off exposure auto. If exposure auto is on, FPS setting may not able to implemented. IEnum enumExposureAuto = nodeMap.GetNode <IEnum>("ExposureAuto"); enumExposureAuto.FromString("Off"); // Also switch Exposure Mode to Off for not letting exposure time to influence the actual FPS. IEnum enumExposureMode = nodeMap.GetNode <IEnum>("ExposureMode"); enumExposureMode.FromString("Off"); // Get Node for Acquisition Frame Rate then set FPS to 2. IFloat floatFPS = nodeMap.GetNode <IFloat>("AcquisitionFrameRate"); floatFPS.SetValue(2, false); // ============================================================================================================== dataStream.StartAcquisition(nCountOfImagesToGrab); device.AcquisitionStart(); while (dataStream.IsGrabbing) { using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000)) { if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent) { IStImage stImage = streamBuffer.GetIStImage(); string strText = device.GetIStDeviceInfo().DisplayName + " "; strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " "; strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS); wnd.SetUserStatusBarText(strText); if (!wnd.IsVisible) { wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight); wnd.Show(eStWindowMode.ModalessOnNewThread); } wnd.RegisterIStImage(stImage); } else { Console.WriteLine("Image data does not exist."); } } } device.AcquisitionStop(); dataStream.StopAcquisition(); } } catch (Exception e) { Console.Error.WriteLine("An exception occurred. \r\n" + e.Message); } finally { Console.WriteLine("\r\nPress Enter to exit."); Console.ReadLine(); } }
static void Main(string[] args) { try { using (CStApiAutoInit api = new CStApiAutoInit()) using (CStSystem system = new CStSystem(eStSystemVendor.Sentech)) using (CStDevice device = system.CreateFirstStDevice()) using (CStImageDisplayWnd wnd = new CStImageDisplayWnd()) using (CStDataStream dataStream = device.CreateStDataStream(0)) { Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName); // ============================================================================================================== // Demostration of set digital Gain to 2 times. // Create NodeMap pointer for accessing parameters INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap(); // Switch off gain auto. If gain auto is on, gain value cannot be set. IEnum enumGainAuto = nodeMap.GetNode <IEnum>("GainAuto"); enumGainAuto.FromString("Off"); // For setting digital gain, gain selector need to be set to DigitalAll to access digital gain. IEnum enumGainSelector = nodeMap.GetNode <IEnum>("GainSelector"); enumGainSelector.FromString("DigitalAll"); // Get Node for Gain IFloat floatGain = nodeMap.GetNode <IFloat>("Gain"); // Set digital gain to 2 times(128). floatGain.Value = 128; // ============================================================================================================== dataStream.StartAcquisition(nCountOfImagesToGrab); device.AcquisitionStart(); while (dataStream.IsGrabbing) { using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000)) { if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent) { IStImage stImage = streamBuffer.GetIStImage(); string strText = device.GetIStDeviceInfo().DisplayName + " "; strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " "; strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS); wnd.SetUserStatusBarText(strText); if (!wnd.IsVisible) { wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight); wnd.Show(eStWindowMode.ModalessOnNewThread); } wnd.RegisterIStImage(stImage); } else { Console.WriteLine("Image data does not exist."); } } } device.AcquisitionStop(); dataStream.StopAcquisition(); } } catch (Exception e) { Console.Error.WriteLine("An exception occurred. \r\n" + e.Message); } finally { Console.WriteLine("\r\nPress Enter to exit."); Console.ReadLine(); } }
static void Main(string[] args)
{
    try
    {
        using (CStApiAutoInit api = new CStApiAutoInit())
        using (CStSystem system = new CStSystem(eStSystemVendor.Sentech))
        using (CStDevice device = system.CreateFirstStDevice())
        using (CStImageDisplayWnd wnd = new CStImageDisplayWnd())
        using (CStDataStream dataStream = device.CreateStDataStream(0))
        {
            Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName);

            // ==============================================================================================================
            // Demonstration of setting the white balance ratios manually.

            // Create NodeMap pointer for accessing parameters
            INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap();

            // Switch BalanceWhiteAuto to Preset0 to allow manual input values
            IEnum enumBalanceWhiteAuto = nodeMap.GetNode<IEnum>("BalanceWhiteAuto");
            enumBalanceWhiteAuto.FromString("Preset0");

            // Switch the balance ratio selector to Red to access the Red ratio value
            IEnum enumBalanceRatioSelector = nodeMap.GetNode<IEnum>("BalanceRatioSelector");
            enumBalanceRatioSelector.FromString("Red");

            // Get Node for BalanceRatio
            IFloat floatBalanceRatio = nodeMap.GetNode<IFloat>("BalanceRatio");

            // Set BalanceRatio to 10
            floatBalanceRatio.Value = 10;

            // Switch the balance ratio selector to Blue to access the Blue ratio value
            enumBalanceRatioSelector.FromString("Blue");

            // Set BalanceRatio to 10
            floatBalanceRatio.Value = 10;

            // Switch the balance ratio selector to Green to access the Green ratio value
            enumBalanceRatioSelector.FromString("Green");

            // Set BalanceRatio to 10
            floatBalanceRatio.Value = 10;
            // ==============================================================================================================

            dataStream.StartAcquisition(nCountOfImagesToGrab);
            device.AcquisitionStart();
            while (dataStream.IsGrabbing)
            {
                using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000))
                {
                    if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent)
                    {
                        IStImage stImage = streamBuffer.GetIStImage();
                        string strText = device.GetIStDeviceInfo().DisplayName + " ";
                        strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " ";
                        strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS);
                        wnd.SetUserStatusBarText(strText);

                        if (!wnd.IsVisible)
                        {
                            wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight);
                            wnd.Show(eStWindowMode.ModalessOnNewThread);
                        }
                        wnd.RegisterIStImage(stImage);
                    }
                    else
                    {
                        Console.WriteLine("Image data does not exist.");
                    }
                }
            }
            device.AcquisitionStop();
            dataStream.StopAcquisition();
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("An exception occurred. \r\n" + e.Message);
    }
    finally
    {
        Console.WriteLine("\r\nPress Enter to exit.");
        Console.ReadLine();
    }
}
static void Main(string[] args)
{
    try
    {
        using (CStApiAutoInit api = new CStApiAutoInit())
        using (CStSystem system = new CStSystem(eStSystemVendor.Sentech))
        using (CStDevice device = system.CreateFirstStDevice())
        using (CStImageDisplayWnd wnd = new CStImageDisplayWnd())
        using (CStDataStream dataStream = device.CreateStDataStream(0))
        {
            Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName);

            // ==============================================================================================================
            // Demonstration of setting auto gain control with a dedicated range.

            // Create NodeMap pointer for accessing parameters
            INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap();

            // Switch on Gain Auto (IEnumeration).
            IEnum enumGainAuto = nodeMap.GetNode<IEnum>("GainAuto");
            enumGainAuto.FromString("Continuous");

            // Get Node for Auto Luminance Target (IInteger)
            IInteger intAutoLuminTgt = nodeMap.GetNode<IInteger>("AutoLuminanceTarget");

            // Set Auto Luminance Target to 128
            intAutoLuminTgt.Value = 128;

            // For setting analog gain, the gain selector needs to be set to AnalogAll to access analog gain.
            IEnum enumGainSelector = nodeMap.GetNode<IEnum>("GainSelector");
            enumGainSelector.FromString("AnalogAll");

            // Get Node for GainAutoLimitMin (IFloat).
            IFloat floatGainAutoMin = nodeMap.GetNode<IFloat>("GainAutoLimitMin");

            // Set Auto Gain Min to 2 dB (raw value 20).
            floatGainAutoMin.Value = 20;

            // Get Node for GainAutoLimitMax (IFloat).
            IFloat floatGainAutoMax = nodeMap.GetNode<IFloat>("GainAutoLimitMax");

            // Set Auto Gain Max to 10 dB (raw value 100).
            floatGainAutoMax.Value = 100;
            // ==============================================================================================================

            dataStream.StartAcquisition(nCountOfImagesToGrab);
            device.AcquisitionStart();
            while (dataStream.IsGrabbing)
            {
                using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000))
                {
                    if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent)
                    {
                        IStImage stImage = streamBuffer.GetIStImage();
                        string strText = device.GetIStDeviceInfo().DisplayName + " ";
                        strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " ";
                        strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS);
                        wnd.SetUserStatusBarText(strText);

                        if (!wnd.IsVisible)
                        {
                            wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight);
                            wnd.Show(eStWindowMode.ModalessOnNewThread);
                        }
                        wnd.RegisterIStImage(stImage);
                    }
                    else
                    {
                        Console.WriteLine("Image data does not exist.");
                    }
                }
            }
            device.AcquisitionStop();
            dataStream.StopAcquisition();
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("An exception occurred. \r\n" + e.Message);
    }
    finally
    {
        Console.WriteLine("\r\nPress Enter to exit.");
        Console.ReadLine();
    }
}
/// <summary>
/// AcquisitionMode: Continuous/SingleFrame/MultiFrame
/// </summary>
/// <param name="strMode"></param>
public override bool SetAcquisitionMode(string strMode)
{
    bool ret = false;
    try
    {
        IEnum iAcquisitionMode = m_NodeMap.GetNode<IEnum>("AcquisitionMode");
        if (iAcquisitionMode == null || !iAcquisitionMode.IsWritable)
        {
            return ret;
        }

        // Retrieve entry node from enumeration node
        IEnumEntry iAcquisitionModeEntry;
        switch (strMode)
        {
            case "Continuous":
                iAcquisitionModeEntry = iAcquisitionMode.GetEntryByName("Continuous");
                break;
            case "SingleFrame":
                iAcquisitionModeEntry = iAcquisitionMode.GetEntryByName("SingleFrame");
                break;
            case "MultiFrame":
                iAcquisitionModeEntry = iAcquisitionMode.GetEntryByName("MultiFrame");
                break;
            default:
                return ret;
        }

        if (iAcquisitionModeEntry == null || !iAcquisitionModeEntry.IsReadable)
        {
            return ret;
        }

        // Set symbolic from entry node as new value for enumeration node
        iAcquisitionMode.Value = iAcquisitionModeEntry.Symbolic;
        ret = true;
    }
    catch (Exception ex)
    {
        LogHelper.AppLoger.Error(ex);
    }
    return ret;
}
static void Main(string[] args)
{
    try
    {
        using (CStApiAutoInit api = new CStApiAutoInit())
        using (CStSystem system = new CStSystem(eStSystemVendor.Sentech))
        using (CStDevice device = system.CreateFirstStDevice())
        using (CStImageDisplayWnd wnd = new CStImageDisplayWnd())
        using (CStDataStream dataStream = device.CreateStDataStream(0))
        {
            Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName);

            // ==============================================================================================================
            // Demonstration of switching software trigger ON

            // Create NodeMap pointer for accessing parameters
            INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap();

            // Switch on Trigger Mode (IEnumeration).
            IEnum enumTriggerMode = nodeMap.GetNode<IEnum>("TriggerMode");
            enumTriggerMode.FromString("On");

            // Set Trigger Source to Software
            IEnum enumTriggerSource = nodeMap.GetNode<IEnum>("TriggerSource");
            enumTriggerSource.FromString("Software");

            // Prepare the Software Trigger command for calling later
            ICommand cmdTriggerSoftware = nodeMap.GetNode<ICommand>("TriggerSoftware");
            // ==============================================================================================================

            dataStream.StartAcquisition(nCountOfImagesToGrab);
            device.AcquisitionStart();
            while (dataStream.IsGrabbing)
            {
                // ===============================================================================
                // Demonstration of sending a software trigger
                cmdTriggerSoftware.Execute();
                Console.WriteLine("Software Trigger Sent.");
                // ===============================================================================

                using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000))
                {
                    if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent)
                    {
                        IStImage stImage = streamBuffer.GetIStImage();
                        string strText = device.GetIStDeviceInfo().DisplayName + " ";
                        strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " ";
                        strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS);
                        wnd.SetUserStatusBarText(strText);

                        if (!wnd.IsVisible)
                        {
                            wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight);
                            wnd.Show(eStWindowMode.ModalessOnNewThread);
                        }
                        wnd.RegisterIStImage(stImage);
                    }
                    else
                    {
                        Console.WriteLine("Image data does not exist.");
                    }
                }
            }
            device.AcquisitionStop();
            dataStream.StopAcquisition();

            // ==============================================================================================================
            // Switch software trigger OFF after use

            // Switch off Trigger Mode (IEnumeration) after acquiring.
            enumTriggerMode.FromString("Off");
            // ==============================================================================================================
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("An exception occurred. \r\n" + e.Message);
    }
    finally
    {
        Console.WriteLine("\r\nPress Enter to exit.");
        Console.ReadLine();
    }
}
static void Main(string[] args) { try { using (CStApiAutoInit api = new CStApiAutoInit()) using (CStSystem system = new CStSystem(eStSystemVendor.Sentech)) using (CStDevice device = system.CreateFirstStDevice()) using (CStImageDisplayWnd wnd = new CStImageDisplayWnd()) using (CStDataStream dataStream = device.CreateStDataStream(0)) { Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName); // ============================================================================================================== // Demostration of PixelFormat change. // Create NodeMap pointer for accessing parameters INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap(); // Set Pixel Format to Mono8. IEnum enumPixelFormat = nodeMap.GetNode <IEnum>("PixelFormat"); enumPixelFormat.FromString("Mono8"); // ============================================================================================================== dataStream.StartAcquisition(nCountOfImagesToGrab); device.AcquisitionStart(); while (dataStream.IsGrabbing) { using (CStStreamBuffer streamBuffer = dataStream.RetrieveBuffer(5000)) { if (streamBuffer.GetIStStreamBufferInfo().IsImagePresent) { IStImage stImage = streamBuffer.GetIStImage(); string strText = device.GetIStDeviceInfo().DisplayName + " "; strText += stImage.ImageWidth + " x " + stImage.ImageHeight + " "; strText += string.Format("{0:F2}[fps]", dataStream.CurrentFPS); wnd.SetUserStatusBarText(strText); if (!wnd.IsVisible) { wnd.SetPosition(0, 0, (int)stImage.ImageWidth, (int)stImage.ImageHeight); wnd.Show(eStWindowMode.ModalessOnNewThread); } wnd.RegisterIStImage(stImage); } else { Console.WriteLine("Image data does not exist."); } } } device.AcquisitionStop(); dataStream.StopAcquisition(); } } catch (Exception e) { Console.Error.WriteLine("An exception occurred. \r\n" + e.Message); } finally { Console.WriteLine("\r\nPress Enter to exit."); Console.ReadLine(); } }
// Code below is directly copied from example_acquisition
// This function acquires and saves 10 images from a device.
public int AcquireImages(IManagedCamera cam, INodeMap nodeMap, INodeMap nodeMapTLDevice)
{
    int result = 0;

    writeLog(String.Format("\n*** IMAGE ACQUISITION ***\n\n"));

    try
    {
        //
        // Set acquisition mode to continuous
        //
        // *** NOTES ***
        // Because the example acquires and saves 10 images, setting
        // acquisition mode to continuous lets the example finish. If
        // set to single frame or multiframe (at a lower number of
        // images), the example would just hang. This is because the
        // example has been written to acquire 10 images while the
        // camera would have been programmed to retrieve less than that.
        //
        // Setting the value of an enumeration node is slightly more
        // complicated than other node types. Two nodes are required:
        // first, the enumeration node is retrieved from the nodemap and
        // second, the entry node is retrieved from the enumeration node.
        // The symbolic of the entry node is then set as the new value
        // of the enumeration node.
        //
        // Notice that both the enumeration and entry nodes are checked
        // for availability and readability/writability. Enumeration
        // nodes are generally readable and writable whereas entry
        // nodes are only ever readable.
        //
        // Retrieve enumeration node from nodemap
        IEnum iAcquisitionMode = nodeMap.GetNode<IEnum>("AcquisitionMode");
        if (iAcquisitionMode == null || !iAcquisitionMode.IsWritable)
        {
            writeLog(String.Format("Unable to set acquisition mode to continuous (node retrieval). Aborting...\n\n"));
            return -1;
        }

        // Retrieve entry node from enumeration node
        IEnumEntry iAcquisitionModeContinuous = iAcquisitionMode.GetEntryByName("Continuous");
        if (iAcquisitionModeContinuous == null || !iAcquisitionModeContinuous.IsReadable)
        {
            writeLog(String.Format("Unable to set acquisition mode to continuous (enum entry retrieval). Aborting...\n\n"));
            return -1;
        }

        // Set symbolic from entry node as new value for enumeration node
        iAcquisitionMode.Value = iAcquisitionModeContinuous.Symbolic;

        writeLog(String.Format("Acquisition mode set to continuous...\n"));

        //
        // Begin acquiring images
        //
        // *** NOTES ***
        // What happens when the camera begins acquiring images depends
        // on which acquisition mode has been set. Single frame captures
        // only a single image, multi frame captures a set number of
        // images, and continuous captures a continuous stream of images.
        // Because the example calls for the retrieval of 10 images,
        // continuous mode has been set for the example.
        //
        // *** LATER ***
        // Image acquisition must be ended when no more images are needed.
        //
        cam.BeginAcquisition();

        writeLog(String.Format("Acquiring images...\n"));

        //
        // Retrieve device serial number for filename
        //
        // *** NOTES ***
        // The device serial number is retrieved in order to keep
        // different cameras from overwriting each other's images.
        // Grabbing image IDs and frame IDs make good alternatives for
        // this purpose.
        //
        String deviceSerialNumber = "";

        IString iDeviceSerialNumber = nodeMapTLDevice.GetNode<IString>("DeviceSerialNumber");
        if (iDeviceSerialNumber != null && iDeviceSerialNumber.IsReadable)
        {
            deviceSerialNumber = iDeviceSerialNumber.Value;
            writeLog(String.Format("Device serial number retrieved as {0}...\n", deviceSerialNumber));
        }
        writeLog(String.Format("\n"));

        // Retrieve, convert, and save images
        const int NumImages = 10;

        for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
        {
            try
            {
                //
                // Retrieve next received image
                //
                // *** NOTES ***
                // Capturing an image houses images on the camera buffer.
                // Trying to capture an image that does not exist will
                // hang the camera.
                //
                // Using-statements help ensure that images are released.
                // If too many images remain unreleased, the buffer will
                // fill, causing the camera to hang. Images can also be
                // released manually by calling Release().
                //
                using (IManagedImage rawImage = cam.GetNextImage())
                {
                    //
                    // Ensure image completion
                    //
                    // *** NOTES ***
                    // Images can easily be checked for completion. This
                    // should be done whenever a complete image is
                    // expected or required. Alternatively, check image
                    // status for a little more insight into what
                    // happened.
                    //
                    if (rawImage.IsIncomplete)
                    {
                        writeLog(String.Format("Image incomplete with image status {0}...\n", rawImage.ImageStatus));
                    }
                    else
                    {
                        //
                        // Print image information; width and height
                        // recorded in pixels
                        //
                        // *** NOTES ***
                        // Images have quite a bit of available metadata
                        // including CRC, image status, and offset
                        // values to name a few.
                        //
                        uint width = rawImage.Width;
                        uint height = rawImage.Height;

                        writeLog(String.Format("Grabbed image {0}, width = {1}, height = {2}\n", imageCnt, width, height));
                        writeLog(String.Format("Pixel format is {0}\n", rawImage.PixelFormatName));

                        //
                        // Convert image to mono 8
                        //
                        // *** NOTES ***
                        // Images can be converted between pixel formats
                        // by using the appropriate enumeration value.
                        // Unlike the original image, the converted one
                        // does not need to be released as it does not
                        // affect the camera buffer.
                        //
                        // Using statements are a great way to ensure code
                        // stays clean and avoids memory leaks.
                        //
                        using (IManagedImage convertedImage = rawImage.Convert(PixelFormatEnums.Mono8))
                        {
                            // Create a unique filename
                            String filename = "Acquisition-CSharp-";
                            if (deviceSerialNumber != "")
                            {
                                filename = filename + deviceSerialNumber + "-";
                            }
                            filename = filename + imageCnt + ".jpg";

                            //
                            // Save image
                            //
                            // *** NOTES ***
                            // The standard practice of the examples is
                            // to use device serial numbers to keep
                            // images of one device from overwriting
                            // those of another.
                            //
                            convertedImage.Save(filename);

                            writeLog(String.Format("Image saved at {0}\n\n", filename));
                        }
                    }
                }
            }
            catch (SpinnakerException ex)
            {
                writeLog(String.Format("Error: {0}\n", ex.Message));
                result = -1;
            }
        }

        //
        // End acquisition
        //
        // *** NOTES ***
        // Ending acquisition appropriately helps ensure that devices
        // clean up properly and do not need to be power-cycled to
        // maintain integrity.
        //
        cam.EndAcquisition();
    }
    catch (SpinnakerException ex)
    {
        writeLog(String.Format("Error: {0}\n", ex.Message));
        result = -1;
    }

    return result;
}
//void SetCameraVideoModeAndFrameRate(VideoMode newVideoMode, FrameRate newFrameRate) //{ // bool restartCapture = true; // try // { // camera.StopCapture(); // } // catch (FC2Exception ex) // { // if (ex.Type != ErrorType.IsochNotStarted) // { // throw; // } // else // restartCapture = false; // } // try // { // camera.SetVideoModeAndFrameRate(newVideoMode, newFrameRate); // } // catch (FC2Exception /*ex*/) // { // throw; // } // if (restartCapture) // { // camera.StartCapture(); // } //} //void SetAbsolutePropertyValue(PropertyType property, float newValue) //{ // CameraProperty camProp = camera.GetProperty(property); // CameraPropertyInfo propInfo = camera.GetPropertyInfo(property); // if (!camProp.autoManualMode && propInfo.manualSupported && propInfo.absValSupported) // { // float difference = camProp.absValue - newValue; // if (difference != 0) // { // // The brightness abs register sometimes starts drifting // // due to a rounding error between the camera and the // // actual value being held by the adjustment. To prevent // // this, only apply the change to the camera if the // // difference is greater than a specified amount. // // Check if the difference is greater than 0.005f. // if (property != PropertyType.Brightness || // Math.Abs(difference) > 0.005f) // { // camProp.absControl = true; // camProp.absValue = newValue; // camera.SetProperty(camProp); // } // } // } // else // { // throw new ApplicationException("Trying to set a property that cannot be adjusted"); // } //} public void SetAbsolutePropertyValue(string property, string newValue) { try { if (property == "Hue") { IFloat hue = nodeMap.GetNode <IFloat>("Hue"); hue.Value = Convert.ToDouble(newValue); } else if (property == "Gamma") { IFloat gamma = nodeMap.GetNode <IFloat>("Gamma"); gamma.Value = Convert.ToDouble(newValue); } else if (property == "Width") { IInteger width = nodeMap.GetNode <IInteger>("Width"); width.Value = Convert.ToInt32(newValue); } else if (property == "Height") { IInteger height = nodeMap.GetNode <IInteger>("Height"); height.Value = Convert.ToInt32(newValue); } else if (property == "Gain") { IEnum gainAuto = nodeMap.GetNode <IEnum>("GainAuto"); gainAuto.Value = "Off"; IFloat gainValue = nodeMap.GetNode <IFloat>("Gain"); gainValue.Value = Convert.ToDouble(newValue); } else if (property == "Saturation") { IEnum saturationAuto = nodeMap.GetNode <IEnum>("SaturationAuto"); saturationAuto.Value = "Off"; IFloat saturationValue = nodeMap.GetNode <IFloat>("Saturation"); saturationValue.Value = Convert.ToDouble(newValue); } else if (property == "Binning") { IInteger binningValue = nodeMap.GetNode <IInteger>("BinningVertical"); binningValue.Value = Convert.ToInt32(newValue); } else if (property == "FrameRate") { IEnum frameRateAuto = nodeMap.GetNode <IEnum>("AcquisitionFrameRateAuto"); frameRateAuto.Value = "Off"; IFloat frameRateValue = nodeMap.GetNode <IFloat>("AcquisitionFrameRate"); frameRateValue.Value = Convert.ToDouble(newValue); } else if (property == "PixelFormat") { IEnum pixelFormat = nodeMap.GetNode <IEnum>("PixelFormat"); IEnumEntry pixelFormatItem = pixelFormat.GetEntryByName(newValue); if (pixelFormatItem?.IsReadable == true) { pixelFormat.Value = pixelFormatItem.Symbolic; } } else if (property == "VideoMode") { IEnum acquisitionMode = nodeMap.GetNode <IEnum>("AcquisitionMode"); if (acquisitionMode?.IsWritable == true) { IEnumEntry acquisitionModeItem = acquisitionMode.GetEntryByName(newValue); if (acquisitionModeItem?.IsReadable == true) { acquisitionMode.Value = acquisitionModeItem.Symbolic; 
} else { Debug.WriteLine("Error: SetAbsolutePropertyValue for " + property); } } else { Debug.WriteLine("Error: SetAbsolutePropertyValue for " + property); } } else if (property == "ShutterMode") { IEnum exposureMode = nodeMap.GetNode <IEnum>("ExposureMode"); if (exposureMode?.IsWritable == true) { IEnumEntry exposureModeItem = exposureMode.GetEntryByName(newValue); if (exposureModeItem?.IsReadable == true) { exposureMode.Value = exposureModeItem.Symbolic; } else { Debug.WriteLine("Error: SetAbsolutePropertyValue for " + property); } } else { Debug.WriteLine("Error: SetAbsolutePropertyValue for " + property); } } else if (property == "StreamBufferMode") { INodeMap nodeMapStream = camera.GetTLStreamNodeMap(); IEnum bufferMode = nodeMapStream.GetNode <IEnum>("StreamBufferHandlingMode"); if (bufferMode?.IsWritable == true) { IEnumEntry bufferModeItem = bufferMode.GetEntryByName(newValue); if (bufferModeItem?.IsReadable == true) { bufferMode.Value = bufferModeItem.Symbolic; } else { Debug.WriteLine("Error: SetAbsolutePropertyValue for " + property); } } else { Debug.WriteLine("Error: SetAbsolutePropertyValue for " + property); } } else if (property == "ExposureCompensation") { IFloat expoCompensation = nodeMap.GetNode <IFloat>("pgrExposureCompensation"); expoCompensation.Value = Convert.ToDouble(newValue); } else { Debug.WriteLine("Error: SetAbsolutePropertyValue for " + property + " not implemented."); } } catch (SpinnakerException e) { Debug.WriteLine("Error: SetAbsolutePropertyValue for " + property + " exceptoin: " + e.Message); } }
static void Main(string[] args)
{
    try
    {
        using (CStApiAutoInit api = new CStApiAutoInit())
        using (CStSystem system = new CStSystem(eStSystemVendor.Sentech))
        using (CStDevice device = system.CreateFirstStDevice())
        using (CStImageDisplayWnd wnd = new CStImageDisplayWnd())
        using (CStDataStream dataStream = device.CreateStDataStream(0))
        {
            Console.WriteLine("Device=" + device.GetIStDeviceInfo().DisplayName);

            // ==============================================================================================================
            // Save the current settings to UserSet1 of the camera and make it the default set loaded at power-on.
            // Note that saving to a UserSet is only possible while the camera is not acquiring.

            // Create NodeMap pointer for accessing parameters
            INodeMap nodeMap = device.GetRemoteIStPort().GetINodeMap();

            // Select which UserSet to save the settings to (UserSet1)
            IEnum enumUserSetSelector = nodeMap.GetNode<IEnum>("UserSetSelector");
            enumUserSetSelector.FromString("UserSet1");

            // Acquire and execute the command that saves the settings to the UserSet
            ICommand cmdSaveToUserSet = nodeMap.GetNode<ICommand>("UserSetSave");
            cmdSaveToUserSet.Execute();
            Console.WriteLine("Saving the current settings to UserSet1 succeeded.");

            // Set UserSetDefault to UserSet1 so this setting is used when the camera powers on.
            IEnum enumUserSetDefault = nodeMap.GetNode<IEnum>("UserSetDefault");
            enumUserSetDefault.FromString("UserSet1");
            Console.WriteLine("Setting UserSetDefault to UserSet1 succeeded.");
            // ==============================================================================================================
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("An exception occurred. \r\n" + e.Message);
    }
    finally
    {
        Console.WriteLine("\r\nPress Enter to exit.");
        Console.ReadLine();
    }
}
// This function prints the device information of the camera from the
// transport layer; please see NodeMapInfo_CSharp example for more
// in-depth comments on printing device information from the nodemap.
public int PrintDeviceInfo(INodeMap nodeMap)
{
    int result = 0;

    try
    {
        writeLog(String.Format("\n*** DEVICE INFORMATION ***\n"));

        ICategory category = nodeMap.GetNode<ICategory>("DeviceInformation");
        if (category != null && category.IsReadable)
        {
            for (int i = 0; i < category.Children.Length; i++)
            {
                writeLog(String.Format("{0}: {1}\n",
                    category.Children[i].Name,
                    (category.Children[i].IsReadable ? category.Children[i].ToString() : "Node not available")));
            }
            writeLog(String.Format("\n"));
        }
        else
        {
            writeLog(String.Format("Device control information not available.\n"));
        }
    }
    catch (SpinnakerException ex)
    {
        writeLog(String.Format("Error: {0}\n", ex.Message));
        result = -1;
    }

    return result;
}
/// <summary>Throws Spinnaker Exception</summary>
/// <returns>The property node.</returns>
protected bool GetNode(ref NodeType newNode)
{
    try
    {
        if (camera == null)
        {
            throw new SpinnakerException("Camera is null.");
        }

        INodeMap map = camera.GetNodeMap();
        if (map == null)
        {
            throw new SpinnakerException("Could not retrieve node map.");
        }

        newNode = map.GetNode<NodeType>(NodeName);
        if (newNode == null)
        {
            return false;
        }
        return true;
    }
    catch (SpinnakerException ex)
    {
        Console.Error.WriteLine("Unable to retrieve node: " + ex.Message);
        return false;
    }
}
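An illustrative call site for the generic helper above (an assumption, not from the original source): the enclosing property wrapper is assumed to be instantiated with NodeType = IFloat and NodeName = "ExposureTime", so GetNode fills in the strongly typed node before its value is read.

// Hypothetical caller inside the same wrapper class (NodeType assumed to be IFloat here).
IFloat exposureTimeNode = null;
if (GetNode(ref exposureTimeNode))
{
    Console.WriteLine("ExposureTime = {0} us", exposureTimeNode.Value);
}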