/// <summary>
/// Opens the uEye camera with device ID 1, disables PWM output and allocates image memory.
/// </summary>
/// <returns>
/// A CameraResults whose Status is "Initialized" on success, or "Error" with an
/// ErrorCode of "0x01" (init failed) or "0x02" (memory allocation failed).
/// </returns>
public static CameraResults InitializeCamera()
{
    Camera = new uEye.Camera();
    CameraStatus = new CameraResults();

    statusRet = Camera.Init(1);
    if (statusRet != uEye.Defines.Status.Success)
    {
        CameraStatus.ErrorCode = "0x01";
        CameraStatus.Status = "Error";
        // Fixed the grammar of the user-facing error message.
        CameraStatus.ErrorDescription = "The uEye camera could not be initialized. Please check the camera connection and try again!";
        return CameraStatus;
    }

    // NOTE(review): the SetMode return value is overwritten below without being
    // checked — presumably a PWM failure is considered non-fatal; confirm.
    statusRet = Camera.IO.Pwm.SetMode(0);

    statusRet = Camera.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        CameraStatus.ErrorCode = "0x02";
        CameraStatus.Status = "Error";
        // Fixed the grammar of the user-facing error message.
        CameraStatus.ErrorDescription = "Memory could not be allocated for the uEye camera. Please check the camera connection and try again!";
        return CameraStatus;
    }

    CameraStatus.ErrorCode = "";
    CameraStatus.Status = "Initialized";
    CameraStatus.ErrorDescription = "";
    return CameraStatus;
}
/// <summary>
/// Prepares the uEye camera for a one-shot thumbnail capture: hooks the frame
/// and device-removal events, opens the device by ID and allocates its buffer.
/// </summary>
public SnapshotRetriever(CameraSummary summary, long deviceId)
{
    this.summary = summary;

    camera.EventFrame += camera_EventFrame;
    camera.EventDeviceRemove += camera_EventDeviceRemove;
    camera.EventDeviceUnPlugged += camera_EventDeviceUnPlugged;

    try
    {
        Int32 initFlags = (Int32)deviceId | (Int32)uEye.Defines.DeviceEnumeration.UseDeviceID;
        if (camera.Init(initFlags) != uEye.Defines.Status.SUCCESS)
        {
            log.ErrorFormat("Camera {0} could not be opened for thumbnail capture.", summary.Alias);
            return;
        }

        // We do not load the camera-specific profile for the thumbnail at the moment.
        // For some reason the .ToBitmap method doesn't work well on the RGB32 format, so in order to at least have something we
        // load the camera on the default profile for the thumbnail.
        //ProfileHelper.Load(camera, ProfileHelper.GetProfileFilename(summary.Identifier));

        if (camera.Memory.Allocate() != uEye.Defines.Status.SUCCESS)
        {
            log.ErrorFormat("Camera {0} could not have its buffer allocated for thumbnail capture.", summary.Alias);
            return;
        }
    }
    catch (Exception e)
    {
        LogError(e, "");
    }
}
// Function to initialize the camera: opens the device by ID, allocates image
// memory, optionally hooks the mirror trigger, and resets the frame counters.
private uEye.Defines.Status CameraInit(Int32 camID)
{
    // Open the device by its ID.
    uEye.Defines.Status result = m_Camera.Init(camID | (Int32)uEye.Defines.DeviceEnumeration.UseDeviceID);
    if (result != uEye.Defines.Status.SUCCESS)
    {
        MessageBox.Show("Initializing the Camera Failed");
        return result;
    }

    result = m_Camera.Memory.Allocate();
    if (result != uEye.Defines.Status.SUCCESS)
    {
        MessageBox.Show("Allocating Memory Failed");
        return result;
    }

    // If we have to move the mirrors: trigger when a new frame is captured, so
    // that the mirrors are not moved during frame acquisition.
    if (pulsePal != null)
    {
        m_Camera.EventFirstPacket += TriggerMirror;
    }

    // Reset the frame count.
    m_s32FrameCoutTotal = 0;
    m_Camera.Video.ResetCount();

    return result;
}
/// <summary>
/// Initializes the camera: opens the first available device, allocates image
/// memory and starts live capture.
/// </summary>
/// <returns>true on success; false otherwise (the reason is recorded via SetError).</returns>
public override bool Init()
{
    camera = new uEye.Camera();

    // Open the first available camera.
    statusRet = camera.Init();
    if (statusRet != uEye.Defines.Status.Success)
    {
        SetError("Camera initializing failed");
        return false;
    }

    // Allocate the image memory.
    statusRet = camera.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        SetError("Allocate Memory failed");
        return false;
    }

    // Start live capture.
    if (camera.Acquisition.Capture() != uEye.Defines.Status.Success)
    {
        SetError("Capture live show from camera failed!");
        return false;
    }

    return true;
}
/// <summary>
/// Opens the camera (by device ID when given, otherwise the first available
/// one), reads the exposure range, configures software trigger and Mono8
/// pixels, allocates image memory and, when a display handle exists, hooks
/// the frame event.
/// </summary>
/// <param name="deviceId">Device ID to open, or -1 for the first available camera.</param>
/// <exception cref="Exception">Thrown when opening or memory allocation fails.</exception>
private void InitCamera(int deviceId = -1)
{
    _camera = new uEye.Camera();

    // Open the camera, by explicit device ID when one was supplied.
    uEye.Defines.Status stat =
        deviceId != -1
            ? _camera.Init(deviceId | (Int32)uEye.Defines.DeviceEnumeration.UseDeviceID)
            : _camera.Init();

    if (stat != uEye.Defines.Status.Success)
    {
        throw new Exception("Camera initializing failed");
    }

    // Query the valid exposure range (min / max / increment).
    this._camera.Timing.Exposure.GetRange(out _minExp, out _maxExp, out _incExp);

    // Default gain.
    this.MasterGain = 100;

    // Software trigger mode + 8-bit monochrome pixel format.
    this._camera.Trigger.Set(uEye.Defines.TriggerMode.Software);
    this._camera.PixelFormat.Set(uEye.Defines.ColorMode.Mono8);

    // Allocate the image memory.
    stat = _camera.Memory.Allocate();
    if (stat != uEye.Defines.Status.Success)
    {
        throw new Exception("Allocate Memory failed");
    }

    // Only hook the frame event when there is a display to draw into.
    if (this._displayHandle != IntPtr.Zero)
    {
        _camera.EventFrame += onFrameEvent;
    }
}
// FUNCTION: InitCamera: Initialize the Camera
//
// Opens the first available camera, selects the image format, chooses the
// colour mode from the sensor type, allocates memory and starts live video.
// Any fatal failure shows a message box and terminates the process.
private void InitCamera()
{
    cam = new uEye.Camera();
    uEye.Defines.Status statusRet = 0;

    // Open Camera
    statusRet = cam.Init();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Camera initializing failed");
        Environment.Exit(-1);
    }

    // BUGFIX: the image format was previously set BEFORE cam.Init(), i.e. on an
    // unopened camera where the call cannot take effect. It now runs after Init.
    cam.Size.ImageFormat.Set(7); // format ID 7 — TODO confirm the intended preset

    // Set Colour Mode according to the sensor type.
    uEye.Types.SensorInfo SensorInfo;
    statusRet = cam.Information.GetSensorInfo(out SensorInfo);
    if (SensorInfo.SensorColorMode == uEye.Defines.SensorColorMode.Bayer)
    {
        statusRet = cam.PixelFormat.Set(uEye.Defines.ColorMode.BGR8Packed);
    }
    else
    {
        // Monochrome sensors are not supported by this application.
        statusRet = cam.PixelFormat.Set(uEye.Defines.ColorMode.Mono8);
        MessageBox.Show("Black and white?!");
        Environment.Exit(-1);
    }

    // Allocate Memory
    statusRet = cam.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Allocate Memory failed");
        Environment.Exit(-1);
    }

    SetCameraParameters();

    // Start Live Video
    statusRet = cam.Acquisition.Capture();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Start Live Video failed");
        Environment.Exit(-1);
    }

    // Connect video to window
    cam.EventFrame += onFrameEvent;
}
/// <summary>
/// Opens the first available camera, switches it to software trigger, selects
/// an image format from the camera's supported list and allocates image memory.
/// A fatal failure terminates the process.
/// </summary>
public void InitCamera()
{
    Camera = new uEye.Camera();
    uEye.Defines.Status statusRet = 0;

    // Open Camera
    statusRet = Camera.Init();
    if (statusRet != uEye.Defines.Status.Success)
    {
        //MessageBox.Show("Camera initializing failed");
        Environment.Exit(-1);
    }

    Camera.Trigger.Set(uEye.Defines.TriggerMode.Software);

    // Query the image formats this camera supports.
    uEye.Types.ImageFormatInfo[] FormatInfoList;
    Camera.Size.ImageFormat.GetList(out FormatInfoList);

    // BUGFIX: the format index was hard-coded to 10 with no bounds check, which
    // throws IndexOutOfRangeException on cameras exposing fewer formats.
    // Prefer index 10 (the original choice) but fall back to the last entry.
    if (FormatInfoList != null && FormatInfoList.Length > 0)
    {
        int formatIndex = Math.Min(10, FormatInfoList.Length - 1);
        Camera.Size.ImageFormat.Set((uint)FormatInfoList[formatIndex].FormatID);
    }

    // Allocate Memory
    statusRet = Camera.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        // MessageBox.Show("Allocate Memory failed");
        Environment.Exit(-1);
    }

    // Connect Event
    Camera.EventFrame += onFrameEvent;
}
/// <summary>
/// Opens camera 0, sets up the sequence image buffers, starts live video and
/// wires the frame / auto-brightness events and the auto-feature checkboxes.
/// A fatal failure terminates the process.
/// </summary>
private void InitCamera()
{
    m_Camera = new uEye.Camera();

    // Open Camera
    uEye.Defines.Status statusRet = m_Camera.Init(0);
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Camera initializing failed");
        Environment.Exit(-1);
    }

    // Allocate the sequence image buffers.
    statusRet = AllocImageMems();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Allocate Memory failed");
        Environment.Exit(-1);
    }

    statusRet = InitSequence();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Add to sequence failed");
        Environment.Exit(-1);
    }

    // Start Live Video; a failure here is reported but not fatal.
    if (m_Camera.Acquisition.Capture() == uEye.Defines.Status.Success)
    {
        m_bLive = true;
    }
    else
    {
        MessageBox.Show("Start Live Video failed");
    }

    // Connect Events
    m_Camera.EventFrame += onFrameEvent;
    m_Camera.EventAutoBrightnessFinished += onAutoShutterFinished;

    // Enable the auto-feature checkboxes only when the camera supports them.
    CB_Auto_Gain_Balance.Enabled = m_Camera.AutoFeatures.Software.Gain.Supported;
    CB_Auto_White_Balance.Enabled = m_Camera.AutoFeatures.Software.WhiteBalance.Supported;
}
// Shows the camera chooser dialog and, when the user confirms, opens the
// selected camera on the picture box, sets up the sequence buffers, hooks the
// frame event and primes the status bar UI.
private uEye.Defines.Status initCamera()
{
    uEye.Defines.Status statusRet = uEye.Defines.Status.NO_SUCCESS;

    CameraChoose chooseForm = new CameraChoose();
    if (chooseForm.ShowDialog() != DialogResult.OK)
    {
        // User cancelled: report the initial NO_SUCCESS.
        return statusRet;
    }

    statusRet = m_Camera.Init(chooseForm.DeviceID | (Int32)uEye.Defines.DeviceEnumeration.UseDeviceID, pictureBoxDisplay.Handle);
    if (statusRet != uEye.Defines.Status.SUCCESS)
    {
        MessageBox.Show("Initializing the camera failed");
        return statusRet;
    }

    statusRet = MemoryHelper.AllocImageMems(m_Camera, m_cnNumberOfSeqBuffers);
    if (statusRet != uEye.Defines.Status.SUCCESS)
    {
        MessageBox.Show("Allocating memory failed");
        return statusRet;
    }

    statusRet = MemoryHelper.InitSequence(m_Camera);
    if (statusRet != uEye.Defines.Status.SUCCESS)
    {
        MessageBox.Show("Add to sequence failed");
        return statusRet;
    }

    // set event
    m_Camera.EventFrame += onFrameEvent;

    // reset framecount
    m_s32FrameCount = 0;

    // start update timer for our statusbar
    m_UpdateTimer.Start();

    uEye.Types.SensorInfo sensorInfo;
    m_Camera.Information.GetSensorInfo(out sensorInfo);

    pictureBoxDisplay.SizeMode = PictureBoxSizeMode.Normal;
    toolStripStatusLabelCamera.Text = sensorInfo.SensorName;

    return statusRet;
}
// Opens the camera by its stored device ID (no display window), configures
// JPEG compression at maximum quality with BGR8 colour conversion, selects the
// photo image format and hooks the frame event.
private uEye.Defines.Status initCamera()
{
    uEye.Defines.Status statusRet =
        m_Camera.Init(m_DeviceID | (Int32)uEye.Defines.DeviceEnumeration.UseDeviceID, IntPtr.Zero);
    if (statusRet != uEye.Defines.Status.SUCCESS)
    {
        MessageBox.Show("Initializing the camera failed");
        return statusRet;
    }

    // Maximum JPEG quality.
    new uEye.DeviceFeatureJpegCompression(m_Camera).Set(100);

    // Convert frames to BGR8 for the JPEG pipeline.
    new uEye.ColorConverter(m_Camera).Set(uEye.Defines.ColorMode.BGR8Packed, uEye.Defines.ColorConvertMode.Jpeg);

    SetImageFormat(ImageFormat.Foto);

    // set event
    m_Camera.EventFrame += onFrameEvent;

    Ready = true;
    return statusRet;
}
/// <summary>
/// Opens the first available camera, allocates image memory, starts live video
/// and wires the frame event and the auto-feature checkboxes. A fatal failure
/// terminates the process.
/// </summary>
private void InitCamera()
{
    Camera = new uEye.Camera();

    // Open Camera
    uEye.Defines.Status statusRet = Camera.Init();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Camera initializing failed");
        Environment.Exit(-1);
    }

    // Allocate Memory
    statusRet = Camera.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Allocate Memory failed");
        Environment.Exit(-1);
    }

    // Start Live Video; a failure here is reported but not fatal.
    if (Camera.Acquisition.Capture() == uEye.Defines.Status.Success)
    {
        bLive = true;
    }
    else
    {
        MessageBox.Show("Start Live Video failed");
    }

    // Connect Event
    Camera.EventFrame += onFrameEvent;

    // Enable the auto-feature checkboxes only when the camera supports them.
    CB_Auto_Gain_Balance.Enabled = Camera.AutoFeatures.Software.Gain.Supported;
    CB_Auto_White_Balance.Enabled = Camera.AutoFeatures.Software.WhiteBalance.Supported;
}
/// <summary>
/// Prepares the uEye camera for a one-shot thumbnail capture: hooks the camera
/// events, opens the device by ID (timing the initialization) and allocates
/// its image buffer.
/// </summary>
public SnapshotRetriever(CameraSummary summary, long deviceId)
{
    this.summary = summary;

    camera.EventFrame += camera_EventFrame;
    camera.EventDeviceRemove += camera_EventDeviceRemove;
    camera.EventDeviceUnPlugged += camera_EventDeviceUnPlugged;

    try
    {
        stopwatch.Start();

        Int32 initFlags = (Int32)deviceId | (Int32)uEye.Defines.DeviceEnumeration.UseDeviceID;
        if (camera.Init(initFlags) != uEye.Defines.Status.SUCCESS)
        {
            log.ErrorFormat("Camera {0} could not be opened for thumbnail capture.", summary.Alias);
            return;
        }

        log.DebugFormat("{0} initialized in {1} ms.", summary.Alias, stopwatch.ElapsedMilliseconds);
        stopwatch.Stop();

        // We do not load the camera-specific profile for the thumbnail at the moment.
        // For some reason the .ToBitmap method doesn't work well on the RGB32 format, so in order to at least have something we
        // load the camera on the default profile for the thumbnail.
        if (camera.Memory.Allocate() != uEye.Defines.Status.SUCCESS)
        {
            log.ErrorFormat("Camera {0} could not have its buffer allocated for thumbnail capture.", summary.Alias);
            return;
        }
    }
    catch (Exception e)
    {
        LogError(e, "");
    }
}
/// <summary>
/// Initializes the camera on the given device and display handle, allocates
/// image memory and hooks the frame event. On failure the camera is closed
/// again so it is not left half-initialized.
/// </summary>
/// <param name="s32Cam">Camera/device id passed to uEye.Camera.Init.</param>
/// <returns>true on success; false otherwise (the reason is recorded via SetError).</returns>
public bool InitCamera(int s32Cam)
{
    // Open Camera
    uEye.Defines.Status status = camera.Init(s32Cam, displayHandle);
    if (status != uEye.Defines.Status.SUCCESS)
    {
        SetError("Camera initializing failed");
        return false;
    }

    // Allocate Memory
    status = camera.Memory.Allocate();
    if (status != uEye.Defines.Status.SUCCESS)
    {
        SetError("Allocate Memory failed");

        // BUGFIX: the original cleanup check sat after the early returns, where
        // status was always SUCCESS, so it never ran and a half-initialized
        // camera was left open. Close it here, on the actual failure path.
        if (camera.IsOpened)
        {
            camera.Exit();
        }

        return false;
    }

    camera.EventFrame += Camera_EventFrame;

    return true;
}
// Initialization Routine for Camera object etc.
// Opens the first available camera, queries the AOI/exposure capabilities,
// allocates image memory, and mirrors the queried ranges into the form
// controls. Each failure path reports via the status label and aborts.
private void Form1_Load(object sender, EventArgs e)
{
    camera = new uEye.Camera();

    // Initialize Camera
    uEye.Defines.Status statusRet = 0;
    statusRet = camera.Init();
    if (statusRet != uEye.Defines.Status.Success)
    {
        camStatusLabel.Text = "Failed to initialize camera";
        return;
    }

    // Get information about AOI, Exposure Time, Framerate etc.
    // NOTE(review): only the LAST status (Exposure.Get) is checked below, so a
    // failure in an earlier query would go unnoticed — confirm intended.
    statusRet = camera.Timing.Exposure.GetRange(out etrange);
    statusRet = camera.Size.AOI.GetPosRange(out aoix0range, out aoiy0range);
    statusRet = camera.Size.AOI.GetSizeRange(out aoiwidthrange, out aoiheightrange);
    statusRet = camera.Size.AOI.Get(out aoix0, out aoiy0, out aoiwidth, out aoiheight);
    statusRet = camera.Timing.Exposure.Get(out exptime);
    if (statusRet != uEye.Defines.Status.Success)
    {
        camStatusLabel.Text = "Failed to retrieve parameters";
        return;
    }

    // Allocate memory
    statusRet = camera.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        camStatusLabel.Text = "Failed to allocate memory";
        return;
    }

    // Update graphics etc.
    // Mirror the queried AOI position/size ranges into the numeric up-downs.
    aoiX0NumericUpDown.Maximum = aoix0range.Maximum;
    aoiX0NumericUpDown.Minimum = aoix0range.Minimum;
    aoiX0NumericUpDown.Increment = aoix0range.Increment;
    aoiX0NumericUpDown.Value = aoix0;
    aoiY0NumericUpDown.Maximum = aoiy0range.Maximum;
    aoiY0NumericUpDown.Minimum = aoiy0range.Minimum;
    aoiY0NumericUpDown.Increment = aoiy0range.Increment;
    aoiY0NumericUpDown.Value = aoiy0;
    aoiWidthNumericUpDown.Maximum = aoiwidthrange.Maximum;
    aoiWidthNumericUpDown.Minimum = aoiwidthrange.Minimum;
    aoiWidthNumericUpDown.Increment = aoiwidthrange.Increment;
    aoiWidthNumericUpDown.Value = aoiwidth;
    aoiHeightNumericUpDown.Maximum = aoiheightrange.Maximum;
    aoiHeightNumericUpDown.Minimum = aoiheightrange.Minimum;
    aoiHeightNumericUpDown.Increment = aoiheightrange.Increment;
    aoiHeightNumericUpDown.Value = aoiheight;

    // Exposure controls: the numeric box works in exposure-time units, the
    // trackbar in integer steps of the exposure increment.
    expTimeNumericUpDown.Maximum = System.Convert.ToDecimal(etrange.Maximum);
    expTimeNumericUpDown.Minimum = System.Convert.ToDecimal(etrange.Minimum);
    expTimeNumericUpDown.Increment = System.Convert.ToDecimal(etrange.Increment);
    expTimeNumericUpDown.Value = System.Convert.ToDecimal(exptime);
    expTimeTrackBar.Maximum = (int)((etrange.Maximum - etrange.Minimum) / etrange.Increment);
    expTimeTrackBar.Minimum = 0;
    expTimeTrackBar.Value = (int)((exptime - etrange.Minimum) / etrange.Increment);

    // Remember the AOI picture box origin — presumably for later repositioning;
    // NOTE(review): inferred from the name, confirm against usage.
    pboxorigin = aoiPictureBox.Location;

    camStatusLabel.Text = "Camera Ready";
    guiup2date = true;
}
/// <summary>
/// Opens the camera, uploading starter firmware when required, loads the
/// stored parameter set, prepares the memory ring, starts continuous capture
/// and publishes the video size / pixel format from the first frame buffer.
/// </summary>
/// <returns>true when the camera is open and acquiring; false when memory init fails.</returns>
/// <exception cref="APXExeception">Thrown when the camera cannot be opened or capture cannot start.</exception>
public override bool Open()
{
    uEye.Defines.Status status;

    _camera = new uEye.Camera();
    status = _camera.Init(Convert.ToInt32(CameraID));

    // BUGFIX: the original code threw on ANY non-success status before reaching
    // the STARTER_FW_UPLOAD_NEEDED handling, which made the firmware-upload
    // fallback unreachable dead code. Only throw for other failures.
    if (status != uEye.Defines.Status.Success)
    {
        if (status != uEye.Defines.Status.STARTER_FW_UPLOAD_NEEDED)
        {
            throw new APXExeception(String.Format(
                "{0}:{1}", Name, status.ToString()));
        }

        // Retry with the "upload starter firmware" flag set.
        if (this._camera.Init(0x10000) != uEye.Defines.Status.Success)
        {
            throw new APXExeception(String.Format(
                "{0}:{1}", Name, status.ToString()));
        }
    }

    this._camera.Parameter.Load();

    if (!this.InitMemory())
    {
        return false;
    }

    this._camera.Memory.SetActive(this._menIDArray[this._curMemIndex]);
    this._camera.EventFrame += new EventHandler(this.onFrameEvent);

    if (this._camera.Acquisition.Capture() != uEye.Defines.Status.Success)
    {
        throw new APXExeception(String.Format("{0} Continous capture fail..", Name));
    }

    _isAcquisition = true;

    // Derive the video size and pixel format from the active buffer.
    Bitmap bitmap = null;
    this._camera.Memory.ToBitmap(this._menIDArray[this._curMemIndex], out bitmap);
    VideoSize = bitmap.Size;
    this._pixelFormat = bitmap.PixelFormat;

    IsOpen = true;
    RaiseOnOpenEvent();
    return true;
}
// Opens the IDS camera (without starting the grab), applies or captures the
// stream format and critical properties depending on whether this is the first
// open, then reallocates the SDK's internal image buffers for the final format.
private void Open()
{
    // Unlike in the DirectShow module, we do not backup and restore camera configuration.
    // If the user configured the camera outside of Kinovea we respect the new settings.
    // Two reasons:
    // 1. In DirectShow we must do the backup/restore to work around drivers that inadvertently reset the camera properties.
    // 2. Industrial cameras have many properties that won't be configurable in Kinovea
    // so the user is more likely to configure the camera from the outside.
    if (grabbing)
    {
        Stop();
    }

    try
    {
        uEye.Defines.Status status = camera.Init((Int32)deviceId | (Int32)uEye.Defines.DeviceEnumeration.UseDeviceID);
        if (status != uEye.Defines.Status.SUCCESS)
        {
            log.ErrorFormat("Error trying to open IDS uEye camera.");
            return;
        }

        // Load parameter set.
        ProfileHelper.Load(camera, summary.Identifier);
    }
    catch (Exception e)
    {
        log.Error("Could not open IDS uEye camera.", e);
        return;
    }

    SpecificInfo specific = summary.Specific as SpecificInfo;
    if (specific == null)
    {
        return;
    }

    // Store the camera object into the specific info so that we can retrieve device informations from the configuration dialog.
    specific.Camera = camera;

    int currentColorMode = IDSHelper.ReadCurrentStreamFormat(camera);

    // Some properties can only be changed when the camera is opened but not streaming. Now is the time.
    // We store them in the summary when coming back from FormConfiguration, and we write them to the camera here.
    // Only do this if it's not the first time we open the camera, to respect any change that could have been done outside Kinovea.
    if (firstOpen)
    {
        specific.StreamFormat = currentColorMode;
    }
    else
    {
        if (specific.StreamFormat != currentColorMode)
        {
            IDSHelper.WriteStreamFormat(camera, specific.StreamFormat);
        }

        CameraPropertyManager.WriteCriticalProperties(camera, specific.CameraProperties);

        // Save parameter set.
        ProfileHelper.Save(camera, ProfileHelper.GetProfileFilename(summary.Identifier));
    }

    // Reallocate IDS internal buffers after changing the format.
    // Free all currently registered buffers first, then allocate fresh ones
    // sized for the (possibly changed) stream format.
    Int32[] memList;
    camera.Memory.GetList(out memList);
    camera.Memory.Free(memList);
    camera.Memory.Allocate();

    // Log the geometry of the newly active buffer for diagnostics.
    int memId;
    camera.Memory.GetActive(out memId);

    int width, height, bitsPerPixel, pitch;
    camera.Memory.Inquire(memId, out width, out height, out bitsPerPixel, out pitch);

    log.DebugFormat("IDS internal buffers allocated: {0}x{1}, {2} bits per pixel, pitch:{3}.", width, height, bitsPerPixel, pitch);
}
/// <summary>
/// Imports a camera parameter set from a user-selected .ini file: stops the
/// stream, replaces the stored profile, reopens the camera (not grabbing) to
/// load the new values, updates the summary, then reconnects and rebuilds the
/// camera-property UI.
/// </summary>
private void BtnImport_Click(object sender, EventArgs e)
{
    // Locate an .ini file.
    OpenFileDialog openFileDialog = new OpenFileDialog();
    openFileDialog.Title = CameraLang.FormConfiguration_ImportParameters;
    //openFileDialog.InitialDirectory = Path.GetDirectoryName(ProfileHelper.GetProfileFilename(summary.Identifier));
    openFileDialog.RestoreDirectory = true;
    openFileDialog.Filter = "Ini file (*.ini)" + "|*.ini;";
    openFileDialog.FilterIndex = 0;

    if (openFileDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    string filename = openFileDialog.FileName;
    if (string.IsNullOrEmpty(filename) || !File.Exists(filename))
    {
        return;
    }

    // The timing here is finnicky.
    // connect() will start the delay buffer allocation on the current image size and start receiving frames.
    // disconnect prevents reading the new values from the camera.
    // Load with new sizes while the camera is streaming will fail because the buffers are wrong.
    // So we need to load the new values with the camera opened but not streaming.
    this.SuspendLayout();
    disconnect();

    ProfileHelper.Replace(summary.Identifier, filename);

    // Reopen the camera but do not start grabbing.
    uEye.Defines.Status status = camera.Init((Int32)deviceId | (Int32)uEye.Defines.DeviceEnumeration.UseDeviceID);
    if (status != uEye.Defines.Status.SUCCESS)
    {
        log.ErrorFormat("Error trying to open IDS uEye camera.");

        // BUGFIX: the original returned here with the layout still suspended,
        // leaving the form in a frozen-layout state. Balance SuspendLayout().
        this.ResumeLayout();
        return;
    }

    // Load new parameters.
    ProfileHelper.Load(camera, summary.Identifier);
    cameraProperties = CameraPropertyManager.Read(camera, deviceId);

    SpecificInfo info = summary.Specific as SpecificInfo;
    PopulateStreamFormat();
    info.StreamFormat = this.SelectedStreamFormat.Value;
    info.CameraProperties = cameraProperties;
    summary.UpdateDisplayRectangle(Rectangle.Empty);
    CameraTypeManager.UpdatedCameraSummary(summary);

    // Reconnect.
    camera.Exit();
    connect();

    // Reload UI.
    RemoveCameraControls();
    PopulateCameraControls();

    this.ResumeLayout();
}
/// <summary>
/// Opens the first available IDS camera bound to the live picture box, picks a
/// supported display mode (Direct3D preferred, then OpenGL), configures Mono8
/// colour/steal formats, allocates an image memory and draws the overlay.
/// </summary>
private void CameraInitialize()
{
    uEye.Defines.Status statusRet;

    // Create a instance of ids camera
    ids_Camera = new uEye.Camera();

    // Open the first available camera
    statusRet = ids_Camera.Init(0, this.pb_IdsLive.Handle.ToInt32());
    if (statusRet == uEye.Defines.Status.SUCCESS)
    {
        uEye.Defines.DisplayMode supportedMode;
        statusRet = ids_Camera.DirectRenderer.GetSupported(out supportedMode);

        if ((supportedMode & uEye.Defines.DisplayMode.Direct3D) == uEye.Defines.DisplayMode.Direct3D)
        {
            bDirect3d = true;
            label1.Text += "bDirect3d";
        }
        else if ((supportedMode & uEye.Defines.DisplayMode.OpenGL) == uEye.Defines.DisplayMode.OpenGL)
        {
            bOpenGL = true;
            //MessageBox.Show("OpenGL");
            label1.Text += "OpenGL";
        }
        else
        {
            label1.Text += "Null";
        }
    }
    else
    {
        MessageBox.Show("Fail to open the IDS camera, please check the USB connector and reslaunch the software.");
        Close();

        // BUGFIX: Form.Close() does not abort the running method; without this
        // return the code below kept configuring a camera that failed to open.
        return;
    }

    // Set display mode
    if (true == bDirect3d)
    {
        statusRet = ids_Camera.Display.Mode.Set(uEye.Defines.DisplayMode.Direct3D);
    }
    else if (true == bOpenGL)
    {
        statusRet = ids_Camera.Display.Mode.Set(uEye.Defines.DisplayMode.OpenGL);
    }

    // Enables real-time scaling of the image to the size of the display window. The overlay is scaled together with the camera image.
    statusRet = ids_Camera.DirectRenderer.EnableScaling();

    // Set the color mode
    statusRet = ids_Camera.PixelFormat.Set(uEye.Defines.ColorMode.Mono8);

    // Defines the color format for the steal function
    statusRet = ids_Camera.DirectRenderer.SetStealFormat(uEye.Defines.ColorMode.Mono8);

    // Get the color format for the steal function
    statusRet = ids_Camera.DirectRenderer.GetStealFormat(out stealColorMode);
    label4.Text += stealColorMode.ToString();

    // Allocates an image memory for an image and set it active
    ids_Camera.Memory.Allocate(1280, 1024, 8, true, out memoryID);

    ids_Camera.PixelFormat.Get(out displayColorMode);
    ids_Camera.PixelFormat.GetBitsPerPixel(out colorDepth);
    label2.Text += colorDepth.ToString();
    label3.Text += displayColorMode.ToString();

    // Set overlay graphics
    SetOverlayGraphic(640.0f, 512.0f, 90.0f);
}
// Form "Shown" handler: opens the first available camera on the display
// window, determines which accelerated display modes (Direct3D / OpenGL) are
// supported, configures the UI accordingly, picks a mode and starts freerun
// live capture. Closes the form when no camera or no supported mode is found.
private void uEye_DotNet_Direct_Renderer_Shown(object sender, EventArgs e)
{
    bool bDirect3D = false;
    bool bOpenGL = false;
    uEye.Defines.Status statusRet;

    m_Camera = new uEye.Camera();
    cB_Semi_transparent.Enabled = false;

    // open first available camera
    statusRet = m_Camera.Init(0, DisplayWindow.Handle.ToInt32());
    if (statusRet == uEye.Defines.Status.SUCCESS)
    {
        // Timer used to animate the overlay graphic.
        m_OverlayMoveTimer.Interval = 10;
        m_OverlayMoveTimer.Tick += new EventHandler(OnOverlayMove);

        // Query which accelerated display modes this system supports.
        uEye.Defines.DisplayMode supportedMode;
        statusRet = m_Camera.DirectRenderer.GetSupported(out supportedMode);

        if ((supportedMode & uEye.Defines.DisplayMode.Direct3D) == uEye.Defines.DisplayMode.Direct3D)
        {
            rB_Direct3D.Enabled = true;
            bDirect3D = true;
        }
        else
        {
            rB_Direct3D.Enabled = false;
            bDirect3D = false;
        }

        if ((supportedMode & uEye.Defines.DisplayMode.OpenGL) == uEye.Defines.DisplayMode.OpenGL)
        {
            rB_OpenGL.Enabled = true;
            bOpenGL = true;

            // Pre-select OpenGL only when Direct3D is unavailable.
            if (rB_Direct3D.Enabled != true)
            {
                rB_OpenGL.Checked = true;
            }
        }
        else
        {
            rB_OpenGL.Enabled = false;
            bOpenGL = false;
        }

        if (((supportedMode & uEye.Defines.DisplayMode.Direct3D) == uEye.Defines.DisplayMode.Direct3D)
            || ((supportedMode & uEye.Defines.DisplayMode.OpenGL) == uEye.Defines.DisplayMode.OpenGL))
        {
            // When both modes are supported, Direct3D wins because it is set last.
            if (bOpenGL == true)
            {
                // set display mode
                statusRet = m_Camera.Display.Mode.Set(uEye.Defines.DisplayMode.OpenGL);
            }

            if (bDirect3D == true)
            {
                // set display mode
                statusRet = m_Camera.Display.Mode.Set(uEye.Defines.DisplayMode.Direct3D);
            }

            // start live
            Button_Camera_Open_Freerun_Click(null, EventArgs.Empty);

            // update information
            UpdateOverlayInformation();
            UpdateImageInformation();

            // set default key color
            m_OverlayColor = System.Drawing.Color.Black;
            Button_Key_Color.BackColor = m_OverlayColor;
        }
        else
        {
            MessageBox.Show("Direct3D and OpenGL are not supported");
            Close();
        }
    }
    else
    {
        MessageBox.Show("Could not open camera...");
        Close();
    }
}