private void Camera_EventFrame(object sender, EventArgs e)
{
    // Frame-ready handler: lock the active image buffer, draw overlays on it,
    // then render it to the display window scaled to fit.
    uEye.Camera camera = sender as uEye.Camera;
    if (camera == null || !camera.IsOpened)
    {
        return;
    }

    Int32 s32MemID;
    camera.Memory.GetActive(out s32MemID);
    camera.Memory.Lock(s32MemID);
    try
    {
        Bitmap bitmap;
        camera.Memory.ToBitmap(s32MemID, out bitmap);

        if (bitmap != null)
        {
            try
            {
                // GDI+ cannot create a Graphics over an 8bpp indexed bitmap,
                // so drawing is skipped for that format.
                if (bitmap.PixelFormat != System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
                {
                    Graphics graphics = Graphics.FromImage(bitmap);
                    try
                    {
                        DoDrawing(ref graphics, s32MemID);
                    }
                    finally
                    {
                        // Dispose even when DoDrawing throws (was leaked before).
                        graphics.Dispose();
                    }
                }
            }
            finally
            {
                // The bitmap was also leaked on the indexed-format path before.
                bitmap.Dispose();
            }
        }
    }
    finally
    {
        // Always release the buffer so acquisition can reuse it.
        camera.Memory.Unlock(s32MemID);
    }

    camera.Display.Render(s32MemID, uEye.Defines.DisplayRenderMode.FitToWindow);
}
private static CameraProperty ReadExposure(uEye.Camera camera, Dictionary<string, CameraProperty> properties)
{
    // Query the exposure range and current value from the driver.
    uEye.Types.Range<Double> range;
    camera.Timing.Exposure.GetRange(out range);
    //camera.Timing.Exposure.Fine.GetRange(out range); // Not supported on test camera.

    double exposure;
    camera.Timing.Exposure.Get(out exposure);

    // The SDK reports milliseconds; the property is exposed in microseconds.
    CameraProperty property = new CameraProperty();
    property.Identifier = "exposure";
    property.Supported = true;
    property.ReadOnly = false;
    property.Type = CameraPropertyType.Float;
    property.Minimum = (range.Minimum * 1000).ToString(CultureInfo.InvariantCulture);
    property.Maximum = (range.Maximum * 1000).ToString(CultureInfo.InvariantCulture);
    property.Step = (range.Increment * 1000).ToString(CultureInfo.InvariantCulture);
    property.Representation = CameraPropertyRepresentation.LinearSlider;
    property.CurrentValue = (exposure * 1000).ToString(CultureInfo.InvariantCulture);

    if (properties != null)
    {
        properties.Add(property.Identifier, property);
    }

    return property;
}
private void onFrameEvent(object sender, EventArgs e)
{
    // Frame-ready handler: convert the active buffer to a Bitmap and forward
    // it to subscribers. Only active when the display is in DIB mode.
    uEye.Camera camera = sender as uEye.Camera;
    if (camera == null || !camera.IsOpened)
    {
        // Guard against a non-camera sender (was an NRE risk before).
        return;
    }

    uEye.Defines.DisplayMode mode;
    camera.Display.Mode.Get(out mode);

    // only display in dib mode
    if (mode != uEye.Defines.DisplayMode.DiB)
    {
        return;
    }

    Int32 s32MemID;
    camera.Memory.GetActive(out s32MemID);
    camera.Memory.Lock(s32MemID);

    // Use the sender camera consistently; the original mixed in the m_Camera
    // field here, which could target a different device than the one that
    // raised the event.
    Bitmap bitmap;
    camera.Memory.ToBitmap(s32MemID, out bitmap);

    // Ownership of the bitmap passes to the event subscribers.
    FrameEventArgs fe = new FrameEventArgs(bitmap);
    OnFrameRecived(fe);

    camera.Memory.Unlock(s32MemID);
    ++m_FrameCount;
}
private static CameraProperty ReadGainBoost(uEye.Camera camera, Dictionary<string, CameraProperty> properties)
{
    // Gain boost is an on/off hardware feature; expose it as a checkbox.
    bool isSupported;
    camera.Gain.Hardware.Boost.GetSupported(out isSupported);

    CameraProperty property = new CameraProperty();
    property.Identifier = "gainboost";
    property.Supported = isSupported;
    property.ReadOnly = false;
    property.Type = CameraPropertyType.Boolean;
    property.Representation = CameraPropertyRepresentation.Checkbox;

    if (!isSupported)
    {
        log.DebugFormat("Gain boost is not supported by the camera.");
    }
    else
    {
        bool enabled;
        camera.Gain.Hardware.Boost.GetEnable(out enabled);
        property.CurrentValue = enabled ? "true" : "false";
    }

    if (properties != null)
    {
        properties.Add(property.Identifier, property);
    }

    return property;
}
/// <summary>
/// Read a single property and return it.
/// This is used in the context of dependent properties, to update the master list with new values.
/// </summary>
public static CameraProperty Read(uEye.Camera camera, long deviceId, string key)
{
    switch (key)
    {
        case "pixelclock":
            return ReadPixelClock(camera, null);
        case "framerate":
            return ReadFramerate(camera, null);
        case "exposure":
            return ReadExposure(camera, null);
        case "gain":
            return ReadGain(camera, null);
        case "gainboost":
            return ReadGainBoost(camera, null);
        default:
            // Unknown keys are not an error; callers treat null as "no such property".
            return null;
    }
}
private static void WriteExposure(uEye.Camera camera, CameraProperty property)
{
    // The property stores microseconds; the SDK expects milliseconds.
    float exposure = float.Parse(property.CurrentValue, CultureInfo.InvariantCulture) / 1000;
    camera.Timing.Exposure.Set(exposure);
}
/// <summary>
/// Initialize the camera: open it, allocate image memory and start live capture.
/// </summary>
/// <returns>true when the camera is streaming; false otherwise (the failure reason is recorded via SetError).</returns>
public override bool Init()
{
    camera = new uEye.Camera();

    // Open the camera.
    statusRet = camera.Init();
    if (statusRet != uEye.Defines.Status.Success)
    {
        SetError("Camera initializing failed");
        return false;
    }

    // Allocate image memory.
    statusRet = camera.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        SetError("Allocate Memory failed");
        return false;
    }

    // Start continuous capture.
    if (camera.Acquisition.Capture() != uEye.Defines.Status.Success)
    {
        SetError("Capture live show from camera failed!");
        return false;
    }

    return true;
}
private static CameraProperty ReadGain(uEye.Camera camera, Dictionary<string, CameraProperty> properties)
{
    // Master hardware gain, reported on a fixed 0-100 scale.
    int masterGain;
    camera.Gain.Hardware.Scaled.GetMaster(out masterGain);

    CameraProperty property = new CameraProperty();
    property.Identifier = "gain";
    property.Supported = true;
    property.ReadOnly = false;
    property.Type = CameraPropertyType.Float;
    property.Minimum = "0";
    property.Maximum = "100";
    property.Step = "1";
    property.Representation = CameraPropertyRepresentation.LinearSlider;
    property.CurrentValue = masterGain.ToString(CultureInfo.InvariantCulture);

    if (properties != null)
    {
        properties.Add(property.Identifier, property);
    }

    return property;
}
public static uEye.Defines.Status FreeImageMems(uEye.Camera Camera)
{
    // Free every image memory the camera owns, retrying a buffer while the
    // driver still holds its sequence lock.
    int[] memoryIds;
    uEye.Defines.Status statusRet = Camera.Memory.GetList(out memoryIds);

    if (uEye.Defines.Status.SUCCESS != statusRet)
    {
        return statusRet;
    }

    foreach (int memoryId in memoryIds)
    {
        statusRet = Camera.Memory.Free(memoryId);
        while (uEye.Defines.Status.SEQ_BUFFER_IS_LOCKED == statusRet)
        {
            // Give the driver a moment to release the buffer, then retry.
            Thread.Sleep(1);
            statusRet = Camera.Memory.Free(memoryId);
        }
    }

    return statusRet;
}
private static CameraProperty ReadFramerate(uEye.Camera camera, Dictionary<string, CameraProperty> properties)
{
    // Query the achievable frame rate range and the currently configured value.
    uEye.Types.Range<Double> range;
    camera.Timing.Framerate.GetFrameRateRange(out range);

    double framerate;
    camera.Timing.Framerate.Get(out framerate);

    CameraProperty property = new CameraProperty();
    property.Identifier = "framerate";
    property.Supported = true;
    property.ReadOnly = false;
    property.Type = CameraPropertyType.Float;
    property.Minimum = range.Minimum.ToString(CultureInfo.InvariantCulture);
    property.Maximum = range.Maximum.ToString(CultureInfo.InvariantCulture);
    property.Step = range.Increment.ToString(CultureInfo.InvariantCulture);
    property.Representation = CameraPropertyRepresentation.LinearSlider;
    property.CurrentValue = framerate.ToString(CultureInfo.InvariantCulture);

    if (properties != null)
    {
        properties.Add(property.Identifier, property);
    }

    return property;
}
private static void onFrameEvent(object sender, EventArgs e)
{
    // Frame-ready handler (WPF): refresh the shared frameCamera bitmap from the
    // active buffer and push it to OutputImage on the UI thread.
    uEye.Camera Camera = sender as uEye.Camera;

    Int32 s32MemID;
    Camera.Memory.GetActive(out s32MemID);

    // Release the previous frame's bitmap before overwriting the shared field.
    if (frameCamera != null)
    {
        frameCamera.Dispose();
    }
    frameCamera = null;

    Camera.Memory.ToBitmap(s32MemID, out frameCamera);

    try
    {
        // Marshal onto the UI thread; the image is only shown while status is "Live".
        Application.Current.Dispatcher.Invoke(new Action(() =>
        {
            if (CameraResult.Status == "Live")
            {
                OutputImage.Source = BitmapConvert(frameCamera);
            }
            else
            {
                OutputImage.Source = null;
            }
        }));
    }
    catch
    {
        // Best-effort UI update: dispatcher failures (e.g. during shutdown) are ignored.
        // NOTE(review): frameCamera is written here and read on the UI thread with no
        // synchronization — presumably benign in practice; verify against callers.
    }
}
public static CameraResults InitializeCamera()
{
    // Open camera 1, set the PWM mode and allocate image memory.
    // The returned CameraResults carries either "Initialized" or an error code.
    Camera = new uEye.Camera();
    CameraStatus = new CameraResults();

    statusRet = Camera.Init(1);
    if (statusRet != uEye.Defines.Status.Success)
    {
        CameraStatus.ErrorCode = "0x01";
        CameraStatus.Status = "Error";
        CameraStatus.ErrorDescription = "Can not be initialized uEye camera. Please check the camera connection, and try again!";
        return CameraStatus;
    }

    // Set PWM mode 0; the result is deliberately not checked.
    statusRet = Camera.IO.Pwm.SetMode(0);

    statusRet = Camera.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        CameraStatus.ErrorCode = "0x02";
        CameraStatus.Status = "Error";
        CameraStatus.ErrorDescription = "Can not be allocate memory to uEye camera. Please check the camera connection, and try again!";
        return CameraStatus;
    }

    CameraStatus.ErrorCode = "";
    CameraStatus.Status = "Initialized";
    CameraStatus.ErrorDescription = "";
    return CameraStatus;
}
private static CameraProperty ReadPixelClock(uEye.Camera camera, Dictionary<string, CameraProperty> properties)
{
    // Query the pixel clock range and the currently configured value.
    uEye.Types.Range<Int32> range;
    camera.Timing.PixelClock.GetRange(out range);

    Int32 pixelClock;
    camera.Timing.PixelClock.Get(out pixelClock);

    CameraProperty property = new CameraProperty();
    property.Identifier = "pixelclock";
    property.Supported = true;
    property.ReadOnly = false;
    property.Type = CameraPropertyType.Integer;
    property.Minimum = range.Minimum.ToString(CultureInfo.InvariantCulture);
    property.Maximum = range.Maximum.ToString(CultureInfo.InvariantCulture);
    property.Step = range.Increment.ToString(CultureInfo.InvariantCulture);
    property.Representation = CameraPropertyRepresentation.LinearSlider;
    property.CurrentValue = pixelClock.ToString(CultureInfo.InvariantCulture);

    if (properties != null)
    {
        properties.Add(property.Identifier, property);
    }

    return property;
}
public static bool Load(uEye.Camera camera, string identifier)
{
    // Load the saved camera parameter set (.ini) matching this identifier.
    // Returns true when a profile file was found and applied.
    string filename = GetProfileFilename(identifier);
    bool loaded = false;

    try
    {
        if (!File.Exists(filename))
        {
            log.DebugFormat("Camera parameter set not found.");
            return loaded;
        }

        log.DebugFormat("Loading IDS camera parameters from {0}.", Path.GetFileName(filename));
        camera.Parameter.Load(filename);

        // We do not support all incoming parameters. Incompatible parameters may
        // happen when the profile is imported from an external source; if the
        // absolute-AOI flags had to be rewritten, apply the fixed file again.
        if (FixAbsoluteAOI(camera, filename))
        {
            camera.Parameter.Load(filename);
        }

        loaded = true;
    }
    catch (Exception e)
    {
        log.Error(string.Format("Error while loading camera parameter set at {0}.", filename), e);
    }

    return loaded;
}
public static float GetFramerate(uEye.Camera camera)
{
    // Current frame rate as reported by the driver, narrowed to float.
    double framerate;
    camera.Timing.Framerate.Get(out framerate);
    return (float)framerate;
}
private static bool FixAbsoluteAOI(uEye.Camera camera, string filename)
{
    // "Show AOI only".
    // The IDS API doesn't support writing the absolute position parameter, which
    // controls whether frames are AOI-sized (absolute = false) or full-sized with
    // black borders (absolute = true). We don't support the latter, so the .ini
    // is rewritten on the fly and must be reloaded by the caller.
    // Returns true when the file was modified.
    bool absX, absY;
    camera.Size.AOI.GetAbsX(out absX);
    camera.Size.AOI.GetAbsY(out absY);

    if (!absX && !absY)
    {
        return false;
    }

    string[] lines = File.ReadAllLines(filename);
    for (int i = 0; i < lines.Length; i++)
    {
        if (lines[i].StartsWith("Start X absolute"))
        {
            lines[i] = "Start X absolute=0";
        }
        else if (lines[i].StartsWith("Start Y absolute"))
        {
            lines[i] = "Start Y absolute=0";
        }
    }

    File.WriteAllLines(filename, lines);
    return true;
}
public FormConfiguration(CameraSummary summary)
{
    // Configuration dialog for an IDS camera described by the summary.
    this.summary = summary;

    InitializeComponent();
    tbAlias.AutoSize = false;
    tbAlias.Height = 20;
    tbAlias.Text = summary.Alias;
    lblSystemName.Text = summary.Name;
    btnIcon.BackgroundImage = summary.Icon;

    // Bail out when no opened camera is attached to the summary.
    SpecificInfo specific = summary.Specific as SpecificInfo;
    if (specific == null || specific.Camera == null || !specific.Camera.IsOpened)
    {
        return;
    }

    camera = specific.Camera;

    int id;
    camera.Device.GetDeviceID(out id);
    deviceId = (long)id;

    cameraProperties = CameraPropertyManager.Read(camera, deviceId);

    // A property-count mismatch means the stored info is stale.
    if (cameraProperties.Count != specific.CameraProperties.Count)
    {
        specificChanged = true;
    }

    Populate();
}
private void Camera_EventFrame(object sender, EventArgs e)
{
    // Frame-ready handler: lock the active buffer, draw overlays plus the
    // picturebox graphics list onto it, then render it to the display window.
    uEye.Camera camera = sender as uEye.Camera;
    if (camera == null || !camera.IsOpened)
    {
        return;
    }

    Int32 s32MemID;
    camera.Memory.GetActive(out s32MemID);
    camera.Memory.Lock(s32MemID);
    try
    {
        Bitmap bitmap;
        camera.Memory.ToBitmap(s32MemID, out bitmap);

        if (bitmap != null)
        {
            try
            {
                // GDI+ cannot create a Graphics over an 8bpp indexed bitmap,
                // so drawing is skipped for that format.
                if (bitmap.PixelFormat != System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
                {
                    Graphics graphics = Graphics.FromImage(bitmap);
                    try
                    {
                        DoDrawing(ref graphics, s32MemID);

                        if (picturebox.GraphicsList != null)
                        {
                            picturebox.GraphicsList.Draw(graphics, picturebox);
                        }
                    }
                    finally
                    {
                        // Dispose even when drawing throws (was leaked before).
                        graphics.Dispose();
                    }
                }
            }
            finally
            {
                // The bitmap was leaked on the indexed-format path before.
                bitmap.Dispose();
            }
        }
    }
    finally
    {
        // Always release the buffer so acquisition can reuse it.
        camera.Memory.Unlock(s32MemID);
    }

    camera.Display.Render(s32MemID, uEye.Defines.DisplayRenderMode.FitToWindow);
}
public void ExitCamera()
{
    // Close the driver handle (if one is open) and drop the reference.
    if (_camera != null)
    {
        _camera.Exit();
        _camera = null;
    }
}
// Picture control bound to the given camera; the base class receives the
// camera, this class only wires up the periodic UI refresh.
public PictureControl(uEye.Camera camera) : base(camera)
{
    InitializeComponent();

    // Refresh controls every 100 ms via OnUpdateControls.
    m_UpdateTimer.Interval = 100;
    m_UpdateTimer.Tick += OnUpdateControls;
}
private static bool isStreamFormatSupported(uEye.Camera camera, int colorMode)
{
    // converterMode comes back as a bitfield of the converters available for this
    // color mode (Hardware3x3, Software5x5, ...). As long as at least one converter
    // is supported we should get correct frame content. (Not sure about that JPEG
    // converter though.)
    uEye.Defines.ColorConvertMode converterMode = 0;
    uEye.Defines.Status status = camera.Color.Converter.GetSupported((uEye.Defines.ColorMode)colorMode, out converterMode);

    if (status != uEye.Defines.Status.Success)
    {
        return false;
    }

    return converterMode != uEye.Defines.ColorConvertMode.None;
}
private unsafe void camera_EventFrame(object sender, EventArgs e)
{
    // Frame-ready handler: copies the active uEye buffer into incomingBuffer and
    // pushes it downstream, either via the finish-line consolidator or directly.
    uEye.Camera camera = sender as uEye.Camera;
    if (camera == null || !camera.IsOpened)
    {
        return;
    }

    // Only DIB display mode exposes the raw buffer we copy from.
    uEye.Defines.DisplayMode mode;
    camera.Display.Mode.Get(out mode);
    if (mode != uEye.Defines.DisplayMode.DiB)
    {
        return;
    }

    int memId;
    camera.Memory.GetActive(out memId);
    camera.Memory.Lock(memId);

    System.IntPtr ptrSrc;
    camera.Memory.ToIntPtr(memId, out ptrSrc);

    // Pin incomingBuffer so the driver can copy the frame into managed memory.
    fixed(byte *p = incomingBuffer)
    {
        IntPtr ptrDst = (IntPtr)p;
        camera.Memory.CopyImageMem(ptrSrc, memId, ptrDst);
    }

    if (finishline.Enabled)
    {
        // Finish-line mode: accumulate input; only emit once Consolidate reports
        // a complete composite buffer is ready to flush.
        bool flush = finishline.Consolidate(incomingBuffer);
        if (flush)
        {
            ComputeDataRate(finishline.BufferOutput.Length);

            if (FrameProduced != null)
            {
                FrameProduced(this, new FrameProducedEventArgs(finishline.BufferOutput, finishline.BufferOutput.Length));
            }
        }
    }
    else
    {
        // Direct mode: emit every incoming frame as-is.
        ComputeDataRate(incomingBufferSize);

        if (FrameProduced != null)
        {
            FrameProduced(this, new FrameProducedEventArgs(incomingBuffer, incomingBufferSize));
        }
    }

    camera.Memory.Unlock(memId);
}
// Settings dialog for a uEye camera: hosts the format and size sub-controls,
// both operating directly on the same camera instance.
public SettingsForm(uEye.Camera camera)
{
    InitializeComponent();

    m_Camera = camera;

    m_FormatControl = new FormatControl(camera);
    m_SizeControl = new SizeControl(camera);
}
private static CameraProperty ReadGain(uEye.Camera camera, Dictionary<string, CameraProperty> properties)
{
    // Master hardware gain, reported on a fixed 0-100 scale.
    int masterGain;
    camera.Gain.Hardware.Scaled.GetMaster(out masterGain);

    CameraProperty property = new CameraProperty();
    property.Identifier = "gain";
    property.Supported = true;
    property.ReadOnly = false;
    property.Type = CameraPropertyType.Float;
    property.Minimum = "0";
    property.Maximum = "100";
    property.Step = "1";
    property.Representation = CameraPropertyRepresentation.LinearSlider;
    property.CurrentValue = masterGain.ToString(CultureInfo.InvariantCulture);

    // Some cameras support sensor-level auto features; those take priority
    // over the software-level implementation.
    bool sensorAutoSupported;
    camera.AutoFeatures.Sensor.Gain.GetSupported(out sensorAutoSupported);

    if (sensorAutoSupported)
    {
        property.CanBeAutomatic = true;
        property.AutomaticIdentifier = "sensor";

        bool enabled;
        camera.AutoFeatures.Sensor.Gain.GetEnable(out enabled);
        property.Automatic = enabled;
    }
    else
    {
        bool softwareAutoSupported;
        camera.AutoFeatures.Software.Gain.GetSupported(out softwareAutoSupported);

        if (softwareAutoSupported)
        {
            property.CanBeAutomatic = true;
            property.AutomaticIdentifier = "software";

            bool enabled;
            camera.AutoFeatures.Software.Gain.GetEnable(out enabled);
            property.Automatic = enabled;
        }
        else
        {
            log.DebugFormat("Auto-gain is not supported by the camera.");
        }
    }

    if (properties != null)
    {
        properties.Add(property.Identifier, property);
    }

    return property;
}
private void onFrameEvent(object sender, EventArgs e)
{
    // Render the active image buffer into the display window, scaled to fit.
    uEye.Camera cam = sender as uEye.Camera;

    Int32 activeMemId;
    cam.Memory.GetActive(out activeMemId);
    cam.Display.Render(activeMemId, displayHandle, uEye.Defines.DisplayRenderMode.FitToWindow);
}
private void camera_EventFrame(object sender, EventArgs e)
{
    // Frame-ready handler: grab one frame, convert it to an RGB24 bitmap stored
    // in `image`, refresh the image descriptor, then signal waitHandle so the
    // waiting caller can proceed. The wait handle is set on every exit path.
    uEye.Camera camera = sender as uEye.Camera;
    if (camera == null || !camera.IsOpened)
    {
        waitHandle.Set();
        return;
    }

    // Only DIB display mode exposes the buffer as a bitmap.
    uEye.Defines.DisplayMode mode;
    camera.Display.Mode.Get(out mode);
    if (mode != uEye.Defines.DisplayMode.DiB)
    {
        waitHandle.Set();
        return;
    }

    Int32 s32MemID;
    camera.Memory.GetActive(out s32MemID);
    camera.Memory.Lock(s32MemID);

    Bitmap bitmap;
    camera.Memory.ToBitmap(s32MemID, out bitmap);

    if (bitmap != null)
    {
        // Replace the previously captured frame, if any.
        if (image != null)
        {
            image.Dispose();
        }

        // Force output into an RGB24 bitmap.
        image = new Bitmap(bitmap.Width, bitmap.Height, PixelFormat.Format24bppRgb);
        Rectangle rect = new Rectangle(0, 0, image.Width, image.Height);
        using (Graphics g = Graphics.FromImage(image))
        {
            g.DrawImage(bitmap, Point.Empty);
        }

        // Describe the converted frame for downstream consumers.
        int bufferSize = ImageFormatHelper.ComputeBufferSize(image.Width, image.Height, Kinovea.Services.ImageFormat.RGB24);
        imageDescriptor = new ImageDescriptor(Kinovea.Services.ImageFormat.RGB24, image.Width, image.Height, true, bufferSize);

        bitmap.Dispose();
    }

    camera.Memory.Unlock(s32MemID);
    waitHandle.Set();
}
// Persist the camera's current parameter set to the given file.
// Failures are logged rather than propagated.
public static void Save(uEye.Camera camera, string filename)
{
    try
    {
        camera.Parameter.Save(filename);
    }
    catch (Exception e)
    {
        log.Error(string.Format("Error while saving camera parameter set at {0}.", filename), e);
    }
}
// IDS frame event (added): show the active buffer in the display identified
// by u32DisplayID and advance the frame counter.
private void onFrameEvent1(object sender, EventArgs e)
{
    uEye.Camera Camera = sender as uEye.Camera;

    Int32 s32MemID;
    Camera.Memory.GetActive(out s32MemID);

    // Display the image scaled to fit the target window.
    Camera.Display.DisplayImage.Set(s32MemID, u32DisplayID, uEye.Defines.DisplayRenderMode.FitToWindow);

    // Frame counter — presumably consumed elsewhere; TODO confirm.
    ++id;
}
// FUNCTION: InitCamera: Initialize the Camera
//
// Opens the uEye camera, selects a color mode from the sensor type, allocates
// image memory, applies parameters and starts live capture. Any failure shows
// a message box and terminates the process.
private void InitCamera()
{
    cam = new uEye.Camera();
    uEye.Defines.Status statusRet = 0;

    // NOTE(review): ImageFormat.Set(7) is called before cam.Init(); the driver
    // handle does not exist yet at this point — confirm this call has any effect,
    // and what format the magic value 7 selects.
    cam.Size.ImageFormat.Set(7);

    // Open Camera
    statusRet = cam.Init();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Camera initializing failed");
        Environment.Exit(-1);
    }

    // Set Colour Mode: color (Bayer) sensors stream BGR8; monochrome sensors
    // are set to Mono8 but then rejected outright with a message box.
    uEye.Types.SensorInfo SensorInfo;
    statusRet = cam.Information.GetSensorInfo(out SensorInfo);
    if (SensorInfo.SensorColorMode == uEye.Defines.SensorColorMode.Bayer)
    {
        statusRet = cam.PixelFormat.Set(uEye.Defines.ColorMode.BGR8Packed);
    }
    else
    {
        statusRet = cam.PixelFormat.Set(uEye.Defines.ColorMode.Mono8);
        MessageBox.Show("Black and white?!");
        Environment.Exit(-1);
    }

    // Allocate Memory
    statusRet = cam.Memory.Allocate();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Allocate Memory failed");
        Environment.Exit(-1);
    }

    SetCameraParameters();

    // Start Live Video
    statusRet = cam.Acquisition.Capture();
    if (statusRet != uEye.Defines.Status.Success)
    {
        MessageBox.Show("Start Live Video failed");
        Environment.Exit(-1);
    }

    // Connect video to window
    cam.EventFrame += onFrameEvent;
}
/// <summary>
/// Shut down the camera and release the display handle.
/// </summary>
/// <returns>Always true, including when no camera was open.</returns>
public override bool ExitCamera()
{
    if (camera != null)
    {
        camera.Exit();
        camera = null;
        displayHandle = IntPtr.Zero;
    }

    return true;
}
public override bool Open()
{
    // Open the camera, load its stored parameters, wire the frame event and
    // start continuous acquisition. Throws APXExeception on driver failures;
    // returns false only when memory initialization fails.
    _camera = new uEye.Camera();
    uEye.Defines.Status status = _camera.Init(Convert.ToInt32(CameraID));

    if (status != uEye.Defines.Status.Success)
    {
        // A camera reporting STARTER_FW_UPLOAD_NEEDED can be recovered by
        // re-initializing with the firmware-upload flag (0x10000).
        // (The original code threw unconditionally before this branch, making
        // the recovery path unreachable.)
        if (status != uEye.Defines.Status.STARTER_FW_UPLOAD_NEEDED)
        {
            throw new APXExeception(String.Format(
                      "{0}:{1}", Name, status.ToString()));
        }

        if (this._camera.Init(0x10000) != uEye.Defines.Status.Success)
        {
            throw new APXExeception(String.Format(
                      "{0}:{1}", Name, status.ToString()));
        }
    }

    this._camera.Parameter.Load();

    if (!this.InitMemory())
    {
        return false;
    }

    this._camera.Memory.SetActive(this._menIDArray[this._curMemIndex]);
    this._camera.EventFrame += new EventHandler(this.onFrameEvent);

    if (this._camera.Acquisition.Capture() != uEye.Defines.Status.Success)
    {
        throw new APXExeception(String.Format("{0} Continous capture fail..", Name));
    }

    _isAcquisition = true;

    // Capture the frame geometry, then release the temporary bitmap
    // (it was leaked before).
    Bitmap bitmap = null;
    this._camera.Memory.ToBitmap(this._menIDArray[this._curMemIndex], out bitmap);
    VideoSize = bitmap.Size;
    this._pixelFormat = bitmap.PixelFormat;
    bitmap.Dispose();

    IsOpen = true;
    RaiseOnOpenEvent();
    return true;
}