/// <summary>
/// Attempt to preallocate the circular buffer for as many images as possible that fits in available memory.
/// </summary>
/// <param name="imageDescriptor">Format of the incoming frames; each slot is sized from its BufferSize.</param>
/// <param name="availableMemory">Memory budget in bytes for the whole circular buffer.</param>
/// <returns>true if the buffer is (already or now) allocated; otherwise the value of `allocated` after the attempt.</returns>
public bool AllocateBuffers(ImageDescriptor imageDescriptor, long availableMemory)
{
    // Fast path: current allocation already matches the requested format and budget.
    if (!NeedsReallocation(imageDescriptor, availableMemory))
    {
        return(true);
    }

    // Number of slots that fit in the budget, at one frame per slot.
    int targetCapacity = (int)(availableMemory / imageDescriptor.BufferSize);

    // Memory pressure: the budget does not even cover the normal minimum number of slots.
    bool memoryPressure = minCapacity * imageDescriptor.BufferSize > availableMemory;

    if (memoryPressure)
    {
        // The user explicitly asked to not use enough memory. We try to honor the request by lowering the min levels.
        // This may result in thread cross talks.
        // NOTE(review): minCapacity ends up at targetCapacity + 1 here, so the Math.Max below
        // bumps targetCapacity one slot past the strict budget — presumably intentional to keep
        // producer and consumer apart; confirm against the ring-buffer consumer logic.
        reserveCapacity = Math.Max(targetCapacity, 2);
        minCapacity = Math.Max(targetCapacity + 1, 3);
        targetCapacity = Math.Max(targetCapacity, minCapacity);
    }
    else
    {
        // Comfortable budget: restore the default watermarks.
        reserveCapacity = 8;
        minCapacity = 12;
        targetCapacity = Math.Max(targetCapacity, minCapacity);
    }

    bool compatible = ImageDescriptor.Compatible(this.imageDescriptor, imageDescriptor);

    // Same frame format and we already own at least as many slots as needed: shrink in place, no allocation.
    if (compatible && targetCapacity <= fullCapacity)
    {
        FreeSome(targetCapacity);
        this.fullCapacity = frames.Count;
        this.availableMemory = availableMemory;
        return(true);
    }

    // Incompatible format: existing slots are useless, throw everything away.
    if (!compatible)
    {
        FreeAll();
    }

    stopwatch.Restart();
    frames.Capacity = targetCapacity;
    log.DebugFormat("Allocating {0} frames.", targetCapacity - fullCapacity);

    int bufferSize = ImageFormatHelper.ComputeBufferSize(imageDescriptor.Width, imageDescriptor.Height, imageDescriptor.Format);

    try
    {
        // Only allocate the missing slots; compatible existing slots (indices < fullCapacity) are kept.
        for (int i = fullCapacity; i < targetCapacity; i++)
        {
            Frame slot = new Frame(bufferSize);
            frames.Add(slot);
        }
    }
    catch (Exception e)
    {
        // Best effort (e.g. OutOfMemory): keep whatever slots were successfully
        // allocated; the frames.Count check below decides whether we are usable.
        log.ErrorFormat("Error while allocating delay buffer.");
        log.Error(e);
    }

    if (frames.Count > 0)
    {
        // The following variables are used during frame -> bitmap conversion.
        // NOTE(review): pitch assumes 3 bytes per pixel (RGB24) — TODO confirm for other formats.
        this.rect = new Rectangle(0, 0, imageDescriptor.Width, imageDescriptor.Height);
        this.pitch = imageDescriptor.Width * 3;
        this.tempJpeg = new byte[bufferSize];

        this.allocated = true;
        this.fullCapacity = frames.Count;
        this.availableMemory = availableMemory;
        this.imageDescriptor = imageDescriptor;

        // Better do the GC now to push everything to gen2 and LOH rather than taking a hit later during normal streaming operations.
        GC.Collect(2);
    }

    log.DebugFormat("Allocated delay buffer: {0} ms. Total: {1} frames.", stopwatch.ElapsedMilliseconds, fullCapacity);

    return(allocated);
}
/// <summary>
/// Configure device and report frame format that will be used during streaming.
/// This method must return a proper ImageDescriptor so we can pre-allocate buffers.
/// </summary>
/// <returns>The descriptor of outgoing frames, or ImageDescriptor.Invalid if the device
/// cannot be opened or its frame geometry cannot be read.</returns>
public ImageDescriptor Prepare()
{
    Open();
    if (!baumerProvider.IsOpen)
    {
        return(ImageDescriptor.Invalid);
    }

    firstOpen = false;
    Device device = baumerProvider.Device;

    // Get the configured framerate for recording support.
    resultingFramerate = BaumerHelper.GetResultingFramerate(device);

    // We need Width, Height and PixelFormat to be readable to build a descriptor at all.
    bool hasWidth = BaumerHelper.NodeIsReadable(device, "Width");
    bool hasHeight = BaumerHelper.NodeIsReadable(device, "Height");
    bool hasPixelFormat = BaumerHelper.NodeIsReadable(device, "PixelFormat");
    bool canComputeImageDescriptor = hasWidth && hasHeight && hasPixelFormat;

    if (!canComputeImageDescriptor)
    {
        return(ImageDescriptor.Invalid);
    }

    int width = BaumerHelper.GetInteger(device, "Width");
    int height = BaumerHelper.GetInteger(device, "Height");
    string pixelFormat = BaumerHelper.GetString(device, "PixelFormat");

    // We output in three possible formats: Y800, RGB24 or JPEG.
    // The output format depends on the stream format and the options.
    // Mono or raw -> Y800, Otherwise -> RGB24.

    // Camera-side JPEG compression.
    // Only honored when the camera supports JPEG AND the current stream format can be compressed.
    compression = specific.Compression;
    if (BaumerHelper.SupportsJPEG(device))
    {
        if (BaumerHelper.FormatCanCompress(device, pixelFormat))
        {
            BaumerHelper.SetJPEG(device, compression);
        }
        else
        {
            BaumerHelper.SetJPEG(device, false);
            compression = false;
        }
    }
    else
    {
        compression = false;
    }

    // Debayering.
    // Done by the Baumer image processor, not the camera; falls back to disabled when
    // the processor does not expose a "DemosaicingMethod" node.
    demosaicing = specific.Demosaicing;
    if (demosaicing)
    {
        if (imgProcessor.NodeList.GetNodePresent("DemosaicingMethod"))
        {
            // Options: NearestNeighbor, Bilinear3x3, Baumer5x5
            imgProcessor.NodeList["DemosaicingMethod"].Value = "NearestNeighbor";
        }
        else
        {
            demosaicing = false;
        }
    }

    imageFormat = BaumerHelper.ConvertImageFormat(pixelFormat, compression, demosaicing);

    // Internal receive buffer, sized for the full sensor height (before any finishline crop).
    frameBufferSize = ImageFormatHelper.ComputeBufferSize(width, height, imageFormat);
    frameBuffer = new byte[frameBufferSize];

    finishline.Prepare(width, height, imageFormat, resultingFramerate);
    if (finishline.Enabled)
    {
        // Finishline mode changes the outgoing geometry/framerate but not the receive buffer above.
        height = finishline.Height;
        resultingFramerate = finishline.ResultingFramerate;
    }

    // Outgoing buffer size is computed with the possibly-cropped height.
    int outgoingBufferSize = ImageFormatHelper.ComputeBufferSize(width, height, imageFormat);
    bool topDown = true;

    return(new ImageDescriptor(imageFormat, width, height, topDown, outgoingBufferSize));
}
/// <summary>
/// Handler for the device's new-frame event: copies the incoming RGB24 buffer into a
/// fresh Bitmap, publishes the matching image descriptor, and signals the waiter.
/// </summary>
private void device_NewFrameBuffer(object sender, NewFrameBufferEventArgs e)
{
    // As we didn't specify any media type, the buffer is guaranteed to come back in RGB24.
    int width = e.Width;
    int height = e.Height;

    image = new Bitmap(width, height, PixelFormat.Format24bppRgb);
    BitmapHelper.FillFromRGB24(image, new Rectangle(0, 0, width, height), false, e.Buffer);

    int bufferSize = ImageFormatHelper.ComputeBufferSize(width, height, Video.ImageFormat.RGB24);
    imageDescriptor = new ImageDescriptor(Video.ImageFormat.RGB24, width, height, true, bufferSize);

    // Wake up the thread waiting for this frame.
    waitHandle.Set();
}
/// <summary>
/// Configure device and report frame format that will be used during streaming.
/// This method must return a proper ImageDescriptor so we can pre-allocate buffers.
/// </summary>
/// <returns>The descriptor of outgoing frames, or ImageDescriptor.Invalid if the device
/// cannot be opened, its frame geometry cannot be read, or its pixel type is unknown.</returns>
public ImageDescriptor Prepare()
{
    Open();
    if (deviceHandle == null || !deviceHandle.IsValid)
    {
        return(ImageDescriptor.Invalid);
    }

    firstOpen = false;

    // Get the configured framerate for recording support.
    resultingFramerate = PylonHelper.GetResultingFramerate(deviceHandle);

    SpecificInfo specific = summary.Specific as SpecificInfo;
    string streamFormatSymbol = specific.StreamFormat;

    // We need Width, Height and PixelFormat to be readable to build a descriptor at all.
    bool hasWidth = Pylon.DeviceFeatureIsReadable(deviceHandle, "Width");
    bool hasHeight = Pylon.DeviceFeatureIsReadable(deviceHandle, "Height");
    bool hasPixelFormat = Pylon.DeviceFeatureIsReadable(deviceHandle, "PixelFormat");
    bool canComputeImageDescriptor = hasWidth && hasHeight && hasPixelFormat;

    if (!canComputeImageDescriptor)
    {
        return(ImageDescriptor.Invalid);
    }

    int width = (int)Pylon.DeviceGetIntegerFeature(deviceHandle, "Width");
    int height = (int)Pylon.DeviceGetIntegerFeature(deviceHandle, "Height");
    string pixelFormat = Pylon.DeviceFeatureToString(deviceHandle, "PixelFormat");

    EPylonPixelType pixelType = Pylon.PixelTypeFromString(pixelFormat);
    if (pixelType == EPylonPixelType.PixelType_Undefined)
    {
        return(ImageDescriptor.Invalid);
    }

    // Note: the image provider will perform the Bayer conversion itself and only output two formats.
    // - Y800 for anything monochrome.
    // - RGB32 for anything color.
    imageProvider.SetDebayering(specific.Bayer8Conversion);

    // Color output when: the pixel type is not mono, or it is a Bayer pattern that will be
    // debayered to color (non-8-bit Bayer always; 8-bit Bayer only if the user chose color conversion).
    bool isBayer = Pylon.IsBayer(pixelType);
    bool isBayer8 = PylonHelper.IsBayer8(pixelType);
    bool bayerColor = (isBayer && !isBayer8) || (isBayer8 && specific.Bayer8Conversion == Bayer8Conversion.Color);
    bool color = !Pylon.IsMono(pixelType) || bayerColor;
    ImageFormat format = color ? ImageFormat.RGB32 : ImageFormat.Y800;

    finishline.Prepare(width, height, format, resultingFramerate);
    if (finishline.Enabled)
    {
        // Finishline mode changes the outgoing geometry/framerate.
        height = finishline.Height;
        resultingFramerate = finishline.ResultingFramerate;
    }

    // Buffer size is computed with the possibly-cropped height.
    int bufferSize = ImageFormatHelper.ComputeBufferSize(width, height, format);
    bool topDown = true;

    return(new ImageDescriptor(format, width, height, topDown, bufferSize));
}