/// <summary>
/// Gets the number of bytes used to store a single pixel of the given bit depth.
/// </summary>
/// <param name="depth">Pixel bit depth.</param>
/// <returns>Bytes per pixel for the given depth.</returns>
/// <exception cref="NotSupportedException">Thrown when the enum value is not supported.</exception>
public static int BytesPerPixel(BitDepth depth)
{
    // BUG FIX: the original returned only for U8 and fell through to the exception
    // for every other listed depth; map each supported depth to its storage size.
    switch (depth)
    {
        case BitDepth.U8:
        case BitDepth.S8:
            return 1;
        case BitDepth.U16:
        case BitDepth.S16:
            return 2;
        case BitDepth.S32:
        case BitDepth.F32:
            return 4;
        case BitDepth.F64:
            return 8;
        default:
            throw new NotSupportedException("enum value is not supported");
    }
}
/// <summary>
/// Initializes an instance of the <see cref="GrayscalePixelDataS16"/> class.
/// </summary>
/// <param name="width">Pixel data width.</param>
/// <param name="height">Pixel data height.</param>
/// <param name="bitDepth">Bit depth of pixel data.</param>
/// <param name="data">Byte data buffer.</param>
public GrayscalePixelDataS16(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    _bits = bitDepth;
    Width = width;
    Height = height;

    // Reinterpret the raw byte buffer as 16-bit signed samples.
    var shortData = ByteConverter.ToArray<short>(data, bitDepth.BitsAllocated);

    if (bitDepth.BitsStored != 16)
    {
        // Normally, HighBit == BitsStored-1, and thus shiftLeft == shiftRight, and the two
        // shifts in the loop below just replaces the top shift bits by the sign bit.
        // Separating shiftLeft from shiftRight handles exotic cases where low-order bits
        // should also be discarded.
        int shiftLeft = bitDepth.BitsAllocated - bitDepth.HighBit - 1;
        int shiftRight = bitDepth.BitsAllocated - bitDepth.BitsStored;
        // Rows are independent, so sign-extension runs per row in parallel.
        Parallel.For(0, Height, y =>
        {
            for (int i = Width * y, e = i + Width; i < e; i++)
            {
                // Remove masked high and low bits by shifting them out of the data type,
                // getting the sign correct using arithmetic (sign-extending) right shift.
                var d = (short)(shortData[i] << shiftLeft);
                shortData[i] = (short)(d >> shiftRight);
            }
        });
    }

    Data = shortData;
}
/// <summary>
/// Builds the luma (Y) and chroma (UV) dequantization tables, honoring per-segment
/// quantizer indices when segmentation is enabled.
/// </summary>
public void SetupSegmentationDequant()
{
    const BitDepth bitDepth = BitDepth.Bits8; // TODO: Configurable

    // With segmentation disabled only segment 0 is consulted; the remaining
    // entries are don't cares, so a single iteration suffices.
    int segmentCount = Seg.Enabled ? Constants.MaxSegments : 1;
    for (int seg = 0; seg < segmentCount; ++seg)
    {
        int qIndex = Seg.Enabled ? QuantCommon.GetQIndex(ref Seg, seg, BaseQindex) : BaseQindex;
        YDequant[seg][0] = QuantCommon.DcQuant(qIndex, YDcDeltaQ, bitDepth);
        YDequant[seg][1] = QuantCommon.AcQuant(qIndex, 0, bitDepth);
        UvDequant[seg][0] = QuantCommon.DcQuant(qIndex, UvDcDeltaQ, bitDepth);
        UvDequant[seg][1] = QuantCommon.AcQuant(qIndex, UvAcDeltaQ, bitDepth);
    }
}
/// <summary>
/// Initializes an instance of the <see cref="GrayscalePixelDataS32"/> class.
/// </summary>
/// <param name="width">Pixel data width.</param>
/// <param name="height">Pixel data height.</param>
/// <param name="bitDepth">Bit depth of pixel data.</param>
/// <param name="data">Byte data buffer.</param>
public GrayscalePixelDataS32(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    Width = width;
    Height = height;

    // Reinterpret the raw byte buffer as 32-bit signed samples.
    var intData = ByteConverter.ToArray<int>(data, bitDepth.BitsAllocated);

    // Normally, HighBit == BitsStored-1, and thus shiftLeft == shiftRight, and the two
    // shifts in the loop below just replaces the top shift bits by the sign bit.
    // Separating shiftLeft from shiftRight handles exotic cases where low-order bits
    // should also be discarded.
    int shiftLeft = bitDepth.BitsAllocated - bitDepth.HighBit - 1;
    int shiftRight = bitDepth.BitsAllocated - bitDepth.BitsStored;
    // NET35 lacks Parallel.For, so that target falls back to a sequential row loop.
#if NET35
    for (var y = 0; y < Height; ++y)
#else
    Parallel.For(0, Height, y =>
#endif
    {
        for (int i = Width * y, e = i + Width; i < e; i++)
        {
            // Remove masked high and low bits by shifting them out of the data type,
            // getting the sign correct using arithmetic (sign-extending) right shift.
            var d = intData[i] << shiftLeft;
            intData[i] = d >> shiftRight;
        }
    }
#if !NET35
    );
#endif
    Data = intData;
}
/// <summary>
/// Export an audio file to be played back channel after channel.
/// </summary>
/// <param name="path">Output file name</param>
/// <param name="data">Samples to write in the file</param>
/// <param name="sampleRate">Output sample rate</param>
/// <param name="bits">Output bit depth</param>
/// <param name="period">Channels separated by this many channels are played simultaneously</param>
public static void WriteOffset(string path, float[][] data, int sampleRate, BitDepth bits, int period = -1)
{
    // using guarantees the file handle is released even if WriteOffset throws;
    // the original only disposed on the success path (resource leak on error).
    using (RIFFWaveWriter writer = new RIFFWaveWriter(path, data.Length, data[0].LongLength, sampleRate, bits))
    {
        writer.WriteOffset(data, period);
    }
}
/// <summary>
/// Applies all global melody generation settings in one call.
/// </summary>
/// <param name="samplerate">Output sample rate.</param>
/// <param name="bitness">Output bit depth.</param>
/// <param name="channels">Output channel count.</param>
/// <param name="lengthLimit">Maximum generated length.</param>
public static void Config(uint samplerate, BitDepth bitness, ushort channels, int lengthLimit)
{
    MelodyGeneration.LengthLimit = lengthLimit;
    MelodyGeneration.Channels = channels;
    MelodyGeneration.Bitness = bitness;
    MelodyGeneration.Samplerate = samplerate;
}
// Initializes 16-bit signed grayscale pixel data from a raw byte buffer,
// reconstructing the sign of samples stored with fewer than 16 bits.
public GrayscalePixelDataS16(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    _bits = bitDepth;
    _width = width;
    _height = height;

    var shortData = Dicom.IO.ByteConverter.ToArray<short>(data);

    if (bitDepth.BitsStored != 16)
    {
        // Bit at the HighBit position carries the stored sample's sign.
        int sign = 1 << bitDepth.HighBit;
        // Mask keeping only the BitsStored low-order bits.
        int mask = (UInt16.MaxValue >> (bitDepth.BitsAllocated - bitDepth.BitsStored));
        // NOTE(review): the "+ 1" in the negative branch looks like it may offset
        // results by one (-((-d) & mask) already yields the negative value for
        // two's-complement stored samples); verify against reference images.
        Parallel.For(0, shortData.Length, (int i) =>
        {
            short d = shortData[i];
            if ((d & sign) != 0)
            {
                shortData[i] = (short)-(((-d) & mask) + 1);
            }
            else
            {
                shortData[i] = (short)(d & mask);
            }
        });
    }

    // Persist the converted samples through a memory-mapped buffer.
    _buffer = new MappedFileBuffer(Dicom.IO.ByteConverter.ToByteBuffer<short>(shortData).Data);
}
/// <summary>
/// Creates a palette sized for the given bit depth, with every entry initialized to black.
/// </summary>
/// <param name="bitDepth">Bit depth that determines the number of palette entries.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when the bit depth is not a paletted depth.</exception>
public Palette(BitDepth bitDepth)
{
    this.BitDepth = bitDepth;
    switch (this.BitDepth)
    {
        case BitDepth.OneBpp:
            this.colors = new Color[2];
            break;
        case BitDepth.TwoBpp:
            this.colors = new Color[4];
            break;
        case BitDepth.FourBpp:
            this.colors = new Color[16];
            break;
        case BitDepth.EightBpp:
            this.colors = new Color[256];
            break;
        default:
            // Throw a specific exception type instead of the base Exception so callers
            // can distinguish a bad argument from an unexpected failure.
            throw new ArgumentOutOfRangeException(nameof(bitDepth), String.Format("Invalid bit depth {0}.", bitDepth.ToString()));
    }

    this.colors.Length.Times(i => this.colors[i] = Color.Black); // oh, this is nice.

    if (!hasAssignedPalettes)
    {
        this.AssignPalettes();
    }
}
/// <summary>
/// Wraps a stream as a WAVE file, building the format, data and header chunks.
/// </summary>
/// <param name="sampleRate">Samples per second.</param>
/// <param name="bitDepth">Bits per sample.</param>
/// <param name="channels">Channel count.</param>
/// <param name="file">Underlying stream.</param>
public WaveFile(uint sampleRate, BitDepth bitDepth, ushort channels, Stream file)
{
    const int riffHeaderSize = 12; // the RIFF header occupies the first 12 bytes
    _file = file;
    _format = new FormatChunk(sampleRate, channels, (ushort)bitDepth, file, riffHeaderSize);
    _data = new DataChunk(file, riffHeaderSize + _format.Size, _format);
    _header = new HeaderChunk(file, _format, _data, 0);
}
/// <summary>
/// Export an array of multichannel samples to an audio file.
/// </summary>
/// <param name="path">Output file name</param>
/// <param name="data">Samples to write in the file</param>
/// <param name="sampleRate">Output sample rate</param>
/// <param name="bits">Output bit depth</param>
/// <param name="channels">Output channel information</param>
public static void Write(string path, float[][] data, int sampleRate, BitDepth bits, Channel[] channels)
{
    // using guarantees the file handle is released even if Write throws;
    // the original only disposed on the success path (resource leak on error).
    using (LimitlessAudioFormatWriter writer = new LimitlessAudioFormatWriter(path, data[0].LongLength, sampleRate, bits, channels))
    {
        writer.Write(data);
    }
}
// Initializes 16-bit signed grayscale pixel data from a byte buffer, reconstructing
// the sign of samples stored with fewer than 16 bits.
public GrayscalePixelDataS16(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    _bits = bitDepth;
    _width = width;
    _height = height;
    _data = ByteBufferEnumerator<short>.Create(data).ToArray();

    if (bitDepth.BitsStored != 16)
    {
        // Bit at the HighBit position carries the stored sample's sign.
        int sign = 1 << bitDepth.HighBit;
        // Mask keeping only the BitsStored low-order bits.
        int mask = (UInt16.MaxValue >> (bitDepth.BitsAllocated - bitDepth.BitsStored));
        // NOTE(review): the "+ 1" in the negative branch looks like it may offset
        // results by one (-((-d) & mask) already yields the negative value for
        // two's-complement stored samples); verify against reference images.
        Parallel.For(0, _data.Length, (int i) =>
        {
            short d = _data[i];
            if ((d & sign) != 0)
            {
                _data[i] = (short)-(((-d) & mask) + 1);
            }
            else
            {
                _data[i] = (short)(d & mask);
            }
        });
    }
}
// Initializes 16-bit unsigned grayscale pixel data from a byte buffer, zeroing bits
// outside the stored range when fewer than 16 bits are stored.
public GrayscalePixelDataU16(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    _bits = bitDepth;
    _width = width;
    _height = height;

    var ushortData = Dicom.IO.ByteConverter.ToArray<ushort>(data);

    if (bitDepth.BitsStored != 16)
    {
        // Normally, HighBit == BitsStored-1, and thus shiftLeft == shiftRight, and the two
        // shifts in the loop below just zeroes the top shift bits.
        // Separating shiftLeft from shiftRight handles exotic cases where low-order bits
        // should also be discarded.
        int shiftLeft = bitDepth.BitsAllocated - bitDepth.HighBit - 1;
        int shiftRight = bitDepth.BitsAllocated - bitDepth.BitsStored;
        // Rows are independent, so the masking runs per row in parallel.
        Parallel.For(
            0,
            _height,
            y =>
            {
                for (int i = _width * y, e = i + _width; i < e; i++)
                {
                    // Remove masked high and low bits by shifting them out of the data type.
                    var d = (ushort)(ushortData[i] << shiftLeft);
                    ushortData[i] = (ushort)(d >> shiftRight);
                }
            });
    }

    _data = ushortData;
}
/// <summary>
/// Gets the number of bytes used to store a single pixel of the given bit depth.
/// </summary>
/// <param name="depth">Pixel bit depth.</param>
/// <returns>Bytes per pixel for the given depth.</returns>
/// <exception cref="NotSupportedException">Thrown when the enum value is not supported.</exception>
public static int BytesPerPixel(BitDepth depth)
{
    // BUG FIX: the original returned only for U8 and fell through to the exception
    // for every other listed depth; map each supported depth to its storage size.
    switch (depth)
    {
        case BitDepth.U8:
        case BitDepth.S8:
            return(1);
        case BitDepth.U16:
        case BitDepth.S16:
            return(2);
        case BitDepth.S32:
        case BitDepth.F32:
            return(4);
        case BitDepth.F64:
            return(8);
        default:
            throw new NotSupportedException("enum value is not supported");
    }
}
/// <summary>
/// Initializes an instance of the <see cref="GrayscalePixelDataU32"/> class.
/// </summary>
/// <param name="width">Pixel data width.</param>
/// <param name="height">Pixel data height.</param>
/// <param name="bitDepth">Bit depth of pixel data.</param>
/// <param name="data">Byte data buffer.</param>
public GrayscalePixelDataU32(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    Width = width;
    Height = height;

    // Reinterpret the raw byte buffer as 32-bit unsigned samples.
    var uintData = ByteConverter.ToArray<uint>(data, bitDepth.BitsAllocated);

    if (bitDepth.BitsStored != 32)
    {
        // Normally, HighBit == BitsStored-1, and thus shiftLeft == shiftRight, and the two
        // shifts in the loop below just zeroes the top shift bits.
        // Separating shiftLeft from shiftRight handles exotic cases where low-order bits
        // should also be discarded.
        int shiftLeft = bitDepth.BitsAllocated - bitDepth.HighBit - 1;
        int shiftRight = bitDepth.BitsAllocated - bitDepth.BitsStored;
        // Rows are independent, so the masking runs per row in parallel.
        Parallel.For(0, Height, y =>
        {
            for (int i = Width * y, e = i + Width; i < e; i++)
            {
                // Remove masked high and low bits by shifting them out of the data type.
                var d = uintData[i] << shiftLeft;
                uintData[i] = d >> shiftRight;
            }
        });
    }

    Data = uintData;
}
/// <summary>
/// Starts webcam capture on the named device and, if no proxy exists yet, creates the
/// hidden proxy object that forwards frames at the requested bit depth.
/// </summary>
/// <param name="deviceName">Camera device name.</param>
/// <param name="requestedWidth">Requested capture width.</param>
/// <param name="requestedHeight">Requested capture height.</param>
/// <param name="requestedFps">Requested capture frame rate.</param>
/// <param name="targetBitDepth">Bit depth the proxy converts frames to.</param>
public static void Init(string deviceName, int requestedWidth, int requestedHeight, int requestedFps, BitDepth targetBitDepth)
{
    WebCamTexture = new WebCamTexture(deviceName, requestedWidth, requestedHeight, requestedFps);
    WebCamTexture.Play();

    if (!WebCamTextureProxy.instance)
    {
        // Broken out of the original single call chain for readability.
        var host = new GameObject("_webcamtextureproxy") { hideFlags = HideFlags.HideAndDontSave };
        var proxy = host.AddComponent<WebCamTextureProxy>();
        proxy.SetTargetTexture(WebCamTexture).SetTargetDepth(targetBitDepth).StartCapture();
    }
}
/// <summary>
/// Converts a RIFF WAVE bitstream with header to raw samples.
/// </summary>
public RIFFWaveDecoder(BinaryReader reader)
{
    // RIFF header
    if (reader.ReadInt32() != RIFFWave.syncWord1)
    {
        throw new SyncException();
    }
    stream = reader.BaseStream;
    stream.Position += 4; // File length

    // Format header
    if (reader.ReadInt64() != RIFFWave.syncWord2)
    {
        throw new SyncException();
    }
    stream.Position += 4; // Format header length
    short sampleFormat = reader.ReadInt16(); // 1 = int, 3 = float, -2 = WAVE EX
    channelCount = reader.ReadInt16();
    sampleRate = reader.ReadInt32();
    stream.Position += 4; // Bytes/sec
    stream.Position += 2; // Block size in bytes
    short bitDepth = reader.ReadInt16();
    if (sampleFormat == -2)
    {
        // WAVEFORMATEXTENSIBLE: the actual sample format lives in the sub format GUID.
        // Extension size (22) - 2 bytes, valid bits per sample - 2 bytes, channel mask - 4 bytes
        stream.Position += 8;
        sampleFormat = reader.ReadInt16();
        stream.Position += 15; // Skip the rest of the sub format GUID
    }
    if (sampleFormat == 1)
    {
        Bits = bitDepth switch
        {
            8 => BitDepth.Int8,
            16 => BitDepth.Int16,
            24 => BitDepth.Int24,
            _ => throw new IOException($"Unsupported bit depth for signed little endian integer: {bitDepth}.")
        };
    }
    else if (sampleFormat == 3 && bitDepth == 32)
    {
        Bits = BitDepth.Float32;
    }
    else
    {
        throw new IOException($"Unsupported bit depth ({bitDepth}) for sample format {sampleFormat}.");
    }

    // Data header: scan byte-by-byte until the big-endian "data" marker is found.
    int header = 0;
    do
    {
        header = (header << 8) | reader.ReadByte();
    }
    while (header != RIFFWave.syncWord3BE && stream.Position < stream.Length);
    // Data chunk size is in bytes; convert to samples per channel.
    length = reader.ReadUInt32() * 8L / (long)Bits / ChannelCount;
    dataStart = stream.Position;
    this.reader = BlockBuffer<byte>.Create(reader, FormatConsts.blockSize);
}
/// <summary>
/// Reads the raw bytes of a single sample from the underlying file.
/// </summary>
/// <param name="index">Zero-based sample index.</param>
/// <param name="bd">Bit depth of each sample; determines the sample size in bytes.</param>
/// <returns>The bytes of the requested sample.</returns>
public byte[] GetSample(uint index, BitDepth bd)
{
    // Hoist the per-sample byte size instead of computing it twice,
    // and drop the redundant null initialization.
    int sampleBytes = (byte)bd / 8;
    byte[] result = new byte[sampleBytes];
    _file.Position = _dataOffset + index * sampleBytes;
    _file.Read(result, 0, result.Length);
    return result;
}
/// <summary>
/// Wraps an existing unmanaged pixel buffer without copying it.
/// </summary>
/// <param name="p">Pointer to the existing pixel buffer.</param>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <param name="channels">Channels per pixel.</param>
/// <param name="depth">Bit depth of each channel.</param>
public CameraImage(IntPtr p, int width, int height, int channels, BitDepth depth)
{
    raw = (void *)p;
    Depth = depth;
    Channels = channels;
    Height = height;
    Width = width;
}
/// <summary>
/// Allocates a fresh unmanaged pixel buffer for an image with the given dimensions.
/// </summary>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <param name="channels">Channels per pixel.</param>
/// <param name="depth">Bit depth of each channel.</param>
protected CameraImage(int width, int height, int channels, BitDepth depth)
{
    Width = width;
    Height = height;
    Channels = channels;
    Depth = depth;
    // Size is presumably computed from the properties set above — confirm; the
    // allocation must therefore come last.
    raw = (void *)Marshal.AllocHGlobal(Size);
}
/// <summary>
/// Abstract audio file writer.
/// </summary>
/// <param name="writer">File writer object</param>
/// <param name="channelCount">Output channel count</param>
/// <param name="length">Output length in samples per channel</param>
/// <param name="sampleRate">Output sample rate</param>
/// <param name="bits">Output bit depth</param>
public AudioWriter(BinaryWriter writer, int channelCount, long length, int sampleRate, BitDepth bits)
{
    Bits = bits;
    SampleRate = sampleRate;
    Length = length;
    ChannelCount = channelCount;
    this.writer = writer;
}
/// <summary>
/// Builds an extensible WAVE format descriptor (WAVEFORMATEXTENSIBLE) for the given
/// sample rate, bit depth, speaker layout and sub-format GUID.
/// </summary>
/// <param name="rate">Sample rate.</param>
/// <param name="bits">Bits per sample; also reported as the valid bits per sample.</param>
/// <param name="channelMask">Speaker configuration bit mask.</param>
/// <param name="subFormat">Sub-format GUID identifying the sample encoding.</param>
public WaveFormatExtensible(SampleRate rate, BitDepth bits, SpeakerConfiguration channelMask, Guid subFormat)
    : base(rate, bits, channelMask, WaveFormatEncoding.Extensible, Marshal.SizeOf(typeof(WaveFormatExtensible)))
{
    // Every container bit is reported as valid (wValidBitsPerSample == bits per sample).
    wValidBitsPerSample = (short)bits;
    dwChannelMask = (int)channelMask;
    this.subFormat = subFormat;
}
/// <summary>
/// Creates a new instance of PngHeader with the default encoding settings:
/// 8-bit truecolor with alpha, non-interlaced.
/// </summary>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
public PngHeader(int width, int height)
{
    _interlaceMethod = InterlaceMethod.NoInterlacing;
    _colortype = ColorType.TruecolorAlpha;
    _bitdepth = BitDepth.Eight;
    _height = height;
    _width = width;
}
/// <summary>
/// Converts a RIFF WAVE bitstream to raw samples.
/// </summary>
/// <param name="reader">Source of the raw bitstream.</param>
/// <param name="channelCount">Number of interleaved channels.</param>
/// <param name="length">Stream length in samples per channel.</param>
/// <param name="sampleRate">Samples per second.</param>
/// <param name="bits">Bit depth of the stored samples.</param>
public RIFFWaveDecoder(BlockBuffer <byte> reader, int channelCount, long length, int sampleRate, BitDepth bits) :
    base(reader)
{
    Bits = bits;
    this.sampleRate = sampleRate;
    this.length = length;
    this.channelCount = channelCount;
}
/// <summary>
/// Compresses and writes 8bpp tile data to the output.
/// </summary>
/// <param name="tiles">Tiles to encode; must not be null.</param>
public void WriteCompressedTiles8(Tile[] tiles)
{
    if (tiles is null)
    {
        throw new ArgumentNullException(nameof(tiles));
    }

    byte[] encoded = BitDepth.Encode8(tiles);
    WriteCompressedBytes(encoded);
}
/// <summary>
/// Writes uncompressed 8bpp tileset data to the output.
/// </summary>
/// <param name="tiles">Tileset to encode; must not be null.</param>
public void WriteTiles8(Tileset tiles)
{
    if (tiles is null)
    {
        throw new ArgumentNullException(nameof(tiles));
    }

    byte[] encoded = BitDepth.Encode8(tiles);
    WriteBytes(encoded);
}
/// <summary>
/// Construct the octree quantizer
/// </summary>
/// <remarks>
/// The Octree quantizer is a two pass algorithm. The initial pass sets up the octree,
/// the second pass quantizes a color based on the nodes in the tree
/// </remarks>
/// <param name="pBitDepth">Target bit depth, which fixes the color budget.</param>
public OctreeQuantizer(BitDepth pBitDepth)
    : base(false)
{
    _maxColors = GetMaxColors(pBitDepth);
    _octree = new Octree(GetMaxColorBits(pBitDepth));
}
/// <summary>
/// Creates a capture command.
/// </summary>
/// <param name="active">Whether capturing is active.</param>
/// <param name="exposureμs">Exposure time in microseconds.</param>
/// <param name="fps">Frames per second.</param>
/// <param name="gain">Sensor gain.</param>
/// <param name="curBitDepth">Pixel bit depth.</param>
/// <param name="imageformat">Output image format.</param>
/// <param name="foldername">Destination folder name.</param>
public Command(bool active, int exposureμs, double fps, double gain = 0, BitDepth curBitDepth = BitDepth.Mono8bpp, ImageFormat imageformat = ImageFormat.Seperate, string foldername = "")
{
    Active = active;
    Exposureμs = exposureμs;
    FPS = fps;
    Gain = gain;
    // BUG FIX: the original read "curBitDepth = curBitDepth;", assigning the
    // parameter to itself and leaving the property at its default value.
    CurBitDepth = curBitDepth;
    CurImageImageFormat = imageformat;
    Foldername = foldername;
}
/// <summary>
/// Builds a format descriptor, keeping the raw A/B coefficient strings and
/// their parsed numeric values.
/// </summary>
/// <param name="name">Format name.</param>
/// <param name="bitDepth">Bit depth descriptor string.</param>
/// <param name="a">Raw A coefficient.</param>
/// <param name="b">Raw B coefficient.</param>
/// <param name="smaller">Smaller-size threshold.</param>
public Format(string name, string bitDepth, string a, string b, uint smaller)
{
    Name = name;
    BitDepth = new BitDepth(bitDepth);
    AStr = a;
    BStr = b;
    A = ConvertToDouble(a);
    B = ConvertToDouble(b);
    Smaller = smaller;
}
/// <summary>
/// Initializes grayscale render options with identity rescaling and a default
/// window spanning the full representable range of the bit depth.
/// </summary>
/// <param name="bits">Bit depth defining the representable sample range.</param>
public GrayscaleRenderOptions(BitDepth bits)
{
    BitDepth = bits;
    RescaleSlope = 1.0;
    RescaleIntercept = 0.0;
    // Window covers the whole range, centered on its midpoint.
    double min = bits.MinimumValue;
    double max = bits.MaximumValue;
    WindowWidth = max - min;
    WindowCenter = (max + min) / 2.0;
    Monochrome1 = false;
    Invert = false;
}
/// <summary>
/// Draws a bitmap from a group of files.
/// </summary>
/// <param name="source">The group of files.</param>
/// <param name="bitDepth">The bits per pixel of the resulting bitmap.</param>
/// <param name="palette">The palette to use for the resulting bitmap. Pass null if the bit depth is 24 or 32 bits per pixel.</param>
/// <param name="worker">A BackgroundWorker which receives progress reports and may cancel this method.</param>
/// <returns>A bitmap containing pixels made from all the bytes of all the files in the group.</returns>
public static Bitmap Draw(FileSource source, BitDepth bitDepth, int[] palette, BackgroundWorker worker)
{
    if (source == null || source.FilePaths.Count == 0)
    {
        throw new ArgumentException("The provided file source was null or had no files.", nameof(source));
    }

    if (bitDepth < 0 || (int)bitDepth > 7)
    {
        throw new ArgumentOutOfRangeException(nameof(bitDepth), $"The provided bit depth was not a valid value. Expected a value between 0 and 7, got {bitDepth}.");
    }

    if (worker == null)
    {
        throw new ArgumentNullException(nameof(worker), "The provided BackgroundWorker was null.");
    }

    // Concatenate every file's bytes and hand off to the byte-array renderer.
    return Draw(source.GetFiles(), bitDepth, palette, worker);
}
/// <summary>
/// Combines the format-defining properties (channels, sample rate, bit depth,
/// format) into a hash code using the standard multiply-add scheme.
/// </summary>
public override int GetHashCode()
{
    const int multiplier = -1521134295;
    int hash = 1490243033;
    hash = hash * multiplier + Channels.GetHashCode();
    hash = hash * multiplier + SampleRate.GetHashCode();
    hash = hash * multiplier + BitDepth.GetHashCode();
    hash = hash * multiplier + Format.GetHashCode();
    return hash;
}
/// <summary>
/// Disposes the current image, clears the reference, and resets the bit depth to 32bpp.
/// </summary>
public override void Clear()
{
    Image?.Dispose();
    Image = null;
    BitDepth = BitDepth.Bit32;
}
/// <summary>
/// Builds a PNG IHDR chunk, validating dimensions and the bit depth / color type /
/// compression / filter / interlace combinations allowed by the PNG specification.
/// </summary>
/// <param name="width">Image width; must be in (0, Int32.MaxValue].</param>
/// <param name="height">Image height; must be in (0, Int32.MaxValue].</param>
/// <param name="bitDepth">Bits per sample; the allowed set depends on <paramref name="colorType"/>.</param>
/// <param name="colorType">PNG color type.</param>
/// <param name="compressionMethod">Only the default (deflate) method is defined by the spec.</param>
/// <param name="filterMethod">Only the default (adaptive) method is defined by the spec.</param>
/// <param name="interlaceMethod">None or Adam7.</param>
public Ihdr(UInt32 width, UInt32 height, BitDepth bitDepth, ColorType colorType, CompressionMethod compressionMethod = CompressionMethod.Default, FilterMethod filterMethod = FilterMethod.Default, InterlaceMethod interlaceMethod = InterlaceMethod.None)
    : base(ChunkType.IHDR)
{
    #region Sanity
    if (width == 0 || width > Int32.MaxValue)
        throw new ArgumentOutOfRangeException("width", "width must be greater than 0 and smaller than In32.MaxValue(2^31-1)");
    if (height == 0 || height > Int32.MaxValue)
        throw new ArgumentOutOfRangeException("height", "height must be greater than 0 and smaller than In32.MaxValue(2^31-1)");

    // Each color type permits only a specific set of bit depths (PNG spec, IHDR);
    // the Aggregate result selector turns the set into a "1, 2, 4" style list.
    BitDepth[] allowedBitDepths;
    switch (colorType)
    {
        case ColorType.Grayscale:
            if (!(allowedBitDepths = new[] { BitDepth._1, BitDepth._2, BitDepth._4, BitDepth._8, BitDepth._16 }).Contains(bitDepth))
                throw new ArgumentOutOfRangeException("bitDepth", String.Format("bitDepth must be one of {0} for colorType {1}", allowedBitDepths.Aggregate("", (s, bd) => s + bd + ", ", s => s.Trim().Substring(0, s.Length - 2)), colorType));
            break;
        case ColorType.Rgb:
            if (!(allowedBitDepths = new[] { BitDepth._8, BitDepth._16 }).Contains(bitDepth))
                throw new ArgumentOutOfRangeException("bitDepth", String.Format("bitDepth must be one of {0} for colorType {1}", allowedBitDepths.Aggregate("", (s, bd) => s + bd + ", ", s => s.Trim().Substring(0, s.Length - 2)), colorType));
            break;
        case ColorType.Palette:
            if (!(allowedBitDepths = new[] { BitDepth._1, BitDepth._2, BitDepth._4, BitDepth._8 }).Contains(bitDepth))
                throw new ArgumentOutOfRangeException("bitDepth", String.Format("bitDepth must be one of {0} for colorType {1}", allowedBitDepths.Aggregate("", (s, bd) => s + bd + ", ", s => s.Trim().Substring(0, s.Length - 2)), colorType));
            break;
        case ColorType.GrayscaleWithAlpha:
            if (!(allowedBitDepths = new[] { BitDepth._8, BitDepth._16 }).Contains(bitDepth))
                throw new ArgumentOutOfRangeException("bitDepth", String.Format("bitDepth must be one of {0} for colorType {1}", allowedBitDepths.Aggregate("", (s, bd) => s + bd + ", ", s => s.Trim().Substring(0, s.Length - 2)), colorType));
            break;
        case ColorType.Rgba:
            if (!(allowedBitDepths = new[] { BitDepth._8, BitDepth._16 }).Contains(bitDepth))
                throw new ArgumentOutOfRangeException("bitDepth", String.Format("bitDepth must be one of {0} for colorType {1}", allowedBitDepths.Aggregate("", (s, bd) => s + bd + ", ", s => s.Trim().Substring(0, s.Length - 2)), colorType));
            break;
        default:
            throw new ArgumentOutOfRangeException("colorType", String.Format("Unknown colorType: {0}", colorType));
    }

    if (compressionMethod != CompressionMethod.Default)
        throw new ArgumentOutOfRangeException("compressionMethod", String.Format("Unknown compressionMethod: {0}", compressionMethod));
    if (filterMethod != FilterMethod.Default)
        throw new ArgumentOutOfRangeException("filterMethod", String.Format("Unknown filterMethod: {0}", filterMethod));

    var allowedInterlaceMethods = new[] { InterlaceMethod.None, InterlaceMethod.Adam7 };
    if (!allowedInterlaceMethods.Contains(interlaceMethod))
        throw new ArgumentOutOfRangeException("interlaceMethod", String.Format("interlaceMethod must be one of {0}", allowedInterlaceMethods.Aggregate("", (s, bd) => s + bd + ", ", s => s.Trim().Substring(0, s.Length - 2))));
    #endregion

    Width = width;
    Height = height;
    BitDepth = bitDepth;
    ColorType = colorType;
    CompressionMethod = compressionMethod;
    FilterMethod = filterMethod;
    InterlaceMethod = interlaceMethod;
}
/// <summary>
/// Reads a compressed sprite of the given bit depth.
/// </summary>
/// <param name="bitDepth">Bit depth of the stored sprite data.</param>
/// <returns>A sprite.</returns>
public Sprite ReadCompressedSprite(BitDepth bitDepth)
{
    return bitDepth == BitDepth.Four
        ? ReadCompressedSprite4()
        : ReadCompressedSprite8();
}
/// <summary>
/// Reads an uncompressed sprite spanning the given number of tiles at the given bit depth.
/// </summary>
/// <param name="tiles">Number of tiles to read.</param>
/// <param name="bitDepth">Bit depth of the stored sprite data.</param>
/// <returns>A sprite.</returns>
public Sprite ReadSprite(int tiles, BitDepth bitDepth)
{
    return bitDepth == BitDepth.Four
        ? ReadSprite4(tiles)
        : ReadSprite8(tiles);
}
/// <summary>
/// Computes the number of bytes needed to store one pixel, rounding partial
/// bytes up to a whole byte.
/// </summary>
/// <param name="colorType">Color type, which fixes the samples per pixel.</param>
/// <param name="bitDepth">Bits per sample.</param>
/// <returns>Ceiling of the pixel size in bytes.</returns>
public static int BytesPerPixelCeil(ColorType colorType, BitDepth bitDepth)
{
    int samplesPerPixel;
    switch (colorType)
    {
        case ColorType.Grayscale:
        case ColorType.PaletteColor:
            samplesPerPixel = 1;
            break;
        case ColorType.GrayscaleAlpha:
            samplesPerPixel = 2;
            break;
        case ColorType.TrueColor:
            samplesPerPixel = 3;
            break;
        case ColorType.TrueColorAlpha:
            samplesPerPixel = 4;
            break;
        default:
            throw new ArgumentOutOfRangeException("colorType");
    }

    int bitsPerPixel = samplesPerPixel * (int)bitDepth;
    // Ceiling division by 8.
    return (bitsPerPixel + 7) / 8;
}
/// <summary>
/// Draws a bitmap of a certain size from a byte array.
/// </summary>
/// <param name="source">The byte array.</param>
/// <param name="bitDepth">The number of bits per pixel in the bitmap.</param>
/// <param name="palette">The palette used to draw the image. Pass null for 24 or 32 bit per pixel images.</param>
/// <param name="worker">A BackgroundWorker which receives progress reports and may cancel this method.</param>
/// <param name="imageSize">The desired size of the bitmap.</param>
/// <returns>A bitmap, sized at most to be the desired size, containing pixels made from some or all of the bytes in the array.</returns>
/// <remarks>If there aren't enough bytes to fill enough pixels to fill the desired size, all remaining pixels will have all-bits-zero
/// (in paletted modes, all-bits-zero pixels use the first color in the provided palette). If there are too many bytes, the image will
/// only display enough bytes to fill the image.</remarks>
public static Bitmap Draw(byte[] source, BitDepth bitDepth, int[] palette, BackgroundWorker worker, Size imageSize)
{
    if (source == null || source.Length == 0)
        throw new ArgumentException("The provided source bytes were null or empty.", nameof(source));
    if (bitDepth < 0 || (int)bitDepth > 7)
        throw new ArgumentOutOfRangeException(nameof(bitDepth), $"The provided bit depth was not a valid value. Expected a value between 0 and 7, got {bitDepth}.");
    if (worker == null)
        throw new ArgumentNullException(nameof(worker), "The provided BackgroundWorker was null.");
    // BUG FIX: the original tested Height == 1, which let zero-height sizes through
    // and rejected valid one-pixel-high images; the message itself says "0 pixels".
    if (imageSize.Width == 0 || imageSize.Height == 0)
        throw new ArgumentOutOfRangeException(nameof(imageSize), "The provided image size has a width or height of 0 pixels.");

    int paletteSize = (palette != null) ? palette.Length : 0;
    switch (bitDepth)
    {
        case BitDepth.Invalid:
            throw new InvalidOperationException("Cannot draw a bitmap using an invalid bit depth.");
        case BitDepth.OneBpp:
            ValidatePaletteSize(NumberOf1BppColors, paletteSize);
            return ToBitmap(Create1BppImage(source, palette, worker), worker, imageSize);
        case BitDepth.TwoBpp:
            ValidatePaletteSize(NumberOf2BppColors, paletteSize);
            return ToBitmap(Create2BppImage(source, palette, worker), worker, imageSize);
        case BitDepth.FourBpp:
            ValidatePaletteSize(NumberOf4BppColors, paletteSize);
            return ToBitmap(Create4BppImage(source, palette, worker), worker, imageSize);
        case BitDepth.EightBpp:
            ValidatePaletteSize(NumberOf8BppColors, paletteSize);
            return ToBitmap(Create8BppImage(source, palette, worker), worker, imageSize);
        case BitDepth.SixteenBpp:
            ValidatePaletteSize(NumberOf16BppColors, paletteSize);
            return ToBitmap(Create16BppImage(source, palette, worker), worker, imageSize);
        case BitDepth.TwentyFourBpp:
            return ToBitmap(Create24BppImage(source, worker), worker, imageSize);
        case BitDepth.ThirtyTwoBpp:
            return ToBitmap(Create32BppImage(source, worker), worker, imageSize);
        default:
            return null;
    }
}
// Resizes an IplImage to width x height with bilinear interpolation and flattens
// its raw bytes into a float array indexed by (row * WidthStep + column).
// NOTE(review): the result array is allocated as width * height * 8 * channel, but
// the loops only write indexes up to (Height-1) * WidthStep + Width-1 — the 8x
// factor looks oversized; confirm against callers before shrinking it.
public static float[] ResizeIplTo(IplImage Face, int width, int height, BitDepth bitDepth, int channel)
{
    IplImage smallerFace = new IplImage(new OpenCvSharp.CvSize(width, height), bitDepth, channel);
    Face.Resize(smallerFace, Interpolation.Linear);
    unsafe
    {
        byte* smallFaceData = smallerFace.ImageDataPtr;
        float[] currentFace = new float[width * height * 8 * channel];
        for (int i = 0; i < smallerFace.Height; i++)
        {
            for (int j = 0; j < smallerFace.Width; j++)
            {
                // Rows are strided by WidthStep (may include padding beyond Width).
                currentFace[i * smallerFace.WidthStep + j] = (float)smallFaceData[i * smallerFace.WidthStep + j];
            }
        }
        smallerFace.Dispose();
        return currentFace;
    }
}
/// <summary>
/// P/Invoke binding for OpenCV's cvCreateImageHeader, which allocates an IplImage
/// header without allocating pixel data; returns the native header pointer.
/// </summary>
public static extern IntPtr cvCreateImageHeader(CvSize size, BitDepth depth, int channels);
/// <summary>
/// Applies the bit depth chosen via the radio buttons and closes the dialog.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event arguments.</param>
private void btn_OK_Click(object sender, EventArgs e)
{
    if (rbtn_32Bit.Checked)
    {
        m_nBitDepth = BitDepth.BPP32;
    }
    else if (rbtn_24Bit.Checked)
    {
        m_nBitDepth = BitDepth.BPP24;
    }

    Close();
}
// Initializes 32-bit unsigned grayscale pixel data, masking off bits above HighBit
// when fewer than 32 bits are stored.
public GrayscalePixelDataU32(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    _width = width;
    _height = height;
    _data = ByteBufferEnumerator<uint>.Create(data).ToArray();

    if (bitDepth.BitsStored != 32)
    {
        // Keep only bits [0, HighBit]. This assumes HighBit < 31 here (otherwise
        // 1 << 32 overflows) — TODO confirm upstream validation guarantees that
        // whenever BitsStored != 32.
        int mask = (1 << (bitDepth.HighBit + 1)) - 1;
        Parallel.For(0, _data.Length, (int i) =>
        {
            _data[i] = (uint)(_data[i] & mask);
        });
    }
}
// Initializes 32-bit signed grayscale pixel data, reconstructing the sign of samples
// whose sign bit sits at HighBit with the magnitude in the masked low-order bits.
public GrayscalePixelDataS32(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    _width = width;
    _height = height;
    _data = ByteBufferEnumerator<int>.Create(data).ToArray();

    // Bit at the HighBit position flags a negative stored sample.
    int sign = 1 << bitDepth.HighBit;
    // Mask keeping only the BitsStored low-order bits.
    uint mask = (UInt32.MaxValue >> (bitDepth.BitsAllocated - bitDepth.BitsStored));

    // NOTE(review): unlike the 16-bit variant, this conversion runs even when
    // BitsStored == 32, and the "+ 1" in the negative branch looks like it may
    // offset results by one (-((-d) & mask) already yields the negative value);
    // verify against reference images before changing.
    Parallel.For(0, _data.Length, (int i) =>
    {
        int d = _data[i];
        if ((d & sign) != 0)
            _data[i] = (int)-(((-d) & mask) + 1);
        else
            _data[i] = (int)(d & mask);
    });
}
/// <summary>
/// ピクセルデータの配列からIplImageを生成する
/// </summary>
/// <param name="width">画像の幅</param>
/// <param name="height">画像の高さ</param>
/// <param name="depth">画像要素のビットデプス</param>
/// <param name="channels">要素(ピクセル)毎のチャンネル数.1,2,3,4 のいずれか.</param>
/// <param name="data">ピクセルデータ配列</param>
/// <returns></returns>
#else
/// <summary>
/// Creates an IplImage instance from pixel data
/// </summary>
/// <param name="width">Image width. </param>
/// <param name="height">Image height. </param>
/// <param name="depth">Bit depth of image elements.</param>
/// <param name="channels">Number of channels per element(pixel).</param>
/// <param name="data">Pixel data array</param>
/// <returns></returns>
#endif
public static IplImage FromPixelData(int width, int height, BitDepth depth, int channels, Array data)
{
    if (data == null)
    {
        throw new ArgumentNullException("data");
    }
    IplImage image = new IplImage(width, height, depth, channels);
    // Pin the managed array so its address stays stable while copying into the
    // freshly allocated native image buffer.
    using (ScopedGCHandle handle = ScopedGCHandle.Alloc(data, GCHandleType.Pinned))
    {
        Util.CopyMemory(image.ImageData, handle.AddrOfPinnedObject(), image.ImageSize);
    }
    return image;
}
/// <summary>
/// Updates the selected bit depth and enables/disables the color-mode radio buttons
/// to match what the chosen depth supports, checking that depth's default mode.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event arguments.</param>
private void comboBox1_SelectedIndexChanged(object sender, EventArgs e)
{
    // Each case previously repeated the same six assignments; the table of
    // capabilities per depth is now expressed as one helper call per case.
    switch (ComboBitDepths.SelectedIndex)
    {
        case 0:
            ApplyBitDepthSelection(BitDepth.OneBpp, gray: true, rgb: false, argb: false, paletted: true, RadioGrayscale);
            break;
        case 1:
            ApplyBitDepthSelection(BitDepth.TwoBpp, gray: true, rgb: false, argb: false, paletted: true, RadioGrayscale);
            break;
        case 2:
            ApplyBitDepthSelection(BitDepth.FourBpp, gray: true, rgb: true, argb: false, paletted: true, RadioGrayscale);
            break;
        case 3:
            ApplyBitDepthSelection(BitDepth.EightBpp, gray: true, rgb: true, argb: true, paletted: true, RadioGrayscale);
            break;
        case 4:
            ApplyBitDepthSelection(BitDepth.SixteenBpp, gray: false, rgb: true, argb: true, paletted: false, RadioRGB);
            break;
        case 5:
            ApplyBitDepthSelection(BitDepth.TwentyFourBpp, gray: false, rgb: true, argb: true, paletted: false, RadioRGB);
            break;
        case 6:
            ApplyBitDepthSelection(BitDepth.ThirtyTwoBpp, gray: false, rgb: false, argb: true, paletted: false, RadioARGB);
            break;
        default:
            break;
    }
}

// Sets the current bit depth, toggles which color-mode options are available,
// and checks the default mode for that depth.
private void ApplyBitDepthSelection(BitDepth depth, bool gray, bool rgb, bool argb, bool paletted, RadioButton defaultChoice)
{
    bitDepth = depth;
    RadioGrayscale.Enabled = gray;
    RadioRGB.Enabled = rgb;
    RadioARGB.Enabled = argb;
    RadioPaletted.Enabled = paletted;
    defaultChoice.Checked = true;
}
// Initializes 16-bit unsigned grayscale pixel data, masking off bits above HighBit
// when fewer than 16 bits are stored.
public GrayscalePixelDataU16(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    _bits = bitDepth;
    _width = width;
    _height = height;
    _data = IO.ByteConverter.ToArray<ushort>(data);

    if (bitDepth.BitsStored != 16)
    {
        // Keep only bits [0, HighBit]; everything above is cleared.
        int mask = (1 << (bitDepth.HighBit + 1)) - 1;
        Parallel.For(0, _data.Length, (int i) =>
        {
            _data[i] = (ushort)(_data[i] & mask);
        });
    }
}
/// <summary>
/// ユーザから渡された参照が指す, ユーザによって確保された画像のヘッダ構造体を初期化し,その参照を返す (cvInitImageHeader).
/// </summary>
/// <param name="size">画像の幅と高さ</param>
/// <param name="depth">画像のカラーデプス</param>
/// <param name="channels">チャンネル数</param>
/// <returns>初期化された画像ヘッダ</returns>
#else
/// <summary>
/// Initializes allocated by user image header (cvInitImageHeader).
/// </summary>
/// <param name="size">Image width and height. </param>
/// <param name="depth">Image depth. </param>
/// <param name="channels">Number of channels. </param>
/// <returns>Initialzed IplImage header</returns>
#endif
public static IplImage InitImageHeader(CvSize size, BitDepth depth, int channels)
{
    // Thin wrapper over Cv.InitImageHeader; the out parameter receives the header
    // that the wrapped call also returns.
    IplImage img;
    return Cv.InitImageHeader(out img, size, depth, channels);
}
/// <summary>
/// Draws a bitmap from a byte array.
/// </summary>
/// <param name="bytes">The byte array.</param>
/// <param name="bitDepth">The number of bits per pixel.</param>
/// <param name="palette">The palette used to draw the image. Pass null for 24 and 32 bit per pixel bitmaps.</param>
/// <param name="worker">A BackgroundWorker which receives progress reports and may cancel this method.</param>
/// <returns>A bitmap containing pixels from all bytes in the array.</returns>
private static Bitmap Draw(byte[] bytes, BitDepth bitDepth, int[] palette, BackgroundWorker worker)
{
    // ROBUSTNESS FIX: also guard against a null palette in paletted modes; report
    // size 0 so ValidatePaletteSize raises a descriptive error instead of this
    // method throwing a NullReferenceException on palette.Length.
    int paletteSize = (palette != null && (int)bitDepth < 6) ? palette.Length : 0; // not 24 or 32bpp
    switch (bitDepth)
    {
        case BitDepth.Invalid:
            throw new InvalidOperationException("Cannot draw a bitmap using an invalid bit depth.");
        case BitDepth.OneBpp:
            ValidatePaletteSize(NumberOf1BppColors, paletteSize);
            return ToBitmap(Create1BppImage(bytes, palette, worker), worker);
        case BitDepth.TwoBpp:
            ValidatePaletteSize(NumberOf2BppColors, paletteSize);
            return ToBitmap(Create2BppImage(bytes, palette, worker), worker);
        case BitDepth.FourBpp:
            ValidatePaletteSize(NumberOf4BppColors, paletteSize);
            return ToBitmap(Create4BppImage(bytes, palette, worker), worker);
        case BitDepth.EightBpp:
            ValidatePaletteSize(NumberOf8BppColors, paletteSize);
            return ToBitmap(Create8BppImage(bytes, palette, worker), worker);
        case BitDepth.SixteenBpp:
            ValidatePaletteSize(NumberOf16BppColors, paletteSize);
            return ToBitmap(Create16BppImage(bytes, palette, worker), worker);
        case BitDepth.TwentyFourBpp:
            return ToBitmap(Create24BppImage(bytes, worker), worker);
        case BitDepth.ThirtyTwoBpp:
            return ToBitmap(Create32BppImage(bytes, worker), worker);
        default:
            return null;
    }
}
/// <summary>
/// 画像のヘッダを作成し,データ領域を確保する (cvCreateImage).
/// </summary>
/// <param name="width">画像の幅</param>
/// <param name="height">画像の高さ</param>
/// <param name="depth">画像要素のビットデプス</param>
/// <param name="channels">要素(ピクセル)毎のチャンネル数.1,2,3,4 のいずれか.</param>
#else
/// <summary>
/// Creates header and allocates data (cvCreateImage).
/// </summary>
/// <param name="width">Image width. </param>
/// <param name="height">Image height. </param>
/// <param name="depth">Bit depth of image elements.</param>
/// <param name="channels">Number of channels per element(pixel).</param>
#endif
public IplImage(int width, int height, BitDepth depth, int channels)
    // Delegates to the CvSize-based constructor, which performs the actual allocation.
    : this(new CvSize(width, height), depth, channels)
{
}
/// <summary>
/// 画像のヘッダを作成し,データ領域を確保する (cvCreateImage).
/// </summary>
/// <param name="size">画像の幅と高さ</param>
/// <param name="depth">画像要素のビットデプス</param>
/// <param name="channels">要素(ピクセル)毎のチャンネル数.1,2,3,4 のいずれか.</param>
#else
/// <summary>
/// Creates header and allocates data (cvCreateImage).
/// </summary>
/// <param name="size">Image width and height. </param>
/// <param name="depth">Bit depth of image elements.</param>
/// <param name="channels">Number of channels per element(pixel).</param>
/// <exception cref="OpenCvSharpException">Thrown when the native cvCreateImage call returns a null pointer.</exception>
#endif
public IplImage(CvSize size, BitDepth depth, int channels)
{
    // Allocate the native image; a null pointer means the native allocation failed.
    ptr = NativeMethods.cvCreateImage(size, depth, channels);
    if (ptr == IntPtr.Zero)
        throw new OpenCvSharpException("Failed to create IplImage");
    // Tell the GC about the unmanaged memory backing this image so collection
    // pressure reflects the real allocation size.
    NotifyMemoryPressure(SizeOf + ImageSize);
}
/// <summary>
/// 指定したIplImageのビット深度・チャンネル数に適合するPixelFormatを返す
/// </summary>
/// <param name="d">Bit depth of the image elements.</param>
/// <param name="c">Number of channels per pixel.</param>
/// <returns>The WPF PixelFormat matching the given bit depth and channel count.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when the bit depth, or the channel count for a supported bit depth,
/// has no corresponding PixelFormat.
/// </exception>
private static PixelFormat GetOptimumPixelFormats(BitDepth d, int c)
{
    switch (d)
    {
        case BitDepth.U8:
        case BitDepth.S8:
            switch (c)
            {
                case 1: return PixelFormats.Gray8;
                case 3: return PixelFormats.Bgr24;
                case 4: return PixelFormats.Bgra32;
                default: throw new ArgumentOutOfRangeException("c", "Not supported BitDepth and/or NChannels");
            }
        case BitDepth.U16:
        case BitDepth.S16:
            switch (c)
            {
                case 1: return PixelFormats.Gray16;
                case 3: return PixelFormats.Rgb48;
                case 4: return PixelFormats.Rgba64;
                default: throw new ArgumentOutOfRangeException("c", "Not supported BitDepth and/or NChannels");
            }
        case BitDepth.S32:
            switch (c)
            {
                case 4: return PixelFormats.Prgba64;
                default: throw new ArgumentOutOfRangeException("c", "Not supported BitDepth and/or NChannels");
            }
        case BitDepth.F32:
            switch (c)
            {
                case 1: return PixelFormats.Gray32Float;
                case 3: return PixelFormats.Rgb128Float;
                case 4: return PixelFormats.Rgba128Float;
                default: throw new ArgumentOutOfRangeException("c", "Not supported BitDepth and/or NChannels");
            }
        case BitDepth.F64:
        default:
            // BUG FIX: the rejected argument here is the bit depth, not the channel
            // count, so report "d" (not "c") as the offending parameter name.
            throw new ArgumentOutOfRangeException("d", "Not supported BitDepth");
    }
}
/// <summary>
/// Returns the default palette for a given bit depth and color mode.
/// </summary>
/// <param name="depth">The given bit depth.</param>
/// <param name="mode">The given color mode.</param>
/// <returns>The requested default palette.</returns>
/// <exception cref="ArgumentException">
/// Thrown when the color mode is unknown, or when the color mode has no
/// default palette at the requested bit depth.
/// </exception>
public static int[] GetPalette(BitDepth depth, ColorMode mode)
{
    if (mode == ColorMode.Grayscale)
    {
        if (depth == BitDepth.OneBpp)
        {
            return OneBppGrayscale;
        }
        else if (depth == BitDepth.TwoBpp)
        {
            return TwoBppGrayscale;
        }
        else if (depth == BitDepth.FourBpp)
        {
            return FourBppGrayscale;
        }
        else if (depth == BitDepth.EightBpp)
        {
            return EightBppGrayscale;
        }
    }
    else if (mode == ColorMode.RGB)
    {
        if (depth == BitDepth.FourBpp)
        {
            return FourBppRGB121;
        }
        else if (depth == BitDepth.EightBpp)
        {
            return EightBppRGB332;
        }
        else if (depth == BitDepth.SixteenBpp)
        {
            return SixteenBppRGB565;
        }
    }
    else if (mode == ColorMode.ARGB)
    {
        if (depth == BitDepth.EightBpp)
        {
            return EightBppARGB2222;
        }
        else if (depth == BitDepth.SixteenBpp)
        {
            return SixteenBppARGB4444;
        }
    }
    else
    {
        throw new ArgumentException("No default palette for this color mode.");
    }

    // BUG FIX: this point IS reachable (e.g. Grayscale + SixteenBpp); the old code
    // threw InvalidOperationException("You should never see this message."), which
    // misreported a legitimate unsupported-combination error. Report the real problem.
    throw new ArgumentException("No default palette for this bit depth in the given color mode.");
}
/// <summary>
/// Native binding for cvInitImageHeader: initializes a user-allocated image header
/// in place and returns a pointer to it.
/// NOTE(review): the DllImport attribute for this extern declaration is presumably
/// declared immediately above — not visible in this chunk; verify before moving this line.
/// </summary>
public static extern IntPtr cvInitImageHeader(IntPtr image, CvSize size, BitDepth depth, int channels, ImageOrigin origin, int align);
/// <summary>
/// ユーザから渡された参照が指す, ユーザによって確保された画像のヘッダ構造体を初期化し,その参照を返す (cvInitImageHeader).
/// </summary>
/// <param name="size">画像の幅と高さ</param>
/// <param name="depth">画像のカラーデプス</param>
/// <param name="channels">チャンネル数</param>
/// <param name="origin">画像の原点</param>
/// <param name="align">画像の行のアライメント,通常は4,あるいは 8 バイト.</param>
/// <returns>初期化された画像ヘッダ</returns>
#else
/// <summary>
/// Initializes allocated by user image header (cvInitImageHeader).
/// </summary>
/// <param name="size">Image width and height. </param>
/// <param name="depth">Image depth. </param>
/// <param name="channels">Number of channels. </param>
/// <param name="origin">Origin of image</param>
/// <param name="align">Alignment for image rows, typically 4 or 8 bytes. </param>
/// <returns>Initialzed IplImage header</returns>
#endif
// Thin static wrapper: delegates to Cv.InitImageHeader with an out-parameter and returns the result.
public static IplImage InitImageHeader(CvSize size, BitDepth depth, int channels, ImageOrigin origin, int align)
{
    IplImage img;
    return Cv.InitImageHeader(out img, size, depth, channels, origin, align);
}
/// <summary>
/// Initializes an instance of the <see cref="GrayscalePixelDataS16"/> class.
/// </summary>
/// <param name="width">Pixel data width.</param>
/// <param name="height">Pixel data height.</param>
/// <param name="bitDepth">Bit depth of pixel data.</param>
/// <param name="data">Byte data buffer.</param>
public GrayscalePixelDataS16(int width, int height, BitDepth bitDepth, IByteBuffer data)
{
    _bits = bitDepth;
    _width = width;
    _height = height;
    _data = IO.ByteConverter.ToArray<short>(data);

    if (bitDepth.BitsStored != 16)
    {
        // BUG FIX: the previous sign/mask reconstruction was off by one for every
        // negative sample (e.g. BitsStored=12: stored 0x800 decoded to -2049 instead
        // of -2048). Sign-extend instead by shifting the stored bits to the top of
        // the 16-bit word and arithmetic-shifting back down, so the sign bit at
        // HighBit is propagated correctly.
        //
        // Normally HighBit == BitsStored-1, so shiftLeft == shiftRight and the two
        // shifts simply replace the masked high bits with the sign bit. Separating
        // the two shift amounts also handles exotic cases where low-order bits
        // must be discarded.
        int shiftLeft = bitDepth.BitsAllocated - bitDepth.HighBit - 1;
        int shiftRight = bitDepth.BitsAllocated - bitDepth.BitsStored;
        Parallel.For(0, _data.Length, (int i) =>
        {
            var d = (short)(_data[i] << shiftLeft);
            _data[i] = (short)(d >> shiftRight);
        });
    }
}
/// <summary>
/// ピクセルデータのbyte配列からIplImageを生成する
/// </summary>
/// <param name="size">画像の幅と高さ</param>
/// <param name="depth">画像要素のビットデプス</param>
/// <param name="channels">要素(ピクセル)毎のチャンネル数.1,2,3,4 のいずれか.</param>
/// <param name="data">ピクセルデータ配列</param>
/// <returns></returns>
#else
/// <summary>
/// Creates an IplImage instance from pixel data
/// </summary>
/// <param name="size">Image width and height. </param>
/// <param name="depth">Bit depth of image elements.</param>
/// <param name="channels">Number of channels per element(pixel).</param>
/// <param name="data">Pixel data array</param>
/// <returns></returns>
#endif
// Convenience overload: unwraps the CvSize and delegates to the width/height overload.
public static IplImage FromPixelData(CvSize size, BitDepth depth, int channels, Array data)
{
    return FromPixelData(size.Width, size.Height, depth, channels, data);
}
/// <summary>
/// メモリ確保と初期化を行い,IplImage クラスを返す (cvCreateImageHeader).
/// </summary>
/// <param name="size">画像の幅と高さ</param>
/// <param name="depth">画像要素のビットデプス</param>
/// <param name="channels">要素(ピクセル)毎のチャンネル数.1,2,3,4 のいずれか.このチャンネルはインタリーブされる.例えば,通常のカラー画像のデータレイアウトは,b0 g0 r0 b1 g1 r1 ...となっている.</param>
/// <returns>画像ポインタ</returns>
#else
/// <summary>
/// Allocates, initializes, and returns structure IplImage (cvCreateImageHeader).
/// </summary>
/// <param name="size">Image width and height. </param>
/// <param name="depth">Image depth. </param>
/// <param name="channels">Number of channels. </param>
/// <returns>Reference to image header</returns>
#endif
// Thin static wrapper over Cv.CreateImageHeader; allocates the header only, not the pixel data.
public static IplImage CreateHeader(CvSize size, BitDepth depth, int channels)
{
    return Cv.CreateImageHeader(size, depth, channels);
}
/// <summary>
/// Handles the OK button: copies the dialog's current selections into the
/// backing fields, then closes the dialog.
/// </summary>
/// <param name="sender">The OK button.</param>
/// <param name="e">Event data (unused).</param>
private void btn_OK_Click(object sender, EventArgs e)
{
    // Preserve meta data.
    m_bPreserveMetadata = ckb_PreserveData.Checked;

    // Bit depth: the field keeps its previous value when no radio button is checked.
    if (rbtn_32Bits.Checked)
    {
        m_nBitDepth = BitDepth.BPP32;
    }
    else if (rbtn_24Bits.Checked)
    {
        m_nBitDepth = BitDepth.BPP24;
    }

    // Compression: likewise, only assigned when a radio button is checked.
    if (rbtn_CompressionNone.Checked)
    {
        m_nCompression = TiffCompression.None;
    }
    else if (rbtn_CompressionCCITT3.Checked)
    {
        m_nCompression = TiffCompression.CCITT3;
    }
    else if (rbtn_CompressionCCITT4.Checked)
    {
        m_nCompression = TiffCompression.CCITT4;
    }
    else if (rbtn_CompressionLZW.Checked)
    {
        m_nCompression = TiffCompression.LZW;
    }
    else if (rbtn_CompressionRle.Checked)
    {
        m_nCompression = TiffCompression.Rle;
    }

    Close();
}