public static void FromStream(Stream source, MetaDataIO meta, ReadTagParams readTagParams)
{
    string str;
    byte[] data = new byte[256];

    // Description
    source.Read(data, 0, 256);
    str = Utils.StripEndingZeroChars(Utils.Latin1Encoding.GetString(data).Trim());
    if (str.Length > 0) { meta.SetMetaField("bext.description", str, readTagParams.ReadAllMetaFrames); }

    // Originator
    source.Read(data, 0, 32);
    str = Utils.StripEndingZeroChars(Utils.Latin1Encoding.GetString(data, 0, 32).Trim());
    if (str.Length > 0) { meta.SetMetaField("bext.originator", str, readTagParams.ReadAllMetaFrames); }

    // OriginatorReference
    source.Read(data, 0, 32);
    str = Utils.StripEndingZeroChars(Utils.Latin1Encoding.GetString(data, 0, 32).Trim());
    if (str.Length > 0) { meta.SetMetaField("bext.originatorReference", str, readTagParams.ReadAllMetaFrames); }

    // OriginationDate
    source.Read(data, 0, 10);
    str = Utils.StripEndingZeroChars(Utils.Latin1Encoding.GetString(data, 0, 10).Trim());
    if (str.Length > 0) { meta.SetMetaField("bext.originationDate", str, readTagParams.ReadAllMetaFrames); }

    // OriginationTime
    source.Read(data, 0, 8);
    str = Utils.StripEndingZeroChars(Utils.Latin1Encoding.GetString(data, 0, 8).Trim());
    if (str.Length > 0) { meta.SetMetaField("bext.originationTime", str, readTagParams.ReadAllMetaFrames); }

    // TimeReference
    source.Read(data, 0, 8);
    ulong timeReference = StreamUtils.DecodeUInt64(data);
    meta.SetMetaField("bext.timeReference", timeReference.ToString(), readTagParams.ReadAllMetaFrames);

    // BEXT version
    source.Read(data, 0, 2);
    int intData = StreamUtils.DecodeUInt16(data);
    meta.SetMetaField("bext.version", intData.ToString(), readTagParams.ReadAllMetaFrames);

    // UMID
    source.Read(data, 0, 64);
    str = "";

    int usefulLength = 32; // "basic" UMID
    if (data[12] > 19)
    {
        usefulLength = 64; // data[12] gives the size of remaining UMID
    }
    for (int i = 0; i < usefulLength; i++) { str = str + data[i].ToString("X2"); }

    meta.SetMetaField("bext.UMID", str, readTagParams.ReadAllMetaFrames);

    // LoudnessValue
    source.Read(data, 0, 2);
    intData = StreamUtils.DecodeInt16(data);
    meta.SetMetaField("bext.loudnessValue", (intData / 100.0).ToString(), readTagParams.ReadAllMetaFrames);

    // LoudnessRange
    source.Read(data, 0, 2);
    intData = StreamUtils.DecodeInt16(data);
    meta.SetMetaField("bext.loudnessRange", (intData / 100.0).ToString(), readTagParams.ReadAllMetaFrames);

    // MaxTruePeakLevel
    source.Read(data, 0, 2);
    intData = StreamUtils.DecodeInt16(data);
    meta.SetMetaField("bext.maxTruePeakLevel", (intData / 100.0).ToString(), readTagParams.ReadAllMetaFrames);

    // MaxMomentaryLoudness
    source.Read(data, 0, 2);
    intData = StreamUtils.DecodeInt16(data);
    meta.SetMetaField("bext.maxMomentaryLoudness", (intData / 100.0).ToString(), readTagParams.ReadAllMetaFrames);

    // MaxShortTermLoudness
    source.Read(data, 0, 2);
    intData = StreamUtils.DecodeInt16(data);
    meta.SetMetaField("bext.maxShortTermLoudness", (intData / 100.0).ToString(), readTagParams.ReadAllMetaFrames);

    // Reserved
    source.Seek(180, SeekOrigin.Current);

    // CodingHistory
    long initialPos = source.Position;
    if (StreamUtils.FindSequence(source, new byte[2] { 13, 10 } /* CR LF */))
    {
        long endPos = source.Position - 2;
        source.Seek(initialPos, SeekOrigin.Begin);

        if (data.Length < (int)(endPos - initialPos)) { data = new byte[(int)(endPos - initialPos)]; }
        source.Read(data, 0, (int)(endPos - initialPos));

        str = Utils.StripEndingZeroChars(Utils.Latin1Encoding.GetString(data, 0, (int)(endPos - initialPos)).Trim());
        if (str.Length > 0) { meta.SetMetaField("bext.codingHistory", str, readTagParams.ReadAllMetaFrames); }
    }
}
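
// --- Illustration only, not part of the original source ---
// Per the BWF specification (EBU Tech 3285), TimeReference is the number of audio samples elapsed since midnight.
// Assuming the sample rate is known from the WAV "fmt " chunk, the raw value stored above under "bext.timeReference"
// could be converted to a time of day with a hypothetical helper like the one below
// (e.g. 172800000 samples at 48000 Hz => 01:00:00).
private static TimeSpan timeReferenceToTimeOfDay(ulong timeReference, int sampleRate)
{
    // Whole and fractional seconds since midnight
    return TimeSpan.FromSeconds(timeReference / (double)sampleRate);
}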
public static ImageProperties GetImageProperties(byte[] imageData, ImageFormat format = ImageFormat.Undefined)
{
    ImageProperties props = new ImageProperties();

    if (ImageFormat.Undefined.Equals(format)) format = GetImageFormatFromPictureHeader(imageData);

    if (format.Equals(ImageFormat.Unsupported)) return props;

    props.NumColorsInPalette = 0;
    props.Format = format;

    using (MemoryStream s = new MemoryStream(imageData))
    using (BinaryReader r = new BinaryReader(s))
    {
        long limit = (long)Math.Round(s.Length * 0.25); // TODO - test and adjust limit

        switch (format)
        {
            case (ImageFormat.Tiff):
                bool isBigEndian = (0x4D == r.ReadByte());
                s.Seek(3, SeekOrigin.Current); // Skip the rest of the signature

                long IFDOffset = readInt32(r, isBigEndian);
                s.Seek(IFDOffset, SeekOrigin.Begin);

                int nbIFDEntries = readInt16(r, isBigEndian);

                long initialPos = s.Position;
                int IFDtag, IFDFieldType, IFDNbValues, IFDValue32, IFDValue16;
                byte[] IFDValueBinary;

                int photometricInterpretation = 0;
                int bitsPerSample = 0;
                int samplesPerPixel = 0;

                for (int i = 0; i < nbIFDEntries; i++)
                {
                    IFDtag = readInt16(r, isBigEndian);
                    IFDFieldType = readInt16(r, isBigEndian);
                    IFDNbValues = readInt32(r, isBigEndian);
                    IFDValueBinary = r.ReadBytes(4);
                    IFDValue32 = isBigEndian ? StreamUtils.DecodeBEInt32(IFDValueBinary) : StreamUtils.DecodeInt32(IFDValueBinary);
                    IFDValue16 = isBigEndian ? StreamUtils.DecodeBEInt16(IFDValueBinary) : StreamUtils.DecodeInt16(IFDValueBinary);

                    switch (IFDtag)
                    {
                        // Common properties
                        case (0x0100): // ImageWidth
                            props.Width = IFDValue32;
                            // Specs say "SHORT or LONG" but the implementation actually takes up 4 bytes anyway
                            // -> we'll assume it's a SHORT if the last two bytes are null
                            if (0 == IFDValueBinary[2] + IFDValueBinary[3]) props.Width = IFDValue16;
                            break;
                        case (0x0101): // ImageLength
                            props.Height = IFDValue32;
                            if (0 == IFDValueBinary[2] + IFDValueBinary[3]) props.Height = IFDValue16;
                            break;
                        // Specific properties
                        case (0x0106): // PhotometricInterpretation
                            photometricInterpretation = IFDValue32;
                            if (IFDValue32 < 2) props.ColorDepth = 1;        // Bilevel or greyscale image
                            else if (2 == IFDValue32) props.ColorDepth = 24; // RGB full color image
                            // NB : A value of 3 would indicate a palette-color image, but has no effect here
                            break;
                        case (0x0102): // BitsPerSample
                            bitsPerSample = IFDValue16;
                            break;
                        case (0x0115): // SamplesPerPixel
                            samplesPerPixel = IFDValue16;
                            break;
                    }
                }

                if (photometricInterpretation < 2) // Bilevel or greyscale
                {
                    props.ColorDepth = bitsPerSample;
                }
                else if (2 == photometricInterpretation) // RGB
                {
                    props.ColorDepth = 8 * samplesPerPixel;
                }
                else if (3 == photometricInterpretation) // Palette
                {
                    props.ColorDepth = 8 * samplesPerPixel;
                    props.NumColorsInPalette = bitsPerSample;
                }
                break;
            case (ImageFormat.Gif):
                byte[] GraphicControlExtensionBlockSignature = new byte[2] { 0x21, 0xf9 };

                props.ColorDepth = 24; // 1 byte for each component

                s.Seek(3, SeekOrigin.Current); // Skip GIF signature
                string version = Utils.Latin1Encoding.GetString(r.ReadBytes(3));
                s.Seek(4, SeekOrigin.Current); // Skip logical screen descriptors

                byte globalPaletteUse = r.ReadByte();
                if (((globalPaletteUse & 0x80) >> 7) > 0) // File uses a global color palette
                {
                    props.NumColorsInPalette = 2 << (globalPaletteUse & 0x07);
                }

                /*
                 * v89a means that the first image block should follow the first graphic control extension block
                 * (which may in turn be located after an application extension block if the GIF is animated)
                 *
                 * => The simplest way to get to the 1st image block is to look for the 1st
                 * graphic control extension block, and to skip it
                 */
                if ("89a".Equals(version))
                {
                    initialPos = s.Position;
                    if (StreamUtils.FindSequence(s, GraphicControlExtensionBlockSignature))
                    {
                        s.Seek(6, SeekOrigin.Current);
                    }
                    else
                    {
                        LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Invalid v89a GIF file; no graphic control extension block found");
                        // GIF is malformed; trying to find the image block directly
                        s.Seek(initialPos, SeekOrigin.Begin);
                        if (StreamUtils.FindSequence(s, new byte[1] { 0x2c }))
                        {
                            s.Seek(-1, SeekOrigin.Current);
                        }
                    }
                }

                // At this point, we should be at the very beginning of the first image block
                if (0x2c == r.ReadByte())
                {
                    s.Seek(4, SeekOrigin.Current); // Skip image position descriptors
                    props.Width = r.ReadInt16();
                    props.Height = r.ReadInt16();

                    // No global palette is set => try and find information in the local palette of the 1st image block
                    if (0 == props.NumColorsInPalette)
                    {
                        props.NumColorsInPalette = (int)Math.Pow(2, ((globalPaletteUse & 0x0F) << 4) + 1);
                    }
                }
                else
                {
                    LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Error parsing GIF file; image block not found");
                }
                break;
            case (ImageFormat.Bmp):
                // Skip useless information
                s.Seek(18, SeekOrigin.Begin);

                props.Width = r.ReadInt32();
                props.Height = r.ReadInt32();
                s.Seek(2, SeekOrigin.Current); // Planes
                props.ColorDepth = r.ReadInt16();
                // No support for BMP color palettes, as they seem to be exotic (and ATL has no use of this information)
                break;
            case (ImageFormat.Png):
                byte[] intData = new byte[4];
                byte[] IHDRChunkSignature = Utils.Latin1Encoding.GetBytes("IHDR");
                byte[] PaletteChunkSignature = Utils.Latin1Encoding.GetBytes("PLTE");

                // Skip header
                s.Seek(8, SeekOrigin.Begin);

                // Scroll chunks until we find IHDR (that should be the first one to appear, but who knows...)
                if (0 == findPngChunk(s, IHDRChunkSignature, limit))
                {
                    LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Invalid PNG file; no IHDR chunk found");
                }
                else
                {
                    // Read IHDR chunk
                    s.Read(intData, 0, 4);
                    props.Width = StreamUtils.DecodeBEInt32(intData);

                    s.Read(intData, 0, 4);
                    props.Height = StreamUtils.DecodeBEInt32(intData);

                    props.ColorDepth = r.ReadByte();
                    int colorType = r.ReadByte();
                    if (3 == colorType) // PNG file uses a palette
                    {
                        s.Seek(7, SeekOrigin.Current); // 3 last useful data + ending chunk CRC

                        uint paletteChunkSize = findPngChunk(s, PaletteChunkSignature, limit);
                        if (0 == paletteChunkSize)
                        {
                            LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Invalid PNG file; palette declared, but no PLTE chunk found");
                        }
                        else
                        {
                            props.NumColorsInPalette = (int)Math.Floor(paletteChunkSize / 3.0);
                        }
                    }
                    else
                    {
                        props.NumColorsInPalette = 0;
                    }
                }
                break;
            case (ImageFormat.Jpeg):
                byte[] shortData = new byte[2];
                byte[] SOF0FrameSignature = new byte[2] { 0xFF, 0xC0 };

                /*
                 * We just need to reach the SOF0 frame describing the actual picture
                 *
                 * In order to handle JPEG files that contain multiple SOF0 frames (see test suite),
                 * the simplest way of proceeding is to look for all SOF0 frames in the first 25% of the file,
                 * and then read the very last one
                 */
                long lastPos = 0;
                while (StreamUtils.FindSequence(s, SOF0FrameSignature, limit))
                {
                    lastPos = s.Position;
                }

                if (0 == lastPos)
                {
                    LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Invalid JPEG file; no SOF0 frame found");
                }
                else
                {
                    // Skip frame length
                    s.Seek(2, SeekOrigin.Current);

                    bitsPerSample = r.ReadByte();

                    s.Read(shortData, 0, 2);
                    props.Height = StreamUtils.DecodeBEUInt16(shortData);

                    s.Read(shortData, 0, 2);
                    props.Width = StreamUtils.DecodeBEUInt16(shortData);

                    byte nbComponents = r.ReadByte();

                    props.ColorDepth = bitsPerSample * nbComponents;
                }
                break;
        }
    }

    return props;
}
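
// --- Illustration only, not part of the original source ---
// A minimal way to exercise GetImageProperties on a picture file read from disk. This helper and the example
// path are hypothetical; the fields printed are the ones populated by the method above.
public static void printImageProperties(string path)
{
    byte[] imageData = System.IO.File.ReadAllBytes(path);
    ImageProperties props = GetImageProperties(imageData); // Format auto-detected from the picture header

    System.Console.WriteLine(props.Format + " : " + props.Width + "x" + props.Height + ", " + props.ColorDepth + " bpp, " + props.NumColorsInPalette + " palette colors");
}
// e.g. printImageProperties("cover.png");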
public void StreamUtils_Exceptions()
{
    Assert.IsFalse(StreamUtils.ArrEqualsArr(new byte[1], new byte[2]));
    Assert.IsFalse(StreamUtils.StringEqualsArr(".", new char[2]));

    try { StreamUtils.DecodeBEUInt16(new byte[1]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeUInt16(new byte[1]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeInt16(new byte[1]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEInt16(new byte[1]); Assert.Fail(); } catch { }

    try { StreamUtils.DecodeBEInt24(new byte[2]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEUInt24(new byte[2]); Assert.Fail(); } catch { }
    try { StreamUtils.EncodeBEUInt24(0x01FFFFFF); Assert.Fail(); } catch { }

    try { StreamUtils.DecodeBEUInt32(new byte[3]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeUInt32(new byte[3]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEInt32(new byte[3]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeInt32(new byte[3]); Assert.Fail(); } catch { }

    try { StreamUtils.DecodeUInt64(new byte[7]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEInt64(new byte[7]); Assert.Fail(); } catch { }

    try { StreamUtils.DecodeSynchSafeInt(new byte[6]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeSynchSafeInt32(new byte[6]); Assert.Fail(); } catch { }
    try { StreamUtils.EncodeSynchSafeInt(1, 0); Assert.Fail(); } catch { }
    try { StreamUtils.EncodeSynchSafeInt(1, 6); Assert.Fail(); } catch { }

    try { StreamUtils.ReadBits(new BinaryReader(new MemoryStream()), 0, 0); Assert.Fail(); } catch { }
    try { StreamUtils.ReadBits(new BinaryReader(new MemoryStream()), 0, 33); Assert.Fail(); } catch { }
}
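
// --- Illustration only, not part of the original source ---
// Note that in the pattern above, a missing exception goes unreported : Assert.Fail throws an AssertFailedException
// that the empty catch swallows. A hypothetical helper like the one below keeps each case on one line while calling
// Assert.Fail outside the try block, without assuming which exception type StreamUtils throws.
private static void assertThrowsAny(Action action)
{
    try { action(); } catch { return; } // Any exception is accepted
    Assert.Fail("Expected an exception");
}
// e.g. assertThrowsAny(() => StreamUtils.DecodeUInt16(new byte[1]));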