/// <summary>
/// Read WMA-like formatted fields starting at the given reader's current position, and stopping after the given size
/// </summary>
/// <param name="source">Source to read the fields from</param>
/// <param name="atomDataSize">Max size of the zone to read</param>
/// <returns>List of the detected metadata fields</returns>
public static IList<KeyValuePair<string, string>> ReadFields(BinaryReader source, long atomDataSize)
{
    IList<KeyValuePair<string, string>> result = new List<KeyValuePair<string, string>>();

    long initialPos = source.BaseStream.Position;
    long pos = initialPos;

    while (pos < initialPos + atomDataSize)
    {
        int fieldSize = StreamUtils.DecodeBEInt32(source.ReadBytes(4));
        int stringDataSize = StreamUtils.DecodeBEInt32(source.ReadBytes(4));
        string fieldName = Utils.Latin1Encoding.GetString(source.ReadBytes(stringDataSize));

        source.BaseStream.Seek(4, SeekOrigin.Current);
        stringDataSize = StreamUtils.DecodeBEInt32(source.ReadBytes(4));

        string fieldValue;
        int fieldType = StreamUtils.DecodeBEInt16(source.ReadBytes(2));
        if (19 == fieldType) // Numeric
        {
            fieldValue = source.ReadInt64().ToString();
        }
        else
        {
            fieldValue = Utils.StripEndingZeroChars(Encoding.Unicode.GetString(source.ReadBytes(stringDataSize - 6)));
        }

        result.Add(new KeyValuePair<string, string>(fieldName, fieldValue));

        source.BaseStream.Seek(pos + fieldSize, SeekOrigin.Begin);
        pos += fieldSize;
    }

    return result;
}
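// --- Hedged usage sketch (not part of the library) -------------------------------------
// Shows how ReadFields might be called once the reader is positioned at the start of the
// field zone. The file name, the zone offset and the zone size below are made-up
// placeholders for illustration; in practice they come from the enclosing atom header.
static void DumpFields()
{
    using (FileStream fs = File.OpenRead("sample.mp4"))       // hypothetical input file
    using (BinaryReader reader = new BinaryReader(fs))
    {
        fs.Seek(0x1234, SeekOrigin.Begin);                    // assumed start of the field zone
        long zoneSize = 512;                                  // assumed size of the field zone
        foreach (KeyValuePair<string, string> field in ReadFields(reader, zoneSize))
        {
            Console.WriteLine(field.Key + " = " + field.Value);
        }
    }
}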
// =========== HELPERS FOR TIFF FILES

private static short readInt16(BinaryReader r, bool isBigEndian)
{
    if (isBigEndian)
    {
        return StreamUtils.DecodeBEInt16(r.ReadBytes(2));
    }
    else
    {
        return r.ReadInt16();
    }
}
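// The TIFF parser in GetImageProperties below also calls a readInt32 helper that is not
// reproduced in this section. A minimal sketch of what it presumably looks like, assuming
// it mirrors readInt16 via StreamUtils.DecodeBEInt32 (sketch, not the library's actual code):
private static int readInt32(BinaryReader r, bool isBigEndian)
{
    // Big-endian streams need an explicit decode; BinaryReader reads little-endian by default
    return isBigEndian ? StreamUtils.DecodeBEInt32(r.ReadBytes(4)) : r.ReadInt32();
}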
public void StreamUtils_BEInt16Converters()
{
    short intValue = 0x3529;

    // Decoding a big-endian byte pair: { 0x00, 0xFF } reads as 0x00FF
    Assert.AreEqual((short)0x00FF, StreamUtils.DecodeBEInt16(new byte[2] { 0x00, 0xFF }));

    // Encode/decode round-trip
    byte[] byteValue = StreamUtils.EncodeBEInt16(intValue);
    Assert.AreEqual(intValue, StreamUtils.DecodeBEInt16(byteValue));
}
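// For reference: what a big-endian 16-bit decode boils down to, equivalent in behaviour to
// what the test above exercises (a sketch, not StreamUtils' actual implementation):
static short DecodeBEInt16Manually(byte[] data)
{
    // Most significant byte first: { 0x00, 0xFF } => 0x00FF, { 0x35, 0x29 } => 0x3529
    return (short)((data[0] << 8) | data[1]);
}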
public static ImageProperties GetImageProperties(byte[] imageData, ImageFormat format = ImageFormat.Undefined)
{
    ImageProperties props = new ImageProperties();

    if (ImageFormat.Undefined.Equals(format))
    {
        format = GetImageFormatFromPictureHeader(imageData);
    }
    if (format.Equals(ImageFormat.Unsupported))
    {
        return props;
    }

    props.NumColorsInPalette = 0;
    props.Format = format;

    using (MemoryStream s = new MemoryStream(imageData))
    using (BinaryReader r = new BinaryReader(s))
    {
        long limit = (long)Math.Round(s.Length * 0.25); // TODO - test and adjust limit

        switch (format)
        {
            case (ImageFormat.Tiff):
                bool isBigEndian = (0x4D == r.ReadByte());
                s.Seek(3, SeekOrigin.Current); // Skip the rest of the signature

                long IFDOffset = readInt32(r, isBigEndian);
                s.Seek(IFDOffset, SeekOrigin.Begin);

                int nbIFDEntries = readInt16(r, isBigEndian);
                long initialPos = s.Position;

                int IFDtag, IFDFieldType, IFDNbValues, IFDValue32, IFDValue16;
                byte[] IFDValueBinary;

                int photometricInterpretation = 0;
                int bitsPerSample = 0;
                int samplesPerPixel = 0;

                for (int i = 0; i < nbIFDEntries; i++)
                {
                    IFDtag = readInt16(r, isBigEndian);
                    IFDFieldType = readInt16(r, isBigEndian);
                    IFDNbValues = readInt32(r, isBigEndian);
                    IFDValueBinary = r.ReadBytes(4);
                    IFDValue32 = isBigEndian ? StreamUtils.DecodeBEInt32(IFDValueBinary) : StreamUtils.DecodeInt32(IFDValueBinary);
                    IFDValue16 = isBigEndian ? StreamUtils.DecodeBEInt16(IFDValueBinary) : StreamUtils.DecodeInt16(IFDValueBinary);

                    switch (IFDtag)
                    {
                        // Common properties
                        case (0x0100):
                            props.Width = IFDValue32;
                            // Specs say "SHORT or LONG" but the implementation actually takes up 4 bytes anyway
                            // -> we'll assume it's a SHORT if the last two bytes are null
                            if (0 == IFDValueBinary[2] + IFDValueBinary[3])
                            {
                                props.Width = IFDValue16;
                            }
                            break;
                        case (0x0101):
                            props.Height = IFDValue32;
                            if (0 == IFDValueBinary[2] + IFDValueBinary[3])
                            {
                                props.Height = IFDValue16;
                            }
                            break;
                        // Specific properties
                        case (0x0106): // PhotometricInterpretation
                            photometricInterpretation = IFDValue32;
                            if (IFDValue32 < 2)
                            {
                                props.ColorDepth = 1; // Bilevel or greyscale image
                            }
                            else if (2 == IFDValue32)
                            {
                                props.ColorDepth = 24; // RGB full color image
                            }
                            // NB : A value of 3 would indicate a palette-color image, but has no effect here
                            break;
                        case (0x0102): // BitsPerSample
                            bitsPerSample = IFDValue16;
                            break;
                        case (0x0115): // SamplesPerPixel
                            samplesPerPixel = IFDValue16;
                            break;
                    }
                }

                if (photometricInterpretation < 2) // Bilevel or greyscale
                {
                    props.ColorDepth = bitsPerSample;
                }
                else if (2 == photometricInterpretation) // RGB
                {
                    props.ColorDepth = 8 * samplesPerPixel;
                }
                else if (3 == photometricInterpretation) // Palette
                {
                    props.ColorDepth = 8 * samplesPerPixel;
                    props.NumColorsInPalette = bitsPerSample;
                }
                break;

            case (ImageFormat.Gif):
                byte[] GraphicControlExtensionBlockSignature = new byte[2] { 0x21, 0xf9 };

                props.ColorDepth = 24; // 1 byte for each component

                s.Seek(3, SeekOrigin.Current); // Skip GIF signature
                string version = Utils.Latin1Encoding.GetString(r.ReadBytes(3));

                s.Seek(4, SeekOrigin.Current); // Skip logical screen descriptors

                byte globalPaletteUse = r.ReadByte();
                if (((globalPaletteUse & 0x80) >> 7) > 0) // File uses a global color palette
                {
                    props.NumColorsInPalette = 2 << (globalPaletteUse & 0x07);
                }

                /*
                 * v89a means that the first image block should follow the first graphic control extension block
                 * (which may in turn be located after an application extension block if the GIF is animated)
                 *
                 * => The simplest way to get to the 1st image block is to look for the 1st
                 * graphic control extension block, and to skip it
                 */
                if ("89a".Equals(version))
                {
                    initialPos = s.Position;
                    if (StreamUtils.FindSequence(s, GraphicControlExtensionBlockSignature))
                    {
                        s.Seek(6, SeekOrigin.Current);
                    }
                    else
                    {
                        LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Invalid v89a GIF file; no graphic control extension block found");
                        // GIF is malformed; trying to find the image block directly
                        s.Seek(initialPos, SeekOrigin.Begin);
                        if (StreamUtils.FindSequence(s, new byte[1] { 0x2c }))
                        {
                            s.Seek(-1, SeekOrigin.Current);
                        }
                    }
                }

                // At this point, we should be at the very beginning of the first image block
                if (0x2c == r.ReadByte())
                {
                    s.Seek(4, SeekOrigin.Current); // Skip image position descriptors

                    props.Width = r.ReadInt16();
                    props.Height = r.ReadInt16();

                    // No global palette is set => try and find information in the local palette of the 1st image block
                    if (0 == props.NumColorsInPalette)
                    {
                        props.NumColorsInPalette = (int)Math.Pow(2, ((globalPaletteUse & 0x0F) << 4) + 1);
                    }
                }
                else
                {
                    LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Error parsing GIF file; image block not found");
                }
                break;

            case (ImageFormat.Bmp):
                // Skip useless information
                s.Seek(18, SeekOrigin.Begin);

                props.Width = r.ReadInt32();
                props.Height = r.ReadInt32();
                s.Seek(2, SeekOrigin.Current); // Planes
                props.ColorDepth = r.ReadInt16();
                // No support for BMP color palettes, as they seem to be exotic (and ATL has no use of this information)
                break;

            case (ImageFormat.Png):
                byte[] intData = new byte[4];
                byte[] IHDRChunkSignature = Utils.Latin1Encoding.GetBytes("IHDR");
                byte[] PaletteChunkSignature = Utils.Latin1Encoding.GetBytes("PLTE");

                // Skip header
                s.Seek(8, SeekOrigin.Begin);

                // Scroll chunks until we find IHDR (that should be the first one to appear, but who knows...)
                if (0 == findPngChunk(s, IHDRChunkSignature, limit))
                {
                    LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Invalid PNG file; no IHDR chunk found");
                }
                else
                {
                    // Read IHDR chunk
                    s.Read(intData, 0, 4);
                    props.Width = StreamUtils.DecodeBEInt32(intData);

                    s.Read(intData, 0, 4);
                    props.Height = StreamUtils.DecodeBEInt32(intData);

                    props.ColorDepth = r.ReadByte();

                    int colorType = r.ReadByte();
                    if (3 == colorType) // PNG file uses a palette
                    {
                        s.Seek(7, SeekOrigin.Current); // Skip the last 3 IHDR data bytes + the 4-byte chunk CRC

                        uint paletteChunkSize = findPngChunk(s, PaletteChunkSignature, limit);
                        if (0 == paletteChunkSize)
                        {
                            LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Invalid PNG file; palette declared, but no PLTE chunk found");
                        }
                        else
                        {
                            props.NumColorsInPalette = (int)Math.Floor(paletteChunkSize / 3.0);
                        }
                    }
                    else
                    {
                        props.NumColorsInPalette = 0;
                    }
                }
                break;

            case (ImageFormat.Jpeg):
                byte[] shortData = new byte[2];
                byte[] SOF0FrameSignature = new byte[2] { 0xFF, 0xC0 };

                /*
                 * We just need to reach the SOF0 frame describing the actual picture
                 *
                 * In order to handle JPEG files that contain multiple SOF0 frames (see test suite),
                 * the simplest way of proceeding is to look for all SOF0 frames in the first 25% of the file,
                 * and then read the very last one
                 */
                long lastPos = 0;
                while (StreamUtils.FindSequence(s, SOF0FrameSignature, limit))
                {
                    lastPos = s.Position;
                }

                if (0 == lastPos)
                {
                    LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Invalid JPEG file; no SOF0 frame found");
                }
                else
                {
                    // Skip frame length
                    s.Seek(2, SeekOrigin.Current);

                    bitsPerSample = r.ReadByte();

                    s.Read(shortData, 0, 2);
                    props.Height = StreamUtils.DecodeBEUInt16(shortData);

                    s.Read(shortData, 0, 2);
                    props.Width = StreamUtils.DecodeBEUInt16(shortData);

                    byte nbComponents = r.ReadByte();

                    props.ColorDepth = bitsPerSample * nbComponents;
                }
                break;
        }
    }

    return props;
}
// =========== HELPERS FOR TIFF FILES

private static short readInt16(BinaryReader r, bool isBigEndian)
{
    return isBigEndian ? StreamUtils.DecodeBEInt16(r.ReadBytes(2)) : r.ReadInt16();
}
protected override bool read(BinaryReader source, MetaDataIO.ReadTagParams readTagParams)
{
    bool result = false;
    long position;

    resetData();
    source.BaseStream.Seek(0, SeekOrigin.Begin);

    if (AIFF_CONTAINER_ID.Equals(Utils.Latin1Encoding.GetString(source.ReadBytes(4))))
    {
        // Container chunk size
        long containerChunkPos = source.BaseStream.Position;
        int containerChunkSize = StreamUtils.DecodeBEInt32(source.ReadBytes(4));

        if (containerChunkPos + containerChunkSize + 4 != source.BaseStream.Length)
        {
            LogDelegator.GetLogDelegate()(Log.LV_WARNING, "Header size is inconsistent with file size");
        }

        // Form type
        format = Utils.Latin1Encoding.GetString(source.ReadBytes(4));

        if (format.Equals(FORMTYPE_AIFF) || format.Equals(FORMTYPE_AIFC))
        {
            isValid = true;

            StringBuilder commentStr = new StringBuilder("");
            long soundChunkPosition = 0;
            long soundChunkSize = 0; // Header size included
            bool nameFound = false;
            bool authorFound = false;
            bool copyrightFound = false;
            bool commentsFound = false;
            long limit = Math.Min(containerChunkPos + containerChunkSize + 4, source.BaseStream.Length);

            int annotationIndex = 0;
            int commentIndex = 0;

            while (source.BaseStream.Position < limit)
            {
                ChunkHeader header = seekNextChunkHeader(source, limit);
                position = source.BaseStream.Position;

                if (header.ID.Equals(CHUNKTYPE_COMMON))
                {
                    short channels = StreamUtils.DecodeBEInt16(source.ReadBytes(2));
                    switch (channels)
                    {
                        case 1: channelsArrangement = MONO; break;
                        case 2: channelsArrangement = STEREO; break;
                        case 3: channelsArrangement = ISO_3_0_0; break;
                        case 4: channelsArrangement = ISO_2_2_0; break; // Specs actually allow both 2/2.0 and LRCS
                        case 6: channelsArrangement = LRLcRcCS; break;
                        default: channelsArrangement = UNKNOWN; break;
                    }

                    numSampleFrames = StreamUtils.DecodeBEUInt32(source.ReadBytes(4));
                    sampleSize = (uint)StreamUtils.DecodeBEInt16(source.ReadBytes(2)); // This sample size is for uncompressed data only

                    byte[] byteArray = source.ReadBytes(10);
                    Array.Reverse(byteArray);
                    double aSampleRate = StreamUtils.ExtendedToDouble(byteArray);

                    if (format.Equals(FORMTYPE_AIFC))
                    {
                        compression = Utils.Latin1Encoding.GetString(source.ReadBytes(4));
                    }
                    else // AIFF <=> no compression
                    {
                        compression = COMPRESSION_NONE;
                    }

                    if (aSampleRate > 0)
                    {
                        sampleRate = (int)Math.Round(aSampleRate);
                        duration = (double)numSampleFrames * 1000.0 / sampleRate;

                        if (!compression.Equals(COMPRESSION_NONE)) // Sample size is specific to selected compression method
                        {
                            if (compression.ToLower().Equals("fl32")) sampleSize = 32;
                            else if (compression.ToLower().Equals("fl64")) sampleSize = 64;
                            else if (compression.ToLower().Equals("alaw")) sampleSize = 8;
                            else if (compression.ToLower().Equals("ulaw")) sampleSize = 8;
                        }
                        if (duration > 0)
                        {
                            bitrate = sampleSize * numSampleFrames * channelsArrangement.NbChannels / duration;
                        }
                    }
                }
                else if (header.ID.Equals(CHUNKTYPE_SOUND))
                {
                    soundChunkPosition = source.BaseStream.Position - 8;
                    soundChunkSize = header.Size + 8;
                }
                else if (header.ID.Equals(CHUNKTYPE_NAME) || header.ID.Equals(CHUNKTYPE_AUTHOR) || header.ID.Equals(CHUNKTYPE_COPYRIGHT))
                {
                    structureHelper.AddZone(source.BaseStream.Position - 8, header.Size + 8, header.ID);
                    structureHelper.AddSize(containerChunkPos, containerChunkSize, header.ID);

                    tagExists = true;
                    if (header.ID.Equals(CHUNKTYPE_NAME)) nameFound = true;
                    if (header.ID.Equals(CHUNKTYPE_AUTHOR)) authorFound = true;
                    if (header.ID.Equals(CHUNKTYPE_COPYRIGHT)) copyrightFound = true;

                    SetMetaField(header.ID, Utils.Latin1Encoding.GetString(source.ReadBytes(header.Size)), readTagParams.ReadAllMetaFrames);
                }
                else if (header.ID.Equals(CHUNKTYPE_ANNOTATION))
                {
                    annotationIndex++;
                    structureHelper.AddZone(source.BaseStream.Position - 8, header.Size + 8, header.ID + annotationIndex);
                    structureHelper.AddSize(containerChunkPos, containerChunkSize, header.ID + annotationIndex);

                    if (commentStr.Length > 0) commentStr.Append(Settings.InternalValueSeparator);
                    commentStr.Append(Utils.Latin1Encoding.GetString(source.ReadBytes(header.Size)));
                    tagExists = true;
                }
                else if (header.ID.Equals(CHUNKTYPE_COMMENTS))
                {
                    commentIndex++;
                    structureHelper.AddZone(source.BaseStream.Position - 8, header.Size + 8, header.ID + commentIndex);
                    structureHelper.AddSize(containerChunkPos, containerChunkSize, header.ID + commentIndex);

                    tagExists = true;
                    commentsFound = true;

                    ushort numComs = StreamUtils.DecodeBEUInt16(source.ReadBytes(2));

                    for (int i = 0; i < numComs; i++)
                    {
                        CommentData cmtData = new CommentData();
                        cmtData.Timestamp = StreamUtils.DecodeBEUInt32(source.ReadBytes(4));
                        cmtData.MarkerId = StreamUtils.DecodeBEInt16(source.ReadBytes(2));

                        // Comment text length
                        ushort comLength = StreamUtils.DecodeBEUInt16(source.ReadBytes(2));
                        MetaFieldInfo comment = new MetaFieldInfo(getImplementedTagType(), header.ID + commentIndex);
                        comment.Value = Utils.Latin1Encoding.GetString(source.ReadBytes(comLength));
                        comment.SpecificData = cmtData;
                        tagData.AdditionalFields.Add(comment);

                        // Only read general purpose comments, not those linked to a marker
                        if (0 == cmtData.MarkerId)
                        {
                            if (commentStr.Length > 0) commentStr.Append(Settings.InternalValueSeparator);
                            commentStr.Append(comment.Value);
                        }
                    }
                }
                else if (header.ID.Equals(CHUNKTYPE_ID3TAG))
                {
                    id3v2Offset = source.BaseStream.Position;

                    // Zone is already added by Id3v2.Read
                    id3v2StructureHelper.AddZone(id3v2Offset - 8, header.Size + 8, CHUNKTYPE_ID3TAG);
                    id3v2StructureHelper.AddSize(containerChunkPos, containerChunkSize, CHUNKTYPE_ID3TAG);
                }

                source.BaseStream.Position = position + header.Size;
                if (header.ID.Equals(CHUNKTYPE_SOUND) && header.Size % 2 > 0)
                {
                    source.BaseStream.Position += 1; // Sound chunk size must be even
                }
            }

            tagData.IntegrateValue(TagData.TAG_FIELD_COMMENT, commentStr.ToString().Replace("\0", " ").Trim());

            if (-1 == id3v2Offset)
            {
                id3v2Offset = 0; // Switch status to "tried to read, but nothing found"

                if (readTagParams.PrepareForWriting)
                {
                    id3v2StructureHelper.AddZone(soundChunkPosition + soundChunkSize, 0, CHUNKTYPE_ID3TAG);
                    id3v2StructureHelper.AddSize(containerChunkPos, containerChunkSize, CHUNKTYPE_ID3TAG);
                }
            }

            // Add zone placeholders for future tag writing
            if (readTagParams.PrepareForWriting)
            {
                if (!nameFound)
                {
                    structureHelper.AddZone(soundChunkPosition, 0, CHUNKTYPE_NAME);
                    structureHelper.AddSize(containerChunkPos, containerChunkSize, CHUNKTYPE_NAME);
                }
                if (!authorFound)
                {
                    structureHelper.AddZone(soundChunkPosition, 0, CHUNKTYPE_AUTHOR);
                    structureHelper.AddSize(containerChunkPos, containerChunkSize, CHUNKTYPE_AUTHOR);
                }
                if (!copyrightFound)
                {
                    structureHelper.AddZone(soundChunkPosition, 0, CHUNKTYPE_COPYRIGHT);
                    structureHelper.AddSize(containerChunkPos, containerChunkSize, CHUNKTYPE_COPYRIGHT);
                }
                if (!commentsFound)
                {
                    structureHelper.AddZone(soundChunkPosition, 0, CHUNKTYPE_COMMENTS);
                    structureHelper.AddSize(containerChunkPos, containerChunkSize, CHUNKTYPE_COMMENTS);
                }
            }

            result = true;
        }
    }

    return result;
}
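// The COMM chunk stores the sample rate as a 10-byte IEEE 754 extended-precision ("80-bit")
// float, which the code above converts via StreamUtils.ExtendedToDouble after reversing the
// byte order. A minimal sketch of that conversion, assuming the standard 80-bit layout
// (1 sign bit, 15-bit exponent, 64-bit mantissa with explicit integer bit) and big-endian
// input as read from the file; this is not the library's actual implementation:
static double ExtendedToDoubleSketch(byte[] be10)
{
    int sign = (be10[0] & 0x80) != 0 ? -1 : 1;
    int exponent = ((be10[0] & 0x7F) << 8) | be10[1];   // 15-bit biased exponent

    ulong mantissa = 0;
    for (int i = 2; i < 10; i++) mantissa = (mantissa << 8) | be10[i];   // 64-bit mantissa

    if (exponent == 0 && mantissa == 0) return 0.0;

    // value = sign * mantissa * 2^(exponent - 16383 - 63)
    // e.g. 0x400E AC44 0000 0000 0000 decodes to 44100 (Hz)
    return sign * (double)mantissa * Math.Pow(2.0, exponent - 16383 - 63);
}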
// =========== HELPERS FOR TIFF FILES

private static short readInt16(BinaryReader r, bool isBigEndian)
{
    return isBigEndian ? StreamUtils.DecodeBEInt16(r.ReadBytes(2)) : r.ReadInt16();
}
public void StreamUtils_Exceptions()
{
    Assert.IsFalse(StreamUtils.ArrEqualsArr(new byte[1], new byte[2]));
    Assert.IsFalse(StreamUtils.StringEqualsArr(".", new char[2]));

    // Each call below is expected to throw on invalid input
    // (buffer too short, or value/size argument out of range)
    try { StreamUtils.DecodeBEUInt16(new byte[1]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeUInt16(new byte[1]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeInt16(new byte[1]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEInt16(new byte[1]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEInt24(new byte[2]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEUInt24(new byte[2]); Assert.Fail(); } catch { }
    try { StreamUtils.EncodeBEUInt24(0x01FFFFFF); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEUInt32(new byte[3]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeUInt32(new byte[3]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEInt32(new byte[3]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeInt32(new byte[3]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeUInt64(new byte[7]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeBEInt64(new byte[7]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeSynchSafeInt(new byte[6]); Assert.Fail(); } catch { }
    try { StreamUtils.DecodeSynchSafeInt32(new byte[6]); Assert.Fail(); } catch { }
    try { StreamUtils.EncodeSynchSafeInt(1, 0); Assert.Fail(); } catch { }
    try { StreamUtils.EncodeSynchSafeInt(1, 6); Assert.Fail(); } catch { }
    try { StreamUtils.ReadBits(new BinaryReader(new MemoryStream()), 0, 0); Assert.Fail(); } catch { }
    try { StreamUtils.ReadBits(new BinaryReader(new MemoryStream()), 0, 33); Assert.Fail(); } catch { }
}