private int DecompressTable(MemoryStream output, BigEndianReader reader, WoffTableEntry table, uint length)
{
    int pos = (int)output.Position;

    using (var streamIn = new MemoryStream(reader.Read((int)table.CompressedLength)))
    {
#if NET6_0
        using (var compress = new System.IO.Compression.ZLibStream(streamIn, System.IO.Compression.CompressionMode.Decompress))
        {
            compress.CopyTo(output);
        }
#else
        //InflaterInputStream is provided by ICSharpCode.SharpZipLib.Zip.Compression.Streams
        using (InflaterInputStream decompressor = new InflaterInputStream(streamIn))
        {
            decompressor.CopyTo(output);
        }
#endif
    }

    int len = (int)(output.Position - pos);

    if (len != length)
    {
        return(-1);
    }
    else
    {
        return(len);
    }
}
private static void Method_1(BigEndianReader param1, BigEndianReader param2)
{
    //Read a length-prefixed block of bytes from the first reader
    int _loc3_ = param1.ReadInt32();
    byte[] _loc4_ = new byte[_loc3_];
    param1.Read(_loc4_, 0, _loc3_);

    //Pass the block to Class_5 along with the second reader (decompiled names, purpose not known here)
    var _loc5_ = new Class_5(param2);
    _loc5_.Method_5(_loc4_);

    //Copy the block back out and store it as a UTF-8 string
    var reader = new BinaryReader(new MemoryStream(_loc4_));
    byte[] readbytes = new byte[_loc4_.Length];
    reader.Read(readbytes, 0, _loc4_.Length);
    var_6.Add(Encoding.UTF8.GetString(readbytes));
}
protected override TrueTypeFontTable ReadTable(string tag, uint length, TrueTypeTableEntryList list, BigEndianReader reader)
{
    var table = list[tag] as WoffTableEntry;

    if (table.CompressedLength == table.Length)
    {
        //just copy the data
        var pos = reader.Position;
        var data = reader.Read((int)length);
        table.SetDecompressedData(data);

        //and return to the original
        reader.Position = pos;
        return(base.ReadTable(tag, length, list, reader));
    }

    if (table.DecompressedData == null)
    {
        using (var ms = new System.IO.MemoryStream())
        {
            DecompressTable(ms, reader, table, length);
            table.SetDecompressedData(ms.ToArray());
            ms.Position = 0;

            using (var newReader = new BigEndianReader(ms))
            {
                var read = base.ReadTable(tag, length, list, newReader);
                table.SetTable(read);
            }
        }
    }
    else
    {
        using (var ms = new System.IO.MemoryStream(table.DecompressedData))
        {
            using (var newReader = new BigEndianReader(ms))
            {
                var read = base.ReadTable(tag, length, list, newReader);
                table.SetTable(read);
            }
        }
    }

    return(table.Table);
}
public static byte[] ExtractTTFfromTTC(System.IO.Stream ttc, int ttfHeadOffset)
{
    using (System.IO.MemoryStream ttf = new System.IO.MemoryStream())
    {
        BigEndianReader reader = new BigEndianReader(ttc);
        reader.Position = ttfHeadOffset;

        TrueTypeHeader header;
        if (TrueTypeHeader.TryReadHeader(reader, out header) == false)
        {
            throw new NotSupportedException("The current stream is not a supported OpenType or TrueType font file");
        }

        List<TrueTypeTableEntry> dirs;
        try
        {
            dirs = new List<TrueTypeTableEntry>();
            for (int i = 0; i < header.NumberOfTables; i++)
            {
                TrueTypeTableEntry dir = new TrueTypeTableEntry();
                dir.Read(reader);
                dirs.Add(dir);
            }
        }
        catch (TypefaceReadException) { throw; }
        catch (Exception ex) { throw new TypefaceReadException("Could not read the TTF File", ex); }

        BigEndianWriter writer = new BigEndianWriter(ttf);
        writer.Write(header.Version.HeaderData);
        writer.WriteUInt16((ushort)header.NumberOfTables);
        writer.WriteUInt16((ushort)header.SearchRange);
        writer.WriteUInt16((ushort)header.EntrySelector);
        writer.WriteUInt16((ushort)header.RangeShift);

        long[] dirOffsets = new long[dirs.Count];   //Set to the byte position of the Offset32 in the header to point to the table
        long[] tableOffsets = new long[dirs.Count]; //Set to the byte position of the table in the file

        for (var i = 0; i < dirs.Count; i++)
        {
            var dir = dirs[i];
            writer.WriteASCIIChars(dir.Tag);
            writer.WriteUInt32(dir.CheckSum);

            //Write zero as the offset initially and then we will come back and update
            dirOffsets[i] = writer.Position;
            writer.WriteUInt32(0);
            writer.WriteUInt32(dir.Length);
        }

        for (var i = 0; i < dirs.Count; i++)
        {
            var dir = dirs[i];

            //Pad so each table starts on a 4-byte boundary
            while (writer.Position % 4 != 0)
            {
                writer.WriteByte(Zero);
            }

            //Remember the start position of the table
            tableOffsets[i] = writer.Position;

            reader.Position = dir.Offset;
            //we can improve this
            var data = reader.Read((int)dir.Length);
            writer.Write(data);
        }

        //Go back and patch each directory entry with the actual table offset
        for (int i = 0; i < dirs.Count; i++)
        {
            writer.Position = dirOffsets[i];
            writer.WriteUInt32((uint)tableOffsets[i]);
        }

        writer.Position = 0;
        var fileData = ttf.ToArray();
        return(fileData);
    }
}
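//A hypothetical usage example for ExtractTTFfromTTC: the file paths and the offset value below are
//placeholders only. In practice ttfHeadOffset is one of the Offset32 entries read from the TTC
//header's table directory, each pointing at the table directory of a single font in the collection.
private static void ExtractFirstFontSketch()
{
    using (var ttcStream = System.IO.File.OpenRead("fonts/MyCollection.ttc")) //hypothetical path
    {
        int headOffset = 0x2C; //hypothetical offset, read from the TTC header in a real caller
        byte[] ttfData = ExtractTTFfromTTC(ttcStream, headOffset);
        System.IO.File.WriteAllBytes("fonts/MyFont.ttf", ttfData); //hypothetical output path
    }
}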
private TrueTypeFontTable ReadOS2Table(uint length, TrueTypeTableEntryList list, BigEndianReader reader)
{
    OS2Table os2 = new OS2Table(reader.Position);
    os2.Version = (OS2TableVersion)reader.ReadUInt16();
    os2.XAverageCharWidth = reader.ReadInt16();
    os2.WeightClass = (WeightClass)reader.ReadUInt16();
    os2.WidthClass = (WidthClass)reader.ReadUInt16();
    os2.FSType = (FontRestrictions)reader.ReadUInt16();
    os2.SubscriptXSize = reader.ReadInt16();
    os2.SubscriptYSize = reader.ReadInt16();
    os2.SubscriptXOffset = reader.ReadInt16();
    os2.SubscriptYOffset = reader.ReadInt16();
    os2.SuperScriptXSize = reader.ReadInt16();
    os2.SuperScriptYSize = reader.ReadInt16();
    os2.SuperscriptXOffset = reader.ReadInt16();
    os2.SuperscriptYOffset = reader.ReadInt16();
    os2.StrikeoutSize = reader.ReadInt16();
    os2.StrikeoutPosition = reader.ReadInt16();

    byte hi = reader.ReadByte();
    byte lo = reader.ReadByte();
    os2.FamilyClass = new IBMFontClass(hi, lo);

    byte[] data = reader.Read(10);
    os2.Panose = new PanoseArray(data);

    uint zero = reader.ReadUInt32();
    uint one = reader.ReadUInt32();
    uint two = reader.ReadUInt32();
    uint three = reader.ReadUInt32();
    os2.UnicodeRanges = new UnicodeRanges(zero, one, two, three);

    os2.VendorID = reader.ReadString(4);
    os2.Selection = (FontSelection)reader.ReadUInt16();
    os2.FirstCharIndex = reader.ReadUInt16();
    os2.LastCharIndex = reader.ReadUInt16();
    os2.TypoAscender = reader.ReadInt16();
    os2.TypoDescender = reader.ReadInt16();
    os2.TypoLineGap = reader.ReadInt16();
    os2.WinAscent = reader.ReadUInt16();
    os2.WinDescent = reader.ReadUInt16();

    if (os2.Version >= OS2TableVersion.TrueType166)
    {
        zero = reader.ReadUInt32();
        one = reader.ReadUInt32();
        os2.CodePageRanges = new CodePageRange(zero, one);

        if (os2.Version >= OS2TableVersion.OpenType12)
        {
            os2.Height = reader.ReadInt16();
            os2.CapHeight = reader.ReadInt16();
            os2.DefaultChar = reader.ReadUInt16();
            os2.BreakChar = reader.ReadUInt16();
            os2.MaxContext = reader.ReadUInt16();
        }
    }

    return(os2);
}
public void Transform(BigEndianReader reader)
{
    //The glyf table is split into several substreams, to group like data together.
    //The transformed table consists of a number of fields specifying the size of each of the substreams,
    //followed by the substreams in sequence.
    //During the decoding process the reverse transformation takes place,
    //where data from the various separate substreams is recombined to create a complete glyph record
    //for each entry of the original glyf table.

    //Transformed glyf table
    //Data-Type  Semantic               Description and value type (if applicable)
    //Fixed      version                = 0x00000000
    //UInt16     numGlyphs              Number of glyphs
    //UInt16     indexFormatOffset      Format for the loca table, should be consistent with
    //                                  indexToLocFormat of the original head table (see [OFF] specification)
    //UInt32     nContourStreamSize     Size of nContour stream in bytes
    //UInt32     nPointsStreamSize      Size of nPoints stream in bytes
    //UInt32     flagStreamSize         Size of flag stream in bytes
    //UInt32     glyphStreamSize        Size of glyph stream in bytes (a stream of variable-length encoded values, see description below)
    //UInt32     compositeStreamSize    Size of composite stream in bytes (a stream of variable-length encoded values, see description below)
    //UInt32     bboxStreamSize         Size of bbox data in bytes, the combined length of bboxBitmap (a packed bit array) and bboxStream (a stream of Int16 values)
    //UInt32     instructionStreamSize  Size of instruction stream (a stream of UInt8 values)
    //Int16      nContourStream[]       Stream of Int16 values representing the number of contours for each glyph record
    //255UInt16  nPointsStream[]        Stream of values representing the number of outline points for each contour in the glyph records
    //UInt8      flagStream[]           Stream of UInt8 values representing flag values for each outline point
    //Vary       glyphStream[]          Stream of bytes representing point coordinate values using the variable length encoding format (defined in subclause 5.2)
    //Vary       compositeStream[]      Stream of bytes representing component flag values and associated composite glyph data
    //UInt8      bboxBitmap[]           Bitmap (a numGlyphs-long bit array) indicating explicit bounding boxes
    //Int16      bboxStream[]           Stream of Int16 values representing glyph bounding box data
    //UInt8      instructionStream[]    Stream of UInt8 values representing a set of instructions for each corresponding glyph
    //(See the sketch after this method for the 255UInt16 variable-length encoding used by nPointsStream.)

    uint version = reader.ReadUInt32();
    ushort numGlyphs = reader.ReadUInt16();
    ushort indexFormatOffset = reader.ReadUInt16();

    uint nContourStreamSize = reader.ReadUInt32();    //in bytes
    uint nPointsStreamSize = reader.ReadUInt32();     //in bytes
    uint flagStreamSize = reader.ReadUInt32();        //in bytes
    uint glyphStreamSize = reader.ReadUInt32();       //in bytes
    uint compositeStreamSize = reader.ReadUInt32();   //in bytes
    uint bboxStreamSize = reader.ReadUInt32();        //in bytes
    uint instructionStreamSize = reader.ReadUInt32(); //in bytes

    long expected_nCountStartAt = reader.BaseStream.Position;
    long expected_nPointStartAt = expected_nCountStartAt + nContourStreamSize;
    long expected_FlagStreamStartAt = expected_nPointStartAt + nPointsStreamSize;
    long expected_GlyphStreamStartAt = expected_FlagStreamStartAt + flagStreamSize;
    long expected_CompositeStreamStartAt = expected_GlyphStreamStartAt + glyphStreamSize;
    long expected_BboxStreamStartAt = expected_CompositeStreamStartAt + compositeStreamSize;
    long expected_InstructionStreamStartAt = expected_BboxStreamStartAt + bboxStreamSize;
    long expected_EndAt = expected_InstructionStreamStartAt + instructionStreamSize;

    Glyph[] glyphs = new Glyph[numGlyphs];
    TempGlyph[] allGlyphs = new TempGlyph[numGlyphs];
    List<ushort> compositeGlyphs = new List<ushort>();
    int contourCount = 0;

    for (ushort i = 0; i < numGlyphs; ++i)
    {
        short numContour = reader.ReadInt16();
        allGlyphs[i] = new TempGlyph(i, numContour);

        //>0 => simple glyph, -1 => compound, 0 => empty glyph
        if (numContour > 0)
        {
            contourCount += numContour;
        }
        else if (numContour < 0)
        {
            //composite glyph, resolve later
            compositeGlyphs.Add(i);
        }
        else
        {
            //empty glyph
        }
    }

    ushort[] pntPerContours = new ushort[contourCount];
    for (int i = 0; i < contourCount; ++i)
    {
        //Each of these is the number of points of that contour.
        pntPerContours[i] = reader.Read255UInt16();
    }

    byte[] flagStream = reader.Read((int)flagStreamSize);

    //TODO: Read more from line 296 in OpenFont.Woff2Reader

    using (var composites = new MemoryStream())
    {
        reader.Position = expected_CompositeStreamStartAt;
        var compositeData = reader.Read((int)compositeStreamSize);
        composites.Write(compositeData, 0, (int)compositeStreamSize);

        int compositeGlyphCount = compositeGlyphs.Count;

        using (var compositeReader = new BigEndianReader(composites))
        {
            for (int i = 0; i < compositeGlyphCount; i++)
            {
                ushort compositeGlyphIndex = compositeGlyphs[i];
                bool hasInstructions = CompositeHasInstructions(compositeReader, compositeGlyphIndex);
                if (hasInstructions)
                {
                    break;
                }
                allGlyphs[compositeGlyphIndex].compositeHasInstructions = hasInstructions;
            }
        }
    }
}
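//The nPointsStream above is read through reader.Read255UInt16(), which implements the WOFF2
//255UInt16 variable-length encoding. As a reference, this is a minimal sketch of that decoding as
//described in the WOFF2 specification; the method name Decode255UInt16 and the readUInt8 delegate
//are assumptions for illustration only, the real implementation lives in BigEndianReader.Read255UInt16.
private static ushort Decode255UInt16(Func<byte> readUInt8)
{
    const byte oneMoreByteCode1 = 255;
    const byte oneMoreByteCode2 = 254;
    const byte wordCode = 253;
    const ushort lowestUCode = 253;

    byte code = readUInt8();
    if (code == wordCode)
    {
        //Full 16-bit value stored big-endian in the next two bytes
        return (ushort)((readUInt8() << 8) | readUInt8());
    }
    else if (code == oneMoreByteCode1)
    {
        //Next byte plus 253
        return (ushort)(readUInt8() + lowestUCode);
    }
    else if (code == oneMoreByteCode2)
    {
        //Next byte plus 2 * 253
        return (ushort)(readUInt8() + (lowestUCode * 2));
    }
    else
    {
        //Values 0..252 are stored directly in the code byte
        return code;
    }
}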
private Woff2CacheData ReadCachableDataAfterVersion(BigEndianReader reader, string source)
{
    Woff2CacheData cache = null;
    Woff2Header header = Woff2Header.ReadHeader(this, reader);
    Woff2TableEntryList list = new Woff2TableEntryList();

    uint startAt = (uint)reader.Position;
    uint offset = 0;
    bool hasOs2 = false;
    bool hasFHead = false;
    bool hasName = false;

    for (var i = 0; i < header.NumberOfTables; i++)
    {
        Woff2TableEntry entry = new Woff2TableEntry(offset);
        entry.Read(reader);

        if (entry.Tag == TrueTypeTableNames.WindowsMetrics) { hasOs2 = true; }
        else if (entry.Tag == TrueTypeTableNames.FontHeader) { hasFHead = true; }
        else if (entry.Tag == TrueTypeTableNames.NamingTable) { hasName = true; }

        //If the table data has been transformed,
        //then that will be the offset of the next table
        if (entry.HasTransformation)
        {
            offset += entry.TransformedLength;
        }
        else
        {
            offset += entry.Length;
        }

        list.Add(entry);
    }

    if (!(hasOs2 || hasName))
    {
        return(null); //new Utility.UnknownTypefaceInfo(source, "Not all the required tables (head with OS/2 or name) were found in the font file");
    }
    if (!hasFHead)
    {
        return(null); //new Utility.UnknownTypefaceInfo(source, "Not all the required tables (head with OS/2 or name) were found in the font file");
    }

    //After the table entries, the entire table data is compressed using the Brotli algorithm
    offset = (uint)reader.Position;
    var compressedData = reader.Read((int)header.TotalCompressedSize);
    var decompressedData = Woff2Brotli.DecompressData(compressedData);

    using (var uncompressed = new MemoryStream(decompressedData))
    {
#if RANGE_CHECK
        if (uncompressed.Length + offset != header.TotalFontSize)
        {
            throw new ArgumentOutOfRangeException("The header uncompressed size is not the same");
        }
#endif
        using (var newReader = new BigEndianReader(uncompressed))
        {
            var info = ReadInfoFromTables(list, newReader, source, hasOs2);

            if (null != info && info.FontCount > 0)
            {
                cache = new Woff2CacheData()
                {
                    Entries = list,
                    Header = header,
                    Info = info,
                    Source = source,
                    UncompressedData = decompressedData
                };
            }
        }
    }

    return(cache);
}
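//Woff2Brotli.DecompressData above is this project's own helper. As a rough sketch only (an assumption
//about what such a helper can look like, not its actual implementation), on runtimes that provide
//System.IO.Compression.BrotliStream (.NET Core 2.1+ / .NET 5+) the Brotli-compressed table data can
//be decompressed like this; the method name DecompressBrotliSketch is hypothetical.
private static byte[] DecompressBrotliSketch(byte[] compressed)
{
    using (var input = new System.IO.MemoryStream(compressed))
    using (var brotli = new System.IO.Compression.BrotliStream(input, System.IO.Compression.CompressionMode.Decompress))
    using (var output = new System.IO.MemoryStream())
    {
        //Copy the decompressed bytes into the output buffer and return them as a single array
        brotli.CopyTo(output);
        return output.ToArray();
    }
}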