public unsafe static SbitTable Read(DataReader reader, TableRecord[] tables) {
    if (!SfntTables.SeekToTable(reader, tables, FourCC.Eblc))
        return null;

    // skip version
    var baseOffset = reader.Position;
    reader.Skip(sizeof(int));

    // load each strike table
    var count = reader.ReadInt32BE();
    if (count > MaxBitmapStrikes)
        throw new InvalidFontException("Too many bitmap strikes in font.");

    var sizeTableHeaders = stackalloc BitmapSizeTable[count];
    for (int i = 0; i < count; i++) {
        sizeTableHeaders[i].SubTableOffset = reader.ReadUInt32BE();
        sizeTableHeaders[i].SubTableSize = reader.ReadUInt32BE();
        sizeTableHeaders[i].SubTableCount = reader.ReadUInt32BE();

        // skip colorRef, metrics entries, start and end glyph indices
        reader.Skip(sizeof(uint) + sizeof(ushort) * 2 + 12 * 2);

        sizeTableHeaders[i].PpemX = reader.ReadByte();
        sizeTableHeaders[i].PpemY = reader.ReadByte();
        sizeTableHeaders[i].BitDepth = reader.ReadByte();
        sizeTableHeaders[i].Flags = (BitmapSizeFlags)reader.ReadByte();
    }

    // read index subtables
    var indexSubTables = stackalloc IndexSubTable[count];
    for (int i = 0; i < count; i++) {
        reader.Seek(baseOffset + sizeTableHeaders[i].SubTableOffset);

        indexSubTables[i] = new IndexSubTable {
            FirstGlyph = reader.ReadUInt16BE(),
            LastGlyph = reader.ReadUInt16BE(),
            Offset = reader.ReadUInt32BE()
        };
    }

    // read the actual data for each strike table
    for (int i = 0; i < count; i++) {
        // read the subtable header
        reader.Seek(baseOffset + sizeTableHeaders[i].SubTableOffset + indexSubTables[i].Offset);
        var indexFormat = reader.ReadUInt16BE();
        var imageFormat = reader.ReadUInt16BE();
        var imageDataOffset = reader.ReadUInt32BE();
    }

    return null;
}
static string ExtractString(DataReader reader, uint baseOffset, StringData data) {
    reader.Seek(baseOffset + data.Offset);

    var bytes = reader.ReadBytes(data.Length);
    return Encoding.BigEndianUnicode.GetString(bytes);
}
public static byte[] ReadProgram(DataReader reader, TableRecord[] tables, FourCC tag) {
    var index = FindTable(tables, tag);
    if (index == -1)
        return null;

    reader.Seek(tables[index].Offset);
    return reader.ReadBytes((int)tables[index].Length);
}
public static bool SeekToTable(DataReader reader, TableRecord[] tables, FourCC tag, bool required = false) {
    // check if we have the desired table and that it's not empty
    var index = FindTable(tables, tag);
    if (index == -1 || tables[index].Length == 0) {
        if (required)
            throw new InvalidFontException($"Missing or empty '{tag}' table.");
        return false;
    }

    // seek to the appropriate offset
    reader.Seek(tables[index].Offset);
    return true;
}
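// SeekToTable and FindTable operate on a TableRecord[] parsed from the SFNT table
// directory, whose parsing code is not shown here. The following is a minimal
// standalone sketch of that directory layout (big-endian: sfnt version, numTables,
// searchRange, entrySelector, rangeShift, then 16-byte records of tag/checksum/
// offset/length). SfntTableRecord and ReadTableDirectory are hypothetical names
// for illustration only, not types from this library.
using System;
using System.Buffers.Binary;

struct SfntTableRecord {
    public uint Tag;      // four-character table tag, e.g. 'cmap'
    public uint CheckSum;
    public uint Offset;   // offset from the beginning of the font file
    public uint Length;
}

static class SfntDirectorySketch {
    public static SfntTableRecord[] ReadTableDirectory(ReadOnlySpan<byte> font) {
        // offset 4: numTables (the first 4 bytes are the sfnt version tag)
        var numTables = BinaryPrimitives.ReadUInt16BigEndian(font.Slice(4));
        var records = new SfntTableRecord[numTables];

        // table records start at offset 12, 16 bytes each
        for (int i = 0; i < numTables; i++) {
            var rec = font.Slice(12 + i * 16);
            records[i] = new SfntTableRecord {
                Tag = BinaryPrimitives.ReadUInt32BigEndian(rec),
                CheckSum = BinaryPrimitives.ReadUInt32BigEndian(rec.Slice(4)),
                Offset = BinaryPrimitives.ReadUInt32BigEndian(rec.Slice(8)),
                Length = BinaryPrimitives.ReadUInt32BigEndian(rec.Slice(12))
            };
        }
        return records;
    }

    // linear scan by tag, analogous to what this library's FindTable presumably does
    public static int FindTableByTag(SfntTableRecord[] tables, uint tag) {
        for (int i = 0; i < tables.Length; i++)
            if (tables[i].Tag == tag)
                return i;
        return -1;
    }
}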
public static FUnit[] ReadCvt(DataReader reader, TableRecord[] tables) {
    var index = FindTable(tables, FourCC.Cvt);
    if (index == -1)
        return null;

    reader.Seek(tables[index].Offset);

    var results = new FUnit[tables[index].Length / sizeof(short)];
    for (int i = 0; i < results.Length; i++)
        results[i] = (FUnit)reader.ReadInt16BE();

    return results;
}
public static KerningTable ReadKern(DataReader reader, TableRecord[] tables) {
    // kern table is optional
    if (!SfntTables.SeekToTable(reader, tables, FourCC.Kern))
        return null;

    // skip version
    reader.Skip(sizeof(short));

    // read each subtable and accumulate kerning values
    var tableData = new Dictionary<uint, int>();
    var subtableCount = reader.ReadUInt16BE();
    for (int i = 0; i < subtableCount; i++) {
        // skip version
        var currentOffset = reader.Position;
        reader.Skip(sizeof(short));

        var length = reader.ReadUInt16BE();
        var coverage = reader.ReadUInt16BE();

        // we (and Windows) only support Format 0 tables
        // only care about tables with horizontal kerning data
        var kc = (KernCoverage)coverage;
        if ((coverage & FormatMask) == 0 && (kc & KernCoverage.Horizontal) != 0 && (kc & KernCoverage.CrossStream) == 0) {
            // read the number of entries; skip over the rest of the header
            var entryCount = reader.ReadUInt16BE();
            reader.Skip(sizeof(short) * 3);

            var isMin = (kc & KernCoverage.Minimum) != 0;
            var isOverride = (kc & KernCoverage.Override) != 0;

            // read in each entry and accumulate its kerning data
            for (int j = 0; j < entryCount; j++) {
                var left = reader.ReadUInt16BE();
                var right = reader.ReadUInt16BE();
                var value = reader.ReadInt16BE();

                // look up the current value, if we have one; if not, start at zero
                int current = 0;
                var key = ((uint)left << 16) | right;
                tableData.TryGetValue(key, out current);

                if (isMin) {
                    if (current < value)
                        tableData[key] = value;
                }
                else if (isOverride)
                    tableData[key] = value;
                else
                    tableData[key] = current + value;
            }
        }

        // jump to the next subtable
        reader.Seek(currentOffset + length);
    }

    return new KerningTable(tableData);
}
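// ReadKern above folds each (left, right) glyph pair into a single uint key via
// ((uint)left << 16) | right. Below is a minimal standalone sketch of that packing
// and the corresponding lookup; KernPairSketch and its members are illustrative
// names only, not part of this library.
using System.Collections.Generic;

static class KernPairSketch {
    public static uint PackKey(ushort left, ushort right) => ((uint)left << 16) | right;

    public static (ushort Left, ushort Right) UnpackKey(uint key) =>
        ((ushort)(key >> 16), (ushort)(key & 0xFFFF));

    // returns the accumulated adjustment for a pair, or 0 if the font defines none
    public static int Lookup(Dictionary<uint, int> tableData, ushort left, ushort right) {
        tableData.TryGetValue(PackKey(left, right), out var value);
        return value;
    }
}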
unsafe static CharacterMap ReadCmapFormat4(DataReader reader) {
    // skip over length and language
    reader.Skip(sizeof(short) * 2);

    // figure out how many segments we have
    var segmentCount = reader.ReadUInt16BE() / 2;
    if (segmentCount > MaxSegments)
        throw new Exception("Too many cmap segments.");

    // skip over searchRange, entrySelector, and rangeShift
    reader.Skip(sizeof(short) * 3);

    // read in segment ranges
    var endCount = stackalloc int[segmentCount];
    for (int i = 0; i < segmentCount; i++)
        endCount[i] = reader.ReadUInt16BE();

    reader.Skip(sizeof(short)); // padding

    var startCount = stackalloc int[segmentCount];
    for (int i = 0; i < segmentCount; i++)
        startCount[i] = reader.ReadUInt16BE();

    var idDelta = stackalloc int[segmentCount];
    for (int i = 0; i < segmentCount; i++)
        idDelta[i] = reader.ReadInt16BE();

    // build table from each segment
    var table = new Dictionary<CodePoint, int>();
    for (int i = 0; i < segmentCount; i++) {
        // read the "idRangeOffset" for the current segment
        // if nonzero, we need to jump into the glyphIdArray to figure out the mapping
        // the layout is bizarre; see the OpenType spec for details
        var idRangeOffset = reader.ReadUInt16BE();
        if (idRangeOffset != 0) {
            var currentOffset = reader.Position;
            reader.Seek(currentOffset + idRangeOffset - sizeof(ushort));

            var end = endCount[i];
            var delta = idDelta[i];
            for (var codepoint = startCount[i]; codepoint <= end; codepoint++) {
                var glyphId = reader.ReadUInt16BE();
                if (glyphId != 0) {
                    var glyphIndex = (glyphId + delta) & 0xFFFF;
                    if (glyphIndex != 0)
                        table.Add((CodePoint)codepoint, glyphIndex);
                }
            }

            reader.Seek(currentOffset);
        }
        else {
            // otherwise, do a straight iteration through the segment
            var end = endCount[i];
            var delta = idDelta[i];
            for (var codepoint = startCount[i]; codepoint <= end; codepoint++) {
                var glyphIndex = (codepoint + delta) & 0xFFFF;
                if (glyphIndex != 0)
                    table.Add((CodePoint)codepoint, glyphIndex);
            }
        }
    }

    return new CharacterMap(table);
}
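// ReadCmapFormat4 above expands every segment into a dictionary up front. For
// reference, this is the equivalent per-codepoint lookup arithmetic from the
// OpenType cmap format 4 description, written against parallel arrays that are
// assumed to have already been read out of the subtable (endCode, startCode,
// idDelta, idRangeOffset, glyphIdArray). CmapFormat4Sketch is an illustrative
// standalone helper, not the lookup path this library actually uses.
static class CmapFormat4Sketch {
    public static int MapCodepoint(
        int codepoint,
        ushort[] endCode, ushort[] startCode, short[] idDelta,
        ushort[] idRangeOffset, ushort[] glyphIdArray
    ) {
        var segCount = endCode.Length;
        for (int i = 0; i < segCount; i++) {
            if (codepoint > endCode[i])
                continue;
            if (codepoint < startCode[i])
                return 0; // not mapped

            if (idRangeOffset[i] == 0)
                return (codepoint + idDelta[i]) & 0xFFFF;

            // idRangeOffset is a byte offset from its own position in the file into
            // glyphIdArray, which immediately follows the idRangeOffset array
            var index = idRangeOffset[i] / 2 + (codepoint - startCode[i]) - (segCount - i);
            var glyphId = glyphIdArray[index];
            return glyphId == 0 ? 0 : (glyphId + idDelta[i]) & 0xFFFF;
        }
        return 0;
    }
}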
public static CharacterMap ReadCmap(DataReader reader, TableRecord[] tables) {
    SfntTables.SeekToTable(reader, tables, FourCC.Cmap, required: true);

    // skip version
    var cmapOffset = reader.Position;
    reader.Skip(sizeof(short));

    // read all of the subtable headers
    var subtableCount = reader.ReadUInt16BE();
    var subtableHeaders = new CmapSubtableHeader[subtableCount];
    for (int i = 0; i < subtableHeaders.Length; i++) {
        subtableHeaders[i] = new CmapSubtableHeader {
            PlatformID = reader.ReadUInt16BE(),
            EncodingID = reader.ReadUInt16BE(),
            Offset = reader.ReadUInt32BE()
        };
    }

    // search for a "full" Unicode table first
    var chosenSubtableOffset = 0u;
    for (int i = 0; i < subtableHeaders.Length; i++) {
        var platform = subtableHeaders[i].PlatformID;
        var encoding = subtableHeaders[i].EncodingID;
        if ((platform == PlatformID.Microsoft && encoding == WindowsEncoding.UnicodeFull) ||
            (platform == PlatformID.Unicode && encoding == UnicodeEncoding.Unicode32)) {
            chosenSubtableOffset = subtableHeaders[i].Offset;
            break;
        }
    }

    // if no full unicode table, just grab the first
    // one that supports any flavor of Unicode
    if (chosenSubtableOffset == 0) {
        for (int i = 0; i < subtableHeaders.Length; i++) {
            var platform = subtableHeaders[i].PlatformID;
            var encoding = subtableHeaders[i].EncodingID;
            if ((platform == PlatformID.Microsoft && encoding == WindowsEncoding.UnicodeBmp) ||
                platform == PlatformID.Unicode) {
                chosenSubtableOffset = subtableHeaders[i].Offset;
                break;
            }
        }
    }

    // no unicode support at all is an error
    if (chosenSubtableOffset == 0)
        throw new Exception("Font does not support Unicode.");

    // jump to our chosen table and find out what format it's in
    reader.Seek(cmapOffset + chosenSubtableOffset);
    var format = reader.ReadUInt16BE();

    switch (format) {
        case 4: return ReadCmapFormat4(reader);
        default: throw new Exception("Unsupported cmap format.");
    }
}
public static void ReadGlyph(
    DataReader reader, int glyphIndex, int recursionDepth,
    BaseGlyph[] glyphTable, uint glyfOffset, uint glyfLength, uint* loca
) {
    // check if this glyph has already been loaded; this can happen
    // if we're recursively loading subglyphs as part of a composite
    if (glyphTable[glyphIndex] != null)
        return;

    // prevent bad font data from causing infinite recursion
    if (recursionDepth > MaxRecursion)
        throw new InvalidFontException("Bad font data; infinite composite recursion.");

    // check if this glyph doesn't have any actual data
    GlyphHeader header;
    var offset = loca[glyphIndex];
    if ((glyphIndex < glyphTable.Length - 1 && offset == loca[glyphIndex + 1]) || offset >= glyfLength) {
        // this is an empty glyph, so synthesize a header
        header = default(GlyphHeader);
    }
    else {
        // seek to the right spot and load the header
        reader.Seek(glyfOffset + loca[glyphIndex]);
        header = new GlyphHeader {
            ContourCount = reader.ReadInt16BE(),
            MinX = reader.ReadInt16BE(),
            MinY = reader.ReadInt16BE(),
            MaxX = reader.ReadInt16BE(),
            MaxY = reader.ReadInt16BE()
        };

        if (header.ContourCount < -1 || header.ContourCount > MaxContours)
            throw new InvalidFontException("Invalid number of contours for glyph.");
    }

    if (header.ContourCount > 0) {
        // positive contours means a simple glyph
        glyphTable[glyphIndex] = ReadSimpleGlyph(reader, header.ContourCount);
    }
    else if (header.ContourCount == -1) {
        // -1 means composite glyph
        var composite = ReadCompositeGlyph(reader);
        var subglyphs = composite.Subglyphs;

        // read each subglyph recursively
        for (int i = 0; i < subglyphs.Length; i++)
            ReadGlyph(reader, subglyphs[i].Index, recursionDepth + 1, glyphTable, glyfOffset, glyfLength, loca);

        glyphTable[glyphIndex] = composite;
    }
    else {
        // no data, so synthesize an empty glyph
        glyphTable[glyphIndex] = new SimpleGlyph {
            Points = new Point[0],
            ContourEndpoints = new int[0]
        };
    }

    // save bounding box
    var glyph = glyphTable[glyphIndex];
    glyph.MinX = header.MinX;
    glyph.MinY = header.MinY;
    glyph.MaxX = header.MaxX;
    glyph.MaxY = header.MaxY;
}
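// ReadGlyph above indexes into a pre-expanded loca array of byte offsets into the
// glyf table. The sketch below shows how such an array could be expanded from the
// raw 'loca' table, which stores either ushort offsets divided by two
// (indexToLocFormat == 0 in 'head') or full uint offsets (indexToLocFormat == 1).
// LocaSketch is an illustrative standalone helper, not this library's loader.
using System;
using System.Buffers.Binary;

static class LocaSketch {
    public static uint[] ExpandLoca(ReadOnlySpan<byte> loca, int glyphCount, bool longFormat) {
        // loca has one extra entry so that the length of glyph i is loca[i + 1] - loca[i]
        var offsets = new uint[glyphCount + 1];
        for (int i = 0; i <= glyphCount; i++) {
            offsets[i] = longFormat
                ? BinaryPrimitives.ReadUInt32BigEndian(loca.Slice(i * 4))
                : BinaryPrimitives.ReadUInt16BigEndian(loca.Slice(i * 2)) * 2u;
        }
        return offsets;
    }
}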