/// <summary>
/// Attempts to read an array token from the input, starting at the '[' delimiter.
/// Returns false without consuming anything meaningful when the current byte is not '['.
/// </summary>
public bool TryTokenize(byte currentByte, IInputBytes inputBytes, out IToken token)
{
    token = null;

    if (currentByte != '[')
    {
        return false;
    }

    var innerScanner = new CoreTokenScanner(inputBytes, ScannerScope.Array);

    var tokens = new List<IToken>();

    // Track the last token read so the end-of-array check can account for context
    // (e.g. the closing ']' inside a string should not end the array).
    IToken lastToken = null;
    while (!CurrentByteEndsCurrentArray(inputBytes, lastToken) && innerScanner.MoveNext())
    {
        lastToken = innerScanner.CurrentToken;
        tokens.Add(lastToken);
    }

    token = new ArrayToken(tokens);

    return true;
}
/// <summary>
/// Asserts the array holds an element at <paramref name="index"/> whose type is
/// <typeparamref name="T"/>, and returns that element.
/// </summary>
private static T AssertDataToken<T, TData>(int index, ArrayToken array) where T : IDataToken<TData>
{
    Assert.True(array.Data.Count > index);

    return Assert.IsType<T>(array.Data[index]);
}
/// <summary>
/// Create a new <see cref="Type1FontProgram"/> from the information retrieved from the PDF document.
/// </summary>
/// <param name="name">The name of the font.</param>
/// <param name="encoding">The encoding map from character codes to glyph names. May be null per this constructor (no null check is performed).</param>
/// <param name="fontMatrix">The font matrix array mapping glyph space to text space. Stored as-is; no null check is performed.</param>
/// <param name="boundingBox">The bounding box enclosing all glyphs of the font.</param>
/// <param name="privateDictionary">The Type 1 private dictionary. Must not be null.</param>
/// <param name="charStrings">The charstrings defining the font's glyph programs. Must not be null.</param>
public Type1FontProgram(string name, IReadOnlyDictionary<int, string> encoding, ArrayToken fontMatrix, PdfRectangle boundingBox,
    Type1PrivateDictionary privateDictionary,
    Type1CharStrings charStrings)
{
    Name = name;
    Encoding = encoding;
    FontMatrix = fontMatrix;
    BoundingBox = boundingBox;
    PrivateDictionary = privateDictionary ?? throw new ArgumentNullException(nameof(privateDictionary));
    CharStrings = charStrings ?? throw new ArgumentNullException(nameof(charStrings));
}
public void ToStringCorrect()
{
    // Arrange: an array holding a string, a number and an operator token.
    var arrayToken = new ArrayToken(new IToken[]
    {
        new StringToken("hedgehog"),
        new NumericToken(7),
        OperatorToken.StartObject
    });

    // Act
    var result = arrayToken.ToString();

    // Assert: entries are comma separated inside square brackets.
    Assert.Equal("[ (hedgehog), 7, obj ]", result);
}
/// <summary>
/// Create a new <see cref="Type1Font"/>.
/// </summary>
/// <param name="name">The name of the font.</param>
/// <param name="encoding">The encoding map from character codes to glyph names. May be null per this constructor (no null check is performed).</param>
/// <param name="fontMatrix">The raw font matrix array; converted via <c>GetFontTransformationMatrix</c> which supplies a default when it is null or malformed.</param>
/// <param name="boundingBox">The bounding box enclosing all glyphs of the font.</param>
/// <param name="privateDictionary">The Type 1 private dictionary. Must not be null.</param>
/// <param name="charStrings">The charstrings defining the font's glyph programs. Must not be null.</param>
internal Type1Font(string name, IReadOnlyDictionary<int, string> encoding, ArrayToken fontMatrix, PdfRectangle boundingBox,
    Type1PrivateDictionary privateDictionary, Type1CharStrings charStrings)
{
    Name = name;
    Encoding = encoding;
    FontMatrix = GetFontTransformationMatrix(fontMatrix);
    BoundingBox = boundingBox;
    PrivateDictionary = privateDictionary ?? throw new ArgumentNullException(nameof(privateDictionary));
    CharStrings = charStrings ?? throw new ArgumentNullException(nameof(charStrings));
}
public void SetsData()
{
    // Arrange
    var contents = new[] { OperatorToken.StartStream, OperatorToken.EndStream };

    // Act
    var token = new ArrayToken(contents);

    // Assert: the array exposes exactly the tokens it was constructed with, in order.
    Assert.Equal(2, token.Data.Count);
    Assert.Equal(OperatorToken.StartStream, token.Data[0]);
    Assert.Equal(OperatorToken.EndStream, token.Data[1]);
}
/// <summary>
/// Writes the array token to the output stream in PDF syntax: '[', each element, then ']'.
/// </summary>
private static void WriteArray(ArrayToken array, Stream outputStream)
{
    outputStream.WriteByte(ArrayStart);
    WriteWhitespace(outputStream);

    foreach (var element in array.Data)
    {
        WriteToken(element, outputStream);
    }

    outputStream.WriteByte(ArrayEnd);
    WriteWhitespace(outputStream);
}
// NOTE(review): method name has a typo ("Shoul" -> "Should"); preserved to avoid changing
// the test's identity in discovery/reporting.
public void ShoulHandleNullArgument()
{
    // Each token type should substitute an empty collection when constructed with null.
    var call = new CallToken(null);
    Assert.NotNull(call.Args);

    var group = new GroupToken(null);
    Assert.NotNull(group.Tokens);

    var lambda = new LambdaToken(null);
    Assert.NotNull(lambda.Parameters);

    var obj = new ObjectToken(null);
    Assert.NotNull(obj.Members);

    var array = new ArrayToken(null);
    Assert.NotNull(array.Items);
}
/// <summary>
/// Converts a 6 element font matrix array to a <see cref="TransformationMatrix"/>.
/// Falls back to the conventional Type 1 default (1/1000 scaling) when the array is
/// missing or does not hold exactly 6 entries.
/// </summary>
private static TransformationMatrix GetFontTransformationMatrix(ArrayToken array)
{
    if (array?.Data.Count != 6)
    {
        return TransformationMatrix.FromValues(0.001, 0, 0, 0.001, 0, 0);
    }

    var values = new double[6];
    for (var i = 0; i < values.Length; i++)
    {
        values[i] = ((NumericToken)array.Data[i]).Double;
    }

    return TransformationMatrix.FromValues(values[0], values[1], values[2], values[3], values[4], values[5]);
}
/// <summary>
/// Converts a 4 element array of numbers (resolving indirect references via the scanner)
/// into a <see cref="PdfRectangle"/>.
/// </summary>
public static PdfRectangle ToIntRectangle(this ArrayToken array, IPdfTokenScanner tokenScanner)
{
    if (array == null)
    {
        throw new ArgumentNullException(nameof(array));
    }

    if (array.Data.Count != 4)
    {
        throw new PdfDocumentFormatException($"Cannot convert array to rectangle, expected 4 values instead got: {array}.");
    }

    var x1 = DirectObjectFinder.Get<NumericToken>(array[0], tokenScanner).Int;
    var y1 = DirectObjectFinder.Get<NumericToken>(array[1], tokenScanner).Int;
    var x2 = DirectObjectFinder.Get<NumericToken>(array[2], tokenScanner).Int;
    var y2 = DirectObjectFinder.Get<NumericToken>(array[3], tokenScanner).Int;

    return new PdfRectangle(x1, y1, x2, y2);
}
/// <summary>
/// Converts a 4 element array of direct numeric tokens into a <see cref="PdfRectangle"/>.
/// </summary>
public static PdfRectangle ToIntRectangle(this ArrayToken array)
{
    if (array == null)
    {
        throw new ArgumentNullException(nameof(array));
    }

    if (array.Data.Count != 4)
    {
        throw new PdfDocumentFormatException($"Cannot convert array to rectangle, expected 4 values instead got: {array}.");
    }

    var left = array.GetNumeric(0).Int;
    var bottom = array.GetNumeric(1).Int;
    var right = array.GetNumeric(2).Int;
    var top = array.GetNumeric(3).Int;

    return new PdfRectangle(left, bottom, right, top);
}
/// <summary>
/// Retrieves the <see cref="NumericToken"/> at the given index in the array,
/// throwing when the index is out of range or the element is not numeric.
/// </summary>
public static NumericToken GetNumeric(this ArrayToken array, int index)
{
    if (array == null)
    {
        throw new ArgumentNullException(nameof(array));
    }

    if (index < 0 || index >= array.Data.Count)
    {
        throw new ArgumentOutOfRangeException($"Cannot index into array at index {index}. Array was: {array}.");
    }

    // Inverted guard: fail fast when the element is the wrong type.
    if (!(array.Data[index] is NumericToken numeric))
    {
        throw new PdfDocumentFormatException($"The array did not contain a number at index {index}. Array was: {array}.");
    }

    return numeric;
}
/// <summary>
/// Retrieves the <see cref="NumericToken"/> at the given index in the array.
/// </summary>
/// <param name="array">The array to index into. Must not be null.</param>
/// <param name="index">The zero-based position of the numeric token.</param>
/// <returns>The numeric token at the given position.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="array"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="index"/> is outside the array's bounds.</exception>
/// <exception cref="PdfDocumentFormatException">Thrown when the element at the index is not numeric.</exception>
public static NumericToken GetNumeric(this ArrayToken array, int index)
{
    if (array == null)
    {
        throw new ArgumentNullException(nameof(array));
    }

    if (index < 0 || index >= array.Data.Count)
    {
        // Fix: the exceptions previously carried no diagnostic information, unlike the
        // sibling overload of this method. Use the (paramName, message) overload so the
        // parameter name is reported correctly rather than being conflated with the message.
        throw new ArgumentOutOfRangeException(nameof(index), $"Cannot index into array at index {index}. Array was: {array}.");
    }

    if (array.Data[index] is NumericToken numeric)
    {
        return numeric;
    }

    throw new PdfDocumentFormatException($"The array did not contain a number at index {index}. Array was: {array}.");
}
/// <summary>
/// Attempts to convert an explicit destination array of the form [page /Type args...] into an
/// <see cref="ExplicitDestination"/>. Supported types: /XYZ, /Fit, /FitH, /FitV, /FitR, /FitB, /FitBH, /FitBV.
/// </summary>
/// <param name="explicitDestinationArray">The raw destination array from the document.</param>
/// <param name="catalog">The document catalog used to resolve page references.</param>
/// <param name="log">Receives error messages for malformed destinations.</param>
/// <param name="destination">The parsed destination when the method returns true, otherwise null.</param>
/// <returns>True when a destination could be determined, false otherwise.</returns>
private static bool TryGetExplicitDestination(ArrayToken explicitDestinationArray, Catalog catalog, ILog log, out ExplicitDestination destination)
{
    destination = null;

    if (explicitDestinationArray == null || explicitDestinationArray.Length == 0)
    {
        return(false);
    }

    int pageNumber;

    // The first element is the target page: either an indirect reference to the page object
    // or a numeric page index.
    var pageToken = explicitDestinationArray[0];

    if (pageToken is IndirectReferenceToken pageIndirectReferenceToken)
    {
        var page = catalog.GetPageByReference(pageIndirectReferenceToken.Data);

        if (page?.PageNumber == null)
        {
            return(false);
        }

        pageNumber = page.PageNumber.Value;
    }
    else if (pageToken is NumericToken pageNumericToken)
    {
        // Numeric entries are zero-based; convert to the one-based page number used here.
        pageNumber = pageNumericToken.Int + 1;
    }
    else
    {
        var errorMessage = $"{nameof(TryGetExplicitDestination)} No page number given in 'Dest': '{explicitDestinationArray}'.";

        log.Error(errorMessage);

        return(false);
    }

    // The second element names the destination display style.
    // NOTE(review): the typed branches below index positions 2+ without bounds checks;
    // a shorter-than-expected array would throw from the indexer — TODO confirm indexer behavior.
    var destTypeToken = explicitDestinationArray[1] as NameToken;
    if (destTypeToken == null)
    {
        var errorMessage = $"Missing name token as second argument to explicit destination: {explicitDestinationArray}.";
        log.Error(errorMessage);

        // Fall back to fitting the whole page rather than failing the parse entirely.
        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.FitPage, ExplicitDestinationCoordinates.Empty);

        return(true);
    }

    if (destTypeToken.Equals(NameToken.XYZ))
    {
        // [page /XYZ left top zoom]
        var left = explicitDestinationArray[2] as NumericToken;
        var top = explicitDestinationArray[3] as NumericToken;

        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.XyzCoordinates,
            new ExplicitDestinationCoordinates(left?.Data, top?.Data));

        return(true);
    }

    if (destTypeToken.Equals(NameToken.Fit))
    {
        // [page /Fit]
        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.FitPage, ExplicitDestinationCoordinates.Empty);

        return(true);
    }

    if (destTypeToken.Equals(NameToken.FitH))
    {
        // [page /FitH top]
        var top = explicitDestinationArray[2] as NumericToken;
        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.FitHorizontally,
            new ExplicitDestinationCoordinates(null, top?.Data));

        return(true);
    }

    if (destTypeToken.Equals(NameToken.FitV))
    {
        // [page /FitV left]
        var left = explicitDestinationArray[2] as NumericToken;
        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.FitVertically,
            new ExplicitDestinationCoordinates(left?.Data));

        return(true);
    }

    if (destTypeToken.Equals(NameToken.FitR))
    {
        // [page /FitR left bottom right top]
        var left = explicitDestinationArray[2] as NumericToken;
        var bottom = explicitDestinationArray[3] as NumericToken;
        var right = explicitDestinationArray[4] as NumericToken;
        var top = explicitDestinationArray[5] as NumericToken;

        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.FitRectangle,
            new ExplicitDestinationCoordinates(left?.Data, top?.Data, right?.Data, bottom?.Data));

        return(true);
    }

    if (destTypeToken.Equals(NameToken.FitB))
    {
        // [page /FitB]
        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.FitBoundingBox, ExplicitDestinationCoordinates.Empty);

        return(true);
    }

    if (destTypeToken.Equals(NameToken.FitBH))
    {
        // [page /FitBH top]
        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.FitBoundingBoxHorizontally,
            new ExplicitDestinationCoordinates(null, (explicitDestinationArray[2] as NumericToken)?.Data));

        return(true);
    }

    if (destTypeToken.Equals(NameToken.FitBV))
    {
        // [page /FitBV left]
        destination = new ExplicitDestination(pageNumber, ExplicitDestinationType.FitBoundingBoxVertically,
            new ExplicitDestinationCoordinates((explicitDestinationArray[2] as NumericToken)?.Data));

        return(true);
    }

    // Unrecognised destination type.
    return(false);
}
/*
 * Parses an indentation-based, YAML-like format, e.g.:
 *   name: start
 *   imports:
 *     userId:
 *       ./abc
 *       ../abc
 */
/// <summary>
/// Asynchronously parses the tab-indented text format from the reader into an object token tree.
/// Leading tabs set the nesting level; "key:" introduces a property; lines without a key
/// beneath a property collect into an array. Exception messages are in Chinese (preserved as-is).
/// </summary>
/// <param name="stream">The reader supplying the document text.</param>
/// <returns>The root object token of the parsed tree.</returns>
public static async Task<ObjectToken> ParseAsync(TextReader stream)
{
    int deep = 0;
    IDictionary<string, Token> currentMap = null;
    List<Token> currentArr = null;
    var root = new ObjectToken(currentMap = new Dictionary<string, Token>());
    Stack<IDictionary<string, Token>> stack = new Stack<IDictionary<string, Token>>();
    var lineAt = 1;
    var charAt = 0;
    var buffer = new char[1024];
    var readed = 0;
    var level = 0;
    var lastToken = '\n';
    var sb = new StringBuilder();
    string key = null;
    string value = null;
    string lastName = null;
    while ((readed = await stream.ReadAsync(buffer, 0, 1024)) != 0)
    {
        for (int i = 0; i < readed; i++)
        {
            char ch = buffer[i];
            charAt++;
            if (ch == '\t')
            {
                // Tabs are only valid at the start of a line, where they raise the nesting level.
                if (lastToken == '\n')
                {
                    level++;
                    continue;
                }
                else
                {
                    throw new DiagramException($"意外的\\t符号,row={lineAt},col={charAt}");
                }
            }
            else if (ch == '#')
            {
                // '#' starts a comment and is only valid at the beginning of a line.
                if (lastToken == '\n')
                {
                    lastToken = ch;
                }
                else
                {
                    throw new DiagramException($"意外的#符号,row={lineAt},col={charAt}");
                }
            }
            else if (ch == ':')
            {
                // ':' ends a key; remember the previous key so an indent can attach children to it.
                lastName = key;
                key = sb.ToString().Trim();
                if (currentArr != null)
                {
                    throw new DiagramException($"意外的:符号,当前正在定义数组,无法指定属性,row={lineAt},col={charAt}");
                }
                sb = new StringBuilder();
                lastToken = ch;
            }
            else if (ch == '\n')
            {
                lineAt++;
                lastToken = '\n';
                value = sb.ToString().Trim();
                sb = new StringBuilder();
                if (level > deep)
                {
                    // Indent: only one extra level at a time is allowed.
                    if (level > deep + 1)
                    {
                        throw new DiagramException($"多余的tab,row={lineAt}");
                    }
                    deep++;
                    // Push the current map onto the stack before descending a level.
                    stack.Push(currentMap);
                    Token ct = null;
                    if (key == null)
                    {
                        // No "key:" on this line: the previous key introduces an array of values.
                        var nextArr = new List<Token>();
                        ct = new ArrayToken(nextArr);
                        currentMap[lastName] = ct;
                        currentArr = nextArr;
                        currentMap = null;
                    }
                    else
                    {
                        // "key:" present: the previous key introduces a nested object.
                        var nextMap = new Dictionary<string, Token>();
                        ct = new ObjectToken(nextMap);
                        currentMap[lastName] = ct;
                        currentMap = nextMap;
                        currentArr = null;
                    }
                    lastName = null;
                }
                else if (deep == level)
                {
                    // Same level as before: nothing to rewire.
                }
                else
                {
                    // Dedent: pop maps until back at the target level.
                    // NOTE(review): `deep` is not decremented here while maps are popped — TODO confirm
                    // whether multi-level dedents behave as intended.
                    for (var j = deep; j > level; j--)
                    {
                        currentMap = stack.Pop();
                    }
                    if (currentMap == null)
                    {
                        throw new DiagramException($"内部算法错误");
                    }
                }
                if (currentArr != null)
                {
                    currentArr.Add(new StringToken(value));
                }
                else
                {
                    if (currentMap.ContainsKey(key))
                    {
                        throw new DiagramException($"重复的属性定义${key},row={lineAt},col={charAt}");
                    }
                    currentMap.Add(key, new StringToken(value));
                }
                level = 0;
            }
            else
            {
                sb.Append(ch);
            }
        }
    }
    return(root);
}
/// <summary>
/// Parses a CMap bfrange section: each operation maps a contiguous range of source codes
/// to destination values, given either as a single starting value (incremented in step
/// with the source code) or as an array with one destination per source code.
/// </summary>
/// <param name="numberOfOperations">The number of ranges declared in this bfrange block.</param>
/// <param name="scanner">The token scanner positioned after the bfrange operator.</param>
/// <param name="builder">The character map builder which receives the mappings.</param>
/// <param name="isLenientParsing">Whether parsing should tolerate errors (not used in this method body).</param>
public void Parse(NumericToken numberOfOperations, ITokenScanner scanner, CharacterMapBuilder builder, bool isLenientParsing)
{
    for (var i = 0; i < numberOfOperations.Int; i++)
    {
        // The start of the input code range.
        if (!scanner.TryReadToken(out HexToken lowSourceCode))
        {
            throw new InvalidFontFormatException($"bfrange was missing the low source code: {scanner.CurrentToken}");
        }

        // The inclusive end of the input code range.
        if (!scanner.TryReadToken(out HexToken highSourceCode))
        {
            throw new InvalidFontFormatException($"bfrange was missing the high source code: {scanner.CurrentToken}");
        }

        if (!scanner.MoveNext())
        {
            throw new InvalidFontFormatException("bfrange ended unexpectedly after the high source code.");
        }

        List<byte> destinationBytes = null;
        ArrayToken destinationArray = null;
        switch (scanner.CurrentToken)
        {
            case ArrayToken arrayToken:
                destinationArray = arrayToken;
                break;
            case HexToken hexToken:
                destinationBytes = hexToken.Bytes.ToList();
                break;
            case NumericToken _:
                throw new NotImplementedException("From the spec it seems this possible but the meaning is unclear...");
            default:
                throw new InvalidOperationException();
        }

        var done = false;
        var startCode = new List<byte>(lowSourceCode.Bytes);
        var endCode = highSourceCode.Bytes;

        if (destinationArray != null)
        {
            // Array form: one destination entry per source code in the range.
            int arrayIndex = 0;
            while (!done)
            {
                if (Compare(startCode, endCode) >= 0)
                {
                    // Last code in the range; process it then stop.
                    done = true;
                }
                // NOTE(review): assumes the destination array has at least as many entries as
                // codes in the range; a shorter array would throw out-of-range — TODO confirm.
                var destination = destinationArray.Data[arrayIndex];
                if (destination is NameToken name)
                {
                    builder.AddBaseFontCharacter(startCode, name.Data);
                }
                else if (destination is HexToken hex)
                {
                    builder.AddBaseFontCharacter(startCode, hex.Bytes);
                }
                Increment(startCode, startCode.Count - 1);
                arrayIndex++;
            }
            continue;
        }

        // Single-value form: the destination value increments alongside the source code.
        while (!done)
        {
            if (Compare(startCode, endCode) >= 0)
            {
                done = true;
            }
            builder.AddBaseFontCharacter(startCode, destinationBytes);
            Increment(startCode, startCode.Count - 1);
            Increment(destinationBytes, destinationBytes.Count - 1);
        }
    }
}
/// <summary>
/// Builds the page tree for a pages node iteratively (breadth-first) to avoid deep recursion.
/// Leaf pages receive their page number immediately; intermediate nodes have their children
/// attached at the end via deferred actions since child lists fill up during the traversal.
/// </summary>
/// <param name="referenceInput">The indirect reference of the node being processed.</param>
/// <param name="nodeDictionaryInput">The dictionary of the node being processed.</param>
/// <param name="parentReferenceInput">The reference of the node's parent.</param>
/// <param name="isRoot">Whether this node is the root of the pages tree.</param>
/// <param name="pdfTokenScanner">Scanner used to resolve indirect references.</param>
/// <param name="isLenientParsing">Whether to tolerate missing kids arrays.</param>
/// <param name="pageNumber">Mutable counter assigning sequential page numbers to leaf pages.</param>
/// <returns>The page tree node for the input reference with all descendants attached.</returns>
private static PageTreeNode ProcessPagesNode(IndirectReference referenceInput, DictionaryToken nodeDictionaryInput, IndirectReference parentReferenceInput,
    bool isRoot, IPdfTokenScanner pdfTokenScanner, bool isLenientParsing, PageCounter pageNumber)
{
    bool isPage = CheckIfIsPage(nodeDictionaryInput, parentReferenceInput, isRoot, pdfTokenScanner, isLenientParsing);

    if (isPage)
    {
        // Leaf page: assign the next page number and return immediately with no children.
        pageNumber.Increment();
        return(new PageTreeNode(nodeDictionaryInput, referenceInput, true, pageNumber.PageCount).WithChildren(EmptyArray<PageTreeNode>.Instance));
    }

    //If we got here, we have to iterate till we manage to exit
    var toProcess = new Queue<(PageTreeNode thisPage, IndirectReference reference, DictionaryToken nodeDictionary, IndirectReference parentReference,
        List<PageTreeNode> nodeChildren)>();
    var firstPage = new PageTreeNode(nodeDictionaryInput, referenceInput, false, null);
    // Children are attached lazily because each node's child list keeps growing while the
    // queue is drained; the actions run once traversal is complete.
    var setChildren = new List<Action>();
    var firstPageChildren = new List<PageTreeNode>();

    setChildren.Add(() => firstPage.WithChildren(firstPageChildren));

    toProcess.Enqueue(
        (thisPage: firstPage, reference: referenceInput, nodeDictionary: nodeDictionaryInput, parentReference: parentReferenceInput, nodeChildren: firstPageChildren));

    do
    {
        var current = toProcess.Dequeue();
        if (!current.nodeDictionary.TryGet(NameToken.Kids, pdfTokenScanner, out ArrayToken kids))
        {
            if (!isLenientParsing)
            {
                throw new PdfDocumentFormatException($"Pages node in the document pages tree did not define a kids array: {current.nodeDictionary}.");
            }

            // Lenient mode: treat a missing kids array as an empty one.
            kids = new ArrayToken(EmptyArray<IToken>.Instance);
        }

        foreach (var kid in kids.Data)
        {
            if (!(kid is IndirectReferenceToken kidRef))
            {
                throw new PdfDocumentFormatException($"Kids array contained invalid entry (must be indirect reference): {kid}.");
            }

            if (!DirectObjectFinder.TryGet(kidRef, pdfTokenScanner, out DictionaryToken kidDictionaryToken))
            {
                throw new PdfDocumentFormatException($"Could not find dictionary associated with reference in pages kids array: {kidRef}.");
            }

            bool isChildPage = CheckIfIsPage(kidDictionaryToken, current.reference, false, pdfTokenScanner, isLenientParsing);

            if (isChildPage)
            {
                // Leaf page found during traversal: number it and attach directly.
                pageNumber.Increment();

                var kidPageNode = new PageTreeNode(kidDictionaryToken, kidRef.Data, true, pageNumber.PageCount).WithChildren(EmptyArray<PageTreeNode>.Instance);

                current.nodeChildren.Add(kidPageNode);
            }
            else
            {
                // Intermediate node: enqueue for processing and defer attaching its children.
                var kidChildNode = new PageTreeNode(kidDictionaryToken, kidRef.Data, false, null);
                var kidChildren = new List<PageTreeNode>();
                toProcess.Enqueue(
                    (thisPage: kidChildNode, reference: kidRef.Data, nodeDictionary: kidDictionaryToken, parentReference: current.reference, nodeChildren: kidChildren));
                setChildren.Add(() => kidChildNode.WithChildren(kidChildren));
                current.nodeChildren.Add(kidChildNode);
            }
        }
    } while (toProcess.Count > 0);

    // Now that all child lists are final, wire them into their parent nodes.
    foreach (var action in setChildren)
    {
        action();
    }

    return(firstPage);
}
/// <summary>
/// Writes a valid single section cross-reference (xref) table plus trailer dictionary to the output for the set of object offsets.
/// </summary>
/// <param name="objectOffsets">The byte offset from the start of the document for each object in the document. Object numbers must form a contiguous range.</param>
/// <param name="catalogToken">The object representing the catalog dictionary which is referenced from the trailer dictionary.</param>
/// <param name="outputStream">The output stream to write to.</param>
/// <param name="documentInformationReference">The object reference for the document information dictionary if present.</param>
internal static void WriteCrossReferenceTable(IReadOnlyDictionary<IndirectReference, long> objectOffsets,
    ObjectToken catalogToken,
    Stream outputStream,
    IndirectReference? documentInformationReference)
{
    if (objectOffsets.Count == 0)
    {
        throw new InvalidOperationException("Could not write empty cross reference table.");
    }

    WriteLineBreak(outputStream);

    // Record where the table starts: this offset is written after the startxref keyword below.
    var position = outputStream.Position;
    outputStream.Write(Xref, 0, Xref.Length);
    WriteLineBreak(outputStream);

    var min = objectOffsets.Min(x => x.Key.ObjectNumber);
    var max = objectOffsets.Max(x => x.Key.ObjectNumber);

    // A single subsection is written, so the object numbers must have no gaps.
    if (max - min != objectOffsets.Count - 1)
    {
        throw new NotSupportedException("Object numbers must form a contiguous range");
    }

    // Subsection header "0 N": first object number (0, the free entry) and entry count.
    WriteLong(0, outputStream);
    WriteWhitespace(outputStream);
    // 1 extra for the free entry.
    WriteLong(objectOffsets.Count + 1, outputStream);
    WriteWhitespace(outputStream);
    WriteLineBreak(outputStream);

    // Object 0 is always the head of the free list.
    WriteFirstXrefEmptyEntry(outputStream);

    foreach (var keyValuePair in objectOffsets.OrderBy(x => x.Key.ObjectNumber))
    {
        /*
         * nnnnnnnnnn ggggg n eol
         * where:
         * nnnnnnnnnn is a 10-digit byte offset
         * ggggg is a 5-digit generation number
         * n is a literal keyword identifying this as an in-use entry
         * eol is a 2-character end-of-line sequence ('\r\n' or ' \n')
         */
        var paddedOffset = OtherEncodings.StringAsLatin1Bytes(keyValuePair.Value.ToString("D10"));
        outputStream.Write(paddedOffset, 0, paddedOffset.Length);

        WriteWhitespace(outputStream);

        var generation = OtherEncodings.StringAsLatin1Bytes(keyValuePair.Key.Generation.ToString("D5"));
        outputStream.Write(generation, 0, generation.Length);

        WriteWhitespace(outputStream);

        outputStream.WriteByte(InUseEntry);

        WriteWhitespace(outputStream);
        WriteLineBreak(outputStream);
    }

    outputStream.Write(Trailer, 0, Trailer.Length);
    WriteLineBreak(outputStream);

    // The /ID entry uses two freshly generated GUIDs: note this makes output non-deterministic.
    var identifier = new ArrayToken(new IToken[]
    {
        new HexToken(Guid.NewGuid().ToString("N").ToCharArray()),
        new HexToken(Guid.NewGuid().ToString("N").ToCharArray())
    });

    var trailerDictionaryData = new Dictionary<NameToken, IToken>
    {
        // 1 for the free entry.
        { NameToken.Size, new NumericToken(objectOffsets.Count + 1) },
        { NameToken.Root, new IndirectReferenceToken(catalogToken.Number) },
        { NameToken.Id, identifier }
    };

    if (documentInformationReference.HasValue)
    {
        trailerDictionaryData[NameToken.Info] = new IndirectReferenceToken(documentInformationReference.Value);
    }

    var trailerDictionary = new DictionaryToken(trailerDictionaryData);

    WriteDictionary(trailerDictionary, outputStream);
    WriteLineBreak(outputStream);

    // startxref points back at the offset of the xref keyword recorded above.
    outputStream.Write(StartXref, 0, StartXref.Length);
    WriteLineBreak(outputStream);
    WriteLong(position, outputStream);
    WriteLineBreak(outputStream);

    // Complete!
    outputStream.Write(Eof, 0, Eof.Length);
}
/// <summary>
/// Builds a minimal PDF document in memory: writes the header, one dictionary per registered
/// font (with a stub widths array and font descriptor reference), the shared font resources,
/// a single page, the pages node and the catalog. Returns the raw bytes written so far.
/// Note: the visible body does not write an xref table or trailer; presumably that happens
/// elsewhere or the output is partial — TODO confirm against the caller.
/// </summary>
/// <returns>The bytes of the document produced so far.</returns>
public byte[] Build()
{
    // Maps each indirect reference to its byte offset; `number` is the next object number.
    var objectLocations = new Dictionary<IndirectReference, long>();
    var fontsWritten = new Dictionary<Guid, ObjectToken>();
    var number = 1;
    using (var memory = new MemoryStream())
    {
        // Header
        WriteString("%PDF-1.7", memory);

        // Body
        foreach (var font in fonts)
        {
            // Stub widths array covering character codes 0-255.
            var widths = new ArrayToken(new[] { new NumericToken(0), new NumericToken(255) });

            var widthsObj = WriteObject(widths, memory, objectLocations, ref number);

            // Reserve the next object number for the font descriptor (written elsewhere).
            var descriptorRef = new IndirectReference(number++, 0);

            var dictionary = new DictionaryToken(new Dictionary<IToken, IToken>
            {
                { NameToken.Type, NameToken.Font },
                { NameToken.Subtype, NameToken.TrueType },
                { NameToken.FirstChar, new NumericToken(0) },
                { NameToken.LastChar, new NumericToken(255) },
                { NameToken.Encoding, NameToken.WinAnsiEncoding },
                { NameToken.Widths, widthsObj },
                { NameToken.FontDesc, new IndirectReferenceToken(descriptorRef) }
            });

            var fontObj = WriteObject(dictionary, memory, objectLocations, ref number);

            fontsWritten.Add(font.Key, fontObj);
        }

        // Resources dictionary mapping each font's resource name to its written object.
        var fontsDictionary = new DictionaryToken(fontsWritten.Select(x => ((IToken)fonts[x.Key].FontKey.Name, (IToken)new IndirectReferenceToken(x.Value.Number)))
            .ToDictionary(x => x.Item1, x => x.Item2));

        var fontsDictionaryRef = WriteObject(fontsDictionary, memory, objectLocations, ref number);

        var page = new DictionaryToken(new Dictionary<IToken, IToken>
        {
            { NameToken.Type, NameToken.Page },
            { NameToken.Resources, new DictionaryToken(new Dictionary<IToken, IToken>
            {
                { NameToken.ProcSet, new ArrayToken(new[] { NameToken.Create("PDF"), NameToken.Create("Text") }) },
                { NameToken.Font, new IndirectReferenceToken(fontsDictionaryRef.Number) }
            }) }
        });

        var pageRef = WriteObject(page, memory, objectLocations, ref number);

        var pagesDictionary = new DictionaryToken(new Dictionary<IToken, IToken>
        {
            { NameToken.Type, NameToken.Pages },
            { NameToken.Kids, new ArrayToken(new[] { new IndirectReferenceToken(pageRef.Number) }) },
            { NameToken.Count, new NumericToken(1) }
        });

        var pagesRef = WriteObject(pagesDictionary, memory, objectLocations, ref number);

        var catalog = new DictionaryToken(new Dictionary<IToken, IToken>
        {
            { NameToken.Type, NameToken.Catalog },
            { NameToken.Pages, new IndirectReferenceToken(pagesRef.Number) }
        });

        WriteObject(catalog, memory, objectLocations, ref number);

        return(memory.ToArray());
    }
}
public void SetsDataEmpty()
{
    // An array constructed from zero tokens exposes an empty data collection.
    var emptyToken = new ArrayToken(new IToken[0]);

    Assert.Empty(emptyToken.Data);
}
/// <summary>
/// Recursively decrypts a token using the key derived for the given indirect reference.
/// Streams, strings and hex strings are decrypted directly; dictionaries and arrays are
/// traversed and rebuilt with decrypted contents. Tokens of any other type, and tokens
/// exempt from encryption (xref streams, unencrypted metadata, signature /Contents),
/// are returned unchanged.
/// </summary>
/// <param name="reference">The indirect reference owning this token, used to derive the decryption key.</param>
/// <param name="token">The token to decrypt.</param>
/// <returns>The decrypted token, or the input token when no decryption applies.</returns>
private IToken DecryptInternal(IndirectReference reference, IToken token)
{
    switch (token)
    {
        case StreamToken stream:
        {
            // Skip decryption when the stream crypt filter is Identity or None.
            if (cryptHandler?.StreamDictionary?.IsIdentity == true
                || cryptHandler?.StreamDictionary?.Name == CryptDictionary.Method.None)
            {
                // TODO: No idea if this is right.
                return(token);
            }

            if (stream.StreamDictionary.TryGet(NameToken.Type, out NameToken typeName))
            {
                // Cross-reference streams are never encrypted.
                if (NameToken.Xref.Equals(typeName))
                {
                    return(token);
                }

                // Metadata streams are skipped when the document opts out of metadata encryption.
                if (!encryptionDictionary.EncryptMetadata && NameToken.Metadata.Equals(typeName))
                {
                    return(token);
                }

                // TODO: check unencrypted metadata
            }

            // Decrypt the stream's dictionary first, then its data.
            var streamDictionary = (DictionaryToken)DecryptInternal(reference, stream.StreamDictionary);

            var decrypted = DecryptData(stream.Data.ToArray(), reference);

            token = new StreamToken(streamDictionary, decrypted);

            break;
        }
        case StringToken stringToken:
        {
            // Skip decryption when the string crypt filter is Identity or None.
            if (cryptHandler?.StringDictionary?.IsIdentity == true
                || cryptHandler?.StringDictionary?.Name == CryptDictionary.Method.None)
            {
                // TODO: No idea if this is right.
                return(token);
            }

            var data = OtherEncodings.StringAsLatin1Bytes(stringToken.Data);

            var decrypted = DecryptData(data, reference);

            token = GetStringTokenFromDecryptedData(decrypted);

            break;
        }
        case HexToken hexToken:
        {
            var data = hexToken.Bytes.ToArray();

            var decrypted = DecryptData(data, reference);

            token = new HexToken(Hex.GetString(decrypted).ToCharArray());

            break;
        }
        case DictionaryToken dictionary:
        {
            // PDFBOX-2936: avoid orphan /CF dictionaries found in US govt "I-" files
            if (dictionary.TryGet(NameToken.Cf, out _))
            {
                return(token);
            }

            // The /Contents entry of signature dictionaries is written unencrypted.
            var isSignatureDictionary = dictionary.TryGet(NameToken.Type, out NameToken typeName)
                && (typeName.Equals(NameToken.Sig) || typeName.Equals(NameToken.DocTimeStamp));

            foreach (var keyValuePair in dictionary.Data)
            {
                if (isSignatureDictionary && keyValuePair.Key == NameToken.Contents.Data)
                {
                    continue;
                }

                // Only value types that can carry encrypted content need recursion.
                if (keyValuePair.Value is StringToken || keyValuePair.Value is ArrayToken || keyValuePair.Value is DictionaryToken
                    || keyValuePair.Value is HexToken)
                {
                    var inner = DecryptInternal(reference, keyValuePair.Value);
                    dictionary = dictionary.With(keyValuePair.Key, inner);
                }
            }

            token = dictionary;

            break;
        }
        case ArrayToken array:
        {
            // Rebuild the array with each element decrypted.
            var result = new IToken[array.Length];

            for (var i = 0; i < array.Length; i++)
            {
                result[i] = DecryptInternal(reference, array.Data[i]);
            }

            token = new ArrayToken(result);

            break;
        }
    }

    return(token);
}
/// <summary>
/// Writes a valid single section cross-reference (xref) table plus trailer dictionary to the output for the set of object offsets.
/// Unlike the contiguous-only variant, this groups object numbers into runs and writes one xref subsection per run, so gaps are supported.
/// </summary>
/// <param name="objectOffsets">The byte offset from the start of the document for each object in the document.</param>
/// <param name="catalogToken">The object representing the catalog dictionary which is referenced from the trailer dictionary.</param>
/// <param name="outputStream">The output stream to write to.</param>
/// <param name="documentInformationReference">The object reference for the document information dictionary if present.</param>
internal static void WriteCrossReferenceTable(IReadOnlyDictionary<IndirectReference, long> objectOffsets,
    IndirectReference catalogToken,
    Stream outputStream,
    IndirectReference? documentInformationReference)
{
    if (objectOffsets.Count == 0)
    {
        throw new InvalidOperationException("Could not write empty cross reference table.");
    }

    WriteLineBreak(outputStream);

    // Record where the table starts: this offset is written after the startxref keyword below.
    var position = outputStream.Position;
    outputStream.Write(Xref, 0, Xref.Length);
    WriteLineBreak(outputStream);

    // Group the offsets into runs of consecutive object numbers; each run becomes one subsection.
    var sets = new List<XrefSeries>();

    var orderedList = objectOffsets.OrderBy(x => x.Key.ObjectNumber).ToList();

    long firstObjectNumber = 0;
    long currentObjNum = 0;
    var items = new List<XrefSeries.OffsetAndGeneration>
    {
        // Zero entry
        null
    };

    foreach (var item in orderedList)
    {
        var step = item.Key.ObjectNumber - currentObjNum;
        if (step == 1)
        {
            // Still consecutive: extend the current run.
            currentObjNum = item.Key.ObjectNumber;
            items.Add(new XrefSeries.OffsetAndGeneration(item.Value, item.Key.Generation));
        }
        else
        {
            // Gap in the numbering: close the current run and start a new one at this object.
            sets.Add(new XrefSeries(firstObjectNumber, items));
            items = new List<XrefSeries.OffsetAndGeneration>
            {
                new XrefSeries.OffsetAndGeneration(item.Value, item.Key.Generation)
            };

            currentObjNum = item.Key.ObjectNumber;
            firstObjectNumber = item.Key.ObjectNumber;
        }
    }

    // Flush the final run.
    if (items.Count > 0)
    {
        sets.Add(new XrefSeries(firstObjectNumber, items));
    }

    foreach (var series in sets)
    {
        // Subsection header: first object number followed by the entry count.
        WriteLong(series.First, outputStream);
        WriteWhitespace(outputStream);
        WriteLong(series.Offsets.Count, outputStream);
        WriteWhitespace(outputStream);
        WriteLineBreak(outputStream);

        foreach (var offset in series.Offsets)
        {
            if (offset != null)
            {
                /*
                 * nnnnnnnnnn ggggg n eol
                 * where:
                 * nnnnnnnnnn is a 10-digit byte offset
                 * ggggg is a 5-digit generation number
                 * n is a literal keyword identifying this as an in-use entry
                 * eol is a 2-character end-of-line sequence ('\r\n' or ' \n')
                 */
                var paddedOffset = OtherEncodings.StringAsLatin1Bytes(offset.Offset.ToString("D10", CultureInfo.InvariantCulture));
                outputStream.Write(paddedOffset, 0, paddedOffset.Length);

                WriteWhitespace(outputStream);

                var generation = OtherEncodings.StringAsLatin1Bytes(offset.Generation.ToString("D5", CultureInfo.InvariantCulture));
                outputStream.Write(generation, 0, generation.Length);

                WriteWhitespace(outputStream);

                outputStream.WriteByte(InUseEntry);

                WriteWhitespace(outputStream);
                WriteLineBreak(outputStream);
            }
            else
            {
                // The null placeholder stands for the mandatory free entry for object 0.
                WriteFirstXrefEmptyEntry(outputStream);
            }
        }
    }

    outputStream.Write(Trailer, 0, Trailer.Length);
    WriteLineBreak(outputStream);

    // The /ID entry uses two freshly generated GUIDs: note this makes output non-deterministic.
    var identifier = new ArrayToken(new IToken[]
    {
        new HexToken(Guid.NewGuid().ToString("N").ToCharArray()),
        new HexToken(Guid.NewGuid().ToString("N").ToCharArray())
    });

    var trailerDictionaryData = new Dictionary<NameToken, IToken>
    {
        // 1 for the free entry.
        { NameToken.Size, new NumericToken(objectOffsets.Count + 1) },
        { NameToken.Root, new IndirectReferenceToken(catalogToken) },
        { NameToken.Id, identifier }
    };

    if (documentInformationReference.HasValue)
    {
        trailerDictionaryData[NameToken.Info] = new IndirectReferenceToken(documentInformationReference.Value);
    }

    var trailerDictionary = new DictionaryToken(trailerDictionaryData);

    WriteDictionary(trailerDictionary, outputStream);
    WriteLineBreak(outputStream);

    // startxref points back at the offset of the xref keyword recorded above.
    outputStream.Write(StartXref, 0, StartXref.Length);
    WriteLineBreak(outputStream);
    WriteLong(position, outputStream);
    WriteLineBreak(outputStream);

    // Complete!
    outputStream.Write(Eof, 0, Eof.Length);
}
/// <summary>
/// Parses a CMap bfrange block: each operation maps a contiguous range of source codes to
/// destination values, given either as a single starting value (incremented in step with the
/// source code) or as an array with one destination entry per source code in the range.
/// </summary>
/// <param name="numeric">The number of ranges declared in this bfrange block.</param>
/// <param name="scanner">The token scanner positioned after the bfrange operator.</param>
/// <param name="builder">The character map builder which receives the mappings.</param>
/// <param name="isLenientParsing">Whether parsing should tolerate errors (not used in this method body).</param>
public void Parse(NumericToken numeric, ITokenScanner scanner, CharacterMapBuilder builder, bool isLenientParsing)
{
    for (var i = 0; i < numeric.Int; i++)
    {
        // The inclusive start of the source code range.
        if (!scanner.TryReadToken(out HexToken lowSourceCode))
        {
            // TODO: message
            throw new InvalidOperationException();
        }

        // The inclusive end of the source code range.
        if (!scanner.TryReadToken(out HexToken highSourceCode))
        {
            // TODO: message
            throw new InvalidOperationException();
        }

        if (!scanner.MoveNext())
        {
            // TODO: message
            throw new InvalidOperationException();
        }

        List<byte> destinationBytes = null;
        ArrayToken destinationArray = null;
        switch (scanner.CurrentToken)
        {
            case ArrayToken arrayToken:
                destinationArray = arrayToken;
                break;
            case HexToken hexToken:
                destinationBytes = hexToken.Bytes.ToList();
                break;
            case NumericToken _:
                throw new NotImplementedException("From the spec it seems this possible but the meaning is unclear...");
            default:
                throw new InvalidOperationException();
        }

        var done = false;
        var startCode = new List<byte>(lowSourceCode.Bytes);
        var endCode = highSourceCode.Bytes;

        int arrayIndex = 0;

        // Bug fix: when the destination is an array, the first source code maps to the array's
        // first entry. Previously destinationBytes started null in the array case, so the first
        // mapping was empty and entry 0 of the array was never consumed.
        if (destinationArray != null && destinationArray.Data.Count > 0)
        {
            destinationBytes = ((HexToken)destinationArray.Data[arrayIndex]).Bytes.ToList();
        }

        while (!done)
        {
            if (Compare(startCode, endCode) >= 0)
            {
                // Last code in the range; process it then stop.
                done = true;
            }
            builder.AddBaseFontCharacter(startCode, destinationBytes);

            Increment(startCode, startCode.Count - 1);

            if (destinationArray == null)
            {
                // Single-value destination: advance it in step with the source code.
                Increment(destinationBytes, destinationBytes.Count - 1);
            }
            else
            {
                // Array destination: move to the next entry, keeping the last one when the
                // array is shorter than the code range (lenient behavior).
                arrayIndex++;
                if (arrayIndex < destinationArray.Data.Count)
                {
                    destinationBytes = ((HexToken)destinationArray.Data[arrayIndex]).Bytes.ToList();
                }
            }
        }
    }
}
/// <summary>
/// Recursively builds the page tree for a node: classifies the node as a page or pages node
/// (inferring from the presence of /Kids when lenient and /Type is missing), validates the
/// /Parent back-reference in strict mode, numbers leaf pages via the ref counter and recurses
/// into each kid of intermediate nodes.
/// </summary>
/// <param name="reference">The indirect reference of the node being processed.</param>
/// <param name="nodeDictionary">The dictionary of the node being processed.</param>
/// <param name="parentReference">The reference of the node's parent, checked in strict mode.</param>
/// <param name="isRoot">Whether this node is the root of the pages tree (root skips the parent check).</param>
/// <param name="pdfTokenScanner">Scanner used to resolve indirect references.</param>
/// <param name="isLenientParsing">Whether to tolerate missing/invalid types and kids arrays.</param>
/// <param name="pageNumber">Counter assigning sequential page numbers to leaf pages, shared across the recursion.</param>
/// <returns>The page tree node for the input reference with all descendants attached.</returns>
private static PageTreeNode ProcessPagesNode(IndirectReference reference, DictionaryToken nodeDictionary, IndirectReference parentReference,
    bool isRoot, IPdfTokenScanner pdfTokenScanner, bool isLenientParsing, ref int pageNumber)
{
    var isPage = false;

    if (!nodeDictionary.TryGet(NameToken.Type, pdfTokenScanner, out NameToken type))
    {
        if (!isLenientParsing)
        {
            throw new PdfDocumentFormatException($"Node in the document pages tree did not define a type: {nodeDictionary}.");
        }

        // Lenient mode with no /Type: a node with no /Kids is assumed to be a page.
        if (!nodeDictionary.TryGet(NameToken.Kids, pdfTokenScanner, out ArrayToken _))
        {
            isPage = true;
        }
    }
    else
    {
        isPage = type.Equals(NameToken.Page);

        if (!isPage && !type.Equals(NameToken.Pages) && !isLenientParsing)
        {
            throw new PdfDocumentFormatException($"Node in the document pages tree defined invalid type: {nodeDictionary}.");
        }
    }

    // Strict mode: verify the node's /Parent entry points back to the actual parent.
    if (!isLenientParsing && !isRoot)
    {
        if (!nodeDictionary.TryGet(NameToken.Parent, pdfTokenScanner, out IndirectReferenceToken parentReferenceToken))
        {
            throw new PdfDocumentFormatException($"Could not find parent indirect reference token on pages tree node: {nodeDictionary}.");
        }

        if (!parentReferenceToken.Data.Equals(parentReference))
        {
            throw new PdfDocumentFormatException($"Pages tree node parent reference {parentReferenceToken.Data} did not match actual parent {parentReference}.");
        }
    }

    if (isPage)
    {
        // Leaf page: assign the next page number and return with no children.
        pageNumber++;

        var thisNode = new PageTreeNode(nodeDictionary, reference, true, pageNumber, EmptyArray<PageTreeNode>.Instance);

        return(thisNode);
    }

    if (!nodeDictionary.TryGet(NameToken.Kids, pdfTokenScanner, out ArrayToken kids))
    {
        if (!isLenientParsing)
        {
            throw new PdfDocumentFormatException($"Pages node in the document pages tree did not define a kids array: {nodeDictionary}.");
        }

        // Lenient mode: treat a missing kids array as an empty one.
        kids = new ArrayToken(EmptyArray<IToken>.Instance);
    }

    var nodeChildren = new List<PageTreeNode>();

    foreach (var kid in kids.Data)
    {
        if (!(kid is IndirectReferenceToken kidRef))
        {
            throw new PdfDocumentFormatException($"Kids array contained invalid entry (must be indirect reference): {kid}.");
        }

        if (!DirectObjectFinder.TryGet(kidRef, pdfTokenScanner, out DictionaryToken kidDictionaryToken))
        {
            throw new PdfDocumentFormatException($"Could not find dictionary associated with reference in pages kids array: {kidRef}.");
        }

        // Recurse into the kid; the shared counter keeps page numbering sequential.
        var kidNode = ProcessPagesNode(kidRef.Data, kidDictionaryToken, reference, false, pdfTokenScanner, isLenientParsing, ref pageNumber);

        nodeChildren.Add(kidNode);
    }

    return(new PageTreeNode(nodeDictionary, reference, false, null, nodeChildren));
}