public void Visit(PdfInteger integer) { Append(integer); }
public void Load(Stream stream, bool immediate = false, byte[] bytes = null)
{
    if (_open)
    {
        throw new ApplicationException("Document already has a stream open.");
    }

    _stream = stream;
    _parser = new Parser(_stream);
    _parser.ResolveReference += Parser_ResolveReference;

    // PDF file should have a well-known marker at the top of the file
    _parser.ParseHeader(out int versionMajor, out int versionMinor);
    Version = new PdfVersion(this, versionMajor, versionMinor);

    // Find the stream position of the last cross-reference table
    long xRefPosition = _parser.ParseXRefOffset();

    bool lastHeader = true;
    do
    {
        // Get the aggregated set of entries from all the cross-reference table sections
        List<TokenXRefEntry> xrefs = _parser.ParseXRef(xRefPosition);

        // Should always be positioned at the trailer after parsing the cross-reference tables
        PdfDictionary trailer = new PdfDictionary(this, _parser.ParseTrailer());
        PdfInteger size = trailer.MandatoryValue<PdfInteger>("Size");

        foreach (TokenXRefEntry xref in xrefs)
        {
            // Ignore unused entries and entries with an id beyond the size defined in the trailer dictionary
            if (xref.Used && (xref.Id < size.Value))
            {
                IndirectObjects.AddXRef(xref);
            }
        }

        if (lastHeader)
        {
            // Replace the default decryption handler with one from the document settings
            DecryptHandler = PdfDecrypt.CreateDecrypt(this, trailer);

            // We only care about the latest defined catalog and information dictionary
            _refCatalog = trailer.MandatoryValue<PdfObjectReference>("Root");
            _refInfo = trailer.OptionalValue<PdfObjectReference>("Info");
        }

        // If there is a previous cross-reference table, then process that as well
        PdfInteger prev = trailer.OptionalValue<PdfInteger>("Prev");
        xRefPosition = (prev != null) ? prev.Value : 0;

        lastHeader = false;
    } while (xRefPosition > 0);

    _open = true;

    // Must load all objects immediately so the stream can then be closed
    if (immediate)
    {
        // Is there enough work to justify using multiple threads?
        if ((bytes != null) && (IndirectObjects.Count > BACKGROUND_TRIGGER))
        {
            // Set up the synchronization event so we wait until all work is completed
            _backgroundCount = NUM_BACKGROUND_ITEMS;
            _backgroundEvent = new ManualResetEvent(false);

            List<int> ids = IndirectObjects.Ids.ToList();
            int idCount = ids.Count;
            int batchSize = idCount / NUM_BACKGROUND_ITEMS;

            for (int i = 0, index = 0; i < NUM_BACKGROUND_ITEMS; i++, index += batchSize)
            {
                // Create a parser per unit of work, so they can work in parallel
                MemoryStream memoryStream = new MemoryStream(bytes);
                Parser parser = new Parser(memoryStream);

                // Make sure the last batch includes all the remaining ids
                ThreadPool.QueueUserWorkItem(BackgroundResolveReference, new BackgroundArgs()
                {
                    Parser = parser,
                    Ids = ids,
                    Index = index,
                    Count = (i == (NUM_BACKGROUND_ITEMS - 1)) ? idCount - index : batchSize
                });
            }

            _backgroundEvent.WaitOne();
            _backgroundEvent.Dispose();
            _backgroundEvent = null;
        }
        else
        {
            IndirectObjects.ResolveAllReferences(this);
        }

        Close();
    }
}
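// Hedged usage sketch, not part of the original source: one way a caller might drive the
// Load overload above down its immediate, multi-threaded path by supplying the raw file
// bytes so each background worker can be given its own MemoryStream over the same buffer.
// The helper name LoadAllFromFile and the use of File.ReadAllBytes are illustrative
// assumptions, not existing API of this class.
public void LoadAllFromFile(string filename)
{
    // Read the whole file up front; the byte array is what allows Load to create a
    // separate Parser per background work item.
    byte[] bytes = File.ReadAllBytes(filename);

    using (MemoryStream memoryStream = new MemoryStream(bytes))
    {
        // immediate = true forces every indirect object to be resolved before Load
        // returns, after which Close() releases the stream.
        Load(memoryStream, immediate: true, bytes: bytes);
    }
}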