// Block kinds handled by this parser:
//   Root, Paragraph, Header (#(1-6), ===, ---), HorizontalRule (---, ***),
//   List (+, -, *, number.), Code (four spaces or \t), Quote (>),
//   ListItemBuilder, Table (| 1 | 2 |), LinkReference ([name](url))

/// <summary>
/// Parses a single markdown block starting at <paramref name="start"/>, trying
/// header, horizontal rule and list syntax in turn before falling back to a
/// plain paragraph.
/// </summary>
internal static MarkdownBlock ParseBlock(string markdownText, int start, int end, out int actualEnd)
{
    actualEnd = start;

    char firstChar = GetNonSpaceChar(markdownText, start, end, out int firstCharPos);

    // A '#' header must begin in the very first column.
    if (firstChar == '#' && firstCharPos == start)
    {
        MarkdownBlock header = HeaderBlock.Parse(markdownText, start, end, out actualEnd);
        if (header != null)
        {
            return header;
        }
    }

    // '*', '-' and '_' can open a horizontal rule...
    if (firstChar == '*' || firstChar == '-' || firstChar == '_')
    {
        MarkdownBlock rule = HorizontalRuleBlock.Parse(markdownText, start, end, out actualEnd);
        if (rule != null)
        {
            return rule;
        }
    }

    // ...while '*', '+', '-' or a leading digit can open a list instead.
    if (firstChar == '*' || firstChar == '+' || firstChar == '-' || (firstChar >= '0' && firstChar <= '9'))
    {
        MarkdownBlock list = ListElement.Parse(markdownText, start, end, out actualEnd);
        if (list != null)
        {
            return list;
        }
    }

    // Anything else is treated as a plain paragraph.
    return ParagraphBlock.Parse(markdownText, start, end, out actualEnd);
}
/// <summary>
/// Initializes this target: clears any buffered entities and writes the
/// mandatory OSM-PBF header blob (BlobHeader + Blob) to the output stream.
/// </summary>
public override void Initialize()
{
    _currentEntities.Clear();

    // write the mandatory header.
    // NOTE(review): assumes _buffer is empty here; Seek(0) alone does not
    // discard stale bytes — confirm the buffer is fresh when this is called.
    _buffer.Seek(0, SeekOrigin.Begin);

    // create header block declaring the features a reader must support.
    var blockHeader = new HeaderBlock();
    blockHeader.required_features.Add("OsmSchema-V0.6");
    blockHeader.required_features.Add("DenseNodes");
    _runtimeTypeModel.Serialize(_buffer, blockHeader);
    var blockHeaderData = _buffer.ToArray();
    _buffer.SetLength(0);

    // create blob carrying the raw (uncompressed) header block bytes.
    var blob = new Blob();
    blob.raw = blockHeaderData;
    _runtimeTypeModel.Serialize(_buffer, blob);

    // create blobheader describing the blob that follows it.
    var blobHeader = new BlobHeader();
    blobHeader.datasize = (int)_buffer.Length;
    blobHeader.indexdata = null;
    blobHeader.type = Encoder.OSMHeader;
    _runtimeTypeModel.SerializeWithLengthPrefix(_stream, blobHeader, _blobHeaderType,
        ProtoBuf.PrefixStyle.Fixed32BigEndian, 0);

    // flush the blob itself to the stream, right after the blob header.
    _buffer.Seek(0, SeekOrigin.Begin);
    _buffer.CopyTo(_stream);
}
/// <summary>
/// Allocates a tar header block with every field sized per the POSIX ustar
/// layout (512 bytes total) and the magic/version fields pre-filled.
/// </summary>
public static HeaderBlock CreateOne()
{
    HeaderBlock hb = new HeaderBlock
    {
        // Field widths below follow the ustar header layout.
        name = new byte[100],
        mode = new byte[8],
        uid = new byte[8],
        gid = new byte[8],
        size = new byte[12],
        mtime = new byte[12],
        chksum = new byte[8],
        linkname = new byte[100],
        magic = new byte[6],
        version = new byte[2],
        uname = new byte[32],
        gname = new byte[32],
        devmajor = new byte[8],
        devminor = new byte[8],
        prefix = new byte[155],
        pad = new byte[12],
    };

    // "ustar " (with a trailing space) is the GNU-style magic value.
    Array.Copy(System.Text.Encoding.ASCII.GetBytes("ustar "), 0, hb.magic, 0, 6);

    // NOTE(review): the ustar version field is normally the two ASCII digits
    // "00"; this relies on (byte)TarEntryType.File equalling '0' — confirm.
    hb.version[0] = hb.version[1] = (byte)TarEntryType.File;

    return (hb);
}
/// <summary>
/// Applies header styling to the given paragraph: sets the "Heading{N}" style
/// for the block's level, bookmarks it, and fills in the header's inlines.
/// </summary>
public static void MakeHeader(this HeaderBlock header, Paragraph paragraph, Document document)
{
    // NOTE(review): 'document' is unused but kept for signature compatibility.
    var styleName = $"Heading{header.HeaderLevel}";

    paragraph.Style = styleName;
    paragraph.AddBookmark("Paragraphs");
    header.Inlines.FillInlines(paragraph);
}
/**
 * Create a POIFSFileSystem from an <tt>InputStream</tt>. Normally the stream is read until
 * EOF. The stream is always closed.<p/>
 *
 * Some streams are usable After reaching EOF (typically those that return <code>true</code>
 * for <tt>markSupported()</tt>). In the unlikely case that the caller has such a stream
 * <i>and</i> needs to use it After this constructor completes, a work around is to wrap the
 * stream in order to trap the <tt>close()</tt> call (see
 * <tt>CreateNonClosingInputStream()</tt>). Note also the special case of
 * <tt>MemoryStream</tt>, whose <tt>close()</tt> does nothing.
 *
 * @param stream the InputStream from which to read the data
 *
 * @exception IOException on errors Reading, or on invalid data
 */
public NPOIFSFileSystem(Stream stream)
    : this(false)
{
    Stream channel = null;
    bool success = false;
    try
    {
        // Turn our InputStream into something NIO based
        channel = stream;

        // Get the header
        ByteBuffer headerBuffer = ByteBuffer.CreateBuffer(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
        IOUtils.ReadFully(channel, headerBuffer.Buffer);

        // Have the header Processed
        _header = new HeaderBlock(headerBuffer);

        // Sanity check the block count
        BlockAllocationTableReader.SanityCheckBlockCount(_header.BATCount);

        // We need to buffer the whole file into memory when working with an
        // InputStream. The max possible size is when every BAT entry is used.
        // Fix: hold the size in a long and reject >2GB files explicitly —
        // the previous "int maxSize" silently truncated/overflowed here.
        long maxSize = BATBlock.CalculateMaximumSize(_header);
        if (maxSize > int.MaxValue)
        {
            throw new ArgumentException("Unable read a >2gb file via an InputStream");
        }
        ByteBuffer data = ByteBuffer.CreateBuffer((int)maxSize);

        // Copy the header in, then read the rest of the stream after it.
        headerBuffer.Position = 0;
        data.Write(headerBuffer.Buffer);
        data.Position = headerBuffer.Length;
        data.Position += IOUtils.ReadFully(channel, data.Buffer, data.Position, (int)channel.Length);
        success = true;

        // Turn it into a DataSource
        _data = new ByteArrayBackedDataSource(data.Buffer, data.Position);
    }
    finally
    {
        // As per the constructor contract, always close the stream
        if (channel != null)
        {
            channel.Close();
        }
        CloseInputStream(stream, success);
    }

    // Now process the various entries
    ReadCoreContents();
}
/// <summary>
/// Writes a file system large enough to need both the 109 header BAT entries
/// and an XBAT block, then checks the header, the XBAT contents and the block
/// counts read back from the written bytes.
/// </summary>
public void TestBATandXBAT()
{
    // 8 MB of payload forces the BAT past the 109 entries the header can hold.
    byte[] hugeStream = new byte[8 * 1024 * 1024];
    POIFSFileSystem fs = new POIFSFileSystem();
    fs.Root.CreateDocument("BIG", new MemoryStream(hugeStream));

    MemoryStream baos = new MemoryStream();
    fs.WriteFileSystem(baos);
    byte[] fsData = baos.ToArray();

    // Check the header was written properly
    Stream inp = new MemoryStream(fsData);
    HeaderBlock header = new HeaderBlock(inp);
    Assert.AreEqual(109 + 21, header.BATCount);
    Assert.AreEqual(1, header.XBATCount);

    // Read the single XBAT block back out of the raw bytes.
    ByteBuffer xbatData = ByteBuffer.CreateBuffer(512);
    xbatData.Write(fsData, (1 + header.XBATIndex) * 512, 512);
    xbatData.Position = 0;
    BATBlock xbat = BATBlock.CreateBATBlock(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, xbatData);

    // The first 21 entries continue the BAT; the rest are unused, and the
    // final slot terminates the XBAT chain.
    for (int i = 0; i < 21; i++)
    {
        Assert.IsTrue(xbat.GetValueAt(i) != POIFSConstants.UNUSED_BLOCK);
    }
    for (int i = 21; i < 127; i++)
    {
        Assert.AreEqual(POIFSConstants.UNUSED_BLOCK, xbat.GetValueAt(i));
    }
    Assert.AreEqual(POIFSConstants.END_OF_CHAIN, xbat.GetValueAt(127));

    // Reading the remaining blocks should account for every block except the
    // header, both before and after the allocation table is processed.
    RawDataBlockList blockList = new RawDataBlockList(inp, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
    Assert.AreEqual(fsData.Length / 512, blockList.BlockCount() + 1);
    new BlockAllocationTableReader(header.BigBlockSize, header.BATCount, header.BATArray,
        header.XBATCount, header.XBATIndex, blockList);
    Assert.AreEqual(fsData.Length / 512, blockList.BlockCount() + 1);

    // NOTE(review): full round-trip re-open of the written data is disabled.
    //fs = null;
    //fs = new POIFSFileSystem(new MemoryStream(fsData));
    //DirectoryNode root = fs.Root;
    //Assert.AreEqual(1, root.EntryCount);
    //DocumentNode big = (DocumentNode)root.GetEntry("BIG");
    //Assert.AreEqual(hugeStream.Length, big.Size);
}
/// <summary>
/// A freshly constructed HeaderBlock must carry a non-empty digest that
/// matches what GetDigest() recomputes.
/// </summary>
public void GivenHeaderBlock_WhenInitialized_ShouldValidates()
{
    // Arrange + Act
    var header = new HeaderBlock("this is a description");

    // Assert
    header.Digest.Should().NotBeNullOrEmpty();
    header.Digest.Should().Be(header.GetDigest());
}
/// <summary>
/// Reading constructor: builds the property table for an existing file system
/// by walking the property stream that starts at the header's PropertyStart.
/// </summary>
/// <param name="headerBlock">the parsed file system header</param>
/// <param name="fileSystem">the file system whose property stream is read</param>
public NPropertyTable(HeaderBlock headerBlock, NPOIFSFileSystem fileSystem)
    : base(headerBlock,
           BuildProperties((new NPOIFSStream(fileSystem, headerBlock.PropertyStart)).GetEnumerator(),
               headerBlock.BigBlockSize))
{
    _bigBigBlockSize = headerBlock.BigBlockSize;
}
/// <summary>
/// Read from an InputStream and Process the documents we Get
/// </summary>
/// <param name="stream">the InputStream from which to Read the data</param>
/// <returns>POIFSDocument list</returns>
public List<DocumentDescriptor> Read(Stream stream)
{
    // No further listeners may be registered once reading has started.
    registryClosed = true;

    // Read the header block from the stream
    HeaderBlock header_block = new HeaderBlock(stream);

    // Read the rest of the stream into blocks
    RawDataBlockList data_blocks = new RawDataBlockList(stream, header_block.BigBlockSize);

    // Set up the block allocation table (necessary for the
    // data_blocks to be manageable
    new BlockAllocationTableReader(header_block.BigBlockSize, header_block.BATCount,
        header_block.BATArray, header_block.XBATCount, header_block.XBATIndex, data_blocks);

    // Get property table from the document
    PropertyTable properties = new PropertyTable(header_block, data_blocks);

    // Process documents
    return (ProcessProperties(SmallBlockTableReader.GetSmallDocumentBlocks(
                header_block.BigBlockSize, data_blocks, properties.Root, header_block.SBATStart),
            data_blocks,
            properties.Root.Children,
            new POIFSDocumentPath()));
}
/**
 * reading constructor (used when we've read in a file and we want
 * to extract the property table from it). Populates the
 * properties thoroughly
 *
 * @param headerBlock the parsed file system header (supplies PropertyStart)
 * @param blockList the list of blocks
 *
 * @exception IOException if anything goes wrong (which should be
 * a result of the input being NFG)
 */
public PropertyTable(HeaderBlock headerBlock, RawDataBlockList blockList)
    : base(headerBlock,
           PropertyFactory.ConvertToProperties(blockList.FetchBlocks(headerBlock.PropertyStart, -1)))
{
    _bigBigBlockSize = headerBlock.BigBlockSize;
    // Blocks are only built up when writing; a freshly-read table has none.
    _blocks = null;
}
// There is nothing to dispose in this type, hence no IDisposable implementation.

/// <summary>
/// Initializes a new instance of the <see cref="OPOIFSFileSystem"/> class,
/// intended for writing: an empty property table and document list.
/// </summary>
public OPOIFSFileSystem()
{
    var headerBlock = new HeaderBlock(bigBlockSize);

    _property_table = new PropertyTable(headerBlock);
    _documents = new List<OPOIFSDocument>();
    _root = null;
}
/// <summary>
/// Renders a header element.
/// </summary>
/// <param name="element">the header block to render (levels 1-6)</param>
/// <param name="blockUIElementCollection">UI collection receiving the output</param>
/// <param name="context">current render state</param>
private void RenderHeader(HeaderBlock element, UIElementCollection blockUIElementCollection, RenderContext context)
{
    var textBlock = CreateOrReuseRichTextBlock(blockUIElementCollection, context);

    var paragraph = new Paragraph();
    var childInlines = paragraph.Inlines;

    // Pick margin/size/weight per header level; level 6 additionally wraps
    // its children in an Underline, so childInlines is redirected there.
    switch (element.HeaderLevel)
    {
        case 1:
            paragraph.Margin = Header1Margin;
            paragraph.FontSize = Header1FontSize;
            paragraph.FontWeight = Header1FontWeight;
            break;

        case 2:
            paragraph.Margin = Header2Margin;
            paragraph.FontSize = Header2FontSize;
            paragraph.FontWeight = Header2FontWeight;
            break;

        case 3:
            paragraph.Margin = Header3Margin;
            paragraph.FontSize = Header3FontSize;
            paragraph.FontWeight = Header3FontWeight;
            break;

        case 4:
            paragraph.Margin = Header4Margin;
            paragraph.FontSize = Header4FontSize;
            paragraph.FontWeight = Header4FontWeight;
            break;

        case 5:
            paragraph.Margin = Header5Margin;
            paragraph.FontSize = Header5FontSize;
            paragraph.FontWeight = Header5FontWeight;
            break;

        case 6:
            paragraph.Margin = Header6Margin;
            paragraph.FontSize = Header6FontSize;
            paragraph.FontWeight = Header6FontWeight;

            var underline = new Underline();
            childInlines = underline.Inlines;
            paragraph.Inlines.Add(underline);
            break;
    }

    // Render the children into the para inline.
    context.TrimLeadingWhitespace = true;
    RenderInlineChildren(childInlines, element.Inlines, paragraph, context);

    // Add it to the blocks
    textBlock.Blocks.Add(paragraph);
}
/// <summary>
/// Extracts id and summary string from the given HeaderBlock.
/// The heading is expected to look like "[SOME-ID] summary text".
/// </summary>
/// <param name="header">markdown header block</param>
/// <returns>
/// Tuple of id and summary string; when the heading carries no "[ID]" prefix,
/// the id is empty and the summary is the whole heading text.
/// </returns>
static public (string id, string summary) DecomposeHeading(HeaderBlock header)
{
    var text = header.ToString().Trim();

    // Match a bracketed identifier such as "[ABC-123]" in the heading.
    var prefix = Regex.Match(text, "\\[[-A-Z0-9]{1,}\\]").Value;
    if (string.IsNullOrEmpty(prefix))
    {
        // Fix: with no match the old code called Substring(1, -1) on an empty
        // string (ArgumentOutOfRangeException); report the raw text instead.
        return (string.Empty, text);
    }

    // Strip the surrounding brackets to get the bare id.
    var id = prefix.Substring(1, prefix.Length - 2);
    var summary = text.Replace(prefix, string.Empty);

    return string.IsNullOrEmpty(summary) ? (id, string.Empty) : (id, summary);
}
/// <summary>
/// Returns the ParagraphBlock immediately following the given header in the
/// document, or null when the next block is missing or not a paragraph.
/// (Method name kept as "Pragraph" for caller compatibility.)
/// </summary>
public static ParagraphBlock Pragraph(this HeaderBlock header, MarkdownDocument document)
{
    // The candidate paragraph is the block right after the header.
    int paragraphIndex = document.Blocks.IndexOf(header) + 1;

    // Fix: bounds check is Count > paragraphIndex — the previous
    // "Count - 1 > paragraphIndex" wrongly rejected a paragraph that
    // happened to be the document's last block.
    return document.Blocks.Count > paragraphIndex
        && document.Blocks[paragraphIndex].Type == MarkdownBlockType.Paragraph
        ? document.Blocks[paragraphIndex] as ParagraphBlock
        : null;
}
/// <summary>
/// Builds a small block table from a mix of documents and checks the small
/// block count, the rounded big-block count, and start-block propagation.
/// </summary>
public void TestWritingConstructor()
{
    // One medium doc (6 small blocks), one too big for small blocks, one
    // empty doc, and nine docs of 1-9 bytes (1 small block each).
    ArrayList documents = new ArrayList();
    documents.Add(new POIFSDocument("doc340", new MemoryStream(new byte[340])));
    documents.Add(new POIFSDocument("doc5000", new MemoryStream(new byte[5000])));
    documents.Add(new POIFSDocument("doc0", new MemoryStream(new byte[0])));
    documents.Add(new POIFSDocument("doc1", new MemoryStream(new byte[1])));
    documents.Add(new POIFSDocument("doc2", new MemoryStream(new byte[2])));
    documents.Add(new POIFSDocument("doc3", new MemoryStream(new byte[3])));
    documents.Add(new POIFSDocument("doc4", new MemoryStream(new byte[4])));
    documents.Add(new POIFSDocument("doc5", new MemoryStream(new byte[5])));
    documents.Add(new POIFSDocument("doc6", new MemoryStream(new byte[6])));
    documents.Add(new POIFSDocument("doc7", new MemoryStream(new byte[7])));
    documents.Add(new POIFSDocument("doc8", new MemoryStream(new byte[8])));
    documents.Add(new POIFSDocument("doc9", new MemoryStream(new byte[9])));

    HeaderBlock header = new HeaderBlock(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
    RootProperty root = new PropertyTable(header).Root;
    SmallBlockTableWriter sbtw = new SmallBlockTableWriter(
        POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, documents, root);
    // NOTE(review): 'bat' is never used below — candidate for removal.
    BlockAllocationTableWriter bat = sbtw.SBAT;

    // 15 small blocks: 6 for doc340, 0 for doc5000 (too big), 0
    // for doc0 (no storage needed), 1 each for doc1 through doc9
    Assert.AreEqual(15 * 64, root.Size);

    // 15 small blocks rounds up to 2 big blocks
    Assert.AreEqual(2, sbtw.CountBlocks);

    // Moving the table's start block must be reflected by the root property.
    int start_block = 1000 + root.StartBlock;
    sbtw.StartBlock = start_block;
    Assert.AreEqual(start_block, root.StartBlock);
}
/**
 * Create a POIFSFileSystem from an <tt>InputStream</tt>. Normally the stream is read until
 * EOF. The stream is always closed.<p/>
 *
 * Some streams are usable After reaching EOF (typically those that return <code>true</code>
 * for <tt>markSupported()</tt>). In the unlikely case that the caller has such a stream
 * <i>and</i> needs to use it After this constructor completes, a work around is to wrap the
 * stream in order to trap the <tt>close()</tt> call. A convenience method (
 * <tt>CreateNonClosingInputStream()</tt>) has been provided for this purpose:
 * <pre>
 * InputStream wrappedStream = POIFSFileSystem.CreateNonClosingInputStream(is);
 * HSSFWorkbook wb = new HSSFWorkbook(wrappedStream);
 * is.Reset();
 * doSomethingElse(is);
 * </pre>
 * Note also the special case of <tt>MemoryStream</tt> for which the <tt>close()</tt>
 * method does nothing.
 *
 * @param stream the InputStream from which to read the data
 *
 * @exception IOException on errors Reading, or on invalid data
 */
public NPOIFSFileSystem(Stream stream)
    : this(false)
{
    Stream channel = null;
    bool success = false;
    try
    {
        // Turn our InputStream into something NIO based
        channel = stream;

        // Get the header
        ByteBuffer headerBuffer = ByteBuffer.CreateBuffer(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
        IOUtils.ReadFully(channel, headerBuffer.Buffer);

        // Have the header Processed
        _header = new HeaderBlock(headerBuffer);

        // Sanity check the block count
        BlockAllocationTableReader.SanityCheckBlockCount(_header.BATCount);

        // We need to buffer the whole file into memory when
        // working with an InputStream.
        // The max possible size is when each BAT block entry is used
        long maxSize = BATBlock.CalculateMaximumSize(_header);
        if (maxSize > int.MaxValue)
        {
            throw new ArgumentException("Unable read a >2gb file via an InputStream");
        }
        ByteBuffer data = ByteBuffer.CreateBuffer((int)maxSize);

        // Copy the header in, then read the rest of the stream after it.
        // NOTE(review): channel.Length requires a seekable stream — confirm
        // non-seekable inputs are handled by a different code path.
        headerBuffer.Position = 0;
        data.Write(headerBuffer.Buffer);
        data.Position = headerBuffer.Length;
        //IOUtils.ReadFully(channel, data);
        data.Position += IOUtils.ReadFully(channel, data.Buffer, data.Position, (int)channel.Length);
        success = true;

        // Turn it into a DataSource
        _data = new ByteArrayBackedDataSource(data.Buffer, data.Position);
    }
    finally
    {
        // As per the constructor contract, always close the stream
        if (channel != null)
        {
            channel.Close();
            channel.Dispose();
        }
        CloseInputStream(stream, success);
    }

    // Now process the various entries
    ReadCoreContents();
}
/// <summary>
/// Wires up a mini-store over the given file system: keeps references to the
/// header, root property and small-BAT blocks, and opens the mini stream
/// starting at the root property's first block.
/// </summary>
public NPOIFSMiniStore(NPOIFSFileSystem filesystem, RootProperty root,
    List<BATBlock> sbats, HeaderBlock header)
{
    _filesystem = filesystem;
    _header = header;
    _root = root;
    _sbat_blocks = sbats;

    // The mini stream itself lives at the root property's start block.
    _mini_stream = new NPOIFSStream(filesystem, root.StartBlock);
}
/// <summary>
/// Round-trips a file system through memory: serialises it, then parses and
/// returns the header block from the written bytes.
/// </summary>
public static HeaderBlock WriteOutAndReadHeader(NPOIFSFileSystem fs)
{
    using (var written = new MemoryStream())
    {
        fs.WriteFileSystem(written);

        // Re-read the header from a fresh stream over the written bytes.
        return new HeaderBlock(new MemoryStream(written.ToArray()));
    }
}
/// <summary>
/// Opens a file system over a FileStream (or, when <paramref name="srcFile"/>
/// is given, over a new read-write stream for that file), reads and processes
/// the header, then reads the core contents.
/// </summary>
/// <param name="channel">the open file channel to read from</param>
/// <param name="srcFile">when non-null, this file is opened instead of using <paramref name="channel"/></param>
/// <param name="readOnly">whether the backing data source is read-only</param>
/// <param name="closeChannelOnError">whether to close the channel when reading fails</param>
public NPOIFSFileSystem(FileStream channel, FileInfo srcFile, bool readOnly, bool closeChannelOnError)
    : this(false)
{
    try
    {
        // Initialize the datasource
        if (srcFile != null)
        {
            // NOTE(review): this replaces the caller-supplied channel; confirm
            // the original channel does not leak when srcFile is provided.
            channel = new FileStream(srcFile.FullName, FileMode.Open, FileAccess.ReadWrite);
            _data = new FileBackedDataSource(channel, readOnly);
        }
        else
        {
            _data = new FileBackedDataSource(channel, readOnly);
        }

        // Get the header
        byte[] headerBuffer = new byte[POIFSConstants.SMALLER_BIG_BLOCK_SIZE];
        IOUtils.ReadFully(channel, headerBuffer);

        // Have the header Processed
        _header = new HeaderBlock(headerBuffer);

        // Now process the various entries
        ReadCoreContents();
        channel.Close();
    }
    catch (IOException)
    {
        if (closeChannelOnError && channel != null)
        {
            channel.Close();
        }
        // Fix: rethrow with "throw;" — the previous "throw e;" reset the
        // original stack trace.
        throw;
    }
    catch (Exception)
    {
        // Comes from Iterators etc.
        // TODO Decide if we can handle these better whilst
        // still sticking to the iterator contract
        if (closeChannelOnError && channel != null)
        {
            channel.Close();
        }
        throw;
    }
}
/// <summary>
/// Converts a markdown header block into a paragraph whose font size shrinks
/// as the header level grows, floored at 12, with a fixed vertical margin.
/// </summary>
private static Block CreateHeader(HeaderBlock headerBlock)
{
    var fontSize = Math.Max(19 - headerBlock.HeaderLevel, 12);

    var paragraph = new Paragraph
    {
        FontSize = fontSize,
        Margin = new Thickness(0, 5, 0, 5),
    };
    paragraph.Inlines.AddRange(CreateInlines(headerBlock.Inlines));

    return paragraph;
}
/// <summary>
/// A HeaderBlock rebuilt from another block's fields must produce the same
/// digest as the original.
/// </summary>
public void GivenHeaderBlock_WhenCloned_ShouldValidate()
{
    // Arrange: a header block plus a clone built from its own fields.
    UnixDate now = UnixDate.UtcNow;
    var subject = new HeaderBlock(now, "Text-2222");
    subject.Digest.Should().NotBeNullOrEmpty();

    var s1 = new HeaderBlock(subject.TimeStamp, subject.Description);

    // Fix: the original never asserted anything about the clone 's1' (it only
    // compared subject against itself); verify the clone's digest matches.
    s1.Digest.Should().Be(subject.Digest);
    subject.Digest.Should().Be(subject.GetDigest());
}
/// <summary>
/// Initializes this target: clears buffered entities and writes the mandatory
/// OSM-PBF header blob (optionally zlib-compressed) to the output stream.
/// </summary>
public override void Initialize()
{
    _currentEntities.Clear();

    // write the mandatory header.
    _buffer.Seek(0, SeekOrigin.Begin);

    // create header block declaring the features a reader must support.
    var blockHeader = new HeaderBlock();
    blockHeader.required_features.Add("OsmSchema-V0.6");
    blockHeader.required_features.Add("DenseNodes");
    _runtimeTypeModel.Serialize(_buffer, blockHeader);
    var blockHeaderData = _buffer.ToArray();
    _buffer.SetLength(0);

    // create blob: either deflate the header bytes or store them raw.
    var blob = new Blob();
    if (_compress)
    {
        using (var target = new MemoryStream())
        {
            // the deflater is disposed (and so flushed) before target.ToArray().
            using (var source = new MemoryStream(blockHeaderData))
            using (var deflate = new DeflaterOutputStream(target))
            {
                source.CopyTo(deflate);
            }
            blob.zlib_data = target.ToArray();
        }
    }
    else
    {
        blob.raw = blockHeaderData;
    }
    _runtimeTypeModel.Serialize(_buffer, blob);

    // create blobheader describing the blob that follows it.
    var blobHeader = new BlobHeader();
    blobHeader.datasize = (int)_buffer.Length;
    blobHeader.indexdata = null;
    blobHeader.type = Encoder.OSMHeader;
    _runtimeTypeModel.SerializeWithLengthPrefix(_stream, blobHeader, _blobHeaderType,
        ProtoBuf.PrefixStyle.Fixed32BigEndian, 0);

    // flush the blob itself to the stream, right after the blob header.
    _buffer.Seek(0, SeekOrigin.Begin);
    _buffer.CopyTo(_stream);
}
/// <summary>
/// Two HeaderBlocks built from identical inputs must produce identical,
/// non-empty digests.
/// </summary>
public void GivenHeaderBlock_WhenSameInitialized_ShouldValidate()
{
    // Arrange: two blocks sharing the same timestamp and description.
    UnixDate timeStamp = UnixDate.UtcNow;
    var first = new HeaderBlock(timeStamp, "Text-1111");

    first.Digest.Should().NotBeNullOrEmpty();

    var second = new HeaderBlock(timeStamp, "Text-1111");
    second.Digest.Should().NotBeNullOrEmpty();

    // Assert: identical inputs hash identically.
    first.Digest.Should().Be(second.Digest);
}
/// <summary>
/// Renders a header block: children rendered while _headerLevel is set pick
/// up header styling; a trailing newline is appended to the last span when
/// output went into a FormattedString.
/// </summary>
protected override void RenderHeader(HeaderBlock element, IRenderContext context)
{
    _headerLevel = element.HeaderLevel;
    RenderInlineChildren(element.Inlines, context);

    // Terminate the header line when spans were actually produced.
    if (context.Parent is FormattedString formatted && (formatted.Spans?.Any() ?? false))
    {
        formatted.Spans.Last().Text += Environment.NewLine;
    }

    _headerLevel = 0;
}
/// <summary>
/// Reads the next block container from the stream. Returns null at end of
/// stream or when the block header is blank.
/// </summary>
private VltBlockContainer ReadBlock(BinaryReader reader)
{
    if (reader.BaseStream.Position == reader.BaseStream.Length)
    {
        return null;
    }

    // Every block starts with a marker and a length.
    var block = new VltBlock
    {
        Position = reader.BaseStream.Position,
        Type = (VltMarker)reader.ReadInt32(),
        BlockLength = reader.ReadInt32(),
    };

    if (block.IsBlank())
    {
        return null;
    }

    // Choose the container type for the marker; unknown markers get a
    // placeholder so the stream position still advances correctly.
    VltBlockContainer container;
    switch (block.Type)
    {
        case VltMarker.VltMagic:
            container = new HeaderBlock();
            break;
        case VltMarker.TableStart:
            container = new TableStartBlock();
            break;
        case VltMarker.TableEnd:
            container = new TableEndBlock();
            break;
        default:
            container = new PlaceholderBlock();
            break;
    }

    container.Block = block;
    container.Read(reader);
    block.SeekToNextBlock(reader.BaseStream);
    return container;
}
/// <summary>
/// Common constructor: sets up an empty header, property table, mini store
/// and BAT lists. When <paramref name="newFS"/> is true, also allocates the
/// minimal in-memory backing data for a brand-new file system.
/// </summary>
private NPOIFSFileSystem(bool newFS)
{
    _header = new HeaderBlock(bigBlockSize);
    _property_table = new NPropertyTable(_header);
    _mini_store = new NPOIFSMiniStore(this, _property_table.Root, new List<BATBlock>(), _header);
    _xbat_blocks = new List<BATBlock>();
    _bat_blocks = new List<BATBlock>();
    _root = null;

    if (newFS)
    {
        // Data needs to Initially hold just the header block,
        // a single bat block, and an empty properties section
        _data = new ByteArrayBackedDataSource(new byte[bigBlockSize.GetBigBlockSize() * 3]);
    }
}
/// <summary>
/// Recursively adds a directory entry (its own header block, then its files,
/// subdirectories and symlinks) to the tar output stream.
/// </summary>
private void AddDirectory(string dirName)
{
    // Tar directory names are volume-less and always end with "/".
    dirName = dirName.TrimVolume();
    if (!dirName.EndsWith("/"))
    {
        dirName += "/";
    }

    if (TarOptions.StatusWriter != null)
    {
        TarOptions.StatusWriter.WriteLine("{0}", dirName);
    }

    // Write the directory's own header block (typeflag '5' = directory).
    HeaderBlock hb = HeaderBlock.CreateOne();
    hb.InsertName(dirName);
    hb.typeflag = 5 + (byte)'0';
    hb.SetSize(0);
    hb.SetChksum();
    byte[] block = serializer.RawSerialize(hb);
    _outfs.Write(block, 0, block.Length);

    // Add the files contained directly in this directory...
    String[] filenames = Directory.GetFiles(dirName);
    foreach (String filename in filenames)
    {
        AddFile(filename);
    }

    // ...then recurse into subdirectories. Reparse points (symlinks) are only
    // followed when TarOptions.FollowSymLinks is set; otherwise they are
    // recorded as symlink entries.
    String[] dirnames = Directory.GetDirectories(dirName);
    foreach (String d in dirnames)
    {
        var a = System.IO.File.GetAttributes(d);
        if ((a & FileAttributes.ReparsePoint) == 0)
        {
            AddDirectory(d);
        }
        else if (this.TarOptions.FollowSymLinks)
        {
            AddDirectory(d);
        }
        else
        {
            AddSymlink(d);
        }
    }
}
/// <summary>
/// Writes a single symbolic-link header block (with zero size) for the given
/// name to the tar output stream.
/// </summary>
private void AddSymlink(string name)
{
    // Emit progress when a status writer is configured.
    var status = TarOptions.StatusWriter;
    if (status != null)
    {
        status.WriteLine("{0}", name);
    }

    // Build a tar header describing the symlink entry.
    var header = HeaderBlock.CreateOne();
    header.InsertName(name);
    header.InsertLinkName(name);
    header.typeflag = (byte)TarEntryType.SymbolicLink;
    header.SetSize(0);
    header.SetChksum();

    // Serialise the header and append it to the output.
    byte[] block = serializer.RawSerialize(header);
    _outfs.Write(block, 0, block.Length);
}
/// <summary>
/// Create a OPOIFSFileSystem from an Stream. Normally the stream is Read until
/// EOF. The stream is always Closed. In the unlikely case that the caller has such a stream and
/// needs to use it after this constructor completes, a work around is to wrap the
/// stream in order to trap the Close() call.
/// </summary>
/// <param name="stream">the Stream from which to Read the data</param>
public OPOIFSFileSystem(Stream stream)
    : this()
{
    bool success = false;
    HeaderBlock header_block_reader;
    RawDataBlockList data_blocks;
    try
    {
        // Read the header block from the stream
        header_block_reader = new HeaderBlock(stream);
        bigBlockSize = header_block_reader.BigBlockSize;

        // Read the rest of the stream into blocks
        data_blocks = new RawDataBlockList(stream, bigBlockSize);
        success = true;
    }
    finally
    {
        // Always close the input, whether reading succeeded or not.
        CloseInputStream(stream, success);
    }

    // Set up the block allocation table (necessary for the
    // data_blocks to be manageable
    new BlockAllocationTableReader(header_block_reader.BigBlockSize,
        header_block_reader.BATCount, header_block_reader.BATArray,
        header_block_reader.XBATCount, header_block_reader.XBATIndex, data_blocks);

    // Get property table from the document
    PropertyTable properties = new PropertyTable(header_block_reader, data_blocks);

    // init documents
    ProcessProperties(SmallBlockTableReader.GetSmallDocumentBlocks(bigBlockSize,
            data_blocks, properties.Root, header_block_reader.SBATStart),
        data_blocks, properties.Root.Children, null, header_block_reader.PropertyStart);

    // For whatever reason CLSID of root is always 0.
    Root.StorageClsid = (properties.Root.StorageClsid);
}
/// <summary>
/// Checks that a file written with 4096-byte big blocks can be processed:
/// header fields, BAT layout, raw block list, and a full directory traversal;
/// finishes with an equivalent 512-byte-block file as a sanity check.
/// </summary>
public void Test4KBlocks()
{
    Stream inp = _samples.OpenResourceAsStream("BlockSize4096.zvi");
    try
    {
        // First up, check that we can process the header properly
        HeaderBlock header_block = new HeaderBlock(inp);
        POIFSBigBlockSize bigBlockSize = header_block.BigBlockSize;
        Assert.AreEqual(4096, bigBlockSize.GetBigBlockSize());

        // Check the fat info looks sane
        Assert.AreEqual(1, header_block.BATArray.Length);
        Assert.AreEqual(1, header_block.BATCount);
        Assert.AreEqual(0, header_block.XBATCount);

        // Now check we can get the basic fat
        RawDataBlockList data_blocks = new RawDataBlockList(inp, bigBlockSize);
        Assert.AreEqual(15, data_blocks.BlockCount());

        // Now try and open properly
        POIFSFileSystem fs = new POIFSFileSystem(
            _samples.OpenResourceAsStream("BlockSize4096.zvi"));
        Assert.IsTrue(fs.Root.EntryCount > 3);

        // Check we can get at all the contents
        CheckAllDirectoryContents(fs.Root);

        // Finally, check we can do a similar 512byte one too
        fs = new POIFSFileSystem(
            _samples.OpenResourceAsStream("BlockSize512.zvi"));
        Assert.IsTrue(fs.Root.EntryCount > 3);
        CheckAllDirectoryContents(fs.Root);
    }
    finally
    {
        inp.Close();
    }
}