/// <summary>
/// Round-trips a GZip block through the writer and reader, verifying that the
/// flags, extra field, comment and original file name all survive the cycle.
/// </summary>
public void TestWrite()
{
    // Arrange: a compressed stream plus a block carrying every optional header field.
    IGZipBlockWriter writer = new BlockWriter();
    Stream compressedStream = TestUtils.Compress(10);
    Block original = new Block
    {
        ExtraField = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 },
        OriginalFileName = "test.txt",
        Comment = "test",
        Flags = GZipFlags.FEXTRA | GZipFlags.FNAME | GZipFlags.FCOMMENT
    };

    // Act: write the selected fields, then read the whole header back.
    BlockFlags fieldsToWrite =
        BlockFlags.ExtraField | BlockFlags.OriginalFileName | BlockFlags.Comment | BlockFlags.Flags;
    writer.Write(compressedStream, original, fieldsToWrite);

    IBlockReader reader = new BlockReader();
    Block roundTripped = reader.Read(compressedStream, BlockFlags.All);

    // Assert: every written field must come back unchanged.
    Assert.AreEqual(original.Flags, roundTripped.Flags);
    CollectionAssert.AreEqual(original.ExtraField, roundTripped.ExtraField);
    Assert.AreEqual(original.Comment, roundTripped.Comment);
    Assert.AreEqual(original.OriginalFileName, roundTripped.OriginalFileName);
}
/// <summary>
/// Builds the list of input streams for <paramref name="inputFile"/>. If the file's
/// GZip header carries a multi-stream extra field, one <c>InputStream</c> is created
/// per recorded member; otherwise the whole file is exposed as a single stream
/// (opened directly when smaller than one chunk, mapped otherwise).
/// </summary>
/// <param name="inputFile">Path of the file to read; must be non-empty.</param>
/// <param name="cancellationToken">Cooperative cancellation; honored between members.</param>
/// <returns>The input streams discovered in the file (possibly partial if cancelled).</returns>
/// <exception cref="ArgumentException">When <paramref name="inputFile"/> is null or empty.</exception>
public IEnumerable<InputStream> Read(string inputFile, CancellationToken cancellationToken)
{
    if (string.IsNullOrEmpty(inputFile))
    {
        throw new ArgumentException("Input file name must be non-empty", nameof(inputFile));
    }

    List<InputStream> inputStreams = new List<InputStream>();

    // NOTE(review): the mapping is intentionally not disposed here — ReadStream hands
    // out views over it that must outlive this method.
    MemoryMappedFile memoryMappedFile = MemoryMappedFile.CreateFromFile(inputFile);

    // FIX: the 1 KB header view was previously leaked. It is only needed to parse the
    // extra field, so it is disposed as soon as the header has been read.
    Block block;
    using (MemoryMappedViewStream stream = memoryMappedFile.CreateViewStream(0, 1024))
    {
        IBlockReader blockReader = new BlockReader();
        block = blockReader.Read(stream, BlockFlags.ExtraField);
    }

    if (block.ExtraField != null && block.ExtraField.Length > 0)
    {
        // Multi-stream archive: the extra field records the length of each member,
        // so each member can be addressed by a running offset.
        GZipMultiStreamHeader multiStreamHeader = new GZipMultiStreamHeader();
        multiStreamHeader.Deserialize(block.ExtraField);
        int streamIndex = 0;
        long offset = 0;
        foreach (MultiStreamHeaderItem multiStreamHeaderItem in multiStreamHeader.Items)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                break;
            }
            InputStream inputStream = ReadStream(streamIndex++, memoryMappedFile, offset, multiStreamHeaderItem.Length);
            offset += multiStreamHeaderItem.Length;
            inputStreams.Add(inputStream);
        }
    }
    else
    {
        // Single-stream file: below one chunk it is opened directly as one chunk;
        // otherwise ReadStream splits the mapped file into chunks.
        FileInfo fileInfo = new FileInfo(inputFile);
        long size = fileInfo.Length;
        int chunkSize = _settings.ChunkSize;
        if (size < chunkSize)
        {
            InputStream inputStream = new InputStream(0);
            IStreamChunk inputStreamChunk = new StreamChunk(0, fileInfo.Open(FileMode.Open));
            inputStream.Chunks.Add(inputStreamChunk);
            inputStreams.Add(inputStream);
        }
        else
        {
            InputStream inputStream = ReadStream(0, memoryMappedFile, 0, size);
            inputStreams.Add(inputStream);
        }
    }

    return inputStreams;
}
/// <summary>
/// Writes a block of <paramref name="count"/> bytes at offset 0 and verifies that
/// reading it back yields the same data and count.
/// </summary>
/// <param name="count">Size of the test block in bytes.</param>
public void ReadWriteTest(int count)
{
    // Arrange: fill the buffer with random bytes. FIX: `r` was previously created
    // but never used, so the round-trip was exercised with an all-zero buffer.
    var bw = new BlockWriter(_filepath);
    var r = new Random();
    var testData = new byte[count];
    r.NextBytes(testData);
    var testblock = new Datablock { Data = testData, Count = testData.Length };

    // Act: write the block, then read it back into a fresh block.
    bw.Write(testblock, 0);
    var testblock2 = new Datablock { Data = new byte[count] };
    var br = new BlockReader(_filepath);
    br.Read(testblock2, 0, testblock.Count);

    // Assert: data and count must round-trip.
    CollectionAssert.AreEqual(testblock.Data, testblock2.Data);
    // FIX: previously compared testblock.Count to itself, which always passed.
    // NOTE(review): assumes BlockReader.Read sets the target block's Count — confirm.
    Assert.AreEqual(testblock.Count, testblock2.Count);
}
/// <summary>Exercise the BlockReader and read <paramref name="length"/> bytes.</summary>
/// <remarks>
/// Exercise the BlockReader and read length bytes.
/// It does not verify the bytes read.
/// </remarks>
/// <exception cref="System.IO.IOException"/>
public virtual void ReadAndCheckEOS(BlockReader reader, int length, bool expectEof)
{
    byte[] buffer = new byte[1024];
    // Keep pulling data until the requested number of bytes has been consumed;
    // every read must make forward progress.
    int totalRead = 0;
    while (totalRead < length)
    {
        DFSClient.Log.Info("So far read " + totalRead + " - going to read more.");
        int bytesRead = reader.Read(buffer, 0, buffer.Length);
        NUnit.Framework.Assert.IsTrue(bytesRead > 0);
        totalRead += bytesRead;
    }
    if (expectEof)
    {
        // One extra read past the end must report EOF (-1).
        DFSClient.Log.Info("Done reading, expect EOF for next read.");
        NUnit.Framework.Assert.AreEqual(-1, reader.Read(buffer, 0, buffer.Length));
    }
}
/* See {@link BlockReader#readFully(byte[], int, int)} */
/// <exception cref="System.IO.IOException"/>
public static void ReadFully(BlockReader reader, byte[] buf, int off, int len)
{
    // Loop until exactly `len` bytes have landed in `buf`. A negative return value
    // signals end-of-stream (Java InputStream semantics), which here is premature.
    int remaining = len;
    int position = off;
    while (remaining > 0)
    {
        int bytesRead = reader.Read(buf, position, remaining);
        if (bytesRead < 0)
        {
            throw new IOException("Premature EOF from inputStream");
        }
        position += bytesRead;
        remaining -= bytesRead;
    }
}
/* See {@link BlockReader#readAll(byte[], int, int)} */
/// <exception cref="System.IO.IOException"/>
public static int ReadAll(BlockReader reader, byte[] buf, int offset, int len)
{
    // Accumulate reads until `len` bytes are gathered or the reader is exhausted.
    int total = 0;
    while (true)
    {
        int bytesRead = reader.Read(buf, offset + total, len - total);
        if (bytesRead <= 0)
        {
            // Nothing gathered at all: propagate the reader's EOF/zero result as-is.
            return total == 0 ? bytesRead : total;
        }
        total += bytesRead;
        if (total >= len)
        {
            return total;
        }
    }
}
/// <summary>
/// Reads one block from the source file, compresses it, and writes the compressed
/// block to the target file.
/// </summary>
/// <param name="blockNumber">Index of the block to process.
/// NOTE(review): currently unused — both the read and the write happen at offset 0,
/// so every worker processes the same block. If distinct blocks are intended, the
/// offsets should almost certainly be derived from this parameter; confirm with the
/// caller before changing behavior.</param>
public void Work(int blockNumber)
{
    // Uncompressed block size, and the buffer capacity reserved for output that may
    // grow slightly under compression (incompressible input plus header overhead).
    // FIX: previously these were repeated magic numbers (1000000 / 1100000).
    const int BlockSize = 1000000;
    const int BufferCapacity = 1100000;

    var blockReader = new BlockReader(_srcFilePath);
    var blockArchiver = new BlockArchiver();
    var blockWriter = new BlockWriter(_trgFilePath);

    var readBlock = new Datablock { Data = new byte[BufferCapacity], Count = BlockSize };
    var writeBlock = new Datablock { Data = new byte[BufferCapacity] };

    blockReader.Read(readBlock, 0, BlockSize);
    blockArchiver.Compress(readBlock, writeBlock);
    blockWriter.Write(writeBlock, 0);
}
/// <summary>
/// Writes the multi-stream archive: the first member's GZip header is rewritten with
/// a multi-stream extra field describing every member's length, then the remaining
/// members are appended verbatim.
/// </summary>
/// <param name="outputFilePath">Destination file path; must be non-empty.</param>
/// <param name="outputQueue">Compressed members; must be non-null and non-empty.</param>
/// <param name="cancellationToken">Cooperative cancellation; honored between members.</param>
/// <exception cref="ArgumentException">When the path is empty or the queue is empty.</exception>
/// <exception cref="ArgumentNullException">When <paramref name="outputQueue"/> is null.</exception>
public void Write(string outputFilePath, OutputQueue outputQueue, CancellationToken cancellationToken)
{
    if (string.IsNullOrEmpty(outputFilePath))
    {
        throw new ArgumentException("Output file path must be non-empty", nameof(outputFilePath));
    }
    if (outputQueue == null)
    {
        // FIX: the message was previously passed as ArgumentNullException's paramName
        // argument, producing a misleading exception text.
        throw new ArgumentNullException(nameof(outputQueue), "Output queue must be non-empty");
    }
    if (outputQueue.Count == 0)
    {
        // FIX: guard added — outputQueue[0] below would otherwise fail opaquely.
        throw new ArgumentException("Output queue must be non-empty", nameof(outputQueue));
    }

    // Record the length of every member so readers can locate each stream later.
    GZipMultiStreamHeader multiStreamHeader = new GZipMultiStreamHeader();
    for (int i = 0; i < outputQueue.Count; i++)
    {
        if (cancellationToken.IsCancellationRequested)
        {
            break;
        }
        OutputWorkItem workItem = outputQueue[i];
        MultiStreamHeaderItem multiStreamHeaderItem = new MultiStreamHeaderItem
        {
            Length = workItem.OutputStream.Stream.Length
        };
        multiStreamHeader.Items.Add(multiStreamHeaderItem);
    }

    // Re-read the first member's full header so the extra field can be attached to it.
    OutputWorkItem firstOutputWorkItem = outputQueue[0];
    IBlockReader blockReader = new BlockReader();
    firstOutputWorkItem.OutputStream.Stream.Position = 0;
    Block block = blockReader.Read(firstOutputWorkItem.OutputStream.Stream, BlockFlags.All);
    if (cancellationToken.IsCancellationRequested)
    {
        return;
    }

    // Serialize twice on purpose: the first pass attaches the extra field so that
    // block.Length presumably reflects the enlarged header; the second pass
    // re-serializes after patching the first member's recorded length with that
    // final size. NOTE(review): relies on Serialize() producing the same byte
    // length on both passes — confirm against GZipMultiStreamHeader.
    block.ExtraField = multiStreamHeader.Serialize();
    multiStreamHeader.Items[0].Length = block.Length;
    block.ExtraField = multiStreamHeader.Serialize();
    block.Flags |= GZipFlags.FEXTRA;

    IGZipBlockWriter blockWriter = new BlockWriter();
    using (FileStream outputFileStream = File.Create(outputFilePath))
    {
        // The first member goes out through the block writer with the patched header…
        blockWriter.Write(outputFileStream, block, BlockFlags.All);

        // …and every remaining member is copied through byte-for-byte.
        for (int i = 1; i < outputQueue.Count; i++)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                break;
            }
            OutputWorkItem workItem = outputQueue[i];
            using (Stream compressedStream = workItem.OutputStream.Stream)
            {
                compressedStream.Position = 0;
                compressedStream.CopyTo(outputFileStream);
            }
        }
    }
}