/// <summary>
/// Decompresses bytes for one file from an archive or archive chain,
/// checking the CRC at the end.
/// </summary>
private void UnpackFileBytes(
    IUnpackStreamContext streamContext,
    string fileName,
    long compressedSize,
    long uncompressedSize,
    uint crc,
    Stream fileStream,
    Converter<Stream, Stream> compressionStreamCreator,
    ref Stream archiveStream)
{
    CrcStream crcStream = new CrcStream(fileStream);

    ConcatStream concatStream = new ConcatStream(
        delegate(ConcatStream s)
        {
            this.currentArchiveBytesProcessed = s.Source.Position;
            streamContext.CloseArchiveReadStream(
                this.currentArchiveNumber, String.Empty, s.Source);

            this.currentArchiveNumber--;
            this.OnProgress(ArchiveProgressType.FinishArchive);
            this.currentArchiveNumber += 2;
            this.currentArchiveName = null;
            this.currentArchiveBytesProcessed = 0;
            this.currentArchiveTotalBytes = 0;

            s.Source = this.OpenArchive(streamContext, this.currentArchiveNumber);

            FileStream archiveFileStream = s.Source as FileStream;
            this.currentArchiveName = (archiveFileStream != null ?
                Path.GetFileName(archiveFileStream.Name) : null);

            this.currentArchiveTotalBytes = s.Source.Length;
            this.currentArchiveNumber--;
            this.OnProgress(ArchiveProgressType.StartArchive);
            this.currentArchiveNumber++;
        });

    concatStream.Source = archiveStream;
    concatStream.SetLength(compressedSize);

    Stream decompressionStream = compressionStreamCreator(concatStream);

    try
    {
        byte[] buf = new byte[4096];
        long bytesRemaining = uncompressedSize;
        int counter = 0;
        while (bytesRemaining > 0)
        {
            int count = (int) Math.Min(buf.Length, bytesRemaining);
            count = decompressionStream.Read(buf, 0, count);
            crcStream.Write(buf, 0, count);
            bytesRemaining -= count;

            this.fileBytesProcessed += count;
            this.currentFileBytesProcessed += count;
            this.currentArchiveBytesProcessed = concatStream.Source.Position;

            if (++counter % 16 == 0) // Report every 64K
            {
                this.currentArchiveNumber--;
                this.OnProgress(ArchiveProgressType.PartialFile);
                this.currentArchiveNumber++;
            }
        }
    }
    finally
    {
        archiveStream = concatStream.Source;
    }

    crcStream.Flush();

    if (crcStream.Crc != crc)
    {
        throw new ZipException("CRC check failed for file: " + fileName);
    }
}
/// <summary>
/// Writes compressed bytes of one file to the archive,
/// keeping track of the CRC and number of bytes written.
/// </summary>
private long PackFileBytes(
    IPackStreamContext streamContext,
    Stream fileStream,
    long maxArchiveSize,
    Converter<Stream, Stream> compressionStreamCreator,
    ref Stream archiveStream,
    out uint crc)
{
    long writeStartPosition = archiveStream.Position;
    long bytesWritten = 0;
    CrcStream fileCrcStream = new CrcStream(fileStream);

    ConcatStream concatStream = new ConcatStream(
        delegate(ConcatStream s)
        {
            Stream sourceStream = s.Source;
            bytesWritten += sourceStream.Position - writeStartPosition;

            this.CheckArchiveWriteStream(
                streamContext,
                maxArchiveSize,
                1,
                ref sourceStream);

            writeStartPosition = sourceStream.Position;
            s.Source = sourceStream;
        });

    concatStream.Source = archiveStream;

    if (maxArchiveSize > 0)
    {
        concatStream.SetLength(maxArchiveSize);
    }

    Stream compressionStream = compressionStreamCreator(concatStream);

    try
    {
        byte[] buf = new byte[4096];
        long bytesRemaining = fileStream.Length;
        int counter = 0;
        while (bytesRemaining > 0)
        {
            int count = (int) Math.Min(bytesRemaining, (long) buf.Length);
            count = fileCrcStream.Read(buf, 0, count);
            if (count <= 0)
            {
                throw new ZipException(
                    "Failed to read file: " + this.currentFileName);
            }

            compressionStream.Write(buf, 0, count);
            bytesRemaining -= count;

            this.fileBytesProcessed += count;
            this.currentFileBytesProcessed += count;
            this.currentArchiveTotalBytes = concatStream.Source.Position;
            this.currentArchiveBytesProcessed = this.currentArchiveTotalBytes;

            if (++counter % 16 == 0) // Report every 64K
            {
                this.OnProgress(ArchiveProgressType.PartialFile);
            }
        }

        if (compressionStream is DeflateStream)
        {
            compressionStream.Close();
        }
        else
        {
            compressionStream.Flush();
        }
    }
    finally
    {
        archiveStream = concatStream.Source;
    }

    bytesWritten += archiveStream.Position - writeStartPosition;
    crc = fileCrcStream.Crc;
    return bytesWritten;
}
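Both methods above depend on a length-capped ConcatStream whose delegate fires at a segment boundary, so the caller can close the current archive part and swap in the next via the Source property. The following is a hypothetical minimal sketch of that contract, not the real DTF class (which tracks more state and measures the cap against the archive position rather than bytes written through the wrapper):

using System;
using System.IO;

// Hypothetical sketch of the source-swapping ConcatStream used above.
// SetLength caps the segment; the handler swaps Source at the boundary.
public class ConcatStream : Stream
{
    private readonly Action<ConcatStream> nextStreamHandler;
    private long segmentLength = long.MaxValue;
    private long remaining = long.MaxValue;

    public ConcatStream(Action<ConcatStream> nextStreamHandler)
    {
        this.nextStreamHandler = nextStreamHandler;
    }

    public Stream Source { get; set; }

    public override void SetLength(long value)
    {
        this.segmentLength = value;
        this.remaining = value;
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        // Never read past the configured total; when the current source
        // hits EOF early, ask the handler to swap in the next part.
        count = (int)Math.Min(count, this.remaining);
        int read = this.Source.Read(buffer, offset, count);
        if (read == 0 && count > 0)
        {
            this.nextStreamHandler(this);
            read = this.Source.Read(buffer, offset, count);
        }
        this.remaining -= read;
        return read;
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        // Split writes that would overflow the current segment.
        while (count > 0)
        {
            int chunk = (int)Math.Min(count, this.remaining);
            if (chunk == 0)
            {
                this.nextStreamHandler(this);        // swap Source
                this.remaining = this.segmentLength; // fresh cap for the new part
                continue;
            }
            this.Source.Write(buffer, offset, chunk);
            offset += chunk;
            count -= chunk;
            this.remaining -= chunk;
        }
    }

    public override void Flush() { this.Source.Flush(); }
    public override bool CanRead { get { return true; } }
    public override bool CanWrite { get { return true; } }
    public override bool CanSeek { get { return false; } }
    public override long Length { get { return this.Source.Length; } }
    public override long Position
    {
        get { return this.Source.Position; }
        set { this.Source.Position = value; }
    }
    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }
}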
/// <summary>
/// Creates a new stream that contains the XVA image.
/// </summary>
/// <returns>The new stream.</returns>
public override SparseStream Build()
{
    TarFileBuilder tarBuilder = new TarFileBuilder();

    int[] diskIds;
    string ovaFileContent = GenerateOvaXml(out diskIds);
    tarBuilder.AddFile("ova.xml", Encoding.ASCII.GetBytes(ovaFileContent));

    int diskIdx = 0;
    foreach (DiskRecord diskRec in _disks)
    {
        SparseStream diskStream = diskRec.Item2;
        List<StreamExtent> extents = new List<StreamExtent>(diskStream.Extents);

        int lastChunkAdded = -1;
        foreach (StreamExtent extent in extents)
        {
            int firstChunk = (int)(extent.Start / Sizes.OneMiB);
            int lastChunk = (int)((extent.Start + extent.Length - 1) / Sizes.OneMiB);

            for (int i = firstChunk; i <= lastChunk; ++i)
            {
                if (i != lastChunkAdded)
                {
                    Stream chunkStream;
                    long diskBytesLeft = diskStream.Length - i * Sizes.OneMiB;
                    if (diskBytesLeft < Sizes.OneMiB)
                    {
                        // Final partial chunk: pad to 1 MiB with zeros.
                        chunkStream = new ConcatStream(
                            Ownership.Dispose,
                            new SubStream(diskStream, i * Sizes.OneMiB, diskBytesLeft),
                            new ZeroStream(Sizes.OneMiB - diskBytesLeft));
                    }
                    else
                    {
                        chunkStream = new SubStream(diskStream, i * Sizes.OneMiB, Sizes.OneMiB);
                    }

                    Stream chunkHashStream;
#if NETCORE
                    IncrementalHash hashAlgCore = IncrementalHash.CreateHash(HashAlgorithmName.SHA1);
                    chunkHashStream = new HashStreamCore(chunkStream, Ownership.Dispose, hashAlgCore);
#else
                    HashAlgorithm hashAlgDotnet = new SHA1Managed();
                    chunkHashStream = new HashStreamDotnet(chunkStream, Ownership.Dispose, hashAlgDotnet);
#endif

                    tarBuilder.AddFile(
                        string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}", diskIds[diskIdx], i),
                        chunkHashStream);

                    byte[] hash;
#if NETCORE
                    hash = hashAlgCore.GetHashAndReset();
#else
                    hashAlgDotnet.TransformFinalBlock(new byte[0], 0, 0);
                    hash = hashAlgDotnet.Hash;
#endif

                    string hashString = BitConverter.ToString(hash).Replace("-", "").ToLower();
                    byte[] hashStringAscii = Encoding.ASCII.GetBytes(hashString);

                    tarBuilder.AddFile(
                        string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}.checksum", diskIds[diskIdx], i),
                        hashStringAscii);

                    lastChunkAdded = i;
                }
            }
        }

        // Make sure the last chunk is present, filled with zeros if necessary.
        int lastActualChunk = (int)((diskStream.Length - 1) / Sizes.OneMiB);
        if (lastChunkAdded < lastActualChunk)
        {
            Stream chunkStream = new ZeroStream(Sizes.OneMiB);

            Stream chunkHashStream;
#if NETCORE
            IncrementalHash hashAlgCore = IncrementalHash.CreateHash(HashAlgorithmName.SHA1);
            chunkHashStream = new HashStreamCore(chunkStream, Ownership.Dispose, hashAlgCore);
#else
            HashAlgorithm hashAlgDotnet = new SHA1Managed();
            chunkHashStream = new HashStreamDotnet(chunkStream, Ownership.Dispose, hashAlgDotnet);
#endif

            tarBuilder.AddFile(
                string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}", diskIds[diskIdx], lastActualChunk),
                chunkHashStream);

            byte[] hash;
#if NETCORE
            hash = hashAlgCore.GetHashAndReset();
#else
            hashAlgDotnet.TransformFinalBlock(new byte[0], 0, 0);
            hash = hashAlgDotnet.Hash;
#endif

            string hashString = BitConverter.ToString(hash).Replace("-", "").ToLower();
            byte[] hashStringAscii = Encoding.ASCII.GetBytes(hashString);

            tarBuilder.AddFile(
                string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}.checksum", diskIds[diskIdx], lastActualChunk),
                hashStringAscii);
        }

        ++diskIdx;
    }

    return tarBuilder.Build();
}
public static void Main(string[] args)
{
    systems = new NonLinearSystem[16];
    nudges = new byte[systems.Length];
    for (ushort i = 0; i < systems.Length; i++)
    {
        systems[i] = new Ball();
        nudges[i] = (byte) 0;
    }

    action = args[0].ToLower();
    inFile = args[1];
    outFile = args[2];

    FileStream inStream = new FileStream(inFile, FileMode.Open);
    FileStream outStream = new FileStream(outFile, FileMode.Create);

    inBuffer = new byte[256];
    outBuffer = new byte[inBuffer.Length];
    /*
     * N.B. each time the ConvertBuffer function is called, it causes a
     * slight break in the plaintext nudging, because it treats the
     * buffers as if they were the beginning of the stream. A smaller
     * buffer uses less memory but also reduces the effect of the
     * plaintext on the PRNG stream.
     */

    int MACLength;
    try
    {
        MACLength = Math.Max(0, int.Parse(args[3]));
    }
    catch (Exception)
    {
        Console.WriteLine("No MAC length provided. Using default of 64 bytes.");
        MACLength = 64;
    }

    switch (action)
    {
        case "enc":
            Console.Write("Key: ");
            key = ReadBlind();
            Console.Write("Confirm key: ");
            if (!CompareSecureStrings(key, ReadBlind()))
            {
                Console.WriteLine("Keys do not match");
                return;
            }

            RNGCryptoServiceProvider rng = new RNGCryptoServiceProvider();
            rng.GetBytes(iv);
            outStream.Write(iv, 0, iv.Length);

            /*
             * ### See the note after the in/outBuffer initialisation. ###
             * Because of those plaintext breaks, the null-byte
             * authentication block has to be concatenated with the
             * inStream, so that the PRNG output stays consistent with
             * the one generated when deciphering.
             */
            ConcatStream MACStream = new ConcatStream();
            MACStream.Push(inStream);
            MACStream.Push(new MemoryStream(new byte[MACLength]));
            Cipher(MACStream, outStream, inBuffer);
            break;

        case "dec":
            Console.Write("Key: ");
            key = ReadBlind();
            inStream.Read(iv, 0, iv.Length);
            Cipher(inStream, outStream, outBuffer);

            /*
             * N.B. if DEciphering is performed with a MACLength shorter
             * than the one used for ENciphering, the integrity of the
             * MAC is compromised: the check may return true even if the
             * ciphertext was altered.
             */
            outStream.Seek(-MACLength, SeekOrigin.End);
            byte[] MACbuffer = new byte[MACLength];
            outStream.Read(MACbuffer, 0, MACLength);
            outStream.SetLength(outStream.Length - MACLength);

            foreach (byte b in MACbuffer)
            {
                if ((int) b > 0)
                {
                    Console.WriteLine("*** ERROR ***");
                    Console.WriteLine("Message authentication failed!");
                    Console.WriteLine("Either you provided the wrong decryption settings or the encrypted message was altered.");
                    return;
                }
            }
            Console.WriteLine("Message authenticated. It is *unlikely* that the encrypted message was altered.");
            break;
    }
}
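The scheme above authenticates by appending MACLength zero bytes to the plaintext before enciphering; after deciphering, any nonzero byte in that tail signals tampering or wrong settings. The tail check in isolation, with the surrounding cipher elided (TailIsZero is a hypothetical helper for illustration):

// Toy illustration of the trailing zero-byte MAC check. Returns true when
// the last macLength bytes of the deciphered output are all zero.
static bool TailIsZero(byte[] decrypted, int macLength)
{
    for (int i = decrypted.Length - macLength; i < decrypted.Length; i++)
    {
        if (decrypted[i] != 0)
        {
            return false; // tail didn't decrypt back to zeros
        }
    }
    return true;
}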
/// <summary>
/// Creates a new stream that contains the XVA image.
/// </summary>
/// <returns>The new stream.</returns>
public override SparseStream Build()
{
    TarFileBuilder tarBuilder = new TarFileBuilder();

    int[] diskIds;
    string ovaFileContent = GenerateOvaXml(out diskIds);
    tarBuilder.AddFile("ova.xml", Encoding.ASCII.GetBytes(ovaFileContent));

    int diskIdx = 0;
    foreach (var diskRec in _disks)
    {
        SparseStream diskStream = diskRec.Second;
        List<StreamExtent> extents = new List<StreamExtent>(diskStream.Extents);

        int lastChunkAdded = -1;
        foreach (StreamExtent extent in extents)
        {
            int firstChunk = (int)(extent.Start / Sizes.OneMiB);
            int lastChunk = (int)((extent.Start + extent.Length - 1) / Sizes.OneMiB);

            for (int i = firstChunk; i <= lastChunk; ++i)
            {
                if (i != lastChunkAdded)
                {
                    HashAlgorithm hashAlg = new SHA1Managed();

                    Stream chunkStream;
                    long diskBytesLeft = diskStream.Length - (i * Sizes.OneMiB);
                    if (diskBytesLeft < Sizes.OneMiB)
                    {
                        // Final partial chunk: pad to 1 MiB with zeros.
                        chunkStream = new ConcatStream(
                            Ownership.Dispose,
                            new SubStream(diskStream, i * Sizes.OneMiB, diskBytesLeft),
                            new ZeroStream(Sizes.OneMiB - diskBytesLeft));
                    }
                    else
                    {
                        chunkStream = new SubStream(diskStream, i * Sizes.OneMiB, Sizes.OneMiB);
                    }

                    HashStream chunkHashStream = new HashStream(chunkStream, Ownership.Dispose, hashAlg);

                    tarBuilder.AddFile(
                        string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}", diskIds[diskIdx], i),
                        chunkHashStream);
                    tarBuilder.AddFile(
                        string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}.checksum", diskIds[diskIdx], i),
                        new ChecksumStream(hashAlg));

                    lastChunkAdded = i;
                }
            }
        }

        // Make sure the last chunk is present, filled with zeros if necessary.
        int lastActualChunk = (int)((diskStream.Length - 1) / Sizes.OneMiB);
        if (lastChunkAdded < lastActualChunk)
        {
            HashAlgorithm hashAlg = new SHA1Managed();
            Stream chunkStream = new ZeroStream(Sizes.OneMiB);
            HashStream chunkHashStream = new HashStream(chunkStream, Ownership.Dispose, hashAlg);

            tarBuilder.AddFile(
                string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}", diskIds[diskIdx], lastActualChunk),
                chunkHashStream);
            tarBuilder.AddFile(
                string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}.checksum", diskIds[diskIdx], lastActualChunk),
                new ChecksumStream(hashAlg));
        }

        ++diskIdx;
    }

    return tarBuilder.Build();
}
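Both XVA builders above use the same padding idiom: when the final chunk falls short of 1 MiB, a ZeroStream tops it up through ConcatStream. The idiom in isolation, as a hypothetical helper; it assumes the DiscUtils-style ConcatStream(Ownership, params streams) constructor, ZeroStream, and Sizes.OneMiB used above, and a source no longer than 1 MiB:

using System.IO;

// Zero-fill the tail so the chunk reads as exactly 1 MiB, as in Build().
// Hypothetical helper for illustration; assumes source.Length <= Sizes.OneMiB.
static Stream PadToOneMiB(SparseStream source)
{
    return new ConcatStream(
        Ownership.Dispose,
        source,
        new ZeroStream(Sizes.OneMiB - source.Length));
}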
public override Stream OpenEntry(ArcFile arc, Entry entry)
{
    var parc = arc as PazArchiveBase;
    var pent = entry as PazEntry;
    if (null == parc || null == pent)
    {
        return base.OpenEntry(arc, entry);
    }

    Stream input = null;
    try
    {
        // Stitch together the slices of each archive part that contain
        // this entry, chaining them with ConcatStream.
        long part_offset = 0;
        long entry_start = pent.Offset;
        long entry_end = pent.Offset + pent.AlignedSize;
        foreach (var part in parc.Parts)
        {
            long part_end_offset = part_offset + part.MaxOffset;
            if (entry_start < part_end_offset)
            {
                uint part_size = (uint)Math.Min(entry_end - entry_start, part_end_offset - entry_start);
                var entry_part = part.CreateStream(entry_start - part_offset, part_size);
                if (input != null)
                {
                    input = new ConcatStream(input, entry_part);
                }
                else
                {
                    input = entry_part;
                }
                entry_start += part_size;
                if (entry_start >= entry_end)
                {
                    break;
                }
            }
            part_offset = part_end_offset;
        }
        if (null == input)
        {
            return Stream.Null;
        }

        if (parc.XorKey != 0)
        {
            input = new XoredStream(input, parc.XorKey);
        }
        input = parc.DecryptEntry(input, pent);
        if (pent.Size < pent.AlignedSize)
        {
            input = new LimitStream(input, pent.Size);
        }
        if (pent.IsPacked)
        {
            input = new ZLibStream(input, CompressionMode.Decompress);
        }
        return input;
    }
    catch
    {
        if (input != null)
        {
            input.Dispose();
        }
        throw;
    }
}
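OpenEntry builds a left-nested chain of two-stream ConcatStreams, one link per archive part the entry spans. The same pattern in miniature, assuming the two-argument ConcatStream(Stream, Stream) constructor used above (ChainParts is a hypothetical helper for illustration):

using System.Collections.Generic;
using System.IO;

// Chain several part streams into one logical entry stream,
// left-nested exactly as in OpenEntry.
static Stream ChainParts(IEnumerable<Stream> parts)
{
    Stream input = null;
    foreach (Stream part in parts)
    {
        input = (input == null) ? part : new ConcatStream(input, part);
    }
    return input ?? Stream.Null;
}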
public void EmptyStreamPosition()
{
    var cs = new ConcatStream();
    Assert.That(cs.Position, Is.Zero);
}
public void EmptyStreamLength()
{
    var cs = new ConcatStream();
    Assert.That(cs.Length, Is.Zero);
}
public void EmptyStreamCanWrite()
{
    var cs = new ConcatStream();
    Assert.That(cs.CanWrite, Is.False);
}
public void EmptyStreamCanTimeout()
{
    var cs = new ConcatStream();
    Assert.That(cs.CanTimeout, Is.False);
}
public void EmptyStreamCanSeek()
{
    var cs = new ConcatStream();
    Assert.That(cs.CanSeek, Is.True);
}
public void EmptyStreamReadByte()
{
    var cs = new ConcatStream();
    Assert.That(cs.ReadByte(), Is.EqualTo(-1));
}
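The six tests above pin down the empty-stream defaults. A minimal hypothetical sketch that would satisfy them, including the Push method used by the cipher tool earlier (a real implementation would also reposition the underlying segments when seeking):

using System;
using System.Collections.Generic;
using System.IO;

// Hypothetical minimal ConcatStream satisfying the tests above. An empty
// chain has Length 0, Position 0, reads nothing, and is not writable.
public class ConcatStream : Stream
{
    private readonly List<Stream> segments = new List<Stream>();
    private int current;
    private long position;

    // Appends a stream to the end of the chain (read in FIFO order).
    public void Push(Stream stream)
    {
        segments.Add(stream);
    }

    public override bool CanRead { get { return true; } }
    public override bool CanSeek { get { return true; } }   // EmptyStreamCanSeek
    public override bool CanWrite { get { return false; } } // EmptyStreamCanWrite
    // CanTimeout is not overridden: Stream's default is false,
    // which is what EmptyStreamCanTimeout expects.

    public override long Length
    {
        get
        {
            long total = 0;
            foreach (Stream s in segments)
            {
                total += s.Length;
            }
            return total; // 0 when empty: EmptyStreamLength
        }
    }

    public override long Position
    {
        get { return position; } // 0 before any read: EmptyStreamPosition
        set { Seek(value, SeekOrigin.Begin); }
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        while (current < segments.Count)
        {
            int read = segments[current].Read(buffer, offset, count);
            if (read > 0)
            {
                position += read;
                return read;
            }
            current++; // segment exhausted; move to the next one
        }
        return 0; // end of chain, so ReadByte() returns -1
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        // Sketch-level seek: tracks the logical position only.
        position = origin == SeekOrigin.Begin ? offset
                 : origin == SeekOrigin.Current ? position + offset
                 : Length + offset;
        return position;
    }

    public override void Flush() { }
    public override void SetLength(long value) { throw new NotSupportedException(); }
    public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); }
}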
public Stream GetStream(Context context)
{
    if (Encryption)
    {
        throw new IOException("Extent encryption is not supported");
    }

    Stream stream;
    switch (Type)
    {
        case ExtentDataType.Inline:
            byte[] data = InlineData;
            stream = new MemoryStream(data);
            break;
        case ExtentDataType.Regular:
            var address = ExtentAddress;
            if (address == 0)
            {
                // A hole: reads back as zeros.
                stream = new ZeroStream((long)LogicalSize);
            }
            else
            {
                var physicalAddress = context.MapToPhysical(address);
                stream = new SubStream(context.RawStream, Ownership.None,
                    (long)(physicalAddress + ExtentOffset), (long)ExtentSize);
            }
            break;
        case ExtentDataType.PreAlloc:
            throw new NotImplementedException();
        default:
            throw new IOException("invalid extent type");
    }

    switch (Compression)
    {
        case ExtentDataCompression.None:
            break;
        case ExtentDataCompression.Zlib:
        {
            var zlib = new ZlibStream(stream, CompressionMode.Decompress, false);
            var sparse = SparseStream.FromStream(zlib, Ownership.Dispose);
            var length = new LengthWrappingStream(sparse, (long)LogicalSize, Ownership.Dispose);
            stream = new PositionWrappingStream(length, 0, Ownership.Dispose);
            break;
        }
        case ExtentDataCompression.Lzo:
        {
            // btrfs LZO framing: a little-endian u32 total length, then
            // repeated (u32 segment length, compressed bytes) records,
            // each segment decompressing to at most 4 KiB.
            var buffer = StreamUtilities.ReadExact(stream, sizeof(uint));
            var totalLength = EndianUtilities.ToUInt32LittleEndian(buffer, 0);
            long processed = sizeof(uint);
            var parts = new List<SparseStream>();
            var remaining = (long)LogicalSize;
            while (processed < totalLength)
            {
                stream.Position = processed;
                StreamUtilities.ReadExact(stream, buffer, 0, sizeof(uint));
                var partLength = EndianUtilities.ToUInt32LittleEndian(buffer, 0);
                processed += sizeof(uint);
                var part = new SubStream(stream, Ownership.Dispose, processed, partLength);
                var uncompressed = new SeekableLzoStream(part, CompressionMode.Decompress, false);
                uncompressed.SetLength(Math.Min(Sizes.OneKiB * 4, remaining));
                remaining -= uncompressed.Length;
                parts.Add(SparseStream.FromStream(uncompressed, Ownership.Dispose));
                processed += partLength;
            }
            stream = new ConcatStream(Ownership.Dispose, parts.ToArray());
            break;
        }
        default:
            throw new IOException($"Unsupported extent compression ({Compression})");
    }
    return stream;
}
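The Lzo branch above parses a frame whose leading u32 counts the whole frame, header included: the loop starts with processed = 4 and stops once processed reaches totalLength. A sketch that writes one such frame for a single pre-compressed segment, e.g. to feed test data through the parser (BuildLzoFrame is a hypothetical helper; the payload is assumed to be LZO1X data that inflates to at most 4 KiB):

using System.IO;

// Write one btrfs-style LZO frame containing a single segment.
static byte[] BuildLzoFrame(byte[] compressed)
{
    MemoryStream ms = new MemoryStream();
    BinaryWriter writer = new BinaryWriter(ms); // writes little-endian
    writer.Write((uint)(sizeof(uint) + sizeof(uint) + compressed.Length)); // total, incl. this field
    writer.Write((uint)compressed.Length); // segment length
    writer.Write(compressed);              // the LZO1X payload
    writer.Flush();
    return ms.ToArray();
}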