public void Read_WithFormat_Works(string path, LzmaFormat format)
{
    using (Stream input = File.OpenRead(path))
    using (XZInputStream xz = new XZInputStream(input, format))
    {
        byte[] output = new byte[128];

        // The fixture decompresses to exactly the 14-byte string "Hello, World!\n".
        int firstRead = xz.Read(output, 0, 128);
        Assert.Equal(14, firstRead);

        // A follow-up read past end-of-stream must report EOF (0 bytes).
        Assert.Equal(0, xz.Read(output, 0, 128));
        Assert.Equal("Hello, World!\n", Encoding.UTF8.GetString(output, 0, 14));

        // After disposal, Read must throw; the outer 'using' tolerates double-dispose.
        xz.Dispose();
        Assert.Throws<ObjectDisposedException>(() => xz.Read(output, 0, 128));
    }
}
public void Read_Partial_Works(string path)
{
    using (Stream input = File.OpenRead(path))
    using (XZInputStream xz = new XZInputStream(input))
    {
        byte[] output = new byte[128];

        // Consume the 14-byte payload "Hello, World!\n" in two 7-byte chunks.
        Assert.Equal(7, xz.Read(output, 0, 7));
        Assert.Equal(7, xz.Read(output, 7, 7));

        // Stream is exhausted; a further read reports EOF.
        Assert.Equal(0, xz.Read(output, 14, 114));
        Assert.Equal("Hello, World!\n", Encoding.UTF8.GetString(output, 0, 14));

        // Reading a disposed stream must throw.
        xz.Dispose();
        Assert.Throws<ObjectDisposedException>(() => xz.Read(output, 0, 128));
    }
}
public void CompressDecompressTextTest()
{
    const string archivePath = @"XZOutputStreamTests_CompressTextTest.txt.xz";
    const string payload = "Hello, World! I am your XZ compressed file stream. Did we decompress correctly?";

    // Round-trip: compress the payload to an .xz file on disk...
    using (FileStream output = File.Open(archivePath, FileMode.Create, FileAccess.Write, FileShare.None))
    using (XZOutputStream compressor = new XZOutputStream(output))
    {
        byte[] raw = Encoding.UTF8.GetBytes(payload);
        compressor.Write(raw, 0, raw.Length);
    }

    // ...then read it back and verify the decompressed text matches exactly.
    using (FileStream input = File.Open(archivePath, FileMode.Open, FileAccess.Read, FileShare.None))
    using (XZInputStream decompressor = new XZInputStream(input))
    {
        byte[] raw = new byte[1024];
        int count = decompressor.Read(raw, 0, raw.Length);
        Assert.Equal(payload, Encoding.UTF8.GetString(raw, 0, count));
    }
}
/// <summary>
/// Parses the "EncodedFile-{dirName}-{fileName}" section of a script, validates its
/// footers/checksums, and (when valid) decompresses the body into RawBodyStream.
/// Validation state is exposed via FinalFooterValid / FirstFooterValid /
/// CompressedBodyValid / RawBodyValid; parsing stops early on the first invalid stage.
/// </summary>
/// <exception cref="FileDecodeFailException">Section missing or encoded lines malformed.</exception>
public EncodedFileInfo(Script p, string dirName, string fileName)
{
    string section = $"EncodedFile-{dirName}-{fileName}";
    if (p.Sections.ContainsKey(section) == false)
    {
        throw new FileDecodeFailException($"[{dirName}\\{fileName}] does not exists in [{p.FullPath}]");
    }

    List<string> encodedList = p.Sections[$"EncodedFile-{dirName}-{fileName}"].GetLinesOnce();
    // First line must be "lines=<n>"; GetKeyValueFromLine returns true on parse failure.
    if (Ini.GetKeyValueFromLine(encodedList[0], out string key, out string value))
    {
        throw new FileDecodeFailException("Encoded lines are malformed");
    }

    // [Stage 1] Concat sliced base64-encoded lines into one string
    byte[] decoded;
    {
        int.TryParse(value, out int blockCount);
        encodedList.RemoveAt(0); // Remove "lines=n"

        // Each line is 64KB block
        if (Ini.GetKeyValueFromLines(encodedList, out List<string> keys, out List<string> base64Blocks))
        {
            throw new FileDecodeFailException("Encoded lines are malformed");
        }

        StringBuilder b = new StringBuilder();
        foreach (string block in base64Blocks)
        {
            b.Append(block);
        }

        // Restore base64 '=' padding; a remainder of 1 is impossible in valid base64.
        switch (b.Length % 4)
        {
            case 0:
                break;
            case 1:
                throw new FileDecodeFailException("Encoded lines are malformed");
            case 2:
                b.Append("==");
                break;
            case 3:
                b.Append("=");
                break;
        }

        decoded = Convert.FromBase64String(b.ToString());
    }

    // [Stage 2] Read final footer
    const int finalFooterLen = 0x24;
    int finalFooterIdx = decoded.Length - finalFooterLen;
    // 0x00 - 0x04 : 4B -> CRC32 of everything before the final footer
    uint full_crc32 = BitConverter.ToUInt32(decoded, finalFooterIdx + 0x00);
    // 0x0C - 0x0F : 4B -> Zlib Compressed Footer Length
    int compressedFooterLen = (int)BitConverter.ToUInt32(decoded, finalFooterIdx + 0x0C);
    int compressedFooterIdx = decoded.Length - (finalFooterLen + compressedFooterLen);
    // 0x10 - 0x17 : 8B -> Zlib Compressed File Length
    int compressedBodyLen = (int)BitConverter.ToUInt64(decoded, finalFooterIdx + 0x10);

    // [Stage 3] Validate final footer
    this.FinalFooterValid = true;
    if (compressedBodyLen != compressedFooterIdx)
    {
        this.FinalFooterValid = false;
    }
    uint calcFull_crc32 = Crc32Checksum.Crc32(decoded, 0, finalFooterIdx);
    if (full_crc32 != calcFull_crc32)
    {
        this.FinalFooterValid = false;
    }
    if (this.FinalFooterValid == false)
    {
        return;
    }

    // [Stage 4] Decompress first footer
    byte[] rawFooter;
    using (MemoryStream rawFooterStream = new MemoryStream())
    {
        using (MemoryStream ms = new MemoryStream(decoded, compressedFooterIdx, compressedFooterLen))
        using (ZLibStream zs = new ZLibStream(ms, CompressionMode.Decompress, CompressionLevel.Default))
        {
            zs.CopyTo(rawFooterStream);
        }
        rawFooter = rawFooterStream.ToArray();
    }

    // [Stage 5] Read first footer
    this.FirstFooterValid = true;
    // 0x200 - 0x207 : 8B -> Length of raw file, in little endian
    int rawBodyLen = (int)BitConverter.ToUInt32(rawFooter, 0x200);
    // 0x208 - 0x20F : 8B -> Length of zlib-compressed file, in little endian
    // Note: In Type 2, 0x208 entry is null-padded
    int compressedBodyLen2 = (int)BitConverter.ToUInt32(rawFooter, 0x208);
    // 0x220 - 0x223 : 4B -> CRC32C Checksum of zlib-compressed file
    uint compressedBody_crc32 = BitConverter.ToUInt32(rawFooter, 0x220);
    // 0x224 : 1B -> Compress Mode (Type 1 : 00, Type 2 : 01)
    byte compMode = rawFooter[0x224];
    // 0x225 : 1B -> ZLib Compress Level (Type 1 : 01~09, Type 2 : 00)
    byte compLevel = rawFooter[0x225];

    // [Stage 6] Validate first footer
    if (compMode == 0)
    {
        this.Mode = EncodedFile.EncodeMode.ZLib;
        if (compLevel < 1 || 9 < compLevel)
        {
            this.FirstFooterValid = false;
        }
        if (compressedBodyLen2 == 0 || (compressedBodyLen2 != compressedBodyLen))
        {
            this.FirstFooterValid = false;
        }
    }
    else if (compMode == 1)
    {
        this.Mode = EncodedFile.EncodeMode.Raw;
        if (compLevel != 0)
        {
            this.FirstFooterValid = false;
        }
        if (compressedBodyLen2 != 0)
        {
            this.FirstFooterValid = false;
        }
    }
    else // Wrong compMode
    {
        this.FirstFooterValid = false;
    }
    if (this.FirstFooterValid == false)
    {
        return;
    }

    // [Stage 7] Decompress body
    switch ((EncodedFile.EncodeMode)compMode)
    {
        case EncodedFile.EncodeMode.ZLib:
        {
            this.RawBodyStream = new MemoryStream();
            using (MemoryStream ms = new MemoryStream(decoded, 0, compressedBodyLen))
            using (ZLibStream zs = new ZLibStream(ms, CompressionMode.Decompress, CompressionLevel.Default))
            {
                zs.CopyTo(this.RawBodyStream);
            }
            this.CompressedBodyValid = true;
        }
        break;
        case EncodedFile.EncodeMode.Raw:
        {
            this.RawBodyStream = new MemoryStream(decoded, 0, rawBodyLen);
            this.CompressedBodyValid = true;
        }
        break;
        case EncodedFile.EncodeMode.XZ:
        {
            // BUGFIX: this branch previously copied into RawBodyStream without ever
            // allocating it, which would throw NullReferenceException. Allocate it
            // first, mirroring the ZLib branch. (Note: Stage 6 currently only accepts
            // compMode 0 or 1, so this branch is unreachable until XZ validation is
            // added there — kept correct for that future case.)
            this.RawBodyStream = new MemoryStream();
            using (MemoryStream ms = new MemoryStream(decoded, 0, compressedBodyLen))
            using (XZInputStream xzs = new XZInputStream(ms))
            {
                xzs.CopyTo(this.RawBodyStream);
            }
            this.CompressedBodyValid = true;
        }
        break;
        default:
            throw new InternalException($"Wrong EncodeMode [{compMode}]");
    }
    this.RawBodyStream.Position = 0;

    // [Stage 8] Validate decompressed body
    // NOTE(review): the footer field is documented as the CRC32 of the *compressed*
    // body, yet it is compared against the decompressed bytes here — this matches the
    // sibling Decode() method, but confirm against the on-disk format.
    this.RawBodyValid = true;
    uint calcCompBody_crc32 = Crc32Checksum.Crc32(RawBodyStream.ToArray());
    if (compressedBody_crc32 != calcCompBody_crc32)
    {
        this.RawBodyValid = false;
    }

    // [Stage 9] Rewind the body stream for the caller
    this.RawBodyStream.Position = 0;
}
// Decodes a base64 "EncodedFile" section body into the original file content.
// Validates both footers and the body checksum, throwing FileDecodeFailException
// on any structural or checksum failure. The returned MemoryStream is positioned
// at 0 and is owned by the caller.
private static MemoryStream Decode(List<string> encodedList)
{
    // First line must be "lines=<n>"; GetKeyValueFromLine returns true on parse failure.
    if (Ini.GetKeyValueFromLine(encodedList[0], out string key, out string value))
    {
        throw new FileDecodeFailException("Encoded lines are malformed");
    }

    // [Stage 1] Concat sliced base64-encoded lines into one string
    byte[] decoded;
    {
        // NOTE(review): blockCount is parsed but never validated against the actual
        // number of blocks — confirm whether a count check was intended here.
        int.TryParse(value, out int blockCount);
        encodedList.RemoveAt(0); // Remove "lines=n"

        // Each line is 64KB block
        if (Ini.GetKeyValueFromLines(encodedList, out List<string> keys, out List<string> base64Blocks))
        {
            throw new FileDecodeFailException("Encoded lines are malformed");
        }

        StringBuilder b = new StringBuilder();
        foreach (string block in base64Blocks)
        {
            b.Append(block);
        }

        // Restore base64 '=' padding; a remainder of 1 is impossible in valid base64.
        switch (b.Length % 4)
        {
            case 0:
                break;
            case 1:
                throw new FileDecodeFailException("Encoded lines are malformed");
            case 2:
                b.Append("==");
                break;
            case 3:
                b.Append("=");
                break;
        }

        decoded = Convert.FromBase64String(b.ToString());
    }

    // [Stage 2] Read final footer
    const int finalFooterLen = 0x24;
    int finalFooterIdx = decoded.Length - finalFooterLen;
    // 0x00 - 0x04 : 4B -> CRC32
    uint full_crc32 = BitConverter.ToUInt32(decoded, finalFooterIdx + 0x00);
    // 0x0C - 0x0F : 4B -> Zlib Compressed Footer Length
    int compressedFooterLen = (int)BitConverter.ToUInt32(decoded, finalFooterIdx + 0x0C);
    int compressedFooterIdx = decoded.Length - (finalFooterLen + compressedFooterLen);
    // 0x10 - 0x17 : 8B -> Zlib Compressed File Length
    int compressedBodyLen = (int)BitConverter.ToUInt64(decoded, finalFooterIdx + 0x10);

    // [Stage 3] Validate final footer
    if (compressedBodyLen != compressedFooterIdx)
    {
        throw new FileDecodeFailException($"Encoded file is corrupted");
    }
    uint calcFull_crc32 = Crc32Checksum.Crc32(decoded, 0, finalFooterIdx);
    if (full_crc32 != calcFull_crc32)
    {
        throw new FileDecodeFailException($"Encoded file is corrupted");
    }

    // [Stage 4] Decompress first footer
    byte[] rawFooter;
    using (MemoryStream rawFooterStream = new MemoryStream())
    {
        using (MemoryStream ms = new MemoryStream(decoded, compressedFooterIdx, compressedFooterLen))
        using (ZLibStream zs = new ZLibStream(ms, CompressionMode.Decompress, CompressionLevel.Default))
        {
            zs.CopyTo(rawFooterStream);
        }
        rawFooter = rawFooterStream.ToArray();
    }

    // [Stage 5] Read first footer
    // 0x200 - 0x207 : 8B -> Length of raw file, in little endian
    int rawBodyLen = BitConverter.ToInt32(rawFooter, 0x200);
    // 0x208 - 0x20F : 8B -> Length of zlib-compressed file, in little endian
    // Note: In Type 2, 0x208 entry is null - padded
    int compressedBodyLen2 = BitConverter.ToInt32(rawFooter, 0x208);
    // 0x220 - 0x223 : 4B -> CRC32C Checksum of zlib-compressed file
    uint compressedBody_crc32 = BitConverter.ToUInt32(rawFooter, 0x220);
    // 0x224 : 1B -> Compress Mode (Type 1 : 00, Type 2 : 01)
    byte compMode = rawFooter[0x224];
    // 0x225 : 1B -> ZLib Compress Level (Type 1 : 01~09, Type 2 : 00)
    byte compLevel = rawFooter[0x225];

    // [Stage 6] Validate first footer
    switch ((EncodeMode)compMode)
    {
        case EncodeMode.ZLib: // Type 1, zlib
        {
            if (compressedBodyLen2 == 0 || (compressedBodyLen2 != compressedBodyLen))
            {
                throw new FileDecodeFailException($"Encoded file is corrupted: compMode");
            }
            if (compLevel < 1 || 9 < compLevel)
            {
                throw new FileDecodeFailException($"Encoded file is corrupted: compLevel");
            }
        }
        break;
        case EncodeMode.Raw: // Type 2, raw
        {
            if (compressedBodyLen2 != 0)
            {
                throw new FileDecodeFailException($"Encoded file is corrupted: compMode");
            }
            if (compLevel != 0)
            {
                throw new FileDecodeFailException($"Encoded file is corrupted: compLevel");
            }
        }
        break;
        case EncodeMode.XZ: // Type 3, LZMA
        {
            if (compressedBodyLen2 == 0 || (compressedBodyLen2 != compressedBodyLen))
            {
                throw new FileDecodeFailException($"Encoded file is corrupted: compMode");
            }
            if (compLevel < 1 || 9 < compLevel)
            {
                throw new FileDecodeFailException($"Encoded file is corrupted: compLevel");
            }
        }
        break;
        default:
            throw new FileDecodeFailException($"Encoded file is corrupted: compMode");
    }

    // [Stage 7] Decompress body
    MemoryStream rawBodyStream = new MemoryStream(); // This stream should be alive even after this method returns
    switch ((EncodeMode)compMode)
    {
        case EncodeMode.ZLib: // Type 1, zlib
        {
            using (MemoryStream ms = new MemoryStream(decoded, 0, compressedBodyLen))
            using (ZLibStream zs = new ZLibStream(ms, CompressionMode.Decompress, false))
            {
                zs.CopyTo(rawBodyStream);
            }
        }
        break;
        case EncodeMode.Raw: // Type 2, raw
        {
            // Copy into the shared stream instead of wrapping 'decoded' directly,
            // so the returned stream is writable/expandable like the other cases.
            // rawBodyStream = new MemoryStream(decoded, 0, rawBodyLen);
            rawBodyStream.Write(decoded, 0, rawBodyLen);
        }
        break;
        case EncodeMode.XZ: // Type 3, LZMA
        {
            using (MemoryStream ms = new MemoryStream(decoded, 0, compressedBodyLen))
            using (XZInputStream xzs = new XZInputStream(ms))
            {
                xzs.CopyTo(rawBodyStream);
            }
        }
        break;
        default:
            throw new FileDecodeFailException($"Encoded file is corrupted: compMode");
    }
    rawBodyStream.Position = 0;

    // [Stage 8] Validate decompressed body
    // NOTE(review): the footer field is labeled CRC32 of the *compressed* body, but
    // it is checked against the decompressed bytes — confirm against the format spec.
    uint calcCompBody_crc32 = Crc32Checksum.Crc32(rawBodyStream.ToArray());
    if (compressedBody_crc32 != calcCompBody_crc32)
    {
        throw new FileDecodeFailException($"Encoded file is corrupted: body");
    }

    // [Stage 9] Return decompressed body stream
    rawBodyStream.Position = 0;
    return(rawBodyStream);
}
// Builds (or loads) the global CardsManager cache.
// Cold path: download the XZ card archive and token XML if absent, extract the
// archive, parse every set on its own thread, then serialize the manager to disk.
// Warm path: deserialize the manager from the existing cache file.
private static void InitializeCardsManager()
{
    manager = new CardsManager();

    // Pre-compute how many steps the progress counter (GetSteps) will report.
    // NOTE(review): this duplicates the File.Exists cascade of the work section
    // below; if a file appears or disappears in between, step numbering drifts.
    if (!File.Exists(cache_data_file))
    {
        max_steps += 2;
        if (!File.Exists(cache_data_printings_file))
        {
            max_steps += 3;
            if (!File.Exists(cache_data_printings_archive))
            {
                max_steps += 2;
            }
            if (!File.Exists(cache_data_tokens_file))
            {
                max_steps += 2;
            }
        }
    }

    if (!File.Exists(cache_data_file))
    {
        Console.WriteLine(GetSteps() + "Cache (" + cache_data_file + ") not found, creating cache...");
        if (!File.Exists(cache_data_printings_file))
        {
            Console.WriteLine(GetSteps() + "Card data (" + cache_data_printings_file + ") not found...");
            if (!File.Exists(cache_data_printings_archive))
            {
                Console.WriteLine(GetSteps() + "Card archive (" + cache_data_printings_archive + ") not found, downloading...");
                Download(allsets_url, cache_data_printings_archive, true);
                Console.WriteLine(GetSteps() + "Download complete");
            }
            if (!File.Exists(cache_data_tokens_file))
            {
                Console.WriteLine(GetSteps() + "Token XML (" + cache_data_tokens_file + ") not found, downloading...");
                Download(tokens_url, cache_data_tokens_file);
                Console.WriteLine(GetSteps() + "Download complete");
            }

            // Decompress the .xz archive into the plain JSON printings file.
            Console.WriteLine(GetSteps() + "Extracting card archive...");
            using (Stream xz = new XZInputStream(File.OpenRead(cache_data_printings_archive)))
            using (Stream stream = new FileStream(cache_data_printings_file, FileMode.OpenOrCreate))
            {
                xz.CopyTo(stream);
            }
            Console.WriteLine(GetSteps() + "Extraction complete");
        }

        Console.WriteLine(GetSteps() + "Processing data set (cards and tokens)...");
        string string_data = File.ReadAllText(cache_data_printings_file);
        JObject data = JObject.Parse(string_data);

        // One thread per set. The foreach variable 'set' is scoped per-iteration
        // in C# 5+, so capturing it in the lambda is safe here.
        List<Thread> threads = new List<Thread>();
        foreach (var set in data)
        {
            threads.Add(new Thread(() =>
            {
                Set.Parse(set.Key, set.Value.ToObject<JObject>(), manager);
            }));
        }
        foreach (var thread in threads)
        {
            thread.Start();
        }
        foreach (var thread in threads)
        {
            thread.Join();
        }

        // Persist the populated manager so subsequent runs take the warm path.
        WriteToBinaryFile(cache_data_file, manager);
        Console.WriteLine(GetSteps() + "Cache (" + cache_data_file + ") creation completed.");
    }
    else
    {
        Console.Write(GetSteps() + "Cache (" + cache_data_file + ") found! Loading...");
        manager = ReadFromBinaryFile<CardsManager>(cache_data_file);
        Console.WriteLine(" Done");
    }
}
/// <summary>
/// Writes an image file to a physical drive, transparently decompressing
/// zip / gzip / tar.gz / xz images on the fly.
/// </summary>
/// <param name="driveLetter">Logical drive letter of the target device.</param>
/// <param name="fileName">Path of the (possibly compressed) image file.</param>
/// <param name="eCompType">Compression format of <paramref name="fileName"/>.</param>
/// <param name="removeAfter">When true, unmounts the drive after a successful write.</param>
/// <returns>true if the image was written completely; false on failure or cancellation mid-write.</returns>
public bool WriteDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool removeAfter)
{
    IsCancelling = false;

    var dtStart = DateTime.Now;

    if (!File.Exists(fileName))
    {
        throw new ArgumentException(fileName + Resources.Disk_WriteDrive__doesn_t_exist);
    }

    //
    // Get physical drive partition for logical partition
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if (string.IsNullOrEmpty(physicalDrive))
    {
        LogMsg(Resources.Disk_WriteDrive_Error__Couldn_t_map_partition_to_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }

    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_lock_drive);
        return false;
    }

    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if (driveSize <= 0)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_get_device_size);
        _diskAccess.UnlockDrive();
        return false;
    }

    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_open_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }

    var buffer = new byte[Globals.MaxBufferSize];
    long offset = 0;

    var fileLength = new FileInfo(fileName).Length;
    var uncompressedlength = fileLength;

    var errored = true;

    using (var basefs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        // Wrap the base file stream in the matching decompression stream.
        Stream fs;
        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                var zipFile = new ZipFile(basefs);
                var ze = (from ZipEntry zipEntry in zipFile where zipEntry.IsFile select zipEntry).FirstOrDefault();
                if (ze == null)
                {
                    // NOTE(review): jumping to readfail2 skips _diskAccess.Close(),
                    // leaking the physical handle opened above — confirm goto target.
                    LogMsg(Resources.Disk_WriteDrive_Error_reading_zip_input_stream);
                    goto readfail2;
                }
                var zis = zipFile.GetInputStream(ze);
                uncompressedlength = ze.Size;
                fs = zis;
                break;

            case EnumCompressionType.Gzip:
                var gzis = new GZipInputStream(basefs)
                {
                    IsStreamOwner = true
                };
                uncompressedlength = gzis.Length;
                fs = gzis;
                break;

            case EnumCompressionType.Targzip:
                var gzos = new GZipInputStream(basefs)
                {
                    IsStreamOwner = true
                };
                var tis = new TarInputStream(gzos);
                // Skip directory entries; the first file entry is the image.
                // NOTE(review): GetNextEntry returns null at end-of-archive, so a
                // tarball with only directories would throw NullReferenceException here.
                TarEntry tarEntry;
                do
                {
                    tarEntry = tis.GetNextEntry();
                } while (tarEntry.IsDirectory);
                uncompressedlength = tarEntry.Size;
                fs = tis;
                break;

            case EnumCompressionType.XZ:
                var xzs = new XZInputStream(basefs);
                uncompressedlength = xzs.Length;
                fs = xzs;
                break;

            default:
                // No compression - direct to file stream
                fs = basefs;
                uncompressedlength = fs.Length;
                break;
        }

        var bufferOffset = 0;
        using (var br = new BinaryReader(fs))
        {
            while (offset < uncompressedlength && !IsCancelling)
            {
                // Note: There's a problem writing certain lengths to the underlying physical drive.
                // This appears when we try to read from a compressed stream as it gives us
                // "strange" lengths which then fail to be written via Writefile() so try to build
                // up a decent block of bytes here...
                int readBytes;
                do
                {
                    readBytes = br.Read(buffer, bufferOffset, buffer.Length - bufferOffset);
                    bufferOffset += readBytes;
                } while (bufferOffset < Globals.MaxBufferSize && readBytes != 0);

                int wroteBytes;
                var bytesToWrite = bufferOffset;
                var trailingBytes = 0;

                // Assume that the underlying physical drive will at least accept powers of two!
                if (!IsPowerOfTwo((ulong)bufferOffset))
                {
                    // Find highest bit (32-bit max)
                    var highBit = 31;
                    for (; ((bufferOffset & (1 << highBit)) == 0) && highBit >= 0; highBit--)
                    {
                    }

                    // Work out trailing bytes after last power of two
                    var lastPowerOf2 = 1 << highBit;

                    bytesToWrite = lastPowerOf2;
                    trailingBytes = bufferOffset - lastPowerOf2;
                }

                if (_diskAccess.Write(buffer, bytesToWrite, out wroteBytes) < 0)
                {
                    LogMsg(Resources.Disk_WriteDrive_Error_writing_data_to_drive__ + Marshal.GetHRForLastWin32Error());
                    goto readfail1;
                }

                if (wroteBytes != bytesToWrite)
                {
                    LogMsg(Resources.Disk_WriteDrive_Error_writing_data_to_drive___past_EOF_);
                    goto readfail1;
                }

                // Move trailing bytes up - Todo: Suboptimal
                if (trailingBytes > 0)
                {
                    Buffer.BlockCopy(buffer, bufferOffset - trailingBytes, buffer, 0, trailingBytes);
                    bufferOffset = trailingBytes;
                }
                else
                {
                    bufferOffset = 0;
                }

                offset += (uint)wroteBytes;

                var percentDone = (int)(100 * offset / uncompressedlength);
                var tsElapsed = DateTime.Now.Subtract(dtStart);
                var bytesPerSec = offset / tsElapsed.TotalSeconds;

                Progress(percentDone);
                LogMsg(Resources.Disk_WriteDrive_Wrote + @": " + (offset / Globals.MbModifier) + @" / " +
                       (uncompressedlength / Globals.MbModifier) + " MB; " +
                       string.Format("{0:F}", (bytesPerSec / Globals.MbModifier)) + @" MB/s; " +
                       Resources.Disk_Elapsed_time + ": " + tsElapsed.ToString(@"hh\:mm\:ss"));
            }
        }

        // BUGFIX: the original code tested 'fs' against ZipOutputStream / TarOutputStream /
        // GZipOutputStream / XZOutputStream here, but 'fs' only ever holds *input*
        // (decompression) streams, so every branch was dead code. The BinaryReader's
        // 'using' block above has already closed 'fs' (BinaryReader owns its stream),
        // which in turn closes 'basefs' for the stream-owning wrappers, and the outer
        // 'using' covers the uncompressed case — so nothing further needs closing here.
    }
    errored = false;

    if (removeAfter && !IsCancelling)
    {
        _diskAccess.UnmountDrive();
    }

readfail1:
    _diskAccess.Close();
readfail2:
    _diskAccess.UnlockDrive();

    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
    {
        LogMsg(Resources.Disk_WriteDrive_Cancelled);
    }
    else
    {
        LogMsg(Resources.Disk_WriteDrive_Wrote + @" " + offset + @" " + Resources.Disk_WriteDrive_bytes + @". " +
               Resources.Disk_Elapsed_time + @": " + tstotalTime.ToString(@"hh\:mm\:ss"));
    }
    Progress(0);
    return !errored;
}
private void LoadRun(CompressedRun run)
{
    // Number of decompressed bytes this run must produce into decompBuffer.
    int expectedBytes = (int)(run.SectorCount * Sizes.Sector);

    switch (run.Type)
    {
        case RunType.ZlibCompressed:
            // Skip the 2-byte zlib header; DeflateStream consumes raw deflate data.
            this.stream.Position = run.CompOffset + 2;
            using (DeflateStream inflater = new DeflateStream(this.stream, CompressionMode.Decompress, true))
            {
                StreamUtilities.ReadExact(inflater, this.decompBuffer, 0, expectedBytes);
            }
            break;

        case RunType.AdcCompressed:
            this.stream.Position = run.CompOffset;
            byte[] adcData = StreamUtilities.ReadExact(this.stream, (int)run.CompLength);
            if (ADCDecompress(adcData, 0, adcData.Length, this.decompBuffer, 0) != expectedBytes)
            {
                throw new InvalidDataException("Run too short when decompressed");
            }
            break;

        case RunType.BZlibCompressed:
            using (BZip2DecoderStream bzip = new BZip2DecoderStream(
                new SubStream(this.stream, run.CompOffset, run.CompLength),
                Ownership.None))
            {
                StreamUtilities.ReadExact(bzip, this.decompBuffer, 0, expectedBytes);
            }
            break;

        case RunType.LzfseCompressed:
            this.stream.Position = run.CompOffset;
            byte[] lzfseData = StreamUtilities.ReadExact(this.stream, (int)run.CompLength);
            if (LzfseCompressor.Decompress(lzfseData, this.decompBuffer) != expectedBytes)
            {
                throw new InvalidDataException("Run too short when decompressed");
            }
            break;

        case RunType.LzmaCompressed:
            using (var xz = new XZInputStream(
                new SubStream(this.stream, run.CompOffset, run.CompLength)))
            {
                StreamUtilities.ReadExact(xz, this.decompBuffer, 0, expectedBytes);
            }
            break;

        case RunType.Zeros:
        case RunType.Raw:
        case RunType.None:
            // Nothing to decode for these run types.
            break;

        default:
            throw new NotImplementedException("Unrecognized run type " + run.Type);
    }

    this.activeRun = run;
}