/// <summary>
/// Reads the message head; when the payload carries a compressed body it is
/// transparently decompressed first (despite the "Gzip" naming, the stream is
/// decoded with LZMA — see the inline note).
/// </summary>
/// <returns>The parsed <see cref="MessageHead"/>.</returns>
/// <exception cref="OverflowException">Wraps any read/parse failure, including the current offset/length and a hex dump of the buffer.</exception>
public MessageHead ReadHeadGzip()
{
    try
    {
        MessageHead head = new MessageHead();
        head.HasGzip = true;
        head.PacketLength = Length;
        if (CheckGzipBuffer())
        {
            byte[] gzipData = PopBuffer();
            head.GzipLength = gzipData.Length;
            // Wire-format note from the original author: gzip layout 500 + gzip(1000 + XXXXXX)
            // "Enable 7z?" — the data is actually LZMA-decompressed here, not GZip.
            byte[] deZipData = LZMA.Decompress(gzipData, 0);// GzipUtils.DeCompress(gzipData, 0, gzipData.Length);
            // Replace the internal buffer with the decompressed bytes and rewind the read cursor.
            WriteByte(deZipData);
            Reset();
        }
        head.TotalLength = ReadInt();
        head.ErrorCode = ReadInt();
        head.MsgId = ReadInt();
        head.ErrorInfo = ReadString();
        head.Action = ReadInt();
        head.St = ReadString();
        return(head);
    }
    catch (Exception ex)
    {
        string error = string.Format("read to {0}/{1}pos error,\r\nbytes:{2}", Offset, Length, ToHexString());
        throw new OverflowException(error, ex);
    }
}
/// <summary>
/// Decompresses a raw LZMA file and decodes the result as UTF-8 text.
/// Input layout: [0..4] = 5 LZMA property bytes, [5..12] = 64-bit little-endian
/// uncompressed size, [13..] = LZMA payload.
/// </summary>
/// <param name="compressedFile">The complete LZMA file bytes.</param>
/// <returns>The decompressed content as a UTF-8 string.</returns>
public static string DecompressLZMA(byte[] compressedFile)
{
    IntPtr srcLen = new IntPtr(compressedFile.Length - 13);
    byte[] src = new byte[srcLen.ToInt64()];
    IntPtr outPropsSize = new IntPtr(5);
    byte[] props = new byte[5];
    // FIX: the original called compressedFile.CopyTo(src, 13), which copies the WHOLE
    // input into 'src' starting at index 13 — and throws, since 'src' is 13 bytes
    // smaller than the input. The intent is to copy the payload that STARTS at input
    // offset 13 into the beginning of 'src'.
    Buffer.BlockCopy(compressedFile, 13, src, 0, compressedFile.Length - 13);
    for (int i = 0; i < 5; i++)
    {
        props[i] = compressedFile[i];
    }
    // Decode the 8-byte little-endian uncompressed size. FIX: accumulate into a long —
    // the original used int, where shift counts >= 32 are masked to 0..31 in C#, so the
    // high four size bytes silently corrupted the value instead of extending it.
    long uncompressedSize = 0;
    for (int j = 0; j < 8; j++)
    {
        uncompressedSize += (long)compressedFile[j + 5] << (8 * j);
    }
    IntPtr destLen = new IntPtr(uncompressedSize);
    byte[] dest = new byte[uncompressedSize];
    int status = LZMA.LzmaUncompress(dest, ref destLen, src, ref srcLen, props, outPropsSize);
    if (status != 0)
    {
        MessageBox.Show("Decompression returned " + status);
    }
    // (The original wrapped this in new string(...ToCharArray()) — a redundant copy.)
    return Encoding.UTF8.GetString(dest);
}
// Compresses a single mod file into the staging area as <relativePath>.lzma.
// Output layout: [0..4] LZMA props | [5..8] 32-bit little-endian uncompressed size | [13..] payload.
// NOTE(review): bytes 9..12 are left zero — presumably the high half of a 64-bit size
// field; confirm against whatever reads these files.
// NOTE(review): cancelCheckCallback is accepted but never consulted — confirm whether
// cancellation is supposed to abort mid-compress.
private static string LZMACompressFileForUpload(string relativePath, string stagingPath, string modPath, Func <bool?> cancelCheckCallback = null)
{
    Log.Information(@"Compressing " + relativePath);
    var destPath = Path.Combine(stagingPath, relativePath + @".lzma");
    var sourcePath = Path.Combine(modPath, relativePath);
    // Make sure the mirrored directory structure exists in the staging area.
    Directory.CreateDirectory(Directory.GetParent(destPath).FullName);
    var src = File.ReadAllBytes(sourcePath);
    var compressedBytes = LZMA.Compress(src);
    byte[] fixedBytes = new byte[compressedBytes.Count() + 8]; //needs 8 byte header written into it (only mem version needs this)
    // Copy the 5 LZMA property bytes, splice the uncompressed size in after them,
    // then append the payload starting at offset 13.
    Buffer.BlockCopy(compressedBytes, 0, fixedBytes, 0, 5);
    fixedBytes.OverwriteRange(5, BitConverter.GetBytes((int)new FileInfo(sourcePath).Length));
    Buffer.BlockCopy(compressedBytes, 5, fixedBytes, 13, compressedBytes.Length - 5);
    File.WriteAllBytes(destPath, fixedBytes);
    //Test!
    //var decomp = SevenZipHelper.LZMA.DecompressLZMAFile(fixedBytes);
    //if (decomp == null)
    //{
    //    Debug.WriteLine("NOT LZMA");
    //}
    //else if (decomp.Length != src.Length)
    //{
    //    Debug.WriteLine("Decompressed data does not match source length!");
    //}
    return(destPath);
}
// ISeqOutStream implementation: pushes 'size' bytes produced by the LZMA encoder into
// the wrapped output stream. Returns the number of bytes consumed, or 0 when the write
// was cancelled (which the LZMA core treats as an abort).
long LZMA.Master.LZMA.ISeqOutStream.Write(LZMA.P<byte> buf, long size)
{
    System.Diagnostics.Debug.Assert(size > 0);
    var buffer = buf.mBuffer;
    var offset = buf.mOffset;
    var result = size;
    // Stream APIs take an int count, so chunks beyond int.MaxValue are written in
    // partial int-sized slices first.
    while (size > Int32.MaxValue)
    {
        int written;
        try
        {
            // NOTE(review): synchronously blocks on the async write; assumed safe because
            // this runs on the dedicated LZMA worker thread — confirm.
            written = mStream.WriteAsync(buffer, offset, Int32.MaxValue, StreamMode.Partial).GetAwaiter().GetResult();
        }
        catch (OperationCanceledException)
        {
            return 0; // cancellation -> report zero bytes written
        }
        if (written <= 0)
            throw new InvalidOperationException("IOutputStream.WriteAsync returned an invalid result.");
        offset += written;
        size -= written;
    }
    // Final slice fits in an int; StreamMode.Complete requires the entire remainder
    // to be written in this call.
    if (size > 0)
    {
        int written;
        try
        {
            written = mStream.WriteAsync(buffer, offset, (int)size, StreamMode.Complete).GetAwaiter().GetResult();
        }
        catch (OperationCanceledException)
        {
            return 0;
        }
        if (written != size)
            throw new InvalidOperationException("IOutputStream.WriteAsync returned an invalid result.");
    }
    return result;
}
/// <summary>
/// Parses the SWF header from <paramref name="input"/>, then swaps in a decompressing
/// reader for the body when the file is LZMA- or ZLIB-compressed, before reading the
/// initial frame record.
/// </summary>
protected ShockwaveFlash(FlashReader input)
    : this(false)
{
    // The first byte of the 3-char signature encodes the compression kind
    // (presumably 'F'/'C'/'Z' per the SWF spec — the cast maps it onto CompressionKind).
    Compression = (CompressionKind)input.ReadString(3)[0];
    Version = input.ReadByte();
    FileLength = input.ReadUInt32();
    switch (Compression)
    {
        case CompressionKind.LZMA:
        {
            // FileLength counts the whole file including the 8-byte header.
            byte[] decompressed = LZMA.Decompress(input.BaseStream, ((int)FileLength - 8));
            _input = new FlashReader(decompressed);
            break;
        }
        case CompressionKind.ZLIB:
        {
            _input = ZLIB.WrapDecompressor(input.BaseStream);
            break;
        }
        case CompressionKind.None:
        {
            _input = input;
            break;
        }
    }
    Frame = new FrameRecord(_input);
}
/// <summary>
/// Loads a Lua script by name, stripping Unity's "(Clone)" suffix first.
/// In the editor the script is read from the debug path on disk; in player builds it
/// is loaded from resources and LZMA-decompressed.
/// </summary>
/// <param name="name">Script/object name; any "(Clone)" suffix is removed.</param>
/// <returns>The script text, or null when it cannot be found.</returns>
public static string Load(string name)
{
    string script = null;
    // Strip Unity's "(Clone)" suffix so instantiated objects resolve to the same script.
    var index = name.IndexOf("(Clone)");
    if (index >= 0)
    {
        name = name.Substring(0, index);
    }
#if UNITY_EDITOR
    string filename = Application.dataPath + kLuaScriptDebugPath + "/" + name + ".lua";
    try
    {
        // FIX: 'using' disposes the reader even when ReadToEnd throws; the original
        // only reached Close/Dispose on the success path and leaked the stream on failure.
        using (var fs = new StreamReader(filename, Encoding.UTF8))
        {
            script = fs.ReadToEnd();
        }
    }
    catch (System.Exception)
    {
        // Deliberate best-effort: a missing debug script simply yields null.
    }
#else
    var ta = ResourceUtils.Load <TextAsset>(kLuaScriptPath + "/" + name);
    if (ta == null)
    {
        script_cache_[name] = null;
        return(null);
    }
    var bytes = LZMA.Decompress(ta.bytes);
    script = Encoding.UTF8.GetString(bytes);
#endif
    return(script);
}
/// <summary>
/// Decompresses LZMA. Uses native code if non-streamed (produced by lzma.exe), uses managed code if streamed (which can be done by things such as PHP)
/// </summary>
/// <param name="input">Stream positioned at the start of an LZMA file (5 property bytes + 8-byte size + payload).</param>
/// <returns>The decompressed bytes.</returns>
public static byte[] DecompressLZMA(MemoryStream input)
{
    // The 32-bit value at offset 5 is the low half of the uncompressed-size field.
    // lzma.exe writes the real size there; streamed producers leave 0/-1.
    input.Position = 5;
    var lzmaLen = input.ReadInt32();
    input.Position = 0;
    if (lzmaLen > 0)
    {
        // Non streamed LZMA
        return(LZMA.DecompressLZMAFile(input.ToArray()));
    }
    else
    {
        // It's streaming lzma. MEM code can't handle streamed so we have to fallback
        var lzmads = new LzmaDecodeStream(input);
        using var decompressedStream = new MemoryStream();
        int bufSize = 24576, count;
        byte[] buf = new byte[bufSize];
        // Drain the decode stream in 24 KiB chunks until it reports end-of-data.
        while (/*lzmads.Position < lzmaFile.Length && */ (count = lzmads.Read(buf, 0, bufSize)) > 0)
        {
            decompressedStream.Write(buf, 0, count);
        }
        return(decompressedStream.ToArray());
    }
}
/// <summary>
/// Stitches the Shockwave Flash(SWF) file header containing basic information, with the compressed/uncompressed content of the file.
/// </summary>
/// <param name="standard">The compression/decompression standard to use.</param>
/// <param name="isCompressing">if set to <c>true</c> [is compressing].</param>
/// <returns>The 8-byte header followed by the (de)compressed body.</returns>
/// <exception cref="InvalidOperationException">Thrown when <paramref name="standard"/> is not ZLIB or LZMA.</exception>
protected byte[] StitchFlashDataWithHeader(CompressionStandard standard, bool isCompressing)
{
    // Compressing with "None" makes no sense here; silently upgrade to ZLIB.
    if (isCompressing && standard == CompressionStandard.None)
    {
        CompressWith = CompressionStandard.ZLIB;
        standard = CompressWith;
    }
    var flashHeader = new byte[8];
    Buffer.BlockCopy(_flashData, 0, flashHeader, 0, 8);
    // First signature byte: the compression marker when compressing, 'F' (uncompressed) otherwise.
    flashHeader[0] = (isCompressing ? (byte)standard : (byte)'F');
    var flashBody = new byte[_flashData.Length - 8];
    Buffer.BlockCopy(_flashData, 8, flashBody, 0, flashBody.Length);
    byte[] body = null;
    switch (standard)
    {
        default:
        {
            throw new InvalidOperationException(
                "Invalid compression/decompression standard was specified: " + standard);
        }
        case CompressionStandard.ZLIB:
        {
            if (isCompressing)
            {
                body = ZlibStream.CompressBuffer(flashBody);
            }
            if (!isCompressing)
            {
                body = ZlibStream.UncompressBuffer(flashBody);
            }
            break;
        }
        case CompressionStandard.LZMA:
        {
            if (isCompressing)
            {
                body = LZMA.CompressBuffer(flashBody);
            }
            if (!isCompressing)
            {
                // FileLength includes the 8-byte header, so the body is FileLength - 8.
                body = LZMA.DecompressBuffer(flashBody, (int)FileLength - 8);
            }
            break;
        }
    }
    // Re-assemble: header first, body after.
    var buffer = new byte[8 + body.Length];
    Buffer.BlockCopy(flashHeader, 0, buffer, 0, 8);
    Buffer.BlockCopy(body, 0, buffer, 8, body.Length);
    return(buffer);
}
// ISeqInStream implementation: fills 'buf' with up to 'size' bytes from the wrapped
// stream for the LZMA encoder. On return 'size' holds the bytes actually read (0 once
// the source is exhausted). Returns SZ_OK, or SZ_ERROR_FAIL on cancellation.
LZMA.Master.LZMA.SRes LZMA.Master.LZMA.ISeqInStream.Read(LZMA.P<byte> buf, ref long size)
{
    System.Diagnostics.Debug.Assert(size > 0);
    // Once end-of-stream has been seen, keep reporting zero bytes.
    if (mCompleted)
    {
        size = 0;
        return LZMA.Master.LZMA.SZ_OK;
    }
    // Stream.Read takes an int count; clamp oversized requests.
    var capacity = size < Int32.MaxValue ? (int)size : Int32.MaxValue;
    int fetched;
    try
    {
        // NOTE(review): synchronously blocks on the async read; assumed safe because
        // this runs on the dedicated LZMA worker thread — confirm.
        fetched = mStream.ReadAsync(buf.mBuffer, buf.mOffset, capacity, StreamMode.Partial).GetAwaiter().GetResult();
    }
    catch (OperationCanceledException)
    {
        size = 0;
        return LZMA.Master.LZMA.SZ_ERROR_FAIL;
    }
    if (fetched < 0 || fetched > capacity)
        throw new InvalidOperationException("IInputStream.ReadAsync returned an invalid result.");
    if (fetched == 0)
        mCompleted = true; // a zero-byte read marks end-of-stream for subsequent calls
    size = fetched;
    return LZMA.Master.LZMA.SZ_OK;
}
/// <summary>
/// Decompress an encrypted FAES File.
/// </summary>
/// <param name="encryptedFile">Encrypted FAES File</param>
/// <param name="overridePath">Override the read path</param>
/// <returns>Path of the encrypted, Decompressed file</returns>
public string DecompressFAESFile(FAES_File encryptedFile, string overridePath = "")
{
    // The container format is recorded in the file itself; dispatch on it.
    string mode = FileAES_Utilities.GetCompressionMode(encryptedFile.GetPath());
    Logging.Log(String.Format("Compression Mode: {0}", mode), Severity.DEBUG);

    if (mode == "LZMA")
    {
        return new LZMA().DecompressFAESFile(encryptedFile, overridePath);
    }
    if (mode == "TAR")
    {
        return new TAR().DecompressFAESFile(encryptedFile, overridePath);
    }
    if (mode == "ZIP")
    {
        return new ZIP(_compressionLevel).DecompressFAESFile(encryptedFile, overridePath);
    }
    if (mode == "LEGACY" || mode == "LEGACYZIP" || mode == "LGYZIP")
    {
        return new LegacyZIP().DecompressFAESFile(encryptedFile, overridePath);
    }
    throw new NotSupportedException("FAES File was compressed using an unsupported file format.");
}
/// <summary>
/// Decompresses a single archive entry into a MemoryStream. Entries whose
/// BlockSizeTableIndex is 0xFFFFFFFF are stored without a block table and are copied
/// verbatim; otherwise each block is either a raw copy (when its size equals the max
/// block size or the remaining byte count) or LZMA-compressed.
/// </summary>
/// <param name="Index">Index into the Files table.</param>
/// <param name="fs">Archive stream; will be seeked to the entry's first block offset.</param>
/// <returns>A stream holding the entry's uncompressed bytes.</returns>
/// <exception cref="Exception">On malformed block sizes or an LZMA size mismatch.</exception>
public MemoryStream DecompressEntry(int Index, FileStream fs)
{
    MemoryStream result = new MemoryStream();
    FileEntryStruct e = Files[Index];
    uint count = 0;
    byte[] inputBlock;
    byte[] outputBlock = new byte[Header.MaxBlockSize];
    long left = e.RealUncompressedSize;
    fs.Seek(e.BlockOffsets[0], SeekOrigin.Begin);
    byte[] buff;
    if (e.BlockSizeTableIndex == 0xFFFFFFFF)
    {
        // Uncompressed entry: single straight copy.
        buff = new byte[e.RealUncompressedSize];
        fs.Read(buff, 0, buff.Length); // NOTE(review): return value ignored; assumes a full read — confirm
        result.Write(buff, 0, buff.Length);
    }
    else
    {
        while (left > 0)
        {
            uint compressedBlockSize = e.BlockSizes[count];
            // A stored size of 0 means "a full, uncompressed block".
            if (compressedBlockSize == 0)
            {
                compressedBlockSize = Header.MaxBlockSize;
            }
            if (compressedBlockSize == Header.MaxBlockSize || compressedBlockSize == left)
            {
                // Raw block: copy through unchanged.
                buff = new byte[compressedBlockSize];
                fs.Read(buff, 0, buff.Length);
                result.Write(buff, 0, buff.Length);
                left -= compressedBlockSize;
            }
            else
            {
                var uncompressedBlockSize = (uint)Math.Min(left, Header.MaxBlockSize);
                // An LZMA block needs at least its 5 property bytes; smaller is corrupt.
                if (compressedBlockSize < 5)
                {
                    throw new Exception("compressed block size smaller than 5");
                }
                inputBlock = new byte[compressedBlockSize];
                fs.Read(inputBlock, 0, (int)compressedBlockSize);
                uint actualUncompressedBlockSize = uncompressedBlockSize;
                uint actualCompressedBlockSize = compressedBlockSize;
                outputBlock = LZMA.Decompress(inputBlock, actualUncompressedBlockSize);
                if (outputBlock.Length != actualUncompressedBlockSize)
                {
                    throw new Exception("Decompression Error");
                }
                result.Write(outputBlock, 0, (int)actualUncompressedBlockSize);
                left -= uncompressedBlockSize;
            }
            count++;
        }
    }
    return(result);
}
/// <summary>
/// Conditionally compresses a buffer: payloads at or below the configured size
/// threshold are returned untouched.
/// </summary>
/// <param name="buffer">The raw bytes to (maybe) compress.</param>
/// <returns>The LZMA-compressed bytes, or the original buffer when it is small.</returns>
public byte[] CompressBuffer(byte[] buffer)
{
    // Small payloads are not worth the compression overhead.
    return buffer.Length > Enable7zipMinByte ? LZMA.Compress(buffer) : buffer;
}
/// <summary>
/// Extracts a single DLC entry from <paramref name="input"/> into <paramref name="output"/>.
/// Uncompressed entries (compressedBlockSizesIndex == -1) are streamed straight through;
/// otherwise compressed blocks are read sequentially, decompressed in parallel, and then
/// written back out in their original order.
/// </summary>
public void ExtractEntry(DLCEntry entry, Stream input, Stream output)
{
    input.JumpTo(entry.dataOffset);
    if (entry.compressedBlockSizesIndex == -1)
    {
        output.WriteFromStream(input, entry.uncomprSize);
    }
    else
    {
        var uncompressedBlockBuffers = new List <byte[]>();
        var compressedBlockBuffers = new List <byte[]>();
        var blockBytesLeft = new List <long>();
        long bytesLeft = entry.uncomprSize;
        // Sequential pass: read every compressed block off the stream, remembering how
        // many bytes were still outstanding before each block (the parallel pass needs it).
        for (int j = 0; j < entry.numBlocks; j++)
        {
            blockBytesLeft.Add(bytesLeft);
            int compressedBlockSize = blockSizes[entry.compressedBlockSizesIndex + j];
            int uncompressedBlockSize = (int)Math.Min(bytesLeft, maxBlockSize);
            // A stored size of 0 means "a full, uncompressed block".
            if (compressedBlockSize == 0)
            {
                compressedBlockSize = (int)maxBlockSize;
            }
            compressedBlockBuffers.Add(input.ReadToBuffer(compressedBlockSize));
            uncompressedBlockBuffers.Add(null);
            bytesLeft -= uncompressedBlockSize;
        }
        // Parallel pass: raw blocks (size 0 or equal to the remaining byte count) are
        // passed through; everything else is LZMA-decompressed.
        Parallel.For(0, entry.numBlocks, j =>
        {
            int compressedBlockSize = blockSizes[entry.compressedBlockSizesIndex + (int)j];
            int uncompressedBlockSize = (int)Math.Min(blockBytesLeft[(int)j], maxBlockSize);
            if (compressedBlockSize == 0 || compressedBlockSize == blockBytesLeft[(int)j])
            {
                uncompressedBlockBuffers[(int)j] = compressedBlockBuffers[(int)j];
            }
            else
            {
                uncompressedBlockBuffers[(int)j] = LZMA.Decompress(compressedBlockBuffers[(int)j], (uint)uncompressedBlockSize);
                if (uncompressedBlockBuffers[(int)j].Length == 0)
                {
                    throw new Exception();
                }
            }
        });
        // Emit blocks in original order.
        for (int j = 0; j < entry.numBlocks; j++)
        {
            output.WriteFromBuffer(uncompressedBlockBuffers[j]);
        }
    }
}
// Developer utility: regenerates the MD5 map file for a game from the vanilla database
// and writes it LZMA-compressed behind an "MD5T" header.
// Payload format: [name table: count + null-terminated ASCII names]
//                 [data table: count + per-file (nameIdx, size, 16-byte packed MD5)].
// NOTE(review): 'directory' is never used and the output path is a hard-coded developer
// machine path — confirm this tool is dev-only.
public static void UpdateMD5Map(MEGame game, string directory, string outName)
{
    var outF = $@"C:\Users\mgame\source\repos\ME3Tweaks\MassEffectModManager\MassEffectModManagerCore\modmanager\gamemd5\{outName}";
    var db = VanillaDatabaseService.LoadDatabaseFor(game, false);
    if (game.IsLEGame())
    {
        db.RemoveAll(x => x.Key.Contains(@"BioGame\Config")); // Do not include config files
    }
    MemoryStream mapStream = new MemoryStream();
    // Name Table
    mapStream.WriteInt32(db.Count); // Num Entries
    foreach (var f in db.Keys)
    {
        mapStream.WriteStringASCIINull(f);
    }
    // Data Table
    mapStream.WriteInt32(db.Count);
    int idx = 0;
    foreach (var f in db)
    {
        mapStream.WriteInt32(idx); // Name Table IDX. Update this code for duplicates support
        mapStream.WriteInt32(f.Value[0].size); // Size
        var md5 = f.Value[0].md5;
        // Pack the 32-char hex MD5 into 16 bytes: first hex digit of each pair becomes
        // the high nibble.
        for (int i = 0; i < 32; i++)
        {
            byte b = 0;
            b |= HexToInt(md5[i]);
            b = (byte)(b << 4);
            i++;
            b |= HexToInt(md5[i]);
            mapStream.WriteByte(b);
        }
        idx++;
    }
    // Container: "MD5T" magic + uncompressed size + LZMA payload.
    var compBytes = LZMA.Compress(mapStream.ToArray());
    MemoryStream ms = new MemoryStream();
    ms.WriteStringASCII(@"MD5T");
    ms.WriteInt32((int)mapStream.Length);
    ms.Write(compBytes);
    ms.WriteToFile(outF);
}
// Compresses s.Src into s.Dst using the managed LZMA encoder with the tuning knobs
// carried in SharedSettings. On success, s.WrittenSize holds the number of compressed
// bytes and s.Enc the encoder property blob (LZMA_PROPS_SIZE bytes) a decoder needs.
private void LZMA_Compress(SharedSettings s)
{
    long s_WrittenSize = s.Dst.Length; // in: capacity of Dst; out: bytes actually written
    var props = LZMA.CLzmaEncProps.LzmaEncProps_Init();
    props.mLevel = s.ActualLevel;
    props.mDictSize = (uint)s.ActualDictSize;
    props.mLC = s.ActualLC;
    props.mLP = s.ActualLP;
    props.mPB = s.ActualPB;
    props.mAlgo = s.ActualAlgo;
    props.mFB = s.ActualFB;
    props.mBtMode = s.ActualBTMode;
    props.mNumHashBytes = s.ActualNumHashBytes;
    props.mMC = s.ActualMC;
    props.mWriteEndMark = s.ActualWriteEndMark;
    props.mNumThreads = s.ActualNumThreads;
    var enc = LZMA.LzmaEnc_Create(LZMA.ISzAlloc.BigAlloc);
    var res = enc.LzmaEnc_SetProps(props);
    if (res != LZMA.SZ_OK)
    {
        throw new Exception("SetProps failed: " + res);
    }
    // In-memory single-shot encode (no streaming, no progress callback).
    res = enc.LzmaEnc_MemEncode(P.From(s.Dst), ref s_WrittenSize, P.From(s.Src), s.Src.Length, s.ActualWriteEndMark != 0, null, LZMA.ISzAlloc.SmallAlloc, LZMA.ISzAlloc.BigAlloc);
    if (res != LZMA.SZ_OK)
    {
        throw new Exception("MemEncode failed: " + res);
    }
    // Capture the property header that a decoder will need.
    s.Enc = new PZ(new byte[LZMA.LZMA_PROPS_SIZE]);
    long s_Enc_Length = s.Enc.Length;
    res = enc.LzmaEnc_WriteProperties(P.From(s.Enc), ref s_Enc_Length);
    if (res != LZMA.SZ_OK)
    {
        throw new Exception("WriteProperties failed: " + res);
    }
    if (s.Enc.Length != s.Enc.Buffer.Length)
    {
        throw new NotSupportedException();
    }
    enc.LzmaEnc_Destroy(LZMA.ISzAlloc.SmallAlloc, LZMA.ISzAlloc.BigAlloc);
    s.WrittenSize = (int)s_WrittenSize;
    s.Enc.Length = (int)s_Enc_Length;
}
/// <summary>
/// Serializes the flash file to <paramref name="output"/>: signature + version +
/// length placeholder, then the frame record and every tag. ZLIB compresses on the
/// fly; LZMA buffers the whole body in memory and compresses it at the end. The
/// accumulated (uncompressed) file length is back-patched at offset 4.
/// </summary>
/// <param name="callback">Invoked for each tag just before it is written; may be null.</param>
public virtual void Assemble(FlashWriter output, CompressionKind compression, Action <TagItem> callback)
{
    // Signature: compression marker char + "WS".
    output.Write(((char)compression) + "WS", true);
    output.Write(Version);
    output.Write(uint.MinValum: uint.MinValue); // length placeholder, patched below
    int fileLength = 8;
    FlashWriter compressor = null;
    switch (compression)
    {
        case CompressionKind.LZMA:
        {
            // LZMA cannot stream through here: buffer the body in memory first.
            compressor = new FlashWriter((int)FileLength);
            break;
        }
        case CompressionKind.ZLIB:
        {
            compressor = ZLIB.WrapCompressor(output.BaseStream, true);
            break;
        }
    }
    /* Body Start */
    Frame.WriteTo(compressor ?? output);
    fileLength += (Frame.Area.GetByteSize() + 4);
    for (int i = 0; i < Tags.Count; i++)
    {
        TagItem tag = Tags[i];
        callback?.Invoke(tag);
        WriteTag(tag, compressor ?? output);
        fileLength += tag.Header.Length;
        // Tag headers are 6 bytes for long tags, 2 for short ones.
        fileLength += (tag.Header.IsLongTag ? 6 : 2);
    }
    if (compression == CompressionKind.LZMA)
    {
        byte[] uncompressedBody = ((MemoryStream)compressor.BaseStream).ToArray();
        byte[] compressedBody = LZMA.Compress(uncompressedBody);
        output.Write(compressedBody);
    }
    compressor?.Dispose();
    /* Body End */
    // Back-patch the real file length into the header, then restore the position.
    output.Position = 4;
    output.Write((uint)fileLength);
    output.Position = output.Length;
}
// Developer utility: builds an MD5 map from the files under 'directory' (same container
// format as UpdateMD5Map: name table + per-file size/MD5, LZMA-compressed behind "MD5T").
public static void GenerateMD5Map(MEGame game, string directory, string outName)
{
    var allFiles = Directory.GetFiles(directory, @"*.*", SearchOption.AllDirectories);//.Take(10).ToArray();
    if (game.IsLEGame())
    {
        // NOTE(review): RemoveAll on a string[] must be an extension method — an array's
        // length cannot change, so verify the excluded entries are not still counted and
        // written by the loops below.
        allFiles.RemoveAll(x => x.Contains(@"BioGame\Config")); // Do not include config files
    }
    MemoryStream mapStream = new MemoryStream();
    // Name Table
    mapStream.WriteInt32(allFiles.Length); // Num Entries
    foreach (var f in allFiles)
    {
        // Store paths relative to the scanned root.
        mapStream.WriteStringASCIINull(f.Substring(directory.Length + 1));
    }
    // Data Table
    mapStream.WriteInt32(allFiles.Length);
    int idx = 0;
    foreach (var f in allFiles)
    {
        mapStream.WriteInt32(idx); // Name Table IDX. Update this code for duplicates support
        mapStream.WriteInt32((int)new FileInfo(f).Length); // Size
        var md5 = Utilities.CalculateMD5(f);
        // Pack the 32-char hex MD5 into 16 bytes (first hex digit -> high nibble).
        for (int i = 0; i < 32; i++)
        {
            byte b = 0;
            b |= HexToInt(md5[i]);
            b = (byte)(b << 4);
            i++;
            b |= HexToInt(md5[i]);
            mapStream.WriteByte(b);
        }
        idx++;
    }
    // Container: "MD5T" magic + uncompressed size + LZMA payload.
    var compBytes = LZMA.Compress(mapStream.ToArray());
    MemoryStream outStream = new MemoryStream();
    outStream.WriteStringASCII(@"MD5T");
    outStream.WriteInt32((int)mapStream.Length);
    outStream.Write(compBytes);
    // NOTE(review): hard-coded developer output path — confirm this tool is dev-only.
    outStream.WriteToFile($@"C:\Users\mgame\source\repos\ME3Tweaks\MassEffectModManager\MassEffectModManagerCore\modmanager\gamemd5\{outName}");
}
/// <summary>
/// LZMA compression helper function.
/// </summary>
/// <param name="data">Bytes to compress.</param>
/// <param name="prefix">Optional prefix forwarded to the compressor.</param>
/// <returns>The compressed bytes, or the original <paramref name="data"/> when compression fails.</returns>
static public byte[] Compress(byte[] data, byte[] prefix = null)
{
    MemoryStream stream = new MemoryStream();
    stream.Write(data, 0, data.Length);
    stream.Position = 0;
    MemoryStream outStream = LZMA.Compress(stream, prefix);
    stream.Close();
    // FIX: only touch the output stream when it is non-null. The original called
    // outStream.Close() outside this check and threw NullReferenceException whenever
    // LZMA.Compress returned null (its failure signal).
    if (outStream != null)
    {
        data = outStream.ToArray();
        outStream.Close();
    }
    return data;
}
// Resolves a Lua script by name with layered fallbacks:
//   1. the include_files memo cache,
//   2. the runtime script path on disk,
//   3. (editor builds) the development script path,
//   4. an LZMA-compressed TextAsset under Resources (hits and misses memoized).
public static string Load(string name)
{
    string script = null;
    // Strip Unity's "(Clone)" suffix so instantiated objects map to the same script.
    var index = name.IndexOf("(Clone)");
    if (index >= 0)
    {
        name = name.Substring(0, index);
    }
    if (include_files.TryGetValue(name, out script))
    {
        return(script);
    }
    var fullpath = string.Concat(script_path, LuaScriptRuntimePath, "/", name, ".lua");
    script = LoadScript(name, fullpath);
    if (!string.IsNullOrEmpty(script))
    {
        return(script);
    }
    // NOTE(review): this logs an error before the remaining fallbacks are tried, so it
    // fires for every script served from Resources — confirm it isn't meant to be
    // debug-level or moved after the final miss.
    Debug.LogError(fullpath);
#if UNITY_EDITOR
    fullpath = string.Concat(script_path, kLuaScriptDevelopmentPath, "/", name, ".lua");
    script = LoadScript(name, fullpath);
    if (!string.IsNullOrEmpty(script))
    {
        return(script);
    }
#endif
    var ta = Resources.Load <TextAsset>(string.Concat(kLuaScriptResourcePath, "/", name));
    if (ta == null)
    {
        // Memoize the miss so we don't retry every frame.
        include_files[name] = null;
        return(null);
    }
    var bytes = LZMA.Decompress(ta.bytes);
    script = Encoding.UTF8.GetString(bytes);
    include_files[name] = script;
    return(script);
}
/// <summary>
/// LZMA compression helper function.
/// </summary>
/// <param name="data">Bytes to compress.</param>
/// <param name="prefix">Optional prefix forwarded to the compressor.</param>
/// <returns>The compressed bytes, or null when compression fails.</returns>
static public byte[] Compress(byte[] data, byte[] prefix = null)
{
    var stream = new MemoryStream();
    stream.Write(data, 0, data.Length);
    stream.Position = 0;
    var outStream = LZMA.Compress(stream, prefix);
    stream.Close();
    byte[] retVal = null;
    // FIX: only touch the output stream when it is non-null. The original called
    // outStream.Close() outside this check and threw NullReferenceException whenever
    // LZMA.Compress returned null (its failure signal).
    if (outStream != null)
    {
        retVal = outStream.ToArray();
        outStream.Close();
    }
    return retVal;
}
// Initializes AppCenter telemetry (release builds only). Registers a crash-report
// callback that attaches the latest log, LZMA-compressing it when it exceeds ~7 MiB
// (presumably to stay under AppCenter's attachment limit — confirm).
private static void initAppCenter()
{
#if !DEBUG
    if (APIKeys.HasAppCenterKey && !telemetryStarted)
    {
        Microsoft.AppCenter.Crashes.Crashes.GetErrorAttachments = (ErrorReport report) =>
        {
            var attachments = new List <ErrorAttachmentLog>();
            // Attach some text.
            string errorMessage = "ALOT Installer has crashed! This is the exception that caused the crash:\n" + report.StackTrace;
            MERLog.Fatal(errorMessage);
            Log.Error("Note that this exception may appear to occur in a follow up boot due to how appcenter works");
            string log = LogCollector.CollectLatestLog(false);
            if (log.Length < 1024 * 1024 * 7)
            {
                attachments.Add(ErrorAttachmentLog.AttachmentWithText(log, "crashlog.txt"));
            }
            else
            {
                //Compress log
                var compressedLog = LZMA.CompressToLZMAFile(Encoding.UTF8.GetBytes(log));
                attachments.Add(ErrorAttachmentLog.AttachmentWithBinary(compressedLog, "crashlog.txt.lzma", "application/x-lzma"));
            }
            // Attach binary data.
            //var fakeImage = System.Text.Encoding.Default.GetBytes("Fake image");
            //ErrorAttachmentLog binaryLog = ErrorAttachmentLog.AttachmentWithBinary(fakeImage, "ic_launcher.jpeg", "image/jpeg");
            return(attachments);
        };
        AppCenter.Start(APIKeys.AppCenterKey, typeof(Analytics), typeof(Crashes));
    }
#else
    if (!APIKeys.HasAppCenterKey)
    {
        Debug.WriteLine(" >>> This build is missing an API key for AppCenter!");
    }
    else
    {
        Debug.WriteLine("This build has an API key for AppCenter");
    }
#endif
    telemetryStarted = true;
}
// Editor deployment step: deletes the previously deployed Lua folder under Resources,
// then walks every *.lua file under the debug path, LZMA-compresses its UTF-8 text,
// and writes it to the mirrored deploy location via DeployFile.
public static void Deploy()
{
    var debug_path = Application.dataPath + kLuaScriptDebugPath;
    var resources_path = Application.dataPath + "/Resources";
    var deploy_path = resources_path + kLuaScriptPath;
    // Start from a clean deploy directory.
    if (Directory.Exists(deploy_path))
    {
        DirectoryInfo deploy_di = new DirectoryInfo(deploy_path);
        deploy_di.Delete(true);
    }
    FileInfo[] fileinfos = null;
    DirectoryInfo di = new DirectoryInfo(debug_path);
    var debug_path_length = di.FullName.Length;
    // NOTE(review): 'di' can never be null right after 'new'; this check is dead code.
    if (di != null)
    {
        fileinfos = di.GetFiles("*", SearchOption.AllDirectories);
    }
    foreach (var fi in fileinfos)
    {
        var suffix = Path.GetExtension(fi.FullName);
        if (suffix.ToLower() != ".lua")
        {
            continue;
        }
        // Recover the file's directory relative to the debug root and its bare name.
        var filepath = fi.FullName.Substring(debug_path_length);
        filepath = Path.GetDirectoryName(filepath);
        var filename = Path.GetFileName(fi.FullName);
        filename = Path.GetFileNameWithoutExtension(filename);
        byte[] bytes = null;
        using (var s = new StreamReader(fi.FullName))
        {
            var b = s.ReadToEnd();
            bytes = LZMA.Compress(Encoding.UTF8.GetBytes(b));
        }
        var path = deploy_path + "/" + filepath;
        DeployFile(path, filename, bytes);
    }
}
/// <summary>
/// Just here for consistency.
/// </summary>
/// <param name="writer">Destination writer.</param>
/// <param name="compressed">When true, attempt LZMA compression; falls back to a plain write on failure.</param>
public void Write(BinaryWriter writer, bool compressed = false)
{
    if (compressed)
    {
        // Serialize through the LZMA helper; on success emit the 4-byte tag
        // followed by the compressed payload.
        LZMA lzma = new LZMA();
        lzma.BeginWriting().WriteObject(this);
        byte[] packed = lzma.Compress();
        if (packed != null)
        {
            int i = 0;
            while (i < 4)
            {
                writer.Write(mLZMA[i]);
                ++i;
            }
            writer.Write(packed);
            return;
        }
    }
    // Fallback: plain, uncompressed serialization.
    writer.WriteObject(this);
}
/// <summary>
/// Write the node hierarchy to the specified filename.
/// </summary>
/// <param name="path">Destination file path.</param>
/// <param name="type">Text (default), Binary, or Compressed (binary + LZMA).</param>
/// <param name="allowConfigAccess">Forwarded to Tools.WriteFile.</param>
/// <returns>true when Tools.WriteFile reports success.</returns>
public bool Write(string path, SaveType type = SaveType.Text, bool allowConfigAccess = false)
{
    bool retVal = false;
    MemoryStream stream = new MemoryStream();
    if (type == SaveType.Binary)
    {
        BinaryWriter writer = new BinaryWriter(stream);
        writer.WriteObject(this);
        retVal = Tools.WriteFile(path, stream, false, allowConfigAccess);
        writer.Close();
    }
    else if (type == SaveType.Compressed)
    {
        // Serialize to binary first, then LZMA-compress with the mLZMA 4-byte prefix.
        BinaryWriter writer = new BinaryWriter(stream);
        writer.WriteObject(this);
        stream.Position = 0;
        MemoryStream comp = LZMA.Compress(stream, mLZMA);
        if (comp != null)
        {
            retVal = Tools.WriteFile(path, comp, false, allowConfigAccess);
            comp.Close();
        }
        else
        {
            // Compression failed: fall back to the uncompressed binary stream
            // (Read() detects the missing prefix, so both forms load).
            retVal = Tools.WriteFile(path, stream, false, allowConfigAccess);
        }
        writer.Close();
    }
    else
    {
        StreamWriter writer = new StreamWriter(stream);
        Write(writer, 0);
        retVal = Tools.WriteFile(path, stream, false, allowConfigAccess);
        writer.Close();
    }
    return(retVal);
}
/// <summary>
/// Read the node hierarchy from the specified buffer.
/// </summary>
/// <param name="bytes">Serialized node data; must be at least 4 bytes.</param>
/// <param name="type">Text, Binary, or Compressed (LZMA, optionally tagged with the mLZMA prefix).</param>
/// <returns>The parsed root node, or null for an empty/too-short buffer.</returns>
static public DataNode Read(byte[] bytes, SaveType type)
{
    if (bytes == null || bytes.Length < 4)
    {
        return null;
    }

    if (type == SaveType.Text)
    {
        // Plain-text form: parse via the stream-reader overload.
        MemoryStream textStream = new MemoryStream(bytes);
        StreamReader textReader = new StreamReader(textStream);
        DataNode parsed = Read(textReader);
        textReader.Close();
        return parsed;
    }

    if (type == SaveType.Compressed)
    {
        // When the buffer starts with the 4-byte LZMA tag, skip it before decompressing.
        bool hasPrefix = true;
        for (int i = 0; i < 4; ++i)
        {
            if (bytes[i] != mLZMA[i])
            {
                hasPrefix = false;
                break;
            }
        }
        bytes = LZMA.Decompress(bytes, hasPrefix ? 4 : 0);
    }

    // Binary form (also the tail of the Compressed path after decompression).
    MemoryStream stream = new MemoryStream(bytes);
    BinaryReader reader = new BinaryReader(stream);
    DataNode node = reader.ReadObject <DataNode>();
    reader.Close();
    return node;
}
/// <summary>
/// Compress an unencrypted FAES File.
/// </summary>
/// <param name="unencryptedFile">Unencrypted FAES File</param>
/// <returns>Path of the unencrypted, compressed file</returns>
public string CompressFAESFile(FAES_File unencryptedFile)
{
    if (_compressionMode == CompressionMode.LZMA)
    {
        Logging.Log("Compression Mode: LZMA", Severity.DEBUG);
        return new LZMA().CompressFAESFile(unencryptedFile);
    }
    if (_compressionMode == CompressionMode.TAR)
    {
        Logging.Log("Compression Mode: TAR", Severity.DEBUG);
        return new TAR().CompressFAESFile(unencryptedFile);
    }
    if (_compressionMode == CompressionMode.LGYZIP)
    {
        Logging.Log("Compression Mode: LEGACYZIP", Severity.DEBUG);
        return new LegacyZIP().CompressFAESFile(unencryptedFile);
    }

    // Default: ZIP, honoring the raw numeric level when one was supplied.
    Logging.Log("Compression Mode: ZIP", Severity.DEBUG);
    ZIP zip;
    if (_compressionLevelRaw < 0)
    {
        Logging.Log(String.Format("Compression Level: {0}", _compressionLevel), Severity.DEBUG);
        zip = new ZIP(_compressionLevel);
    }
    else
    {
        Logging.Log(String.Format("Compression Level: {0}", _compressionLevelRaw), Severity.DEBUG);
        zip = new ZIP(_compressionLevelRaw);
    }
    return zip.CompressFAESFile(unencryptedFile);
}
/// <summary>
/// Sends a command with a UTF-8 payload to the server, optionally LZMA-compressing
/// the payload when that actually makes it smaller.
/// Frame layout: [bool compressed][ushort command][int payloadLength][payload].
/// </summary>
public void EnviarComando(ushort comando, string datos)
{
    if (TcpClient.Connected == false)
    {
        return;
    }
    byte[] array = Encoding.UTF8.GetBytes(datos);
    bool comprimido = false;
    if (Comprimir)
    {
        // Only keep the compressed form when it is genuinely smaller than the original.
        byte[] arrayComprimido = LZMA.Compress(array);
        if (arrayComprimido.Length < array.Length)
        {
            comprimido = true;
            array = arrayComprimido;
        }
    }
    TcpClient.SendBufferSize = array.Length + 15;
    // NOTE(review): TcpClient was already dereferenced above, so this null check can no
    // longer protect anything — confirm whether it should move to the top of the method.
    if (TcpClient != null)
    {
        NetworkStream stream = TcpClient.GetStream();
        BinaryWriter escritor = new BinaryWriter(stream);
        escritor.Write(comprimido);
        escritor.Write((UInt16)comando);
        escritor.Write((Int32)array.Length);
        escritor.Write(array);
        escritor.Flush();
        System.Windows.Forms.Application.DoEvents();
    }
}
// Compresses a freshly written save file in the background:
//   1. normalizes the path (stripping "_compressed"), refreshes the decompressed cache,
//   2. stamps a watermark onto the save's PNG preview,
//   3. on a worker thread, writes [PNG][version][token][LZMA body] to a temp file,
//      round-trips the compression as a self-test, then swaps the result into place.
// Runtime strings (paths, log messages) are left exactly as authored.
private static void Save(string path, string token)
{
    if (!KK_SaveLoadCompression.Enable.Value || !KK_SaveLoadCompression.Notice.Value)
    {
        return;
    }
    byte[] pngData;
    string TempPath = Path.Combine(KK_SaveLoadCompression.CacheDirectory.CreateSubdirectory("Compressed").FullName, Path.GetFileName(path));
    // Use cleanedPath to strip every "_compressed" substring from the path.
    string cleanedPath = path;
    while (cleanedPath.Contains("_compressed"))
    {
        cleanedPath = cleanedPath.Replace("_compressed", "");
    }
    if (cleanedPath != path)
    {
        File.Copy(path, cleanedPath, true);
        Logger.LogDebug($"Clean Path: {cleanedPath}");
    }
    //Update Cache
    string decompressPath = Path.Combine(KK_SaveLoadCompression.CacheDirectory.CreateSubdirectory("Decompressed").FullName, Path.GetFileName(cleanedPath));
    File.Copy(path, decompressPath, true);
    using (FileStream fileStreamReader = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        using (BinaryReader binaryReader = new BinaryReader(fileStreamReader))
        {
            Logger.LogInfo("Start Compress");
            // Extract the PNG preview and stamp the watermark onto it.
            pngData = PngFile.LoadPngBytes(binaryReader);
            Texture2D png = new Texture2D(2, 2);
            png.LoadImage(pngData);
            Texture2D watermark = Extension.Extension.LoadDllResource($"KK_SaveLoadCompression.Resources.zip_watermark.png");
            // Watermark scale differs between studio scenes and chara/coordinate cards.
            float scaleTimes = (token == StudioToken) ? .14375f : .30423f;
            watermark = Extension.Extension.Scale(watermark, Convert.ToInt32(png.width * scaleTimes), Convert.ToInt32(png.width * scaleTimes));
            png = Extension.Extension.OverwriteTexture(
                png,
                watermark,
                0,
                png.height - watermark.height
                );
            //Logger.LogDebug($"Add Watermark: zip");
            pngData = png.EncodeToPNG();
        }
    }
    // Heavy lifting happens off the main thread.
    Thread newThread = new Thread(doMain);
    newThread.Start();

    void doMain()
    {
        bool successFlag = true;
        try
        {
            using (FileStream fileStreamReader = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
            {
                float startTime = Time.time;
                using (FileStream fileStreamWriter = new FileStream(TempPath, FileMode.Create, FileAccess.Write))
                {
                    using (BinaryWriter binaryWriter = new BinaryWriter(fileStreamWriter))
                    {
                        binaryWriter.Write(pngData);
                        // Version stamp depends on the save kind the token encodes.
                        switch (token)
                        {
                            case StudioToken:
                                //Studio
                                binaryWriter.Write(new Version(101, 0, 0, 0).ToString());
                                break;
                            case CoordinateToken:
                                //Coordinate
                                binaryWriter.Write(101);
                                break;
                            default:
                                //Chara
                                if (token.IndexOf(CharaToken) >= 0)
                                {
                                    binaryWriter.Write(101);
                                    break;
                                }
                                throw new Exception("Token not match.");
                        }
                        // Written so the save passes InvalidSceneFileProtection and DragAndDrop checks.
                        binaryWriter.Write(token);
                        using (MemoryStream msCompressed = new MemoryStream())
                        {
                            // Compress everything after the PNG preview.
                            PngFile.SkipPng(fileStreamReader);
                            long fileStreamPos = fileStreamReader.Position;
                            LZMA.Compress(fileStreamReader, msCompressed, LzmaSpeed.Fastest, KK_SaveLoadCompression.DictionarySize.Value,
                                delegate(long inSize, long _)
                                {
                                    KK_SaveLoadCompression.Progress = $"Compressing: {Convert.ToInt32(inSize * 100 / (fileStreamReader.Length - fileStreamPos))}%";
                                }
                            );
                            KK_SaveLoadCompression.Progress = "";
                            Logger.LogInfo("Start compression test...");
                            using (MemoryStream msDecompressed = new MemoryStream())
                            {
                                // Self-test: decompress and compare byte-for-byte (skippable).
                                if (!KK_SaveLoadCompression.SkipSaveCheck.Value)
                                {
                                    msCompressed.Seek(0, SeekOrigin.Begin);
                                    LZMA.Decompress(msCompressed, msDecompressed,
                                        delegate(long inSize, long _)
                                        {
                                            KK_SaveLoadCompression.Progress = $"Decompressing: {Convert.ToInt32(inSize * 100 / (fileStreamReader.Length - fileStreamPos))}%";
                                        }
                                    );
                                    KK_SaveLoadCompression.Progress = "";
                                    fileStreamReader.Seek(fileStreamPos, SeekOrigin.Begin);
                                    msDecompressed.Seek(0, SeekOrigin.Begin);
                                    for (int i = 0; i < msDecompressed.Length; i++)
                                    {
                                        KK_SaveLoadCompression.Progress = $"Comparing: {i * 100 / msDecompressed.Length}%";
                                        int aByte = fileStreamReader.ReadByte();
                                        int bByte = msDecompressed.ReadByte();
                                        if (aByte.CompareTo(bByte) != 0)
                                        {
                                            successFlag = false;
                                            break;
                                        }
                                    }
                                    KK_SaveLoadCompression.Progress = "";
                                }
                                if (successFlag)
                                {
                                    long newSize = msCompressed.Length + token.Length + pngData.Length;
                                    binaryWriter.Write(msCompressed.ToArray());
                                    LogLevel logLevel = KK_SaveLoadCompression.DisplayMessage.Value ? (LogLevel.Message | LogLevel.Info) : LogLevel.Info;
                                    Logger.LogInfo($"Compression test SUCCESS");
                                    Logger.Log(logLevel, $"Compression finish in {Math.Round(Time.time - startTime, 2)} seconds");
                                    Logger.Log(logLevel, $"Size compress from {fileStreamReader.Length} bytes to {newSize} bytes");
                                    Logger.Log(logLevel, $"Compress ratio: {Math.Round(Convert.ToDouble(fileStreamReader.Length) / newSize, 2)}, which means it is now {Math.Round(100 / (Convert.ToDouble(fileStreamReader.Length) / newSize), 2)}% big.");
                                }
                                else
                                {
                                    Logger.LogError($"Compression test FAILED");
                                }
                            }
                        }
                    }
                }
            }
            // Copy or delete files.
            if (successFlag)
            {
                string compressedPath = cleanedPath;
                if (!KK_SaveLoadCompression.DeleteTheOri.Value)
                {
                    compressedPath = cleanedPath.Substring(0, cleanedPath.Length - 4) + "_compressed.png";
                }
                File.Copy(TempPath, compressedPath, true);
                Logger.LogDebug($"Write to: {compressedPath}");
                // File.Delete() does not complete immediately, so "delete then immediately
                // rewrite the same path" is not allowed — hence this ordering.
                // If the paths match, the file was already overwritten above; otherwise delete here.
                if (path != compressedPath && path != cleanedPath)
                {
                    File.Delete(path);
                    Logger.LogDebug($"Delete Original File: {path}");
                }
            }
        }
        catch (Exception e)
        {
            if (e is IOException && successFlag)
            {
                // Overwriting can hit "IOException: Sharing violation on path" when the game
                // re-reads the file mid-write; this happens when compression finishes too fast.
                //Retry
                try
                {
                    if (File.Exists(TempPath))
                    {
                        if (KK_SaveLoadCompression.DeleteTheOri.Value)
                        {
                            File.Copy(TempPath, path, true);
                        }
                    }
                }
                catch (Exception)
                {
                    //Copy to a new name if failed twice
                    File.Copy(TempPath, path.Substring(0, path.Length - 4) + "_compressed2.png");
                    Logger.LogError("Overwrite was FAILED twice. Fallback to use the '_compressed2' path.");
                }
            }
            else
            {
                Logger.LogError($"An unknown error occurred. If your files are lost, please find them at %TEMP%/{KK_SaveLoadCompression.GUID}");
                throw;
            }
        }
        finally
        {
            // Always drop the temp file.
            if (File.Exists(TempPath))
            {
                File.Delete(TempPath);
            }
        }
    }
}
// Coroutine that copies each bundled resource from the streaming path to the persistent
// path, LZMA-decompresses it, and records CRC/size for the version file. Progress is
// reported through m_Progress; any failure sets m_Error and aborts the coroutine.
// (Runtime strings are user-facing Chinese messages and are left exactly as authored.)
public System.Collections.IEnumerator UnzipProcess(object obj)
{
    float step = 0.5f / ResManager.RES_NUM;
    for (int i = 0; i < ResManager.RES_NUM; ++i)
    {
        ResType rt = (ResType)i;
        string path = RuntimeInfo.GetResStreamingPath(rt);
        string destPath = RuntimeInfo.GetResPersistentPath(rt);
        string destPathTemp = destPath + ".temp";
        // Poll the async WWW load until completion.
        WWW www = new WWW(path);
        while (www.isDone == false)
        {
            yield return(new WaitForEndOfFrame());
        }
        if (www.bytes == null || www.bytes.Length == 0)
        {
            m_Error = "资源解压失败:" + path;
            LogMgr.Log("解压失败,未找到文件:" + www.error + ", Path:" + path);
            yield break;
        }
        try
        {
            // Write the downloaded bytes to a temp file, then decompress into destPath.
            FileInfo fi = new FileInfo(destPathTemp);
            if (fi.Exists)
            {
                fi.Delete();
            }
            FileStream fss = fi.Create();
            fss.Write(www.bytes, 0, www.bytes.Length);
            fss.Flush();
            fss.Close();
            LZMA.DecompressFile(destPathTemp, destPath);
            // NOTE(review): fi.Exists is cached from before fi.Create(), so this delete
            // likely never runs; the temp file survives until File.Delete below — confirm.
            if (fi.Exists)
            {
                fi.Delete();
            }
            m_Progress += step;
        }
        catch (System.Exception e)
        {
            m_Error = "资源解压失败:" + path;
            ReportException.Instance.AddException("Exception:解压文件失败: " + e.ToString());
            yield break;
        }
        yield return(new WaitForEndOfFrame());
        try
        {
            // NOTE(review): the CRC is computed from the compressed temp file, not the
            // decompressed destPath — verify this matches what the updater expects.
            m_ResVer[i].ResCrc = Crc.Crc32FromFile(destPathTemp);
            FileInfo ff = new FileInfo(destPath);
            m_ResVer[i].ResSize = (uint)ff.Length;
            m_Progress += step;
        }
        catch (System.Exception e)
        {
            m_Error = "计算CRC失败:" + path;
            ReportException.Instance.AddException("Exception:计算文件CRC失败: " + e.ToString());
            yield break;
        }
        File.Delete(destPathTemp);
        yield return(new WaitForEndOfFrame());
    }
    m_bHasVerFile = true;
    m_Progress = 1.0f;
    if (!SaveVersion())
    {
        ReportException.Instance.AddException("Exception:保存版本文件失败:");
    }
    yield break;
}
/// <summary>
/// Loads the precomputed vanilla texture map for the given game. Two files are read
/// from the app's TextureMap folder: an LZMA-compressed package-name table
/// (vanilla{game}.bin, "MD5T" header) and the LZMA-compressed texture map itself
/// (vanilla{game}Map.bin, 'CTMP' magic). Entries are keyed by texture CRC.
/// </summary>
/// <param name="game">Game whose vanilla texture map should be loaded.</param>
/// <returns>Dictionary mapping texture CRC to its map entry.</returns>
/// <exception cref="Exception">Thrown when a header/magic check or decompression fails.</exception>
public static Dictionary <uint, TextureMapEntry> LoadTextureMap(MEGame game)
{
    // Pass 1: read and decompress the vanilla package-name table.
    List <string> packageNames;
    using (var nameStream = File.OpenRead(Path.Combine(App.ExecFolder, @"TextureMap", $@"vanilla{game}.bin")))
    {
        if (nameStream.ReadStringASCII(4) != @"MD5T")
        {
            throw new Exception(@"Header of MD5 table doesn't match expected value!");
        }
        var expandedSize = nameStream.ReadInt32();
        var packedNames = nameStream.ReadToBuffer(nameStream.Length - nameStream.Position);
        var expandedNames = LZMA.Decompress(packedNames, (uint)expandedSize);
        if (expandedNames.Length != expandedSize)
        {
            throw new Exception(@"Vanilla database failed to decompress");
        }
        var nameTable = new MemoryStream(expandedNames);
        int nameCount = nameTable.ReadInt32();
        packageNames = new List <string>(nameCount);
        for (int idx = 0; idx < nameCount; idx++)
        {
            // Stored with forward slashes; normalize to backslashes with no leading separator.
            packageNames.Add(nameTable.ReadStringASCIINull().Replace('/', '\\').TrimStart('\\'));
        }
    }

    // Pass 2: read the precomputed texture map used to identify vanilla textures.
    var mapFile = Path.Combine(App.ExecFolder, @"TextureMap", $@"vanilla{game}Map.bin");
    using var mapStream = File.OpenRead(mapFile);
    if (mapStream.ReadInt32() != 0x504D5443) // 'CTMP'
    {
        throw new Exception(@"Invalid precomputed texture map! Wrong magic");
    }
    var mapExpandedSize = mapStream.ReadUInt32();
    byte[] packedMap = mapStream.ReadToBuffer(mapStream.ReadInt32());
    var mapData = new MemoryStream(LZMA.Decompress(packedMap, mapExpandedSize));
    mapData.Seek(8, SeekOrigin.Begin); // skip magic, ???
    var textureCount = mapData.ReadInt32();
    var crcToEntry = new Dictionary <uint, TextureMapEntry>(textureCount);
    for (int idx = 0; idx < textureCount; idx++)
    {
        var entry = TextureMapEntry.ReadTextureMapEntry(mapData, game, packageNames);
        crcToEntry[entry.CRC] = entry;
    }
    return crcToEntry;
}
/// <summary>
/// Sets up the LZMA stream encoder: sync object, sequential in/out stream helpers,
/// encoder properties, and the background buffer thread.
/// </summary>
/// <param name="encoderProps">Encoder properties; when null, library defaults from LzmaEncProps_Init are used.</param>
internal LzmaEncoder(LZMA.CLzmaEncProps encoderProps)
{
    mSyncObject = new object();
    mOutputHelper = new LZMA.CSeqOutStream(WriteOutputHelper);
    mInputHelper = new LZMA.CSeqInStream(ReadInputHelper);
    // Fall back to default-initialized properties when the caller supplied none.
    mEncoderProps = encoderProps ?? LZMA.CLzmaEncProps.LzmaEncProps_Init();
    StartThread("LZMA Stream Buffer Thread");
}
/// <summary>
/// Sets up the LZMA2 stream encoder: thread count, sync object, sequential in/out
/// stream helpers, encoder properties, and the background buffer thread.
/// </summary>
/// <param name="threadCount">Optional worker-thread count to use for encoding.</param>
/// <param name="encoderProps">Encoder properties; when null, a default-initialized instance is created.</param>
internal Lzma2Encoder(int? threadCount, LZMA.CLzma2EncProps encoderProps)
{
    mThreadCount = threadCount;
    mSyncObject = new object();
    mOutputHelper = new LZMA.CSeqOutStream(WriteOutputHelper);
    mInputHelper = new LZMA.CSeqInStream(ReadInputHelper);
    if (encoderProps == null)
    {
        // No caller-supplied settings: build a default-initialized property block.
        encoderProps = new LZMA.CLzma2EncProps();
        encoderProps.Lzma2EncProps_Init();
    }
    mEncoderProps = encoderProps;
    StartThread("LZMA 2 Stream Buffer Thread");
}
/// <summary>
/// Transparently decompresses an LZMA-compressed save/coordinate/chara file before
/// loading. If a decompressed copy already exists in the cache directory, redirects
/// <paramref name="path"/> to it; otherwise validates the file (PNG header + a
/// token-specific version check), decompresses the payload into the cache, and
/// points <paramref name="path"/> at the new file. On any failure the method
/// returns without changing <paramref name="path"/>.
/// </summary>
/// <param name="path">Path of the file to load; rewritten to the decompressed cache copy on success.</param>
/// <param name="token">File-type token (StudioToken / CoordinateToken / CharaToken) selecting the version check.</param>
private static void Load(ref string path, string token) {
    string n = Path.GetFileName(path);
    DirectoryInfo d = KK_SaveLoadCompression.CacheDirectory.CreateSubdirectory("Decompressed");
    string tmpPath = Path.Combine(d.FullName, n);
    // Cache hit: an already-decompressed copy exists, just redirect the caller to it.
    if (File.Exists(tmpPath)) {
        path = tmpPath;
        Logger.LogDebug("Load from cache: " + path);
        return;
    }
    using (FileStream fileStreamReader = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) {
        using (BinaryReader binaryReader = new BinaryReader(fileStreamReader)) {
            byte[] pngData;
            try {
                bool checkfail = false;
                // The file starts with an embedded PNG thumbnail; capture it for the output copy.
                pngData = PngFile.LoadPngBytes(binaryReader);
                // Version check differs per file type; a mismatch means this is not
                // a file this plugin compressed, so bail out silently.
                switch (token) {
                case StudioToken:
                    checkfail = !new Version(binaryReader.ReadString()).Equals(new Version(101, 0, 0, 0));
                    break;
                case CoordinateToken:
                case CharaToken:
                    checkfail = 101 != binaryReader.ReadInt32();
                    break;
                }
                if (checkfail) {
                    return;
                }
            } catch (Exception) {
                // A read error here most likely means this is not a valid save file.
                // Other validation plugins already exist, so hand the problem off to them.
                Logger.Log(LogLevel.Error | LogLevel.Message, "Corrupted file: " + path);
                return;
            }
            try {
                // Discard token string
                binaryReader.ReadString();
                Logger.LogDebug("Start Decompress...");
                // KK_Fix_CharacterListOptimizations matches files by name, so the cache
                // copy must keep the original filename.
                using (FileStream fileStreamWriter = new FileStream(tmpPath, FileMode.Create, FileAccess.Write)) {
                    using (BinaryWriter binaryWriter = new BinaryWriter(fileStreamWriter)) {
                        // Re-emit the PNG header, then stream-decompress the remainder,
                        // publishing percentage progress via the callback.
                        binaryWriter.Write(pngData);
                        long fileStreamPos = fileStreamReader.Position;
                        LZMA.Decompress(fileStreamReader, fileStreamWriter, delegate(long inSize, long _) {
                            KK_SaveLoadCompression.Progress = $"Decompressing: {Convert.ToInt32(inSize * 100 / (fileStreamReader.Length - fileStreamPos))}%";
                        }
                        );
                        KK_SaveLoadCompression.Progress = "";
                    }
                }
                path = tmpPath;
                Logger.LogDebug($"Decompression FINISH");
            } catch (Exception) {
                Logger.LogError($"Decompression FAILDED. The file was damaged during compression.");
                Logger.LogError($"Do not disable the byte comparison setting next time to avoid this.");
                return;
            }
        }
    }
}
/// <summary>
/// Creates an encoder node wrapping an asynchronous LZMA encoder built from the given settings.
/// </summary>
/// <param name="settings">Encoder settings forwarded to the underlying <c>LZMA.AsyncEncoder</c>.</param>
public LzmaEncoderNode(LZMA.EncoderSettings settings)
    => mEncoder = new LZMA.AsyncEncoder(settings);
/// <summary>
/// Decodes one symbol from the range decoder using the current PPM context model.
/// Returns the decoded byte value (0..255), -1 when the model walks outside the
/// heap bounds (corrupt data), or -2 when the decoded threshold is inconsistent
/// with the context's frequency sums. Statement order is significant: the model
/// state (minContext, foundState, charMask, binSumm, ...) is mutated as decoding
/// proceeds.
/// </summary>
/// <param name="decoder">Range decoder supplying compressed input bits.</param>
public int decodeChar(LZMA.RangeCoder.Decoder decoder)
{
    if (minContext.NumStats != 1)
    {
        // Multi-symbol context: walk the frequency table until the cumulative
        // frequency passes the decoded threshold.
        State s = tempState1.Initialize(Heap);
        s.Address = minContext.FreqData.GetStats();
        int i;
        int count, hiCnt;
        if ((count = (int) decoder.GetThreshold((uint) minContext.FreqData.SummFreq)) < (hiCnt = s.Freq))
        {
            // Most probable (first) symbol matched.
            byte symbol;
            decoder.Decode(0, (uint) s.Freq);
            symbol = (byte) s.Symbol;
            minContext.update1_0(this, s.Address);
            nextContext();
            return symbol;
        }
        prevSuccess = 0;
        i = minContext.NumStats - 1;
        do
        {
            s.IncrementAddress();
            if ((hiCnt += s.Freq) > count)
            {
                byte symbol;
                decoder.Decode((uint) (hiCnt - s.Freq), (uint) s.Freq);
                symbol = (byte) s.Symbol;
                minContext.update1(this, s.Address);
                nextContext();
                return symbol;
            }
        }
        while (--i > 0);
        // Threshold beyond total frequency -> inconsistent stream.
        if (count >= minContext.FreqData.SummFreq)
            return -2;
        // Escape: no symbol in this context matched. Mask out every symbol seen
        // here (charMask entry 0) so the suffix-context loop below skips them.
        hiBitsFlag = HB2Flag[foundState.Symbol];
        decoder.Decode((uint) hiCnt, (uint) (minContext.FreqData.SummFreq - hiCnt));
        for (i = 0; i < 256; i++)
            charMask[i] = -1;
        charMask[s.Symbol] = 0;
        i = minContext.NumStats - 1;
        do
        {
            s.DecrementAddress();
            charMask[s.Symbol] = 0;
        }
        while (--i > 0);
    }
    else
    {
        // Binary (single-symbol) context: decode a hit/escape bit against the
        // adaptive binary summ table.
        State rs = tempState1.Initialize(Heap);
        rs.Address = minContext.getOneState().Address;
        hiBitsFlag = getHB2Flag()[foundState.Symbol];
        int off1 = rs.Freq - 1;
        int off2 = minContext.getArrayIndex(this, rs);
        int bs = binSumm[off1][off2];
        if (decoder.DecodeBit((uint) bs, 14) == 0)
        {
            // Hit: adapt the probability upward and emit the symbol.
            byte symbol;
            binSumm[off1][off2] = (bs + INTERVAL - minContext.getMean(bs, PERIOD_BITS, 2)) & 0xFFFF;
            foundState.Address = rs.Address;
            symbol = (byte) rs.Symbol;
            rs.IncrementFreq((rs.Freq < 128) ? 1 : 0);
            prevSuccess = 1;
            incRunLength(1);
            nextContext();
            return symbol;
        }
        // Escape from the binary context: adapt downward, seed initEsc, and mask
        // the lone symbol before falling through to the suffix-context loop.
        bs = (bs - minContext.getMean(bs, PERIOD_BITS, 2)) & 0xFFFF;
        binSumm[off1][off2] = bs;
        initEsc = PPMContext.ExpEscape[Utility.URShift(bs, 10)];
        int i;
        for (i = 0; i < 256; i++)
            charMask[i] = -1;
        charMask[rs.Symbol] = 0;
        prevSuccess = 0;
    }
    // Escape path: climb suffix contexts until one offers unmasked symbols, then
    // decode against the masked frequency distribution.
    for (;;)
    {
        State s = tempState1.Initialize(Heap);
        int i;
        int freqSum, count, hiCnt;
        SEE2Context see;
        int num, numMasked = minContext.NumStats;
        do
        {
            orderFall++;
            minContext.Address = minContext.getSuffix();
            // Out-of-bounds context address -> corrupt input.
            if (minContext.Address <= subAlloc.PText || minContext.Address > subAlloc.HeapEnd)
                return -1;
        }
        while (minContext.NumStats == numMasked);
        // Sum frequencies of the not-yet-masked symbols (charMask is -1 for
        // allowed symbols, 0 for masked; `& k` keeps or drops the frequency).
        hiCnt = 0;
        s.Address = minContext.FreqData.GetStats();
        i = 0;
        num = minContext.NumStats - numMasked;
        do
        {
            int k = charMask[s.Symbol];
            hiCnt += s.Freq & k;
            minContext.ps[i] = s.Address;
            s.IncrementAddress();
            i -= k;
        }
        while (i != num);
        see = minContext.makeEscFreq(this, numMasked, out freqSum);
        freqSum += hiCnt;
        count = (int) decoder.GetThreshold((uint) freqSum);
        if (count < hiCnt)
        {
            // A concrete symbol was selected in this suffix context.
            byte symbol;
            State ps = tempState2.Initialize(Heap);
            for (hiCnt = 0, i = 0, ps.Address = minContext.ps[i]; (hiCnt += ps.Freq) <= count; i++, ps.Address = minContext.ps[i])
                ;
            s.Address = ps.Address;
            decoder.Decode((uint) (hiCnt - s.Freq), (uint) s.Freq);
            see.update();
            symbol = (byte) s.Symbol;
            minContext.update2(this, s.Address);
            updateModel();
            return symbol;
        }
        if (count >= freqSum)
            return -2;
        // Another escape: mask this context's symbols too and keep climbing.
        decoder.Decode((uint) hiCnt, (uint) (freqSum - hiCnt));
        see.Summ = see.Summ + freqSum;
        do
        {
            s.Address = minContext.ps[--i];
            charMask[s.Symbol] = 0;
        }
        while (i != 0);
    }
}
// Extract a LZMA-compressed file.
/// <summary>
/// Decompresses one LZMA-compressed archive entry to disk. Each entry starts with a
/// 13-byte header (5 bytes of LZMA props + 8 bytes of length, which this code skips
/// and instead trusts fileInfo.Length). Entries that extend beyond the current
/// section are reassembled by fetching the following sections until
/// CompressedLength - 13 payload bytes have been gathered.
/// </summary>
/// <param name="fileInfo">Entry metadata: lengths, section index, and output path.</param>
/// <param name="binaryReader">Reader positioned at the start of this entry's data.</param>
/// <param name="sectionUrl">URL of the current section; used to derive follow-up section URLs.</param>
private async Task HandleCompressedFile(SFileInfo fileInfo, BinaryReader binaryReader, string sectionUrl)
{
    // A multipart file can span multiple sections, so we have to keep track of
    // how much has been read.
    if (binaryReader.BaseStream.Position + fileInfo.CompressedLength > binaryReader.BaseStream.Length)
    {
        var bytesRead = 0;
        var maxBytesRead = fileInfo.CompressedLength - 13; // header is 13 bytes
        var curSection = fileInfo.Section;
        var props = binaryReader.ReadBytes(5);
        binaryReader.BaseStream.Seek(8, SeekOrigin.Current); // skip the 8-byte uncompressed-length field

        // Read fragments until we've got everything: first drain the current
        // section, then keep jumping to the next section until done.
        var bytes = new List <byte>();
        var readFromMaster = (int)(binaryReader.BaseStream.Length - binaryReader.BaseStream.Position);
        bytes.AddRange(binaryReader.ReadBytes(readFromMaster));
        bytesRead += readFromMaster;
        while (bytesRead < maxBytesRead)
        {
            // Derive the URL of the next section and fetch a reader for it.
            var newSectUrl = sectionUrl.Replace($"section{fileInfo.Section}", $"section{++curSection}");
            var newSectReader = await FetchSection(newSectUrl);
            // Read the lesser of "what this section holds" and "what's still missing".
            var bytesToRead = (int)Math.Min(newSectReader.BaseStream.Length, maxBytesRead - bytes.Count);
            bytes.AddRange(newSectReader.ReadBytes(bytesToRead));
            bytesRead += bytesToRead;
        }
        var destLen = new IntPtr(fileInfo.Length);
        var srcLen = new IntPtr(fileInfo.CompressedLength - 13);
        var inData = bytes.ToArray();
        bytes.Clear();
        var decompressedOutput = new byte[destLen.ToInt32()];
        LZMA.LzmaUncompress(decompressedOutput, ref destLen, inData, ref srcLen, props, _propsSizePtr);
        using (var outStream = new FileStream(fileInfo.FullPath, FileMode.Create))
        {
            outStream.Write(decompressedOutput, 0, decompressedOutput.Length);
        }
        decompressedOutput = null;
        props = null;
        inData = null;
    }
    else
    {
        // Entry is fully contained in the current section.
        var destLen = new IntPtr(fileInfo.Length);
        var srcLen = new IntPtr(fileInfo.CompressedLength - 13);
        var props = binaryReader.ReadBytes(5);
        binaryReader.BaseStream.Seek(8, SeekOrigin.Current); // skip the 8-byte uncompressed-length field
        // FIX: the 13-byte header has already been consumed above, so only
        // CompressedLength - 13 payload bytes remain. The previous code read
        // CompressedLength bytes here, over-advancing the stream 13 bytes into
        // the next entry's data (the multipart branch above already used the
        // correct arithmetic).
        var bytes = new byte[fileInfo.CompressedLength - 13];
        binaryReader.Read(bytes, 0, bytes.Length);
        var decompressedOutput = new byte[destLen.ToInt32()];
        LZMA.LzmaUncompress(decompressedOutput, ref destLen, bytes, ref srcLen, props, _propsSizePtr);
        using (var outStream = new FileStream(fileInfo.FullPath, FileMode.Create))
        {
            outStream.Write(decompressedOutput, 0, decompressedOutput.Length);
        }
        bytes = null;
        decompressedOutput = null;
        props = null;
    }
}
/// <summary>
/// Wraps the given low-level LZMA encoder settings.
/// </summary>
/// <param name="settings">Settings instance to store.</param>
public LzmaEncoderSettings(LZMA.EncoderSettings settings)
    => mSettings = settings;