private static string Check(string fn)
{
    Console.WriteLine("Testing {0}", fn);
    var md5 = MD5.Create();
    var buffer = new byte[0x10000];

    using (var finput = File.OpenRead(fn + ".lz4"))
    using (var zinput = new LZ4Stream(finput, LZ4StreamMode.Decompress))
    {
        while (true)
        {
            var length = zinput.Read(buffer, 0, buffer.Length);
            if (length == 0)
                break;
            md5.TransformBlock(buffer, 0, length, buffer, 0);
        }

        md5.TransformFinalBlock(buffer, 0, 0);
        return Convert.ToBase64String(md5.Hash);
    }
}
public static HashSet<CKey> ReadCKeys(Stream stream)
{
    using (var reader = new BinaryReader(stream))
    {
        var zero = reader.ReadInt32();
        IEnumerable<CKey> ckeys;
        if (zero == 0)
        {
            using (var lz4Stream = new LZ4Stream(stream, LZ4StreamMode.Decompress))
            using (var lz4Reader = new BinaryReader(lz4Stream))
                ckeys = ReadBinaryCKeys(lz4Reader);
        }
        else
        {
            stream.Position = 0;
            using (var streamReader = new StreamReader(stream))
                ckeys = ReadTextCKeys(streamReader);
        }
        return new HashSet<CKey>(ckeys, CASCKeyComparer.Instance);
    }
}
public void Write(BinaryWriter writer)
{
    writer.Write(TestName);
    writer.Write(CurrentVersion);
    writer.Write(Frame);

    // This call returns the pixels without any extra stride
    var pixels = Image.PixelBuffer[0].GetPixels<byte>();

    writer.Write(Image.PixelBuffer[0].Width);
    writer.Write(Image.PixelBuffer[0].Height);
    writer.Write((int)Image.PixelBuffer[0].Format);
    writer.Write(pixels.Length);

    // Write image data
    var lz4Stream = new LZ4Stream(writer.BaseStream, CompressionMode.Compress, false, pixels.Length);
    lz4Stream.Write(pixels, 0, pixels.Length);
    lz4Stream.Flush();

    writer.Flush();
}
public async ValueTask<ArraySegment<byte>> DecompressAsync(ReadOnlyMemory<byte> compressedData)
{
    Guard.AgainstEmpty(compressedData, nameof(compressedData));

    using var uncompressedStream = new MemoryStream();
    using (var lz4Stream = LZ4Stream.Decode(compressedData.AsStream(), _decoderSettings, false))
    {
        await lz4Stream
            .CopyToAsync(uncompressedStream)
            .ConfigureAwait(false);
    }

    if (uncompressedStream.TryGetBuffer(out var buffer))
    {
        return buffer;
    }
    else
    {
        return uncompressedStream.ToArray();
    }
}
public static HashSet<ulong> ReadGUIDs(Stream stream)
{
    using (var reader = new BinaryReader(stream))
    {
        var zero = reader.ReadInt32();
        IEnumerable<ulong> guids;
        if (zero == 0)
        {
            using (var lz4Stream = new LZ4Stream(stream, LZ4StreamMode.Decompress))
            using (var lz4Reader = new BinaryReader(lz4Stream))
                guids = ReadBinaryGUIDs(lz4Reader);
        }
        else
        {
            stream.Position = 0;
            using (var streamReader = new StreamReader(stream))
                guids = ReadTextGUIDs(streamReader);
        }
        return new HashSet<ulong>(guids);
    }
}
public async ValueTask<ArraySegment<byte>> CompressAsync(ReadOnlyMemory<byte> inputData)
{
    Guard.AgainstEmpty(inputData, nameof(inputData));

    using var compressedStream = new MemoryStream();
    using (var lz4Stream = LZ4Stream.Encode(compressedStream, _encoderSettings, false))
    {
        await lz4Stream
            .WriteAsync(inputData)
            .ConfigureAwait(false);
    }

    if (compressedStream.TryGetBuffer(out var buffer))
    {
        return buffer;
    }
    else
    {
        return compressedStream.ToArray();
    }
}
public void InteractiveReadingReturnsBytesAsSoonAsTheyAreAvailable()
{
    var original = Tools.FindFile(".corpus/reymont");
    var encoded = Path.GetTempFileName();
    try
    {
        ReferenceLZ4.Encode("-1 -BD -B4", original, encoded);

        using (var input = LZ4Stream.Decode(File.OpenRead(encoded), Mem.M1))
        {
            var buffer = new byte[0x80000];
            Assert.Equal(5000, input.Read(buffer, 0, 5000));
            Assert.Equal(0x10000 - 5000, input.Read(buffer, 0, 0x10000));
        }
    }
    finally
    {
        File.Delete(encoded);
    }
}
public void TcpClient()
{
    var client = new TcpClient();
    client.Connect("127.0.0.1", 4444);
    Console.WriteLine("Connected...");

    using (var tcpStream = client.GetStream())
    using (var lz4Stream = new LZ4Stream(tcpStream, CompressionMode.Decompress))
    using (var reader = new BinaryReader(lz4Stream))
    {
        while (true)
        {
            var file = reader.ReadString();
            if (file.Length == 0)
                break;
            Console.WriteLine("client: {0}", file);
            var length = reader.ReadInt32();
            var bytes = reader.ReadBytes(length);
            Assert.AreEqual(length, bytes.Length);
        }
    }
}
public void Save(string fileName)
{
    try
    {
        using (FileStream fileStream = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None))
        {
            using (BinaryWriter binaryWriter = new BinaryWriter(fileStream))
            {
                binaryWriter.Write(this.Version);
                using (LZ4Stream lZ4Stream = new LZ4Stream(fileStream, LZ4StreamMode.Compress, LZ4StreamFlags.None, 1048576))
                {
                    Serializer.Serialize<WorldSerialization.WorldData>(lZ4Stream, this.world);
                }
            }
        }
        this.Checksum = this.Hash();
    }
    catch (Exception exception)
    {
        Debug.LogError(exception.Message);
    }
}
public void Save(string fileName)
{
    try
    {
        using (var fileStream = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None))
        {
            using (var binaryWriter = new BinaryWriter(fileStream))
            {
                binaryWriter.Write(Version);
                using (var compressionStream = new LZ4Stream(fileStream, LZ4StreamMode.Compress))
                {
                    Serializer.Serialize(compressionStream, world);
                }
            }
        }
    }
    catch (Exception e)
    {
        Debug.LogError(e.Message);
    }
}
// TODO: add TExpected
public static ImaginaryObject UnpackImaginaryObject(string path)
{
    Encoding encoding = Encoding.UTF8;

    using (var fileStream = new FileStream(path, FileMode.Open))
#if DEBUG
    // BinaryReader without decompression if the program is being debugged.
    using (var reader = new BinaryReader(fileStream, encoding))
#else
    using (LZ4DecoderStream decompressionStream = LZ4Stream.Decode(fileStream))
    using (BinaryReader reader = new BinaryReader(decompressionStream, encoding))
#endif
    {
        // Read the version of CrystalClear that this pack file was created in.
        var fileCreatedInVersion = new Version(reader.ReadString());

        // Is the pack file from an older version of CrystalClear?
        if (fileCreatedInVersion < CrystalClearInformation.CrystalClearVersion)
        {
            // The version that this file was created in is older than the current version.
            Output.ErrorLog(
                $"This file was created in an older version of the CrystalClear Engine. {fileCreatedInVersion} (file) < {CrystalClearInformation.CrystalClearVersion} (current)");
        }
        // Is the pack file from a newer version of CrystalClear?
        else if (fileCreatedInVersion > CrystalClearInformation.CrystalClearVersion)
        {
            // The version that this file was created in is newer than the current version.
            Output.ErrorLog(
                $"This file was created in a newer version of the CrystalClear Engine. {fileCreatedInVersion} (file) > {CrystalClearInformation.CrystalClearVersion} (current)");
        }

        return ImaginaryObject.ReadImaginaryObject(reader, out _);
    }
}
public async Task AsyncStreamProduceBinaryIdenticalOutput(string filename, int seed)
{
    filename = Tools.FindFile(filename);
    var source = File.ReadAllBytes(filename);

    using (var memoryA = new MemoryStream())
    using (var memoryB = new MemoryStream())
    {
        using (var streamA = LZ4Stream.Encode(memoryA, LZ4Level.L00_FAST, leaveOpen: true))
        using (var streamB = LZ4Stream.Encode(memoryB, LZ4Level.L00_FAST, leaveOpen: true))
        {
            var offset = 0;
            var random = new Random(seed);
            while (true)
            {
                var chunk = Math.Min(
                    (int)(random.NextExp(10) * 1024 * 1024) + 1,
                    source.Length - offset);
                if (chunk == 0)
                    break;

                streamA.Write(source, offset, chunk);
                await streamB.WriteAsync(source, offset, chunk);
                offset += chunk;
            }
        }

        var bytesA = memoryA.ToArray();
        var bytesB = memoryB.ToArray();
        Tools.SameBytes(bytesA, bytesB);
    }
}
public void LzFuzzTest()
{
    var rand = new Random();
    var recordCount = rand.Next(3, 20);

    using (var source = new MemoryStream())
    {
        FillStreamWithData(source, recordCount, rand);
        source.Seek(0, SeekOrigin.Begin);

        using (var un = new MemoryStream())
        {
            using (var unzip = new LZ4Stream(source, CompressionMode.Decompress, LZ4StreamFlags.IsolateInnerStream))
            {
                unzip.CopyTo(un);
                un.Seek(0, SeekOrigin.Begin);
            }

            using (var reader = new BinaryReader(un))
            {
                for (int i = 0; i < recordCount; i++)
                {
                    var data = reader.ReadInt32();
                    Console.WriteLine("<< {0} bytes", data);
                    using (var wrapper = new BoundedStream(un, data))
                    {
                        if (i % 3 == 1)
                        {
                            // Do nothing; the wrapper should consume the unread bytes on dispose.
                        }
                        else
                        {
                            wrapper.CopyTo(Stream.Null);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Retrieve a new <c>MemoryStream</c> object with the contents compressed (LZ4-encoded) from the
/// provided stream. The provided stream is optionally closed.
/// </summary>
/// <remarks>The new stream's position is set to the beginning of the stream when returned.</remarks>
/// <param name="inputStream">The stream whose contents are compressed.</param>
/// <param name="leaveStreamOpen">Whether to leave <paramref name="inputStream"/> open after compressing.</param>
/// <returns>A <c>MemoryStream</c> containing the compressed data.</returns>
public MemoryStream Compress(Stream inputStream, bool leaveStreamOpen = false)
{
    Guard.AgainstNullOrEmpty(inputStream, nameof(inputStream));

    if (inputStream.Position == inputStream.Length)
    {
        inputStream.Seek(0, SeekOrigin.Begin);
    }

    var compressedStream = new MemoryStream();
    using (var lz4Stream = LZ4Stream.Encode(compressedStream, _encoderSettings, true))
    {
        inputStream.CopyTo(lz4Stream);
    }

    if (!leaveStreamOpen)
    {
        inputStream.Close();
    }

    compressedStream.Seek(0, SeekOrigin.Begin);
    return compressedStream;
}
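// A brief usage sketch for Compress above. Hedged: the file names and the
// "compressor" instance are illustrative, not part of the original source.
//
//   var compressor = ...; // whatever type declares Compress with _encoderSettings set
//   using (var file = File.OpenRead("data.bin"))
//   using (var compressed = compressor.Compress(file, leaveStreamOpen: true))
//   using (var output = File.Create("data.bin.lz4"))
//   {
//       compressed.CopyTo(output);
//   }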
public static byte[] Compress(byte[] data)
{
    var settings = new LZ4EncoderSettings { ChainBlocks = false };
    // settings.BlockSize = K4os.Compression.LZ4.Internal.Mem.M1;

    using (var mem = new MemoryStream())
    {
        // The encoder must be disposed before reading the buffer; otherwise
        // the final block and the frame's end mark are not yet written.
        using (var source = LZ4Stream.Encode(mem, settings))
        {
            source.Write(data, 0, data.Length);
        }

        using (var newMem = new MemoryStream())
        using (var writer = new BinaryWriter(newMem))
        {
            writer.Write((uint)data.Length);
            writer.Write(mem.ToArray());
            writer.Write((uint)973407368);
            return newMem.ToArray();
        }
    }
}
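// A hedged sketch of the matching read path for the framing produced by
// Compress above (4-byte length prefix, LZ4 frame, 4-byte trailing magic).
// "Decompress" and the exception message are assumptions, not original code.
public static byte[] Decompress(byte[] packed)
{
    using (var mem = new MemoryStream(packed))
    using (var reader = new BinaryReader(mem))
    {
        var uncompressedLength = (int)reader.ReadUInt32();
        var result = new byte[uncompressedLength];

        // The frame decoder should stop at the LZ4 end mark, leaving the
        // trailing magic number unread.
        using (var lz4 = LZ4Stream.Decode(mem, leaveOpen: true))
        {
            var offset = 0;
            while (offset < result.Length)
            {
                var read = lz4.Read(result, offset, result.Length - offset);
                if (read == 0)
                    throw new EndOfStreamException("Truncated LZ4 payload.");
                offset += read;
            }
        }
        return result;
    }
}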
public void CopyTo()
{
    var tempFileName = Path.GetTempFileName();

    var builder = new StringBuilder();
    for (var i = 0; i < 1000; i++)
    {
        builder.AppendLine(Utilities.LoremIpsum);
    }
    var data = Encoding.UTF8.GetBytes(builder.ToString());

    using (var ostream = File.Create(tempFileName))
    using (var zstream = new LZ4Stream(ostream, CompressionMode.Compress))
    {
        zstream.Write(data, 0, data.Length);
    }

    using (var istream = File.OpenRead(tempFileName))
    using (var zstream = new LZ4Stream(istream, CompressionMode.Decompress))
    using (var ostream = File.Create(tempFileName + ".orig"))
    {
        zstream.CopyTo(ostream);
    }
}
private byte[] CompressData(byte[] dataBytes)
{
    var settingsByte = dataBytes[0];
    var compressedArrayWithSettingsByte = new byte[0];

    var dataBytesWithoutSettingsByte = new byte[dataBytes.Length - 1];
    Array.Copy(dataBytes, 1, dataBytesWithoutSettingsByte, 0, dataBytes.Length - 1);

    using (var compressedStream = new MemoryStream())
    {
        using (var lz4Stream = LZ4Stream.Encode(compressedStream))
        {
            using (var compressedWriter = new StreamWriter(lz4Stream))
            {
                compressedWriter.Write(Convert.ToBase64String(dataBytesWithoutSettingsByte));
            }
        }

        var compressedArray = compressedStream.ToArray();
        compressedArrayWithSettingsByte = new byte[compressedArray.Length + 1];
        compressedArrayWithSettingsByte[0] = settingsByte;
        Array.Copy(compressedArray, 0, compressedArrayWithSettingsByte, 1, compressedArray.Length);
    }

    return compressedArrayWithSettingsByte;
}
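// Hedged counterpart to CompressData above: a sketch that reverses the
// settings-byte + LZ4-framed Base64 layout. The name DecompressData and this
// implementation are assumptions, not part of the original code base.
private byte[] DecompressData(byte[] compressedBytes)
{
    var settingsByte = compressedBytes[0];

    using (var compressedStream = new MemoryStream(compressedBytes, 1, compressedBytes.Length - 1))
    using (var lz4Stream = LZ4Stream.Decode(compressedStream))
    using (var decompressedReader = new StreamReader(lz4Stream))
    {
        // The payload was stored as a Base64 string inside the LZ4 frame.
        var payload = Convert.FromBase64String(decompressedReader.ReadToEnd());

        var result = new byte[payload.Length + 1];
        result[0] = settingsByte;
        Array.Copy(payload, 0, result, 1, payload.Length);
        return result;
    }
}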
public void DecodeFromSlowStream()
{
    var original = Tools.FindFile(".corpus/reymont");
    var encoded = Path.GetTempFileName();
    try
    {
        ReferenceLZ4.Encode("-1 -BD -B4", original, encoded);

        // We need this to work even if the stream gives us only a single byte at a time
        using (var input = LZ4Stream.Decode(Tools.Slow(File.OpenRead(encoded)), Mem.M1))
        {
            var buffer = new byte[0x80000];
            Assert.Equal(5000, input.Read(buffer, 0, 5000));
            Assert.Equal(0x10000 - 5000, input.Read(buffer, 0, 0x10000));
        }
    }
    finally
    {
        File.Delete(encoded);
    }
}
public Lmdb.ChunkDto Compress(byte[] symmetricKey)
{
    var fileName = _stream.Name + ".lz4";
    _stream.Seek(0, SeekOrigin.Begin);

    using (var chunkFile = File.Create(fileName))
    {
        var flags = LZ4StreamFlags.HighCompression | LZ4StreamFlags.IsolateInnerStream;
        using (var aes = new AesCryptoServiceProvider())
        {
            aes.Key = symmetricKey;
            aes.Mode = CipherMode.CBC;
            aes.Padding = PaddingMode.PKCS7;
            aes.GenerateIV();

            var iv = aes.IV;
            chunkFile.Write(iv, 0, iv.Length);

            using (var enc = aes.CreateEncryptor())
            using (var crypto = new CryptoStream(chunkFile, enc, CryptoStreamMode.Write))
            using (var lz4 = new LZ4Stream(crypto, LZ4StreamMode.Compress, flags, 1048576 * 4))
            {
                Streams.Copy(_stream, lz4, (int)_pos);
            }
        }
    }

    var size = new FileInfo(fileName).Length;
    return Lmdb.NewChunkDto()
        .SetChunkFileName(fileName)
        .SetUncompressedByteSize(_pos)
        .SetChunkRecords(_records)
        .SetCompressedDiskSize((int)size)
        .SetChunkStartPos(_startPos);
}
public void Read(BinaryReader reader)
{
    TestName = reader.ReadString();
    CurrentVersion = reader.ReadString();
    Frame = reader.ReadString();

    // Read image header
    var width = reader.ReadInt32();
    var height = reader.ReadInt32();
    var format = (PixelFormat)reader.ReadInt32();
    var textureSize = reader.ReadInt32();

    // Read image data
    var imageData = new byte[textureSize];
    using (var lz4Stream = new LZ4Stream(reader.BaseStream, CompressionMode.Decompress, false, textureSize))
    {
        if (lz4Stream.Read(imageData, 0, textureSize) != textureSize)
        {
            throw new EndOfStreamException("Unexpected end of stream");
        }
    }

    var pinnedImageData = GCHandle.Alloc(imageData, GCHandleType.Pinned);
    var description = new ImageDescription
    {
        Dimension = TextureDimension.Texture2D,
        Width = width,
        Height = height,
        ArraySize = 1,
        Depth = 1,
        Format = format,
        MipLevels = 1,
    };
    Image = Image.New(description, pinnedImageData.AddrOfPinnedObject(), 0, pinnedImageData, false);
}
static void Main(string[] args)
{
    var filename = args[0];

    using (var fileStream = File.OpenRead(filename))
    using (var stream = new LZ4Stream(fileStream, LZ4StreamMode.Decompress))
    using (var reader = new StreamReader(stream))
    {
        var text = reader.ReadToEnd();
        dynamic replay = JsonConvert.DeserializeObject(text);
        var nodes = (JArray)replay;

        var header = nodes.First(t => t.Value<string>("__type") == "BBI.Game.Replay.ReplayHelpers+ReplayableGameSessionHeader");
        var frames = nodes.Where(t => t.Value<string>("__type") == "BBI.Game.Replay.ReplayableGameSession+FrameData");

        var players = header["SessionPlayers"].ToList();
        foreach (var player in players)
        {
            header["LocalPlayerID"] = player["PlayerID"];
            var origPlayerName = player["PlayerName"];
            player["PlayerName"] = JToken.FromObject(origPlayerName.Value<string>() + " (H)");

            var teams = players.GroupBy(p => p["TeamID"].Value<int>());
            var playersString = string.Join(" vs ", teams.Select(t => string.Join(", ", t.Select(p => formatUsername(p["PlayerName"].Value<string>())))));
            var newFilename = string.Format("{2:yyyy-MM-dd (HH-mm)} {0} - {1}.dokreplay",
                playersString,
                readableMapName(header["SceneName"].Value<string>()),
                header["SaveTime"].Value<DateTime>());

            saveReplay(replay, Path.Combine(Path.GetDirectoryName(filename), newFilename));
            player["PlayerName"] = origPlayerName;
        }
    }
}
public void LengthAndPositionInStream(string filename, string options, int chunkSize)
{
    var original = Tools.FindFile($".corpus/{filename}");
    var expectedLength = new FileInfo(original).Length;
    var expectedPosition = 0L;
    var encoded = Path.GetTempFileName();
    try
    {
        ReferenceLZ4.Encode(options, original, encoded);

        using (var stream = LZ4Stream.Decode(File.OpenRead(encoded)))
        {
            var random = new Random(0);
            Assert.Equal(expectedLength, stream.Length);

            var buffer = new byte[chunkSize];
            while (true)
            {
                var read = stream.Read(buffer, 0, random.Next(1, chunkSize));
                if (read == 0)
                    break;

                expectedPosition += read;
                Assert.Equal(expectedPosition, stream.Position);
            }

            Assert.Equal(expectedLength, stream.Position);
        }
    }
    finally
    {
        File.Delete(encoded);
    }
}
public void TcpServer(int port)
{
    var listener = new TcpListener(IPAddress.Any, port);
    listener.Start();
    try
    {
        Console.WriteLine("Waiting for client...");
        var client = listener.AcceptTcpClient();

        using (var tcpStream = client.GetStream())
        using (var lz4Stream = new LZ4Stream(tcpStream, CompressionMode.Compress, blockSize: 128 * 1024))
        using (var writer = new BinaryWriter(lz4Stream))
        {
            foreach (var file in Directory.GetFiles(Utilities.GetSilesiaCorpusFolder(), "*", SearchOption.AllDirectories))
            {
                Console.WriteLine("server: {0}", file);
                writer.Write(file);
                var bytes = File.ReadAllBytes(file);
                writer.Write(bytes.Length);
                writer.Write(bytes);
                Thread.Sleep(500); // pause to force client to wait
            }
            writer.Write(string.Empty);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("{0}: {1}", e.GetType().Name, e.Message);
    }
    finally
    {
        listener.Stop();
    }
}
/// <summary>
/// Decompresses <paramref name="input"/> to <paramref name="output"/> with progress reports.
/// </summary>
/// <param name="input">The <see cref="Stream"/> with the data to decompress.</param>
/// <param name="length">The length of the data in the <paramref name="input"/>.</param>
/// <param name="output">The <see cref="Stream"/> to decompress to.</param>
/// <param name="progressReport">Invoked at the interval given by <paramref name="progressInterval"/> with the number of bytes written so far.</param>
/// <param name="progressInterval">The interval, in milliseconds, at which to invoke <paramref name="progressReport"/>.</param>
public static void DecompressLZ4(Stream input, uint length, Stream output, Action<ulong> progressReport, long progressInterval = DefaultProgressInterval)
{
    int count;
    ulong written = 0;
    byte[] buffer = new byte[BufferSize];
    byte[] data = new byte[length];

    // Stream.Read may return fewer bytes than requested, so loop until the buffer is full.
    int offset = 0;
    while (offset < data.Length)
    {
        int read = input.Read(data, offset, data.Length - offset);
        if (read == 0)
            throw new EndOfStreamException("Unexpected end of input stream.");
        offset += read;
    }

    using (var ms = new MemoryStream(data, false))
    using (var lz4Stream = LZ4Stream.CreateDecompressor(ms, LZ4StreamMode.Read))
    {
        var sw = Stopwatch.StartNew();
        while ((count = lz4Stream.Read(buffer, 0, buffer.Length)) > 0)
        {
            output.Write(buffer, 0, count);
            written += (ulong)count;
            if (sw.ElapsedMilliseconds >= progressInterval)
            {
                progressReport?.Invoke(written);
                sw.Restart();
            }
        }
        sw.Stop();
    }

    progressReport?.Invoke(written);
}
static void TestLz4(string inputPath)
{
    string lz4Path = inputPath + ".lz4";
    string outPath = inputPath + ".out";

    using (Stream source = File.OpenRead(inputPath))
    using (Stream target = LZ4Stream.Encode(File.Create(lz4Path), LZ4Level.L09_HC))
    {
        source.CopyTo(target);
    }

    using (Stream source = LZ4Stream.Decode(File.OpenRead(lz4Path)))
    using (Stream target = File.Create(outPath))
    {
        source.CopyTo(target);
    }

    bool success = AreFilesIdentical(inputPath, outPath);
    if (success)
    {
        Console.WriteLine("LZ4 round-tripped file was identical to original file.");
    }
}
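// The AreFilesIdentical helper used above is referenced but not shown in the
// original snippet; a minimal byte-by-byte sketch (an assumption, not the
// original implementation) could look like this.
static bool AreFilesIdentical(string pathA, string pathB)
{
    // Compare lengths first to avoid reading both files when sizes differ.
    if (new FileInfo(pathA).Length != new FileInfo(pathB).Length)
        return false;

    var bytesA = File.ReadAllBytes(pathA);
    var bytesB = File.ReadAllBytes(pathB);
    return bytesA.AsSpan().SequenceEqual(bytesB);
}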
public void WritingFileByteByByteYieldsSameResults(string filename)
{
    var original = Tools.FindFile($".corpus/{filename}");
    var encoded = Path.GetTempFileName();
    var decoded = Path.GetTempFileName();
    try
    {
        using (var reader = File.OpenRead(original))
        using (var encoder = LZ4Stream.Encode(File.OpenWrite(encoded)))
        {
            var buffer = new byte[0x10000];
            while (true)
            {
                var read = reader.Read(buffer, 0, buffer.Length);
                if (read == 0)
                    break;

                for (var i = 0; i < read; i++)
                {
                    encoder.WriteByte(buffer[i]);
                }
            }
        }

        ReferenceLZ4.Decode(encoded, decoded);
        Tools.SameFiles(original, decoded);
    }
    finally
    {
        File.Delete(encoded);
        File.Delete(decoded);
    }
}
public void CopyTo()
{
    var tempFileName = Path.GetTempFileName();

    var builder = new StringBuilder();
    for (var i = 0; i < 1000; i++)
    {
        builder.AppendLine(Utilities.LoremIpsum);
    }
    var data = Encoding.UTF8.GetBytes(builder.ToString());

    using (var ostream = File.Create(tempFileName))
    using (var zstream = new LZ4Stream(ostream, CompressionMode.Compress))
    {
        zstream.Write(data, 0, data.Length);
    }

    using (var istream = File.OpenRead(tempFileName))
    using (var zstream = new LZ4Stream(istream, CompressionMode.Decompress, LZ4StreamFlags.InteractiveRead))
    using (var ostream = File.Create(tempFileName + ".orig"))
    {
        zstream.CopyTo(ostream);
    }
}
public static void PackImaginaryObjectToFile(string path, ImaginaryObject toStore)
{
    Encoding encoding = Encoding.UTF8;

    Utilities.CreateEmptyFile(path);

    using (var fileStream = new FileStream(path, FileMode.Create))
#if DEBUG
    using (var writer = new BinaryWriter(fileStream, encoding))
#else
    using (LZ4EncoderStream compressionStream = LZ4Stream.Encode(fileStream))
    using (BinaryWriter writer = new BinaryWriter(compressionStream, encoding))
#endif
    {
        // Write the current CrystalClear version to the file.
        writer.Write(CrystalClearInformation.CrystalClearVersion.ToString());

        // Write the constructor data.
        toStore.WriteThis(writer);

        // Save the file.
        writer.Flush();
    }
}
private Dictionary<string, FPakEntry> GetOldFiles(EPakLoader mode)
{
    var diff = new Dictionary<string, FPakEntry>();
    var ofd = new OpenFileDialog()
    {
        Title = Properties.Resources.SelectFile,
        InitialDirectory = Properties.Settings.Default.OutputPath + "\\Backups\\",
        Filter = Properties.Resources.FbkpFilter,
        Multiselect = false
    };

    if ((bool)ofd.ShowDialog())
    {
        string n = Path.GetFileName(ofd.FileName);
        StatusBarVm.statusBarViewModel.Set(string.Format(Properties.Resources.Analyzing, n), Properties.Resources.Processing);
        DebugHelper.WriteLine("{0} {1} {2} {3}", "[FModel]", "[PakMenuItemViewModel]", "[Loader]", $"Backup file is {n}");

        var oldFilesTemp = new Dictionary<string, FPakEntry>();
        using FileStream fileStream = new FileStream(ofd.FileName, FileMode.Open);
        BinaryReader checkReader = new BinaryReader(fileStream);
        bool isLz4 = checkReader.ReadUInt32() == 0x184D2204u;
        fileStream.Seek(0, SeekOrigin.Begin);

        var target = new MemoryStream();
        if (isLz4)
        {
            using LZ4DecoderStream compressionStream = LZ4Stream.Decode(fileStream);
            compressionStream.CopyTo(target);
        }
        else
        {
            fileStream.CopyTo(target);
        }

        using (target)
        {
            target.Position = 0;
            using BinaryReader reader = new BinaryReader(target);
            while (reader.BaseStream.Position < reader.BaseStream.Length)
            {
                // we must follow this order
                long offset = reader.ReadInt64();
                long size = reader.ReadInt64();
                long uncompressedSize = reader.ReadInt64();
                bool encrypted = reader.ReadBoolean();
                long structSize = reader.ReadInt32();
                string name = reader.ReadString();
                int compressionMethodIndex = reader.ReadInt32();

                // we only need name and uncompressedSize to compare
                FPakEntry entry = new FPakEntry("CatsWillDominateTheWorld.pak", name, offset, size, uncompressedSize, new byte[20], null, 0, (uint)compressionMethodIndex, 0);
                oldFilesTemp[entry.Name] = entry;
            }
        }

        var newFiles = new Dictionary<string, FPakEntry>();
        foreach (var fileReader in Globals.CachedPakFiles)
        {
            foreach (var files in fileReader.Value)
            {
                newFiles.Add(files.Key, files.Value);
            }
        }

        Paks.Merge(oldFilesTemp, out var oldFiles, string.Empty);

        switch (mode)
        {
            case EPakLoader.New:
                foreach (var kvp in newFiles)
                {
                    if (!oldFiles.TryGetValue(kvp.Key, out var entry))
                    {
                        diff.Add(kvp.Key, kvp.Value);
                    }
                }
                break;
            case EPakLoader.Modified:
                foreach (var kvp in newFiles)
                {
                    if (oldFiles.TryGetValue(kvp.Key, out var entry))
                    {
                        if (entry.UncompressedSize != kvp.Value.UncompressedSize)
                        {
                            diff.Add(kvp.Key, kvp.Value);
                        }
                    }
                }
                break;
            case EPakLoader.NewModified:
                foreach (var kvp in newFiles)
                {
                    if (oldFiles.TryGetValue(kvp.Key, out var entry))
                    {
                        if (entry.UncompressedSize != kvp.Value.UncompressedSize)
                        {
                            diff.Add(kvp.Key, kvp.Value);
                        }
                    }
                    else
                    {
                        diff.Add(kvp.Key, kvp.Value);
                    }
                }
                break;
        }

        var deleted = oldFiles
            .Where(kvp => !newFiles.TryGetValue(kvp.Key, out var _) && kvp.Key.StartsWith("/FortniteGame/Content/Athena/Items/Cosmetics/"))
            .ToDictionary(x => x.Key, x => x.Value);
        if (deleted.Count > 0)
        {
            FConsole.AppendText(Properties.Resources.RemovedRenamedCosmetics, FColors.Red, true);
            foreach (var kvp in deleted)
            {
                FConsole.AppendText($" - {kvp.Value.Name.Substring(1)}", FColors.LightGray, true);
            }
        }
    }

    return diff;
}
// ReSharper disable InconsistentNaming
private static void DoAction(Action<byte[], Stream> action, bool read)
{
    var provider = new BlockDataProvider(Utilities.GetSilesiaCorpusFolder());
    var r = new Random(0);

    Console.WriteLine("Architecture: {0}bit", IntPtr.Size * 8);
    Console.WriteLine("CodecName: {0}", LZ4Codec.CodecName);

    var fileName = Path.Combine(Path.GetTempPath(), "BlockCompressionStream.dat");
    using (var stream = new LZ4Stream(
        read ? File.OpenRead(fileName) : File.Create(fileName),
        read ? CompressionMode.Decompress : CompressionMode.Compress,
        true))
    {
        var total = 0;
        const long limit = TOTAL_SIZE;
        var last_pct = 0;
        while (total < limit)
        {
            var length = Utilities.RandomLength(r, CHUNK_SIZE);
            var block = provider.GetBytes(length);
            action(block, stream);
            total += block.Length;

            var pct = (int)((double)total * 100 / limit);
            if (pct > last_pct)
            {
                Console.WriteLine("{0}%...", pct);
                last_pct = pct;
            }
        }
    }
}
public void WriteData(Stream outputStream, byte[] data)
{
    using (var lz4Stream = new LZ4Stream(outputStream, LZ4StreamMode.Compress))
        lz4Stream.Write(data, 0, data.Length);
}
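// A hypothetical counterpart to WriteData above, sketched for symmetry with
// the same lz4net-style API; "ReadData" is not part of the original source.
public byte[] ReadData(Stream inputStream)
{
    using (var lz4Stream = new LZ4Stream(inputStream, LZ4StreamMode.Decompress))
    using (var buffer = new MemoryStream())
    {
        // Drain the decompressed bytes into memory.
        lz4Stream.CopyTo(buffer);
        return buffer.ToArray();
    }
}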
public void TcpServer()
{
    var listener = new TcpListener(IPAddress.Any, 4444);
    listener.Start();
    try
    {
        Console.WriteLine("Waiting for client...");
        var client = listener.AcceptTcpClient();

        using (var tcpStream = client.GetStream())
        using (var lz4Stream = new LZ4Stream(tcpStream, CompressionMode.Compress, blockSize: 128 * 1024))
        using (var writer = new BinaryWriter(lz4Stream))
        {
            foreach (var file in Directory.GetFiles(Utilities.GetSilesiaCorpusFolder(), "*", SearchOption.AllDirectories))
            {
                Console.WriteLine("server: {0}", file);
                writer.Write(file);
                var bytes = File.ReadAllBytes(file);
                writer.Write(bytes.Length);
                writer.Write(bytes);
                Thread.Sleep(500); // pause to force client to wait
            }
            writer.Write(string.Empty);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("{0}: {1}", e.GetType().Name, e.Message);
    }
    finally
    {
        listener.Stop();
    }
}
public static void CreateBundle(string bundleUrl, IOdbBackend backend, ObjectId[] objectIds, ISet<ObjectId> disableCompressionIds, Dictionary<string, ObjectId> indexMap, IList<string> dependencies, bool useIncrementalBundle)
{
    if (objectIds.Length == 0)
    {
        throw new InvalidOperationException("Nothing to pack.");
    }

    var objectsToIndex = new Dictionary<ObjectId, int>(objectIds.Length);
    var objects = new List<KeyValuePair<ObjectId, ObjectInfo>>();
    for (int i = 0; i < objectIds.Length; ++i)
    {
        objectsToIndex.Add(objectIds[i], objects.Count);
        objects.Add(new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], new ObjectInfo()));
    }

    var incrementalBundles = new List<ObjectId>();

    // If there is a .bundle, add incremental id before it
    var bundleExtensionLength = (bundleUrl.EndsWith(BundleExtension) ? BundleExtension.Length : 0);

    // Early exit if package didn't change (header-check only)
    if (VirtualFileSystem.FileExists(bundleUrl))
    {
        try
        {
            using (var packStream = VirtualFileSystem.OpenStream(bundleUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
            {
                var bundle = ReadBundleDescription(packStream);

                // If package didn't change since last time, early exit!
                if (ArrayExtensions.ArraysEqual(bundle.Dependencies, dependencies)
                    && ArrayExtensions.ArraysEqual(bundle.Assets.OrderBy(x => x.Key).ToList(), indexMap.OrderBy(x => x.Key).ToList())
                    && ArrayExtensions.ArraysEqual(bundle.Objects.Select(x => x.Key).OrderBy(x => x).ToList(), objectIds.OrderBy(x => x).ToList()))
                {
                    // Make sure all incremental bundles exist
                    // Also, if we don't want incremental bundles but we have some (or vice-versa), let's force a regeneration
                    if ((useIncrementalBundle == (bundle.IncrementalBundles.Count > 0))
                        && bundle.IncrementalBundles.Select(x => bundleUrl.Insert(bundleUrl.Length - bundleExtensionLength, "." + x)).All(x =>
                        {
                            if (!VirtualFileSystem.FileExists(x))
                            {
                                return false;
                            }
                            using (var incrementalStream = VirtualFileSystem.OpenStream(x, VirtualFileMode.Open, VirtualFileAccess.Read))
                                return ValidateHeader(incrementalStream);
                        }))
                    {
                        return;
                    }
                }
            }

            // Process existing incremental bundles one by one
            // Try to find if there is enough to reuse in each of them
            var filename = VirtualFileSystem.GetFileName(bundleUrl);
            var directory = VirtualFileSystem.GetParentFolder(bundleUrl);
            foreach (var incrementalBundleUrl in VirtualFileSystem.ListFiles(directory, filename.Insert(filename.Length - bundleExtensionLength, ".*"), VirtualSearchOption.TopDirectoryOnly).Result)
            {
                var incrementalIdString = incrementalBundleUrl.Substring(incrementalBundleUrl.Length - bundleExtensionLength - ObjectId.HashStringLength, ObjectId.HashStringLength);
                ObjectId incrementalId;
                if (!ObjectId.TryParse(incrementalIdString, out incrementalId))
                {
                    continue;
                }

                // If we don't want incremental bundles, delete old ones from previous build
                if (!useIncrementalBundle)
                {
                    VirtualFileSystem.FileDelete(incrementalBundleUrl);
                    continue;
                }

                long sizeNeededItems = 0;
                long sizeTotal = 0;
                BundleDescription incrementalBundle;
                try
                {
                    using (var packStream = VirtualFileSystem.OpenStream(incrementalBundleUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
                    {
                        incrementalBundle = ReadBundleDescription(packStream);
                    }

                    // Compute size of objects (needed ones and everything)
                    foreach (var @object in incrementalBundle.Objects)
                    {
                        var objectCompressedSize = @object.Value.EndOffset - @object.Value.StartOffset;

                        // TODO: Detect object that are stored without ObjectId being content hash: we need to check actual content hash is same in this case
                        if (objectsToIndex.ContainsKey(@object.Key))
                        {
                            sizeNeededItems += objectCompressedSize;
                        }
                        sizeTotal += objectCompressedSize;
                    }

                    // Check if we would reuse at least 50% of the incremental bundle, otherwise let's just get rid of it
                    var reuseRatio = (float)((double)sizeNeededItems / (double)sizeTotal);
                    if (reuseRatio < 0.5f)
                    {
                        VirtualFileSystem.FileDelete(incrementalBundleUrl);
                    }
                    else
                    {
                        // We will reuse this incremental bundle
                        // Let's add ObjectId entries
                        foreach (var @object in incrementalBundle.Objects)
                        {
                            int objectIndex;
                            if (objectsToIndex.TryGetValue(@object.Key, out objectIndex))
                            {
                                var objectInfo = @object.Value;
                                objectInfo.IncrementalBundleIndex = incrementalBundles.Count + 1;
                                objects[objectIndex] = new KeyValuePair<ObjectId, ObjectInfo>(@object.Key, objectInfo);
                            }
                        }

                        // Add this incremental bundle in the list
                        incrementalBundles.Add(incrementalId);
                    }
                }
                catch (Exception)
                {
                    // Could not read incremental bundle (format changed?)
                    // Let's delete it
                    VirtualFileSystem.FileDelete(incrementalBundleUrl);
                }
            }
        }
        catch (Exception)
        {
            // Could not read previous bundle (format changed?)
            // Let's just mute this error as new bundle will overwrite it anyway
        }
    }

    // Count objects which need to be saved
    var incrementalObjects = new List<KeyValuePair<ObjectId, ObjectInfo>>();
    if (useIncrementalBundle)
    {
        for (int i = 0; i < objectIds.Length; ++i)
        {
            // Skip if already part of an existing incremental package
            if (objects[i].Value.IncrementalBundleIndex > 0)
            {
                continue;
            }

            incrementalObjects.Add(new KeyValuePair<ObjectId, ObjectInfo>(objects[i].Key, new ObjectInfo()));
        }
    }

    // Create an incremental package
    var newIncrementalId = ObjectId.New();
    var incrementalBundleIndex = incrementalBundles.Count;
    if (useIncrementalBundle && incrementalObjects.Count > 0)
    {
        incrementalBundles.Add(newIncrementalId);
    }

    using (var packStream = VirtualFileSystem.OpenStream(bundleUrl, VirtualFileMode.Create, VirtualFileAccess.Write))
    {
        var header = new Header();
        header.MagicHeader = Header.MagicHeaderValid;

        var packBinaryWriter = new BinarySerializationWriter(packStream);
        packBinaryWriter.Write(header);
        // Write dependencies
        packBinaryWriter.Write(dependencies.ToList());
        // Write incremental bundles
        packBinaryWriter.Write(incrementalBundles.ToList());

        // Save location of object ids
        var packObjectIdPosition = packStream.Position;

        // Write empty object ids (reserve space, will be rewritten later)
        packBinaryWriter.Write(objects);

        // Write index
        packBinaryWriter.Write(indexMap.ToList());

        using (var incrementalStream = incrementalObjects.Count > 0 ? VirtualFileSystem.OpenStream(bundleUrl.Insert(bundleUrl.Length - bundleExtensionLength, "." + newIncrementalId), VirtualFileMode.Create, VirtualFileAccess.Write) : null)
        {
            var incrementalBinaryWriter = incrementalStream != null ? new BinarySerializationWriter(incrementalStream) : null;
            long incrementalObjectIdPosition = 0;
            if (incrementalStream != null)
            {
                incrementalBinaryWriter.Write(header);
                // Write dependencies
                incrementalBinaryWriter.Write(new List<string>());
                // Write incremental bundles
                incrementalBinaryWriter.Write(new List<ObjectId>());

                // Save location of object ids
                incrementalObjectIdPosition = incrementalStream.Position;

                // Write empty object ids (reserve space, will be rewritten later)
                incrementalBinaryWriter.Write(incrementalObjects);

                // Write index
                incrementalBinaryWriter.Write(new List<KeyValuePair<string, ObjectId>>());
            }

            var objectOutputStream = incrementalStream ?? packStream;
            int incrementalObjectIndex = 0;
            for (int i = 0; i < objectIds.Length; ++i)
            {
                // Skip if already part of an existing incremental package
                if (objects[i].Value.IncrementalBundleIndex > 0)
                {
                    continue;
                }

                using (var objectStream = backend.OpenStream(objectIds[i]))
                {
                    // Prepare object info
                    var objectInfo = new ObjectInfo { StartOffset = objectOutputStream.Position, SizeNotCompressed = objectStream.Length };

                    // re-order the file content so that it is not necessary to seek while reading the input stream (header/object/refs -> header/refs/object)
                    var inputStream = objectStream;
                    var originalStreamLength = objectStream.Length;

                    var streamReader = new BinarySerializationReader(inputStream);
                    var chunkHeader = ChunkHeader.Read(streamReader);
                    if (chunkHeader != null)
                    {
                        // create the reordered stream
                        var reorderedStream = new MemoryStream((int)originalStreamLength);

                        // copy the header
                        var streamWriter = new BinarySerializationWriter(reorderedStream);
                        chunkHeader.Write(streamWriter);

                        // copy the references
                        var newOffsetReferences = reorderedStream.Position;
                        inputStream.Position = chunkHeader.OffsetToReferences;
                        inputStream.CopyTo(reorderedStream);

                        // copy the object
                        var newOffsetObject = reorderedStream.Position;
                        inputStream.Position = chunkHeader.OffsetToObject;
                        inputStream.CopyTo(reorderedStream, chunkHeader.OffsetToReferences - chunkHeader.OffsetToObject);

                        // rewrite the chunk header with correct offsets
                        chunkHeader.OffsetToObject = (int)newOffsetObject;
                        chunkHeader.OffsetToReferences = (int)newOffsetReferences;
                        reorderedStream.Position = 0;
                        chunkHeader.Write(streamWriter);

                        // change the input stream to use reordered stream
                        inputStream = reorderedStream;
                        inputStream.Position = 0;
                    }

                    // compress the stream
                    if (!disableCompressionIds.Contains(objectIds[i]))
                    {
                        objectInfo.IsCompressed = true;

                        var lz4OutputStream = new LZ4Stream(objectOutputStream, CompressionMode.Compress);
                        inputStream.CopyTo(lz4OutputStream);
                        lz4OutputStream.Flush();
                    }
                    // copy the stream "as is"
                    else
                    {
                        // Write stream
                        inputStream.CopyTo(objectOutputStream);
                    }

                    // release the reordered created stream
                    if (chunkHeader != null)
                    {
                        inputStream.Dispose();
                    }

                    // Add updated object info
                    objectInfo.EndOffset = objectOutputStream.Position;
                    // Note: we add 1 because 0 is reserved for self; first incremental bundle starts at 1
                    objectInfo.IncrementalBundleIndex = objectOutputStream == incrementalStream ? incrementalBundleIndex + 1 : 0;
                    objects[i] = new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], objectInfo);

                    if (useIncrementalBundle)
                    {
                        // Also update incremental bundle object info
                        objectInfo.IncrementalBundleIndex = 0; // stored in same bundle
                        incrementalObjects[incrementalObjectIndex++] = new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], objectInfo);
                    }
                }
            }

            // First finish to write incremental package so that main one can't be valid on the HDD without the incremental one being too
            if (incrementalStream != null)
            {
                // Rewrite headers
                header.Size = incrementalStream.Length;
                incrementalStream.Position = 0;
                incrementalBinaryWriter.Write(header);

                // Rewrite object with updated offsets/size
                incrementalStream.Position = incrementalObjectIdPosition;
                incrementalBinaryWriter.Write(incrementalObjects);
            }
        }

        // Rewrite headers
        header.Size = packStream.Length;
        packStream.Position = 0;
        packBinaryWriter.Write(header);

        // Rewrite object with updated offsets/size
        packStream.Position = packObjectIdPosition;
        packBinaryWriter.Write(objects);
    }
}
public static void CreateBundle(string vfsUrl, IOdbBackend backend, ObjectId[] objectIds, ISet<ObjectId> disableCompressionIds, Dictionary<string, ObjectId> indexMap, IList<string> dependencies)
{
    if (objectIds.Length == 0)
    {
        throw new InvalidOperationException("Nothing to pack.");
    }

    // Early exit if package didn't change (header-check only)
    if (VirtualFileSystem.FileExists(vfsUrl))
    {
        try
        {
            using (var packStream = VirtualFileSystem.OpenStream(vfsUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
            {
                var bundle = ReadBundleDescription(packStream);

                // If package didn't change since last time, early exit!
                if (ArrayExtensions.ArraysEqual(bundle.Dependencies, dependencies)
                    && ArrayExtensions.ArraysEqual(bundle.Assets.OrderBy(x => x.Key).ToList(), indexMap.OrderBy(x => x.Key).ToList())
                    && ArrayExtensions.ArraysEqual(bundle.Objects.Select(x => x.Key).OrderBy(x => x).ToList(), objectIds.OrderBy(x => x).ToList()))
                {
                    return;
                }
            }
        }
        catch (Exception)
        {
            // Could not read previous bundle (format changed?)
            // Let's just mute this error as new bundle will overwrite it anyway
        }
    }

    using (var packStream = VirtualFileSystem.OpenStream(vfsUrl, VirtualFileMode.Create, VirtualFileAccess.Write))
    {
        var header = new Header();
        header.MagicHeader = Header.MagicHeaderValid;

        var binaryWriter = new BinarySerializationWriter(packStream);
        binaryWriter.Write(header);
        // Write dependencies
        binaryWriter.Write(dependencies.ToList());

        // Save location of object ids
        var objectIdPosition = packStream.Position;

        // Write empty object ids (reserve space, will be rewritten later)
        var objects = new List<KeyValuePair<ObjectId, ObjectInfo>>();
        for (int i = 0; i < objectIds.Length; ++i)
        {
            objects.Add(new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], new ObjectInfo()));
        }
        binaryWriter.Write(objects);
        objects.Clear();

        // Write index
        binaryWriter.Write(indexMap.ToList());

        for (int i = 0; i < objectIds.Length; ++i)
        {
            using (var objectStream = backend.OpenStream(objectIds[i]))
            {
                // Prepare object info
                var objectInfo = new ObjectInfo { StartOffset = packStream.Position, SizeNotCompressed = objectStream.Length };

                // re-order the file content so that it is not necessary to seek while reading the input stream (header/object/refs -> header/refs/object)
                var inputStream = objectStream;
                var originalStreamLength = objectStream.Length;

                var streamReader = new BinarySerializationReader(inputStream);
                var chunkHeader = ChunkHeader.Read(streamReader);
                if (chunkHeader != null)
                {
                    // create the reordered stream
                    var reorderedStream = new MemoryStream((int)originalStreamLength);

                    // copy the header
                    var streamWriter = new BinarySerializationWriter(reorderedStream);
                    chunkHeader.Write(streamWriter);

                    // copy the references
                    var newOffsetReferences = reorderedStream.Position;
                    inputStream.Position = chunkHeader.OffsetToReferences;
                    inputStream.CopyTo(reorderedStream);

                    // copy the object
                    var newOffsetObject = reorderedStream.Position;
                    inputStream.Position = chunkHeader.OffsetToObject;
                    inputStream.CopyTo(reorderedStream, chunkHeader.OffsetToReferences - chunkHeader.OffsetToObject);

                    // rewrite the chunk header with correct offsets
                    chunkHeader.OffsetToObject = (int)newOffsetObject;
                    chunkHeader.OffsetToReferences = (int)newOffsetReferences;
                    reorderedStream.Position = 0;
                    chunkHeader.Write(streamWriter);

                    // change the input stream to use reordered stream
                    inputStream = reorderedStream;
                    inputStream.Position = 0;
                }

                // compress the stream
                if (!disableCompressionIds.Contains(objectIds[i]))
                {
                    objectInfo.IsCompressed = true;

                    var lz4OutputStream = new LZ4Stream(packStream, CompressionMode.Compress);
                    inputStream.CopyTo(lz4OutputStream);
                    lz4OutputStream.Flush();
                }
                else // copy the stream "as is"
                {
                    // Write stream
                    inputStream.CopyTo(packStream);
                }

                // release the reordered created stream
                if (chunkHeader != null)
                {
                    inputStream.Dispose();
                }

                // Add updated object info
                objectInfo.EndOffset = packStream.Position;
                objects.Add(new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], objectInfo));
            }
        }

        // Rewrite header
        header.Size = packStream.Length;
        packStream.Position = 0;
        binaryWriter.Write(header);

        // Rewrite object locations
        packStream.Position = objectIdPosition;
        binaryWriter.Write(objects);
    }
}