/// <summary>
/// Verifies that a single file written by an <c>ArcXWriter</c> round-trips
/// through an <c>ArcXContainer</c> with byte-identical content.
/// </summary>
public void WriteTest()
{
    var writer = new ArcXWriter();
    writer.AddFile(new ArcXWriterFile("dir/testfilename", () => new MemoryStream(TestData)));

    using (MemoryStream ms = new MemoryStream())
    {
        writer.Write(ms, true);
        ms.Position = 0;

        ArcXContainer container = new ArcXContainer(ms);

        // Fix: the original called GetStream() two extra times (before and after
        // the using block) and never disposed those streams — a resource leak.
        using (Stream fs = container.Files.First().GetStream())
        {
            Assert.AreEqual(TestData.Length, fs.Length);

            byte[] buffer = new byte[TestData.Length];
            int total = 0;

            // Fix: Stream.Read may return fewer bytes than requested; the
            // original ignored its return value. Loop until the payload is
            // fully read so a short read cannot compare uninitialized bytes.
            while (total < TestData.Length)
            {
                int read = fs.Read(buffer, total, TestData.Length - total);
                if (read == 0)
                {
                    Assert.Fail("Unexpected end of stream.");
                }
                total += read;
            }

            for (int i = 0; i < TestData.Length; i++)
            {
                if (buffer[i] != TestData[i])
                {
                    Assert.Fail("Incorrect data read from file.");
                }
            }
        }

        // Preserve the original's implied check that a file stream can be
        // obtained again after a full read — but dispose it this time.
        using (Stream reopened = container.Files.First().GetStream())
        {
            Assert.AreEqual(TestData.Length, reopened.Length);
        }
    }
}
/// <summary>
/// Verifies how writer settings control chunking: a small target chunk size
/// yields one chunk per file, a large one packs both files into a single
/// chunk, and disabling chunking again yields one chunk per file.
/// </summary>
public void WriteMultipleChunkTest()
{
    // Writes two copies of TestData with the given settings and asserts the
    // resulting container exposes the expected number of chunks.
    void AssertChunkCount(ArcXWriterSettings settings, int expectedChunks)
    {
        var writer = new ArcXWriter(settings);
        writer.AddFile(new ArcXWriterFile("dir/testfilename", () => new MemoryStream(TestData)));
        writer.AddFile(new ArcXWriterFile("dir/testfilename2", () => new MemoryStream(TestData)));

        using (MemoryStream ms = new MemoryStream())
        {
            writer.Write(ms, true);
            ms.Position = 0;
            Assert.AreEqual(expectedChunks, new ArcXContainer(ms).Chunks.Count());
        }
    }

    AssertChunkCount(new ArcXWriterSettings { TargetChunkSize = 512 }, 2);
    AssertChunkCount(new ArcXWriterSettings { TargetChunkSize = 1024 }, 1);
    AssertChunkCount(new ArcXWriterSettings { ChunkingEnabled = false }, 2);
}
/// <summary>
/// Builds an ArcX archive from command-line input.
/// <c>additionalArgs[0]</c> is the output path; the remaining entries are
/// input files (existing archives are merged wholesale) or directories
/// (enumerated recursively).
/// </summary>
/// <param name="arguments">Parsed option flags: c:type, c:level, c:threads.</param>
/// <param name="additionalArgs">Positional arguments; index 0 must be the output path.</param>
static void Compress(Dictionary<string, string> arguments, List<string> additionalArgs)
{
    ArcXWriterSettings settings = new ArcXWriterSettings();

    // TryGetValue avoids the ContainsKey + indexer double lookup.
    if (arguments.TryGetValue("c:type", out string type))
    {
        // Unrecognized values fall through to Zstd, matching the original
        // default branch (moved to the conventional end-of-switch position).
        switch (type.ToLowerInvariant())
        {
            case "lz4":
                settings.CompressionType = CompressionType.LZ4;
                break;
            case "brotli":
                settings.CompressionType = CompressionType.Brotli;
                break;
            case "lzham":
                settings.CompressionType = CompressionType.LZHAM;
                break;
            case "lzma":
                settings.CompressionType = CompressionType.LZMA;
                break;
            case "zstd":
            default:
                settings.CompressionType = CompressionType.Zstd;
                break;
        }
    }

    if (arguments.TryGetValue("c:level", out string level))
    {
        settings.CompressionLevel = int.Parse(level);
    }

    if (arguments.TryGetValue("c:threads", out string threads))
    {
        settings.Threads = int.Parse(threads);
    }

    ArcXWriter writer = new ArcXWriter(settings);
    List<Tuple<string, string>> filesList = new List<Tuple<string, string>>();

    foreach (string arg in additionalArgs.Skip(1))
    {
        if (File.Exists(arg))
        {
            // Files that parse as existing archives are merged; anything else
            // is stored as a single entry under its bare file name.
            if (ArcReader.Read(arg, out List<ArcXWriterFile> arcFiles))
            {
                writer.Files.AddRange(arcFiles);
            }
            else
            {
                filesList.Add(new Tuple<string, string>(Path.GetFileName(arg), arg));
            }
        }
        else if (Directory.Exists(arg))
        {
            // NOTE(review): if arg lacks a trailing separator, the stored
            // relative names begin with one — confirm the container format
            // tolerates that before "fixing" it.
            filesList.AddRange(Directory.EnumerateFiles(arg, "*", SearchOption.AllDirectories)
                .Select(x => new Tuple<string, string>(x.Substring(arg.Length), x)));
        }
    }

    foreach (var file in filesList)
    {
        // The writer opens streams lazily via the factory delegate; the
        // per-iteration `file` local keeps each closure capture distinct.
        writer.AddFile(new ArcXWriterFile(file.Item1, () => File.OpenRead(file.Item2)));
    }

    // Fix: the original never disposed the output stream, so the file handle
    // leaked and buffered data could be lost if Write threw. The using block
    // guarantees flush-and-close on every path.
    using (FileStream output = File.Open(additionalArgs[0], FileMode.Create))
    {
        writer.Write(output);
    }
}