/// <summary>
/// This is the "real" main. The class main() instantiates a tar object
/// for the application and then calls this method. Process the arguments
/// and perform the requested operation.
/// </summary>
/// <param name="argv">Raw command-line arguments; options are consumed by
/// ProcessArguments and the remaining entries are file specs used when creating.</param>
public void InstanceMain(string[] argv)
{
    TarArchive archive = null;
    int argIdx = this.ProcessArguments(argv);

    // Validate the archive path up front when a real file (not stdin/stdout "-") is used.
    if (this.archiveName != null && !this.archiveName.Equals("-")) {
        if (operation == Operation.Create) {
            // Creating: the target directory must already exist.
            string dirName = Path.GetDirectoryName(archiveName);
            if ((dirName.Length > 0) && !Directory.Exists(dirName)) {
                Console.Error.WriteLine("Directory for archive doesnt exist");
                return;
            }
        } else {
            // Listing/extracting: the archive file itself must exist.
            if (File.Exists(this.archiveName) == false) {
                Console.Error.WriteLine("File does not exist " + this.archiveName);
                return;
            }
        }
    }

    if (operation == Operation.Create) {            // WRITING
        // Default to stdout; switch to a file when an archive name was given.
        Stream outStream = Console.OpenStandardOutput();

        if (this.archiveName != null && !this.archiveName.Equals("-")) {
            outStream = File.Create(archiveName);
        }

        if (outStream != null) {
            // Wrap the raw stream in the requested compressor before handing it to tar.
            switch (this.compression) {
                case Compression.Compress:
                    outStream = new DeflaterOutputStream(outStream);
                    break;

                case Compression.Gzip:
                    outStream = new GZipOutputStream(outStream);
                    break;

                case Compression.Bzip2:
                    outStream = new BZip2OutputStream(outStream, 9);
                    break;
            }
            archive = TarArchive.CreateOutputTarArchive(outStream, this.blockingFactor);
        }
    } else {                                        // EXTRACTING OR LISTING
        // Default to stdin; switch to a file when an archive name was given.
        Stream inStream = Console.OpenStandardInput();

        if (this.archiveName != null && !this.archiveName.Equals("-")) {
            inStream = File.OpenRead(archiveName);
        }

        if (inStream != null) {
            // Mirror the compression choice on the read side.
            switch (this.compression) {
                case Compression.Compress:
                    inStream = new InflaterInputStream(inStream);
                    break;

                case Compression.Gzip:
                    inStream = new GZipInputStream(inStream);
                    break;

                case Compression.Bzip2:
                    inStream = new BZip2InputStream(inStream);
                    break;
            }
            archive = TarArchive.CreateInputTarArchive(inStream, this.blockingFactor);
        }
    }

    if (archive != null) {                          // SET ARCHIVE OPTIONS
        archive.SetKeepOldFiles(this.keepOldFiles);
        archive.AsciiTranslate = this.asciiTranslate;
        archive.SetUserInfo(this.userId, this.userName, this.groupId, this.groupName);
    }

    if (archive == null) {
        Console.Error.WriteLine("no processing due to errors");
    } else if (operation == Operation.Create) {     // WRITING
        if (verbose) {
            archive.ProgressMessageEvent += new ProgressMessageHandler(ShowTarProgressMessage);
        }

        // Every remaining argument is a file spec to add to the archive.
        for (; argIdx < argv.Length; ++argIdx) {
            string[] fileNames = GetFilesForSpec(argv[argIdx]);
            if (fileNames.Length > 0) {
                foreach (string name in fileNames) {
                    TarEntry entry = TarEntry.CreateEntryFromFile(name);
                    archive.WriteEntry(entry, true);
                }
            } else {
                // Fixed: was Console.Error.Write, which ran consecutive messages
                // together with no newline, unlike every other diagnostic here.
                Console.Error.WriteLine("No files for " + argv[argIdx]);
            }
        }
    } else if (operation == Operation.List) {       // LISTING
        archive.ProgressMessageEvent += new ProgressMessageHandler(ShowTarProgressMessage);
        archive.ListContents();
    } else {                                        // EXTRACTING
        string userDir = Environment.CurrentDirectory;

        if (verbose) {
            archive.ProgressMessageEvent += new ProgressMessageHandler(ShowTarProgressMessage);
        }

        if (userDir != null) {
            archive.ExtractContents(userDir);
        }
    }

    if (archive != null) {                          // CLOSE ARCHIVE
        archive.Close();
    }
}
/// <summary>
/// Initializes the wrapper so that all compressed output is written through a
/// <see cref="BZip2OutputStream"/> over the caller-supplied stream.
/// </summary>
/// <param name="inputStream">The underlying stream that receives the BZip2 output.</param>
public DotNetZipBZip2OutputStream(Stream inputStream)
{
    // Delegate all writes to a BZip2 compressor layered over the given stream.
    var compressor = new BZip2OutputStream(inputStream);
    zipStream = compressor;
}
/// <summary>
/// Replaces the current compression target with a new <see cref="BZip2OutputStream"/>
/// layered over <paramref name="baseStream"/>.
/// </summary>
/// <param name="baseStream">The new underlying stream to compress into.</param>
public override void SetBaseStream(Stream baseStream)
{
    // Rebuild the compressor around the new base stream.
    var compressor = new BZip2OutputStream(baseStream);
    zipStream = compressor;
}
// Rebuilds the archive image from the per-file buffers: extracts any file not yet
// loaded, re-packs every file, rebuilds the header table (file count followed by
// name hash + uncompressed size + compressed size per file), then assembles the
// final container. When the archive is stored "decompressed", the whole
// header+data payload is BZip2-compressed behind decompressed/compressed length
// prefixes; otherwise the payload is emitted as-is behind the length prefix.
// Always returns true; the rebuilt image is reloaded via LoadBuffer.
public bool RewriteArchive() {
    DataBuffer data_buffer = new DataBuffer(new byte[8192]);
    for (int i = 0; i < FileCount; i++) {
        if (FileBuffers[i] == null) {
            // Lazily materialize the file's buffer before re-packing it.
            ExtractFile(i);
        }
        PackFile(ref data_buffer, i, FileBuffers[i], !ArchiveDecompressed);
    }
    // Header table: 2-byte count, then 10 bytes per file (4 + 3 + 3).
    DataBuffer header = new DataBuffer(new byte[2 + FileCount * 10]);
    header.WriteShort(FileCount);
    for (int i = 0; i < FileCount; i++) {
        header.WriteInteger(FileNames[i]);
        header.Write3Bytes(UncompressedFileSizes[i]);
        header.Write3Bytes(CompressedFileSizes[i]);
        //pos += CompressedFileSizes[i];
    }
    if (ArchiveDecompressed) {
        DataBuffer d = new DataBuffer(new byte[data_buffer.Buffer.Length + header.Buffer.Length]);
        //d.Write3Bytes(data_buffer.Buffer.Length);
        //d.Write3Bytes(0);
        d.Write(header.Buffer, 0, header.Location);
        d.Write(data_buffer.Buffer, 0, data_buffer.Location);
        MemoryStream ms = new MemoryStream();
        BZip2OutputStream os = new BZip2OutputStream(ms, 1);
        os.Write(d.Buffer, 0, d.Location);
        os.Close();
        // GetBuffer() returns the over-allocated backing array; only the first
        // os.BytesWritten bytes are consumed below, so that is safe here.
        byte[] c = ms.GetBuffer();
        DataBuffer final = new DataBuffer(new byte[os.BytesWritten + 6]);
        final.Write3Bytes(d.Buffer.Length);
        final.Write3Bytes(os.BytesWritten);
        final.Write(c, 0, os.BytesWritten);
        MainBuffer = final.Buffer;
    } else {
        DataBuffer final = new DataBuffer(new byte[data_buffer.Buffer.Length + header.Buffer.Length + 100000]);
        final.Write3Bytes(data_buffer.Buffer.Length);
        // NOTE(review): ArchiveDecompressed is always false in this branch, so the
        // else-arm below always runs; the dead if-arm is preserved as found.
        if (ArchiveDecompressed) {
            final.Write3Bytes(0);
        } else {
            final.Write3Bytes(data_buffer.Buffer.Length);
        }
        final.Write(header.Buffer, 0, header.Location);
        final.Write(data_buffer.Buffer, 0, data_buffer.Location);
        MainBuffer = final.Buffer;
    }
    LoadBuffer(MainBuffer);
    return(true);
}
// Serializes every non-empty per-type resource dictionary into one payload
// (type full name, a 4-byte field that starts as 0xFFFFFFFF and is patched later,
// then each key followed by its manager-archived object), hashes the raw payload
// with SHA-256, BZip2-compresses it, and writes it to filePath behind a metadata
// header (version, name, author, ...), followed by the compressed payload length,
// the payload, the hash length and the raw hash bytes.
internal void SaveToFile(String filePath) {
    byte[] bytes;
    byte[] hash;
    using (MemoryStream mstr = new MemoryStream()) {
        using (BinaryWriter bstr = new BinaryWriter(mstr)) {
            foreach (Type type in myDictionaries.Keys) {
                Dictionary <String, object> dict = myDictionaries[type];
                if (dict.Count == 0) {
                    // Skip empty dictionaries entirely (no section is emitted).
                    continue;
                }
                RManager manager = Res.GetManager(type);
                bstr.Write(type.FullName);
                // Placeholder, overwritten below once the section has been written.
                long pos = mstr.Position;
                bstr.Write((uint)0xFFFFFFFF);
                foreach (String key in dict.Keys) {
                    bstr.Write(key);
                    manager.SaveToArchive(bstr, dict[key]);
                }
                // NOTE(review): this writes the absolute end-of-section stream
                // position, not the section's byte count — confirm the reader
                // treats the field as an offset rather than a length.
                uint len = (uint)(mstr.Position);
                mstr.Seek(pos, SeekOrigin.Begin);
                bstr.Write(len);
                mstr.Seek(0, SeekOrigin.End);
            }
        }
        bytes = mstr.ToArray();
        hash = new SHA256CryptoServiceProvider().ComputeHash(bytes);
    }
    // NOTE(review): ASCII-decoding a raw SHA-256 digest is lossy (bytes > 0x7F
    // become '?'), so myHash is not a faithful copy of `hash`; the faithful raw
    // bytes are still written to the file below.
    myHash = Encoding.ASCII.GetString(hash);
    using (MemoryStream mstr = new MemoryStream()) {
        using (BZip2OutputStream cstr = new BZip2OutputStream(mstr)) {
            cstr.Write(bytes, 0, bytes.Length);
        }
        bytes = mstr.ToArray();
    }
    using (FileStream fstr = new FileStream(filePath, FileMode.Create, FileAccess.Write)) {
        using (BinaryWriter bstr = new BinaryWriter(fstr)) {
            bstr.Write(ArchiverVersion);
            bstr.Write(Name);
            bstr.Write(Author);
            bstr.Write(Version);
            bstr.Write(AuthorWebsite);
            bstr.Write(AuthorEmail);
            bstr.Write(Description);
            bstr.Write((byte)Destination);
            bstr.Write(bytes.Length);
            bstr.Write(bytes);
            bstr.Write(hash.Length);
            bstr.Write(hash);
        }
    }
}
// Click handler: compresses the files collected in Form1.filesList into the
// archive format selected by the radio buttons (ZIP, tar.bz2, plain tar or tgz),
// writing to <savePath>\<fileName>.<ext>, appending the elapsed milliseconds to
// lblElapsed and reporting success or failure through (Slovenian) message boxes.
// NOTE(review): when encryption is requested but the two password fields differ,
// the ZIP branch silently creates nothing and falls through to the generic
// failure message box — the user is never told the passwords did not match.
// NOTE(review): tar-family entries are flattened with Path.GetFileName, so two
// input files with the same name collide in the archive.
private void BtnCompress_Click(object sender, EventArgs e) {
    var filesList = Form1.filesList;
    string savePath = txtSavePath.Text;
    string fileName = savePath + @"\" + txtFileName.Text;
    if (optZip.Checked == true) {
        if (chkEncrypt.Checked == true) {
            if (txtPass.Text == txtPass2.Text) {
                //do encrypt
                using (ZipFile zip = new ZipFile()) {
                    zip.Password = txtPass2.Text;
                    Stopwatch stopwatch = Stopwatch.StartNew();
                    foreach (string file in filesList) {
                        zip.AddFile(file, "");
                    }
                    zip.Save(fileName + ".zip");
                    stopwatch.Stop();
                    lblElapsed.Text += stopwatch.Elapsed.TotalMilliseconds.ToString();
                }
            }
        } else {
            //ustvari ZIP
            using (ZipFile zip = new ZipFile()) {
                Stopwatch stopwatch = Stopwatch.StartNew();
                foreach (string file in filesList) {
                    zip.AddFile(file, "");
                }
                zip.Save(fileName + ".zip");
                stopwatch.Stop();
                lblElapsed.Text += stopwatch.Elapsed.TotalMilliseconds.ToString();
            }
        }
        // Success is detected by the archive file existing on disk.
        if (File.Exists(fileName + ".zip")) {
            MessageBox.Show("Datoteka uspešno kreirana", "Uspeh!", MessageBoxButtons.OK, MessageBoxIcon.Information);
            this.Hide();
        } else {
            MessageBox.Show("Prišlo je do napake, poskusite ponovno.", "Napaka", MessageBoxButtons.RetryCancel, MessageBoxIcon.Error);
        }
    } else if (optBz.Checked) {
        // tar wrapped in BZip2 (stream chain: file <- bzip2 <- tar).
        using (Stream bzFile = File.Create(fileName + ".bz2"))
        using (Stream bzipStream = new BZip2OutputStream(bzFile))
        using (TarArchive tar = TarArchive.CreateOutputTarArchive(bzipStream)) {
            Stopwatch stopwatch = Stopwatch.StartNew();
            foreach (string file in filesList) {
                TarEntry tarEntry = TarEntry.CreateEntryFromFile(file);
                tarEntry.Name = Path.GetFileName(file);
                tar.WriteEntry(tarEntry, false);
            }
            stopwatch.Stop();
            lblElapsed.Text += stopwatch.Elapsed.TotalMilliseconds.ToString();
        }
        if (File.Exists(fileName + ".bz2")) {
            MessageBox.Show("Datoteka uspešno kreirana", "Uspeh!", MessageBoxButtons.OK, MessageBoxIcon.Information);
            this.Hide();
        } else {
            MessageBox.Show("Prišlo je do napake, poskusite ponovno.", "Napaka", MessageBoxButtons.RetryCancel, MessageBoxIcon.Error);
        }
    } else if (optTar.Checked) {
        // Plain uncompressed tar.
        using (Stream tarFile = File.Create(fileName + ".tar"))
        using (Stream tarStream = new TarOutputStream(tarFile))
        using (TarArchive tar = TarArchive.CreateOutputTarArchive(tarStream)) {
            Stopwatch stopwatch = Stopwatch.StartNew();
            foreach (string file in filesList) {
                TarEntry tarEntry = TarEntry.CreateEntryFromFile(file);
                tarEntry.Name = Path.GetFileName(file);
                tar.WriteEntry(tarEntry, false);
            }
            stopwatch.Stop();
            lblElapsed.Text += stopwatch.Elapsed.TotalMilliseconds.ToString();
        }
        if (File.Exists(fileName + ".tar")) {
            MessageBox.Show("Datoteka uspešno kreirana", "Uspeh!", MessageBoxButtons.OK, MessageBoxIcon.Information);
            this.Hide();
        } else {
            MessageBox.Show("Prišlo je do napake, poskusite ponovno.", "Napaka", MessageBoxButtons.RetryCancel, MessageBoxIcon.Error);
        }
    } else if (optTgz.Checked) {
        // tar wrapped in GZip (.tgz).
        using (Stream tgzFile = File.Create(fileName + ".tgz"))
        using (Stream tgzStream = new GZipOutputStream(tgzFile))
        using (TarArchive tgz = TarArchive.CreateOutputTarArchive(tgzStream)) {
            Stopwatch stopwatch = Stopwatch.StartNew();
            foreach (string file in filesList) {
                TarEntry tgzEntry = TarEntry.CreateEntryFromFile(file);
                tgzEntry.Name = Path.GetFileName(file);
                tgz.WriteEntry(tgzEntry, false);
            }
            stopwatch.Stop();
            lblElapsed.Text += stopwatch.Elapsed.TotalMilliseconds.ToString();
        }
        if (File.Exists(fileName + ".tgz")) {
            MessageBox.Show("Datoteka uspešno kreirana", "Uspeh!", MessageBoxButtons.OK, MessageBoxIcon.Information);
            this.Hide();
        } else {
            MessageBox.Show("Prišlo je do napake, poskusite ponovno.", "Napaka", MessageBoxButtons.RetryCancel, MessageBoxIcon.Error);
        }
    }
}
// Packs the given storage items into a ".tar.bz2" file at NewZipPath: first sums
// the total payload size (so progress can be reported as an overall 0-100
// percentage), then streams every file and folder through
// TarOutputStream -> BZip2OutputStream -> the newly created file.
// Throws UnauthorizedAccessException when the destination file cannot be created.
// NOTE(review): when TotalSize == 0 the destination file is created but left
// empty — confirm that is the intended behaviour for a group of empty items.
private static async Task CreateTarBzip2Async(IEnumerable <FileSystemStorageItemBase> SourceItemGroup, string NewZipPath, ProgressChangedEventHandler ProgressHandler = null)
{
    if (await FileSystemStorageItemBase.CreateAsync(NewZipPath, StorageItemTypes.File, CreateOption.GenerateUniqueName).ConfigureAwait(false) is FileSystemStorageFile NewFile)
    {
        ulong TotalSize = 0;
        ulong CurrentPosition = 0;
        // Pre-scan: total byte count drives the percentage math below.
        foreach (FileSystemStorageItemBase StorageItem in SourceItemGroup)
        {
            switch (StorageItem)
            {
                case FileSystemStorageFile File:
                {
                    TotalSize += File.SizeRaw;
                    break;
                }
                case FileSystemStorageFolder Folder:
                {
                    TotalSize += await Folder.GetFolderSizeAsync().ConfigureAwait(false);
                    break;
                }
            }
        }
        if (TotalSize > 0)
        {
            using (FileStream NewFileStream = await NewFile.GetFileStreamFromFileAsync(AccessMode.Exclusive).ConfigureAwait(false))
            using (BZip2OutputStream OutputBZip2Stream = new BZip2OutputStream(NewFileStream))
            using (TarOutputStream OutputTarStream = new TarOutputStream(OutputBZip2Stream, EncodingSetting))
            {
                // Each wrapper must not dispose the stream beneath it; the using
                // chain above owns each layer explicitly.
                OutputBZip2Stream.IsStreamOwner = false;
                OutputTarStream.IsStreamOwner = false;
                foreach (FileSystemStorageItemBase StorageItem in SourceItemGroup)
                {
                    switch (StorageItem)
                    {
                        case FileSystemStorageFile File:
                        {
                            using (FileStream FileStream = await File.GetFileStreamFromFileAsync(AccessMode.Read).ConfigureAwait(false))
                            {
                                TarEntry NewEntry = TarEntry.CreateTarEntry(File.Name);
                                NewEntry.ModTime = DateTime.Now;
                                NewEntry.Size = FileStream.Length;
                                OutputTarStream.PutNextEntry(NewEntry);
                                // Per-file percentage is scaled into the overall total.
                                await FileStream.CopyToAsync(OutputTarStream, ProgressHandler : (s, e) =>
                                {
                                    ProgressHandler?.Invoke(null, new ProgressChangedEventArgs(Convert.ToInt32((CurrentPosition + Convert.ToUInt64(e.ProgressPercentage / 100d * File.SizeRaw)) * 100d / TotalSize), null));
                                }).ConfigureAwait(false);
                            }
                            OutputTarStream.CloseEntry();
                            CurrentPosition += File.SizeRaw;
                            ProgressHandler?.Invoke(null, new ProgressChangedEventArgs(Convert.ToInt32(CurrentPosition * 100d / TotalSize), null));
                            break;
                        }
                        case FileSystemStorageFolder Folder:
                        {
                            ulong InnerFolderSize = 0;
                            // TarFolderCore reports cumulative bytes read for the folder.
                            await TarFolderCore(Folder, OutputTarStream, Folder.Name, (ByteRead) =>
                            {
                                InnerFolderSize = ByteRead;
                                ProgressHandler?.Invoke(null, new ProgressChangedEventArgs(Convert.ToInt32((CurrentPosition + ByteRead) * 100d / TotalSize), null));
                            }).ConfigureAwait(false);
                            CurrentPosition += InnerFolderSize;
                            ProgressHandler?.Invoke(null, new ProgressChangedEventArgs(Convert.ToInt32(CurrentPosition * 100d / TotalSize), null));
                            break;
                        }
                    }
                }
                await OutputTarStream.FlushAsync().ConfigureAwait(false);
            }
        }
    }
    else
    {
        throw new UnauthorizedAccessException();
    }
}
// BSDIFF40 patch generation (Colin Percival's bsdiff algorithm): builds a suffix
// array over the old file, scans the new file for approximate matches, and emits
// three BZip2-compressed sections after a 32-byte header — the control block
// (add/copy/seek triples), the diff block (bytewise deltas for matched regions)
// and the extra block (literal bytes with no match). The block lengths are
// patched back into the header once compression finishes.
/// <summary>
/// Clean out the files that need to be updated and generate the update package.
/// </summary>
/// <param name="oldfilePath">Old version file path.</param>
/// <param name="newfilePath">New version file path</param>
/// <param name="patchPath">Patch file generation path.</param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public async Task Clean(string oldfilePath, string newfilePath, string patchPath)
{
    _oldfilePath = oldfilePath;
    _newfilePath = newfilePath;
    _patchPath = patchPath;
    ValidationParameters();
    try
    {
        // The diff is CPU-bound, so it runs on the thread pool.
        await Task.Run(() =>
        {
            using (FileStream output = new FileStream(patchPath, FileMode.Create))
            {
                var oldBytes = File.ReadAllBytes(_oldfilePath);
                var newBytes = File.ReadAllBytes(_newfilePath);
                /* Header is
                 * 0   8  "BSDIFF40"
                 * 8   8  length of bzip2ed ctrl block
                 * 16  8  length of bzip2ed diff block
                 * 24  8  length of new file */
                /* File is
                 * 0   32 Header
                 * 32  ?? Bzip2ed ctrl block
                 * ??  ?? Bzip2ed diff block
                 * ??  ?? Bzip2ed extra block */
                byte[] header = new byte[c_headerSize];
                WriteInt64(c_fileSignature, header, 0); // "BSDIFF40"
                WriteInt64(0, header, 8);
                WriteInt64(0, header, 16);
                WriteInt64(newBytes.Length, header, 24);
                long startPosition = output.Position;
                output.Write(header, 0, header.Length);
                int[] I = SuffixSort(oldBytes);
                // db = diff bytes (deltas), eb = extra bytes (literals).
                byte[] db = new byte[newBytes.Length];
                byte[] eb = new byte[newBytes.Length];
                int dblen = 0;
                int eblen = 0;
                using (var bz2Stream = new BZip2OutputStream(output) { IsStreamOwner = false })
                {
                    // compute the differences, writing ctrl as we go
                    int scan = 0;
                    int pos = 0;
                    int len = 0;
                    int lastscan = 0;
                    int lastpos = 0;
                    int lastoffset = 0;
                    while (scan < newBytes.Length)
                    {
                        int oldscore = 0;
                        // Advance until the suffix-array match is clearly better
                        // than extending the previous alignment.
                        for (int scsc = scan += len; scan < newBytes.Length; scan++)
                        {
                            len = Search(I, oldBytes, newBytes, scan, 0, oldBytes.Length, out pos);
                            for (; scsc < scan + len; scsc++)
                            {
                                if ((scsc + lastoffset < oldBytes.Length) && (oldBytes[scsc + lastoffset] == newBytes[scsc]))
                                {
                                    oldscore++;
                                }
                            }
                            if ((len == oldscore && len != 0) || (len > oldscore + 8))
                            {
                                break;
                            }
                            if ((scan + lastoffset < oldBytes.Length) && (oldBytes[scan + lastoffset] == newBytes[scan]))
                            {
                                oldscore--;
                            }
                        }
                        if (len != oldscore || scan == newBytes.Length)
                        {
                            // Extend the previous match forward (lenf)...
                            int s = 0;
                            int sf = 0;
                            int lenf = 0;
                            for (int i = 0; (lastscan + i < scan) && (lastpos + i < oldBytes.Length);)
                            {
                                if (oldBytes[lastpos + i] == newBytes[lastscan + i])
                                {
                                    s++;
                                }
                                i++;
                                if (s * 2 - i > sf * 2 - lenf)
                                {
                                    sf = s;
                                    lenf = i;
                                }
                            }
                            // ...and the new match backward (lenb).
                            int lenb = 0;
                            if (scan < newBytes.Length)
                            {
                                s = 0;
                                int sb = 0;
                                for (int i = 1; (scan >= lastscan + i) && (pos >= i); i++)
                                {
                                    if (oldBytes[pos - i] == newBytes[scan - i])
                                    {
                                        s++;
                                    }
                                    if (s * 2 - i > sb * 2 - lenb)
                                    {
                                        sb = s;
                                        lenb = i;
                                    }
                                }
                            }
                            // Resolve any overlap between the two extensions.
                            if (lastscan + lenf > scan - lenb)
                            {
                                int overlap = (lastscan + lenf) - (scan - lenb);
                                s = 0;
                                int ss = 0;
                                int lens = 0;
                                for (int i = 0; i < overlap; i++)
                                {
                                    if (newBytes[lastscan + lenf - overlap + i] == oldBytes[lastpos + lenf - overlap + i])
                                    {
                                        s++;
                                    }
                                    if (newBytes[scan - lenb + i] == oldBytes[pos - lenb + i])
                                    {
                                        s--;
                                    }
                                    if (s > ss)
                                    {
                                        ss = s;
                                        lens = i + 1;
                                    }
                                }
                                lenf += lens - overlap;
                                lenb -= lens;
                            }
                            for (int i = 0; i < lenf; i++)
                            {
                                db[dblen + i] = (byte)(newBytes[lastscan + i] - oldBytes[lastpos + i]);
                            }
                            for (int i = 0; i < (scan - lenb) - (lastscan + lenf); i++)
                            {
                                eb[eblen + i] = newBytes[lastscan + lenf + i];
                            }
                            dblen += lenf;
                            eblen += (scan - lenb) - (lastscan + lenf);
                            // Control triple: (diff length, extra length, old-file seek).
                            byte[] buf = new byte[8];
                            WriteInt64(lenf, buf, 0);
                            bz2Stream.Write(buf, 0, 8);
                            WriteInt64((scan - lenb) - (lastscan + lenf), buf, 0);
                            bz2Stream.Write(buf, 0, 8);
                            WriteInt64((pos - lenb) - (lastpos + lenf), buf, 0);
                            bz2Stream.Write(buf, 0, 8);
                            lastscan = scan - lenb;
                            lastpos = pos - lenb;
                            lastoffset = pos - scan;
                        }
                    }
                }
                // compute size of compressed ctrl data
                long controlEndPosition = output.Position;
                WriteInt64(controlEndPosition - startPosition - c_headerSize, header, 8);
                // write compressed diff data
                using (var bz2Stream = new BZip2OutputStream(output) { IsStreamOwner = false })
                {
                    bz2Stream.Write(db, 0, dblen);
                }
                // compute size of compressed diff data
                long diffEndPosition = output.Position;
                WriteInt64(diffEndPosition - controlEndPosition, header, 16);
                // write compressed extra data
                using (var bz2Stream = new BZip2OutputStream(output) { IsStreamOwner = false })
                {
                    bz2Stream.Write(eb, 0, eblen);
                }
                // seek to the beginning, write the header, then seek back to end
                long endPosition = output.Position;
                output.Position = startPosition;
                output.Write(header, 0, header.Length);
                output.Position = endPosition;
            }
        });
    }
    catch (Exception ex)
    {
        // NOTE(review): this wraps ex.Message but passes ex.InnerException (not ex)
        // as the inner exception, discarding the immediate failure's stack trace —
        // confirm whether `ex` was intended here.
        throw new Exception($"Clean error : { ex.Message } !", ex.InnerException);
    }
}
// Packs `folder` into `targetFile`, choosing the container from the file
// extension: .tar / .tar.gz / .tar.bz2 share the tar path (the gz/bz2 cases wrap
// the output stream and fall through via `goto case`), .zip uses ZipOutputStream,
// and anything else logs "Unsupported file format". Tar entries carry Unix
// permission bits (real ones via Mono.Unix off Windows, synthesized read bits
// plus conditional write on Windows).
// NOTE(review): `tf` is computed but never used below — looks like leftover code.
// NOTE(review): the zip copy buffer is 8092 bytes; 8192 was probably intended.
public static void CreateArchive(ProgressMonitor mon, string folder, string targetFile)
{
    string tf = Path.GetFileNameWithoutExtension(targetFile);
    // Strip a second extension for names like "foo.tar.gz".
    if (tf.EndsWith(".tar"))
    {
        tf = Path.GetFileNameWithoutExtension(tf);
    }
    if (File.Exists(targetFile))
    {
        File.Delete(targetFile);
    }
    using (Stream os = File.Create(targetFile))
    {
        Stream outStream = os;
        // Create the zip file
        switch (GetArchiveExtension(targetFile))
        {
            case ".tar.gz":
                outStream = new GZipOutputStream(outStream);
                goto case ".tar";
            case ".tar.bz2":
                outStream = new BZip2OutputStream(outStream, 9);
                goto case ".tar";
            case ".tar":
                TarArchive archive = TarArchive.CreateOutputTarArchive(outStream);
                archive.SetAsciiTranslation(false);
                archive.RootPath = folder;
                archive.ProgressMessageEvent += delegate(TarArchive ac, TarEntry e, string message)
                {
                    if (message != null)
                    {
                        mon.Log.WriteLine(message);
                    }
                };
                foreach (FilePath f in GetFilesRec(new DirectoryInfo(folder)))
                {
                    TarEntry entry = TarEntry.CreateEntryFromFile(f);
                    entry.Name = f.ToRelative(folder);
                    if (!Platform.IsWindows)
                    {
                        // Preserve the real Unix mode bits.
                        UnixFileInfo fi = new UnixFileInfo(f);
                        entry.TarHeader.Mode = (int)fi.Protection;
                    }
                    else
                    {
                        // Windows: normalize separators and synthesize a sane mode.
                        entry.Name = entry.Name.Replace('\\', '/');
                        FilePermissions p = FilePermissions.S_IFREG | FilePermissions.S_IROTH | FilePermissions.S_IRGRP | FilePermissions.S_IRUSR;
                        if (!new FileInfo(f).IsReadOnly)
                        {
                            p |= FilePermissions.S_IWUSR;
                        }
                        entry.TarHeader.Mode = (int)p;
                    }
                    archive.WriteEntry(entry, false);
                }
                // HACK: GNU tar expects to find a double zero record at the end of the archive. TarArchive only emits one.
                // This hack generates the second zero block.
                FieldInfo tarOutField = typeof(TarArchive).GetField("tarOut", BindingFlags.Instance | BindingFlags.NonPublic);
                if (tarOutField != null)
                {
                    // Silently degrades to a single zero record if the private
                    // field name changes in a future SharpZipLib version.
                    TarOutputStream tarOut = (TarOutputStream)tarOutField.GetValue(archive);
                    tarOut.Finish();
                }
                archive.CloseArchive();
                break;
            case ".zip":
                ZipOutputStream zs = new ZipOutputStream(outStream);
                zs.SetLevel(5);
                byte[] buffer = new byte [8092];
                foreach (FilePath f in GetFilesRec(new DirectoryInfo(folder)))
                {
                    string name = f.ToRelative(folder);
                    if (Platform.IsWindows)
                    {
                        name = name.Replace('\\', '/');
                    }
                    ZipEntry infoEntry = new ZipEntry(name);
                    zs.PutNextEntry(infoEntry);
                    using (Stream s = File.OpenRead(f))
                    {
                        int nr;
                        while ((nr = s.Read(buffer, 0, buffer.Length)) > 0)
                        {
                            zs.Write(buffer, 0, nr);
                        }
                    }
                    zs.CloseEntry();
                }
                zs.Finish();
                zs.Close();
                break;
            default:
                mon.Log.WriteLine("Unsupported file format: " + Path.GetFileName(targetFile));
                return;
        }
    }
}
/// <summary>
/// Merges the per-thread "*.log" capture files in <paramref name="captureDirPath"/>
/// into a single BZip2-compressed XML document ("capture.osd") with events ordered
/// by their 32-bit id, reporting percentage progress through <paramref name="progress"/>.
/// </summary>
/// <param name="captureDirPath">Directory containing the "*.log" capture files.</param>
/// <param name="numEvents">Expected total event count, used only for progress math.</param>
/// <param name="progress">Receives "Indexing"/"Converting" progress updates.</param>
public void ConvertAll(string captureDirPath, int numEvents, IProgressFeedback progress)
{
    List<BinaryReader> readers = new List<BinaryReader>(1);
    SortedList<uint, KeyValuePair<BinaryReader, uint>> ids = new SortedList<uint, KeyValuePair<BinaryReader, uint>>(numEvents);

    try {
        // Pass 1: index every event. Each log record is [uint id][uint size][body],
        // so we record (reader, body offset) per id and skip over the body.
        uint i = 0;
        foreach (string filePath in Directory.GetFiles(captureDirPath, "*.log", SearchOption.TopDirectoryOnly)) {
            FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read);
            BinaryReader r = new BinaryReader(fs);
            readers.Add(r);

            while (fs.Position < fs.Length) {
                i++;
                int pct = (int)(((float)i / (float)numEvents) * 100.0f);
                progress.ProgressUpdate("Indexing", pct);

                uint id = r.ReadUInt32();
                uint size = r.ReadUInt32();
                // NOTE(review): SortedList.Add throws on duplicates — this assumes
                // event ids are unique across all capture files.
                ids.Add(id, new KeyValuePair<BinaryReader, uint>(r, (uint)fs.Position));
                fs.Seek(size, SeekOrigin.Current);
            }
        }

        // Pass 2: walk the ids in sorted order and unserialize each event into
        // indented XML, compressed on the fly. The using blocks guarantee the
        // output chain is closed/flushed even if UnserializeNode throws (the
        // original code leaked the BZip2 and file streams on failure).
        string resultPath = String.Format("{0}\\capture.osd", captureDirPath);
        using (BZip2OutputStream outStream = new BZip2OutputStream(new FileStream(resultPath, FileMode.Create)))
        using (XmlTextWriter xtw = new XmlTextWriter(outStream, System.Text.Encoding.UTF8)) {
            xtw.Formatting = Formatting.Indented;
            xtw.Indentation = 4;
            xtw.IndentChar = ' ';

            xtw.WriteStartDocument(true);
            xtw.WriteStartElement("events");

            i = 0;
            foreach (KeyValuePair<BinaryReader, uint> pair in ids.Values) {
                i++;
                int pct = (int)(((float)i / (float)numEvents) * 100.0f);
                progress.ProgressUpdate(String.Format("Converting event {0} of {1}", i, numEvents), pct);

                BinaryReader r = pair.Key;
                uint offset = pair.Value;
                r.BaseStream.Seek(offset, SeekOrigin.Begin);
                UnserializeNode(r, xtw);
            }

            xtw.WriteEndElement();
            xtw.WriteEndDocument();
        }
    } finally {
        // Close the per-file readers (and their FileStreams) on every path —
        // previously they leaked whenever indexing or conversion threw.
        foreach (BinaryReader r in readers) {
            r.Close();
        }
    }
}
// Serializes this cache file into its container format: compresses the payload
// per Info.CompressionType (the BZip2 case strips the first 4 bytes of output —
// the "BZh1" magic — before storing), then writes [compression type byte]
// [compressed length][uncompressed length when compressed][data], optionally
// suffixed by the version truncated to 16 bits. Also refreshes Info with the
// sizes, the CRC-32 and the Whirlpool digest of the container minus the final
// 2 (version) bytes. Throws DecodeException when Info is unset and
// NotImplementedException when an encryption key is present.
public override byte[] Encode()
{
    if (this.Info == null)
    {
        throw new DecodeException("File info must be set before encoding binary file.");
    }
    // Encrypt data
    if (this.Info.EncryptionKey != null)
    {
        throw new NotImplementedException("RuneTek5 file encryption is not yet supported. Nag me about it if you encounter this error.");
    }
    // Compression
    var uncompressedSize = this.Data.Length;
    byte[] compressedData;
    switch (this.Info.CompressionType)
    {
        case CompressionType.Bzip2:
            using (var bzip2CompressionStream = new MemoryStream())
            {
                using (var bzip2Stream = new BZip2OutputStream(bzip2CompressionStream, 1))
                {
                    bzip2Stream.Write(this.Data, 0, this.Data.Length);
                }
                // Remove BZh1
                compressedData = bzip2CompressionStream.ToArray().Skip(4).ToArray();
            }
            break;
        case CompressionType.Gzip:
            using (var gzipCompressionStream = new MemoryStream())
            {
                using (var gzipStream = new GZipOutputStream(gzipCompressionStream))
                {
                    gzipStream.Write(this.Data, 0, this.Data.Length);
                }
                compressedData = gzipCompressionStream.ToArray();
            }
            break;
        case CompressionType.Lzma:
            using (var lzmaCompressionStream = new MemoryStream())
            using (var dataStream = new MemoryStream(this.Data))
            {
                var lzmaEncoder = new SevenZip.Compression.LZMA.Encoder();
                lzmaEncoder.Code(dataStream, lzmaCompressionStream, this.Data.Length, -1, null);
                compressedData = lzmaCompressionStream.ToArray();
            }
            break;
        case CompressionType.None:
            compressedData = this.Data;
            break;
        default:
            throw new ArgumentException("Invalid compression type.");
    }
    var memoryStream = new MemoryStream();
    var writer = new BinaryWriter(memoryStream);
    writer.Write((byte)this.Info.CompressionType);
    // Compressed/total size
    writer.WriteInt32BigEndian(compressedData.Length);
    // Add uncompressed size when compressing
    if (this.Info.CompressionType != CompressionType.None)
    {
        writer.WriteInt32BigEndian(uncompressedSize);
    }
    writer.Write(compressedData);
    // Suffix with version truncated to two bytes (not part of data for whatever reason)
    if (this.Info.Version != null)
    {
        writer.WriteUInt16BigEndian((ushort)this.Info.Version);
    }
    var result = memoryStream.ToArray();
    // Update file info with sizes
    this.Info.CompressedSize = compressedData.Length;
    this.Info.UncompressedSize = uncompressedSize;
    // Update file info with CRC
    // NOTE(review): both digests below always exclude the final 2 bytes, even
    // when Info.Version is null and no version suffix was written — confirm
    // callers guarantee a non-null Version, otherwise 2 data bytes are dropped.
    var crc = new Crc32();
    crc.Update(result.Take(result.Length - 2).ToArray());
    this.Info.Crc = (int)crc.Value;
    // Update file info with whirlpool digest
    var whirlpool = new WhirlpoolDigest();
    whirlpool.BlockUpdate(result, 0, result.Length - 2);
    this.Info.WhirlpoolDigest = new byte[whirlpool.GetDigestSize()];
    whirlpool.DoFinal(this.Info.WhirlpoolDigest, 0);
    return(result);
}
// Decompiler-recovered bsdiff ("BSDIFF40") patch writer. Behaviour mirrors the
// reference bsdiff: write a 32-byte header (magic 0x3034464649445342 ==
// "BSDIFF40", two length placeholders, new-file size), then three BZip2 blocks —
// control triples, diff bytes, extra bytes — each wrapped in a non-owning
// WrappingStream so the caller's `output` stays open, and finally seek back to
// patch the real block lengths into the header.
// NOTE(review): the TR_0049/TR_0059/TR_0021 goto/label structure is a raw
// decompilation artifact; statement order is load-bearing, so the code is left
// byte-for-byte untouched. Verify against the reference bsdiff implementation
// before modifying.
private static void CreateInternal(byte[] oldData, byte[] newData, Stream output)
{
    long num4;
    if (oldData == null)
    {
        throw new ArgumentNullException("oldData");
    }
    if (newData == null)
    {
        throw new ArgumentNullException("newData");
    }
    if (output == null)
    {
        throw new ArgumentNullException("output");
    }
    if (!output.CanSeek)
    {
        throw new ArgumentException("Output stream must be seekable.", "output");
    }
    if (!output.CanWrite)
    {
        throw new ArgumentException("Output stream must be writable.", "output");
    }
    // Header: magic, ctrl-length placeholder, diff-length placeholder, new size.
    byte[] buf = new byte[0x20];
    WriteInt64(0x3034464649445342L, buf, 0);
    WriteInt64(0L, buf, 8);
    WriteInt64(0L, buf, 0x10);
    WriteInt64((long)newData.Length, buf, 0x18);
    long position = output.Position;
    output.Write(buf, 0, buf.Length);
    int[] i = SuffixSort(oldData);
    // buffer = diff (delta) bytes, buffer3 = extra (literal) bytes.
    byte[] buffer = new byte[newData.Length];
    byte[] buffer3 = new byte[newData.Length];
    int count = 0;
    int num3 = 0;
    using (WrappingStream stream = new WrappingStream(output, Ownership.None))
    {
        using (BZip2OutputStream stream2 = new BZip2OutputStream(stream))
        {
            int num12;
            int newOffset = 0;
            int pos = 0;
            int num8 = 0;
            int num9 = 0;
            int num10 = 0;
            int num11 = 0;
            goto TR_0059;
        TR_0049:
            // Emit one control triple plus its diff/extra bytes.
            if ((num8 != num12) || (newOffset == newData.Length))
            {
                int num14 = 0;
                int num15 = 0;
                int num16 = 0;
                int num18 = 0;
                while (true)
                {
                    if (((num9 + num18) >= newOffset) || ((num10 + num18) >= oldData.Length))
                    {
                        int num17 = 0;
                        if (newOffset < newData.Length)
                        {
                            num14 = 0;
                            int num19 = 0;
                            for (int j = 1; (newOffset >= (num9 + j)) && (pos >= j); j++)
                            {
                                if (oldData[pos - j] == newData[newOffset - j])
                                {
                                    num14++;
                                }
                                if (((num14 * 2) - j) > ((num19 * 2) - num17))
                                {
                                    num19 = num14;
                                    num17 = j;
                                }
                            }
                        }
                        if ((num9 + num16) > (newOffset - num17))
                        {
                            // Forward and backward extensions overlap; split them.
                            int num21 = (num9 + num16) - (newOffset - num17);
                            num14 = 0;
                            int num22 = 0;
                            int num23 = 0;
                            int num24 = 0;
                            while (true)
                            {
                                if (num24 >= num21)
                                {
                                    num16 += num23 - num21;
                                    num17 -= num23;
                                    break;
                                }
                                if (newData[((num9 + num16) - num21) + num24] == oldData[((num10 + num16) - num21) + num24])
                                {
                                    num14++;
                                }
                                if (newData[(newOffset - num17) + num24] == oldData[(pos - num17) + num24])
                                {
                                    num14--;
                                }
                                if (num14 > num22)
                                {
                                    num22 = num14;
                                    num23 = num24 + 1;
                                }
                                num24++;
                            }
                        }
                        int num25 = 0;
                        while (true)
                        {
                            if (num25 >= num16)
                            {
                                int num26 = 0;
                                while (true)
                                {
                                    if (num26 >= ((newOffset - num17) - (num9 + num16)))
                                    {
                                        count += num16;
                                        num3 += (newOffset - num17) - (num9 + num16);
                                        byte[] buffer4 = new byte[8];
                                        WriteInt64((long)num16, buffer4, 0);
                                        stream2.Write(buffer4, 0, 8);
                                        WriteInt64((long)((newOffset - num17) - (num9 + num16)), buffer4, 0);
                                        stream2.Write(buffer4, 0, 8);
                                        WriteInt64((long)((pos - num17) - (num10 + num16)), buffer4, 0);
                                        stream2.Write(buffer4, 0, 8);
                                        num9 = newOffset - num17;
                                        num10 = pos - num17;
                                        num11 = pos - newOffset;
                                        break;
                                    }
                                    buffer3[num3 + num26] = newData[(num9 + num16) + num26];
                                    num26++;
                                }
                                break;
                            }
                            buffer[count + num25] = (byte)(newData[num9 + num25] - oldData[num10 + num25]);
                            num25++;
                        }
                        break;
                    }
                    if (oldData[num10 + num18] == newData[num9 + num18])
                    {
                        num14++;
                    }
                    num18++;
                    if (((num14 * 2) - num18) > ((num15 * 2) - num16))
                    {
                        num15 = num14;
                        num16 = num18;
                    }
                }
            }
        TR_0059:
            // Scan the new file for the next approximate match.
            while (true)
            {
                if (newOffset < newData.Length)
                {
                    num12 = 0;
                    int num1 = newOffset + num8;
                    int index = newOffset = num1;
                    while (true)
                    {
                        if (newOffset >= newData.Length)
                        {
                            break;
                        }
                        num8 = Search(i, oldData, newData, newOffset, 0, oldData.Length, out pos);
                        while (true)
                        {
                            if (index < (newOffset + num8))
                            {
                                if (((index + num11) < oldData.Length) && (oldData[index + num11] == newData[index]))
                                {
                                    num12++;
                                }
                                index++;
                                continue;
                            }
                            if (((num8 != num12) || (num8 == 0)) && (num8 <= (num12 + 8)))
                            {
                                if (((newOffset + num11) < oldData.Length) && (oldData[newOffset + num11] == newData[newOffset]))
                                {
                                    num12--;
                                }
                                newOffset++;
                                break;
                            }
                            break;
                        }
                    }
                }
                else
                {
                    goto TR_0021;
                }
                break;
            }
            goto TR_0049;
        }
    }
TR_0021:
    // Patch the ctrl-block length, then write the diff and extra blocks.
    num4 = output.Position;
    WriteInt64((num4 - position) - 0x20, buf, 8);
    using (WrappingStream stream3 = new WrappingStream(output, Ownership.None))
    {
        using (BZip2OutputStream stream4 = new BZip2OutputStream(stream3))
        {
            stream4.Write(buffer, 0, count);
        }
    }
    WriteInt64(output.Position - num4, buf, 0x10);
    using (WrappingStream stream5 = new WrappingStream(output, Ownership.None))
    {
        using (BZip2OutputStream stream6 = new BZip2OutputStream(stream5))
        {
            stream6.Write(buffer3, 0, num3);
        }
    }
    // Rewrite the completed header, then restore the stream position.
    long num5 = output.Position;
    output.Position = position;
    output.Write(buf, 0, buf.Length);
    output.Position = num5;
}
// Another decompiled bsdiff writer, from a different decompiler: property and
// setter accesses appear as raw accessor calls (output.get_Position(),
// bZip2OutputStream.set_IsStreamOwner(false)), so this listing is NOT valid C#
// as written — it documents the IL rather than compilable source, and is kept
// byte-for-byte as found.
// Logic is the standard bsdiff create: header with magic 3473478480300364610
// ("BSDIFF40"), suffix-sort of oldData, a match scan producing control triples
// plus diff (array2) and extra (array3) buffers, three BZip2-compressed sections
// (each stream marked non-owning so `output` survives disposal), and finally the
// header is rewritten with the real control/diff block lengths.
public static void Create(byte[] oldData, byte[] newData, Stream output)
{
    if (oldData == null)
    {
        throw new ArgumentNullException("oldData");
    }
    if (newData == null)
    {
        throw new ArgumentNullException("newData");
    }
    if (output == null)
    {
        throw new ArgumentNullException("output");
    }
    if (!output.get_CanSeek())
    {
        throw new ArgumentException("Output stream must be seekable.", "output");
    }
    if (!output.get_CanWrite())
    {
        throw new ArgumentException("Output stream must be writable.", "output");
    }
    byte[] array = new byte[32];
    BinaryPatchUtility.WriteInt64(3473478480300364610L, array, 0);
    BinaryPatchUtility.WriteInt64(0L, array, 8);
    BinaryPatchUtility.WriteInt64(0L, array, 16);
    BinaryPatchUtility.WriteInt64((long)newData.Length, array, 24);
    long position = output.get_Position();
    output.Write(array, 0, array.Length);
    int[] i = BinaryPatchUtility.SuffixSort(oldData);
    byte[] array2 = new byte[newData.Length];
    byte[] array3 = new byte[newData.Length];
    int num = 0;
    int num2 = 0;
    BZip2OutputStream bZip2OutputStream = new BZip2OutputStream(output);
    bZip2OutputStream.set_IsStreamOwner(false);
    using (BZip2OutputStream bZip2OutputStream2 = bZip2OutputStream)
    {
        int j = 0;
        int num3 = 0;
        int num4 = 0;
        int num5 = 0;
        int num6 = 0;
        int num7 = 0;
        while (j < newData.Length)
        {
            int num8 = 0;
            int k;
            // Scan forward for the next match that is clearly better than
            // extending the previous alignment.
            for (j = (k = j + num4); j < newData.Length; j++)
            {
                num4 = BinaryPatchUtility.Search(i, oldData, newData, j, 0, oldData.Length, out num3);
                while (k < j + num4)
                {
                    if (k + num7 < oldData.Length && oldData[k + num7] == newData[k])
                    {
                        num8++;
                    }
                    k++;
                }
                if ((num4 == num8 && num4 != 0) || num4 > num8 + 8)
                {
                    break;
                }
                if (j + num7 < oldData.Length && oldData[j + num7] == newData[j])
                {
                    num8--;
                }
            }
            if (num4 != num8 || j == newData.Length)
            {
                // Extend the previous match forward (num11)...
                int num9 = 0;
                int num10 = 0;
                int num11 = 0;
                int num12 = 0;
                while (num5 + num12 < j && num6 + num12 < oldData.Length)
                {
                    if (oldData[num6 + num12] == newData[num5 + num12])
                    {
                        num9++;
                    }
                    num12++;
                    if (num9 * 2 - num12 > num10 * 2 - num11)
                    {
                        num10 = num9;
                        num11 = num12;
                    }
                }
                // ...and the new match backward (num13).
                int num13 = 0;
                if (j < newData.Length)
                {
                    num9 = 0;
                    int num14 = 0;
                    int num15 = 1;
                    while (j >= num5 + num15 && num3 >= num15)
                    {
                        if (oldData[num3 - num15] == newData[j - num15])
                        {
                            num9++;
                        }
                        if (num9 * 2 - num15 > num14 * 2 - num13)
                        {
                            num14 = num9;
                            num13 = num15;
                        }
                        num15++;
                    }
                }
                // Resolve overlap between the two extensions.
                if (num5 + num11 > j - num13)
                {
                    int num16 = num5 + num11 - (j - num13);
                    num9 = 0;
                    int num17 = 0;
                    int num18 = 0;
                    for (int l = 0; l < num16; l++)
                    {
                        if (newData[num5 + num11 - num16 + l] == oldData[num6 + num11 - num16 + l])
                        {
                            num9++;
                        }
                        if (newData[j - num13 + l] == oldData[num3 - num13 + l])
                        {
                            num9--;
                        }
                        if (num9 > num17)
                        {
                            num17 = num9;
                            num18 = l + 1;
                        }
                    }
                    num11 += num18 - num16;
                    num13 -= num18;
                }
                for (int m = 0; m < num11; m++)
                {
                    array2[num + m] = newData[num5 + m] - oldData[num6 + m];
                }
                for (int n = 0; n < j - num13 - (num5 + num11); n++)
                {
                    array3[num2 + n] = newData[num5 + num11 + n];
                }
                num += num11;
                num2 += j - num13 - (num5 + num11);
                // Control triple: (diff length, extra length, old-file seek).
                byte[] array4 = new byte[8];
                BinaryPatchUtility.WriteInt64((long)num11, array4, 0);
                bZip2OutputStream2.Write(array4, 0, 8);
                BinaryPatchUtility.WriteInt64((long)(j - num13 - (num5 + num11)), array4, 0);
                bZip2OutputStream2.Write(array4, 0, 8);
                BinaryPatchUtility.WriteInt64((long)(num3 - num13 - (num6 + num11)), array4, 0);
                bZip2OutputStream2.Write(array4, 0, 8);
                num5 = j - num13;
                num6 = num3 - num13;
                num7 = num3 - j;
            }
        }
    }
    // Patch the ctrl-block length, then write the diff and extra blocks.
    long position2 = output.get_Position();
    BinaryPatchUtility.WriteInt64(position2 - position - 32L, array, 8);
    bZip2OutputStream = new BZip2OutputStream(output);
    bZip2OutputStream.set_IsStreamOwner(false);
    using (BZip2OutputStream bZip2OutputStream3 = bZip2OutputStream)
    {
        bZip2OutputStream3.Write(array2, 0, num);
    }
    long position3 = output.get_Position();
    BinaryPatchUtility.WriteInt64(position3 - position2, array, 16);
    bZip2OutputStream = new BZip2OutputStream(output);
    bZip2OutputStream.set_IsStreamOwner(false);
    using (BZip2OutputStream bZip2OutputStream4 = bZip2OutputStream)
    {
        bZip2OutputStream4.Write(array3, 0, num2);
    }
    // Rewrite the completed header, then restore the stream position.
    long position4 = output.get_Position();
    output.set_Position(position);
    output.Write(array, 0, array.Length);
    output.set_Position(position4);
}
/// <summary>
/// Creates an archive described by <paramref name="archiveOptions"/>, reporting throttled
/// progress via <paramref name="reportCompressionStatus"/> and honouring
/// <paramref name="cancellationToken"/>. Supports None/Zip/Gzip/Bzip2 compression; the
/// non-zip methods may additionally be wrapped in a tar container (UseTarPacker).
/// NOTE(review): assumes archiveOptions.Entries is non-empty — confirm at call sites.
/// </summary>
/// <param name="archiveOptions">Archive path, entries, compression method/level, password.</param>
/// <param name="processingEntry">Mutable status record whose Progress/Size are updated in place.</param>
/// <param name="cancellationToken">Polled flag; on cancellation Progress is set to -1 and the method returns.</param>
/// <param name="reportCompressionStatus">Callback invoked on the thread pool with the updated status.</param>
public static void CreateArchive(ArchiveOptions archiveOptions, ProcessingEntry processingEntry, CancellationTokenEx cancellationToken, ReportCompressionStatus reportCompressionStatus)
{
    // Base folder of the first entry, used to build relative entry names.
    // Lazy because only the Zip branch reads it.
    var folderName = new Lazy<string>(() =>
    {
        var firstEntry = archiveOptions.Entries[0];
        return ((firstEntry.IsDirectory
            ? new DirectoryInfo(firstEntry.Path).Parent.FullName
            : new FileInfo(firstEntry.Path).DirectoryName).TrimEnd('\\'));
    });

    Stream outputStream = new FileStream(archiveOptions.ArchivePath, FileMode.Create, FileAccess.ReadWrite);

    switch (archiveOptions.CompressionMethod)
    {
        case CompressionMethod.None:
            // Don't wrap the stream — the tar (or raw file) is written straight to disk.
            break;

        case CompressionMethod.Zip:
            // Zip is a container in its own right: this branch does all the work and returns.
            using (var zipStream = new ZipOutputStream(outputStream) { IsStreamOwner = true })
            {
                zipStream.SetLevel(archiveOptions.CompressionLevel);
                zipStream.Password = archiveOptions.Password;

                // Length of the common base folder; stripped from each file's full path.
                var folderOffset = folderName.Value.Length;

                // Expand directory entries into their contained files.
                var fileList = new List<FileInfo>();
                foreach (var entry in archiveOptions.Entries)
                {
                    if (entry.IsDirectory)
                    {
                        CollectFiles(fileList, new DirectoryInfo(entry.Path));
                    }
                    else
                    {
                        fileList.Add(new FileInfo(entry.Path));
                    }
                }

                double totalLength = fileList.Sum(x => x.Length);
                long currentLength = 0;
                var updateStopwatch = Stopwatch.StartNew();

                // Throttles status callbacks to at most one per second —
                // important when archiving a lot of small files.
                void UpdateProgress(float progress)
                {
                    if (updateStopwatch.ElapsedMilliseconds > 1000)
                    {
                        updateStopwatch.Reset();
                        processingEntry.Progress = progress;
                        processingEntry.Size = zipStream.Length;
                        // Report off-thread so status delivery never stalls compression.
                        ThreadPool.QueueUserWorkItem(state => reportCompressionStatus.Invoke(processingEntry));
                        updateStopwatch.Start();
                    }
                }

                foreach (var fileInfo in fileList)
                {
                    var entryName = ZipEntry.CleanName(fileInfo.FullName.Substring(folderOffset));
                    var zipEntry = new ZipEntry(entryName)
                    {
                        DateTime = fileInfo.LastWriteTime,
                        // AES-256 encryption only when a password was supplied.
                        AESKeySize = string.IsNullOrEmpty(archiveOptions.Password) ? 0 : 256,
                        Size = fileInfo.Length
                    };

                    byte[] buffer = new byte[4096];
                    FileStream zipEntryStream;
                    try
                    {
                        zipEntryStream = fileInfo.OpenRead();
                    }
                    catch (Exception)
                    {
                        continue; // access denied — skip this file rather than abort the archive
                    }

                    zipStream.PutNextEntry(zipEntry);
                    using (zipEntryStream)
                    {
                        StreamUtils.Copy(zipEntryStream, zipStream, buffer, (sender, args) =>
                        {
                            UpdateProgress((float)((currentLength + args.Processed) / totalLength));
                            // The copy loop polls this flag to stop early on cancellation.
                            args.ContinueRunning = !cancellationToken.IsCanceled;
                        }, TimeSpan.FromSeconds(1), null, null);
                    }

                    if (cancellationToken.IsCanceled)
                    {
                        // Force an update regardless of the throttle; -1 signals cancellation.
                        processingEntry.Progress = -1;
                        ThreadPool.QueueUserWorkItem(state => reportCompressionStatus.Invoke(processingEntry));
                        return;
                    }

                    currentLength += fileInfo.Length;
                    zipStream.CloseEntry();
                    UpdateProgress((float)(currentLength / totalLength));
                }

                // Force a final (completed) update regardless of the throttle.
                processingEntry.Size = zipStream.Length;
                processingEntry.Progress = 1;
                ThreadPool.QueueUserWorkItem(state => reportCompressionStatus.Invoke(processingEntry));
            }
            return;

        case CompressionMethod.Gzip:
            var gzipStream = new GZipOutputStream(outputStream) { IsStreamOwner = true };
            gzipStream.SetLevel(archiveOptions.CompressionLevel);
            gzipStream.Password = archiveOptions.Password;
            outputStream = gzipStream;
            break;

        case CompressionMethod.Bzip2:
            outputStream = new BZip2OutputStream(outputStream) { IsStreamOwner = true };
            break;

        default:
            throw new ArgumentException("Unknown compression method: " + archiveOptions.CompressionMethod);
    }

    // None/Gzip/Bzip2 reach here; the Zip branch returned above.
    using (outputStream)
    {
        if (archiveOptions.UseTarPacker)
        {
            // Pack all entries into a tar stream layered on the (possibly compressing) output.
            using (var tarOutputStream = new TarOutputStream(outputStream))
            {
                var rootPath = Path.GetDirectoryName(archiveOptions.Entries[0].Path).Replace('\\', '/').TrimEnd('/');
                var fileList = new List<FileInfo>();
                foreach (var entry in archiveOptions.Entries)
                {
                    if (entry.IsDirectory)
                    {
                        CollectFiles(fileList, new DirectoryInfo(entry.Path));
                    }
                    else
                    {
                        fileList.Add(new FileInfo(entry.Path));
                    }
                }

                var buffer = new byte[4096];
                double totalLength = fileList.Sum(x => x.Length);
                long currentLength = 0;
                var updateStopwatch = Stopwatch.StartNew();

                foreach (var fileInfo in fileList)
                {
                    Stream fileStream;
                    try
                    {
                        fileStream = new FileStream(fileInfo.FullName, FileMode.Open, FileAccess.Read);
                    }
                    catch (Exception)
                    {
                        continue; // unreadable file — skip rather than abort
                    }

                    using (fileStream)
                    {
                        var tarEntry = TarEntry.CreateEntryFromFile(fileInfo.FullName);
                        // Entry name relative to the root folder (+1 strips the separator).
                        tarEntry.Name = fileInfo.FullName.Substring(rootPath.Length + 1);
                        tarOutputStream.PutNextEntry(tarEntry);
                        StreamUtils.Copy(fileStream, tarOutputStream, buffer, (sender, args) =>
                        {
                            args.ContinueRunning = !cancellationToken.IsCanceled;
                            // Throttle status callbacks to at most one per second.
                            if (updateStopwatch.ElapsedMilliseconds > 1000)
                            {
                                updateStopwatch.Reset();
                                processingEntry.Progress = (float)((currentLength + args.Processed) / totalLength);
                                processingEntry.Size = tarOutputStream.Length;
                                ThreadPool.QueueUserWorkItem(
                                    state => reportCompressionStatus.Invoke(processingEntry));
                                updateStopwatch.Start();
                            }
                        }, TimeSpan.FromSeconds(1), null, null);
                        tarOutputStream.CloseEntry();
                    }

                    currentLength += fileInfo.Length;
                    if (cancellationToken.IsCanceled)
                    {
                        // -1 signals cancellation to the status receiver.
                        processingEntry.Progress = -1;
                        ThreadPool.QueueUserWorkItem(state => reportCompressionStatus.Invoke(processingEntry));
                        return;
                    }
                }
            }
        }
        else
        {
            // No tar container: compress exactly one file directly onto the output stream.
            var entry = archiveOptions.Entries[0];
            if (entry.IsDirectory)
            {
                throw new ArgumentException("Cannot pack directory without tar/zip");
            }

            byte[] dataBuffer = new byte[4096];
            using (var sourceStream = new FileStream(entry.Path, FileMode.Open, FileAccess.Read))
                StreamUtils.Copy(sourceStream, outputStream, dataBuffer, (sender, args) =>
                {
                    // No throttling stopwatch needed because only one entry is copied.
                    processingEntry.Progress = args.PercentComplete / 100;
                    processingEntry.Size = outputStream.Length;
                    args.ContinueRunning = !cancellationToken.IsCanceled;
                    ThreadPool.QueueUserWorkItem(state => reportCompressionStatus.Invoke(processingEntry));
                }, TimeSpan.FromSeconds(1), null, null);

            if (cancellationToken.IsCanceled)
            {
                // Force an update; -1 signals cancellation.
                processingEntry.Progress = -1;
                ThreadPool.QueueUserWorkItem(state => reportCompressionStatus.Invoke(processingEntry));
                return;
            }
        }

        // Force a final (completed) update.
        processingEntry.Size = outputStream.Length;
        processingEntry.Progress = 1;
        ThreadPool.QueueUserWorkItem(state => reportCompressionStatus.Invoke(processingEntry));
    }
}
/// <summary>
/// Archives the specified directory's files into a bzip2-compressed tar file.
/// </summary>
/// <param name="zipFilename">The filename and path of the archive file to create.</param>
/// <param name="zipDirectorPath">The directory of the files to compress.</param>
/// <param name="pattern">The directory search pattern.</param>
/// <param name="searchOption">The directory search option.</param>
/// <param name="extensionRegularExpression">The regular expression for excluding files from being compressed.</param>
/// <param name="filesToInclude">When non-null, only files in this list are compressed.</param>
/// <remarks>Extension Regular Expression should be in the form 'jpg|JPG|gif|GIF|doc|DOC|pdf|PDF'</remarks>
public static void Compress(string zipFilename, string zipDirectorPath, string pattern, SearchOption searchOption, string extensionRegularExpression, List<string> filesToInclude)
{
    // Get the collection of files in the directory.
    string[] files = Directory.GetFiles(zipDirectorPath.TrimEnd('\\') + "\\", pattern, searchOption);

    // Create all the streams; disposal (via using) flushes and closes each layer,
    // so no explicit Close() calls are needed.
    using (Stream zipStream = File.Create(zipFilename))
    using (BZip2OutputStream stream = new BZip2OutputStream(zipStream, 9))
    using (TarArchive archive = TarArchive.CreateOutputTarArchive(stream, TarBuffer.DefaultBlockFactor))
    {
        // Assign the archive properties.
        archive.SetKeepOldFiles(false);
        archive.AsciiTranslate = true;
        archive.SetUserInfo(0, "anonymous", 0, "None");

        foreach (string file in files)
        {
            bool excludeFile = false;

            // Exclude files whose extension matches the supplied regular expression.
            if (!String.IsNullOrEmpty(extensionRegularExpression))
            {
                excludeFile = Regex.IsMatch(file.Trim(), @".*\.(" + extensionRegularExpression + @")$");
            }

            // When an include list is supplied, keep only files present in it.
            // FIX: use an ordinal case-insensitive comparison instead of the previous
            // culture-sensitive, allocating ToLower() == ToLower() pattern, and Any()
            // instead of materializing Count() > 0.
            if (filesToInclude != null && !excludeFile)
            {
                excludeFile = !filesToInclude.Any(u => string.Equals(u, file, StringComparison.OrdinalIgnoreCase));
            }

            if (!excludeFile)
            {
                // Compute the path relative to the source directory, with forward slashes.
                string relativePath = Path.GetDirectoryName(file).TrimEnd('\\') + "\\";
                relativePath = relativePath.Replace(zipDirectorPath.TrimEnd('\\') + "\\", "").Replace("\\", "/");

                // Get the file entry and set the relative path as the name of the entry.
                TarEntry entry = TarEntry.CreateEntryFromFile(file);
                entry.Name = (!String.IsNullOrEmpty(relativePath) ? relativePath.TrimEnd('/') + "/" : "") + Path.GetFileName(file);

                // Write to the archive.
                archive.WriteEntry(entry, true);
            }
        }
    }
}
/// <summary>
/// Creates the tar file from the configured filesets, optionally wrapping the output
/// in GZip or BZip2 compression. On any failure the partially written (possibly
/// corrupt) destination file is deleted and a <c>BuildException</c> is thrown.
/// </summary>
protected override void ExecuteTask() {
    TarArchive archive = null;
    Stream outstream = null;
    Log(Level.Info, "Tarring {0} files to '{1}'.", TarFileSets.FileCount, DestFile.FullName);
    try {
        // Make sure the destination directory exists before creating the file.
        if (!Directory.Exists(DestFile.DirectoryName)) {
            Directory.CreateDirectory(DestFile.DirectoryName);
        }
        outstream = File.Create(DestFile.FullName);

        // wrap outputstream with corresponding compression method
        switch (CompressionMethod) {
            case TarCompressionMethod.GZip:
                outstream = new GZipOutputStream(outstream);
                break;
            case TarCompressionMethod.BZip2:
                outstream = new BZip2OutputStream(outstream);
                break;
        }

        // create tar archive
        archive = TarArchive.CreateOutputTarArchive(outstream, TarBuffer.DefaultBlockFactor);

        // do not use convert line endings of text files to \n, as this
        // converts all content to ASCII
        archive.AsciiTranslate = false;

        // process all filesets
        foreach (TarFileSet fileset in TarFileSets) {
            string basePath = fileset.BaseDirectory.FullName;
            // Normalize base path unless it is a filesystem root (e.g. "C:\").
            if (Path.GetPathRoot(basePath) != basePath) {
                basePath = Path.GetDirectoryName(basePath + Path.DirectorySeparatorChar);
            }

            // add files to tar
            foreach (string file in fileset.FileNames) {
                // ensure file exists (in case "asis" was used)
                if (!File.Exists(file)) {
                    throw new BuildException(string.Format(CultureInfo.InvariantCulture, "File '{0}' does not exist.", file), Location);
                }

                // the filename of the tar entry
                string entryFileName;
                // the directory of the tar entry
                string entryDirName = string.Empty;

                // determine name of the tar entry: relative to the base path unless
                // flattening was requested or the file lives outside the base path
                if (!Flatten && file.StartsWith(basePath)) {
                    entryFileName = file.Substring(basePath.Length);
                    if (entryFileName.Length > 0 && entryFileName[0] == Path.DirectorySeparatorChar) {
                        entryFileName = entryFileName.Substring(1);
                    }
                    // get directory part of entry
                    entryDirName = Path.GetDirectoryName(entryFileName);
                    // ensure directory separators are understood on linux
                    if (Path.DirectorySeparatorChar == '\\') {
                        entryDirName = entryDirName.Replace(@"\", "/");
                    }
                    // get filename part of entry
                    entryFileName = Path.GetFileName(entryFileName);
                } else {
                    entryFileName = Path.GetFileName(file);
                }

                // add prefix if specified
                if (fileset.Prefix != null) {
                    entryDirName = fileset.Prefix + entryDirName;
                }

                // ensure directory has trailing slash
                if (entryDirName.Length != 0) {
                    if (!entryDirName.EndsWith("/")) {
                        entryDirName += '/';
                    }
                    // create directory entry in archive
                    CreateDirectoryEntry(archive, entryDirName, fileset);
                }

                TarEntry entry = TarEntry.CreateEntryFromFile(file);
                entry.Name = entryDirName + entryFileName;
                // apply ownership/permission metadata from the fileset
                entry.GroupId = fileset.Gid;
                entry.GroupName = fileset.GroupName;
                entry.UserId = fileset.Uid;
                entry.UserName = fileset.UserName;
                entry.TarHeader.Mode = fileset.FileMode;

                // write file to tar file
                archive.WriteEntry(entry, true);
            }

            // add (possibly empty) directories to zip
            if (IncludeEmptyDirs) {
                // add (possibly empty) directories to tar
                foreach (string directory in fileset.DirectoryNames) {
                    // skip directories that are not located beneath the base
                    // directory
                    if (!directory.StartsWith(basePath) || directory.Length <= basePath.Length) {
                        continue;
                    }

                    // determine tar entry name
                    string entryName = directory.Substring(basePath.Length + 1);

                    // add prefix if specified
                    if (fileset.Prefix != null) {
                        entryName = fileset.Prefix + entryName;
                    }

                    // ensure directory separators are understood on linux
                    if (Path.DirectorySeparatorChar == '\\') {
                        entryName = entryName.Replace(@"\", "/");
                    }

                    if (!entryName.EndsWith("/")) {
                        // trailing directory signals to #ziplib that we're
                        // dealing with directory entry
                        entryName += "/";
                    }

                    // create directory entry in archive
                    CreateDirectoryEntry(archive, entryName, fileset);
                }
            }
        }

        // close the tar archive
        archive.Close();
    } catch (Exception ex) {
        // close the tar output stream
        if (outstream != null) {
            outstream.Close();
        }
        // close the tar archive
        if (archive != null) {
            archive.Close();
        }

        // delete the (possibly corrupt) tar file
        if (DestFile.Exists) {
            DestFile.Delete();
        }

        // wrap the failure so the build reports which tar file failed
        throw new BuildException(string.Format(CultureInfo.InvariantCulture, "Tar file '{0}' could not be created.", DestFile.FullName), Location, ex);
    }
}
/// <summary>
/// Writes a BSDIFF40-format binary patch that transforms <paramref name="oldData"/>
/// into <paramref name="newData"/> onto <paramref name="output"/>. The patch consists
/// of a 32-byte header followed by three bzip2-compressed blocks: control, diff and extra.
/// The header is back-patched at the end, which is why the stream must be seekable.
/// </summary>
/// <param name="oldData">Original file contents.</param>
/// <param name="newData">Target file contents.</param>
/// <param name="output">Seekable, writable stream receiving the patch.</param>
static void CreateInternal(byte[] oldData, byte[] newData, Stream output) {
    // check arguments
    if (oldData == null) {
        throw new ArgumentNullException("oldData");
    }
    if (newData == null) {
        throw new ArgumentNullException("newData");
    }
    if (output == null) {
        throw new ArgumentNullException("output");
    }
    if (!output.CanSeek) {
        throw new ArgumentException("Output stream must be seekable.", "output");
    }
    if (!output.CanWrite) {
        throw new ArgumentException("Output stream must be writable.", "output");
    }

    /* Header is
     *	0	8	"BSDIFF40"
     *	8	8	length of bzip2ed ctrl block
     *	16	8	length of bzip2ed diff block
     *	24	8	length of new file
     */
    /* File is
     *	0	32	Header
     *	32	??	Bzip2ed ctrl block
     *	??	??	Bzip2ed diff block
     *	??	??	Bzip2ed extra block
     */
    byte[] header = new byte[c_headerSize];
    WriteInt64(c_fileSignature, header, 0); // "BSDIFF40"
    // Block lengths are unknown until written; zero placeholders are back-patched later.
    WriteInt64(0, header, 8);
    WriteInt64(0, header, 16);
    WriteInt64(newData.Length, header, 24);

    long startPosition = output.Position;
    output.Write(header, 0, header.Length);

    // Suffix array over oldData enables fast longest-match searches.
    int[] I = SuffixSort(oldData);

    // db = diff block (byte-wise deltas), eb = extra block (literal new bytes).
    byte[] db = new byte[newData.Length];
    byte[] eb = new byte[newData.Length];

    int dblen = 0;
    int eblen = 0;

    // WrappingStream with Ownership.None keeps BZip2OutputStream.Dispose from
    // closing the caller's output stream.
    using (BZip2OutputStream bz2Stream = new BZip2OutputStream(new WrappingStream(output, Ownership.None))) {
        // compute the differences, writing ctrl as we go
        int scan = 0;
        int pos = 0;
        int len = 0;
        int lastscan = 0;
        int lastpos = 0;
        int lastoffset = 0;
        while (scan < newData.Length) {
            int oldscore = 0;

            // Advance scan until the best suffix-array match clearly beats
            // extending the previous alignment (heuristic threshold of 8).
            for (int scsc = scan += len; scan < newData.Length; scan++) {
                len = Search(I, oldData, newData, scan, 0, oldData.Length, out pos);

                for (; scsc < scan + len; scsc++) {
                    if ((scsc + lastoffset < oldData.Length) && (oldData[scsc + lastoffset] == newData[scsc])) {
                        oldscore++;
                    }
                }

                if ((len == oldscore && len != 0) || (len > oldscore + 8)) {
                    break;
                }

                if ((scan + lastoffset < oldData.Length) && (oldData[scan + lastoffset] == newData[scan])) {
                    oldscore--;
                }
            }

            if (len != oldscore || scan == newData.Length) {
                int s = 0;
                int sf = 0;
                int lenf = 0;
                // Extend the previous match forward as long as at least half the bytes agree.
                for (int i = 0; (lastscan + i < scan) && (lastpos + i < oldData.Length);) {
                    if (oldData[lastpos + i] == newData[lastscan + i]) {
                        s++;
                    }
                    i++;
                    if (s * 2 - i > sf * 2 - lenf) {
                        sf = s;
                        lenf = i;
                    }
                }

                int lenb = 0;
                // Extend the new match backward by the same majority criterion.
                if (scan < newData.Length) {
                    s = 0;
                    int sb = 0;
                    for (int i = 1; (scan >= lastscan + i) && (pos >= i); i++) {
                        if (oldData[pos - i] == newData[scan - i]) {
                            s++;
                        }
                        if (s * 2 - i > sb * 2 - lenb) {
                            sb = s;
                            lenb = i;
                        }
                    }
                }

                // If the forward and backward extensions overlap, pick the split
                // point that maximizes matching bytes.
                if (lastscan + lenf > scan - lenb) {
                    int overlap = (lastscan + lenf) - (scan - lenb);
                    s = 0;
                    int ss = 0;
                    int lens = 0;
                    for (int i = 0; i < overlap; i++) {
                        if (newData[lastscan + lenf - overlap + i] == oldData[lastpos + lenf - overlap + i]) {
                            s++;
                        }
                        if (newData[scan - lenb + i] == oldData[pos - lenb + i]) {
                            s--;
                        }
                        if (s > ss) {
                            ss = s;
                            lens = i + 1;
                        }
                    }

                    lenf += lens - overlap;
                    lenb -= lens;
                }

                // Diff block: byte-wise deltas over the matched region.
                for (int i = 0; i < lenf; i++) {
                    db[dblen + i] = (byte)(newData[lastscan + i] - oldData[lastpos + i]);
                }
                // Extra block: literal bytes between the matched regions.
                for (int i = 0; i < (scan - lenb) - (lastscan + lenf); i++) {
                    eb[eblen + i] = newData[lastscan + lenf + i];
                }

                dblen += lenf;
                eblen += (scan - lenb) - (lastscan + lenf);

                // Control triple: (diff length, extra length, seek offset in old file).
                byte[] buf = new byte[8];
                WriteInt64(lenf, buf, 0);
                bz2Stream.Write(buf, 0, 8);

                WriteInt64((scan - lenb) - (lastscan + lenf), buf, 0);
                bz2Stream.Write(buf, 0, 8);

                WriteInt64((pos - lenb) - (lastpos + lenf), buf, 0);
                bz2Stream.Write(buf, 0, 8);

                lastscan = scan - lenb;
                lastpos = pos - lenb;
                lastoffset = pos - scan;
            }
        }
    }

    // compute size of compressed ctrl data
    long controlEndPosition = output.Position;
    WriteInt64(controlEndPosition - startPosition - c_headerSize, header, 8);

    // write compressed diff data
    using (BZip2OutputStream bz2Stream = new BZip2OutputStream(new WrappingStream(output, Ownership.None))) {
        bz2Stream.Write(db, 0, dblen);
    }

    // compute size of compressed diff data
    long diffEndPosition = output.Position;
    WriteInt64(diffEndPosition - controlEndPosition, header, 16);

    // write compressed extra data
    using (BZip2OutputStream bz2Stream = new BZip2OutputStream(new WrappingStream(output, Ownership.None))) {
        bz2Stream.Write(eb, 0, eblen);
    }

    // seek to the beginning, write the header, then seek back to end
    long endPosition = output.Position;
    output.Position = startPosition;
    output.Write(header, 0, header.Length);
    output.Position = endPosition;
}
/// <summary>
/// Rebuilds and publishes the parser input archive when the client files changed.
/// Compares the current client manifest's SHA-1 hashes against the previously stored
/// manifest in S3; when any hash is new, downloads the client files, extracts them,
/// packs them into a bzip2 tar archive, uploads the new manifest to S3 and posts the
/// archive (or a "too heavy" notice) to a Discord channel.
/// </summary>
public async Task UploadInputFilesAsync() {
    // S3 client built from environment credentials; region is fixed to us-west-2.
    using var client = new AmazonS3Client(new BasicAWSCredentials(
        Environment.GetEnvironmentVariable("S3_ACCESS_KEY"),
        Environment.GetEnvironmentVariable("S3_SECRET_KEY")), RegionEndpoint.USWest2);

    var manifest = await _client.DownloadManifest();
    // Normalize manifest paths to the local platform's separator.
    manifest.Entries = manifest.Entries.Select(s => {
        s.File = s.File.Replace('\\', Path.DirectorySeparatorChar);
        return (s);
    }).ToArray();

    // Restrict the manifest to the parser's input files only.
    var fileslist = _parserInputFiles.Select(o => $"NostaleData{Path.DirectorySeparatorChar}{o}").ToList();
    manifest.Entries = manifest.Entries.Where(s => fileslist.Contains(s.File)).ToArray();

    var request = new GetObjectRequest {
        BucketName = Environment.GetEnvironmentVariable("S3_BUCKET"),
        Key = "clientmanifest.json",
    };

    // Load the previously published manifest; on any failure (missing object,
    // network, bad JSON) fall back to an empty manifest so everything is rebuilt.
    // NOTE(review): the bare catch also hides auth/config errors — consider narrowing.
    ClientManifest previousManifest;
    try {
        {
            using var response = await client.GetObjectAsync(request);
            await using var responseStream = response.ResponseStream;
            using var reader = new StreamReader(responseStream);
            previousManifest = JsonConvert.DeserializeObject<ClientManifest>(await reader.ReadToEndAsync());
        }
    }
    catch {
        previousManifest = new ClientManifest() { Entries = new Entry[0] };
    }

    // Rebuild only when at least one current hash is not in the previous manifest.
    var previoussha1s = previousManifest.Entries.Select(s => s.Sha1);
    if (!manifest.Entries.Select(s => s.Sha1).All(s => previoussha1s.Contains(s))) {
        var archiveName = $".{Path.DirectorySeparatorChar}output{Path.DirectorySeparatorChar}parser-input-files.tar.bz2";
        await _client.DownloadClientAsync(manifest);

        // Extract every input file in parallel; NStcData (map) files go to a
        // dedicated subfolder, NScliData files are renamed during extraction.
        await Task.WhenAll(fileslist.Select(file => {
            var rename = file.Contains("NScliData");
            var dest = file.Contains("NStcData")
                ? $".{Path.DirectorySeparatorChar}output{Path.DirectorySeparatorChar}parser{Path.DirectorySeparatorChar}map{Path.DirectorySeparatorChar}"
                : $".{Path.DirectorySeparatorChar}output{Path.DirectorySeparatorChar}parser{Path.DirectorySeparatorChar}";
            var fileInfo = new FileInfo($".{Path.DirectorySeparatorChar}output{Path.DirectorySeparatorChar}{file}");
            return (_extractor.ExtractAsync(fileInfo, dest, rename));
        }));

        var directoryOfFilesToBeTarred = $".{Path.DirectorySeparatorChar}output{Path.DirectorySeparatorChar}parser";
        var filesInDirectory = Directory.GetFiles(directoryOfFilesToBeTarred, "*.*", SearchOption.AllDirectories);

        if (File.Exists(archiveName)) {
            File.Delete(archiveName);
        }

        // Grace delay before re-creating the archive.
        // NOTE(review): presumably waits for extraction file handles to be released — confirm; a fixed 10s sleep is fragile.
        await Task.Delay(10000);

        // Pack all extracted parser files into a bzip2-compressed tar archive.
        // Scoped block so both streams are disposed (flushed) before the archive is used.
        {
            await using var targetStream = new BZip2OutputStream(File.Create(archiveName));
            using var tarArchive = TarArchive.CreateOutputTarArchive(targetStream, TarBuffer.DefaultBlockFactor);
            foreach (var file in filesInDirectory) {
                var entry = TarEntry.CreateEntryFromFile(file);
                tarArchive.WriteEntry(entry, true);
            }
        }

        // Publish the manifest we just built so the next run sees these hashes.
        var emptyfile = JsonConvert.SerializeObject(manifest);
        {
            await using var stream = new MemoryStream(Encoding.UTF8.GetBytes(emptyfile));
            var putRequest = new PutObjectRequest {
                BucketName = Environment.GetEnvironmentVariable("S3_BUCKET"),
                Key = "clientmanifest.json",
                ContentType = "text/json",
                InputStream = stream
            };
            await client.PutObjectAsync(putRequest);
        }

        // Announce the result on Discord; files above ~8 MB (Discord upload limit)
        // only get a size notice instead of the attachment.
        if (_discord.GetChannel(719772084968095775) is SocketTextChannel channel) {
            var file = new FileInfo(archiveName);
            if (file.Length > 8388119) {
                var send = await channel.SendMessageAsync($"<:altz:699420721088168036><:altz:699420721088168036><:altz:699420721088168036>Parser Too Heavy<:altz:699420721088168036><:altz:699420721088168036><:altz:699420721088168036>\n - Size : {file.Length}");
            }
            else {
                var alq = string.Concat(Enumerable.Repeat("<:altq:699420721130242159>", 20));
                var send = await channel.SendFileAsync(archiveName,
                    $"{alq}\n<:altp:699420720819732651><:altp:699420720819732651><:altp:699420720819732651><:altp:699420720819732651><:altp:699420720819732651><:altp:699420720819732651>PARSER FILES GENERATED<:altp:699420720819732651><:altp:699420720819732651><:altp:699420720819732651><:altp:699420720819732651><:altp:699420720819732651><:altp:699420720819732651>\n{alq}");
            }
        }
    }
}