public static void ExtractWrappedArchive(string filePath, string targetPath)
{
    Inits.EnsureBinaries();

    FileInfo ufil = new FileInfo(filePath);
    if (!ufil.Exists)
        throw new FileNotFoundException("Archive file not found.", filePath);

    using (FileStream fs = ufil.OpenRead())
    {
        using (MemoryStream ms = new MemoryStream())
        {
            // Decode the outer LZMA layer into memory, byte by byte.
            using (LzmaDecodeStream dec = new LzmaDecodeStream(fs))
            {
                int byt;
                while ((byt = dec.ReadByte()) != -1)
                {
                    ms.WriteByte((byte)byt);
                }
            }

            // Extract the inner 7z archive from the decoded buffer.
            using (SevenZipExtractor ex = new SevenZipExtractor(ms))
            {
                ex.ExtractFiles(f =>
                {
                    string file = string.Format("{0}{1}{2}",
                        targetPath.TrimEnd(Path.DirectorySeparatorChar),
                        Path.DirectorySeparatorChar,
                        f.ArchiveFileInfo.FileName);

                    string dir = Path.GetDirectoryName(file);
                    if (!Directory.Exists(dir))
                        Directory.CreateDirectory(dir);

                    f.ExtractToFile = file;
                });
            }
        }
    }
}
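// Hypothetical usage sketch, not part of the original source: the archive path and
// target directory below are made-up examples. Assumes the 7z binaries are resolvable
// via Inits.EnsureBinaries() as in the method above.
public static void ExtractWrappedArchiveExample()
{
    // Decode the LZMA-wrapped 7z archive and extract its contents to the target folder.
    ExtractWrappedArchive(@"C:\downloads\payload.7z.lzma", @"C:\data\extracted");
}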
public bool unzipFile(string filePath, out string result)
{
    FileInfo fileInfo = new FileInfo(filePath);

    // Step 1. Fix file
    fixFile(filePath, fileInfo);

    // Step 2. Unzip file
    string outputPath = fileInfo.FullName.Replace(".csv", "_r.csv");
    try
    {
        using (var input = new FileStream(filePath, FileMode.Open, FileAccess.ReadWrite))
        using (var decoder = new LzmaDecodeStream(input))
        using (var output = new FileStream(outputPath, FileMode.Create))
        {
            // Copy the decoded data to the output file in 24 KiB chunks.
            const int bufSize = 24576;
            var buf = new byte[bufSize];
            int count;
            while ((count = decoder.Read(buf, 0, bufSize)) > 0)
            {
                output.Write(buf, 0, count);
            }
        }
    }
    catch (Exception)
    {
        result = "[Broken file]\t" + fileInfo.Name;
        File.Delete(outputPath);
        return false;
    }

    result = "[Unzip file]\t" + fileInfo.Name;
    return true;
}
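// Hypothetical usage sketch, not part of the original source: the file name is a
// made-up example. unzipFile writes the decoded data next to the input as "*_r.csv"
// and reports success through the return value and the result message.
public void UnzipFileExample()
{
    string result;
    bool ok = unzipFile(@"C:\exports\prices.csv", out result);
    Console.WriteLine(ok ? result : "Failed: " + result);
}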
public static Stream GetFile(string url)
{
    var memoryStream = new MemoryStream();

    using (WebClient wc = new WebClient())
    using (Stream response = wc.OpenRead(url))
    using (LzmaDecodeStream lzma = new LzmaDecodeStream(response))
    {
        // Decompress the downloaded LZMA stream into memory.
        lzma.CopyTo(memoryStream);
    }

    // Rewind so callers can read the decompressed data from the start.
    memoryStream.Position = 0;
    return memoryStream;
}
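// Hypothetical usage sketch, not part of the original source: the URL is a made-up
// example. GetFile returns an in-memory copy of the decompressed payload, already
// rewound to the beginning.
public static void GetFileExample()
{
    using (var stream = GetFile("https://example.com/data.txt.lzma"))
    using (var reader = new StreamReader(stream))
    {
        Console.WriteLine(reader.ReadToEnd());
    }
}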
IReadOnlyDictionary<string, IFileFingerprint> LoadBlobsImpl(CancellationToken cancellationToken)
{
    var blobs = new Dictionary<string, IFileFingerprint>();
    var needRebuild = false;
    var blobCount = 0;

    _fileSequence.Rescan();

    var totalSize = 0L;
    var compressedSize = 0L;

    foreach (var fileInfo in _fileSequence.Files)
    {
        try
        {
            fileInfo.Refresh();

            if (fileInfo.Length < 5)
                continue;

            // Each cache file is an LZMA-compressed stream of BSON documents.
            using (var fileStream = OpenBsonFileForRead(fileInfo))
            using (var lzmaDecodeStream = new LzmaDecodeStream(fileStream))
            using (var bs = new SequentialReadStream(lzmaDecodeStream))
            using (var br = new BsonReader(bs) { DateTimeKindHandling = DateTimeKind.Utc, SupportMultipleContent = true })
            {
                while (br.Read())
                {
                    cancellationToken.ThrowIfCancellationRequested();

                    try
                    {
                        var fileFingerprint = ReadFileFingerprint(br);

                        if (null == fileFingerprint)
                        {
                            needRebuild = true;
                            break;
                        }

                        if (blobs.ContainsKey(fileFingerprint.FullFilePath))
                            Debug.WriteLine($"Collision for {fileFingerprint.FullFilePath}");

                        blobs[fileFingerprint.FullFilePath] = fileFingerprint;

                        ++blobCount;
                    }
                    catch (IOException ex)
                    {
                        needRebuild = true;

                        // The entry might or might not be valid.
                        Debug.WriteLine("BsonFileFingerprintStore.LoadBlobsImpl() read failed: " + ex.Message);
                    }
                }

                totalSize += bs.Position;
                compressedSize += fileStream.Length;
            }
        }
        catch (IOException)
        {
            needRebuild = true;
        }
        catch (LzmaException)
        {
            needRebuild = true;
        }
        catch (JsonException)
        {
            needRebuild = true;
        }
    }

    Debug.WriteLine($"Read {SizeConversion.BytesToMiB(totalSize):F2}MiB bytes from {SizeConversion.BytesToMiB(compressedSize):F2}MiB file");

    var count = (double)blobs.Count;
    Debug.WriteLine($"Average size {totalSize / count:F1} bytes or {compressedSize / count:F1} compressed");

    // If the files hold many more entries than unique paths, the cache has accumulated
    // stale duplicates and is worth compacting.
    if (blobCount > blobs.Count + 100 + blobs.Count / 8)
        needRebuild = true;

    if (needRebuild)
    {
        Console.WriteLine("Rebuilding cache files");
        RebuildCache(blobs);
    }

    return blobs;
}
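// Hypothetical usage sketch, not part of the original source: assumes this method lives
// on a store class exposing the private members used above (_fileSequence, RebuildCache,
// OpenBsonFileForRead, ReadFileFingerprint).
void LoadBlobsExample(CancellationToken cancellationToken)
{
    var blobs = LoadBlobsImpl(cancellationToken);
    Debug.WriteLine($"Loaded {blobs.Count} file fingerprints");
}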