/// <summary>
/// Builds a sparse in-memory lookup table over a sorted hash index file,
/// mapping sampled hashes to their byte offsets in the file so later
/// lookups only need to scan a small window of the index.
/// </summary>
/// <param name="indexfile">Path to the sorted index file (one hash entry per line)</param>
/// <param name="cache">MRU cache used to read the compressed volume files</param>
/// <param name="blocksize">Size in bytes of a data block</param>
/// <param name="hashsize">Size in bytes of a raw hash value</param>
public HashLookupHelper(string indexfile, CompressedFileMRUCache cache, int blocksize, int hashsize)
{
    m_cache = cache;
    m_blocksize = blocksize;
    m_hashsize = hashsize;
    m_indexfile = File.OpenRead(indexfile);

    // Build index ....
    // BUGFIX: if index construction throws, dispose the stream we just
    // opened instead of leaking the file handle (the constructor fails,
    // so no caller would ever get a chance to dispose it)
    try
    {
        // First pass: count distinct hashes. The file is sorted, so
        // duplicates are adjacent and comparing against the previous
        // key is sufficient.
        var hashes = 0L;
        string prev = null;
        m_indexfile.Position = 0;
        foreach (var n in AllFileLines())
        {
            if (n.Key != prev)
            {
                hashes++;
                prev = n.Key;
            }
        }

        Console.WriteLine("Index file has {0} hashes in total", hashes);

        // Cap the table at LOOKUP_TABLE_SIZE entries and sample one
        // entry every "lookupincrements" distinct hashes
        var lookuptablesize = Math.Max(1, Math.Min(LOOKUP_TABLE_SIZE, hashes) - 1);
        var lookupincrements = hashes / lookuptablesize;

        Console.WriteLine("Building lookup table with {0} entries, giving increments of {1}", lookuptablesize, lookupincrements);

        // Second pass: record every lookupincrements'th distinct hash
        // together with the stream offset captured before its line
        prev = null;
        var prevoff = 0L;
        var hc = 0L;
        m_indexfile.Position = 0;
        foreach (var n in AllFileLines())
        {
            if (n.Key != prev)
            {
                if ((hc % lookupincrements) == 0)
                {
                    m_lookup.Add(n.Key);
                    m_offsets.Add(prevoff);
                }
                hc++;
                prev = n.Key;
                // NOTE(review): this records the stream position after
                // AllFileLines() has consumed the entry; assumed to be
                // consistent with how m_offsets is used for seeking — confirm
                prevoff = m_indexfile.Position;
            }
        }
    }
    catch
    {
        m_indexfile.Dispose();
        throw;
    }
}
/// <summary>
/// Prepares hash lookups by sampling the sorted index file into a small
/// table of (hash, file offset) pairs kept in memory.
/// </summary>
/// <param name="indexfile">Path to the sorted hash index file</param>
/// <param name="cache">Cache for reading compressed volume files</param>
/// <param name="blocksize">Block size in bytes</param>
/// <param name="hashsize">Hash size in bytes</param>
public HashLookupHelper(string indexfile, CompressedFileMRUCache cache, int blocksize, int hashsize)
{
    m_cache = cache;
    m_blocksize = blocksize;
    m_hashsize = hashsize;
    m_indexfile = File.OpenRead(indexfile);

    // Pass 1: count distinct hashes; the index is sorted, so equal
    // hashes sit on adjacent lines and a single look-behind suffices
    string lastSeen = null;
    var distinctHashes = 0L;
    m_indexfile.Position = 0;
    foreach (var entry in AllFileLines())
    {
        if (entry.Key != lastSeen)
        {
            distinctHashes++;
            lastSeen = entry.Key;
        }
    }

    Console.WriteLine("Index file has {0} hashes in total", distinctHashes);

    // Bound the table size, then derive how many distinct hashes to
    // skip between samples
    var tableSize = Math.Max(1, Math.Min(LOOKUP_TABLE_SIZE, distinctHashes) - 1);
    var stride = distinctHashes / tableSize;

    Console.WriteLine("Building lookup table with {0} entries, giving increments of {1}", tableSize, stride);

    // Pass 2: store every stride'th distinct hash along with the stream
    // offset captured just before its line was consumed
    lastSeen = null;
    var offsetBefore = 0L;
    var distinctCount = 0L;
    m_indexfile.Position = 0;
    foreach (var entry in AllFileLines())
    {
        if (entry.Key == lastSeen)
            continue;

        if (distinctCount % stride == 0)
        {
            m_lookup.Add(entry.Key);
            m_offsets.Add(offsetBefore);
        }

        distinctCount++;
        lastSeen = entry.Key;
        offsetBefore = m_indexfile.Position;
    }
}
/// <summary>
/// Restores files from a folder of downloaded volume files.
/// args[1] is the folder containing the volumes (and index), args[2]
/// optionally selects a specific filelist; --targetpath redirects the
/// restore destination.
/// </summary>
/// <param name="args">Command arguments: command name, folder, optional filelist selector</param>
/// <param name="options">Parsed commandline options</param>
/// <param name="filter">Filter deciding which files to restore</param>
/// <returns>0 on success, 100 on invalid arguments or setup errors</returns>
public static int Run(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 2 && args.Count != 3)
    {
        Console.WriteLine("Invalid argument count ({0} expected 2 or 3): {1}{2}", args.Count, Environment.NewLine, string.Join(Environment.NewLine, args));
        return 100;
    }

    var folder = Path.GetFullPath(args[1]);
    if (!Directory.Exists(folder))
    {
        Console.WriteLine("Folder not found: {0}", folder);
        return 100;
    }

    Directory.SetCurrentDirectory(folder);

    string targetpath;
    options.TryGetValue("targetpath", out targetpath);

    string ixfile;
    options.TryGetValue("indexfile", out ixfile);
    if (string.IsNullOrWhiteSpace(ixfile))
        ixfile = "index.txt";
    ixfile = Path.GetFullPath(ixfile);

    if (!File.Exists(ixfile))
    {
        Console.WriteLine("Index file not found, perhaps you need to run the index command?");
        return 100;
    }

    // The lookup helper requires the index to be sorted
    Console.Write("Sorting index file ...");
    Index.SortFile(ixfile, ixfile);
    Console.WriteLine(" done!");

    string filelist;
    if (args.Count == 2)
    {
        // No filelist selector given; use the first available set
        var time = List.ParseListFiles(folder).First();
        filelist = time.Value;
        Console.WriteLine("Using set 0 with timestamp {0}", time.Key.ToLocalTime());
    }
    else
    {
        filelist = List.SelectListFile(args[2], folder);
    }

    Library.Main.Volumes.VolumeReaderBase.UpdateOptionsFromManifest(Path.GetExtension(filelist).Trim('.'), filelist, new Duplicati.Library.Main.Options(options));

    string blocksize_str;
    options.TryGetValue("blocksize", out blocksize_str);
    string blockhash_str;
    options.TryGetValue("block-hash-algorithm", out blockhash_str);
    string filehash_str;
    // BUGFIX: previously read "block-hash-algorithm" again, so a custom
    // --file-hash-algorithm was silently ignored
    options.TryGetValue("file-hash-algorithm", out filehash_str);

    long blocksize = string.IsNullOrWhiteSpace(blocksize_str) ? 0 : Library.Utility.Sizeparser.ParseSize(blocksize_str);
    if (blocksize <= 0)
    {
        // BUGFIX: the {0} placeholder had no matching argument and was
        // printed literally
        Console.WriteLine("Invalid blocksize: {0}, try setting --blocksize manually", blocksize_str);
        return 100;
    }

    var blockhasher = string.IsNullOrWhiteSpace(blockhash_str) ? null : System.Security.Cryptography.HashAlgorithm.Create(blockhash_str);
    var filehasher = string.IsNullOrWhiteSpace(filehash_str) ? null : System.Security.Cryptography.HashAlgorithm.Create(filehash_str);

    if (blockhasher == null)
        throw new Duplicati.Library.Interface.UserInformationException(string.Format("Block hash algorithm not valid: {0}", blockhash_str));
    if (filehasher == null)
        throw new Duplicati.Library.Interface.UserInformationException(string.Format("File hash algorithm not valid: {0}", filehash_str));

    // Number of block hashes that fit inside one blocklist block
    var hashesprblock = blocksize / (blockhasher.HashSize / 8);

    using (var mru = new CompressedFileMRUCache(options))
    {
        Console.WriteLine("Building lookup table for file hashes");
        var lookup = new HashLookupHelper(ixfile, mru, (int)blocksize, blockhasher.HashSize / 8);

        var filecount = 0L;
        string largestprefix = null;
        string[] largestprefixparts = null;

        if (!string.IsNullOrWhiteSpace(targetpath))
            Console.WriteLine("Computing restore path");

        // First enumeration: count files and compute the longest common
        // path prefix, which is stripped when restoring to a target path
        foreach (var f in List.EnumerateFilesInDList(filelist, filter, options))
        {
            if (largestprefix == null)
            {
                largestprefix = f.Path;
                largestprefixparts = largestprefix.Split(new char[] { Path.DirectorySeparatorChar });
            }
            else if (largestprefix.Length > 1)
            {
                var parts = f.Path.Split(new char[] { Path.DirectorySeparatorChar });

                var ni = 0;
                for (; ni < Math.Min(parts.Length, largestprefixparts.Length); ni++)
                    if (!Library.Utility.Utility.ClientFilenameStringComparer.Equals(parts[ni], largestprefixparts[ni]))
                        break;

                if (ni != largestprefixparts.Length)
                {
                    if (ni == 0)
                    {
                        largestprefixparts = new string[0];
                        largestprefix = string.Empty;
                    }
                    else
                    {
                        // NOTE(review): resizing to ni - 1 drops the last
                        // *matching* component as well; looks like an
                        // off-by-one but preserved to keep restore paths
                        // identical — confirm against the original intent
                        Array.Resize(ref largestprefixparts, ni - 1);
                        largestprefix = string.Join(Path.DirectorySeparatorChar.ToString(), largestprefixparts);
                    }
                }
            }
            filecount++;
        }

        // BUGFIX: with zero files enumerated, largestprefix stayed null and
        // the .Length access below threw a NullReferenceException
        if (largestprefix == null)
        {
            largestprefix = string.Empty;
            largestprefixparts = new string[0];
        }

        Console.WriteLine("Restoring {0} files to {1}", filecount, string.IsNullOrWhiteSpace(targetpath) ? "original position" : targetpath);

        if (Library.Utility.Utility.IsClientLinux || largestprefix.Length > 0)
            largestprefix = Library.Utility.Utility.AppendDirSeparator(largestprefix);

        if (!string.IsNullOrEmpty(largestprefix))
            Console.WriteLine("Removing common prefix {0} from files", largestprefix);

        var i = 0L;
        var errors = 0L;
        // Second enumeration: restore each file into a temp file, verify
        // the full-file hash, then copy into place
        foreach (var f in List.EnumerateFilesInDList(filelist, filter, options))
        {
            try
            {
                var targetfile = MapToRestorePath(f.Path, largestprefix, targetpath);
                if (!Directory.Exists(Path.GetDirectoryName(targetfile)))
                    Directory.CreateDirectory(Path.GetDirectoryName(targetfile));

                Console.Write("{0}: {1} ({2})", i, targetfile, Library.Utility.Utility.FormatSizeString(f.Size));

                using (var tf = new Library.Utility.TempFile())
                {
                    using (var sw = File.OpenWrite(tf))
                    {
                        if (f.BlocklistHashes == null)
                        {
                            // Small file: a single block identified by the file hash
                            lookup.WriteHash(sw, f.Hash);
                        }
                        else
                        {
                            // Large file: each blocklist hash expands to a list of
                            // block hashes that are written at computed offsets
                            var blhi = 0L;
                            foreach (var blh in f.BlocklistHashes)
                            {
                                Console.Write(" {0}", blhi);
                                var blockhashoffset = blhi * hashesprblock * blocksize;
                                try
                                {
                                    var bi = 0;
                                    foreach (var h in lookup.ReadBlocklistHashes(blh))
                                    {
                                        try
                                        {
                                            sw.Position = blockhashoffset + (bi * blocksize);
                                            lookup.WriteHash(sw, h);
                                        }
                                        catch (Exception ex)
                                        {
                                            Console.WriteLine("Failed to read hash: {0}{1}{2}", h, Environment.NewLine, ex.ToString());
                                        }
                                        bi++;
                                    }
                                }
                                catch (Exception ex)
                                {
                                    Console.WriteLine("Failed to read Blocklist hash: {0}{1}{2}", blh, Environment.NewLine, ex.ToString());
                                }
                                blhi++;
                            }
                        }
                    }

                    // Verify the restored content against the recorded file hash
                    string fh;
                    using (var fs = File.OpenRead(tf))
                        fh = Convert.ToBase64String(filehasher.ComputeHash(fs));

                    if (fh == f.Hash)
                    {
                        Console.WriteLine(" done!");
                        File.Copy(tf, targetfile, true);
                    }
                    else
                    {
                        Console.Write(" - Restored file hash mismatch");
                        if (File.Exists(targetfile))
                            Console.WriteLine(" - not overwriting existing file: {0}", targetfile);
                        else
                            Console.WriteLine(" - restoring file in damaged condition");
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(" error: {0}", ex.ToString());
                errors++;
            }
            i++;
        }
    }

    return 0;
}
/// <summary>
/// Restores files from a folder of downloaded volume files.
/// args[1] is the folder containing the volumes (and index), args[2]
/// optionally selects a specific filelist; --targetpath redirects the
/// restore destination.
/// </summary>
/// <param name="args">Command arguments: command name, folder, optional filelist selector</param>
/// <param name="options">Parsed commandline options</param>
/// <param name="filter">Filter deciding which files to restore</param>
/// <returns>0 on success, 100 on invalid arguments or setup errors</returns>
public static int Run(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 2 && args.Count != 3)
    {
        Console.WriteLine("Invalid argument count ({0} expected 2 or 3): {1}{2}", args.Count, Environment.NewLine, string.Join(Environment.NewLine, args));
        return 100;
    }

    var folder = Path.GetFullPath(args[1]);
    if (!Directory.Exists(folder))
    {
        Console.WriteLine("Folder not found: {0}", folder);
        return 100;
    }

    Directory.SetCurrentDirectory(folder);

    string targetpath;
    options.TryGetValue("targetpath", out targetpath);

    string ixfile;
    options.TryGetValue("indexfile", out ixfile);
    if (string.IsNullOrWhiteSpace(ixfile))
        ixfile = "index.txt";
    ixfile = Path.GetFullPath(ixfile);

    if (!File.Exists(ixfile))
    {
        Console.WriteLine("Index file not found, perhaps you need to run the index command?");
        return 100;
    }

    // The lookup helper requires the index to be sorted
    Console.Write("Sorting index file ...");
    Index.SortFile(ixfile, ixfile);
    Console.WriteLine(" done!");

    string filelist;
    if (args.Count == 2)
    {
        // No filelist selector given; use the first available set
        var time = List.ParseListFiles(folder).First();
        filelist = time.Value;
        Console.WriteLine("Using set 0 with timestamp {0}", time.Key.ToLocalTime());
    }
    else
    {
        filelist = List.SelectListFile(args[2], folder);
    }

    Library.Main.Volumes.VolumeReaderBase.UpdateOptionsFromManifest(Path.GetExtension(filelist).Trim('.'), filelist, new Duplicati.Library.Main.Options(options));

    string blocksize_str;
    options.TryGetValue("blocksize", out blocksize_str);
    string blockhash_str;
    options.TryGetValue("block-hash-algorithm", out blockhash_str);
    string filehash_str;
    // BUGFIX: previously read "block-hash-algorithm" again, so a custom
    // --file-hash-algorithm was silently ignored
    options.TryGetValue("file-hash-algorithm", out filehash_str);

    long blocksize = string.IsNullOrWhiteSpace(blocksize_str) ? 0 : Library.Utility.Sizeparser.ParseSize(blocksize_str);
    if (blocksize <= 0)
    {
        // BUGFIX: the {0} placeholder had no matching argument and was
        // printed literally
        Console.WriteLine("Invalid blocksize: {0}, try setting --blocksize manually", blocksize_str);
        return 100;
    }

    var blockhasher = string.IsNullOrWhiteSpace(blockhash_str) ? null : System.Security.Cryptography.HashAlgorithm.Create(blockhash_str);
    var filehasher = string.IsNullOrWhiteSpace(filehash_str) ? null : System.Security.Cryptography.HashAlgorithm.Create(filehash_str);

    // Throw the user-facing exception type instead of the bare Exception
    // previously used here, matching how the rest of the tool reports
    // invalid hash algorithms
    if (blockhasher == null)
        throw new Duplicati.Library.Interface.UserInformationException(string.Format("Block hash algorithm not valid: {0}", blockhash_str));
    if (filehasher == null)
        throw new Duplicati.Library.Interface.UserInformationException(string.Format("File hash algorithm not valid: {0}", filehash_str));

    // Number of block hashes that fit inside one blocklist block
    var hashesprblock = blocksize / (blockhasher.HashSize / 8);

    using (var mru = new CompressedFileMRUCache(options))
    {
        Console.WriteLine("Building lookup table for file hashes");
        var lookup = new HashLookupHelper(ixfile, mru, (int)blocksize, blockhasher.HashSize / 8);

        var filecount = 0L;
        string largestprefix = null;
        string[] largestprefixparts = null;

        if (!string.IsNullOrWhiteSpace(targetpath))
            Console.WriteLine("Computing restore path");

        // First enumeration: count files and compute the longest common
        // path prefix, which is stripped when restoring to a target path
        foreach (var f in List.EnumerateFilesInDList(filelist, filter, options))
        {
            if (largestprefix == null)
            {
                largestprefix = f.Path;
                largestprefixparts = largestprefix.Split(new char[] { Path.DirectorySeparatorChar });
            }
            else if (largestprefix.Length > 1)
            {
                var parts = f.Path.Split(new char[] { Path.DirectorySeparatorChar });

                var ni = 0;
                for (; ni < Math.Min(parts.Length, largestprefixparts.Length); ni++)
                    if (!Library.Utility.Utility.ClientFilenameStringComparer.Equals(parts[ni], largestprefixparts[ni]))
                        break;

                if (ni != largestprefixparts.Length)
                {
                    if (ni == 0)
                    {
                        largestprefixparts = new string[0];
                        largestprefix = string.Empty;
                    }
                    else
                    {
                        // NOTE(review): resizing to ni - 1 drops the last
                        // *matching* component as well; looks like an
                        // off-by-one but preserved to keep restore paths
                        // identical — confirm against the original intent
                        Array.Resize(ref largestprefixparts, ni - 1);
                        largestprefix = string.Join(Path.DirectorySeparatorChar.ToString(), largestprefixparts);
                    }
                }
            }
            filecount++;
        }

        // BUGFIX: with zero files enumerated, largestprefix stayed null and
        // the .Length access below threw a NullReferenceException
        if (largestprefix == null)
        {
            largestprefix = string.Empty;
            largestprefixparts = new string[0];
        }

        Console.WriteLine("Restoring {0} files to {1}", filecount, string.IsNullOrWhiteSpace(targetpath) ? "original position" : targetpath);

        if (Library.Utility.Utility.IsClientLinux || largestprefix.Length > 0)
            largestprefix = Library.Utility.Utility.AppendDirSeparator(largestprefix);

        if (!string.IsNullOrEmpty(largestprefix))
            Console.WriteLine("Removing common prefix {0} from files", largestprefix);

        var i = 0L;
        var errors = 0L;
        // Second enumeration: restore each file into a temp file, verify
        // the full-file hash, then copy into place
        foreach (var f in List.EnumerateFilesInDList(filelist, filter, options))
        {
            try
            {
                var targetfile = MapToRestorePath(f.Path, largestprefix, targetpath);
                if (!Directory.Exists(Path.GetDirectoryName(targetfile)))
                    Directory.CreateDirectory(Path.GetDirectoryName(targetfile));

                Console.Write("{0}: {1} ({2})", i, targetfile, Library.Utility.Utility.FormatSizeString(f.Size));

                using (var tf = new Library.Utility.TempFile())
                {
                    using (var sw = File.OpenWrite(tf))
                    {
                        if (f.BlocklistHashes == null)
                        {
                            // Small file: a single block identified by the file hash
                            lookup.WriteHash(sw, f.Hash);
                        }
                        else
                        {
                            // Large file: each blocklist hash expands to a list of
                            // block hashes that are written at computed offsets
                            var blhi = 0L;
                            foreach (var blh in f.BlocklistHashes)
                            {
                                Console.Write(" {0}", blhi);
                                var blockhashoffset = blhi * hashesprblock * blocksize;
                                try
                                {
                                    var bi = 0;
                                    foreach (var h in lookup.ReadBlocklistHashes(blh))
                                    {
                                        try
                                        {
                                            sw.Position = blockhashoffset + (bi * blocksize);
                                            lookup.WriteHash(sw, h);
                                        }
                                        catch (Exception ex)
                                        {
                                            Console.WriteLine("Failed to read hash: {0}{1}{2}", h, Environment.NewLine, ex.ToString());
                                        }
                                        bi++;
                                    }
                                }
                                catch (Exception ex)
                                {
                                    Console.WriteLine("Failed to read Blocklist hash: {0}{1}{2}", blh, Environment.NewLine, ex.ToString());
                                }
                                blhi++;
                            }
                        }
                    }

                    // Verify the restored content against the recorded file hash
                    string fh;
                    using (var fs = File.OpenRead(tf))
                        fh = Convert.ToBase64String(filehasher.ComputeHash(fs));

                    if (fh == f.Hash)
                    {
                        Console.WriteLine(" done!");
                        File.Copy(tf, targetfile, true);
                    }
                    else
                    {
                        Console.Write(" - Restored file hash mismatch");
                        if (File.Exists(targetfile))
                            Console.WriteLine(" - not overwriting existing file: {0}", targetfile);
                        else
                            Console.WriteLine(" - restoring file in damaged condition");
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(" error: {0}", ex.ToString());
                errors++;
            }
            i++;
        }
    }

    return 0;
}