public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(System.IO.File.Exists(m_options.Dbpath) ? m_options.Dbpath : (string)tmpdb, "ListControlFiles"))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter);

        try
        {
            var filteredList = ListFilesHandler.ParseAndFilterFilesets(backend.List(), m_options);
            if (filteredList.Count == 0)
                throw new Exception("No filesets found on remote target");

            Exception lastEx = new Exception("No suitable files found on remote target");

            foreach (var fileversion in filteredList)
                try
                {
                    if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                        return;

                    var file = fileversion.Value.File;

                    long size;
                    string hash;
                    RemoteVolumeType type;
                    RemoteVolumeState state;
                    if (!db.GetRemoteVolume(file.Name, out hash, out size, out type, out state))
                        size = file.Size;

                    var files = new List<Library.Interface.IListResultFile>();
                    using (var tmpfile = backend.Get(file.Name, size, hash))
                    using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options))
                        foreach (var cf in tmp.ControlFiles)
                            if (Library.Utility.FilterExpression.Matches(filter, cf.Key))
                                files.Add(new ListResultFile(cf.Key, null));

                    m_result.SetResult(new Library.Interface.IListResultFileset[] { new ListResultFileset(fileversion.Key, fileversion.Value.Time, -1, -1) }, files);
                    lastEx = null;
                    break;
                }
                catch (Exception ex)
                {
                    lastEx = ex;
                    if (ex is System.Threading.ThreadAbortException)
                        throw;
                }

            if (lastEx != null)
                throw lastEx;
        }
        finally
        {
            backend.WaitForComplete(db, null);
        }
    }
}
/// <summary>
/// Tests the volume by examining the internal contents
/// </summary>
/// <param name="vol">The remote volume being examined</param>
/// <param name="tf">The path to the downloaded copy of the file</param>
/// <param name="sample_percent">A value between 0 and 1 that indicates how many blocks are tested in a dblock file</param>
public static KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> TestVolumeInternals(LocalTestDatabase db, IRemoteVolume vol, string tf, Options options, ILogWriter log, double sample_percent)
{
    var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(options.BlockHashAlgorithm);

    if (blockhasher == null)
        throw new Exception(Strings.Common.InvalidHashAlgorithm(options.BlockHashAlgorithm));
    if (!blockhasher.CanReuseTransform)
        throw new Exception(Strings.Common.InvalidCryptoSystem(options.BlockHashAlgorithm));

    var hashsize = blockhasher.HashSize / 8;
    var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
    sample_percent = Math.Min(1, Math.Max(sample_percent, 0.01));

    if (parsedInfo.FileType == RemoteVolumeType.Files)
    {
        //Compare with db and see if all files are accounted for
        // with correct file hashes and blocklist hashes
        using (var fl = db.CreateFilelist(vol.Name))
        {
            using (var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, options))
                foreach (var f in rd.Files)
                    fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);

            return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, fl.Compare().ToList());
        }
    }
    else if (parsedInfo.FileType == RemoteVolumeType.Index)
    {
        var blocklinks = new List<Tuple<string, string, long>>();
        IEnumerable<KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>> combined = new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0];

        //Compare with db and see that all hashes and volumes are listed
        using (var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, options, hashsize))
            foreach (var v in rd.Volumes)
            {
                blocklinks.Add(new Tuple<string, string, long>(v.Filename, v.Hash, v.Length));
                using (var bl = db.CreateBlocklist(v.Filename))
                {
                    foreach (var h in v.Blocks)
                        bl.AddBlock(h.Key, h.Value);

                    combined = combined.Union(bl.Compare().ToArray());
                }
            }

        using (var il = db.CreateIndexlist(vol.Name))
        {
            foreach (var t in blocklinks)
                il.AddBlockLink(t.Item1, t.Item2, t.Item3);

            combined = combined.Union(il.Compare()).ToList();
        }

        return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, combined.ToList());
    }
    else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
    {
        using (var bl = db.CreateBlocklist(vol.Name))
        using (var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, options))
        {
            //Verify that all blocks are in the file
            foreach (var b in rd.Blocks)
                bl.AddBlock(b.Key, b.Value);

            //Select random blocks and verify their hashes match the filename and size
            var hashsamples = new List<KeyValuePair<string, long>>(rd.Blocks);
            var sampleCount = Math.Min(Math.Max(0, (int)(hashsamples.Count * sample_percent)), hashsamples.Count - 1);
            var rnd = new Random();

            while (hashsamples.Count > sampleCount)
                hashsamples.RemoveAt(rnd.Next(hashsamples.Count));

            var blockbuffer = new byte[options.Blocksize];
            var changes = new List<KeyValuePair<Library.Interface.TestEntryStatus, string>>();
            foreach (var s in hashsamples)
            {
                var size = rd.ReadBlock(s.Key, blockbuffer);
                if (size != s.Value)
                    changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                else
                {
                    var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                    if (hash != s.Key)
                        changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                }
            }

            return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, changes.Union(bl.Compare().ToList()));
        }
    }

    log.AddWarning(string.Format("Unexpected file type {0} for {1}", parsedInfo.FileType, vol.Name), null);
    return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, null);
}
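For orientation, a minimal invocation sketch follows. It assumes the surrounding Duplicati types (LocalTestDatabase, IRemoteVolume, Options, ILogWriter) are in scope and that the volume has already been downloaded; the variable names and the 0.2 sampling rate are illustrative, the latter matching the default used by the variant of this method later in this section.

    // Hypothetical invocation sketch; db, vol, tempFilePath, options and log
    // are assumed to exist in the caller. Value can be null when the method
    // falls through to the unexpected-file-type case.
    var verification = TestVolumeInternals(db, vol, tempFilePath, options, log, 0.2);
    foreach (var issue in verification.Value ?? new KeyValuePair<TestEntryStatus, string>[0])
        log.AddWarning(string.Format("{0}: {1}", issue.Key, issue.Value), null);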
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    var parsedfilter = new Library.Utility.FilterExpression(filterstrings);
    var simpleList = !(parsedfilter.Type == Library.Utility.FilterType.Simple || m_options.AllVersions);
    var filter = Library.Utility.JoinedFilterExpression.Join(parsedfilter, compositefilter);

    //Use a speedy local query
    if (!m_options.NoLocalDb && System.IO.File.Exists(m_options.Dbpath))
    {
        using (var db = new Database.LocalListDatabase(m_options.Dbpath))
        {
            m_result.SetDatabase(db);
            using (var filesets = db.SelectFileSets(m_options.Time, m_options.Version))
            {
                if (parsedfilter.Type != Library.Utility.FilterType.Empty)
                {
                    if (simpleList || (m_options.ListFolderContents && !m_options.AllVersions))
                        filesets.TakeFirst();
                }

                IEnumerable<Database.LocalListDatabase.IFileversion> files;
                if (m_options.ListFolderContents)
                    files = filesets.SelectFolderContents(filter);
                else if (m_options.ListPrefixOnly)
                    files = filesets.GetLargestPrefix(filter);
                else if (parsedfilter.Type == Duplicati.Library.Utility.FilterType.Empty)
                    files = null;
                else
                    files = filesets.SelectFiles(filter);

                if (m_options.ListSetsOnly)
                    m_result.SetResult(
                        filesets.QuickSets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        null
                    );
                else
                    m_result.SetResult(
                        filesets.Sets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        files == null
                            ? null
                            : (from n in files
                               select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Path, n.Sizes.ToArray())))
                              .ToArray()
                    );

                return;
            }
        }
    }

    m_result.AddMessage("No local database, accessing remote store");

    //TODO: Add prefix and foldercontents
    if (m_options.ListFolderContents)
        throw new Exception("Listing folder contents is not supported without a local database, consider using the \"repair\" option to rebuild the database.");
    else if (m_options.ListPrefixOnly)
        throw new Exception("Listing prefixes is not supported without a local database, consider using the \"repair\" option to rebuild the database.");

    // Otherwise, grab info from remote location
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(tmpdb, "List", true))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filteredList = ParseAndFilterFilesets(backend.List(), m_options);
        if (filteredList.Count == 0)
            throw new Exception("No filesets found on remote target");

        var numberSeq = CreateResultSequence(filteredList);
        if (parsedfilter.Type == Library.Utility.FilterType.Empty)
        {
            m_result.SetResult(numberSeq, null);
            m_result.EncryptedFiles = filteredList.Any(x => !string.IsNullOrWhiteSpace(x.Value.EncryptionModule));
            return;
        }

        var firstEntry = filteredList[0].Value;
        filteredList.RemoveAt(0);
        Dictionary<string, List<long>> res;

        if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
            return;

        using (var tmpfile = backend.Get(firstEntry.File.Name, firstEntry.File.Size, null))
        using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(firstEntry.File.Name), tmpfile, m_options))
            if (simpleList)
            {
                m_result.SetResult(
                    numberSeq.Take(1),
                    (from n in rd.Files
                     where Library.Utility.FilterExpression.Matches(filter, n.Path)
                     orderby n.Path
                     select new ListResultFile(n.Path, new long[] { n.Size }))
                    .ToArray()
                );

                return;
            }
            else
            {
                res = rd.Files
                    .Where(x => Library.Utility.FilterExpression.Matches(filter, x.Path))
                    .ToDictionary(
                        x => x.Path,
                        y => { var lst = new List<long>(); lst.Add(y.Size); return lst; },
                        Library.Utility.Utility.ClientFilenameStringComparer
                    );
            }

        long flindex = 1;
        foreach (var flentry in filteredList)
        {
            using (var tmpfile = backend.Get(flentry.Value.File.Name, flentry.Value.File == null ? -1 : flentry.Value.File.Size, null))
            using (var rd = new Volumes.FilesetVolumeReader(flentry.Value.CompressionModule, tmpfile, m_options))
            {
                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

                foreach (var p in from n in rd.Files where Library.Utility.FilterExpression.Matches(filter, n.Path) select n)
                {
                    List<long> lst;
                    if (!res.TryGetValue(p.Path, out lst))
                    {
                        lst = new List<long>();
                        res[p.Path] = lst;
                        for (var i = 0; i < flindex; i++)
                            lst.Add(-1);
                    }

                    lst.Add(p.Size);
                }

                foreach (var n in from i in res where i.Value.Count < flindex + 1 select i)
                    n.Value.Add(-1);

                flindex++;
            }
        }

        m_result.SetResult(
            numberSeq,
            from n in res
            orderby n.Key
            select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Key, n.Value))
        );
    }
}
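The remote path of Run merges per-version file sizes into a Dictionary<string, List<long>>, padding with -1 for versions in which a path does not appear. A self-contained sketch of just that padding pattern, with invented paths and sizes:

    using System;
    using System.Collections.Generic;

    static class SizePaddingDemo
    {
        static void Main()
        {
            var res = new Dictionary<string, List<long>>();
            long flindex = 1; // pretend version 0 was already processed

            // "b.txt" first appears in version 1, so version 0 is padded with -1
            Add(res, flindex, "b.txt", 42);

            // paths seen earlier but missing from this version get -1 appended
            foreach (var kv in res)
                if (kv.Value.Count < flindex + 1)
                    kv.Value.Add(-1);

            foreach (var kv in res)
                Console.WriteLine("{0}: [{1}]", kv.Key, string.Join(", ", kv.Value)); // b.txt: [-1, 42]
        }

        static void Add(Dictionary<string, List<long>> res, long flindex, string path, long size)
        {
            List<long> lst;
            if (!res.TryGetValue(path, out lst))
            {
                lst = new List<long>();
                res[path] = lst;
                for (var i = 0; i < flindex; i++)
                    lst.Add(-1); // pad earlier versions where the path was missing
            }
            lst.Add(size);
        }
    }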
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(System.IO.File.Exists(m_options.Dbpath) ? m_options.Dbpath : (string)tmpdb, "ListControlFiles", true))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter);

        try
        {
            var filteredList = ListFilesHandler.ParseAndFilterFilesets(backend.List(), m_options);
            if (filteredList.Count == 0)
                throw new Exception("No filesets found on remote target");

            Exception lastEx = new Exception("No suitable files found on remote target");

            foreach (var fileversion in filteredList)
            {
                try
                {
                    if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                        return;

                    var file = fileversion.Value.File;
                    var entry = db.GetRemoteVolume(file.Name);

                    var files = new List<Library.Interface.IListResultFile>();
                    using (var tmpfile = backend.Get(file.Name, entry.Size < 0 ? file.Size : entry.Size, entry.Hash))
                    using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options))
                        foreach (var cf in tmp.ControlFiles)
                        {
                            if (Library.Utility.FilterExpression.Matches(filter, cf.Key))
                                files.Add(new ListResultFile(cf.Key, null));
                        }

                    m_result.SetResult(new Library.Interface.IListResultFileset[] { new ListResultFileset(fileversion.Key, LocalDatabase.BackupType.PARTIAL_BACKUP, fileversion.Value.Time, -1, -1) }, files);
                    lastEx = null;
                    break;
                }
                catch (Exception ex)
                {
                    lastEx = ex;
                    if (ex is System.Threading.ThreadAbortException)
                        throw;
                }
            }

            if (lastEx != null)
                throw lastEx;
        }
        finally
        {
            backend.WaitForComplete(db, null);
        }
    }
}
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    if (string.IsNullOrEmpty(m_options.Restorepath))
        throw new Exception("Cannot restore control files without --restore-path");
    if (!System.IO.Directory.Exists(m_options.Restorepath))
        System.IO.Directory.CreateDirectory(m_options.Restorepath);

    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(System.IO.File.Exists(m_options.Dbpath) ? m_options.Dbpath : (string)tmpdb, "RestoreControlFiles"))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter);

        try
        {
            var filteredList = ListFilesHandler.ParseAndFilterFilesets(backend.List(), m_options);
            if (filteredList.Count == 0)
                throw new Exception("No filesets found on remote target");

            Exception lastEx = new Exception("No suitable files found on remote target");

            foreach (var fileversion in filteredList)
                try
                {
                    if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    {
                        backend.WaitForComplete(db, null);
                        return;
                    }

                    var file = fileversion.Value.File;

                    long size;
                    string hash;
                    RemoteVolumeType type;
                    RemoteVolumeState state;
                    if (!db.GetRemoteVolume(file.Name, out hash, out size, out type, out state))
                        size = file.Size;

                    var res = new List<string>();
                    using (var tmpfile = backend.Get(file.Name, size, hash))
                    using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options))
                        foreach (var cf in tmp.ControlFiles)
                            if (Library.Utility.FilterExpression.Matches(filter, cf.Key))
                            {
                                var targetpath = System.IO.Path.Combine(m_options.Restorepath, cf.Key);
                                using (var ts = System.IO.File.Create(targetpath))
                                    Library.Utility.Utility.CopyStream(cf.Value, ts);
                                res.Add(targetpath);
                            }

                    m_result.SetResult(res);
                    lastEx = null;
                    break;
                }
                catch (Exception ex)
                {
                    lastEx = ex;
                    if (ex is System.Threading.ThreadAbortException)
                        throw;
                }

            if (lastEx != null)
                throw lastEx;
        }
        finally
        {
            backend.WaitForComplete(db, null);
        }

        db.WriteResults();
    }
}
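The handlers in this section all build their filter the same way: parse the user's filter strings, join them with an optional composite filter, then test paths with FilterExpression.Matches. A sketch of that pattern; the example patterns are invented, while the calls themselves appear verbatim in the code above:

    // Invented example patterns; compositefilter may legitimately be null.
    var filterstrings = new[] { "*.xml", "ssh-keys" };
    Library.Utility.IFilter compositefilter = null;
    var filter = Library.Utility.JoinedFilterExpression.Join(
        new Library.Utility.FilterExpression(filterstrings), compositefilter);

    if (Library.Utility.FilterExpression.Matches(filter, "ssh-keys"))
    {
        // this control file would be listed/restored
    }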
/// <summary>
/// Tests the volume by examining the internal contents
/// </summary>
/// <param name="vol">The remote volume being examined</param>
/// <param name="tf">The path to the downloaded copy of the file</param>
/// <param name="sample_percent">A value between 0 and 1 that indicates how many blocks are tested in a dblock file</param>
public static KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> TestVolumeInternals(LocalTestDatabase db, IRemoteVolume vol, string tf, Options options, ILogWriter log, double sample_percent = 0.2)
{
    var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(options.BlockHashAlgorithm);

    if (blockhasher == null)
        throw new Exception(Strings.Foresthash.InvalidHashAlgorithm(options.BlockHashAlgorithm));
    if (!blockhasher.CanReuseTransform)
        throw new Exception(Strings.Foresthash.InvalidCryptoSystem(options.BlockHashAlgorithm));

    var hashsize = blockhasher.HashSize / 8;
    var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
    sample_percent = Math.Min(1, Math.Max(sample_percent, 0.01));

    if (parsedInfo.FileType == RemoteVolumeType.Files)
    {
        //Compare with db and see if all files are accounted for
        // with correct file hashes and blocklist hashes
        using (var fl = db.CreateFilelist(vol.Name))
        {
            using (var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, options))
                foreach (var f in rd.Files)
                    fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);

            return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, fl.Compare().ToList());
        }
    }
    else if (parsedInfo.FileType == RemoteVolumeType.Index)
    {
        var blocklinks = new List<Tuple<string, string, long>>();
        IEnumerable<KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>> combined = new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0];

        //Compare with db and see that all hashes and volumes are listed
        using (var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, options, hashsize))
            foreach (var v in rd.Volumes)
            {
                blocklinks.Add(new Tuple<string, string, long>(v.Filename, v.Hash, v.Length));
                using (var bl = db.CreateBlocklist(v.Filename))
                {
                    foreach (var h in v.Blocks)
                        bl.AddBlock(h.Key, h.Value);

                    combined = combined.Union(bl.Compare().ToArray());
                }
            }

        using (var il = db.CreateIndexlist(vol.Name))
        {
            foreach (var t in blocklinks)
                il.AddBlockLink(t.Item1, t.Item2, t.Item3);

            combined = combined.Union(il.Compare()).ToList();
        }

        return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, combined.ToList());
    }
    else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
    {
        using (var bl = db.CreateBlocklist(vol.Name))
        using (var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, options))
        {
            //Verify that all blocks are in the file
            foreach (var b in rd.Blocks)
                bl.AddBlock(b.Key, b.Value);

            //Select random blocks and verify their hashes match the filename and size
            var hashsamples = new List<KeyValuePair<string, long>>(rd.Blocks);
            var sampleCount = Math.Min(Math.Max(0, (int)(hashsamples.Count * sample_percent)), hashsamples.Count - 1);
            var rnd = new Random();

            while (hashsamples.Count > sampleCount)
                hashsamples.RemoveAt(rnd.Next(hashsamples.Count));

            var blockbuffer = new byte[options.Blocksize];
            var changes = new List<KeyValuePair<Library.Interface.TestEntryStatus, string>>();
            foreach (var s in hashsamples)
            {
                var size = rd.ReadBlock(s.Key, blockbuffer);
                if (size != s.Value)
                    changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                else
                {
                    var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                    if (hash != s.Key)
                        changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                }
            }

            return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, changes.Union(bl.Compare().ToList()));
        }
    }

    log.AddWarning(string.Format("Unexpected file type {0} for {1}", parsedInfo.FileType, vol.Name), null);
    return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, null);
}
public void DoRun(long samples, LocalTestDatabase db, BackendManager backend)
{
    var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(m_options.BlockHashAlgorithm);

    if (blockhasher == null)
        throw new Exception(string.Format(Strings.Foresthash.InvalidHashAlgorithm, m_options.BlockHashAlgorithm));
    if (!blockhasher.CanReuseTransform)
        throw new Exception(string.Format(Strings.Foresthash.InvalidCryptoSystem, m_options.BlockHashAlgorithm));

    var hashsize = blockhasher.HashSize / 8;
    var files = db.SelectTestTargets(samples, m_options).ToList();

    if (m_options.FullRemoteVerification)
    {
        foreach (var vol in new AsyncDownloader(files, backend))
        {
            var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
            try
            {
                if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                {
                    backend.WaitForComplete(db, null);
                    return;
                }

                using (var tf = vol.TempFile)
                {
                    if (parsedInfo.FileType == RemoteVolumeType.Files)
                    {
                        //Compare with db and see if all files are accounted for
                        // with correct file hashes and blocklist hashes
                        using (var fl = db.CreateFilelist(vol.Name))
                        {
                            using (var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, m_options))
                                foreach (var f in rd.Files)
                                    fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);

                            m_results.AddResult(vol.Name, fl.Compare().ToList());
                        }
                    }
                    else if (parsedInfo.FileType == RemoteVolumeType.Index)
                    {
                        var blocklinks = new List<Tuple<string, string, long>>();
                        IEnumerable<KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>> combined = new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0];

                        //Compare with db and see that all hashes and volumes are listed
                        using (var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, m_options, hashsize))
                            foreach (var v in rd.Volumes)
                            {
                                blocklinks.Add(new Tuple<string, string, long>(v.Filename, v.Hash, v.Length));
                                using (var bl = db.CreateBlocklist(v.Filename))
                                {
                                    foreach (var h in v.Blocks)
                                        bl.AddBlock(h.Key, h.Value);

                                    combined = combined.Union(bl.Compare().ToArray());
                                }
                            }

                        using (var il = db.CreateIndexlist(vol.Name))
                        {
                            foreach (var t in blocklinks)
                                il.AddBlockLink(t.Item1, t.Item2, t.Item3);

                            combined = combined.Union(il.Compare()).ToList();
                        }

                        m_results.AddResult(vol.Name, combined.ToList());
                    }
                    else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
                    {
                        using (var bl = db.CreateBlocklist(vol.Name))
                        using (var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, m_options))
                        {
                            //Verify that all blocks are in the file
                            foreach (var b in rd.Blocks)
                                bl.AddBlock(b.Key, b.Value);

                            //Select 20% random blocks and verify their hashes match the filename and size
                            var hashsamples = new List<KeyValuePair<string, long>>(rd.Blocks);
                            var sampleCount = Math.Min(Math.Max(0, (int)(hashsamples.Count * 0.2)), hashsamples.Count - 1);
                            var rnd = new Random();

                            while (hashsamples.Count > sampleCount)
                                hashsamples.RemoveAt(rnd.Next(hashsamples.Count));

                            var blockbuffer = new byte[m_options.Blocksize];
                            var changes = new List<KeyValuePair<Library.Interface.TestEntryStatus, string>>();
                            foreach (var s in hashsamples)
                            {
                                var size = rd.ReadBlock(s.Key, blockbuffer);
                                if (size != s.Value)
                                    changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                else
                                {
                                    var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                                    if (hash != s.Key)
                                        changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                }
                            }

                            m_results.AddResult(vol.Name, changes.Union(bl.Compare().ToList()));
                        }
                    }
                }

                db.UpdateVerificationCount(vol.Name);
            }
            catch (Exception ex)
            {
                m_results.AddResult(vol.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                m_results.AddError(string.Format("Failed to process file {0}", vol.Name), ex);
                if (ex is System.Threading.ThreadAbortException)
                    throw;
            }
        }
    }
    else
    {
        foreach (var f in files)
        {
            try
            {
                if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

                backend.GetForTesting(f.Name, f.Size, f.Hash);
                db.UpdateVerificationCount(f.Name);
                m_results.AddResult(f.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0]);
            }
            catch (Exception ex)
            {
                m_results.AddResult(f.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                m_results.AddError(string.Format("Failed to process file {0}", f.Name), ex);
                if (ex is System.Threading.ThreadAbortException)
                    throw;
            }
        }
    }
}
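The dblock branch above samples blocks by deleting random entries until roughly 20% remain. A self-contained sketch of that loop with invented block names; note the edge case that a volume with a single block yields a sampleCount of 0, so nothing is verified:

    using System;
    using System.Collections.Generic;

    static class BlockSamplingDemo
    {
        static void Main()
        {
            // Invented block identifiers standing in for block hashes
            var samples = new List<string> { "b1", "b2", "b3", "b4", "b5", "b6", "b7", "b8", "b9", "b10" };

            // Same clamping arithmetic as the handler: 20% of the list,
            // at least 0, at most count - 1
            var sampleCount = Math.Min(Math.Max(0, (int)(samples.Count * 0.2)), samples.Count - 1);

            // Remove random entries until only the sample remains
            var rnd = new Random();
            while (samples.Count > sampleCount)
                samples.RemoveAt(rnd.Next(samples.Count));

            Console.WriteLine("verifying: " + string.Join(", ", samples));
        }
    }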
public void Run(string baseVersion, string compareVersion, IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null, Action<IListChangesResults, IEnumerable<Tuple<Library.Interface.ListChangesChangeType, Library.Interface.ListChangesElementType, string>>> callback = null)
{
    var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter);

    var useLocalDb = !m_options.NoLocalDb && System.IO.File.Exists(m_options.Dbpath);

    baseVersion = string.IsNullOrEmpty(baseVersion) ? "1" : baseVersion;
    compareVersion = string.IsNullOrEmpty(compareVersion) ? "0" : compareVersion;

    long baseVersionIndex = -1;
    long compareVersionIndex = -1;

    DateTime baseVersionTime = new DateTime(0);
    DateTime compareVersionTime = new DateTime(0);

    using (var tmpdb = useLocalDb ? null : new Library.Utility.TempFile())
    using (var db = new Database.LocalListChangesDatabase(useLocalDb ? m_options.Dbpath : (string)tmpdb))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    using (var storageKeeper = db.CreateStorageHelper())
    {
        m_result.SetDatabase(db);

        if (useLocalDb)
        {
            var dbtimes = db.FilesetTimes.ToList();
            if (dbtimes.Count < 2)
                throw new UserInformationException(string.Format("Need at least two backups to show differences, database contains {0} backups", dbtimes.Count), "NeedTwoBackupsToStartDiff");

            long baseVersionId;
            long compareVersionId;

            var times = dbtimes.Zip(Enumerable.Range(0, dbtimes.Count), (a, b) => new Tuple<long, DateTime, long>(b, a.Value, a.Key)).ToList();
            var bt = SelectTime(baseVersion, times, out baseVersionIndex, out baseVersionTime, out baseVersionId);
            times.Remove(bt);
            SelectTime(compareVersion, times, out compareVersionIndex, out compareVersionTime, out compareVersionId);

            storageKeeper.AddFromDb(baseVersionId, false, filter);
            storageKeeper.AddFromDb(compareVersionId, true, filter);
        }
        else
        {
            Logging.Log.WriteInformationMessage(LOGTAG, "NoLocalDatabase", "No local database, accessing remote store");

            var parsedlist = (from n in backend.List()
                              let p = Volumes.VolumeBase.ParseFilename(n)
                              where p != null && p.FileType == RemoteVolumeType.Files
                              orderby p.Time descending
                              select p).ToArray();

            var numberedList = parsedlist.Zip(Enumerable.Range(0, parsedlist.Length), (a, b) => new Tuple<long, DateTime, Volumes.IParsedVolume>(b, a.Time, a)).ToList();
            if (numberedList.Count < 2)
                throw new UserInformationException(string.Format("Need at least two backups to show differences, database contains {0} backups", numberedList.Count), "NeedTwoBackupsToStartDiff");

            Volumes.IParsedVolume baseFile;
            Volumes.IParsedVolume compareFile;

            var bt = SelectTime(baseVersion, numberedList, out baseVersionIndex, out baseVersionTime, out baseFile);
            numberedList.Remove(bt);
            SelectTime(compareVersion, numberedList, out compareVersionIndex, out compareVersionTime, out compareFile);

            Func<FilelistEntryType, Library.Interface.ListChangesElementType> conv = (x) =>
            {
                switch (x)
                {
                    case FilelistEntryType.File:
                        return Library.Interface.ListChangesElementType.File;
                    case FilelistEntryType.Folder:
                        return Library.Interface.ListChangesElementType.Folder;
                    case FilelistEntryType.Symlink:
                        return Library.Interface.ListChangesElementType.Symlink;
                    default:
                        return (Library.Interface.ListChangesElementType)(-1);
                }
            };

            if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                return;

            using (var tmpfile = backend.Get(baseFile.File.Name, baseFile.File.Size, null))
            using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(baseFile.File.Name), tmpfile, m_options))
                foreach (var f in rd.Files)
                    if (Library.Utility.FilterExpression.Matches(filter, f.Path))
                        storageKeeper.AddElement(f.Path, f.Hash, f.Metahash, f.Size, conv(f.Type), false);

            if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                return;

            using (var tmpfile = backend.Get(compareFile.File.Name, compareFile.File.Size, null))
            using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(compareFile.File.Name), tmpfile, m_options))
                foreach (var f in rd.Files)
                    if (Library.Utility.FilterExpression.Matches(filter, f.Path))
                        storageKeeper.AddElement(f.Path, f.Hash, f.Metahash, f.Size, conv(f.Type), true);
        }

        var changes = storageKeeper.CreateChangeCountReport();
        var sizes = storageKeeper.CreateChangeSizeReport();

        var lst = (m_options.FullResult || callback != null)
            ? (from n in storageKeeper.CreateChangedFileReport() select n)
            : null;

        m_result.SetResult(
            baseVersionTime, baseVersionIndex, compareVersionTime, compareVersionIndex,
            changes.AddedFolders, changes.AddedSymlinks, changes.AddedFiles,
            changes.DeletedFolders, changes.DeletedSymlinks, changes.DeletedFiles,
            changes.ModifiedFolders, changes.ModifiedSymlinks, changes.ModifiedFiles,
            sizes.AddedSize, sizes.DeletedSize, sizes.PreviousSize, sizes.CurrentSize,
            (lst == null || callback == null) ? null : lst.ToArray()
        );

        if (callback != null)
            callback(m_result, lst);

        return;
    }
}
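In the no-database branch above, backups are numbered by zipping the time-descending fileset list with a range, so version 0 is always the newest backup. A self-contained sketch of that numbering, with invented dates:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class VersionNumberingDemo
    {
        static void Main()
        {
            // Invented backup times, already sorted newest-first as in the handler
            var times = new List<DateTime>
            {
                new DateTime(2024, 3, 2),  // newest -> version 0
                new DateTime(2024, 3, 1),  // -> version 1
                new DateTime(2024, 2, 28)  // oldest -> version 2
            };

            var numbered = times.Zip(Enumerable.Range(0, times.Count), (t, i) => Tuple.Create((long)i, t)).ToList();
            foreach (var n in numbered)
                Console.WriteLine("version {0} -> {1:yyyy-MM-dd}", n.Item1, n.Item2);
        }
    }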
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    var parsedfilter = new Library.Utility.FilterExpression(filterstrings);
    var simpleList = !(parsedfilter.Type == Library.Utility.FilterType.Simple || m_options.AllVersions);
    var filter = Library.Utility.JoinedFilterExpression.Join(parsedfilter, compositefilter);

    //Use a speedy local query
    if (!m_options.NoLocalDb && System.IO.File.Exists(m_options.Dbpath))
        using (var db = new Database.LocalListDatabase(m_options.Dbpath))
        {
            m_result.SetDatabase(db);
            using (var filesets = db.SelectFileSets(m_options.Time, m_options.Version))
            {
                if (parsedfilter.Type != Library.Utility.FilterType.Empty)
                {
                    if (simpleList || (m_options.ListFolderContents && !m_options.AllVersions))
                        filesets.TakeFirst();
                }

                IEnumerable<Database.LocalListDatabase.IFileversion> files;
                if (m_options.ListFolderContents)
                    files = filesets.SelectFolderContents(filter);
                else if (m_options.ListPrefixOnly)
                    files = filesets.GetLargestPrefix(filter);
                else if (parsedfilter.Type == Duplicati.Library.Utility.FilterType.Empty)
                    files = null;
                else
                    files = filesets.SelectFiles(filter);

                if (m_options.ListSetsOnly)
                    m_result.SetResult(
                        filesets.QuickSets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        null
                    );
                else
                    m_result.SetResult(
                        filesets.Sets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        files == null
                            ? null
                            : (from n in files
                               select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Path, n.Sizes.ToArray())))
                              .ToArray()
                    );

                return;
            }
        }

    m_result.AddMessage("No local database, accessing remote store");

    //TODO: Add prefix and foldercontents

    // Otherwise, grab info from remote location
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(tmpdb, "List"))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filteredList = ParseAndFilterFilesets(backend.List(), m_options);
        if (filteredList.Count == 0)
            throw new Exception("No filesets found on remote target");

        var numberSeq = CreateResultSequence(filteredList);
        if (parsedfilter.Type == Library.Utility.FilterType.Empty)
        {
            m_result.SetResult(numberSeq, null);
            m_result.EncryptedFiles = filteredList.Any(x => !string.IsNullOrWhiteSpace(x.Value.EncryptionModule));
            return;
        }

        var firstEntry = filteredList[0].Value;
        filteredList.RemoveAt(0);
        Dictionary<string, List<long>> res;

        if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
            return;

        using (var tmpfile = backend.Get(firstEntry.File.Name, firstEntry.File.Size, null))
        using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(firstEntry.File.Name), tmpfile, m_options))
            if (simpleList)
            {
                m_result.SetResult(
                    numberSeq.Take(1),
                    (from n in rd.Files
                     where Library.Utility.FilterExpression.Matches(filter, n.Path)
                     orderby n.Path
                     select new ListResultFile(n.Path, new long[] { n.Size }))
                    .ToArray()
                );

                return;
            }
            else
            {
                res = rd.Files
                    .Where(x => Library.Utility.FilterExpression.Matches(filter, x.Path))
                    .ToDictionary(
                        x => x.Path,
                        y => { var lst = new List<long>(); lst.Add(y.Size); return lst; },
                        Library.Utility.Utility.ClientFilenameStringComparer
                    );
            }

        long flindex = 1;
        foreach (var flentry in filteredList)
            using (var tmpfile = backend.Get(flentry.Value.File.Name, flentry.Value.File == null ? -1 : flentry.Value.File.Size, null))
            using (var rd = new Volumes.FilesetVolumeReader(flentry.Value.CompressionModule, tmpfile, m_options))
            {
                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

                foreach (var p in from n in rd.Files where Library.Utility.FilterExpression.Matches(filter, n.Path) select n)
                {
                    List<long> lst;
                    if (!res.TryGetValue(p.Path, out lst))
                    {
                        lst = new List<long>();
                        res[p.Path] = lst;
                        for (var i = 0; i < flindex; i++)
                            lst.Add(-1);
                    }

                    lst.Add(p.Size);
                }

                foreach (var n in from i in res where i.Value.Count < flindex + 1 select i)
                    n.Value.Add(-1);

                flindex++;
            }

        m_result.SetResult(
            numberSeq,
            from n in res
            orderby n.Key
            select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Key, n.Value))
        );
    }
}