/// <summary>
/// Verifies a sample of remote volumes. Selects up to <paramref name="samples"/> test
/// targets from the database, then either downloads and fully verifies each volume
/// (when FullRemoteVerification is set) or only checks size/hash via the backend.
/// Records per-volume results in m_results and bumps each volume's verification count.
/// </summary>
/// <param name="samples">Number of remote volumes to sample for testing.</param>
/// <param name="db">Local test database used to select targets and record verification state.</param>
/// <param name="backend">Backend manager used to download or probe the remote files.</param>
public void DoRun(long samples, LocalTestDatabase db, BackendManager backend)
{
    var files = db.SelectTestTargets(samples, m_options).ToList();

    m_results.OperationProgressUpdater.UpdatePhase(OperationPhase.Verify_Running);
    m_results.OperationProgressUpdater.UpdateProgress(0);
    var progress = 0L;

    if (m_options.FullRemoteVerification)
    {
        // Full verification: download every sampled volume and inspect its contents.
        foreach (var vol in new AsyncDownloader(files, backend))
        {
            try
            {
                // Cooperative stop: drain pending backend work, stamp the end time, bail out.
                if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                {
                    backend.WaitForComplete(db, null);
                    m_results.EndTime = DateTime.UtcNow;
                    return;
                }

                progress++;
                m_results.OperationProgressUpdater.UpdateProgress((float)progress / files.Count);

                KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> res;
                // FullBlockVerification checks every block (rate 1.0); otherwise a 20% sample.
                using (var tf = vol.TempFile)
                    res = TestVolumeInternals(db, vol, tf, m_options, m_options.FullBlockVerification ? 1.0 : 0.2);
                m_results.AddResult(res.Key, res.Value);

                // If the download produced a hash/size and verification reported no issues,
                // opportunistically persist the hash/size when the DB record lacks them.
                if (!string.IsNullOrWhiteSpace(vol.Hash) && vol.Size > 0)
                {
                    if (res.Value == null || !res.Value.Any())
                    {
                        var rv = db.GetRemoteVolume(vol.Name, null);

                        // NOTE(review): rv.ID < 0 appears to mean "no DB record found", yet the
                        // branch then updates that record — verify this guard is not inverted.
                        if (rv.ID < 0)
                        {
                            if (string.IsNullOrWhiteSpace(rv.Hash) || rv.Size <= 0)
                            {
                                if (m_options.Dryrun)
                                {
                                    Logging.Log.WriteDryrunMessage(LOGTAG, "CaptureHashAndSize", "Successfully captured hash and size for {0}, would update database", vol.Name);
                                }
                                else
                                {
                                    Logging.Log.WriteInformationMessage(LOGTAG, "CaptureHashAndSize", "Successfully captured hash and size for {0}, updating database", vol.Name);
                                    db.UpdateRemoteVolume(vol.Name, RemoteVolumeState.Verified, vol.Size, vol.Hash);
                                }
                            }
                        }
                    }
                }

                db.UpdateVerificationCount(vol.Name);
            }
            catch (Exception ex)
            {
                // Record the failure as an error result for this volume, then continue
                // with the remaining volumes — unless the thread is being aborted.
                m_results.AddResult(vol.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                Logging.Log.WriteErrorMessage(LOGTAG, "RemoteFileProcessingFailed", ex, "Failed to process file {0}", vol.Name);
                if (ex is System.Threading.ThreadAbortException)
                {
                    m_results.EndTime = DateTime.UtcNow;
                    throw;
                }
            }
        }
    }
    else
    {
        // Lightweight verification: no full download unless the recorded hash/size is missing.
        foreach (var f in files)
        {
            try
            {
                if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                {
                    m_results.EndTime = DateTime.UtcNow;
                    return;
                }

                progress++;
                m_results.OperationProgressUpdater.UpdateProgress((float)progress / files.Count);

                if (f.Size <= 0 || string.IsNullOrWhiteSpace(f.Hash))
                {
                    // No recorded hash/size: fall back to a full download and verification
                    // (rate 1 = check everything), capturing the real hash/size as a side effect.
                    Logging.Log.WriteInformationMessage(LOGTAG, "MissingRemoteHash", "No hash or size recorded for {0}, performing full verification", f.Name);

                    KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> res;
                    string hash;
                    long size;

                    using (var tf = backend.GetWithInfo(f.Name, out size, out hash))
                        res = TestVolumeInternals(db, f, tf, m_options, 1);
                    m_results.AddResult(res.Key, res.Value);

                    // Persist the freshly captured hash/size only when verification was clean.
                    if (!string.IsNullOrWhiteSpace(hash) && size > 0)
                    {
                        if (res.Value == null || !res.Value.Any())
                        {
                            if (m_options.Dryrun)
                            {
                                Logging.Log.WriteDryrunMessage(LOGTAG, "CapturedHashAndSize", "Successfully captured hash and size for {0}, would update database", f.Name);
                            }
                            else
                            {
                                Logging.Log.WriteInformationMessage(LOGTAG, "CapturedHashAndSize", "Successfully captured hash and size for {0}, updating database", f.Name);
                                db.UpdateRemoteVolume(f.Name, RemoteVolumeState.Verified, size, hash);
                            }
                        }
                    }
                }
                else
                {
                    // Hash and size are known: let the backend verify them without keeping the file.
                    backend.GetForTesting(f.Name, f.Size, f.Hash);
                }

                db.UpdateVerificationCount(f.Name);
                // An empty result array marks the volume as tested with no issues found.
                m_results.AddResult(f.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0]);
            }
            catch (Exception ex)
            {
                m_results.AddResult(f.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                Logging.Log.WriteErrorMessage(LOGTAG, "FailedToProcessFile", ex, "Failed to process file {0}", f.Name);
                if (ex is System.Threading.ThreadAbortException)
                {
                    m_results.EndTime = DateTime.UtcNow;
                    throw;
                }
            }
        }
    }

    m_results.EndTime = DateTime.UtcNow;
}
/// <summary>
/// Verifies a sample of remote volumes (older revision of this handler; note it has
/// the same signature as the sibling DoRun above — one of the two must be removed
/// before this file compiles). Selects up to <paramref name="samples"/> targets,
/// then either downloads and verifies each volume (FullRemoteVerification) or asks
/// the backend to check size/hash only.
/// </summary>
/// <param name="samples">Number of remote volumes to sample for testing.</param>
/// <param name="db">Local test database used to select targets and record verification state.</param>
/// <param name="backend">Backend manager used to download or probe the remote files.</param>
public void DoRun(long samples, LocalTestDatabase db, BackendManager backend)
{
    var files = db.SelectTestTargets(samples, m_options).ToList();

    m_results.OperationProgressUpdater.UpdatePhase(OperationPhase.Verify_Running);
    m_results.OperationProgressUpdater.UpdateProgress(0);
    var progress = 0L;

    if (m_options.FullRemoteVerification)
    {
        // Full verification: download every sampled volume and inspect its contents.
        foreach (var vol in new AsyncDownloader(files, backend))
        {
            try
            {
                // Cooperative stop: drain pending backend work before returning.
                if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                {
                    backend.WaitForComplete(db, null);
                    return;
                }

                progress++;
                m_results.OperationProgressUpdater.UpdateProgress((float)progress / files.Count);

                KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> res;
                using (var tf = vol.TempFile)
                    res = TestVolumeInternals(db, vol, tf, m_options, m_results);
                m_results.AddResult(res.Key, res.Value);
                db.UpdateVerificationCount(vol.Name);
            }
            catch (Exception ex)
            {
                // Record the failure as an error result and continue with the remaining
                // volumes — unless the thread is being aborted, in which case rethrow.
                m_results.AddResult(vol.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                m_results.AddError(string.Format("Failed to process file {0}", vol.Name), ex);
                if (ex is System.Threading.ThreadAbortException)
                {
                    throw;
                }
            }
        }
    }
    else
    {
        // Lightweight verification: no full download unless the recorded hash is missing.
        foreach (var f in files)
        {
            try
            {
                if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                {
                    return;
                }

                progress++;
                m_results.OperationProgressUpdater.UpdateProgress((float)progress / files.Count);

                // NOTE(review): the sibling DoRun uses `f.Size <= 0` here; this revision
                // treats a recorded size of exactly 0 as valid — confirm which is intended.
                if (f.Size < 0 || string.IsNullOrWhiteSpace(f.Hash))
                {
                    // No recorded hash: fall back to a full download and verification
                    // (sample rate 1), capturing the real hash/size as a side effect.
                    m_results.AddMessage(string.Format("No hash recorded for {0}, performing full verification", f.Name));

                    KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> res;
                    string hash;
                    long size;

                    using (var tf = backend.GetWithInfo(f.Name, out size, out hash))
                        res = TestVolumeInternals(db, f, tf, m_options, m_results, 1);
                    m_results.AddResult(res.Key, res.Value);

                    // Persist the freshly captured hash only when verification was clean.
                    if (res.Value != null && !res.Value.Any() && !string.IsNullOrWhiteSpace(hash))
                    {
                        if (!m_options.Dryrun)
                        {
                            // Fixed typo in the user-facing message: "Sucessfully" -> "Successfully".
                            m_results.AddMessage(string.Format("Successfully captured hash for {0}, updating database", f.Name));
                            db.UpdateRemoteVolume(f.Name, RemoteVolumeState.Verified, size, hash);
                        }
                    }
                }
                else
                {
                    // Hash and size are known: let the backend verify them.
                    backend.GetForTesting(f.Name, f.Size, f.Hash);
                }

                db.UpdateVerificationCount(f.Name);
                // An empty result array marks the volume as tested with no issues found.
                m_results.AddResult(f.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0]);
            }
            catch (Exception ex)
            {
                m_results.AddResult(f.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                m_results.AddError(string.Format("Failed to process file {0}", f.Name), ex);
                if (ex is System.Threading.ThreadAbortException)
                {
                    throw;
                }
            }
        }
    }
}