/// <summary>
/// Scans the backend for partial volume files left over from an interrupted
/// backup and removes them (deletion only happens when --force is set), after
/// first deleting any orphan files found by the backend wrapper.
/// </summary>
/// <returns>A log of the cleanup actions performed or suggested</returns>
public string Cleanup()
{
    CommunicationStatistics stats = new CommunicationStatistics(XervBackupOperationMode.CleanUp);
    SetupCommonOptions(stats);

    bool anyRemoved = false;
    StringBuilder sb = new StringBuilder();
    using (BackendWrapper backend = new BackendWrapper(stats, m_backend, m_options))
    {
        List<ManifestEntry> sorted = backend.GetBackupSets();

        //Flatten the chains into a single list of manifest entries
        List<ManifestEntry> entries = new List<ManifestEntry>();
        entries.AddRange(sorted);
        foreach (ManifestEntry be in sorted)
            entries.AddRange(be.Incrementals);

        string cleanup = backend.DeleteOrphans(false);
        if (!string.IsNullOrEmpty(cleanup))
            sb.AppendLine(cleanup);

        //Cleanup decisions are based on the manifest's content hash list,
        //so both hash checks and manifest reading must be enabled
        if (m_options.SkipFileHashChecks)
            throw new Exception(Strings.Interface.CannotCleanWithoutHashesError);
        if (m_options.DontReadManifests)
            throw new Exception(Strings.Interface.CannotCleanWithoutHashesError);

        //We need the manifests anyway, so we verify the chain.
        //BUGFIX: verify starting from the most recent entry so the chain is
        //actually covered; verifying entries[0] (the oldest full) checked
        //only a single manifest. This matches FindPatches' behavior.
        if (entries.Count > 0)
            VerifyManifestChain(backend, entries[entries.Count - 1]);

        //Now compare the actual filelist with the manifest
        foreach (ManifestEntry be in entries)
        {
            Manifestfile manifest = GetManifest(backend, be);

            //Volumes 0 .. count-1 are recorded in the manifest; anything at
            //index count or beyond is a partial upload and can be removed.
            //BUGFIX: start at count, not count - 1 — the previous code also
            //deleted the last volume that the manifest legitimately lists.
            int count = manifest.ContentHashes.Count;
            for (int i = count; i < be.Volumes.Count; i++)
            {
                anyRemoved = true;

                string sigmsg = string.Format(Strings.Interface.RemovingPartialFilesMessage, be.Volumes[i].Key.Filename);
                string cntmsg = string.Format(Strings.Interface.RemovingPartialFilesMessage, be.Volumes[i].Value.Filename);

                Logging.Log.WriteMessage(sigmsg, XervBackup.Library.Logging.LogMessageType.Information);
                Logging.Log.WriteMessage(cntmsg, XervBackup.Library.Logging.LogMessageType.Information);
                sb.AppendLine(sigmsg);
                sb.AppendLine(cntmsg);

                //Only delete remote files when --force is specified
                if (m_options.Force)
                {
                    backend.Delete(be.Volumes[i].Key);
                    backend.Delete(be.Volumes[i].Value);
                }
            }
        }
    }

    if (!m_options.Force && anyRemoved)
    {
        Logging.Log.WriteMessage(Strings.Interface.FilesAreNotForceDeletedMessage, XervBackup.Library.Logging.LogMessageType.Information);
        sb.AppendLine(Strings.Interface.FilesAreNotForceDeletedMessage);
    }

    return sb.ToString();

    //TODO: Write a message here?
}
/// <summary>
/// Applies the options shared by every operation: records the operation mode,
/// configures logging, the temp folder and thread priority, then loads,
/// validates and configures the generic modules.
/// Must be called before the operation uses any of that state.
/// </summary>
/// <param name="stats">The statistics module for the operation; must be non-null (its OperationMode is read unconditionally)</param>
private void SetupCommonOptions(CommunicationStatistics stats)
{
    m_options.MainAction = stats.OperationMode;

    switch (m_options.MainAction)
    {
        case XervBackupOperationMode.Backup:
        case XervBackupOperationMode.BackupFull:
        case XervBackupOperationMode.BackupIncremental:
            break;
        default:
            //It only makes sense to enable auto-creation if we are writing files.
            if (!m_options.RawOptions.ContainsKey("disable-autocreate-folder"))
                m_options.RawOptions["disable-autocreate-folder"] = "true";
            break;
    }

    Library.Logging.Log.LogLevel = m_options.Loglevel;
    OperationRunning(true);

    if (!string.IsNullOrEmpty(m_options.Logfile))
    {
        //Flag that this instance attached the log stream
        m_hasSetLogging = true;
        Library.Logging.Log.CurrentLog = new Library.Logging.StreamLog(m_options.Logfile);
    }

    if (stats != null)
    {
        stats.VerboseErrors = m_options.DebugOutput;
        stats.VerboseRetryErrors = m_options.VerboseRetryErrors;
    }

    if (!string.IsNullOrEmpty(m_options.TempDir))
        Utility.TempFolder.SystemTempPath = m_options.TempDir;

    if (!string.IsNullOrEmpty(m_options.ThreadPriority))
        System.Threading.Thread.CurrentThread.Priority = Utility.Utility.ParsePriority(m_options.ThreadPriority);

    //Load all generic modules
    m_options.LoadedModules.Clear();

    //A module is marked enabled (the bool in the pair) when it is not in
    //DisableModules and either loads by default or is listed in EnableModules
    foreach (Library.Interface.IGenericModule m in DynamicLoader.GenericLoader.Modules)
        m_options.LoadedModules.Add(new KeyValuePair<bool, Library.Interface.IGenericModule>(Array.IndexOf<string>(m_options.DisableModules, m.Key.ToLower()) < 0 && (m.LoadAsDefault || Array.IndexOf<string>(m_options.EnableModules, m.Key.ToLower()) >= 0), m));

    //Validation runs after module loading so module options are known
    ValidateOptions(stats);

    //Only enabled modules get configured with the raw options
    foreach (KeyValuePair<bool, Library.Interface.IGenericModule> mx in m_options.LoadedModules)
        if (mx.Key)
            mx.Value.Configure(m_options.RawOptions);

    Library.Logging.Log.WriteMessage(string.Format(Strings.Interface.StartingOperationMessage, m_options.MainAction), Logging.LogMessageType.Information);
}
/// <summary>
/// This function will examine all options passed on the commandline, and test for unsupported or deprecated values.
/// Any errors will be logged into the statistics module.
/// </summary>
/// <param name="stats">The statistics into which warnings are written; if null no validation is performed</param>
private void ValidateOptions(CommunicationStatistics stats)
{
    //No point in going through with this if we can't report
    if (stats == null)
        return;

    //Keep a list of all supplied options
    Dictionary<string, string> ropts = m_options.RawOptions;

    //Keep a list of all supported options
    Dictionary<string, Library.Interface.ICommandLineArgument> supportedOptions = new Dictionary<string, Library.Interface.ICommandLineArgument>();

    //There are a few internal options that are not accessible from outside, and thus not listed
    foreach (string s in Options.InternalOptions)
        supportedOptions[s] = null;

    //Figure out what module options are supported in the current setup
    List<Library.Interface.ICommandLineArgument> moduleOptions = new List<XervBackup.Library.Interface.ICommandLineArgument>();
    Dictionary<string, string> disabledModuleOptions = new Dictionary<string, string>();

    //Options from enabled modules count as supported; options from disabled
    //modules are remembered so a more specific warning can be issued below
    foreach (KeyValuePair<bool, Library.Interface.IGenericModule> m in m_options.LoadedModules)
        if (m.Value.SupportedCommands != null)
            if (m.Key)
                moduleOptions.AddRange(m.Value.SupportedCommands);
            else
            {
                foreach (Library.Interface.ICommandLineArgument c in m.Value.SupportedCommands)
                {
                    disabledModuleOptions[c.Name] = m.Value.DisplayName + " (" + m.Value.Key + ")";

                    if (c.Aliases != null)
                        foreach (string s in c.Aliases)
                            disabledModuleOptions[s] = disabledModuleOptions[c.Name];
                }
            }

    //Now run through all supported options, and look for deprecated options
    foreach (IList<Library.Interface.ICommandLineArgument> l in new IList<Library.Interface.ICommandLineArgument>[] {
        m_options.SupportedCommands,
        DynamicLoader.BackendLoader.GetSupportedCommands(m_backend),
        m_options.NoEncryption ? null : DynamicLoader.EncryptionLoader.GetSupportedCommands(m_options.EncryptionModule),
        moduleOptions,
        DynamicLoader.CompressionLoader.GetSupportedCommands(m_options.CompressionModule) })
    {
        if (l != null)
            foreach (Library.Interface.ICommandLineArgument a in l)
            {
                //Warn about name collisions unless the name is a known duplicate
                if (supportedOptions.ContainsKey(a.Name) && Array.IndexOf(Options.KnownDuplicates, a.Name.ToLower()) < 0)
                    stats.LogWarning(string.Format(Strings.Interface.DuplicateOptionNameWarning, a.Name), null);

                supportedOptions[a.Name] = a;

                //Aliases are registered under the same argument instance
                if (a.Aliases != null)
                    foreach (string s in a.Aliases)
                    {
                        if (supportedOptions.ContainsKey(s) && Array.IndexOf(Options.KnownDuplicates, s.ToLower()) < 0)
                            stats.LogWarning(string.Format(Strings.Interface.DuplicateOptionNameWarning, s), null);

                        supportedOptions[s] = a;
                    }

                //Warn once for each supplied name/alias of a deprecated option
                if (a.Deprecated)
                {
                    List<string> aliases = new List<string>();
                    aliases.Add(a.Name);
                    if (a.Aliases != null)
                        aliases.AddRange(a.Aliases);

                    foreach (string s in aliases)
                        if (ropts.ContainsKey(s))
                        {
                            string optname = a.Name;
                            if (a.Name != s)
                                optname += " (" + s + ")";

                            stats.LogWarning(string.Format(Strings.Interface.DeprecatedOptionUsedWarning, optname, a.DeprecationMessage), null);
                        }
                }
            }
    }

    //Now look for options that were supplied but not supported
    foreach (string s in ropts.Keys)
        if (!supportedOptions.ContainsKey(s))
            if (disabledModuleOptions.ContainsKey(s))
                stats.LogWarning(string.Format(Strings.Interface.UnsupportedOptionDisabledModuleWarning, s, disabledModuleOptions[s]), null);
            else
                stats.LogWarning(string.Format(Strings.Interface.UnsupportedOptionWarning, s), null);

    //Look at the value supplied for each argument and see if is valid according to its type
    foreach (string s in ropts.Keys)
    {
        Library.Interface.ICommandLineArgument arg;
        //Internal options were registered with a null argument and are skipped here
        if (supportedOptions.TryGetValue(s, out arg) && arg != null)
        {
            string validationMessage = ValidateOptionValue(arg, s, ropts[s]);
            if (validationMessage != null)
                stats.LogWarning(validationMessage, null);
        }
    }

    //TODO: Based on the action, see if all options are relevant
}
/// <summary>
/// Creates the target folder on the backend.
/// </summary>
private void CreateFolder()
{
    CommunicationStatistics statistics = new CommunicationStatistics(XervBackupOperationMode.CreateFolder);
    SetupCommonOptions(statistics);

    using (BackendWrapper wrapper = new BackendWrapper(statistics, m_backend, m_options))
    {
        wrapper.CreateFolder();
    }
}
/// <summary>
/// Downloads all required signature files from the backend.
/// </summary>
/// <param name="backend">The backend to read from</param>
/// <param name="entries">The flattened list of manifests</param>
/// <param name="tempfolder">The tempfolder set for this operation</param>
/// <param name="allowHashFail">True to ignore files with failed hash signature</param>
/// <param name="stat">The statistics module that receives progress reports and hash-failure errors</param>
/// <returns>A list of file archives</returns>
private List<KeyValuePair<ManifestEntry, Library.Interface.ICompression>> FindPatches(BackendWrapper backend, List<ManifestEntry> entries, string tempfolder, bool allowHashFail, CommunicationStatistics stat)
{
    List<KeyValuePair<ManifestEntry, Library.Interface.ICompression>> patches = new List<KeyValuePair<ManifestEntry, Library.Interface.ICompression>>();

    using (new Logging.Timer("Reading incremental data"))
    {
        OperationProgress(this, GetOperationType(), stat.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusReadingIncrementalData, "");

        //Calculate the total number of files to download
        //, and verify their order
        int incCount = 0;
        foreach (ManifestEntry be in entries)
        {
            int volNo = 0;

            //Prevent order based bugs
            //entries must be sorted oldest-first; reject any out-of-order input
            if (entries.IndexOf(be) > 0)
                if (entries[entries.IndexOf(be) - 1].Time >= be.Time)
                    throw new Exception(Strings.Interface.BadSortingDetectedError);

            incCount++;
            foreach (KeyValuePair<SignatureEntry, ContentEntry> bes in be.Volumes)
            {
                incCount++;

                //Volume numbers are 1-based, must be consecutive, and the
                //signature and content volume numbers must agree
                if (volNo + 1 != bes.Key.Volumenumber || bes.Key.Volumenumber != bes.Value.Volumenumber)
                    throw new Exception(Strings.Interface.BadVolumeSortOrder);

                volNo++;
            }
        }

        //The incremental part has a fixed cost, and each file has a fixed fraction of that
        double unitCost = m_incrementalFraction / incCount;

        //Ensure that the manifest chain has not been tampered with
        // since we will read all manifest files anyway, there is no harm in doing it here
        if (!m_options.DontReadManifests && entries.Count > 0)
            VerifyManifestChain(backend, entries[entries.Count - 1]);

        foreach (ManifestEntry be in entries)
        {
            m_progress += unitCost;

            Manifestfile manifest = GetManifest(backend, be);

            foreach (KeyValuePair<SignatureEntry, ContentEntry> bes in be.Volumes)
            {
                m_progress += unitCost;

                //Skip non-listed incrementals
                //Volumes beyond what the manifest records are marked as orphans
                if (manifest.SignatureHashes != null && bes.Key.Volumenumber > manifest.SignatureHashes.Count)
                {
                    backend.AddOrphan(bes.Key);
                    backend.AddOrphan(bes.Value);
                    continue;
                }

                OperationProgress(this, GetOperationType(), stat.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusReadingSignatureFile, be.Time.ToShortDateString() + " " + be.Time.ToShortTimeString(), bes.Key.Volumenumber), "");

                string filename = System.IO.Path.Combine(tempfolder, "patch-" + patches.Count.ToString() + ".zip");

                //Check just before we download stuff
                CheckLiveControl();

                try
                {
                    //When hash checking is enabled, pass the expected hash for
                    //this volume (Volumenumber is 1-based, the hash list 0-based)
                    using (new Logging.Timer("Get " + bes.Key.Filename))
                        backend.Get(bes.Key, manifest, filename, manifest.SignatureHashes == null ? null : manifest.SignatureHashes[bes.Key.Volumenumber - 1]);
                }
                catch (BackendWrapper.HashMismathcException hme)
                {
                    //A bad hash only skips the file when hash failures are allowed;
                    //otherwise it aborts the whole operation
                    if (allowHashFail)
                    {
                        if (stat != null)
                            stat.LogError(string.Format(Strings.Interface.FileHashFailure, hme.Message), hme);
                        continue;
                    }
                    else
                        throw;
                }

                patches.Add(new KeyValuePair<ManifestEntry, XervBackup.Library.Interface.ICompression>(be, DynamicLoader.CompressionLoader.GetModule(bes.Key.Compression, filename, m_options.RawOptions)));
            }
        }
    }

    //Orphans accumulated above are removed here (force = true)
    backend.DeleteOrphans(true);

    return patches;
}
/// <summary>
/// Downloads the signature volumes for the backup closest to the restore time
/// and lists the patch files they contain.
/// </summary>
/// <returns>The patch file type/path pairs found in the signature volumes</returns>
public List<KeyValuePair<RSync.RSyncDir.PatchFileType, string>> ListActualSignatureFiles()
{
    CommunicationStatistics statistics = new CommunicationStatistics(XervBackupOperationMode.ListActualSignatureFiles);
    SetupCommonOptions(statistics);

    using (BackendWrapper wrapper = new BackendWrapper(statistics, m_backend, m_options))
    {
        //Pick the backup set at the restore time, preferring its newest incremental
        ManifestEntry target = wrapper.GetBackupSet(m_options.RestoreTime);
        int incrementalCount = target.Incrementals.Count;
        if (incrementalCount > 0)
            target = target.Incrementals[incrementalCount - 1];

        using (Utility.TempFolder tempArea = new XervBackup.Library.Utility.TempFolder())
        {
            List<ManifestEntry> single = new List<ManifestEntry>(new ManifestEntry[] { target });

            //Fetch the signature archives for just this one entry
            List<Library.Interface.ICompression> archives = new List<XervBackup.Library.Interface.ICompression>();
            foreach (KeyValuePair<ManifestEntry, Library.Interface.ICompression> pair in FindPatches(wrapper, single, tempArea, false, statistics))
                archives.Add(pair.Value);

            using (RSync.RSyncDir dir = new XervBackup.Library.Main.RSync.RSyncDir(new string[] { tempArea }, statistics, null))
                return dir.ListPatchFiles(archives);
        }
    }
}
/// <summary>
/// Verifies the backup chain selected by the restore time: each manifest is
/// read and validated, and depending on the verification level the signature
/// and content volumes are downloaded with their expected hashes.
/// </summary>
/// <returns>A list of examined entries, each paired with null on success or the failure exception</returns>
public List<KeyValuePair<BackupEntryBase, Exception>> VerifyBackupChain()
{
    CommunicationStatistics stats = new CommunicationStatistics(XervBackupOperationMode.Verify);
    SetupCommonOptions(stats);

    List<KeyValuePair<BackupEntryBase, Exception>> results = new List<KeyValuePair<BackupEntryBase, Exception>>();

    //Verification requires both manifest reading and hash checks
    if (m_options.DontReadManifests)
        throw new InvalidOperationException(Strings.Interface.ManifestsMustBeRead);
    if (m_options.SkipFileHashChecks)
        throw new InvalidOperationException(Strings.Interface.CannotVerifyWithoutHashes);

    //The signature cache would let reads bypass the actual remote files,
    //so it is disabled for the duration of the verification
    if (!string.IsNullOrEmpty(m_options.SignatureCachePath))
    {
        stats.LogWarning(Strings.Interface.DisablingSignatureCacheForVerification, null);
        m_options.SignatureCachePath = null;
    }

    using (BackendWrapper backend = new BackendWrapper(stats, m_backend, m_options))
    {
        //Find the spot in the chain where we start
        ManifestEntry bestFit = backend.GetBackupSet(m_options.RestoreTime);

        //Get the list of manifests to validate
        List<ManifestEntry> entries = new List<ManifestEntry>();
        entries.Add(bestFit);
        entries.AddRange(bestFit.Incrementals);
        entries.Reverse();

        foreach (ManifestEntry me in entries)
        {
            Manifestfile mf = null;

            try
            {
                mf = GetManifest(backend, me);
                VerifyBackupChainWithFiles(backend, me);

                //The number of volumes on the backend must match the manifest
                if (mf.SignatureHashes.Count != me.Volumes.Count)
                    results.Add(new KeyValuePair<BackupEntryBase, Exception>(me, new Exception(string.Format(Strings.Interface.ManifestAndFileCountMismatchError, mf.SelfFilename, mf.SignatureHashes.Count, me.Volumes.Count))));
                else
                    results.Add(new KeyValuePair<BackupEntryBase, Exception>(me, null));
            }
            catch (Exception ex)
            {
                results.Add(new KeyValuePair<BackupEntryBase, Exception>(me, ex));
            }

            if (mf != null)
            {
                //Only inspect the volumes both the manifest and the file list agree on
                int volumes = Math.Min(mf.SignatureHashes.Count, me.Volumes.Count);
                for (int i = 0; i < volumes; i++)
                {
                    //Signature volumes are downloaded at Signature level and above
                    if (m_options.Verificationlevel == VerificationLevel.Signature || m_options.Verificationlevel == VerificationLevel.Full)
                    {
                        try
                        {
                            //Download to a throwaway temp file, passing the expected
                            //hash — presumably Get() validates it; confirm in BackendWrapper
                            using (Utility.TempFile tf = new XervBackup.Library.Utility.TempFile())
                                backend.Get(me.Volumes[i].Key, mf, tf, mf.SignatureHashes[i]);
                            results.Add(new KeyValuePair<BackupEntryBase, Exception>(me.Volumes[i].Key, null));
                        }
                        catch (Exception ex)
                        {
                            results.Add(new KeyValuePair<BackupEntryBase, Exception>(me.Volumes[i].Key, ex));
                        }
                    }

                    //Content volumes are only downloaded at Full level
                    if (m_options.Verificationlevel == VerificationLevel.Full)
                    {
                        try
                        {
                            using (Utility.TempFile tf = new XervBackup.Library.Utility.TempFile())
                                backend.Get(me.Volumes[i].Value, mf, tf, mf.ContentHashes[i]);
                            results.Add(new KeyValuePair<BackupEntryBase, Exception>(me.Volumes[i].Value, null));
                        }
                        catch (Exception ex)
                        {
                            results.Add(new KeyValuePair<BackupEntryBase, Exception>(me.Volumes[i].Value, ex));
                        }
                    }
                }
            }
        }

        //Re-generate verification file
        if (m_options.CreateVerificationFile)
        {
            //Stop any async operations
            if (m_options.AsynchronousUpload)
                backend.ExtractPendingUploads();

            VerificationFile vf = new VerificationFile(entries, new FilenameStrategy(m_options));
            using (Utility.TempFile tf = new XervBackup.Library.Utility.TempFile())
            {
                vf.Save(tf);
                tf.Protected = true;
                backend.Put(new VerificationEntry(entries[entries.Count - 1].Time), tf);
            }
        }
    }

    return results;
}
/// <summary>
/// Retrieves the list of backup sets found on the backend.
/// </summary>
/// <returns>The backup sets reported by the backend wrapper</returns>
public List<ManifestEntry> GetBackupSets()
{
    CommunicationStatistics statistics = new CommunicationStatistics(XervBackupOperationMode.GetBackupSets);
    SetupCommonOptions(statistics);

    using (BackendWrapper wrapper = new BackendWrapper(statistics, m_backend, m_options))
    {
        return wrapper.GetBackupSets();
    }
}
/// <summary>
/// Lists the raw file names present on the backend, raising the
/// start/completion events around the listing.
/// </summary>
/// <returns>The names of all files reported by the backend</returns>
public string[] List()
{
    CommunicationStatistics statistics = new CommunicationStatistics(XervBackupOperationMode.List);
    SetupCommonOptions(statistics);

    List<string> filenames = new List<string>();
    using (BackendWrapper wrapper = new BackendWrapper(statistics, m_backend, m_options))
    {
        if (OperationStarted != null)
            OperationStarted(this, XervBackupOperation.List, statistics.OperationMode, 0, -1, Strings.Interface.StatusStarted, "");

        foreach (XervBackup.Library.Interface.IFileEntry entry in wrapper.List(false))
            filenames.Add(entry.Name);

        if (OperationCompleted != null)
            OperationCompleted(this, XervBackupOperation.List, statistics.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");

        return filenames.ToArray();
    }
}
/// <summary>
/// Reads through a backup and finds the last backup entry that has a specific file
/// </summary>
/// <returns>Each requested path paired with the time of the most recent backup
/// containing it, or a DateTime with zero ticks if it was never found</returns>
public List<KeyValuePair<string, DateTime>> FindLastFileVersion()
{
    CommunicationStatistics stats = new CommunicationStatistics(XervBackupOperationMode.FindLastFileVersion);
    SetupCommonOptions(stats);

    if (m_options.DontReadManifests)
        throw new Exception(Strings.Interface.ManifestsMustBeRead);

    if (string.IsNullOrEmpty(m_options.FileToRestore))
        throw new Exception(Strings.Interface.NoFilesGivenError);

    string[] filesToFind = m_options.FileToRestore.Split(System.IO.Path.PathSeparator);

    //Initialize every result with time zero, meaning "not found yet"
    KeyValuePair<string, DateTime>[] results = new KeyValuePair<string, DateTime>[filesToFind.Length];
    for (int i = 0; i < results.Length; i++)
        results[i] = new KeyValuePair<string, DateTime>(filesToFind[i], new DateTime(0));

    using (BackendWrapper backend = new BackendWrapper(stats, m_backend, m_options))
    {
        //Extract the full backup set list
        List<ManifestEntry> fulls = backend.GetBackupSets();

        //Flatten the list
        List<ManifestEntry> workList = new List<ManifestEntry>();

        //The list is oldest first, this function work newest first
        fulls.Reverse();
        foreach (ManifestEntry f in fulls)
        {
            f.Incrementals.Reverse();
            workList.AddRange(f.Incrementals);
            workList.Add(f);
        }

        bool warned_manifest_v1 = false;

        foreach (ManifestEntry mf in workList)
        {
            List<Manifestfile.HashEntry> signatureHashes = null;
            Manifestfile mfi;

            //Download and parse this entry's manifest
            using (Utility.TempFile tf = new XervBackup.Library.Utility.TempFile())
            {
                backend.Get(mf, null, tf, null);
                mfi = new Manifestfile(tf, m_options.SkipFileHashChecks);

                if (!m_options.SkipFileHashChecks)
                    signatureHashes = mfi.SignatureHashes;
            }

            //If there are no volumes, don't stop here
            bool any_unmatched = true;

            //Manifests without source dirs get a one-time warning
            if (stats != null && !warned_manifest_v1 && (mfi.SourceDirs == null || mfi.SourceDirs.Length == 0))
            {
                warned_manifest_v1 = true;
                stats.LogWarning(Strings.Interface.ManifestVersionRequiresRelativeNamesWarning, null);
            }

            foreach (KeyValuePair<SignatureEntry, ContentEntry> e in mf.Volumes)
                using (Utility.TempFile tf = new XervBackup.Library.Utility.TempFile())
                {
                    //Skip non-approved signature files
                    if (signatureHashes != null && e.Key.Volumenumber > signatureHashes.Count)
                    {
                        stats.LogWarning(string.Format(Strings.Interface.SkippedUnlistedSignatureFileWarning, e.Key.Filename), null);
                        continue;
                    }

                    backend.Get(e.Key, mfi, tf, signatureHashes == null ? null : signatureHashes[e.Key.Volumenumber - 1]);

                    any_unmatched = false;

                    //NOTE(review): the IsNullOrEmpty test below suggests ContainsFile
                    //blanks out matched entries in filesToFind — confirm in RSyncDir
                    RSync.RSyncDir.ContainsFile(mfi, filesToFind, DynamicLoader.CompressionLoader.GetModule(e.Key.Compression, tf, m_options.RawOptions));

                    //Record the backup time for newly matched files; any entry
                    //not matched keeps the search going
                    for (int i = 0; i < filesToFind.Length; i++)
                    {
                        if (results[i].Value.Ticks == 0 && string.IsNullOrEmpty(filesToFind[i]))
                            results[i] = new KeyValuePair<string, DateTime>(results[i].Key, mf.Time);
                        else
                            any_unmatched = true;
                    }

                    //Stop as soon as everything has been located
                    if (!any_unmatched)
                        break;
                }

            if (!any_unmatched)
                break;
        }

        return new List<KeyValuePair<string, DateTime>>(results);
    }
}
/// <summary>
/// Removes backup chains older than the configured expiration date. A chain
/// that still has an unexpired incremental is kept whole, and the last full
/// is never removed unless full removal is explicitly allowed.
/// </summary>
/// <returns>A log of the delete operations performed</returns>
public string DeleteOlderThan()
{
    StringBuilder sb = new StringBuilder();
    CommunicationStatistics stats = new CommunicationStatistics(XervBackupOperationMode.DeleteOlderThan);
    SetupCommonOptions(stats);

    DateTime expires = m_options.RemoveOlderThan;

    using (BackendWrapper backend = new BackendWrapper(stats, m_backend, m_options))
    try
    {
        if (OperationStarted != null)
            OperationStarted(this, XervBackupOperation.Remove, stats.OperationMode, 0, -1, Strings.Interface.StatusStarted, "");

        List<ManifestEntry> entries = backend.GetBackupSets();
        List<ManifestEntry> toremove = new List<ManifestEntry>();

        //Walk the fulls oldest-first while they are expired
        while (entries.Count > 0 && entries[0].Time <= expires)
        {
            //Refuse to delete the only remaining full unless explicitly allowed
            if (entries.Count == 1 && !m_options.AllowFullRemoval)
            {
                sb.AppendLine(string.Format(Strings.Interface.NotDeletingLastFullMessage, entries[0].Time));
                break;
            }

            ManifestEntry be = entries[0];
            entries.RemoveAt(0);

            //If any incremental in this chain is still current, the whole
            //chain must be kept, since the incrementals depend on the full
            bool hasNewer = false;
            foreach (ManifestEntry bex in be.Incrementals)
                if (bex.Time >= expires)
                {
                    hasNewer = true;
                    break;
                }

            if (hasNewer)
            {
                //Report each expired member of the kept chain, then stop —
                //later chains are necessarily newer
                List<ManifestEntry> t = new List<ManifestEntry>(be.Incrementals);
                t.Insert(0, be);

                for (int i = 0; i < t.Count; i++)
                    if (t[i].Time <= expires)
                        sb.AppendLine(string.Format(Strings.Interface.NotDeletingBackupSetMessage, t[i].Time.ToString(System.Globalization.CultureInfo.InvariantCulture)));

                break;
            }
            else
            {
                //The entire chain is expired; queue incrementals newest-first,
                //then the full itself
                be.Incrementals.Reverse();
                toremove.AddRange(be.Incrementals);
                toremove.Add(be);
            }
        }

        //Sanity check: we should never end up deleting everything unless allowed
        if (entries.Count == 0 && toremove.Count > 0 && !m_options.AllowFullRemoval)
            throw new Exception(Strings.Interface.InternalDeleteCountError);

        sb.Append(RemoveBackupSets(backend, toremove));
    }
    finally
    {
        if (OperationCompleted != null)
            OperationCompleted(this, XervBackupOperation.Remove, stats.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");
    }

    return sb.ToString();
}
/// <summary>
/// Deletes all backup chains except the most recent N full backups
/// (each kept full keeps its incrementals as well).
/// </summary>
/// <returns>A log of the delete operations performed</returns>
public string DeleteAllButNFull()
{
    CommunicationStatistics statistics = new CommunicationStatistics(XervBackupOperationMode.DeleteAllButNFull);
    SetupCommonOptions(statistics);

    int keepCount = Math.Max(0, m_options.DeleteAllButNFull);
    StringBuilder report = new StringBuilder();

    using (BackendWrapper wrapper = new BackendWrapper(statistics, m_backend, m_options))
    try
    {
        if (OperationStarted != null)
            OperationStarted(this, XervBackupOperation.Remove, statistics.OperationMode, 0, -1, Strings.Interface.StatusStarted, "");

        List<ManifestEntry> fulls = wrapper.GetBackupSets();
        List<ManifestEntry> pruneList = new List<ManifestEntry>();

        //Drop the oldest chains until only keepCount fulls remain
        while (fulls.Count > keepCount)
        {
            //Never delete the last remaining full unless explicitly allowed
            if (fulls.Count == 1 && !m_options.AllowFullRemoval)
            {
                report.AppendLine(string.Format(Strings.Interface.NotDeletingLastFullMessage, fulls[0].Time));
                break;
            }

            ManifestEntry oldest = fulls[0];
            fulls.RemoveAt(0);

            //Queue the incrementals newest-first, then the full itself
            oldest.Incrementals.Reverse();
            pruneList.AddRange(oldest.Incrementals);
            pruneList.Add(oldest);
        }

        //Sanity check: deleting every full is only valid with AllowFullRemoval
        if (fulls.Count == 0 && pruneList.Count > 0 && !m_options.AllowFullRemoval)
            throw new Exception(Strings.Interface.InternalDeleteCountError);

        report.Append(RemoveBackupSets(wrapper, pruneList));
    }
    finally
    {
        if (OperationCompleted != null)
            OperationCompleted(this, XervBackupOperation.Remove, statistics.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");
    }

    return report.ToString();
}
/// <summary>
/// Removes all but the last N backups, counting fulls and incrementals alike,
/// while ensuring no partial chain (incrementals without their full) is left.
/// </summary>
/// <returns>A log of the delete operations performed</returns>
public string DeleteAllButN()
{
    CommunicationStatistics stats = new CommunicationStatistics(XervBackupOperationMode.DeleteAllButN);
    SetupCommonOptions(stats);

    //NOTE(review): this reads DeleteAllButNFull; presumably the options class
    //maps both "delete-all-but-n" and "delete-all-but-n-full" to it — confirm
    int x = Math.Max(0, m_options.DeleteAllButNFull);

    StringBuilder sb = new StringBuilder();

    using (BackendWrapper backend = new BackendWrapper(stats, m_backend, m_options))
    try
    {
        if (OperationStarted != null)
            OperationStarted(this, XervBackupOperation.Remove, stats.OperationMode, 0, -1, Strings.Interface.StatusStarted, "");

        List<ManifestEntry> flatlist = new List<ManifestEntry>();
        List<ManifestEntry> entries = backend.GetBackupSets();

        //Get all backups as a flat list
        foreach (ManifestEntry me in entries)
        {
            flatlist.Add(me);
            flatlist.AddRange(me.Incrementals);
        }

        //Now remove all but those requested
        List<ManifestEntry> toremove = new List<ManifestEntry>();
        while (flatlist.Count > x)
        {
            toremove.Add(flatlist[0]);
            flatlist.RemoveAt(0);
        }

        //If there are still chains left, make sure we do not end up with a partial chain
        if (!m_options.AllowFullRemoval || flatlist.Count != 0)
        {
            //Go back until we have a full chain
            //(move entries from the tail of the remove list back to the kept
            //list until the first kept entry is a full backup)
            while (toremove.Count > 0 && (flatlist.Count == 0 || !flatlist[0].IsFull))
            {
                sb.AppendLine(string.Format(Strings.Interface.NotDeletingBackupSetMessage, toremove[toremove.Count - 1].Time));
                flatlist.Insert(0, toremove[toremove.Count - 1]);
                toremove.RemoveAt(toremove.Count - 1);
            }
        }

        //Sanity check: a partial remainder is only valid with AllowFullRemoval
        if (toremove.Count > 0 && !m_options.AllowFullRemoval && (flatlist.Count == 0 || !flatlist[0].IsFull))
            throw new Exception(Strings.Interface.InternalDeleteCountError);

        sb.Append(RemoveBackupSets(backend, toremove));
    }
    finally
    {
        if (OperationCompleted != null)
            OperationCompleted(this, XervBackupOperation.Remove, stats.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");
    }

    return sb.ToString();
}
/// <summary>
/// Constructs a new BackendWrapper, resolving the backend implementation from
/// the url, preparing the signature cache and optional state database, and —
/// when asynchronous upload is enabled — starting the uploader worker thread.
/// </summary>
/// <param name="statistics">The statistics logging module, may be null</param>
/// <param name="backend">The url to the backend to wrap</param>
/// <param name="options">A set of backend options</param>
public BackendWrapper(CommunicationStatistics statistics, string backend, Options options)
{
    m_statistics = statistics;
    m_options = options;

    m_filenamestrategy = new FilenameStrategy(m_options);
    m_backendUrl = backend;

    //Resolve the actual backend implementation from the url
    m_backend = XervBackup.Library.DynamicLoader.BackendLoader.GetBackend(backend, m_options.RawOptions);
    if (m_backend == null)
        throw new Exception(string.Format(Strings.BackendWrapper.BackendNotFoundError, backend));

    m_reuse_backend = !m_options.NoConnectionReuse;
    m_first_backend_use = true;

    //Folder creation is an extension interface not implemented by all backends
    m_backendSupportsCreateFolder = m_backend is Library.Interface.IBackend_v2;

    //Orphans are only tracked when auto-cleanup is requested
    if (m_options.AutoCleanup)
        m_orphans = new List<BackupEntryBase>();

    if (!string.IsNullOrEmpty(m_options.SignatureCachePath) && !System.IO.Directory.Exists(m_options.SignatureCachePath))
        System.IO.Directory.CreateDirectory(m_options.SignatureCachePath);

    if (!string.IsNullOrEmpty(m_options.Backendlogdatabase))
    {
        m_backendInterfaceLogger = new StateVerification.StateDatabase(m_options.Backendlogdatabase, statistics);
        m_backendInterfaceLogger.BeginOperation(m_options.MainAction.ToString());
    }

    m_async = m_options.AsynchronousUpload;
    if (m_async)
    {
        //If we are using async operations, the entire class is actually threadsafe,
        //utilizing a common exclusive lock on all operations. But the implementation does
        //not prevent starvation, so it should not be called by multiple threads.
        m_pendingOperations = new Queue<KeyValuePair<BackupEntryBase, string>>();
        m_asyncItemProcessed = new System.Threading.AutoResetEvent(false);
        m_asyncItemReady = new System.Threading.AutoResetEvent(false);
        m_workerThread = new System.Threading.Thread(ProcessQueue);
        m_workerThread.Name = "AsyncUploaderThread";
        m_queuelock = new object();
        //Start the worker last, after all queue state is in place
        m_workerThread.Start();
    }
}