public void TestBackup()
{
    // Expected backend file name patterns, e.g.:
    //   XervBackup-full-manifestA.20121230T201547Z.manifest.aes
    //   XervBackup-full-content.20121230T201547Z.vol1.zip.aes
    //   XervBackup-full-signature.20121230T201547Z.vol1.zip.aes
    string[] BACKUP_FILES = new[]
    {
        "^XervBackup-full-manifest.*\\.aes$",
        "^XervBackup-full-content.*\\.aes$",
        "^XervBackup-full-signature.*\\.aes$"
    };

    Interface.Backup(new string[] { this.m_source_dir }, this.m_backend_url, this.m_options);

    string[] backupFiles = System.IO.Directory.GetFiles(this.m_dest_dir);
    foreach (string expected_file in BACKUP_FILES)
    {
        Assert.AreEqual(1,
            backupFiles.Where(path => Regex.Match(System.IO.Path.GetFileName(path), expected_file).Success).Count(),
            "Expected backup file not found: " + expected_file);
    }

    using (var restore_target = new TempFolder())
    {
        Interface.Restore(this.m_backend_url, new string[] { restore_target }, this.m_options);

        // Backed-up files should be restored
        AssertHelper.AssertFilesMatch(restore_target, this.m_source_files.Select(x => (string)x).ToArray());
    }
}
// Helper that initializes Program.DataConnection and invokes a callback with it
protected static void WithProgramDbConnection(TempFolder tf, Action<IDataFetcherWithRelations> action)
{
    using (System.Data.IDbConnection con = Utils.CreateDbConnection())
    {
        Utils.InitializeDbConnection(tf, con);
        action(XervBackup.GUI.Program.DataConnection);
    }
}
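// Illustrative usage sketch (not an existing test): any code that needs Program.DataConnection
// initialized against a temporary settings database can run inside the callback; the connection
// is disposed when the helper returns. The assertion body is hypothetical.
public void ExampleWithProgramDbConnectionUsage()
{
    using (var tf = new TempFolder())
    {
        WithProgramDbConnection(tf, dataFetcher =>
        {
            // dataFetcher is the same object as XervBackup.GUI.Program.DataConnection here
            Assert.IsNotNull(dataFetcher);
        });
    }
}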
public override void SetUp()
{
    base.SetUp();

    this.m_source_dir = new TempFolder();
    this.m_source_files = Utils.GenerateFiles(this.m_source_dir, this.m_content);

    this.m_dest_dir = new TempFolder();
    this.m_backend_url = "file://" + m_dest_dir;

    // TODO: figure out how this gets properly set/defaulted
    this.m_options["signature-control-files"] = Program.DatabasePath;
}
// Helper that invokes a closure with a loaded test XervBackup application settings database
protected static void WithApplicationSettingsDb(TempFolder tf, Action<TempFolder, ApplicationSettings> action)
{
    WithProgramDbConnection(tf, (dataFetcher) =>
    {
        var appSettings = new ApplicationSettings(dataFetcher);
        action(tf, appSettings);
        dataFetcher.CommitRecursive(dataFetcher.GetObjects<ApplicationSetting>());
    });
}
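// Illustrative usage sketch (not an existing test): WithApplicationSettingsDb wraps the data
// fetcher in an ApplicationSettings instance and commits any ApplicationSetting changes when
// the callback returns. The UseCommonPassword property used below is assumed for illustration.
public void ExampleWithApplicationSettingsDbUsage()
{
    using (var tf = new TempFolder())
    {
        WithApplicationSettingsDb(tf, (folder, appSettings) =>
        {
            // Assumed property; any ApplicationSettings mutation would be persisted the same way
            appSettings.UseCommonPassword = false;
        });
    }
}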
// Performs configuration prerequisites and invokes FinishedRestoreSetup.Restore
protected void performRestore(Action<string> action)
{
    using (TempFolder restore_dir = new TempFolder())
    {
        // Set up program state that must be present during a UI-driven restore.
        // We don't really care about the values; the fields just need to be populated.
        XervBackup.GUI.Program.LiveControl = new LiveControls(new ApplicationSettings(this.m_datafetcher));

        // Hopefully the scheduler doesn't schedule anything; initialize the WorkerThread as paused
        XervBackup.GUI.Program.Scheduler = new Scheduler(this.m_datafetcher, new WorkerThread<IDuplicityTask>(null, true), XervBackup.GUI.Program.MainLock);

        var page = new FinishedRestoreSetup();
        var dialog = new Dialog(new[] { page });

        // Use the secret handshake to set the backend configuration properties
        dialog.Settings["WSW_Backend"] = "file";
        dialog.Settings["WSW_BackendSettings"] = new Dictionary<string, string>()
        {
            { "Destination", this.m_dest_dir },
            //{ "passphrase", this.m_passphrase },
            { "no-encryption", "true" }
        };

        // Causes the FinishedRestoreSetup page to be entered
        dialog.CurrentPage = page;

        // Invoke page.Restore(null, null) via reflection
        typeof(FinishedRestoreSetup).InvokeMember("Restore",
            BindingFlags.InvokeMethod | BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance,
            null, page, new object[] { null, null });

        action(restore_dir);
    }
}
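// Illustrative usage sketch (not an existing test): performRestore drives the UI restore path
// and then hands the restore directory to the callback for verification. The check below is
// hypothetical; a real test would compare the restored content against its source.
public void ExampleUiDrivenRestore()
{
    performRestore(restore_dir =>
    {
        Assert.IsTrue(System.IO.Directory.Exists(restore_dir));
    });
}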
public string Backup(string[] sources)
{
    BackupStatistics bs = new BackupStatistics(XervBackupOperationMode.Backup);
    SetupCommonOptions(bs);

    BackendWrapper backend = null;
    VerificationFile verification = null;

    if (m_options.DontReadManifests)
        throw new Exception(Strings.Interface.ManifestsMustBeReadOnBackups);
    if (m_options.SkipFileHashChecks)
        throw new Exception(Strings.Interface.CannotSkipHashChecksOnBackup);

    if (sources == null || sources.Length == 0)
        throw new Exception(Strings.Interface.NoSourceFoldersError);

    //Make sure they all have the same format and exist
    for (int i = 0; i < sources.Length; i++)
    {
        sources[i] = Utility.Utility.AppendDirSeparator(System.IO.Path.GetFullPath(sources[i]));

        if (!System.IO.Directory.Exists(sources[i]))
            throw new System.IO.IOException(String.Format(Strings.Interface.SourceFolderIsMissingError, sources[i]));
    }

    //Sanity check for duplicate folders and multiple inclusions of the same folder
    for (int i = 0; i < sources.Length - 1; i++)
    {
        for (int j = i + 1; j < sources.Length; j++)
            if (sources[i].Equals(sources[j], Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase))
                throw new Exception(string.Format(Strings.Interface.SourceDirIsIncludedMultipleTimesError, sources[i]));
            else if (sources[i].StartsWith(sources[j], Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase))
                throw new Exception(string.Format(Strings.Interface.SourceDirsAreRelatedError, sources[i], sources[j]));
    }

    if (m_options.AsynchronousUpload)
    {
        m_asyncReserved = ASYNC_RESERVED;
        m_allowUploadProgress = false;
    }

    //Unused, but triggers errors in the encryption setup here
    Library.Interface.IEncryption encryptionModule = m_options.NoEncryption ? null : DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions);

    using (new Logging.Timer("Backup from " + string.Join(";", sources) + " to " + m_backend))
    {
        try
        {
            if (OperationStarted != null)
                OperationStarted(this, XervBackupOperation.Backup, bs.OperationMode, -1, -1, Strings.Interface.StatusLoadingFilelist, "");
            OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, -1, -1, Strings.Interface.StatusLoadingFilelist, "");

            CheckLiveControl();

            bool full = m_options.Full;
            if (full)
                bs.SetTypeReason(string.Format(Strings.Interface.FullBecauseFlagWasSet, "full"));

            backend = new BackendWrapper(bs, m_backend, m_options);
            backend.ProgressEvent += new XervBackup.Library.Main.RSync.RSyncDir.ProgressEventDelegate(BackupTransfer_ProgressEvent);
            backend.AsyncItemProcessedEvent += new EventHandler(backend_AsyncItemProcessedEvent);

            m_progress = 0.0;

            OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusReadingIncrementals, "");

            CheckLiveControl();

            List<ManifestEntry> backupsets;

            if (full)
            {
                //This will create the target folder
                backend.List(false);
                backupsets = new List<ManifestEntry>();
            }
            else
            {
                //This will list all files on the backend and create the target folder
                backupsets = backend.GetBackupSets();
            }

            if (backupsets.Count == 0)
            {
                if (!full)
                    bs.SetTypeReason(Strings.Interface.FullBecauseBackendIsEmpty);
                full = true;
            }
            else
            {
                //A prior backup exists, extract the compression and encryption modules used in the most recent entry
                string compression = null;
                string encryption = null;
                for (int i = backupsets.Count - 1; compression == null && i >= 0; i--)
                {
                    for (int j = backupsets[i].Incrementals.Count - 1; compression == null && j >= 0; j--)
                        for (int k = backupsets[i].Incrementals[j].Volumes.Count - 1; compression == null && k >= 0; k--)
                        {
                            compression = backupsets[i].Incrementals[j].Volumes[k].Key.Compression;
                            encryption = backupsets[i].Incrementals[j].Volumes[k].Key.EncryptionMode;
                            if (compression != null)
                                break;
                        }

                    for (int k = backupsets[i].Volumes.Count - 1; compression == null && k >= 0; k--)
                    {
                        compression = backupsets[i].Volumes[k].Key.Compression;
                        encryption = backupsets[i].Volumes[k].Key.EncryptionMode;
                        if (compression != null)
                            break;
                    }
                }

                if (compression != null)
                {
                    m_options.SetEncryptionModuleDefault(encryption);
                    m_options.SetCompressionModuleDefault(compression);
                }
            }

            string fullCriteria1 = null;
            string fullCriteria2 = null;
            if (!full)
            {
                full = DateTime.Now > m_options.FullIfOlderThan(backupsets[backupsets.Count - 1].Time);
                if (full)
                    bs.SetTypeReason(string.Format(Strings.Interface.FullBecauseLastFullIsFrom, backupsets[backupsets.Count - 1].Time, m_options.FullIfOlderThanValue));
                else if (!string.IsNullOrEmpty(m_options.FullIfOlderThanValue))
                    fullCriteria1 = string.Format(Strings.Interface.IncrementalBecauseLastFullIsFrom, backupsets[backupsets.Count - 1].Time, m_options.FullIfOlderThanValue);
            }

            if (!full && m_options.FullIfMoreThanNIncrementals > 0)
            {
                full = backupsets[backupsets.Count - 1].Incrementals.Count >= m_options.FullIfMoreThanNIncrementals;
                if (full)
                    bs.SetTypeReason(string.Format(Strings.Interface.FullBecauseThereAreNIncrementals, backupsets[backupsets.Count - 1].Incrementals.Count, m_options.FullIfMoreThanNIncrementals));
                else
                    fullCriteria2 = string.Format(Strings.Interface.IncrementalBecauseThereAreNIncrementals, backupsets[backupsets.Count - 1].Incrementals.Count, m_options.FullIfMoreThanNIncrementals);
            }

            bs.Full = full;
            if (!full)
            {
                if (fullCriteria1 == null && fullCriteria2 == null)
                    bs.SetTypeReason(Strings.Interface.IncrementalBecauseNoFlagsWereSet);
                else if (fullCriteria2 == null)
                    bs.SetTypeReason(fullCriteria1);
                else if (fullCriteria1 == null)
                    bs.SetTypeReason(fullCriteria2);
                else
                    bs.SetTypeReason(fullCriteria1 + ". " + fullCriteria2);
            }

            List<string> controlfiles = new List<string>();
            if (!string.IsNullOrEmpty(m_options.SignatureControlFiles))
                controlfiles.AddRange(m_options.SignatureControlFiles.Split(System.IO.Path.PathSeparator));

            int vol = 0;
            long totalsize = 0;
            Manifestfile manifest = new Manifestfile();

            using (Utility.TempFolder tempfolder = new XervBackup.Library.Utility.TempFolder())
            {
                List<KeyValuePair<ManifestEntry, Library.Interface.ICompression>> patches = new List<KeyValuePair<ManifestEntry, XervBackup.Library.Interface.ICompression>>();
                if (!full)
                {
                    m_incrementalFraction = INCREMENAL_COST;
                    List<ManifestEntry> entries = new List<ManifestEntry>();
                    entries.Add(backupsets[backupsets.Count - 1]);
                    entries.AddRange(backupsets[backupsets.Count - 1].Incrementals);

                    //Check before we start the download
                    CheckLiveControl();

                    VerifyBackupChainWithFiles(backend, entries[entries.Count - 1]);
                    if (m_options.CreateVerificationFile)
                        verification = new VerificationFile(entries, backend.FilenameStrategy);

                    OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusReadingIncrementals, "");

                    patches = FindPatches(backend, entries, tempfolder, false, bs);

                    //Check before we start the download
                    CheckLiveControl();
                    Manifestfile latest = GetManifest(backend, backupsets[backupsets.Count - 1]);

                    //Manifest version 1 does not support multiple folders
                    if (latest.Version == 1)
                        latest.SourceDirs = new string[] { sources[0] };

                    if (latest.SourceDirs.Length != sources.Length)
                    {
                        if (m_options.FullIfSourceFolderChanged)
                        {
                            Logging.Log.WriteMessage("Source folder count changed, issuing full backup", XervBackup.Library.Logging.LogMessageType.Information);
                            if (!full)
                                bs.SetTypeReason(Strings.Interface.FullBecauseSourceFoldersChanged);
                            full = true;
                        }
                        else
                            throw new Exception(string.Format(Strings.Interface.NumberOfSourceFoldersHasChangedError, latest.SourceDirs.Length, sources.Length));
                    }
                    else
                    {
                        if (!m_options.AllowSourceFolderChange)
                        {
                            foreach (string s1 in latest.SourceDirs)
                            {
                                bool found = false;
                                foreach (string s2 in sources)
                                    if (s1.Equals(s2, Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase))
                                    {
                                        found = true;
                                        break;
                                    }

                                if (!found)
                                {
                                    if (m_options.FullIfSourceFolderChanged)
                                    {
                                        Logging.Log.WriteMessage("Source folders changed, issuing full backup", XervBackup.Library.Logging.LogMessageType.Information);
                                        if (!full)
                                            bs.SetTypeReason(Strings.Interface.FullBecauseSourceFoldersChanged);
                                        full = true;
                                        break; //Exit the folder loop
                                    }
                                    else
                                        throw new Exception(string.Format(Strings.Interface.SourceFoldersHasChangedError, s1));
                                }
                            }

                            manifest.SourceDirs = latest.SourceDirs;
                        }
                        else
                        {
                            manifest.SourceDirs = sources;
                        }
                    }
                }

                DateTime backuptime = DateTime.Now;
                DateTime backupchaintime;

                if (full)
                {
                    patches.Clear();
                    m_incrementalFraction = 0.0;
                    manifest.SourceDirs = sources;
                    if (m_options.CreateVerificationFile)
                        verification = new VerificationFile(new ManifestEntry[0], backend.FilenameStrategy);
                    backupchaintime = backuptime;
                }
                else
                {
                    backupchaintime = patches[0].Key.Time;
                    manifest.PreviousManifestFilename = patches[patches.Count - 1].Key.Filename;
                    manifest.PreviousManifestHash = patches[patches.Count - 1].Key.RemoteHash;
                }

                OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, -1, -1, Strings.Interface.StatusBuildingFilelist, "");

                bool completedWithoutChanges;

                using (RSync.RSyncDir dir = new XervBackup.Library.Main.RSync.RSyncDir(manifest.SourceDirs, bs, m_options.Filter, patches))
                {
                    CheckLiveControl();

                    dir.ProgressEvent += new XervBackup.Library.Main.RSync.RSyncDir.ProgressEventDelegate(BackupRSyncDir_ProgressEvent);

                    dir.DisableFiletimeCheck = m_options.DisableFiletimeCheck;
                    dir.MaxFileSize = m_options.SkipFilesLargerThan;

                    using (new Logging.Timer("Initiating multipass"))
                        dir.InitiateMultiPassDiff(full, m_options);

                    string tempVolumeFolder = m_options.AsynchronousUpload ? m_options.AsynchronousUploadFolder : m_options.TempDir;

                    bool done = false;
                    while (!done && totalsize < m_options.MaxSize)
                    {
                        using (new Logging.Timer("Multipass " + (vol + 1).ToString()))
                        using (Utility.TempFile signaturefile = new XervBackup.Library.Utility.TempFile(System.IO.Path.Combine(tempVolumeFolder, Guid.NewGuid().ToString())))
                        using (Utility.TempFile contentfile = new XervBackup.Library.Utility.TempFile(System.IO.Path.Combine(tempVolumeFolder, Guid.NewGuid().ToString())))
                        {
                            OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusCreatingVolume, vol + 1), "");

                            CheckLiveControl();

                            using (Library.Interface.ICompression signaturearchive = DynamicLoader.CompressionLoader.GetModule(m_options.CompressionModule, signaturefile, m_options.RawOptions))
                            using (Library.Interface.ICompression contentarchive = DynamicLoader.CompressionLoader.GetModule(m_options.CompressionModule, contentfile, m_options.RawOptions))
                            {
                                //If we are all out, stop now, this may cause incomplete partial files
                                if (m_options.MaxSize - totalsize < (contentarchive.FlushBufferSize + backend.FileSizeOverhead))
                                    break;

                                //Add signature files to archive
                                foreach (string s in controlfiles)
                                    if (!string.IsNullOrEmpty(s))
                                        using (System.IO.Stream cs = signaturearchive.CreateFile(System.IO.Path.Combine(RSync.RSyncDir.CONTROL_ROOT, System.IO.Path.GetFileName(s))))
                                        using (System.IO.FileStream fs = System.IO.File.OpenRead(s))
                                            Utility.Utility.CopyStream(fs, cs);

                                //Only add control files to the very first volume
                                controlfiles.Clear();

                                done = dir.MakeMultiPassDiff(signaturearchive, contentarchive, (Math.Min(m_options.VolumeSize, m_options.MaxSize - totalsize)) - backend.FileSizeOverhead);

                                //TODO: This is not the correct size, we need to account for file size overhead as well
                                totalsize += signaturearchive.Size;
                                totalsize += contentarchive.Size;

                                //TODO: This is not the best way to determine this
                                if (totalsize >= m_options.MaxSize)
                                    dir.FinalizeMultiPass(signaturearchive, contentarchive, long.MaxValue);
                            }

                            completedWithoutChanges = done && !dir.AnyChangesFound;
                            if (m_options.UploadUnchangedBackups || full)
                                completedWithoutChanges = false;

                            if (!completedWithoutChanges)
                            {
                                if (m_options.AsynchronousUpload)
                                {
                                    m_lastProgressMessage = Strings.Interface.StatusWaitingForUpload;
                                    m_allowUploadProgress = true;
                                    m_allowUploadProgressAfter = DateTime.Now.AddSeconds(1);
                                }
                                else
                                    OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusUploadingContentVolume, vol + 1), "");

                                //Last check before we upload, we do not interrupt transfers
                                CheckLiveControl();

                                //The backendwrapper will remove these
                                signaturefile.Protected = true;
                                contentfile.Protected = true;

                                ContentEntry ce = new ContentEntry(backuptime, full, vol + 1);
                                SignatureEntry se = new SignatureEntry(backuptime, full, vol + 1);

                                using (new Logging.Timer("Writing delta file " + (vol + 1).ToString()))
                                    backend.Put(ce, contentfile);

                                if (!m_options.AsynchronousUpload)
                                    OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusUploadingSignatureVolume, vol + 1), "");

                                using (new Logging.Timer("Writing remote signatures"))
                                    backend.Put(se, signaturefile);

                                manifest.AddEntries(ce, se);

                                if (verification != null)
                                {
                                    verification.AddFile(ce);
                                    verification.AddFile(se);
                                }
                            }
                        }

                        if (!completedWithoutChanges)
                        {
                            //The backend wrapper will remove these
                            Utility.TempFile mf = new XervBackup.Library.Utility.TempFile();

                            using (new Logging.Timer("Writing manifest " + backuptime.ToUniversalTime().ToString("yyyyMMddTHHmmssK")))
                            {
                                //Alternate primary/secondary
                                ManifestEntry mfe = new ManifestEntry(backuptime, full, manifest.SignatureHashes.Count % 2 != 0);
                                manifest.SelfFilename = backend.GenerateFilename(mfe);
                                manifest.Save(mf);

                                if (!m_options.AsynchronousUpload)
                                    OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusUploadingManifestVolume, vol + 1), "");

                                //Write the file
                                mf.Protected = true;
                                backend.Put(mfe, mf);

                                if (verification != null)
                                    verification.UpdateManifest(mfe);
                            }

                            if (verification != null)
                            {
                                using (new Logging.Timer("Writing verification " + backuptime.ToUniversalTime().ToString("yyyyMMddTHHmmssK")))
                                {
                                    Utility.TempFile vt = new XervBackup.Library.Utility.TempFile();

                                    verification.Save(vt);

                                    if (!m_options.AsynchronousUpload)
                                        OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusUploadingVerificationVolume, "");

                                    vt.Protected = true;
                                    backend.Put(new VerificationEntry(backupchaintime), vt);
                                }
                            }

                            if (m_options.AsynchronousUpload)
                                m_allowUploadProgress = false;

                            //The file volume counter
                            vol++;
                        }
                    }
                }

                //If we are running asynchronous, we now enter the end-game
                if (m_options.AsynchronousUpload)
                {
                    m_lastProgressMessage = Strings.Interface.StatusWaitingForUpload;
                    m_allowUploadProgress = true;
                    m_allowUploadProgressAfter = DateTime.Now;

                    //Before we clear the temp folder, we need to ensure that all volumes are uploaded.
                    //To allow the UI to show some progress while uploading, we perform the remaining
                    // uploads synchronously
                    List<KeyValuePair<BackupEntryBase, string>> pendingUploads = backend.ExtractPendingUploads();

                    //Figure out what volume number we are at
                    foreach (KeyValuePair<BackupEntryBase, string> p in pendingUploads)
                        if (p.Key is ManifestEntry)
                            vol--;

                    double unitcost = m_asyncReserved / pendingUploads.Count;

                    //Then upload each remaining volume in order
                    foreach (KeyValuePair<BackupEntryBase, string> p in pendingUploads)
                    {
                        string msg;
                        if (p.Key is ManifestEntry)
                        {
                            vol++;
                            msg = string.Format(Strings.Interface.StatusUploadingManifestVolume, vol);
                        }
                        else if (p.Key is SignatureEntry)
                            msg = string.Format(Strings.Interface.StatusUploadingSignatureVolume, ((SignatureEntry)p.Key).Volumenumber);
                        else if (p.Key is ContentEntry)
                        {
                            msg = string.Format(Strings.Interface.StatusUploadingContentVolume, ((ContentEntry)p.Key).Volumenumber);

                            //We allow a stop or pause request here
                            CheckLiveControl();
                        }
                        else if (p.Key is VerificationEntry)
                            msg = Strings.Interface.StatusUploadingVerificationVolume;
                        else
                            throw new InvalidOperationException();

                        OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, msg, "");

                        backend.Put(p.Key, p.Value);
                        m_asyncReserved -= unitcost;
                        m_progress += unitcost;
                    }
                }
            }
        }
        catch (Exception ex)
        {
            //If this is a controlled user-requested stop, wait for the current upload to complete
            if (backend != null && ex is LiveControl.ExecutionStoppedException)
            {
                try
                {
                    if (m_options.AsynchronousUpload)
                    {
                        m_lastProgressMessage = Strings.Interface.StatusWaitingForUpload;
                        m_allowUploadProgress = true;
                        m_allowUploadProgressAfter = DateTime.Now;

                        //Wait for the current upload to complete and then delete all remaining temporary files
                        foreach (KeyValuePair<BackupEntryBase, string> p in backend.ExtractPendingUploads())
                            try
                            {
                                if (System.IO.File.Exists(p.Value))
                                    System.IO.File.Delete(p.Value);
                            }
                            catch { } //Better to delete as many as possible rather than choke on a single file
                    }
                }
                catch { } //We already have an exception, just go with that
            }

            if (backend == null || backend.ManifestUploads == 0)
            {
                Logging.Log.WriteMessage(string.Format(Strings.Interface.ErrorRunningBackup, ex.Message), Logging.LogMessageType.Error);
                throw; //This also activates "finally", unlike in other languages...
            }

            Logging.Log.WriteMessage(string.Format(Strings.Interface.PartialUploadMessage, backend.ManifestUploads, ex.Message), Logging.LogMessageType.Warning);
            bs.LogError(string.Format(Strings.Interface.PartialUploadMessage, backend.ManifestUploads, ex.Message), ex);
        }
        finally
        {
            m_progress = 100.0;
            if (backend != null)
                try { backend.Dispose(); }
                catch { }

            if (OperationCompleted != null)
                OperationCompleted(this, XervBackupOperation.Backup, bs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");

            OperationProgress(this, XervBackupOperation.Backup, bs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");
        }
    }

    bs.EndTime = DateTime.Now;
    return bs.ToString();
}
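// Illustrative sketch only: the instance method above is normally reached through the static
// XervBackup.Library.Main.Interface.Backup(string[], string, Dictionary<string, string>) wrapper,
// as TestBackup and RunTest elsewhere in this section do. The paths, backend URL, and method
// name below are placeholders.
public static void ExampleBackupInvocation()
{
    var options = new Dictionary<string, string>
    {
        { "passphrase", "secret password!" }, // or set "no-encryption" to "true" to skip encryption
        { "full", "" }                        // force a full backup, as RunTest does for the first folder
    };

    // Returns a textual statistics report (bs.ToString() above)
    string report = XervBackup.Library.Main.Interface.Backup(
        new string[] { "/path/to/source" },
        "file:///path/to/backend",
        options);

    Console.WriteLine(report);
}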
public IList<string> ListCurrentFiles()
{
    RestoreStatistics rs = new RestoreStatistics(XervBackupOperationMode.ListCurrentFiles);
    SetupCommonOptions(rs);

    Utility.FilenameFilter filter = m_options.Filter;
    DateTime timelimit = m_options.RestoreTime;

    if (OperationStarted != null)
        OperationStarted(this, XervBackupOperation.List, rs.OperationMode, 0, -1, Strings.Interface.StatusStarted, "");

    List<string> res;

    using (BackendWrapper backend = new BackendWrapper(rs, m_backend, m_options))
    using (Utility.TempFolder basefolder = new XervBackup.Library.Utility.TempFolder())
    {
        ManifestEntry bestFit = backend.GetBackupSet(timelimit);

        List<ManifestEntry> entries = new List<ManifestEntry>();
        entries.Add(bestFit);
        entries.AddRange(bestFit.Incrementals);

        List<KeyValuePair<ManifestEntry, Library.Interface.ICompression>> patches = FindPatches(backend, entries, basefolder, false, rs);

        using (RSync.RSyncDir dir = new XervBackup.Library.Main.RSync.RSyncDir(new string[] { basefolder }, rs, filter, patches))
            res = dir.UnmatchedFiles();
    }

    if (OperationCompleted != null)
        OperationCompleted(this, XervBackupOperation.List, rs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");

    return res;
}
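// Illustrative sketch only: RunTest below uses the static wrapper for this operation to pick
// random files for its partial-restore check. The parameter names here are placeholders.
public static void ExampleListCurrentFiles(string target, Dictionary<string, string> options)
{
    // RunTest pins the listing to a specific backup by setting options["restore-time"] first
    IList<string> files = XervBackup.Library.Main.Interface.ListCurrentFiles(target, options);
    foreach (string file in files)
        Console.WriteLine(file);
}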
public List<KeyValuePair<RSync.RSyncDir.PatchFileType, string>> ListActualSignatureFiles()
{
    CommunicationStatistics stats = new CommunicationStatistics(XervBackupOperationMode.ListActualSignatureFiles);
    SetupCommonOptions(stats);

    using (BackendWrapper backend = new BackendWrapper(stats, m_backend, m_options))
    {
        ManifestEntry bestFit = backend.GetBackupSet(m_options.RestoreTime);
        if (bestFit.Incrementals.Count > 0) //Get the most recent incremental
            bestFit = bestFit.Incrementals[bestFit.Incrementals.Count - 1];

        using (Utility.TempFolder folder = new XervBackup.Library.Utility.TempFolder())
        {
            List<Library.Interface.ICompression> patches = new List<XervBackup.Library.Interface.ICompression>();
            foreach (KeyValuePair<ManifestEntry, Library.Interface.ICompression> entry in FindPatches(backend, new List<ManifestEntry>(new ManifestEntry[] { bestFit }), folder, false, stats))
                patches.Add(entry.Value);

            using (RSync.RSyncDir dir = new XervBackup.Library.Main.RSync.RSyncDir(new string[] { folder }, stats, null))
                return dir.ListPatchFiles(patches);
        }
    }
}
/// <summary>
/// Running the unit test confirms the correctness of XervBackup
/// </summary>
/// <param name="folders">The folders to back up. The folder at index 0 is the base; all others are incrementals</param>
/// <param name="options">The options to use for all backup and restore operations</param>
/// <param name="target">The target destination for the backups</param>
public static void RunTest(string[] folders, Dictionary<string, string> options, string target)
{
    LogHelper log = new LogHelper("unittest.log");
    Log.CurrentLog = log;
    Log.LogLevel = XervBackup.Library.Logging.LogMessageType.Profiling;

    string tempdir = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location), "tempdir");

    try
    {
        if (System.IO.Directory.Exists(tempdir))
            System.IO.Directory.Delete(tempdir, true);

        System.IO.Directory.CreateDirectory(tempdir);
    }
    catch (Exception ex)
    {
        Log.WriteMessage("Failed to clean tempdir", LogMessageType.Error, ex);
    }

    XervBackup.Library.Utility.TempFolder.SystemTempPath = tempdir;

    //Set some defaults
    if (!options.ContainsKey("passphrase"))
        options["passphrase"] = "secret password!";

    if (!options.ContainsKey("backup-prefix"))
        options["backup-prefix"] = "XervBackup_unittest";

    //This would break the test, because the data is not modified the normal way
    options["disable-filetime-check"] = "true";

    //We do not use the same folder, so we need this option
    options["allow-sourcefolder-change"] = "true";

    //We want all messages in the log
    options["log-level"] = LogMessageType.Profiling.ToString();

    //We cannot rely on USN numbering, but we can use USN enumeration
    //options["disable-usn-diff-check"] = "true";

    //We use precise times
    options["disable-time-tolerance"] = "true";
    options["verification-level"] = "full";

    //We need all sets, even if they are unchanged
    options["upload-unchanged-backups"] = "true";

    using (new Timer("Total unittest"))
    using (TempFolder tf = new TempFolder())
    {
        //The code below tests for a race condition in the ssh backend.
        /*string[] list = null;
        string[] prevList = null;

        for (int i = 0; i < 1000; i++)
        {
            Console.WriteLine(string.Format("Listing, test {0}", i));
            list = XervBackup.Library.Main.Interface.List(target, options);
            if (i != 0 && list.Length != prevList.Length)
                Console.WriteLine(string.Format("Count mismatch {0} vs {1}", list.Length, prevList.Length));

            prevList = list;
        }*/

        if (string.IsNullOrEmpty(target))
        {
            target = "file://" + tf;
        }
        else
        {
            Console.WriteLine("Removing old backups");
            Dictionary<string, string> tmp = new Dictionary<string, string>(options);
            tmp["delete-all-but-n-full"] = "0";
            tmp["force"] = "";
            tmp["allow-full-removal"] = "";

            using (new Timer("Cleaning up any existing backups"))
                Console.WriteLine(XervBackup.Library.Main.Interface.DeleteAllButNFull(target, tmp));
        }

        log.Backupset = "Backup " + folders[0];
        Console.WriteLine("Backing up the full copy: " + folders[0]);

        using (new Timer("Full backup of " + folders[0]))
        {
            options["full"] = "";
            Log.WriteMessage(XervBackup.Library.Main.Interface.Backup(folders[0].Split(System.IO.Path.PathSeparator), target, options), LogMessageType.Information);
            options.Remove("full");
        }

        for (int i = 1; i < folders.Length; i++)
        {
            //options["passphrase"] = "bad password";
            //If the backups are too close, we can't pick the right one :(
            System.Threading.Thread.Sleep(1000 * 5);

            log.Backupset = "Backup " + folders[i];
            Console.WriteLine("Backing up the incremental copy: " + folders[i]);

            using (new Timer("Incremental backup of " + folders[i]))
                Log.WriteMessage(XervBackup.Library.Main.Interface.Backup(folders[i].Split(System.IO.Path.PathSeparator), target, options), LogMessageType.Information);
        }

        XervBackup.Library.Main.Options opts = new XervBackup.Library.Main.Options(options);
        using (XervBackup.Library.Interface.IBackend bk = XervBackup.Library.DynamicLoader.BackendLoader.GetBackend(target, options))
            foreach (XervBackup.Library.Interface.IFileEntry fe in bk.List())
                if (fe.Size > opts.VolumeSize)
                {
                    string msg = string.Format("The file {0} is {1} bytes larger than allowed", fe.Name, fe.Size - opts.VolumeSize);
                    Console.WriteLine(msg);
                    Log.WriteMessage(msg, LogMessageType.Error);
                }

        List<XervBackup.Library.Main.ManifestEntry> entries = XervBackup.Library.Main.Interface.ParseFileList(target, options);

        if (entries.Count != 1 || entries[0].Incrementals.Count != folders.Length - 1)
        {
            StringBuilder sb = new StringBuilder();
            sb.AppendLine("Entry count: " + entries.Count.ToString());
            if (entries.Count == 1)
                sb.Append(string.Format("Found {0} incrementals but there were {1} source folders", entries[0].Incrementals.Count, folders.Length));

            throw new Exception("Filename parsing problem, or corrupt storage: " + sb.ToString());
        }

        Console.WriteLine("Verifying the backup chain");
        using (new Timer("Verify backup"))
        {
            List<KeyValuePair<XervBackup.Library.Main.BackupEntryBase, Exception>> results = XervBackup.Library.Main.Interface.VerifyBackup(target, options);
            foreach (KeyValuePair<XervBackup.Library.Main.BackupEntryBase, Exception> x in results)
                if (x.Value != null)
                    Console.WriteLine(string.Format("Error: {0}: {1}", x.Key.Filename, x.Value.ToString()));
        }

        List<XervBackup.Library.Main.ManifestEntry> t = new List<XervBackup.Library.Main.ManifestEntry>();
        t.Add(entries[0]);
        t.AddRange(entries[0].Incrementals);
        entries = t;

        for (int i = 0; i < entries.Count; i++)
        {
            using (TempFolder ttf = new TempFolder())
            {
                log.Backupset = "Restore " + folders[i];
                Console.WriteLine("Restoring the copy: " + folders[i]);

                options["restore-time"] = entries[i].Time.ToString();

                string[] actualfolders = folders[i].Split(System.IO.Path.PathSeparator);
                string[] restorefoldernames;
                if (actualfolders.Length == 1)
                    restorefoldernames = new string[] { ttf };
                else
                {
                    restorefoldernames = new string[actualfolders.Length];
                    for (int j = 0; j < actualfolders.Length; j++)
                        restorefoldernames[j] = System.IO.Path.Combine(ttf, System.IO.Path.GetFileName(actualfolders[j]));
                }

                Console.WriteLine("Partial restore of: " + folders[i]);
                using (TempFolder ptf = new TempFolder())
                {
                    List<string> testfiles = new List<string>();
                    using (new Timer("Extract list of files from " + folders[i]))
                    {
                        IList<string> sourcefiles = XervBackup.Library.Main.Interface.ListCurrentFiles(target, options);

                        //Remove all folders from list
                        for (int j = 0; j < sourcefiles.Count; j++)
                            if (sourcefiles[j].EndsWith(System.IO.Path.DirectorySeparatorChar.ToString()))
                            {
                                sourcefiles.RemoveAt(j);
                                j--;
                            }

                        int testfilecount = 15;
                        Random r = new Random();
                        while (testfilecount-- > 0 && sourcefiles.Count > 0)
                        {
                            int rn = r.Next(0, sourcefiles.Count);
                            testfiles.Add(sourcefiles[rn]);
                            sourcefiles.RemoveAt(rn);
                        }
                    }

                    //Add all folders to avoid warnings in restore log
                    int c = testfiles.Count;
                    Dictionary<string, string> partialFolders = new Dictionary<string, string>(Utility.ClientFilenameStringComparer);

                    for (int j = 0; j < c; j++)
                    {
                        string f = testfiles[j];
                        do
                        {
                            f = System.IO.Path.GetDirectoryName(f);
                            partialFolders[Utility.AppendDirSeparator(f)] = null;
                        } while (f.IndexOf(System.IO.Path.DirectorySeparatorChar) > 0);
                    }

                    if (partialFolders.ContainsKey(""))
                        partialFolders.Remove("");

                    Dictionary<string, string> tops = new Dictionary<string, string>(options);
                    List<string> filterlist = new List<string>();
                    filterlist.AddRange(partialFolders.Keys);
                    filterlist.AddRange(testfiles);
                    tops["file-to-restore"] = String.Join(System.IO.Path.PathSeparator.ToString(), filterlist.ToArray());

                    using (new Timer("Partial restore of " + folders[i]))
                        Log.WriteMessage(XervBackup.Library.Main.Interface.Restore(target, new string[] { ptf }, tops), LogMessageType.Information);

                    Console.WriteLine("Verifying partial restore of: " + folders[i]);

                    using (new Timer("Verification of partial restore from " + folders[i]))
                        foreach (string s in testfiles)
                        {
                            string restoredname;
                            string sourcename;

                            if (actualfolders.Length == 1)
                            {
                                sourcename = System.IO.Path.Combine(actualfolders[0], s);
                                restoredname = System.IO.Path.Combine(ptf, s);
                            }
                            else
                            {
                                int six = s.IndexOf(System.IO.Path.DirectorySeparatorChar);
                                sourcename = System.IO.Path.Combine(actualfolders[int.Parse(s.Substring(0, six))], s.Substring(six + 1));
                                restoredname = System.IO.Path.Combine(System.IO.Path.Combine(ptf, System.IO.Path.GetFileName(folders[0].Split(System.IO.Path.PathSeparator)[int.Parse(s.Substring(0, six))])), s.Substring(six + 1));
                            }

                            if (!System.IO.File.Exists(restoredname))
                            {
                                Log.WriteMessage("Partial restore missing file: " + restoredname, LogMessageType.Error);
                                Console.WriteLine("Partial restore missing file: " + restoredname);
                            }
                            else
                            {
                                if (!System.IO.File.Exists(sourcename))
                                {
                                    Log.WriteMessage("Partial restore missing file: " + sourcename, LogMessageType.Error);
                                    Console.WriteLine("Partial restore missing file: " + sourcename);
                                    throw new Exception("Unittest is broken");
                                }

                                if (!CompareFiles(sourcename, restoredname, s))
                                {
                                    Log.WriteMessage("Partial restore file differs: " + s, LogMessageType.Error);
                                    Console.WriteLine("Partial restore file differs: " + s);
                                }
                            }
                        }
                }

                using (new Timer("Restore of " + folders[i]))
                    Log.WriteMessage(XervBackup.Library.Main.Interface.Restore(target, restorefoldernames, options), LogMessageType.Information);

                Console.WriteLine("Verifying the copy: " + folders[i]);

                using (new Timer("Verification of " + folders[i]))
                {
                    for (int j = 0; j < actualfolders.Length; j++)
                        VerifyDir(actualfolders[j], restorefoldernames[j]);
                }
            }
        }
    }

    (Log.CurrentLog as StreamLog).Dispose();
    Log.CurrentLog = null;
}
public override void SetUp()
{
    this.m_config_dir = new TempFolder();
    initializeDbConnection();
}
internal static IDataFetcherWithRelations InitializeDbConnection(TempFolder tempFolder, System.Data.IDbConnection con)
{
    XervBackup.GUI.Program.OpenSettingsDatabase(con, tempFolder, TEST_DB);
    XervBackup.GUI.Program.DataConnection = new DataFetcherWithRelations(new SQLiteDataProvider(con));
    return XervBackup.GUI.Program.DataConnection;
}
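// Illustrative sketch only: this mirrors how WithProgramDbConnection above wires up the test
// settings database; Utils.CreateDbConnection is the same factory used there.
internal static void ExampleInitializeDbConnection(TempFolder tempFolder)
{
    using (System.Data.IDbConnection con = Utils.CreateDbConnection())
    {
        IDataFetcherWithRelations dataFetcher = InitializeDbConnection(tempFolder, con);

        // The returned fetcher is the same object that was assigned to Program.DataConnection
        System.Diagnostics.Debug.Assert(object.ReferenceEquals(dataFetcher, XervBackup.GUI.Program.DataConnection));
    }
}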