public void CustomRemoteURL()
{
    // Redirect the backup destination at runtime: a run-script-before script
    // echoes a --remoteurl option pointing at a sub-folder of the target.
    var redirectedFolder = Path.Combine(this.TARGETFOLDER, "destination");
    Directory.CreateDirectory(redirectedFolder);

    var scriptCommands = new List<string> { $"echo --remoteurl = \"{redirectedFolder}\"" };
    var opts = this.TestOptions;
    opts["run-script-before"] = CreateScript(0, null, null, 0, scriptCommands);

    using (var controller = new Library.Main.Controller("file://" + this.TARGETFOLDER, opts, null))
    {
        var results = controller.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, results.Errors.Count());
        Assert.AreEqual(0, results.Warnings.Count());
    }

    // The original target folder must contain nothing but the redirected destination.
    var topLevelEntries = Directory.EnumerateFileSystemEntries(this.TARGETFOLDER).ToArray();
    Assert.AreEqual(1, topLevelEntries.Length);
    Assert.AreEqual(redirectedFolder, topLevelEntries[0]);

    // We expect a dblock, dlist, and dindex file.
    var redirectedEntries = Directory.EnumerateFileSystemEntries(redirectedFolder);
    Assert.AreEqual(3, redirectedEntries.Count());
}
public void RunCommands()
{
    // End-to-end smoke test: back up twice, list the newest version with and
    // without the local database, then rebuild the database and list both
    // versions again.
    var data = new byte[1024 * 1024 * 10];

    // First backup: a single 10 MiB file.
    //File.WriteAllText(Path.Combine(DATAFOLDER, "a"), "hi");
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Backup(new string[] { DATAFOLDER });

    // Second backup: add a second file with different random content.
    new Random().NextBytes(data);
    //File.WriteAllText(Path.Combine(DATAFOLDER, "b"), "there");
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: 3 entries (presumably the folder plus both files).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // The same listing must also work directly from the backend, without the local db.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Delete the local database and rebuild it from the remote volumes.
    File.Delete(DBFILE);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Repair();

    // FIX: Assert.AreEqual takes (expected, actual); the arguments were
    // swapped, which made the failure message report the values backwards.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        Assert.AreEqual(2, c.List().Filesets.Count());

    // Version 1 (older) should contain 2 entries.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Oldest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(2, r.Files.Count());
    }

    // Version 0 (newest) should still contain all 3 entries.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }
}
public void RunCommands()
{
    // Perform one backup with backend verification disabled and confirm that
    // the parsed backend statistics report the expected remote state.
    var options = TestOptions;
    options["no-backend-verification"] = "true";

    var payload = new byte[1024 * 1024 * 10];
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), payload);

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options, null))
    {
        var backupResult = controller.Backup(new string[] { DATAFOLDER });
        var stats = (Library.Interface.IParsedBackendStatistics)backupResult.BackendStatistics;

        // A single backup should leave 3 known remote files and 1 backup list.
        var statsAreValid = stats.KnownFileSize != 0 && stats.KnownFileCount == 3 && stats.BackupListCount == 1;
        if (!statsAreValid)
        {
            throw new Exception(string.Format("Failed to get stats from remote backend: {0}, {1}, {2}", stats.KnownFileSize, stats.KnownFileCount, stats.BackupListCount));
        }
    }
}
public void PurgeBrokenFilesTest()
{
    // Creates three backup versions from nested file subsets, then deletes the
    // oldest dblock volume on the backend so every version contains broken
    // files. Verifies that list-broken-files reports them, that
    // purge-broken-files works in dry-run and for real, and that a subsequent
    // backup succeeds.
    var blocksize = 1024 * 10;
    var basedatasize = 0;
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    var filenames = BorderTests.WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize).Select(x => "a" + x.Key).ToList();
    // round1 is a subset of round2, which is a subset of the full list, so
    // each backup adds a distinct batch of new files.
    var round1 = filenames.Take(filenames.Count / 3).ToArray();
    var round2 = filenames.Take((filenames.Count / 3) * 2).ToArray();

    // Oldest version: only the round1 subset.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round1.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(res.AddedFiles, round1.Length);
    }

    // Remember the oldest dblock volume; deleting it later breaks all three versions.
    var dblock_file = SystemIO.IO_OS
        .GetFiles(TARGETFOLDER, "*.dblock.zip.aes")
        .Select(x => new FileInfo(x))
        .OrderBy(x => x.LastWriteTimeUtc)
        .Select(x => x.FullName)
        .First();

    // Sleep between backups so the filesets get distinct timestamps.
    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    // Middle version: the round2 subset (adds round2 minus round1 files).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round2.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(round2.Length - round1.Length, res.AddedFiles);
    }

    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    // Newest version: everything.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(filenames.Count - round2.Length, res.AddedFiles);
    }

    // Damage the backend by removing the oldest data volume.
    File.Delete(dblock_file);

    long[] affectedfiles;
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        // All three filesets should now report broken files.
        var brk = c.ListBrokenFiles(null);
        Assert.AreEqual(0, brk.Errors.Count());
        Assert.AreEqual(0, brk.Warnings.Count());
        var sets = brk.BrokenFiles.Count();
        var files = brk.BrokenFiles.Sum(x => x.Item3.Count());
        Assert.AreEqual(3, sets);
        Assert.True(files > 0);
        affectedfiles = brk.BrokenFiles.OrderBy(x => x.Item1).Select(x => x.Item3.LongCount()).ToArray();
    }

    // Querying each version individually must match the per-version counts above.
    for (var i = 0; i < 3; i++)
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = i }), null))
        {
            var brk = c.ListBrokenFiles(null);
            Assert.AreEqual(0, brk.Errors.Count());
            Assert.AreEqual(0, brk.Warnings.Count());
            var sets = brk.BrokenFiles.Count();
            var files = brk.BrokenFiles.Sum(x => x.Item3.Count());
            Assert.AreEqual(1, sets);
            Assert.AreEqual(affectedfiles[i], files);
        }
    }

    // A dry-run should run without exceptions (see issue #4379).
    Dictionary<string, string> dryRunOptions = new Dictionary<string, string>(testopts) { ["dry-run"] = "true" };
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, dryRunOptions, null))
    {
        IPurgeBrokenFilesResults purgeResults = c.PurgeBrokenFiles(null);
        Assert.AreEqual(0, purgeResults.Errors.Count());
        Assert.AreEqual(0, purgeResults.Warnings.Count());
    }

    // The real purge should modify (delete or rewrite) all three filesets.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var brk = c.PurgeBrokenFiles(null);
        Assert.AreEqual(0, brk.Errors.Count());
        Assert.AreEqual(0, brk.Warnings.Count());
        var modFilesets = 0L;
        if (brk.DeleteResults != null)
        {
            modFilesets += brk.DeleteResults.DeletedSets.Count();
        }
        if (brk.PurgeResults != null)
        {
            modFilesets += brk.PurgeResults.RewrittenFileLists;
        }
        Assert.AreEqual(3, modFilesets);
    }

    // A subsequent backup should be successful.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }
}
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    // Shared driver for the border tests: performs three backups with growing
    // content, rebuilds the local database from the backend, lists every
    // version, and restores both the full set and a filtered subset.
    //
    // blocksize     - block size in bytes, passed as --blocksize for the first backup
    // basedatasize  - base file size forwarded to WriteTestFilesToFolder
    // modifyOptions - optional hook that can tweak the option dictionary before the run
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);
    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    // Backup 1: the "a<key>" generation written by WriteTestFilesToFolder.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        IListResults listResults = c.List("*");
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Do a "touch" on files to trigger a re-scan, which should do nothing
    //foreach (var k in filenames)
    //if (File.Exists(Path.Combine(DATAFOLDER, "a" + k.Key)))
    //File.SetLastWriteTime(Path.Combine(DATAFOLDER, "a" + k.Key), DateTime.Now.AddSeconds(5));

    // Backup 2: add a "b<key>" copy of every file with random content.
    var data = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var r = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        // Unless file-time checking is disabled, only the newly written "b"
        // files should be opened, while both generations are examined.
        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            if (r.OpenedFiles != filenames.Count)
            {
                throw new Exception($"Opened {r.OpenedFiles}, but should open {filenames.Count}");
            }
            if (r.ExaminedFiles != filenames.Count * 2)
            {
                throw new Exception($"Examined {r.ExaminedFiles}, but should examine open {filenames.Count * 2}");
            }
        }
    }

    // Backup 3: add a "c<key>" copy with fresh random content per file.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Newest version: all three generations plus one extra entry (the folder).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest before deleting:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Same listing, served directly from the backend without the local db.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest without db:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Rebuild the database into a fresh file and verify all three filesets survive.
    testopts["dbpath"] = this.recreatedDatabaseFile;
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List();
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        Assert.AreEqual(3, listResults.Filesets.Count());
    }

    // Per-version listings: version 2 holds 1 generation, version 1 holds 2,
    // version 0 holds all 3 (plus the folder entry in each).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("V2 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("V1 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore from the backend (no local blocks), then verify against the source.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Assert.AreEqual(filenames.Count * 3, r.RestoredFiles);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Partial restore: only the "a" generation, into a temporary folder.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(0, r.Errors.Count());
            Assert.AreEqual(0, r.Warnings.Count());
            Assert.AreEqual(filenames.Count, r.RestoredFiles);
        }
    }
}
public void TestEmptyFolderExclude()
{
    // Verifies that --exclude-empty-folders, --ignore-filenames and file
    // filters interact correctly: folders that become empty through exclusion
    // are dropped from subsequent filesets, step by step.
    var source = DATAFOLDER;
    // Top level folder with no contents
    Directory.CreateDirectory(Path.Combine(source, "empty-toplevel"));
    // Top level folder with contents in one leaf
    Directory.CreateDirectory(Path.Combine(source, "toplevel"));
    // Empty folder
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "empty"));
    // Folder with an excluded file
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "filteredempty"));
    // Folder with contents
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "normal"));
    // Folder with excludefile
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "excludefile"));

    // Write a file that we will use for exclude target
    File.WriteAllLines(Path.Combine(source, "toplevel", "excludefile", "exclude.me"), new string[] { });
    File.WriteAllLines(Path.Combine(source, "toplevel", "excludefile", "anyfile.txt"), new string[] { "data" });

    // Write a file that we will filter
    File.WriteAllLines(Path.Combine(source, "toplevel", "filteredempty", "myfile.txt"), new string[] { "data" });

    // Write a file that we will not filter
    File.WriteAllLines(Path.Combine(source, "toplevel", "normal", "standard.txt"), new string[] { "data" });

    // Get the default options
    var testopts = TestOptions;

    // Create a fileset with all data present
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that we have 4 files and 7 folders
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        // Entries ending with the directory separator are folders; the rest are files.
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 7)
        {
            throw new Exception($"Initial condition not satisfied, found {folders} folders, but expected 7");
        }
        if (files != 4)
        {
            throw new Exception($"Initial condition not satisfied, found {files} files, but expected 4");
        }
    }

    // Toggle the exclude file, and build a new fileset
    // (the sleep gives the new fileset a distinct timestamp)
    System.Threading.Thread.Sleep(5000);
    testopts["ignore-filenames"] = "exclude.me";
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that we have 2 files and 6 folders after excluding the "excludefile" folder
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 6)
        {
            throw new Exception($"Initial condition not satisfied, found {folders} folders, but expected 6");
        }
        if (files != 2)
        {
            throw new Exception($"Initial condition not satisfied, found {files} files, but expected 2");
        }
    }

    // Toggle empty folder excludes, and run a new backup to remove them
    System.Threading.Thread.Sleep(5000);
    testopts["exclude-empty-folders"] = "true";
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that the two empty folders are now removed
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 4)
        {
            throw new Exception($"Empty not satisfied, found {folders} folders, but expected 4");
        }
        if (files != 2)
        {
            throw new Exception($"Empty not satisfied, found {files} files, but expected 2");
        }
    }

    // Filter out one file and rerun the backup to exclude the folder
    System.Threading.Thread.Sleep(5000);
    var excludefilter = new Library.Utility.FilterExpression($"*{System.IO.Path.DirectorySeparatorChar}myfile.txt", false);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER }, excludefilter);
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that the empty folder is now removed
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 3)
        {
            throw new Exception($"Empty not satisfied, found {folders} folders, but expected 3");
        }
        if (files != 1)
        {
            throw new Exception($"Empty not satisfied, found {files} files, but expected 1");
        }
    }

    // Delete the one remaining file and check that we only have the top-level folder in the set
    System.Threading.Thread.Sleep(5000);
    File.Delete(Path.Combine(source, "toplevel", "normal", "standard.txt"));
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER }, excludefilter);
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check we now have only one folder and no files
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 1)
        {
            throw new Exception($"Empty not satisfied, found {folders} folders, but expected 1");
        }
        if (files != 0)
        {
            throw new Exception($"Empty not satisfied, found {files} files, but expected 0");
        }
    }
}
public void RunScriptBefore()
{
    // Verifies the run-script-before exit-code contract, as asserted below:
    //   0 = success, backup runs        1 = success, backup is skipped
    //   2 = warning, backup runs        3 = warning, backup is skipped
    //   4 = error,   backup runs        5 = error,   backup is skipped
    // Also verifies that messages emitted by the script surface as
    // warnings/errors, and that a script exceeding --run-script-timeout
    // produces a warning while the backup still runs.
    var blocksize = 10 * 1024;
    var options = TestOptions;
    options["blocksize"] = blocksize.ToString() + "b";
    options["run-script-timeout"] = "5s";

    // We need a small delay as we run very small backups back-to-back
    var PAUSE_TIME = TimeSpan.FromSeconds(3);

    BorderTests.WriteTestFilesToFolder(DATAFOLDER, blocksize, 0);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, options, null))
    {
        // Baseline backup without any script.
        var res = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        if (res.ParsedResult != ParsedResultType.Success)
        {
            throw new Exception("Unexpected result from base backup");
        }

        // Exit code 0: success and the backup examines files.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(0);
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Success)
        {
            throw new Exception("Unexpected result from backup with return code 0");
        }
        if (res.ExaminedFiles <= 0)
        {
            throw new Exception("Backup did not examine any files for code 0?");
        }

        // Exit code 1: success, but the backup itself is skipped.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(1);
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Success)
        {
            throw new Exception("Unexpected result from backup with return code 1");
        }
        if (res.ExaminedFiles > 0)
        {
            throw new Exception("Backup did examine files for code 1?");
        }

        // Exit code 2: warning, backup runs.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(2);
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Warning)
        {
            throw new Exception("Unexpected result from backup with return code 2");
        }
        if (res.ExaminedFiles <= 0)
        {
            throw new Exception("Backup did not examine any files for code 2?");
        }

        // Exit code 3: warning, backup is skipped.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(3);
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Warning)
        {
            throw new Exception("Unexpected result from backup with return code 3");
        }
        if (res.ExaminedFiles > 0)
        {
            throw new Exception("Backup did examine files for code 3?");
        }

        // Exit code 4: error, backup runs.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(4);
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Error)
        {
            throw new Exception("Unexpected result from backup with return code 4");
        }
        if (res.ExaminedFiles <= 0)
        {
            throw new Exception("Backup did not examine any files for code 4?");
        }

        // Exit code 5: error, backup is skipped.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(5);
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Error)
        {
            throw new Exception("Unexpected result from backup with return code 5");
        }
        if (res.ExaminedFiles > 0)
        {
            throw new Exception("Backup did examine files for code 5?");
        }

        // Exit code 2 with a message: the message must appear among the warnings.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(2, "TEST WARNING MESSAGE");
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Warning)
        {
            throw new Exception("Unexpected result from backup with return code 2");
        }
        // NOTE(review): the message below looks inverted ("did" vs "did not")
        // for this <= 0 branch; runtime string left untouched.
        if (res.ExaminedFiles <= 0)
        {
            throw new Exception("Backup did examine files for code 2?");
        }
        if (!res.Warnings.Any(x => x.IndexOf("TEST WARNING MESSAGE", StringComparison.Ordinal) >= 0))
        {
            throw new Exception("Found no warning message in output for code 2");
        }

        // Exit code 3 with a message: warning captured, backup skipped.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(3, "TEST WARNING MESSAGE");
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Warning)
        {
            throw new Exception("Unexpected result from backup with return code 3");
        }
        if (res.ExaminedFiles > 0)
        {
            throw new Exception("Backup did examine files for code 3?");
        }
        if (!res.Warnings.Any(x => x.IndexOf("TEST WARNING MESSAGE", StringComparison.Ordinal) >= 0))
        {
            throw new Exception("Found no warning message in output for code 3");
        }

        // Exit code 4 with a message: the message must appear among the errors.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(4, "TEST ERROR MESSAGE");
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Error)
        {
            throw new Exception("Unexpected result from backup with return code 4");
        }
        // NOTE(review): message looks inverted for this branch as well; left untouched.
        if (res.ExaminedFiles <= 0)
        {
            throw new Exception("Backup did examine files for code 4?");
        }
        if (!res.Errors.Any(x => x.IndexOf("TEST ERROR MESSAGE", StringComparison.Ordinal) >= 0))
        {
            throw new Exception("Found no error message in output for code 4");
        }

        // Exit code 5 with a message: error captured, backup skipped.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(5, "TEST ERROR MESSAGE");
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Error)
        {
            throw new Exception("Unexpected result from backup with return code 5");
        }
        if (res.ExaminedFiles > 0)
        {
            throw new Exception("Backup did examine files for code 5?");
        }
        if (!res.Errors.Any(x => x.IndexOf("TEST ERROR MESSAGE", StringComparison.Ordinal) >= 0))
        {
            throw new Exception("Found no error message in output for code 5");
        }

        // Script sleeping past the 5s timeout: warning, but the backup still runs.
        System.Threading.Thread.Sleep(PAUSE_TIME);
        options["run-script-before"] = CreateScript(0, sleeptime: 10);
        res = c.Backup(new string[] { DATAFOLDER });
        if (res.ParsedResult != ParsedResultType.Warning)
        {
            throw new Exception("Unexpected result from backup with timeout script");
        }
        if (res.ExaminedFiles <= 0)
        {
            throw new Exception("Backup did not examine any files after timeout?");
        }
    }
}
public static int Backup(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    // Runs a backup from the command line, reporting progress and a summary
    // to the console.
    //
    // args    - first element is the backend URL; the remaining elements are source folders
    // options - parsed command line options, forwarded to the controller
    // filter  - optional include/exclude filter for the source scan (may be null)
    //
    // Returns 0 on success, 1 on success without any upload, 2 when warnings
    // or errors were reported, 50 for a partial/interrupted backup, or the
    // result of PrintWrongNumberOfArguments when too few arguments are given.
    if (args.Count < 2)
    {
        return PrintWrongNumberOfArguments(args, 2);
    }

    var backend = args[0];
    args.RemoveAt(0);
    var dirs = args.ToArray();
    var output = new ConsoleOutput(options);

    Library.Interface.IBackupResults result;
    using (var periodicOutput = new PeriodicOutput(output, TimeSpan.FromSeconds(5)))
    {
        output.MessageEvent(string.Format("Backup started at {0}", DateTime.Now));

        // Translate controller phase changes into console status messages.
        output.PhaseChanged += (phase, previousPhase) =>
        {
            if (previousPhase == Duplicati.Library.Main.OperationPhase.Backup_PostBackupTest)
            {
                output.MessageEvent("Remote backup verification completed");
            }

            if (phase == Duplicati.Library.Main.OperationPhase.Backup_ProcessingFiles)
            {
                output.MessageEvent("Scanning local files ...");
                periodicOutput.SetReady();
            }
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_Finalize)
            {
                periodicOutput.SetFinished();
            }
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_PreBackupVerify)
            {
                output.MessageEvent("Checking remote backup ...");
            }
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_PostBackupVerify)
            {
                output.MessageEvent("Checking remote backup ...");
            }
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_PostBackupTest)
            {
                output.MessageEvent("Verifying remote backup ...");
            }
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_Compact)
            {
                output.MessageEvent("Compacting remote backup ...");
            }
        };

        // Periodic progress line while files are being counted and examined.
        periodicOutput.WriteOutput += (progress, files, size, counting) =>
        {
            output.MessageEvent(string.Format(" {0} files need to be examined ({1}){2}", files, Library.Utility.Utility.FormatSizeString(size), counting ? " (still counting)" : ""));
        };

        using (var i = new Library.Main.Controller(backend, options, output))
            result = i.Backup(dirs, filter);
    }

    if (output.VerboseOutput)
    {
        Library.Utility.Utility.PrintSerializeObject(result, Console.Out);
    }
    else
    {
        var parsedStats = result.BackendStatistics as Duplicati.Library.Interface.IParsedBackendStatistics;
        output.MessageEvent(string.Format(" Duration of backup: {0:hh\\:mm\\:ss}", result.Duration));
        if (parsedStats != null && parsedStats.KnownFileCount > 0)
        {
            output.MessageEvent(string.Format(" Remote files: {0}", parsedStats.KnownFileCount));
            output.MessageEvent(string.Format(" Remote size: {0}", Library.Utility.Utility.FormatSizeString(parsedStats.KnownFileSize)));
        }

        output.MessageEvent(string.Format(" Files added: {0}", result.AddedFiles));
        output.MessageEvent(string.Format(" Files deleted: {0}", result.DeletedFiles));
        output.MessageEvent(string.Format(" Files changed: {0}", result.ModifiedFiles));
        output.MessageEvent(string.Format(" Data uploaded: {0}", Library.Utility.Utility.FormatSizeString(result.BackendStatistics.BytesUploaded)));
        output.MessageEvent(string.Format(" Data downloaded: {0}", Library.Utility.Utility.FormatSizeString(result.BackendStatistics.BytesDownloaded)));
    }

    // FIX: the original condition was "filter != null || !filter.Empty", which
    // dereferences a null filter (NullReferenceException) whenever no filter is
    // supplied and no files were examined. The test-filter hint is only
    // meaningful when a non-empty filter was actually applied.
    if (result.ExaminedFiles == 0 && filter != null && !filter.Empty)
    {
        output.MessageEvent("No files were processed. If this was not intentional you may want to use the \"test-filter\" command");
    }

    // Note: this message is emitted regardless of the return code below,
    // preserving the original behavior.
    output.MessageEvent("Backup completed successfully!");

    //Interrupted = 50
    if (result.PartialBackup)
    {
        return 50;
    }

    //Completed with warnings = 2
    if (result.Warnings.Any() || result.Errors.Any())
    {
        return 2;
    }

    //Success, but no upload = 1
    if (result.BackendStatistics.BytesUploaded == 0)
    {
        return 1;
    }

    return 0;
}
public void RunCommands()
{
    // Verbose variant of the backup/list/repair round-trip: two backups,
    // listings with and without the local database, then a database rebuild.
    TestOptions["verbose"] = "true";
    var data = new byte[1024 * 1024 * 10];

    // First backup: a single 10 MiB file.
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Backup(new string[] { DATAFOLDER });

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("In first backup:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Second backup: add a second file with different random content.
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: 3 entries (presumably the folder plus both files).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Same listing served from the backend, without the local database.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Delete the local database and rebuild it from the remote volumes.
    File.Delete(DBFILE);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Repair();

    // FIX: Assert.AreEqual takes (expected, actual); the arguments were
    // swapped, which made the failure message report the values backwards.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        Assert.AreEqual(2, c.List().Filesets.Count());

    // Version 1 (older) should contain 2 entries.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Oldest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(2, r.Files.Count());
    }

    // Version 0 (newest) should still contain all 3 entries.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }
}
/// <summary>
/// Exercises ListBrokenFiles / PurgeBrokenFiles: performs three incremental backups,
/// deletes the oldest dblock volume from the target, and verifies that broken files
/// are detected (globally and per version) and then purged from all three filesets.
/// </summary>
public void PurgeBrokenFilesTest()
{
    PrepareSourceData();
    var blocksize = 1024 * 10;
    var basedatasize = 0;
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";

    // Generate border-case test files; each backup round includes a growing subset.
    var filenames = BorderTests.WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize).Select(x => "a" + x.Key).ToList();
    var round1 = filenames.Take(filenames.Count / 3).ToArray();
    var round2 = filenames.Take((filenames.Count / 3) * 2).ToArray();

    // Round 1: back up the first third of the files.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round1.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(res.AddedFiles, round1.Length);
    }

    // Remember the oldest dblock volume; deleting it later breaks all three filesets,
    // because the later rounds reuse blocks from the first backup.
    var dblock_file = Directory
        .GetFiles(TARGETFOLDER, "*.dblock.zip.aes")
        .Select(x => new FileInfo(x))
        .OrderBy(x => x.LastWriteTimeUtc)
        .Select(x => x.FullName)
        .First();

    // Sleep so consecutive filesets get distinct timestamps.
    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    // Round 2: the first two thirds of the files (only the new third is added).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round2.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(round2.Length - round1.Length, res.AddedFiles);
    }

    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    // Round 3: everything (no filter).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(filenames.Count - round2.Length, res.AddedFiles);
    }

    // Break the backup by removing the oldest data volume.
    File.Delete(dblock_file);

    long[] affectedfiles;
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        // All three filesets should report broken files; remember the per-version
        // counts (ordered by version) for the per-version check below.
        var brk = c.ListBrokenFiles(null);
        var sets = brk.BrokenFiles.Count();
        var files = brk.BrokenFiles.Sum(x => x.Item3.Count());
        Assert.AreEqual(3, sets);
        Assert.True(files > 0);
        affectedfiles = brk.BrokenFiles.OrderBy(x => x.Item1).Select(x => x.Item3.LongCount()).ToArray();
    }

    // Listing one version at a time must report the same per-version counts.
    for (var i = 0; i < 3; i++)
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = i }), null))
        {
            var brk = c.ListBrokenFiles(null);
            var sets = brk.BrokenFiles.Count();
            var files = brk.BrokenFiles.Sum(x => x.Item3.Count());
            Assert.AreEqual(1, sets);
            Assert.AreEqual(affectedfiles[i], files);
        }
    }

    // Purge the broken files; between deletions and rewrites, all three filesets
    // must have been touched.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var brk = c.PurgeBrokenFiles(null);
        var modFilesets = 0L;
        if (brk.DeleteResults != null)
        {
            modFilesets += brk.DeleteResults.DeletedSets.Count();
        }
        if (brk.PurgeResults != null)
        {
            modFilesets += brk.PurgeResults.RewrittenFileLists;
        }
        Assert.AreEqual(3, modFilesets);
    }
}
/// <summary>
/// Runs a full backup / list / repair / restore cycle over files whose sizes sit on
/// block-size borders (exact block multiples, +/- a few bytes, sub-block sizes),
/// verifying file counts per version and a full and partial restore.
/// </summary>
/// <param name="blocksize">Block size in bytes for the first backup.</param>
/// <param name="basedatasize">Base file size; defaults to blocksize * 1024 when not positive.</param>
/// <param name="modifyOptions">Optional hook to tweak the options dictionary before use.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString() + "b";
    if (modifyOptions != null) { modifyOptions(testopts); }
    if (basedatasize <= 0) { basedatasize = blocksize * 1024; }

    // File-name suffix -> file size; sizes straddle block-size boundaries.
    var filenames = new Dictionary<string, int>();
    filenames[""] = basedatasize;
    filenames["-0"] = 0;
    filenames["-1"] = 1;
    filenames["-p1"] = basedatasize + 1;
    filenames["-p2"] = basedatasize + 2;
    filenames["-p500"] = basedatasize + 500;
    filenames["-m1"] = basedatasize - 1;
    filenames["-m2"] = basedatasize - 2;
    filenames["-m500"] = basedatasize - 500;
    filenames["-s1"] = blocksize / 4 + 6;
    filenames["-s2"] = blocksize / 10 + 6;
    filenames["-l1"] = blocksize * 4 + 6;
    filenames["-l2"] = blocksize * 10 + 6;
    filenames["-bm1"] = blocksize - 1;
    filenames["-b"] = blocksize;
    filenames["-bp1"] = blocksize + 1;

    // One shared buffer, sized for the largest file; each file takes a prefix of it.
    var data = new byte[filenames.Select(x => x.Value).Max()];

    // Round "a": zero-filled files, first backup.
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + k.Key), data.Take(k.Value).ToArray());
    }
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Round "b": same sizes, random content, second backup.
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Round "c": fresh random content per file, third backup.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: 3 rounds of files plus the folder entry itself (the "+ 1").
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest before deleting:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Same listing must work straight from the remote data, without the local db.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest without db:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Recreate the database into a new path ("<name>-recreated.<ext>") via Repair,
    // then verify all three filesets survive the recreation.
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb)) { File.Delete(newdb); }
    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    // Per-version file counts: v2 has only round "a", v1 has "a"+"b", v0 has all.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("V2 after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("V1 after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore into a clean folder, forcing data to come from the backend
    // (no_local_blocks), then verify the restored tree matches the source.
    if (Directory.Exists(RESTOREFOLDER)) { Directory.Delete(RESTOREFOLDER, true); }
    Directory.CreateDirectory(RESTOREFOLDER);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }
    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Partial restore: only the round "a" files.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}
/// <summary>
/// Runs a backup from the command line: the first argument is the backend URL and
/// the remaining arguments are the source folders. Reports progress and a summary,
/// and maps the outcome to an exit code.
/// </summary>
/// <param name="args">Backend URL followed by one or more source folders.</param>
/// <param name="options">Parsed command-line options.</param>
/// <param name="filter">Optional source filter; may be null.</param>
/// <returns>0 = success, 1 = success but nothing uploaded, 2 = completed with warnings/errors, 50 = interrupted (partial backup).</returns>
public static int Backup(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count < 2)
        return PrintWrongNumberOfArguments(args, 2);

    // First argument is the backend URL; the rest are the backup sources.
    var backend = args[0];
    args.RemoveAt(0);
    var dirs = args.ToArray();

    var output = new ConsoleOutput(options);

    Library.Interface.IBackupResults result;
    using (var periodicOutput = new PeriodicOutput(output, TimeSpan.FromSeconds(5)))
    {
        output.MessageEvent(string.Format("Backup started at {0}", DateTime.Now));

        // Translate internal operation phases into user-facing progress messages;
        // the periodic file-count output only runs while files are being processed.
        output.PhaseChanged += (phase, previousPhase) =>
        {
            if (previousPhase == Duplicati.Library.Main.OperationPhase.Backup_PostBackupTest)
                output.MessageEvent("Remote backup verification completed");

            if (phase == Duplicati.Library.Main.OperationPhase.Backup_ProcessingFiles)
            {
                output.MessageEvent("Scanning local files ...");
                periodicOutput.SetReady();
            }
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_Finalize)
                periodicOutput.SetFinished();
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_PreBackupVerify)
                output.MessageEvent("Checking remote backup ...");
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_PostBackupVerify)
                output.MessageEvent("Checking remote backup ...");
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_PostBackupTest)
                output.MessageEvent("Verifying remote backup ...");
            else if (phase == Duplicati.Library.Main.OperationPhase.Backup_Compact)
                output.MessageEvent("Compacting remote backup ...");
        };

        periodicOutput.WriteOutput += (progress, files, size, counting) =>
        {
            output.MessageEvent(string.Format(" {0} files need to be examined ({1}){2}", files, Library.Utility.Utility.FormatSizeString(size), counting ? " (still counting)" : ""));
        };

        using (var i = new Library.Main.Controller(backend, options, output))
            result = i.Backup(dirs, filter);
    }

    if (output.VerboseOutput)
    {
        Library.Utility.Utility.PrintSerializeObject(result, Console.Out);
    }
    else
    {
        var parsedStats = result.BackendStatistics as Duplicati.Library.Interface.IParsedBackendStatistics;
        output.MessageEvent(string.Format(" Duration of backup: {0:hh\\:mm\\:ss}", result.Duration));
        if (parsedStats != null && parsedStats.KnownFileCount > 0)
        {
            output.MessageEvent(string.Format(" Remote files: {0}", parsedStats.KnownFileCount));
            output.MessageEvent(string.Format(" Remote size: {0}", Library.Utility.Utility.FormatSizeString(parsedStats.KnownFileSize)));
        }

        output.MessageEvent(string.Format(" Files added: {0}", result.AddedFiles));
        output.MessageEvent(string.Format(" Files deleted: {0}", result.DeletedFiles));
        output.MessageEvent(string.Format(" Files changed: {0}", result.ModifiedFiles));
        output.MessageEvent(string.Format(" Data uploaded: {0}", Library.Utility.Utility.FormatSizeString(result.BackendStatistics.BytesUploaded)));
        output.MessageEvent(string.Format(" Data downloaded: {0}", Library.Utility.Utility.FormatSizeString(result.BackendStatistics.BytesDownloaded)));
    }

    // Fixed: the original condition was `filter != null || !filter.Empty`, which
    // dereferences `filter` (NullReferenceException) whenever it is null. The intent
    // is to hint at filter problems only when a non-empty filter was supplied.
    if (result.ExaminedFiles == 0 && filter != null && !filter.Empty)
        output.MessageEvent("No files were processed. If this was not intentional you may want to use the \"test-filters\" command");

    output.MessageEvent("Backup completed successfully!");

    //Interrupted = 50
    if (result.PartialBackup)
        return 50;

    //Completed with warnings = 2
    if (result.Warnings.Count() > 0 || result.Errors.Count() > 0)
        return 2;

    //Success, but no upload = 1
    if (result.BackendStatistics.BytesUploaded == 0)
        return 1;

    return 0;
}
public void RunCommands()
{
    var testopts = TestOptions;

    // First backup: one 10 MiB file "a" (zero-filled); every operation in this test
    // additionally asserts that it produced no errors or warnings.
    var data = new byte[1024 * 1024 * 10];
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("In first backup:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Second backup: add file "b" with fresh random content.
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Newest version should list the folder plus both files (3 entries).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Listing without the local database must produce the same result.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Delete the local database and recreate it from the remote volumes.
    File.Delete(DBFILE);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        Assert.AreEqual(0, repairResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List();
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        // Fixed: Assert.AreEqual takes (expected, actual) — the arguments were
        // reversed, which produced misleading failure messages on mismatch.
        Assert.AreEqual(2, listResults.Filesets.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Oldest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(2, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }
}
/// <summary>
/// Runs a backup / list / repair / restore cycle over border-size test files and
/// additionally checks the filetime-based change detection: the second backup must
/// open only the new files unless "disable-filetime-check" is set.
/// </summary>
/// <param name="blocksize">Block size in bytes for the first backup.</param>
/// <param name="basedatasize">Base file size passed to the test-file generator.</param>
/// <param name="modifyOptions">Optional hook to tweak the options dictionary before use.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    if (modifyOptions != null) { modifyOptions(testopts); }

    // Suffix -> size map of generated "a*" files on block-size borders.
    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Do a "touch" on files to trigger a re-scan, which should do nothing
    //foreach (var k in filenames)
    //if (File.Exists(Path.Combine(DATAFOLDER, "a" + k.Key)))
    //File.SetLastWriteTime(Path.Combine(DATAFOLDER, "a" + k.Key), DateTime.Now.AddSeconds(5));

    // Round "b": random content, same size set.
    var data = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var r = c.Backup(new string[] { DATAFOLDER });
        // With the filetime check enabled, only the new "b*" files should be opened,
        // while all "a*" and "b*" files are examined.
        // NOTE(review): these use `throw new Exception` rather than the Assert calls
        // used elsewhere in this file — consider unifying; confirm nothing depends
        // on the plain Exception type.
        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            if (r.OpenedFiles != filenames.Count)
            {
                throw new Exception($"Opened {r.OpenedFiles}, but should open {filenames.Count}");
            }
            if (r.ExaminedFiles != filenames.Count * 2)
            {
                throw new Exception($"Examined {r.ExaminedFiles}, but should examine open {filenames.Count * 2}");
            }
        }
    }

    // Round "c": fresh random content per file, third backup.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: 3 rounds of files plus the folder entry itself (the "+ 1").
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest before deleting:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Same listing straight from the remote data, without the local db.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest without db:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Recreate the database into "<name>-recreated.<ext>" via Repair and verify
    // all three filesets survive.
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb)) { File.Delete(newdb); }
    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    // Per-version file counts: v2 has round "a", v1 has "a"+"b", v0 has all three.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("V2 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("V1 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore into a clean folder, forcing data from the backend; metadata
    // verification is skipped when the "skip-metadata" option is set.
    if (Directory.Exists(RESTOREFOLDER)) { Directory.Delete(RESTOREFOLDER, true); }
    Directory.CreateDirectory(RESTOREFOLDER);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }
    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Partial restore: only the round "a" files.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}
/// <summary>
/// Older backup / list / repair cycle over hand-listed border-size files ("a*", "b*",
/// "c*" rounds of 11 files each), verifying the hard-coded per-version file counts:
/// 12 (11 files + folder), 23 (22 + folder) and 34 (33 + folder).
/// </summary>
/// <param name="blocksize">Block size in bytes for the first backup.</param>
/// <param name="basedatasize">Base file size; defaults to blocksize * 1024 when not positive.</param>
private void RunCommands(int blocksize, int basedatasize = 0)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    // NOTE(review): unlike the other RunCommands variants, the value has no "b"
    // (bytes) suffix — presumably the raw number parses as bytes here; confirm
    // against the option parser before unifying.
    testopts["blocksize"] = blocksize.ToString();
    if (basedatasize <= 0) { basedatasize = blocksize * 1024; }

    // One shared buffer; each file is a prefix of it. Sizes straddle the base size
    // (+/- 1, 2, 500 bytes) and the block size (fractions and multiples).
    var data = new byte[basedatasize + 500];
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), data.Take(basedatasize).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-p1"), data.Take(basedatasize + 1).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-p2"), data.Take(basedatasize + 2).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-p500"), data.Take(basedatasize + 500).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-m1"), data.Take(basedatasize - 1).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-m2"), data.Take(basedatasize - 2).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-m500"), data.Take(basedatasize - 500).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-s1"), data.Take(blocksize / 4 + 6).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-s2"), data.Take(blocksize / 10 + 6).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-l1"), data.Take(blocksize * 4 + 6).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a-l2"), data.Take(blocksize * 10 + 6).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("In first backup:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Round "b": one randomization of the shared buffer, same 11 sizes.
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b"), data.Take(basedatasize).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-p1"), data.Take(basedatasize + 1).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-p2"), data.Take(basedatasize + 2).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-p500"), data.Take(basedatasize + 500).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-m1"), data.Take(basedatasize - 1).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-m2"), data.Take(basedatasize - 2).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-m500"), data.Take(basedatasize - 500).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-s1"), data.Take(blocksize / 4 + 6).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-s2"), data.Take(blocksize / 10 + 6).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-l1"), data.Take(blocksize * 4 + 6).ToArray());
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b-l2"), data.Take(blocksize * 10 + 6).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Round "c": re-randomize the buffer before every file so no two share content.
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c"), data.Take(basedatasize).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-p1"), data.Take(basedatasize + 1).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-p2"), data.Take(basedatasize + 2).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-p500"), data.Take(basedatasize + 500).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-m1"), data.Take(basedatasize - 1).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-m2"), data.Take(basedatasize - 2).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-m500"), data.Take(basedatasize - 500).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-s1"), data.Take(blocksize / 4 + 6).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-s2"), data.Take(blocksize / 10 + 6).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-l1"), data.Take(blocksize * 4 + 6).ToArray());
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "c-l2"), data.Take(blocksize * 10 + 6).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: 33 files plus the folder entry = 34.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(34, r.Files.Count());
    }

    // Same listing straight from the remote data, without the local db.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(34, r.Files.Count());
    }

    // Delete the local database and recreate it from the remote volumes.
    File.Delete(DBFILE);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    // Per-version file counts: v2 = 11 + folder, v1 = 22 + folder, v0 = 33 + folder.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("V2 after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(12, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("V1 after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(23, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(34, r.Files.Count());
    }
}
/// <summary>
/// Exercises PurgeFiles: builds three incremental backups from border-size files,
/// purges files across all versions, a single version at a time, and subsets of
/// versions, then verifies fileset and file counts before and after a final backup.
/// </summary>
public void PurgeTest()
{
    var blocksize = 1024 * 10;
    var basedatasize = 0;
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";

    // Generate border-case test files; each backup round includes a growing subset.
    var filenames = BorderTests.WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize).Select(x => "a" + x.Key).ToList();
    var round1 = filenames.Take(filenames.Count / 3).ToArray();
    var round2 = filenames.Take((filenames.Count / 3) * 2).ToArray();
    var round3 = filenames;

    // Round 1: back up the first third of the files.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round1.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(res.AddedFiles, round1.Length);
    }

    // Sleep between rounds so the filesets get distinct timestamps.
    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    // Round 2: the first two thirds (only the new third is added).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round2.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(res.AddedFiles, round2.Length - round1.Length);
    }

    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    // Round 3: everything (no filter).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(res.AddedFiles, filenames.Count - round2.Length);
    }

    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));
    // Remember when the last backup finished; the purge operations below create
    // filesets with drifting timestamps that must stay behind "now" later on.
    var last_ts = DateTime.Now;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { list_sets_only = true }), null))
    {
        var inf = c.List();
        Assert.AreEqual(0, inf.Errors.Count());
        Assert.AreEqual(0, inf.Warnings.Count());
        var filesets = inf.Filesets.Count();
        Assert.AreEqual(3, filesets, "Incorrect number of initial filesets");
    }

    // All files plus the folder entry itself (the "+ 1").
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List("*");
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        var filecount = listResults.Files.Count();
        Assert.AreEqual(filenames.Count + 1, filecount, "Incorrect number of initial files");
    }

    // Pick two round-1 files: one purged from every version at once, one purged
    // version-by-version.
    var allversion_candidate = round1.First();
    var single_version_candidate = round1.Skip(1).First();

    // Purge across all versions: the file exists in all 3 filesets, so 3 lists are
    // rewritten and 3 file entries removed.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.PurgeFiles(new Library.Utility.FilterExpression("*" + Path.DirectorySeparatorChar + allversion_candidate));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(3, res.RewrittenFileLists, "Incorrect number of rewritten filesets after all-versions purge");
        Assert.AreEqual(3, res.RemovedFileCount, "Incorrect number of removed files after all-versions purge");
    }

    // Purge the same file from one version at a time: 1 rewrite / 1 removal each.
    for (var i = 0; i < 3; i++)
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = i }), null))
        {
            var res = c.PurgeFiles(new Library.Utility.FilterExpression(Path.Combine(this.DATAFOLDER, single_version_candidate)));
            Assert.AreEqual(0, res.Errors.Count());
            Assert.AreEqual(0, res.Warnings.Count());
            Assert.AreEqual(1, res.RewrittenFileLists, "Incorrect number of rewritten filesets after single-versions purge");
            Assert.AreEqual(1, res.RemovedFileCount, "Incorrect number of removed files after single-versions purge");
        }
    }

    // Two files that first appear in round 2 exist in 2 filesets: 2 rewrites, 4 removals.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.PurgeFiles(new Library.Utility.FilterExpression(round2.Skip(round1.Length).Take(2).Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(2, res.RewrittenFileLists, "Incorrect number of rewritten filesets after 2-versions purge");
        Assert.AreEqual(4, res.RemovedFileCount, "Incorrect number of removed files after 2-versions purge");
    }

    // Two files that first appear in round 3 exist only in the newest fileset.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.PurgeFiles(new Library.Utility.FilterExpression(round3.Skip(round2.Length).Take(2).Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(1, res.RewrittenFileLists, "Incorrect number of rewritten filesets after 1-versions purge");
        Assert.AreEqual(2, res.RemovedFileCount, "Incorrect number of removed files after 1-versions purge");
    }

    // Since we make the operations back-to-back, the purge timestamp can drift beyond the current time
    var wait_target = last_ts.AddSeconds(10) - DateTime.Now;
    if (wait_target.TotalMilliseconds > 0)
    {
        System.Threading.Thread.Sleep(wait_target);
    }

    // Still 3 filesets; 6 distinct files were purged from the newest view.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var listinfo = c.List("*");
        Assert.AreEqual(0, listinfo.Errors.Count());
        Assert.AreEqual(0, listinfo.Warnings.Count());
        var filecount = listinfo.Files.Count();
        listinfo = c.List();
        Assert.AreEqual(0, listinfo.Errors.Count());
        Assert.AreEqual(0, listinfo.Warnings.Count());
        var filesets = listinfo.Filesets.Count();

        Assert.AreEqual(3, filesets, "Incorrect number of filesets after purge");
        Assert.AreEqual(filenames.Count - 6 + 1, filecount, "Incorrect number of files after purge");
    }

    // A fresh backup re-adds the purged files as a fourth fileset.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var listinfo = c.List("*");
        Assert.AreEqual(0, listinfo.Errors.Count());
        Assert.AreEqual(0, listinfo.Warnings.Count());
        var files = listinfo.Files.ToArray();
        var filecount = files.Length;
        listinfo = c.List();
        Assert.AreEqual(0, listinfo.Errors.Count());
        Assert.AreEqual(0, listinfo.Warnings.Count());
        var filesets = listinfo.Filesets.ToArray();

        Console.WriteLine("Listing final version information");
        Console.WriteLine("Versions:");
        Console.WriteLine("  " + string.Join(Environment.NewLine + "  ", filesets.Select(x => string.Format("{0}: {1}, {2} {3}", x.Version, x.Time, x.FileCount, x.FileSizes))));
        Console.WriteLine("Files:");
        Console.WriteLine("  " + string.Join(Environment.NewLine + "  ", files.Select(x => string.Format("{0}: {1}", x.Path, string.Join(" - ", x.Sizes.Select(y => y.ToString()))))));

        Assert.AreEqual(4, filesets.Length, "Incorrect number of filesets after final backup");
        Assert.AreEqual(filenames.Count + 1, filecount, "Incorrect number of files after final backup");
    }
}
/// <summary>
/// Shared driver for blocksize-sensitive round-trip tests.
/// Creates files whose sizes straddle interesting boundaries relative to
/// <paramref name="blocksize"/> and <paramref name="basedatasize"/>, runs three
/// backup generations ("a", "b", "c" prefixes), recreates the local database
/// from the remote data, then verifies list counts per version and both a full
/// and a partial restore.
/// </summary>
/// <param name="blocksize">Block size in bytes; passed to the first backup as "--blocksize=Nb".</param>
/// <param name="basedatasize">Base file size in bytes; defaults to blocksize * 1024 when &lt;= 0.</param>
/// <param name="modifyOptions">Optional hook that lets callers tweak the option dictionary before use.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    if (basedatasize <= 0)
        basedatasize = blocksize * 1024;

    // File-name suffix -> file size. Sizes probe edge cases around the base
    // data size (+/- 1, 2, 500 bytes) and around the block size itself
    // (exactly one block, one byte less/more, fractions and multiples).
    var filenames = new Dictionary<string, int>
    {
        [""] = basedatasize,
        ["-0"] = 0,
        ["-1"] = 1,
        ["-p1"] = basedatasize + 1,
        ["-p2"] = basedatasize + 2,
        ["-p500"] = basedatasize + 500,
        ["-m1"] = basedatasize - 1,
        ["-m2"] = basedatasize - 2,
        ["-m500"] = basedatasize - 500,
        ["-s1"] = blocksize / 4 + 6,
        ["-s2"] = blocksize / 10 + 6,
        ["-l1"] = blocksize * 4 + 6,
        ["-l2"] = blocksize * 10 + 6,
        ["-bm1"] = blocksize - 1,
        ["-b"] = blocksize,
        ["-bp1"] = blocksize + 1,
    };

    // First generation ("a" prefix): all files are zero-filled prefixes of one buffer.
    var data = new byte[filenames.Values.Max()];
    foreach (var k in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + k.Key), data.Take(k.Value).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Second generation ("b" prefix): all files share one random buffer.
    new Random().NextBytes(data);
    foreach (var k in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Third generation ("c" prefix): each file gets freshly randomized content.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version sees all three generations; "+ 1" accounts for the folder entry.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Same query answered purely from remote data, bypassing the local database.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Recreate the database from the backend into a "-recreated" sibling path
    // and verify all three filesets survive the round-trip.
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb))
        File.Delete(newdb);

    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    // Each older version should contain one generation fewer than the next.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore into a clean folder, forcing block data to come from the
    // backend (no_local_blocks), then compare against the source folder.
    if (Directory.Exists(RESTOREFOLDER))
        Directory.Delete(RESTOREFOLDER, true);
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Partial restore: only the first ("a"-prefixed) generation.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}