/// <summary>
/// Verifies a full restore by comparing each original source folder against
/// its restored counterpart, optionally including metadata comparison.
/// </summary>
/// <param name="source">Label used in the timing log entry.</param>
/// <param name="actualfolders">The original source folders.</param>
/// <param name="restorefoldernames">The restored folders, index-aligned with <paramref name="actualfolders"/>.</param>
/// <param name="verifymetadata">True to also compare file metadata.</param>
private static void VerifyFullRestore(string source, string[] actualfolders, string[] restorefoldernames, bool verifymetadata)
{
    // Fail fast with a clear message instead of an IndexOutOfRangeException
    // when fewer restore folder names than source folders are supplied.
    if (restorefoldernames.Length < actualfolders.Length)
        throw new ArgumentException(string.Format("Expected at least {0} restore folder names, but got {1}", actualfolders.Length, restorefoldernames.Length), nameof(restorefoldernames));

    using (new Timer("Verification of " + source))
    {
        for (int j = 0; j < actualfolders.Length; j++)
            TestUtils.VerifyDir(actualfolders[j], restorefoldernames[j], verifymetadata);
    }
}
/// <summary>
/// Runs a full backup / modify / repair / restore scenario against a local
/// file:// backend: three backup generations are created ("a", "b" and "c"
/// file prefixes), the local database is recreated from remote data, every
/// version is listed (with and without the local database), and a full plus
/// a partial restore are verified against the source data. Results of each
/// operation are asserted to contain no errors (and usually no warnings).
/// </summary>
/// <param name="blocksize">Block size in bytes for the initial backup.</param>
/// <param name="basedatasize">Base test-file size; 0 lets the helper pick a default.</param>
/// <param name="modifyOptions">Optional hook to adjust the option dictionary before the first backup.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    // Blocksize is given with the "b" (bytes) suffix.
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    // Generation 1: back up the freshly written "a"-prefixed files.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    // Listing version 0 must succeed cleanly with the auto-detected settings.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        IListResults listResults = c.List("*");
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Do a "touch" on files to trigger a re-scan, which should do nothing
    //foreach (var k in filenames)
    //if (File.Exists(Path.Combine(DATAFOLDER, "a" + k.Key)))
    //File.SetLastWriteTime(Path.Combine(DATAFOLDER, "a" + k.Key), DateTime.Now.AddSeconds(5));

    // Generation 2: "b"-prefixed files all share one random data pattern.
    var data = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var r = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());

        // With timestamp checking enabled, only the new "b" files should be
        // opened, while both "a" and "b" files are examined.
        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            if (r.OpenedFiles != filenames.Count)
            {
                throw new Exception($"Opened {r.OpenedFiles}, but should open {filenames.Count}");
            }
            if (r.ExaminedFiles != filenames.Count * 2)
            {
                throw new Exception($"Examined {r.ExaminedFiles}, but should examine open {filenames.Count * 2}");
            }
        }
    }

    // Generation 3: each "c"-prefixed file gets its own random pattern.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // The newest version contains all three generations plus the root folder entry.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest before deleting:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Same listing, but read directly from the remote data without the local database.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest without db:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Switch to a fresh database path and recreate it from the remote data.
    testopts["dbpath"] = this.recreatedDatabaseFile;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    // The recreated database must know about all three filesets.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List();
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        Assert.AreEqual(3, listResults.Filesets.Count());
    }

    // Version 2 (oldest) holds one generation, version 1 two, version 0 all three.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("V2 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("V1 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore of the newest version, forcing remote reads (no local blocks).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Assert.AreEqual(filenames.Count * 3, r.RestoredFiles);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Partial restore: only the "a"-prefixed files, into a temporary folder.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(0, r.Errors.Count());
            Assert.AreEqual(0, r.Warnings.Count());
            Assert.AreEqual(filenames.Count, r.RestoredFiles);
        }
    }
}
/// <summary>
/// Exercises the full command-line workflow against the given backend target:
/// incremental backups as source folders are added, a folder rename, bulk
/// deletions, compare, repeated logfile changes, compact, and finally partial
/// and full restores (with and without the local database), each verified
/// against the on-disk source data.
/// </summary>
/// <param name="target">The backend URL passed to every command.</param>
private void DoRunCommands(string target)
{
    // Common option arguments appended to every command invocation.
    var opts = from n in TestOptions select string.Format("--{0}=\"{1}\"", n.Key, n.Value);
    var backupargs = (new string[] { "backup", target, DATAFOLDER }.Union(opts)).ToArray();

    // Materialize the folder list once: the original deferred enumerable was
    // enumerated twice (Count + foreach) and would also be affected by the
    // Directory.Move calls below if it is a lazy directory enumeration.
    var sourcedatafolders = SourceDataFolders?.ToList();
    if (sourcedatafolders == null || sourcedatafolders.Count < 3)
    {
        ProgressWriteLine($"ERROR: A minimum of 3 data folders are required in {SOURCEFOLDER}.");
        throw new Exception("Failed during initial minimum data folder check");
    }

    foreach (var n in sourcedatafolders)
    {
        var foldername = Path.GetFileName(n);
        var targetfolder = Path.Combine(DATAFOLDER, foldername);
        ProgressWriteLine("Adding folder {0} to source", foldername);
        Directory.Move(n, targetfolder);

        var size = Directory.EnumerateFiles(targetfolder, "*", SearchOption.AllDirectories).Select(x => new FileInfo(x).Length).Sum();
        ProgressWriteLine("Running backup with {0} data added ...", Duplicati.Library.Utility.Utility.FormatSizeString(size));
        using (new Library.Logging.Timer(LOGTAG, "BackupWithDataAdded", string.Format("Backup with {0} data added", Duplicati.Library.Utility.Utility.FormatSizeString(size))))
            Duplicati.CommandLine.Program.RealMain(backupargs);

        // Verify the remote data after each incremental backup.
        ProgressWriteLine("Testing data ...");
        using (new Library.Logging.Timer(LOGTAG, "TestRemoteData", "Test remote data"))
            if (Duplicati.CommandLine.Program.RealMain((new string[] { "test", target, "all" }.Union(opts)).ToArray()) != 0)
            {
                throw new Exception("Failed during remote verification");
            }
    }

    // A backup with no changes should be a cheap no-op generation.
    ProgressWriteLine("Running unchanged backup ...");
    using (new Library.Logging.Timer(LOGTAG, "UnchangedBackup", "Unchanged backup"))
        Duplicati.CommandLine.Program.RealMain(backupargs);

    // ToList avoids re-enumerating the directory for each Count/Skip/First call.
    var datafolders = Directory.EnumerateDirectories(DATAFOLDER).ToList();
    var f = datafolders.Skip(datafolders.Count / 2).First();
    ProgressWriteLine("Renaming folder {0}", Path.GetFileName(f));
    Directory.Move(f, Path.Combine(Path.GetDirectoryName(f), Path.GetFileName(f) + "-renamed"));

    ProgressWriteLine("Running backup with renamed folder...");
    using (new Library.Logging.Timer(LOGTAG, "BackupWithRenamedFolder", "Backup with renamed folder"))
        Duplicati.CommandLine.Program.RealMain(backupargs);

    datafolders = Directory.EnumerateDirectories(DATAFOLDER).ToList();
    ProgressWriteLine("Deleting data");
    var rm1 = datafolders.First();
    var rm2 = datafolders.Skip(1).First();
    var rm3 = datafolders.Skip(2).First();
    Directory.Delete(rm1, true);
    Directory.Delete(rm2, true);

    // Materialize the file list before deleting: deleting files while lazily
    // enumerating the same directory tree has unspecified results, and the
    // original also enumerated twice (Count + Take).
    var rmfiles = Directory.EnumerateFiles(rm3, "*", SearchOption.AllDirectories).ToList();
    foreach (var n in rmfiles.Take(rmfiles.Count / 2))
    {
        File.Delete(n);
    }

    ProgressWriteLine("Running backup with deleted data...");
    using (new Library.Logging.Timer(LOGTAG, "BackupWithDeletedData", "Backup with deleted data"))
        Duplicati.CommandLine.Program.RealMain(backupargs);

    ProgressWriteLine("Testing the compare method ...");
    using (new Library.Logging.Timer(LOGTAG, "CompareMethod", "Compare method"))
        Duplicati.CommandLine.Program.RealMain((new string[] { "compare", target, "0", "1" }.Union(opts)).ToArray());

    // Repeatedly change a single file (the logfile copy) to create several
    // small incremental generations for the compact step below.
    for (var i = 0; i < 5; i++)
    {
        ProgressWriteLine("Running backup with changed logfile {0} of {1} ...", i + 1, 5);
        File.Copy(LOGFILE, Path.Combine(SOURCEFOLDER, Path.GetFileName(LOGFILE)), true);
        using (new Library.Logging.Timer(LOGTAG, "BackupWithLogfileChange", string.Format("Backup with logfilechange {0}", i + 1)))
            Duplicati.CommandLine.Program.RealMain(backupargs);
    }

    ProgressWriteLine("Compacting data ...");
    using (new Library.Logging.Timer(LOGTAG, "Compacting", "Compacting"))
        Duplicati.CommandLine.Program.RealMain((new string[] { "compact", target, "--small-file-max-count=2" }.Union(opts)).ToArray());

    datafolders = Directory.EnumerateDirectories(DATAFOLDER).ToList();
    var rf = datafolders.Skip(datafolders.Count - 2).First();

    ProgressWriteLine("Partial restore of {0} ...", Path.GetFileName(rf));
    using (new Library.Logging.Timer(LOGTAG, "PartialRestore", "Partial restore"))
        Duplicati.CommandLine.Program.RealMain((new string[] { "restore", target, rf + "*", "--restore-path=\"" + RESTOREFOLDER + "\"" }.Union(opts)).ToArray());

    ProgressWriteLine("Verifying partial restore ...");
    // Log tag typo fixed ("Verifiation" -> "Verification") to match the tag
    // used for the second partial-restore verification below.
    using (new Library.Logging.Timer(LOGTAG, "VerificationOfPartialRestore", "Verification of partial restored files"))
        TestUtils.VerifyDir(rf, RESTOREFOLDER, true);
    Directory.Delete(RESTOREFOLDER, true);

    ProgressWriteLine("Partial restore of {0} without local db...", Path.GetFileName(rf));
    using (new Library.Logging.Timer(LOGTAG, "PartialRestoreWithoutLocalDb", "Partial restore without local db"))
        Duplicati.CommandLine.Program.RealMain((new string[] { "restore", target, rf + "*", "--restore-path=\"" + RESTOREFOLDER + "\"", "--no-local-db" }.Union(opts)).ToArray());

    ProgressWriteLine("Verifying partial restore ...");
    using (new Library.Logging.Timer(LOGTAG, "VerificationOfPartialRestore", "Verification of partial restored files"))
        TestUtils.VerifyDir(rf, RESTOREFOLDER, true);
    Directory.Delete(RESTOREFOLDER, true);

    ProgressWriteLine("Full restore ...");
    using (new Library.Logging.Timer(LOGTAG, "FullRestore", "Full restore"))
        Duplicati.CommandLine.Program.RealMain((new string[] { "restore", target, "*", "--restore-path=\"" + RESTOREFOLDER + "\"" }.Union(opts)).ToArray());

    ProgressWriteLine("Verifying full restore ...");
    using (new Library.Logging.Timer(LOGTAG, "VerificationOfFullRestore", "Verification of restored files"))
        foreach (var s in Directory.EnumerateDirectories(DATAFOLDER))
        {
            TestUtils.VerifyDir(s, Path.Combine(RESTOREFOLDER, Path.GetFileName(s)), true);
        }
    Directory.Delete(RESTOREFOLDER, true);

    ProgressWriteLine("Full restore without local db...");
    using (new Library.Logging.Timer(LOGTAG, "FullRestoreWithoutDb", "Full restore without local db"))
        Duplicati.CommandLine.Program.RealMain((new string[] { "restore", target, "*", "--restore-path=\"" + RESTOREFOLDER + "\"", "--no-local-db" }.Union(opts)).ToArray());

    ProgressWriteLine("Verifying full restore ...");
    using (new Library.Logging.Timer(LOGTAG, "VerificationOfFullRestoreWithoutDb", "Verification of restored files"))
        foreach (var s in Directory.EnumerateDirectories(DATAFOLDER))
        {
            TestUtils.VerifyDir(s, Path.Combine(RESTOREFOLDER, Path.GetFileName(s)), true);
        }

    ProgressWriteLine("Testing data ...");
    using (new Library.Logging.Timer(LOGTAG, "TestRemoteData", "Test remote data"))
        if (Duplicati.CommandLine.Program.RealMain((new string[] { "test", target, "all" }.Union(opts)).ToArray()) != 0)
        {
            throw new Exception("Failed during final remote verification");
        }
}
/// <summary>
/// Creates three generations of test files ("a": zero-filled, "b": one shared
/// random pattern, "c": a fresh random pattern per file), backs each up to a
/// local file:// backend, recreates the database from the remote data, and
/// checks file counts for every version plus a full and a partial restore.
/// </summary>
/// <param name="blocksize">Block size in bytes used for the initial backup.</param>
/// <param name="basedatasize">Base test-file size; non-positive selects blocksize * 1024.</param>
/// <param name="modifyOptions">Optional hook to tweak the options before running.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    if (basedatasize <= 0)
        basedatasize = blocksize * 1024;

    // File sizes probe the boundaries around the base size and the block size.
    var filenames = new Dictionary<string, int>
    {
        [""] = basedatasize,
        ["-0"] = 0,
        ["-1"] = 1,
        ["-p1"] = basedatasize + 1,
        ["-p2"] = basedatasize + 2,
        ["-p500"] = basedatasize + 500,
        ["-m1"] = basedatasize - 1,
        ["-m2"] = basedatasize - 2,
        ["-m500"] = basedatasize - 500,
        ["-s1"] = blocksize / 4 + 6,
        ["-s2"] = blocksize / 10 + 6,
        ["-l1"] = blocksize * 4 + 6,
        ["-l2"] = blocksize * 10 + 6,
        ["-bm1"] = blocksize - 1,
        ["-b"] = blocksize,
        ["-bp1"] = blocksize + 1
    };

    var buffer = new byte[filenames.Values.Max()];

    // Generation 1: all-zero content under the "a" prefix.
    foreach (var entry in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + entry.Key), buffer.Take(entry.Value).ToArray());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        controller.Backup(new string[] { DATAFOLDER });

    // After the first backup the blocksize and hash algorithms must be auto-detected.
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    // Smoke-test listing the newest version with the auto-detected settings.
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        controller.List("*");
    }

    // Generation 2: one shared random pattern for all "b" files.
    new Random().NextBytes(buffer);
    foreach (var entry in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + entry.Key), buffer.Take(entry.Value).ToArray());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        controller.Backup(new string[] { DATAFOLDER });

    // Generation 3: a fresh random pattern for each "c" file.
    var rng = new Random();
    foreach (var entry in filenames)
    {
        rng.NextBytes(buffer);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + entry.Key), buffer.Take(entry.Value).ToArray());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        controller.Backup(new string[] { DATAFOLDER });

    // Newest version: all three generations plus the root folder entry.
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
        Assert.AreEqual((filenames.Count * 3) + 1, controller.List("*").Files.Count());

    // Same count when reading directly from the remote data, skipping the local db.
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
        Assert.AreEqual((filenames.Count * 3) + 1, controller.List("*").Files.Count());

    // Recreate the database from remote data at a fresh path.
    var recreatedDb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(recreatedDb))
        File.Delete(recreatedDb);
    testopts["dbpath"] = recreatedDb;

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        controller.Repair();

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, controller.List().Filesets.Count());

    // Per-version counts: oldest holds one generation, then two, then three.
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
        Assert.AreEqual((filenames.Count * 1) + 1, controller.List("*").Files.Count());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
        Assert.AreEqual((filenames.Count * 2) + 1, controller.List("*").Files.Count());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
        Assert.AreEqual((filenames.Count * 3) + 1, controller.List("*").Files.Count());

    // Full restore into a clean folder, forcing remote reads.
    if (Directory.Exists(RESTOREFOLDER))
        Directory.Delete(RESTOREFOLDER, true);
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
        Assert.AreEqual(filenames.Count * 3, controller.Restore(null).FilesRestored);

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Partial restore: only the "a"-prefixed files, into a temporary folder.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
            Assert.AreEqual(filenames.Count, controller.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" }).FilesRestored);
    }
}
/// <summary>
/// Drives a three-generation backup scenario against a local file:// backend,
/// checking that the second backup only opens the new files, then validates
/// per-version listings, database recreation, and full plus partial restores.
/// </summary>
/// <param name="blocksize">Block size in bytes used for the initial backup.</param>
/// <param name="basedatasize">Base test-file size forwarded to the file writer.</param>
/// <param name="modifyOptions">Optional hook to tweak the options before running.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    // Generation 1: back up the freshly written "a"-prefixed files.
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    // Smoke-test listing the newest version with the auto-detected settings.
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        ctrl.List("*");
    }

    // Generation 2: "b"-prefixed files all share one random data pattern.
    var randomData = new byte[filenames.Values.Max()];
    new Random().NextBytes(randomData);
    foreach (var entry in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + entry.Key), randomData.Take(entry.Value).ToArray());

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = ctrl.Backup(new string[] { DATAFOLDER });

        // With timestamp checking enabled, only the new "b" files should be
        // opened, while both the "a" and "b" files are examined.
        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            if (res.OpenedFiles != filenames.Count)
                throw new Exception($"Opened {res.OpenedFiles}, but should open {filenames.Count}");
            if (res.ExaminedFiles != filenames.Count * 2)
                throw new Exception($"Examined {res.ExaminedFiles}, but should examine open {filenames.Count * 2}");
        }
    }

    // Generation 3: each "c"-prefixed file gets its own random pattern.
    var generator = new Random();
    foreach (var entry in filenames)
    {
        generator.NextBytes(randomData);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + entry.Key), randomData.Take(entry.Value).ToArray());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Backup(new string[] { DATAFOLDER });

    // Newest version: all three generations plus the root folder entry.
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
        Assert.AreEqual((filenames.Count * 3) + 1, ctrl.List("*").Files.Count());

    // Same count when reading directly from the remote data, skipping the local db.
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
        Assert.AreEqual((filenames.Count * 3) + 1, ctrl.List("*").Files.Count());

    // Recreate the database from remote data at a fresh path.
    var recreatedDbPath = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(recreatedDbPath))
        File.Delete(recreatedDbPath);
    testopts["dbpath"] = recreatedDbPath;

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Repair();

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, ctrl.List().Filesets.Count());

    // Per-version counts: oldest holds one generation, then two, then three.
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
        Assert.AreEqual((filenames.Count * 1) + 1, ctrl.List("*").Files.Count());

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
        Assert.AreEqual((filenames.Count * 2) + 1, ctrl.List("*").Files.Count());

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
        Assert.AreEqual((filenames.Count * 3) + 1, ctrl.List("*").Files.Count());

    // Full restore into a clean folder, forcing remote reads.
    if (Directory.Exists(RESTOREFOLDER))
        Directory.Delete(RESTOREFOLDER, true);
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
        Assert.AreEqual(filenames.Count * 3, ctrl.Restore(null).FilesRestored);

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Partial restore: only the "a"-prefixed files, into a temporary folder.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
            Assert.AreEqual(filenames.Count, ctrl.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" }).FilesRestored);
    }
}