/// <summary>
/// Drives a full backup / list / database-recreate / restore cycle against a local
/// file backend, using three generations of test files sized around the block size.
/// </summary>
/// <param name="blocksize">Block size in bytes, passed as --blocksize to the first backup.</param>
/// <param name="basedatasize">Base file size; non-positive selects blocksize * 1024.</param>
/// <param name="modifyOptions">Optional hook to adjust the option dictionary before running.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var options = TestOptions;
    options["verbose"] = "true";
    options["blocksize"] = $"{blocksize}b";
    modifyOptions?.Invoke(options);

    if (basedatasize <= 0)
        basedatasize = blocksize * 1024;

    // Filename suffix -> file length, probing boundaries around the base size and the block size.
    var sizes = new Dictionary<string, int>
    {
        [""] = basedatasize,
        ["-0"] = 0,
        ["-1"] = 1,
        ["-p1"] = basedatasize + 1,
        ["-p2"] = basedatasize + 2,
        ["-p500"] = basedatasize + 500,
        ["-m1"] = basedatasize - 1,
        ["-m2"] = basedatasize - 2,
        ["-m500"] = basedatasize - 500,
        ["-s1"] = blocksize / 4 + 6,
        ["-s2"] = blocksize / 10 + 6,
        ["-l1"] = blocksize * 4 + 6,
        ["-l2"] = blocksize * 10 + 6,
        ["-bm1"] = blocksize - 1,
        ["-b"] = blocksize,
        ["-bp1"] = blocksize + 1,
    };

    // One buffer large enough for the biggest file; all zeroes for the first ("a") generation.
    var buffer = new byte[sizes.Select(x => x.Value).Max()];
    foreach (var entry in sizes)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + entry.Key), buffer.Take(entry.Value).ToArray());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options, null))
        controller.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    options.Remove("blocksize");
    options.Remove("block-hash-algorithm");
    options.Remove("file-hash-algorithm");

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options.Expand(new { version = 0 }), null))
    {
        var listing = controller.List("*");
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, listing.Files.Select(x => x.Path)));
    }

    // Second generation ("b"): every file shares a single random buffer.
    new Random().NextBytes(buffer);
    foreach (var entry in sizes)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + entry.Key), buffer.Take(entry.Value).ToArray());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options, null))
        controller.Backup(new string[] { DATAFOLDER });

    // Third generation ("c"): fresh random content per file.
    var rng = new Random();
    foreach (var entry in sizes)
    {
        rng.NextBytes(buffer);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + entry.Key), buffer.Take(entry.Value).ToArray());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options, null))
        controller.Backup(new string[] { DATAFOLDER });

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options.Expand(new { version = 0 }), null))
    {
        var listing = controller.List("*");
        //Console.WriteLine("Newest before deleting:");
        //Console.WriteLine(string.Join(Environment.NewLine, listing.Files.Select(x => x.Path)));
        Assert.AreEqual((sizes.Count * 3) + 1, listing.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options.Expand(new { version = 0, no_local_db = true }), null))
    {
        var listing = controller.List("*");
        //Console.WriteLine("Newest without db:");
        //Console.WriteLine(string.Join(Environment.NewLine, listing.Files.Select(x => x.Path)));
        Assert.AreEqual((sizes.Count * 3) + 1, listing.Files.Count());
    }

    // Rebuild the local database into "<name>-recreated.<ext>" and verify history survives.
    var recreatedDb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(recreatedDb))
        File.Delete(recreatedDb);
    options["dbpath"] = recreatedDb;

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options, null))
        controller.Repair();

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options, null))
        Assert.AreEqual(3, controller.List().Filesets.Count());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options.Expand(new { version = 2 }), null))
    {
        var listing = controller.List("*");
        //Console.WriteLine("V2 after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, listing.Files.Select(x => x.Path)));
        Assert.AreEqual((sizes.Count * 1) + 1, listing.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options.Expand(new { version = 1 }), null))
    {
        var listing = controller.List("*");
        //Console.WriteLine("V1 after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, listing.Files.Select(x => x.Path)));
        Assert.AreEqual((sizes.Count * 2) + 1, listing.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options.Expand(new { version = 0 }), null))
    {
        var listing = controller.List("*");
        //Console.WriteLine("Newest after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, listing.Files.Select(x => x.Path)));
        Assert.AreEqual((sizes.Count * 3) + 1, listing.Files.Count());
    }

    // Full restore into a clean folder, forcing remote data (no local block reuse).
    if (Directory.Exists(RESTOREFOLDER))
        Directory.Delete(RESTOREFOLDER, true);
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var restore = controller.Restore(null);
        Assert.AreEqual(sizes.Count * 3, restore.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Partial restore: only the first ("a") generation of files.
    using (var scratch = new Library.Utility.TempFolder())
    {
        using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, options.Expand(new { restore_path = (string)scratch, no_local_blocks = true }), null))
        {
            var restore = controller.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(sizes.Count, restore.FilesRestored);
        }
    }
}
/// <summary>
/// Runs a full backup / list / database-recreate / restore cycle against a local file
/// backend, asserting error/warning counts and file totals after each operation.
/// </summary>
/// <param name="blocksize">Block size in bytes, passed as --blocksize to the first backup.</param>
/// <param name="basedatasize">Base test-file size; forwarded to <c>WriteTestFilesToFolder</c>.</param>
/// <param name="modifyOptions">Optional hook to adjust the option dictionary before running.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    // Generation "a": file sizes chosen around the base size and block size boundaries.
    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());

        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        IListResults listResults = c.List("*");
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Do a "touch" on files to trigger a re-scan, which should do nothing
    //foreach (var k in filenames)
    //if (File.Exists(Path.Combine(DATAFOLDER, "a" + k.Key)))
    //File.SetLastWriteTime(Path.Combine(DATAFOLDER, "a" + k.Key), DateTime.Now.AddSeconds(5));

    // Generation "b": every file shares a single random buffer.
    var data = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var r = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());

        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            // Only the new "b" files should be opened; both generations should be examined.
            if (r.OpenedFiles != filenames.Count)
            {
                throw new Exception($"Opened {r.OpenedFiles}, but should open {filenames.Count}");
            }
            // Fixed message typo: was "should examine open {...}".
            if (r.ExaminedFiles != filenames.Count * 2)
            {
                throw new Exception($"Examined {r.ExaminedFiles}, but should examine {filenames.Count * 2}");
            }
        }
    }

    // Generation "c": fresh random content per file.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest before deleting:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest without db:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Rebuild the local database from the remote data and verify the history is intact.
    testopts["dbpath"] = this.recreatedDatabaseFile;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());

        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List();
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        Assert.AreEqual(3, listResults.Filesets.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("V2 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("V1 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore from remote data (no local block reuse).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Assert.AreEqual(filenames.Count * 3, r.RestoredFiles);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Partial restore: only the first ("a") generation of files.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(0, r.Errors.Count());
            Assert.AreEqual(0, r.Warnings.Count());
            Assert.AreEqual(filenames.Count, r.RestoredFiles);
        }
    }
}
/// <summary>
/// CLI entry point for the restore operation. The first argument is the backend URL;
/// remaining arguments are filters for which files to restore.
/// </summary>
/// <param name="args">Positional arguments: backend URL followed by optional file patterns.</param>
/// <param name="options">Parsed command-line options; "control-files" is consumed here.</param>
/// <param name="filter">Additional filter applied to the restore operation.</param>
/// <returns>0 on completion; the wrong-argument-count code if no backend was given.</returns>
public static int Restore(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count < 1)
    {
        return (PrintWrongNumberOfArguments(args, 1));
    }

    // First positional argument is the backend URL; the rest are restore patterns.
    string backend = args[0];
    args.RemoveAt(0);

    bool controlFiles = Library.Utility.Utility.ParseBoolOption(options, "control-files");
    options.Remove("control-files");

    // Prefix all filenames with "*/" so we search all folders
    for (var ix = 0; ix < args.Count; ix++)
    {
        if (args[ix].IndexOfAny(new char[] { '*', '?', System.IO.Path.DirectorySeparatorChar, System.IO.Path.AltDirectorySeparatorChar }) < 0 && !args[ix].StartsWith("["))
        {
            args[ix] = "*" + System.IO.Path.DirectorySeparatorChar.ToString() + args[ix];
        }
    }

    var output = new ConsoleOutput(options);
    output.MessageEvent(string.Format("Restore started at {0}", DateTime.Now));

    using (var i = new Library.Main.Controller(backend, options, output))
        if (controlFiles)
        {
            // Restore only the stored control files, printing each restored path.
            var res = i.RestoreControlFiles(args.ToArray(), filter);
            output.MessageEvent("Restore control files completed:");
            foreach (var s in res.Files)
            {
                Console.WriteLine(s);
            }
        }
        else
        {
            // Periodic progress reporter, emitting a status line every 5 seconds.
            using (var periodicOutput = new PeriodicOutput(output, TimeSpan.FromSeconds(5)))
            {
                // Translate engine phase transitions into console status messages.
                output.PhaseChanged += (phase, previousPhase) => {
                    if (phase == Duplicati.Library.Main.OperationPhase.Restore_PreRestoreVerify)
                    {
                        output.MessageEvent("Checking remote backup ...");
                    }
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_ScanForExistingFiles)
                    {
                        output.MessageEvent("Checking existing target files ...");
                    }
                    /*else if (phase == Duplicati.Library.Main.OperationPhase.Restore_DownloadingRemoteFiles)
                     * output.MessageEvent("Downloading remote files ..."); */
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_PatchWithLocalBlocks)
                    {
                        output.MessageEvent("Updating target files with local data ...");
                    }
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_PostRestoreVerify)
                    {
                        // Stop periodic progress before the final verification message.
                        periodicOutput.SetFinished();
                        periodicOutput.Join(TimeSpan.FromMilliseconds(100));
                        output.MessageEvent("Verifying restored files ...");
                    }
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_ScanForLocalBlocks)
                    {
                        output.MessageEvent("Scanning local files for needed data ...");
                    }
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_CreateTargetFolders)
                    {
                        periodicOutput.SetReady();
                    }
                };

                periodicOutput.WriteOutput += (progress, files, size, counting) => {
                    output.MessageEvent(string.Format(" {0} files need to be restored ({1})", files, Library.Utility.Utility.FormatSizeString(size)));
                };

                var res = i.Restore(args.ToArray(), filter);
                string restorePath;
                options.TryGetValue("restore-path", out restorePath);

                // Summary: count, total size, and target location of the restore.
                output.MessageEvent(string.Format("Restored {0} ({1}) files to {2}", res.FilesRestored, Library.Utility.Utility.FormatSizeString(res.SizeOfRestoredFiles), string.IsNullOrEmpty(restorePath) ? "original path" : restorePath));
                output.MessageEvent(string.Format("Duration of restore: {0:hh\\:mm\\:ss}", res.Duration));

                if (res.FilesRestored > 0)
                {
                    output.MessageEvent("***********************************************");
                    output.MessageEvent("Did we help save your files? If so, please support Duplicati with a donation. We suggest 10€ for private use and 100€ for commercial use.");
                    output.MessageEvent("");
                    output.MessageEvent("Paypal: http://goo.gl/P4XJ6S");
                    output.MessageEvent("Bitcoin: 1L74qa1n5SFKwwyHhECTHBJgcf6WT2rJKf");
                    output.MessageEvent("***********************************************");
                }

                if (output.VerboseOutput)
                {
                    Library.Utility.Utility.PrintSerializeObject(res, Console.Out);
                }
            }
        }

    return (0);
}
/// <summary>
/// Drives a full backup / list / database-recreate / restore cycle against a local
/// file backend, using three generations of test files sized around the block size.
/// </summary>
/// <param name="blocksize">Block size in bytes, passed as --blocksize to the first backup.</param>
/// <param name="basedatasize">Base file size; non-positive selects blocksize * 1024.</param>
/// <param name="modifyOptions">Optional hook to adjust the option dictionary before running.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString() + "b";
    if (modifyOptions != null)
    {
        modifyOptions(testopts);
    }
    if (basedatasize <= 0)
    {
        basedatasize = blocksize * 1024;
    }

    // Filename suffix -> file length, probing boundaries around the base size and the block size.
    var filenames = new Dictionary<string, int>();
    filenames[""] = basedatasize;
    filenames["-0"] = 0;
    filenames["-1"] = 1;
    filenames["-p1"] = basedatasize + 1;
    filenames["-p2"] = basedatasize + 2;
    filenames["-p500"] = basedatasize + 500;
    filenames["-m1"] = basedatasize - 1;
    filenames["-m2"] = basedatasize - 2;
    filenames["-m500"] = basedatasize - 500;
    filenames["-s1"] = blocksize / 4 + 6;
    filenames["-s2"] = blocksize / 10 + 6;
    filenames["-l1"] = blocksize * 4 + 6;
    filenames["-l2"] = blocksize * 10 + 6;
    filenames["-bm1"] = blocksize - 1;
    filenames["-b"] = blocksize;
    filenames["-bp1"] = blocksize + 1;

    // Buffer large enough for the biggest file; all zeroes for the first ("a") generation.
    var data = new byte[filenames.Select(x => x.Value).Max()];
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + k.Key), data.Take(k.Value).ToArray());
    }
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Second generation ("b"): every file shares a single random buffer.
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Third generation ("c"): fresh random content per file.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest before deleting:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        // One entry per file across three generations, plus one extra entry.
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest without db:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Rebuild the local database into "<name>-recreated.<ext>" from the remote data.
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb))
    {
        File.Delete(newdb);
    }
    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("V2 after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("V1 after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore into a clean folder, forcing remote data (no local block reuse).
    if (Directory.Exists(RESTOREFOLDER))
    {
        Directory.Delete(RESTOREFOLDER, true);
    }
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }
    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Partial restore: only the first ("a") generation of files.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}
/// <summary>
/// CLI entry point for the restore operation. The first argument is the backend URL;
/// remaining arguments are filters for which files to restore.
/// </summary>
/// <param name="args">Positional arguments: backend URL followed by optional file patterns.</param>
/// <param name="options">Parsed command-line options; "control-files" is consumed here.</param>
/// <param name="filter">Additional filter applied to the restore operation.</param>
/// <returns>0 on success, 2 if the restore produced warnings, or the wrong-argument-count code.</returns>
public static int Restore(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count < 1)
        return PrintWrongNumberOfArguments(args, 1);

    // First positional argument is the backend URL; the rest are restore patterns.
    string backend = args[0];
    args.RemoveAt(0);

    bool controlFiles = Library.Utility.Utility.ParseBoolOption(options, "control-files");
    options.Remove("control-files");

    // Prefix all filenames with "*/" so we search all folders
    for (var ix = 0; ix < args.Count; ix++)
        if (args[ix].IndexOfAny(new char[] { '*', '?', System.IO.Path.DirectorySeparatorChar, System.IO.Path.AltDirectorySeparatorChar }) < 0 && !args[ix].StartsWith("["))
            args[ix] = "*" + System.IO.Path.DirectorySeparatorChar.ToString() + args[ix];

    // suffix all folders with "*" so we restore all contents in the folder
    for (var ix = 0; ix < args.Count; ix++)
        if (args[ix].IndexOfAny(new char[] { '*', '?' }) < 0 && !args[ix].StartsWith("[") && args[ix].EndsWith(System.IO.Path.DirectorySeparatorChar.ToString()))
            args[ix] += "*";

    var output = new ConsoleOutput(options);
    output.MessageEvent(string.Format("Restore started at {0}", DateTime.Now));

    using (var i = new Library.Main.Controller(backend, options, output))
        if (controlFiles)
        {
            // Restore only the stored control files, printing each restored path.
            var res = i.RestoreControlFiles(args.ToArray(), filter);
            output.MessageEvent("Restore control files completed:");
            foreach (var s in res.Files)
                Console.WriteLine(s);
        }
        else
        {
            // Periodic progress reporter, emitting a status line every 5 seconds.
            using (var periodicOutput = new PeriodicOutput(output, TimeSpan.FromSeconds(5)))
            {
                // Translate engine phase transitions into console status messages.
                output.PhaseChanged += (phase, previousPhase) =>
                {
                    if (phase == Duplicati.Library.Main.OperationPhase.Restore_PreRestoreVerify)
                        output.MessageEvent("Checking remote backup ...");
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_ScanForExistingFiles)
                        output.MessageEvent("Checking existing target files ...");
                    /*else if (phase == Duplicati.Library.Main.OperationPhase.Restore_DownloadingRemoteFiles)
                        output.MessageEvent("Downloading remote files ..."); */
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_PatchWithLocalBlocks)
                        output.MessageEvent("Updating target files with local data ...");
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_PostRestoreVerify)
                    {
                        // Stop periodic progress before the final verification message.
                        periodicOutput.SetFinished();
                        periodicOutput.Join(TimeSpan.FromMilliseconds(100));
                        output.MessageEvent("Verifying restored files ...");
                    }
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_ScanForLocalBlocks)
                        output.MessageEvent("Scanning local files for needed data ...");
                    else if (phase == Duplicati.Library.Main.OperationPhase.Restore_CreateTargetFolders)
                        periodicOutput.SetReady();
                };

                periodicOutput.WriteOutput += (progress, files, size, counting) =>
                {
                    output.MessageEvent(string.Format(" {0} files need to be restored ({1})", files, Library.Utility.Utility.FormatSizeString(size)));
                };

                var res = i.Restore(args.ToArray(), filter);
                string restorePath;
                options.TryGetValue("restore-path", out restorePath);

                // Summary: count, total size, and target location of the restore.
                output.MessageEvent(string.Format("Restored {0} ({1}) files to {2}", res.FilesRestored, Library.Utility.Utility.FormatSizeString(res.SizeOfRestoredFiles), string.IsNullOrEmpty(restorePath) ? "original path" : restorePath));
                output.MessageEvent(string.Format("Duration of restore: {0:hh\\:mm\\:ss}", res.Duration));

                if (res.FilesRestored > 0 && !Library.Main.Utility.SuppressDonationMessages)
                {
                    output.MessageEvent("***********************************************");
                    output.MessageEvent("Did we help save your files? If so, please support Duplicati with a donation. We suggest 10€ for private use and 100€ for commercial use.");
                    output.MessageEvent("");
                    output.MessageEvent("Paypal: http://goo.gl/P4XJ6S");
                    output.MessageEvent("Bitcoin: 1L74qa1n5SFKwwyHhECTHBJgcf6WT2rJKf");
                    output.MessageEvent("***********************************************");
                }

                if (output.VerboseOutput)
                    Library.Utility.Utility.PrintSerializeObject(res, Console.Out);

                // Non-zero exit code signals that warnings occurred during restore.
                if (res.Warnings.Count() > 0)
                    return 2;
            }
        }

    return 0;
}
/// <summary>
/// Runs a full backup / list / database-recreate / restore cycle against a local file
/// backend, verifying file counts and open/examine statistics along the way.
/// </summary>
/// <param name="blocksize">Block size in bytes, passed as --blocksize to the first backup.</param>
/// <param name="basedatasize">Base test-file size; forwarded to <c>WriteTestFilesToFolder</c>.</param>
/// <param name="modifyOptions">Optional hook to adjust the option dictionary before running.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    // Generation "a": file sizes chosen around the base size and block size boundaries.
    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Do a "touch" on files to trigger a re-scan, which should do nothing
    //foreach (var k in filenames)
    //if (File.Exists(Path.Combine(DATAFOLDER, "a" + k.Key)))
    //File.SetLastWriteTime(Path.Combine(DATAFOLDER, "a" + k.Key), DateTime.Now.AddSeconds(5));

    // Generation "b": every file shares a single random buffer.
    var data = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var r = c.Backup(new string[] { DATAFOLDER });
        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            // Only the new "b" files should be opened; both generations should be examined.
            if (r.OpenedFiles != filenames.Count)
            {
                throw new Exception($"Opened {r.OpenedFiles}, but should open {filenames.Count}");
            }
            // Fixed message typo: was "should examine open {...}".
            if (r.ExaminedFiles != filenames.Count * 2)
            {
                throw new Exception($"Examined {r.ExaminedFiles}, but should examine {filenames.Count * 2}");
            }
        }
    }

    // Generation "c": fresh random content per file.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest before deleting:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest without db:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Rebuild the local database into "<name>-recreated.<ext>" from the remote data.
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb))
    {
        File.Delete(newdb);
    }
    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("V2 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("V1 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore into a clean folder, forcing remote data (no local block reuse).
    if (Directory.Exists(RESTOREFOLDER))
    {
        Directory.Delete(RESTOREFOLDER, true);
    }
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Partial restore: only the first ("a") generation of files.
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}