/// <summary>
/// Resolves the backend target URL used by the unit tests.
/// An override file ("unittest_target.txt") placed next to the executing
/// assembly takes precedence; otherwise the supplied fallback is used;
/// otherwise a protected temporary folder is created and returned as a
/// file:// URL.
/// </summary>
/// <param name="other">Optional fallback target URL.</param>
/// <returns>The target URL to run the tests against.</returns>
public static string GetDefaultTarget(string other = null)
{
    var overrideFile = System.IO.Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "unittest_target.txt");
    if (File.Exists(overrideFile))
        return File.ReadAllText(overrideFile).Trim();

    if (other != null)
        return other;

    using (var folder = new Library.Utility.TempFolder())
    {
        // Protected folders survive the dispose, so the returned URL stays valid
        folder.Protected = true;
        return "file://" + folder;
    }
}
/// <summary>
/// Determines the backend URL for unit tests: an override file next to the
/// test assembly wins, then the supplied fallback, then a freshly created
/// protected temporary folder exposed as a file:// URL.
/// </summary>
/// <param name="other">Optional fallback target URL.</param>
/// <returns>The resolved target URL.</returns>
public static string GetDefaultTarget(string other = null)
{
    string alttarget = System.IO.Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "unittest_target.txt");

    if (File.Exists(alttarget))
    {
        return File.ReadAllText(alttarget).Trim();
    }

    if (other != null)
    {
        return other;
    }

    using (var tempFolder = new Library.Utility.TempFolder())
    {
        // Mark the folder protected so it is not deleted on dispose
        tempFolder.Protected = true;
        return "file://" + tempFolder;
    }
}
/// <summary>
/// Creates file number <paramref name="i"/> inside <paramref name="tf"/> and
/// fills it with random bytes; the size is drawn uniformly from
/// [<paramref name="min_file_size"/>, <paramref name="max_file_size"/>).
/// </summary>
/// <param name="tf">Temporary folder that receives the file.</param>
/// <param name="i">File index; also used as the file name.</param>
/// <param name="min_file_size">Inclusive lower bound for the random size.</param>
/// <param name="max_file_size">Exclusive upper bound for the random size.</param>
/// <param name="rnd">Random source for both size and contents.</param>
/// <returns>The full path of the generated file.</returns>
private static string CreateRandomFile(Library.Utility.TempFolder tf, int i, int min_file_size, int max_file_size, Random rnd)
{
    Console.Write("Generating file {0}", i);

    string filename = System.IO.Path.Combine(tf, i.ToString());
    using (var stream = new System.IO.FileStream(filename, System.IO.FileMode.CreateNew, System.IO.FileAccess.Write))
    {
        var chunk = new byte[1024];
        int remaining = rnd.Next(min_file_size, max_file_size);
        Console.WriteLine(" ({0})", Duplicati.Library.Utility.Utility.FormatSizeString(remaining));

        // Emit 1 KiB chunks of fresh random data; the last chunk is
        // truncated so the file ends up exactly "remaining" bytes long.
        while (remaining > 0)
        {
            rnd.NextBytes(chunk);
            stream.Write(chunk, 0, Math.Min(chunk.Length, remaining));
            remaining -= chunk.Length;
        }
    }

    return filename;
}
/// <summary>
/// Runs a backup/list/repair/restore round-trip against a local file:// target
/// using synthetic files whose sizes straddle interesting block-size boundaries.
/// Three file generations ("a*", "b*", "c*") are backed up, listings are
/// verified per version, the database is recreated from remote data, and both
/// a full and a partial restore are checked.
/// </summary>
/// <param name="blocksize">Block size in bytes for the initial backup.</param>
/// <param name="basedatasize">Base file size; defaults to blocksize * 1024 when not positive.</param>
/// <param name="modifyOptions">Optional hook to tweak the option dictionary before the run.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    if (basedatasize <= 0)
        basedatasize = blocksize * 1024;

    // File-name suffix -> file size; sizes probe off-by-one boundaries
    // around the base data size and around the block size itself.
    var filenames = new Dictionary<string, int>
    {
        [""] = basedatasize,
        ["-0"] = 0,
        ["-1"] = 1,
        ["-p1"] = basedatasize + 1,
        ["-p2"] = basedatasize + 2,
        ["-p500"] = basedatasize + 500,
        ["-m1"] = basedatasize - 1,
        ["-m2"] = basedatasize - 2,
        ["-m500"] = basedatasize - 500,
        ["-s1"] = blocksize / 4 + 6,
        ["-s2"] = blocksize / 10 + 6,
        ["-l1"] = blocksize * 4 + 6,
        ["-l2"] = blocksize * 10 + 6,
        ["-bm1"] = blocksize - 1,
        ["-b"] = blocksize,
        ["-bp1"] = blocksize + 1,
    };

    var data = new byte[filenames.Select(x => x.Value).Max()];

    // Generation 1: all-zero "a*" files
    foreach (var entry in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + entry.Key), data.Take(entry.Value).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
    }

    // Generation 2: identical random content for every "b*" file
    new Random().NextBytes(data);
    foreach (var entry in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + entry.Key), data.Take(entry.Value).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Generation 3: fresh random content per "c*" file
    var rnd = new Random();
    foreach (var entry in filenames)
    {
        rnd.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + entry.Key), data.Take(entry.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: all three generations plus the folder entry
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Same listing, but without consulting the local database
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Recreate the database from remote data and verify the filesets
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb))
        File.Delete(newdb);
    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    // Per-version listings: version 2 = oldest (one generation), 1 = two, 0 = three
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore into a clean folder, forcing blocks to come from the remote
    if (Directory.Exists(RESTOREFOLDER))
        Directory.Delete(RESTOREFOLDER, true);
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Partial restore: only the first-generation "a*" files
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}
/// <summary>
/// Runs the backup/list/repair/restore round-trip with explicit error and
/// warning assertions after every operation. Three file generations are
/// backed up; listings are verified per version with and without the local
/// database; the database is recreated into this.recreatedDatabaseFile; and
/// both a full and a partial restore are validated.
/// </summary>
/// <param name="blocksize">Block size in bytes for the initial backup.</param>
/// <param name="basedatasize">Base file size forwarded to WriteTestFilesToFolder.</param>
/// <param name="modifyOptions">Optional hook to tweak the option dictionary before the run.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults firstBackup = controller.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, firstBackup.Errors.Count());

        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        IListResults firstListing = controller.List("*");
        Assert.AreEqual(0, firstListing.Errors.Count());
        Assert.AreEqual(0, firstListing.Warnings.Count());
    }

    // Generation 2: identical random content for every "b*" file
    var data = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(data);
    foreach (var entry in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + entry.Key), data.Take(entry.Value).ToArray());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var secondBackup = controller.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, secondBackup.Errors.Count());
        Assert.AreEqual(0, secondBackup.Warnings.Count());

        // Unless file-time checks are disabled, only the new "b*" files should
        // have been opened, while every file is examined.
        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            if (secondBackup.OpenedFiles != filenames.Count)
                throw new Exception($"Opened {secondBackup.OpenedFiles}, but should open {filenames.Count}");
            if (secondBackup.ExaminedFiles != filenames.Count * 2)
                throw new Exception($"Examined {secondBackup.ExaminedFiles}, but should examine open {filenames.Count * 2}");
        }
    }

    // Generation 3: fresh random content per "c*" file
    var rnd = new Random();
    foreach (var entry in filenames)
    {
        rnd.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + entry.Key), data.Take(entry.Value).ToArray());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults thirdBackup = controller.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, thirdBackup.Errors.Count());
        Assert.AreEqual(0, thirdBackup.Warnings.Count());
    }

    // Newest version: all three generations plus the folder entry
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var listing = controller.List("*");
        Assert.AreEqual(0, listing.Errors.Count());
        Assert.AreEqual(0, listing.Warnings.Count());
        Assert.AreEqual((filenames.Count * 3) + 1, listing.Files.Count());
    }

    // Same listing, but without consulting the local database
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var listing = controller.List("*");
        Assert.AreEqual(0, listing.Errors.Count());
        Assert.AreEqual(0, listing.Warnings.Count());
        Assert.AreEqual((filenames.Count * 3) + 1, listing.Files.Count());
    }

    // Recreate the database from remote data
    testopts["dbpath"] = this.recreatedDatabaseFile;

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IRepairResults repairResults = controller.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());

        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults filesetListing = controller.List();
        Assert.AreEqual(0, filesetListing.Errors.Count());
        Assert.AreEqual(0, filesetListing.Warnings.Count());
        Assert.AreEqual(3, filesetListing.Filesets.Count());
    }

    // Per-version listings: version 2 = oldest (one generation), 1 = two, 0 = three
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var listing = controller.List("*");
        Assert.AreEqual(0, listing.Errors.Count());
        Assert.AreEqual(0, listing.Warnings.Count());
        Assert.AreEqual((filenames.Count * 1) + 1, listing.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var listing = controller.List("*");
        Assert.AreEqual(0, listing.Errors.Count());
        Assert.AreEqual(0, listing.Warnings.Count());
        Assert.AreEqual((filenames.Count * 2) + 1, listing.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var listing = controller.List("*");
        Assert.AreEqual(0, listing.Errors.Count());
        Assert.AreEqual(0, listing.Warnings.Count());
        Assert.AreEqual((filenames.Count * 3) + 1, listing.Files.Count());
    }

    // Full restore, forcing blocks to come from the remote
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var restoreResults = controller.Restore(null);
        Assert.AreEqual(0, restoreResults.Errors.Count());
        Assert.AreEqual(0, restoreResults.Warnings.Count());
        Assert.AreEqual(filenames.Count * 3, restoreResults.RestoredFiles);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Partial restore: only the first-generation "a*" files
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var restoreResults = controller.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(0, restoreResults.Errors.Count());
            Assert.AreEqual(0, restoreResults.Warnings.Count());
            Assert.AreEqual(filenames.Count, restoreResults.RestoredFiles);
        }
    }
}
/// <summary>
/// Backup/list/repair/restore round-trip over three generations of synthetic
/// files ("a*", "b*", "c*") whose sizes probe block-size boundaries, including
/// a database recreation and a full plus a filtered restore.
/// </summary>
/// <param name="blocksize">Block size in bytes for the initial backup.</param>
/// <param name="basedatasize">Base file size; defaults to blocksize * 1024 when not positive.</param>
/// <param name="modifyOptions">Optional hook to tweak the option dictionary before the run.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString() + "b";

    if (modifyOptions != null)
        modifyOptions(testopts);

    if (basedatasize <= 0)
        basedatasize = blocksize * 1024;

    // Suffix -> size map; covers the base size, the block size, and
    // off-by-one/off-by-few neighbours of both.
    var fileSizes = new Dictionary<string, int>();
    fileSizes[""] = basedatasize;
    fileSizes["-0"] = 0;
    fileSizes["-1"] = 1;
    fileSizes["-p1"] = basedatasize + 1;
    fileSizes["-p2"] = basedatasize + 2;
    fileSizes["-p500"] = basedatasize + 500;
    fileSizes["-m1"] = basedatasize - 1;
    fileSizes["-m2"] = basedatasize - 2;
    fileSizes["-m500"] = basedatasize - 500;
    fileSizes["-s1"] = blocksize / 4 + 6;
    fileSizes["-s2"] = blocksize / 10 + 6;
    fileSizes["-l1"] = blocksize * 4 + 6;
    fileSizes["-l2"] = blocksize * 10 + 6;
    fileSizes["-bm1"] = blocksize - 1;
    fileSizes["-b"] = blocksize;
    fileSizes["-bp1"] = blocksize + 1;

    var payload = new byte[fileSizes.Select(x => x.Value).Max()];

    // Generation 1: zero-filled "a*" files
    foreach (var pair in fileSizes)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + pair.Key), payload.Take(pair.Value).ToArray());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var listRes = ctrl.List("*");
    }

    // Generation 2: shared random content for the "b*" files
    new Random().NextBytes(payload);
    foreach (var pair in fileSizes)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + pair.Key), payload.Take(pair.Value).ToArray());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Backup(new string[] { DATAFOLDER });

    // Generation 3: per-file random content for the "c*" files
    var generator = new Random();
    foreach (var pair in fileSizes)
    {
        generator.NextBytes(payload);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + pair.Key), payload.Take(pair.Value).ToArray());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Backup(new string[] { DATAFOLDER });

    // Newest version should hold all three generations plus the folder entry
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((fileSizes.Count * 3) + 1, listRes.Files.Count());
    }

    // Repeat the newest listing without the local database
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((fileSizes.Count * 3) + 1, listRes.Files.Count());
    }

    // Rebuild the database from remote volumes into a "-recreated" sibling file
    var recreatedDb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(recreatedDb))
    {
        File.Delete(recreatedDb);
    }
    testopts["dbpath"] = recreatedDb;

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Repair();

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, ctrl.List().Filesets.Count());

    // Listings per version: oldest (2) has one generation, (1) two, (0) three
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((fileSizes.Count * 1) + 1, listRes.Files.Count());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((fileSizes.Count * 2) + 1, listRes.Files.Count());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((fileSizes.Count * 3) + 1, listRes.Files.Count());
    }

    // Full restore into a freshly created folder, ignoring local blocks
    if (Directory.Exists(RESTOREFOLDER))
    {
        Directory.Delete(RESTOREFOLDER, true);
    }
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var restoreRes = ctrl.Restore(null);
        Assert.AreEqual(fileSizes.Count * 3, restoreRes.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Filtered restore of the "a*" generation into a scratch folder
    using (var scratch = new Library.Utility.TempFolder())
    {
        using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)scratch, no_local_blocks = true }), null))
        {
            var restoreRes = ctrl.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(fileSizes.Count, restoreRes.FilesRestored);
        }
    }
}
/// <summary>
/// Backup/list/repair/restore round-trip that delegates test-file creation to
/// WriteTestFilesToFolder, verifies opened/examined file counts on the second
/// backup (unless file-time checks are disabled), recreates the database, and
/// validates a full plus a filtered restore.
/// </summary>
/// <param name="blocksize">Block size in bytes for the initial backup.</param>
/// <param name="basedatasize">Base file size forwarded to WriteTestFilesToFolder.</param>
/// <param name="modifyOptions">Optional hook to tweak the option dictionary before the run.</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var listRes = ctrl.List("*");
    }

    // Generation 2: shared random content for the "b*" files
    var payload = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(payload);
    foreach (var pair in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + pair.Key), payload.Take(pair.Value).ToArray());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var backupRes = ctrl.Backup(new string[] { DATAFOLDER });

        // With file-time checks active, only the new "b*" files are opened
        // while every file (old and new) is examined.
        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            if (backupRes.OpenedFiles != filenames.Count)
                throw new Exception($"Opened {backupRes.OpenedFiles}, but should open {filenames.Count}");
            if (backupRes.ExaminedFiles != filenames.Count * 2)
                throw new Exception($"Examined {backupRes.ExaminedFiles}, but should examine open {filenames.Count * 2}");
        }
    }

    // Generation 3: per-file random content for the "c*" files
    var generator = new Random();
    foreach (var pair in filenames)
    {
        generator.NextBytes(payload);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + pair.Key), payload.Take(pair.Value).ToArray());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Backup(new string[] { DATAFOLDER });

    // Newest version should hold all three generations plus the folder entry
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, listRes.Files.Count());
    }

    // Repeat the newest listing without the local database
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, listRes.Files.Count());
    }

    // Rebuild the database from remote volumes into a "-recreated" sibling file
    var recreatedDb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(recreatedDb))
    {
        File.Delete(recreatedDb);
    }
    testopts["dbpath"] = recreatedDb;

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        ctrl.Repair();

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, ctrl.List().Filesets.Count());

    // Listings per version: oldest (2) has one generation, (1) two, (0) three
    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((filenames.Count * 1) + 1, listRes.Files.Count());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((filenames.Count * 2) + 1, listRes.Files.Count());
    }

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var listRes = ctrl.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, listRes.Files.Count());
    }

    // Full restore into a freshly created folder, ignoring local blocks
    if (Directory.Exists(RESTOREFOLDER))
    {
        Directory.Delete(RESTOREFOLDER, true);
    }
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var restoreRes = ctrl.Restore(null);
        Assert.AreEqual(filenames.Count * 3, restoreRes.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Filtered restore of the "a*" generation into a scratch folder
    using (var scratch = new Library.Utility.TempFolder())
    {
        using (var ctrl = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)scratch, no_local_blocks = true }), null))
        {
            var restoreRes = ctrl.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, restoreRes.FilesRestored);
        }
    }
}
/// <summary>
/// Executes a scheduled Duplicati task (backup, list, delete, restore, ...),
/// records the textual result and a user-friendly message on the task, runs
/// retention cleanup after successful backups, and raises completion/result
/// events. Abort handling maps the stop reason to a localized message.
/// </summary>
/// <param name="task">The task to execute; its Result/IsAborted are set on completion.</param>
public void ExecuteTask(IDuplicityTask task)
{
    Dictionary<string, string> options = new Dictionary<string, string>();

    //Set the log level to be that of the GUI
    options["log-level"] = Duplicati.Library.Logging.Log.LogLevel.ToString();

    string destination = task.GetConfiguration(options);
    string results = "";
    string parsedMessage = "";

    m_isAborted = false;

    try
    {
        //TODO: Its a bit dirty to set the options after creating the instance
        using (Interface i = new Interface(destination, options))
        {
            lock (m_lock)
            {
                m_stopReason = System.Windows.Forms.CloseReason.None;
                m_currentBackupControlInterface = i;
            }

            SetupControlInterface();

            i.OperationProgress += new OperationProgressEvent(Duplicati_OperationProgress);

            switch (task.TaskType)
            {
                case DuplicityTaskType.FullBackup:
                case DuplicityTaskType.IncrementalBackup:
                    {
                        //Activate auto-cleanup
                        options["auto-cleanup"] = "";
                        options["force"] = "";

                        // Each retention rule adds an extra cleanup operation afterwards
                        if (task.Schedule.Task.KeepFull > 0)
                            m_extraOperations++;
                        if (!string.IsNullOrEmpty(task.Schedule.Task.KeepTime))
                            m_extraOperations++;

                        Library.Utility.TempFolder tf = null;
                        try
                        {
                            if (ProgressEvent != null)
                                ProgressEvent(DuplicatiOperation.Backup, RunnerState.Started, task.Schedule.Name, "", 0, -1);

                            if (task.Task.IncludeSetup)
                            {
                                //Make a copy of the current database
                                tf = new Duplicati.Library.Utility.TempFolder();
                                string filename = System.IO.Path.Combine(tf, System.IO.Path.GetFileName(Program.DatabasePath));

                                System.IO.File.Copy(Program.DatabasePath, filename, true);
                                using (System.Data.IDbConnection con = (System.Data.IDbConnection)Activator.CreateInstance(SQLiteLoader.SQLiteConnectionType))
                                {
                                    con.ConnectionString = "Data Source=" + filename;

                                    //Open the database, handle any encryption issues automatically
                                    Program.OpenDatabase(con);

                                    using (System.Data.IDbCommand cmd = con.CreateCommand())
                                    {
                                        //Remove all log data to minimize the size of the database
                                        cmd.CommandText = "DELETE FROM CommandQueue;";
                                        cmd.ExecuteNonQuery();
                                        cmd.CommandText = "DELETE FROM Log;";
                                        cmd.ExecuteNonQuery();
                                        cmd.CommandText = "DELETE FROM LogBlob;";
                                        cmd.ExecuteNonQuery();

                                        //Free up unused space
                                        cmd.CommandText = "VACUUM;";
                                        cmd.ExecuteNonQuery();
                                    }
                                }

                                options["signature-control-files"] = filename;
                            }

                            options["full-if-sourcefolder-changed"] = "";

                            List<KeyValuePair<bool, string>> filters = new List<KeyValuePair<bool, string>>();
                            string[] sourceFolders = DynamicSetupHelper.GetSourceFolders(task.Task, new ApplicationSettings(task.Task.DataParent), filters);

                            if (options.ContainsKey("filter"))
                                filters.AddRange(Library.Utility.FilenameFilter.DecodeFilter(options["filter"]));

                            options["filter"] = Library.Utility.FilenameFilter.EncodeAsFilter(filters);

                            //At this point we register the backup as being in progress
                            ((FullOrIncrementalTask)task).WriteBackupInProgress(Strings.DuplicatiRunner.ShutdownWhileBackupInprogress);

                            results = i.Backup(sourceFolders);
                        }
                        finally
                        {
                            if (tf != null)
                                tf.Dispose();

                            if (ProgressEvent != null)
                                ProgressEvent(DuplicatiOperation.Backup, RunnerState.Stopped, task.Schedule.Name, "", 100, -1);
                        }
                        break;
                    }
                case DuplicityTaskType.ListBackups:
                    // Flatten full backups and their incrementals into one timestamp list
                    List<string> res = new List<string>();
                    foreach (ManifestEntry be in i.GetBackupSets())
                    {
                        res.Add(be.Time.ToString());
                        foreach (ManifestEntry bei in be.Incrementals)
                            res.Add(bei.Time.ToString());
                    }
                    (task as ListBackupsTask).Backups = res.ToArray();
                    break;
                case DuplicityTaskType.ListBackupEntries:
                    (task as ListBackupEntriesTask).Backups = i.GetBackupSets();
                    break;
                case DuplicityTaskType.ListFiles:
                    (task as ListFilesTask).Files = i.ListCurrentFiles();
                    break;
                case DuplicityTaskType.ListSourceFolders:
                    (task as ListSourceFoldersTask).Files = new List<string>(i.ListSourceFolders() ?? new string[0]);
                    break;
                case DuplicityTaskType.ListActualFiles:
                    (task as ListActualFilesTask).Files = i.ListActualSignatureFiles();
                    break;
                case DuplicityTaskType.RemoveAllButNFull:
                    results = i.DeleteAllButNFull();
                    break;
                case DuplicityTaskType.RemoveOlderThan:
                    results = i.DeleteOlderThan();
                    break;
                case DuplicityTaskType.Restore:
                    options["file-to-restore"] = ((RestoreTask)task).SourceFiles;

                    // The backup filter does not apply when restoring
                    if (options.ContainsKey("filter"))
                        options.Remove("filter");

                    try
                    {
                        if (ProgressEvent != null)
                            ProgressEvent(DuplicatiOperation.Restore, RunnerState.Started, task.Schedule.Name, "", 0, -1);

                        results = i.Restore(task.LocalPath.Split(System.IO.Path.PathSeparator));
                    }
                    finally
                    {
                        if (ProgressEvent != null)
                            ProgressEvent(DuplicatiOperation.Restore, RunnerState.Stopped, task.Schedule.Name, "", 100, -1);
                    }
                    break;
                case DuplicityTaskType.RestoreSetup:
                    i.RestoreControlFiles(task.LocalPath);
                    break;
                default:
                    return;
            }
        }
    }
    catch (Exception ex)
    {
        // Unwrap reflection wrappers to get at the real failure
        while (ex is System.Reflection.TargetInvocationException && ex.InnerException != null)
            ex = ex.InnerException;

        if (ex is System.Threading.ThreadAbortException)
        {
            m_isAborted = true;
            System.Threading.Thread.ResetAbort();
        }
        else if (ex is Library.Main.LiveControl.ExecutionStoppedException)
        {
            m_isAborted = true;
        }

        if (m_isAborted && m_stopReason != System.Windows.Forms.CloseReason.None)
        {
            //If the user has stopped the backup for some reason, write a nicer message
            switch (m_stopReason)
            {
                case System.Windows.Forms.CloseReason.ApplicationExitCall:
                    parsedMessage = Strings.DuplicatiRunner.ApplicationExitLogMesssage;
                    break;
                case System.Windows.Forms.CloseReason.TaskManagerClosing:
                    parsedMessage = Strings.DuplicatiRunner.TaskManagerCloseMessage;
                    break;
                case System.Windows.Forms.CloseReason.UserClosing:
                    parsedMessage = Strings.DuplicatiRunner.UserClosingMessage;
                    break;
                case System.Windows.Forms.CloseReason.WindowsShutDown:
                    parsedMessage = Strings.DuplicatiRunner.WindowsShutdownMessage;
                    break;
                default:
                    parsedMessage = string.Format(Strings.DuplicatiRunner.OtherAbortMessage, m_stopReason);
                    break;
            }

            if (task.Schedule != null)
            {
                //If the application is going down, the backup should resume on next launch
                switch (m_stopReason)
                {
                    case System.Windows.Forms.CloseReason.ApplicationExitCall:
                    case System.Windows.Forms.CloseReason.TaskManagerClosing:
                    case System.Windows.Forms.CloseReason.WindowsShutDown:
                        task.Schedule.ScheduledRunFailed();
                        break;
                }
            }
        }
        else
        {
            parsedMessage = string.Format(Strings.DuplicatiRunner.ErrorMessage, ex.Message);
        }

        results = "Error: " + ex.ToString(); //Don't localize

        while (ex.InnerException != null)
        {
            ex = ex.InnerException;
            results += Environment.NewLine + "InnerError: " + ex.ToString(); //Don't localize
        }
    }
    finally
    {
        lock (m_lock)
            m_currentBackupControlInterface = null;
    }

    try
    {
        // Run retention cleanup after a successful (non-aborted) backup
        if (!m_isAborted && (task.TaskType == DuplicityTaskType.FullBackup || task.TaskType == DuplicityTaskType.IncrementalBackup))
        {
            if (task.Schedule.Task.KeepFull > 0)
            {
                m_lastPGProgress = 100;
                m_lastPGmessage = Strings.DuplicatiRunner.CleaningUpMessage;
                m_lastPGSubmessage = "";
                m_lastPGSubprogress = -1;
                ReinvokeLastProgressEvent();

                m_extraOperations--;
                RemoveAllButNFullTask tmpTask = new RemoveAllButNFullTask(task.Schedule, (int)task.Schedule.Task.KeepFull);
                ExecuteTask(tmpTask);
                results += Environment.NewLine + Strings.DuplicatiRunner.CleanupLogdataHeader + Environment.NewLine + tmpTask.Result;
            }

            if (!string.IsNullOrEmpty(task.Schedule.Task.KeepTime))
            {
                m_lastPGProgress = 100;
                m_lastPGmessage = Strings.DuplicatiRunner.CleaningUpMessage;
                m_lastPGSubmessage = "";
                m_lastPGSubprogress = -1;
                ReinvokeLastProgressEvent();

                m_extraOperations--;
                RemoveOlderThanTask tmpTask = new RemoveOlderThanTask(task.Schedule, task.Schedule.Task.KeepTime);
                ExecuteTask(tmpTask);
                results += Environment.NewLine + Strings.DuplicatiRunner.CleanupLogdataHeader + Environment.NewLine + tmpTask.Result;
            }

            if (task.Schedule.Task.KeepFull > 0 || !string.IsNullOrEmpty(task.Schedule.Task.KeepTime))
                ReinvokeLastProgressEvent();

            if (ProgressEvent != null)
                ProgressEvent(DuplicatiOperation.Backup, RunnerState.Stopped, task.Schedule.Name, "", 100, -1);
        }
    }
    catch (Exception ex)
    {
        results += Environment.NewLine + string.Format(Strings.DuplicatiRunner.CleanupError, ex.Message);
    }

    task.IsAborted = m_isAborted;
    task.Result = results;
    task.RaiseTaskCompleted(results, parsedMessage);

    // BUGFIX: the type checks must be parenthesized. The original condition
    // "ResultEvent != null && task is FullBackupTask || task is IncrementalBackupTask"
    // parses as "(A && B) || C" because && binds tighter than ||, so an
    // incremental backup would enter this block even with ResultEvent == null
    // and crash with a NullReferenceException at the ResultEvent invocation.
    if (ResultEvent != null && (task is FullBackupTask || task is IncrementalBackupTask))
    {
        Log[] logs = Program.DataConnection.GetObjects<Log>("TaskID = ? AND SubAction LIKE ? ORDER BY EndTime DESC", task.Task.ID, "Primary");
        if (logs != null && logs.Length > 0)
        {
            Datamodel.Log l = logs[0];

            // Map the parsed log status onto the runner result
            RunnerResult r = RunnerResult.Error;
            if (l.ParsedStatus == DuplicatiOutputParser.ErrorStatus)
                r = RunnerResult.Error;
            else if (l.ParsedStatus == DuplicatiOutputParser.OKStatus || l.ParsedStatus == DuplicatiOutputParser.NoChangedFiles)
                r = RunnerResult.OK;
            else if (l.ParsedStatus == DuplicatiOutputParser.PartialStatus)
                r = RunnerResult.Partial;
            else if (l.ParsedStatus == DuplicatiOutputParser.WarningStatus)
                r = RunnerResult.Warning;

            ResultEvent(r, parsedMessage, results);
        }
    }

    if (task.Schedule != null && !m_isAborted)
        task.Schedule.ScheduledRunCompleted(); //Register as completed if not aborted
}