/// <summary>
/// Handles the "list-broken-files" command: runs the controller's broken-file
/// scan on the backup given as the single positional argument and prints each
/// affected fileset followed by the files it contains.
/// </summary>
/// <param name="args">Positional command-line arguments; exactly one (the backend URL) is required.</param>
/// <param name="options">Parsed command-line options, forwarded to the controller.</param>
/// <param name="filter">Optional file filter limiting which entries are reported.</param>
/// <returns>0 on success, otherwise the error code from <c>PrintWrongNumberOfArguments</c>.</returns>
public static int ListBrokenFiles(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 1)
        return PrintWrongNumberOfArguments(args, 1);

    var console = new ConsoleOutput(options);
    var lastFilesetId = -1L;
    var printedForFileset = 0L;
    var listAll = Duplicati.Library.Utility.Utility.ParseBoolOption(options, "verbose");

    using (var controller = new Library.Main.Controller(args[0], options, console))
        controller.ListBrokenFiles(filter, (id, time, count, path, size) =>
        {
            // Emit a header line whenever the fileset changes.
            if (lastFilesetId != id)
            {
                lastFilesetId = id;
                printedForFileset = 0;
                console.MessageEvent(string.Format("{0}\t: {1}\t({2} match(es))", id, time.ToLocalTime(), count));
            }

            console.MessageEvent(string.Format("\t{0} ({1})", path, Library.Utility.Utility.FormatSizeString(size)));
            printedForFileset++;

            // Truncate long listings after five entries unless --verbose was
            // given; never truncate when this entry is the fileset's last one.
            if (printedForFileset >= 5 && !listAll && count != printedForFileset)
            {
                console.MessageEvent(string.Format("\t ... and {0} more, (use --{1} to list all)", count - printedForFileset, "verbose"));
                return false;
            }

            return true;
        });

    return 0;
}
/// <summary>
/// End-to-end test for broken-file handling: performs three incremental
/// backups, deletes the oldest dblock volume to simulate corruption, then
/// verifies that list-broken-files reports the damage, that purge-broken-files
/// repairs it (including under --dry-run, see issue #4379), and that a
/// subsequent backup succeeds.
/// </summary>
public void PurgeBrokenFilesTest()
{
    var blocksize = 1024 * 10;
    var basedatasize = 0;
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";

    // Three incremental rounds: first 1/3 of the files, then 2/3, then all.
    var filenames = BorderTests.WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize).Select(x => "a" + x.Key).ToList();
    var round1 = filenames.Take(filenames.Count / 3).ToArray();
    var round2 = filenames.Take((filenames.Count / 3) * 2).ToArray();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round1.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        // Fixed: NUnit's Assert.AreEqual takes (expected, actual); the
        // arguments were previously swapped, producing misleading messages.
        Assert.AreEqual(round1.Length, res.AddedFiles);
    }

    // Remember the oldest dblock volume so we can delete it later.
    var dblock_file = SystemIO.IO_OS
        .GetFiles(TARGETFOLDER, "*.dblock.zip.aes")
        .Select(x => new FileInfo(x))
        .OrderBy(x => x.LastWriteTimeUtc)
        .Select(x => x.FullName)
        .First();

    // Sleep so the next fileset gets a distinct timestamp.
    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round2.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(round2.Length - round1.Length, res.AddedFiles);
    }

    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(filenames.Count - round2.Length, res.AddedFiles);
    }

    // Simulate a damaged backend by removing the oldest data volume.
    File.Delete(dblock_file);

    long[] affectedfiles;
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var brk = c.ListBrokenFiles(null);
        Assert.AreEqual(0, brk.Errors.Count());
        Assert.AreEqual(0, brk.Warnings.Count());
        // All three filesets reference blocks from the deleted volume.
        var sets = brk.BrokenFiles.Count();
        var files = brk.BrokenFiles.Sum(x => x.Item3.Count());
        Assert.AreEqual(3, sets);
        Assert.True(files > 0);
        affectedfiles = brk.BrokenFiles.OrderBy(x => x.Item1).Select(x => x.Item3.LongCount()).ToArray();
    }

    // Querying each version individually must match the per-fileset counts.
    for (var i = 0; i < 3; i++)
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = i }), null))
        {
            var brk = c.ListBrokenFiles(null);
            Assert.AreEqual(0, brk.Errors.Count());
            Assert.AreEqual(0, brk.Warnings.Count());
            var sets = brk.BrokenFiles.Count();
            var files = brk.BrokenFiles.Sum(x => x.Item3.Count());
            Assert.AreEqual(1, sets);
            Assert.AreEqual(affectedfiles[i], files);
        }
    }

    // A dry-run should run without exceptions (see issue #4379).
    Dictionary<string, string> dryRunOptions = new Dictionary<string, string>(testopts) { ["dry-run"] = "true" };
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, dryRunOptions, null))
    {
        IPurgeBrokenFilesResults purgeResults = c.PurgeBrokenFiles(null);
        Assert.AreEqual(0, purgeResults.Errors.Count());
        Assert.AreEqual(0, purgeResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var brk = c.PurgeBrokenFiles(null);
        Assert.AreEqual(0, brk.Errors.Count());
        Assert.AreEqual(0, brk.Warnings.Count());
        // Every one of the three filesets must be either deleted or rewritten.
        var modFilesets = 0L;
        if (brk.DeleteResults != null)
        {
            modFilesets += brk.DeleteResults.DeletedSets.Count();
        }

        if (brk.PurgeResults != null)
        {
            modFilesets += brk.PurgeResults.RewrittenFileLists;
        }

        Assert.AreEqual(3, modFilesets);
    }

    // A subsequent backup should be successful.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }
}
/// <summary>
/// End-to-end test for broken-file handling: performs three incremental
/// backups, deletes the oldest dblock volume to simulate corruption, then
/// verifies that list-broken-files reports the damage for each version and
/// that purge-broken-files modifies all three filesets.
/// </summary>
public void PurgeBrokenFilesTest()
{
    PrepareSourceData();
    var blocksize = 1024 * 10;
    var basedatasize = 0;
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";

    // Three incremental rounds: first 1/3 of the files, then 2/3, then all.
    var filenames = BorderTests.WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize).Select(x => "a" + x.Key).ToList();
    var round1 = filenames.Take(filenames.Count / 3).ToArray();
    var round2 = filenames.Take((filenames.Count / 3) * 2).ToArray();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round1.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        // Fixed: NUnit's Assert.AreEqual takes (expected, actual); the
        // arguments were previously swapped, producing misleading messages.
        Assert.AreEqual(round1.Length, res.AddedFiles);
    }

    // Remember the oldest dblock volume so we can delete it later.
    var dblock_file = Directory
        .GetFiles(TARGETFOLDER, "*.dblock.zip.aes")
        .Select(x => new FileInfo(x))
        .OrderBy(x => x.LastWriteTimeUtc)
        .Select(x => x.FullName)
        .First();

    // Sleep so the next fileset gets a distinct timestamp.
    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round2.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(round2.Length - round1.Length, res.AddedFiles);
    }

    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(filenames.Count - round2.Length, res.AddedFiles);
    }

    // Simulate a damaged backend by removing the oldest data volume.
    File.Delete(dblock_file);

    long[] affectedfiles;
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var brk = c.ListBrokenFiles(null);
        // All three filesets reference blocks from the deleted volume.
        var sets = brk.BrokenFiles.Count();
        var files = brk.BrokenFiles.Sum(x => x.Item3.Count());
        Assert.AreEqual(3, sets);
        Assert.True(files > 0);
        affectedfiles = brk.BrokenFiles.OrderBy(x => x.Item1).Select(x => x.Item3.LongCount()).ToArray();
    }

    // Querying each version individually must match the per-fileset counts.
    for (var i = 0; i < 3; i++)
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = i }), null))
        {
            var brk = c.ListBrokenFiles(null);
            var sets = brk.BrokenFiles.Count();
            var files = brk.BrokenFiles.Sum(x => x.Item3.Count());
            Assert.AreEqual(1, sets);
            Assert.AreEqual(affectedfiles[i], files);
        }
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var brk = c.PurgeBrokenFiles(null);
        // Every one of the three filesets must be either deleted or rewritten.
        var modFilesets = 0L;
        if (brk.DeleteResults != null)
        {
            modFilesets += brk.DeleteResults.DeletedSets.Count();
        }

        if (brk.PurgeResults != null)
        {
            modFilesets += brk.PurgeResults.RewrittenFileLists;
        }

        Assert.AreEqual(3, modFilesets);
    }
}