/// <summary>
/// Creates two backup versions (file "a", then files "a" + "b"), verifies the
/// file listings with and without the local database, then deletes the local
/// database, recreates it with Repair, and verifies both versions again.
/// </summary>
public void RunCommands()
{
    // First generation: a single 10 MiB zero-filled file
    var data = new byte[1024 * 1024 * 10];
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Backup(new string[] { DATAFOLDER });

    // Second generation: add a second file with random content
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: folder entry plus both files = 3 entries
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Listing without the local database must give the same answer
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Delete the local database and rebuild it from the remote data
    File.Delete(DBFILE);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Repair();

    // Both filesets must survive the database recreation.
    // Fix: expected value goes first in Assert.AreEqual (arguments were reversed).
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        Assert.AreEqual(2, c.List().Filesets.Count());

    // Oldest version: folder entry plus file "a" = 2 entries
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Oldest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(2, r.Files.Count());
    }

    // Newest version still holds all 3 entries after the rebuild
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }
}
/// <summary>
/// Runs a three-generation backup cycle with files sized around the block size
/// boundaries, then verifies listing, database recreation and restores.
/// </summary>
/// <param name="blocksize">The block size in bytes, passed as the --blocksize option</param>
/// <param name="basedatasize">Base size of the generated files; defaults to blocksize * 1024 when zero or negative</param>
/// <param name="modifyOptions">Optional callback that can adjust the option set before the run</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString() + "b";
    // Null-conditional invoke, for consistency with the other RunCommands overload
    modifyOptions?.Invoke(testopts);

    if (basedatasize <= 0)
        basedatasize = blocksize * 1024;

    // File sizes probe the boundaries around the base size (p/m = plus/minus)
    // and around the block size (s/l = smaller/larger multiples, b = exactly one block)
    var filenames = new Dictionary<string, int>
    {
        [""] = basedatasize,
        ["-0"] = 0,
        ["-1"] = 1,
        ["-p1"] = basedatasize + 1,
        ["-p2"] = basedatasize + 2,
        ["-p500"] = basedatasize + 500,
        ["-m1"] = basedatasize - 1,
        ["-m2"] = basedatasize - 2,
        ["-m500"] = basedatasize - 500,
        ["-s1"] = blocksize / 4 + 6,
        ["-s2"] = blocksize / 10 + 6,
        ["-l1"] = blocksize * 4 + 6,
        ["-l2"] = blocksize * 10 + 6,
        ["-bm1"] = blocksize - 1,
        ["-b"] = blocksize,
        ["-bp1"] = blocksize + 1
    };

    // First generation: zero-filled files under an "a" prefix
    var data = new byte[filenames.Select(x => x.Value).Max()];
    foreach (var k in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + k.Key), data.Take(k.Value).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    // Exercise listing of the first backup (result intentionally unused)
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
        c.List("*");

    // Second generation: same sizes, shared random content, "b" prefix
    new Random().NextBytes(data);
    foreach (var k in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Third generation: per-file random content, "c" prefix
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: all three generations plus the folder entry
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Listing without the local database must give the same answer
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Recreate the local database from the remote data into a fresh file
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb))
        File.Delete(newdb);
    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    // Each version must contain only the generations present at backup time
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore into a clean folder, forcing all data from the remote store
    if (Directory.Exists(RESTOREFOLDER))
        Directory.Delete(RESTOREFOLDER, true);
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Partial restore of only the "a"-generation files
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}
/// <summary>
/// Running the unit test confirms the correctness of duplicati
/// </summary>
/// <param name="folders">The folders to backup. Folder at index 0 is the base, all others are incrementals</param>
/// <param name="options">The Duplicati options applied to all backup, list and restore operations</param>
/// <param name="target">The target destination for the backups</param>
public static void RunTest(string[] folders, Dictionary<string, string> options, string target)
{
    // All temporary data and the log file live next to the test assembly
    string tempdir = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location), "tempdir");
    string logfilename = System.IO.Path.Combine(tempdir, string.Format("unittest-{0}.log", Library.Utility.Utility.SerializeDateTime(DateTime.Now)));

    // Best-effort cleanup of any previous run; a failure here should not abort the test
    try
    {
        if (System.IO.Directory.Exists(tempdir))
        {
            System.IO.Directory.Delete(tempdir, true);
        }

        System.IO.Directory.CreateDirectory(tempdir);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Failed to clean tempdir: {0}", ex);
    }

    using (var log = new LogHelper(logfilename))
    using (Log.StartScope(log, LogMessageType.Profiling))
    {
        //Filter empty entries, commonly occuring with copy/paste and newlines
        folders = (from x in folders
                   where !string.IsNullOrWhiteSpace(x)
                   select Environment.ExpandEnvironmentVariables(x)).ToArray();

        // Each entry may itself be a PathSeparator-joined list of folders; all must exist
        foreach (var f in folders)
        {
            foreach (var n in f.Split(new char[] { System.IO.Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries))
            {
                if (!System.IO.Directory.Exists(n))
                {
                    throw new Exception(string.Format("Missing source folder: {0}", n));
                }
            }
        }

        // Redirect all temp files into tempdir so left-overs can be detected at the end
        Duplicati.Library.Utility.TempFolder.SystemTempPath = tempdir;

        //Set some defaults
        if (!options.ContainsKey("passphrase"))
        {
            options["passphrase"] = "secret password!";
        }

        if (!options.ContainsKey("prefix"))
        {
            options["prefix"] = "duplicati_unittest";
        }

        //We want all messages in the log
        options["log-file-log-level"] = LogMessageType.Profiling.ToString();

        //We cannot rely on USN numbering, but we can use USN enumeration
        //options["disable-usn-diff-check"] = "true";

        //We use precise times
        options["disable-time-tolerance"] = "true";

        //We need all sets, even if they are unchanged
        options["upload-unchanged-backups"] = "true";

        // Flags controlling which phases run; the corresponding pseudo-options are
        // consumed (removed) so they are not passed on to the controller
        bool skipfullrestore = false;
        bool skippartialrestore = false;
        bool skipverify = false;
        if (Utility.ParseBoolOption(options, "unittest-backuponly"))
        {
            skipfullrestore = true;
            skippartialrestore = true;
            options.Remove("unittest-backuponly");
        }

        if (Utility.ParseBoolOption(options, "unittest-skip-partial-restore"))
        {
            skippartialrestore = true;
            options.Remove("unittest-skip-partial-restore");
        }

        if (Utility.ParseBoolOption(options, "unittest-skip-full-restore"))
        {
            skipfullrestore = true;
            options.Remove("unittest-skip-full-restore");
        }

        if (Utility.ParseBoolOption(options, "unittest-skip-verify"))
        {
            skipverify = true;
            options.Remove("unittest-skip-verify");
        }

        var verifymetadata = !Utility.ParseBoolOption(options, "skip-metadata");

        using (new Timer(LOGTAG, "UnitTest", "Total unittest"))
        using (TempFolder tf = new TempFolder())
        {
            // Start each run with a fresh local database
            options["dbpath"] = System.IO.Path.Combine(tempdir, "unittest.sqlite");
            if (System.IO.File.Exists(options["dbpath"]))
            {
                System.IO.File.Delete(options["dbpath"]);
            }

            if (string.IsNullOrEmpty(target))
            {
                target = "file://" + tf;
            }
            else
            {
                // A remote target was supplied; wipe any backups left from earlier runs
                BasicSetupHelper.ProgressWriteLine("Removing old backups");
                Dictionary<string, string> tmp = new Dictionary<string, string>(options);
                tmp["keep-versions"] = "0";
                tmp["force"] = "";
                tmp["allow-full-removal"] = "";

                using (new Timer(LOGTAG, "CleanupExisting", "Cleaning up any existing backups"))
                    try
                    {
                        using (var bk = Duplicati.Library.DynamicLoader.BackendLoader.GetBackend(target, options))
                            foreach (var f in bk.List())
                            {
                                if (!f.IsFolder)
                                {
                                    bk.Delete(f.Name);
                                }
                            }
                    }
                    catch (Duplicati.Library.Interface.FolderMissingException)
                    {
                        // No backup folder on the remote yet - nothing to clean up
                    }
            }

            log.Backupset = "Backup " + folders[0];

            // When restores are enabled, all backups are taken from one staging folder
            // so every backup version shares the same source path
            string fhtempsource = null;
            bool usingFHWithRestore = (!skipfullrestore || !skippartialrestore);

            using (var fhsourcefolder = usingFHWithRestore ? new Library.Utility.TempFolder() : null)
            {
                if (usingFHWithRestore)
                {
                    fhtempsource = fhsourcefolder;
                    TestUtils.CopyDirectoryRecursive(folders[0], fhsourcefolder);
                }

                RunBackup(usingFHWithRestore ? (string)fhsourcefolder : folders[0], target, options, folders[0]);

                for (int i = 1; i < folders.Length; i++)
                {
                    //options["passphrase"] = "bad password";
                    //If the backups are too close, we can't pick the right one :(
                    System.Threading.Thread.Sleep(1000 * 5);
                    log.Backupset = "Backup " + folders[i];

                    if (usingFHWithRestore)
                    {
                        // Replace the staging folder content with the next incremental source
                        System.IO.Directory.Delete(fhsourcefolder, true);
                        TestUtils.CopyDirectoryRecursive(folders[i], fhsourcefolder);
                    }

                    //Call function to simplify profiling
                    RunBackup(usingFHWithRestore ? (string)fhsourcefolder : folders[i], target, options, folders[i]);
                }
            }

            // Verify that no remote volume exceeds the configured volume size
            Duplicati.Library.Main.Options opts = new Duplicati.Library.Main.Options(options);
            using (Duplicati.Library.Interface.IBackend bk = Duplicati.Library.DynamicLoader.BackendLoader.GetBackend(target, options))
                foreach (Duplicati.Library.Interface.IFileEntry fe in bk.List())
                {
                    if (fe.Size > opts.VolumeSize)
                    {
                        string msg = string.Format("The file {0} is {1} bytes larger than allowed", fe.Name, fe.Size - opts.VolumeSize);
                        BasicSetupHelper.ProgressWriteLine(msg);
                        Log.WriteErrorMessage(LOGTAG, "RemoteTargetSize", null, msg);
                    }
                }

            // There must be exactly one fileset per source folder
            IList<DateTime> entries;
            using (var console = new CommandLine.ConsoleOutput(Console.Out, options))
            using (var i = new Duplicati.Library.Main.Controller(target, options, console))
                entries = (from n in i.List().Filesets select n.Time.ToLocalTime()).ToList();

            if (entries.Count != folders.Length)
            {
                StringBuilder sb = new StringBuilder();
                sb.AppendLine("Entry count: " + entries.Count.ToString());
                sb.Append(string.Format("Found {0} filelists but there were {1} source folders", entries.Count, folders.Length));
                throw new Exception("Filename parsing problem, or corrupt storage: " + sb);
            }

            if (!skipfullrestore || !skippartialrestore)
            {
                // Restore every version; entries are newest-first, so version i maps
                // to the timestamp at entries[entries.Count - i - 1]
                for (int i = 0; i < entries.Count; i++)
                {
                    using (TempFolder ttf = new TempFolder())
                    {
                        log.Backupset = "Restore " + folders[i];
                        BasicSetupHelper.ProgressWriteLine("Restoring the copy: " + folders[i]);

                        options["time"] = entries[entries.Count - i - 1].ToString();
                        string[] actualfolders = folders[i].Split(System.IO.Path.PathSeparator);

                        if (!skippartialrestore)
                        {
                            BasicSetupHelper.ProgressWriteLine("Partial restore of: " + folders[i]);
                            using (TempFolder ptf = new TempFolder())
                            {
                                // Pick up to 15 random files from the backup for the partial restore
                                List<string> testfiles = new List<string>();
                                using (new Timer(LOGTAG, "ExtractFileList", "Extract list of files from" + folders[i]))
                                {
                                    List<string> sourcefiles;
                                    using (var console = new CommandLine.ConsoleOutput(Console.Out, options))
                                    using (var inst = new Library.Main.Controller(target, options, console))
                                        sourcefiles = (from n in inst.List("*").Files select n.Path).ToList();

                                    //Remove all folders from list
                                    for (int j = 0; j < sourcefiles.Count; j++)
                                    {
                                        if (sourcefiles[j].EndsWith(System.IO.Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal))
                                        {
                                            sourcefiles.RemoveAt(j);
                                            j--;
                                        }
                                    }

                                    int testfilecount = 15;
                                    Random r = new Random();
                                    while (testfilecount-- > 0 && sourcefiles.Count > 0)
                                    {
                                        int rn = r.Next(0, sourcefiles.Count);
                                        testfiles.Add(sourcefiles[rn]);
                                        sourcefiles.RemoveAt(rn);
                                    }
                                }

                                //Add all folders to avoid warnings in restore log
                                int c = testfiles.Count;
                                Dictionary<string, string> partialFolders = new Dictionary<string, string>(Utility.ClientFilenameStringComparer);
                                for (int j = 0; j < c; j++)
                                {
                                    string f = testfiles[j];

                                    if (!f.StartsWith(usingFHWithRestore ? fhtempsource : folders[i], Utility.ClientFilenameStringComparison))
                                    {
                                        throw new Exception(string.Format("Unexpected file found: {0}, path is not a subfolder for {1}", f, folders[i]));
                                    }

                                    // Register every ancestor folder of the selected file
                                    f = f.Substring(Utility.AppendDirSeparator(usingFHWithRestore ? fhtempsource : folders[i]).Length);
                                    do
                                    {
                                        f = System.IO.Path.GetDirectoryName(f);
                                        partialFolders[Utility.AppendDirSeparator(f)] = null;
                                    } while (f.IndexOf(System.IO.Path.DirectorySeparatorChar) > 0);
                                }

                                if (partialFolders.ContainsKey(""))
                                {
                                    partialFolders.Remove("");
                                }

                                if (partialFolders.ContainsKey(System.IO.Path.DirectorySeparatorChar.ToString()))
                                {
                                    partialFolders.Remove(System.IO.Path.DirectorySeparatorChar.ToString());
                                }

                                // Build the restore filter: ancestor folders, selected files and the root
                                List<string> filterlist;

                                var tfe = Utility.AppendDirSeparator(usingFHWithRestore ? fhtempsource : folders[i]);

                                filterlist = (from n in partialFolders.Keys
                                              where !string.IsNullOrWhiteSpace(n) && n != System.IO.Path.DirectorySeparatorChar.ToString()
                                              select Utility.AppendDirSeparator(System.IO.Path.Combine(tfe, n)))
                                              .Union(testfiles) //Add files with full path
                                              .Union(new string[] { tfe }) //Ensure root folder is included
                                              .Distinct()
                                              .ToList();

                                testfiles = (from n in testfiles select n.Substring(tfe.Length)).ToList();

                                //Call function to simplify profiling
                                RunPartialRestore(folders[i], target, ptf, options, filterlist.ToArray());

                                if (!skipverify)
                                {
                                    //Call function to simplify profiling
                                    BasicSetupHelper.ProgressWriteLine("Verifying partial restore of: " + folders[i]);
                                    VerifyPartialRestore(folders[i], testfiles, actualfolders, ptf, folders[0], verifymetadata);
                                }
                            }
                        }

                        if (!skipfullrestore)
                        {
                            //Call function to simplify profiling
                            RunRestore(folders[i], target, ttf, options);

                            if (!skipverify)
                            {
                                //Call function to simplify profiling
                                BasicSetupHelper.ProgressWriteLine("Verifying the copy: " + folders[i]);
                                VerifyFullRestore(folders[i], actualfolders, new string[] { ttf }, verifymetadata);
                            }
                        }
                    }
                }
            }

            // Report any temp files left behind by the operations above
            foreach (string s in Utility.EnumerateFiles(tempdir))
            {
                if (s == options["dbpath"])
                {
                    continue;
                }

                if (s == logfilename)
                {
                    continue;
                }

                if (s.StartsWith(Utility.AppendDirSeparator(tf), StringComparison.Ordinal))
                {
                    continue;
                }

                Log.WriteWarningMessage(LOGTAG, "LeftOverTempFile", null, "Found left-over temp file: {0}", s.Substring(tempdir.Length));
                BasicSetupHelper.ProgressWriteLine("Found left-over temp file: {0} -> {1}", s.Substring(tempdir.Length),
#if DEBUG
                    TempFile.GetStackTraceForTempFile(System.IO.Path.GetFileName(s))
#else
                    System.IO.Path.GetFileName(s)
#endif
                );
            }

            // Report any left-over temp folders as well
            foreach (string s in Utility.EnumerateFolders(tempdir))
            {
                if (!s.StartsWith(Utility.AppendDirSeparator(tf), StringComparison.Ordinal) && Utility.AppendDirSeparator(s) != Utility.AppendDirSeparator(tf) && Utility.AppendDirSeparator(s) != Utility.AppendDirSeparator(tempdir))
                {
                    Log.WriteWarningMessage(LOGTAG, "LeftOverTempFolder", null, "Found left-over temp folder: {0}", s.Substring(tempdir.Length));
                    BasicSetupHelper.ProgressWriteLine("Found left-over temp folder: {0}", s.Substring(tempdir.Length));
                }
            }
        }
    }

    // Summarize the outcome recorded by the log helper
    if (LogHelper.ErrorCount > 0)
    {
        BasicSetupHelper.ProgressWriteLine("Unittest completed, but with {0} errors, see logfile for details", LogHelper.ErrorCount);
    }
    else if (LogHelper.WarningCount > 0)
    {
        BasicSetupHelper.ProgressWriteLine("Unittest completed, but with {0} warnings, see logfile for details", LogHelper.WarningCount);
    }
    else
    {
        BasicSetupHelper.ProgressWriteLine("Unittest completed successfully - Have some cake!");
    }

    System.Diagnostics.Debug.Assert(LogHelper.ErrorCount == 0);
}
/// <summary>
/// Backs up three growing subsets of generated files, purges individual files
/// in several ways (all versions, single version, multi-file), and verifies
/// the fileset and file counts after each step and after a final backup.
/// </summary>
public void PurgeTest()
{
    var blocksize = 1024 * 10;
    var basedatasize = 0;
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";

    var filenames = BorderTests.WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize).Select(x => "a" + x.Key).ToList();

    // Three cumulative subsets: first third, first two thirds, all files
    var round1 = filenames.Take(filenames.Count / 3).ToArray();
    var round2 = filenames.Take((filenames.Count / 3) * 2).ToArray();
    var round3 = filenames;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round1.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        // Fix: expected value goes first in Assert.AreEqual (arguments were reversed)
        Assert.AreEqual(round1.Length, res.AddedFiles);
    }

    // Sleep between backups so each fileset gets a distinct timestamp
    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER }, new Library.Utility.FilterExpression(round2.Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        // Only the files added since round1 count as new
        Assert.AreEqual(round2.Length - round1.Length, res.AddedFiles);
    }

    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(filenames.Count - round2.Length, res.AddedFiles);
    }

    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));
    var last_ts = DateTime.Now;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { list_sets_only = true }), null))
    {
        var inf = c.List();
        Assert.AreEqual(0, inf.Errors.Count());
        Assert.AreEqual(0, inf.Warnings.Count());
        var filesets = inf.Filesets.Count();
        Assert.AreEqual(3, filesets, "Incorrect number of initial filesets");
    }

    // The +1 accounts for the folder entry in the listing
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List("*");
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        var filecount = listResults.Files.Count();
        Assert.AreEqual(filenames.Count + 1, filecount, "Incorrect number of initial files");
    }

    var allversion_candidate = round1.First();
    var single_version_candidate = round1.Skip(1).First();

    // Purge one file from all three versions at once
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.PurgeFiles(new Library.Utility.FilterExpression("*" + Path.DirectorySeparatorChar + allversion_candidate));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(3, res.RewrittenFileLists, "Incorrect number of rewritten filesets after all-versions purge");
        Assert.AreEqual(3, res.RemovedFileCount, "Incorrect number of removed files after all-versions purge");
    }

    // Purge another file from one version at a time
    for (var i = 0; i < 3; i++)
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = i }), null))
        {
            var res = c.PurgeFiles(new Library.Utility.FilterExpression(Path.Combine(this.DATAFOLDER, single_version_candidate)));
            Assert.AreEqual(0, res.Errors.Count());
            Assert.AreEqual(0, res.Warnings.Count());
            Assert.AreEqual(1, res.RewrittenFileLists, "Incorrect number of rewritten filesets after single-versions purge");
            Assert.AreEqual(1, res.RemovedFileCount, "Incorrect number of removed files after single-versions purge");
        }
    }

    // Purge two files that exist in the last two versions only
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.PurgeFiles(new Library.Utility.FilterExpression(round2.Skip(round1.Length).Take(2).Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(2, res.RewrittenFileLists, "Incorrect number of rewritten filesets after 2-versions purge");
        Assert.AreEqual(4, res.RemovedFileCount, "Incorrect number of removed files after 2-versions purge");
    }

    // Purge two files that exist in the newest version only
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var res = c.PurgeFiles(new Library.Utility.FilterExpression(round3.Skip(round2.Length).Take(2).Select(x => "*" + Path.DirectorySeparatorChar + x)));
        Assert.AreEqual(0, res.Errors.Count());
        Assert.AreEqual(0, res.Warnings.Count());
        Assert.AreEqual(1, res.RewrittenFileLists, "Incorrect number of rewritten filesets after 1-versions purge");
        Assert.AreEqual(2, res.RemovedFileCount, "Incorrect number of removed files after 1-versions purge");
    }

    // Since we make the operations back-to-back, the purge timestamp can drift beyond the current time
    var wait_target = last_ts.AddSeconds(10) - DateTime.Now;
    if (wait_target.TotalMilliseconds > 0)
    {
        System.Threading.Thread.Sleep(wait_target);
    }

    // 6 files were purged in total; the folder entry is still listed
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var listinfo = c.List("*");
        Assert.AreEqual(0, listinfo.Errors.Count());
        Assert.AreEqual(0, listinfo.Warnings.Count());
        var filecount = listinfo.Files.Count();
        listinfo = c.List();
        Assert.AreEqual(0, listinfo.Errors.Count());
        Assert.AreEqual(0, listinfo.Warnings.Count());
        var filesets = listinfo.Filesets.Count();

        Assert.AreEqual(3, filesets, "Incorrect number of filesets after purge");
        Assert.AreEqual(filenames.Count - 6 + 1, filecount, "Incorrect number of files after purge");
    }

    // A new backup should re-add the purged files in a fourth fileset
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var listinfo = c.List("*");
        Assert.AreEqual(0, listinfo.Errors.Count());
        Assert.AreEqual(0, listinfo.Warnings.Count());
        var files = listinfo.Files.ToArray();
        var filecount = files.Length;
        listinfo = c.List();
        Assert.AreEqual(0, listinfo.Errors.Count());
        Assert.AreEqual(0, listinfo.Warnings.Count());
        var filesets = listinfo.Filesets.ToArray();

        Console.WriteLine("Listing final version information");
        Console.WriteLine("Versions:");
        Console.WriteLine("  " + string.Join(Environment.NewLine + "  ", filesets.Select(x => string.Format("{0}: {1}, {2} {3}", x.Version, x.Time, x.FileCount, x.FileSizes))));
        Console.WriteLine("Files:");
        Console.WriteLine("  " + string.Join(Environment.NewLine + "  ", files.Select(x => string.Format("{0}: {1}", x.Path, string.Join(" - ", x.Sizes.Select(y => y.ToString()))))));

        Assert.AreEqual(4, filesets.Length, "Incorrect number of filesets after final backup");
        Assert.AreEqual(filenames.Count + 1, filecount, "Incorrect number of files after final backup");
    }
}
/// <summary>
/// Exercises backup, list, repair and restore across three backup generations
/// for the supplied block size.
/// </summary>
/// <param name="blocksize">The block size in bytes, passed as the --blocksize option</param>
/// <param name="basedatasize">Base size of the generated test files</param>
/// <param name="modifyOptions">Optional hook for adjusting the option dictionary before running</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var opts = TestOptions;
    opts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(opts);

    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    // First generation backup of the generated files
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts, null))
    {
        var backupResult = controller.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResult.Errors.Count());
        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    // After the first backup we remove the --blocksize argument as that should be auto-set
    opts.Remove("blocksize");
    opts.Remove("block-hash-algorithm");
    opts.Remove("file-hash-algorithm");

    // Listing the newest version must work without errors or warnings
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts.Expand(new { version = 0 }), null))
    {
        var listResult = controller.List("*");
        Assert.AreEqual(0, listResult.Errors.Count());
        Assert.AreEqual(0, listResult.Warnings.Count());
    }

    // Second generation: same sizes, shared random content, "b" prefix
    var payload = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(payload);
    foreach (var entry in filenames)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + entry.Key), payload.Take(entry.Value).ToArray());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts, null))
    {
        var backupResult = controller.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResult.Errors.Count());
        Assert.AreEqual(0, backupResult.Warnings.Count());

        // With timestamp checks enabled, only the new "b" files are opened,
        // while both generations are examined
        if (!Library.Utility.Utility.ParseBoolOption(opts, "disable-filetime-check"))
        {
            if (backupResult.OpenedFiles != filenames.Count)
                throw new Exception($"Opened {backupResult.OpenedFiles}, but should open {filenames.Count}");
            if (backupResult.ExaminedFiles != filenames.Count * 2)
                throw new Exception($"Examined {backupResult.ExaminedFiles}, but should examine open {filenames.Count * 2}");
        }
    }

    // Third generation: per-file random content, "c" prefix
    var rng = new Random();
    foreach (var entry in filenames)
    {
        rng.NextBytes(payload);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + entry.Key), payload.Take(entry.Value).ToArray());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts, null))
    {
        var backupResult = controller.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResult.Errors.Count());
        Assert.AreEqual(0, backupResult.Warnings.Count());
    }

    // Newest version: all three generations plus the folder entry
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts.Expand(new { version = 0 }), null))
    {
        var listResult = controller.List("*");
        Assert.AreEqual(0, listResult.Errors.Count());
        Assert.AreEqual(0, listResult.Warnings.Count());
        Assert.AreEqual((filenames.Count * 3) + 1, listResult.Files.Count());
    }

    // Listing without the local database must produce the same counts
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var listResult = controller.List("*");
        Assert.AreEqual(0, listResult.Errors.Count());
        Assert.AreEqual(0, listResult.Warnings.Count());
        Assert.AreEqual((filenames.Count * 3) + 1, listResult.Files.Count());
    }

    // Recreate the database into a fresh file and verify the filesets survive
    opts["dbpath"] = this.recreatedDatabaseFile;

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts, null))
    {
        var repairResult = controller.Repair();
        Assert.AreEqual(0, repairResult.Errors.Count());
        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts, null))
    {
        var listResult = controller.List();
        Assert.AreEqual(0, listResult.Errors.Count());
        Assert.AreEqual(0, listResult.Warnings.Count());
        Assert.AreEqual(3, listResult.Filesets.Count());
    }

    // Each version must contain only the generations present at backup time
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts.Expand(new { version = 2 }), null))
    {
        var listResult = controller.List("*");
        Assert.AreEqual(0, listResult.Errors.Count());
        Assert.AreEqual(0, listResult.Warnings.Count());
        Assert.AreEqual((filenames.Count * 1) + 1, listResult.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts.Expand(new { version = 1 }), null))
    {
        var listResult = controller.List("*");
        Assert.AreEqual(0, listResult.Errors.Count());
        Assert.AreEqual(0, listResult.Warnings.Count());
        Assert.AreEqual((filenames.Count * 2) + 1, listResult.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts.Expand(new { version = 0 }), null))
    {
        var listResult = controller.List("*");
        Assert.AreEqual(0, listResult.Errors.Count());
        Assert.AreEqual(0, listResult.Warnings.Count());
        Assert.AreEqual((filenames.Count * 3) + 1, listResult.Files.Count());
    }

    // Full restore, forcing all data to come from the remote store
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var restoreResult = controller.Restore(null);
        Assert.AreEqual(0, restoreResult.Errors.Count());
        Assert.AreEqual(0, restoreResult.Warnings.Count());
        Assert.AreEqual(filenames.Count * 3, restoreResult.RestoredFiles);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(opts, "skip-metadata"));

    // Partial restore of only the "a"-generation files into a scratch folder
    using (var scratch = new Library.Utility.TempFolder())
    {
        using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, opts.Expand(new { restore_path = (string)scratch, no_local_blocks = true }), null))
        {
            var restoreResult = controller.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(0, restoreResult.Errors.Count());
            Assert.AreEqual(0, restoreResult.Warnings.Count());
            Assert.AreEqual(filenames.Count, restoreResult.RestoredFiles);
        }
    }
}
// Verifies the interaction of --ignore-filenames, --exclude-empty-folders and
// exclude filters: each backup pass below removes more content from the newest
// fileset, and the folder/file counts reported by List() are checked after
// every pass.
public void TestEmptyFolderExclude()
{
    var source = DATAFOLDER;

    // Build the source layout:
    // Top level folder with no contents
    Directory.CreateDirectory(Path.Combine(source, "empty-toplevel"));
    // Top level folder with contents in one leaf
    Directory.CreateDirectory(Path.Combine(source, "toplevel"));
    // Empty folder
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "empty"));
    // Folder with an excluded file
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "filteredempty"));
    // Folder with contents
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "normal"));
    // Folder with excludefile
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "excludefile"));

    // Write a file that we will use for exclude target
    File.WriteAllLines(Path.Combine(source, "toplevel", "excludefile", "exclude.me"), new string[] { });
    File.WriteAllLines(Path.Combine(source, "toplevel", "excludefile", "anyfile.txt"), new string[] { "data" });
    // Write a file that we will filter
    File.WriteAllLines(Path.Combine(source, "toplevel", "filteredempty", "myfile.txt"), new string[] { "data" });
    // Write a file that we will not filter
    File.WriteAllLines(Path.Combine(source, "toplevel", "normal", "standard.txt"), new string[] { "data" });

    // Get the default options
    var testopts = TestOptions;

    // Create a fileset with all data present
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that we have 4 files and 7 folders
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        // Entries ending in the directory separator are folders; everything else is a file
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 7)
        {
            throw new Exception($"Initial condition not satisfied, found {folders} folders, but expected 7");
        }
        if (files != 4)
        {
            throw new Exception($"Initial condition not satisfied, found {files} files, but expected 4");
        }
    }

    // Toggle the exclude file, and build a new fileset
    // Sleep so the new fileset gets a clearly distinct timestamp (backups made
    // too close together cannot be told apart when picking a version)
    System.Threading.Thread.Sleep(5000);
    testopts["ignore-filenames"] = "exclude.me";
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that we have 2 files and 6 folders after excluding the "excludefile" folder
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 6)
        {
            throw new Exception($"Initial condition not satisfied, found {folders} folders, but expected 6");
        }
        if (files != 2)
        {
            throw new Exception($"Initial condition not satisfied, found {files} files, but expected 2");
        }
    }

    // Toggle empty folder excludes, and run a new backup to remove them
    System.Threading.Thread.Sleep(5000);
    testopts["exclude-empty-folders"] = "true";
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that the two empty folders are now removed
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 4)
        {
            throw new Exception($"Empty not satisfied, found {folders} folders, but expected 4");
        }
        if (files != 2)
        {
            throw new Exception($"Empty not satisfied, found {files} files, but expected 2");
        }
    }

    // Filter out one file and rerun the backup to exclude the folder
    System.Threading.Thread.Sleep(5000);
    var excludefilter = new Library.Utility.FilterExpression($"*{System.IO.Path.DirectorySeparatorChar}myfile.txt", false);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER }, excludefilter);
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that the empty folder is now removed
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 3)
        {
            throw new Exception($"Empty not satisfied, found {folders} folders, but expected 3");
        }
        if (files != 1)
        {
            throw new Exception($"Empty not satisfied, found {files} files, but expected 1");
        }
    }

    // Delete the one remaining file and check that we only have the top-level folder in the set
    System.Threading.Thread.Sleep(5000);
    File.Delete(Path.Combine(source, "toplevel", "normal", "standard.txt"));
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER }, excludefilter);
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check we now have only one folder and no files
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 1)
        {
            throw new Exception($"Empty not satisfied, found {folders} folders, but expected 1");
        }
        if (files != 0)
        {
            throw new Exception($"Empty not satisfied, found {files} files, but expected 0");
        }
    }
}
/// <summary>
/// Implements the "list" commandline action: lists the filesets, or the files
/// inside one or more filesets, from the backend given as the first argument.
/// </summary>
/// <param name="args">The backend URL followed by optional version/time/file patterns; mutated in place</param>
/// <param name="options">The commandline options; mutated in place to pass derived settings to the controller</param>
/// <param name="filter">An optional file filter; may be null</param>
/// <returns>Always zero</returns>
public static int List(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    filter = filter ?? new Duplicati.Library.Utility.FilterExpression();
    // With --list-sets-only any file filter is ignored; only filesets are reported
    if (Duplicati.Library.Utility.Utility.ParseBoolOption(options, "list-sets-only"))
    {
        filter = new Duplicati.Library.Utility.FilterExpression();
    }
    using (var i = new Library.Main.Controller(args[0], options, new ConsoleOutput(options)))
    {
        var backend = args[0];
        args.RemoveAt(0);
        if (args.Count == 1)
        {
            // A single numeric argument is treated as a version, and a single
            // time-like argument as a timestamp; both are converted into the
            // matching option plus a match-all file pattern
            long v;
            if (long.TryParse(args[0], out v))
            {
                if (!options.ContainsKey("version"))
                {
                    args.RemoveAt(0);
                    args.Add("*");
                    options["version"] = v.ToString();
                }
            }
            else if (args[0].IndexOfAny(new char[] { '*', '?' }) < 0 && !args[0].StartsWith("[", StringComparison.Ordinal))
            {
                try
                {
                    var t = Library.Utility.Timeparser.ParseTimeInterval(args[0], DateTime.Now, true);
                    args.RemoveAt(0);
                    args.Add("*");
                    options["time"] = t.ToString();
                }
                catch
                {
                    // Not a time expression either; fall through and treat it as a filename
                }
            }
        }

        // Prefix all filenames with "*/" so we search all folders
        for (var ix = 0; ix < args.Count; ix++)
        {
            if (args[ix].IndexOfAny(new char[] { '*', '?', System.IO.Path.DirectorySeparatorChar, System.IO.Path.AltDirectorySeparatorChar }) < 0 && !args[ix].StartsWith("[", StringComparison.Ordinal))
            {
                args[ix] = "*" + System.IO.Path.DirectorySeparatorChar.ToString() + args[ix];
            }
        }

        // Support for not adding the --auth-username if possible
        string dbpath;
        options.TryGetValue("dbpath", out dbpath);
        if (string.IsNullOrEmpty(dbpath))
        {
            dbpath = Library.Main.DatabaseLocator.GetDatabasePath(backend, new Duplicati.Library.Main.Options(options), false, true);
            if (dbpath != null)
            {
                options["dbpath"] = dbpath;
            }
        }

        // Don't ask for passphrase if we have a local db
        if (!string.IsNullOrEmpty(dbpath) && System.IO.File.Exists(dbpath) && !options.ContainsKey("no-encryption") && !Duplicati.Library.Utility.Utility.ParseBoolOption(options, "no-local-db"))
        {
            string passphrase;
            options.TryGetValue("passphrase", out passphrase);
            if (string.IsNullOrEmpty(passphrase))
            {
                options["no-encryption"] = "true";
            }
        }

        bool controlFiles = Library.Utility.Utility.ParseBoolOption(options, "control-files");
        options.Remove("control-files");
        var res = controlFiles ? i.ListControlFiles(args, filter) : i.List(args, filter);

        //If there are no files matching, and we are looking for one or more files,
        // try again with all-versions set
        // BUGFIX: file patterns can arrive both as positional args and as --include
        // filters, so "did the user ask for specific files" must consider the
        // combination of the two, not just the filter instance (which is empty
        // whenever only positional patterns were given)
        var compareFilter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(args), filter);
        var isRequestForFiles = !controlFiles && res.Filesets.Count() != 0 && (res.Files == null || res.Files.Count() == 0) && !compareFilter.Empty;
        if (isRequestForFiles && !Library.Utility.Utility.ParseBoolOption(options, "all-versions"))
        {
            Console.WriteLine("No files matching, looking in all versions");
            options["all-versions"] = "true";
            options.Remove("time");
            options.Remove("version");
            res = i.List(args, filter);
        }

        if (res.Filesets.Count() != 0 && (res.Files == null || res.Files.Count() == 0) && compareFilter.Empty)
        {
            Console.WriteLine("Listing filesets:");
            foreach (var e in res.Filesets)
            {
                if (e.FileCount >= 0)
                {
                    Console.WriteLine("{0}\t: {1} ({2} files, {3})", e.Version, e.Time, e.FileCount, Library.Utility.Utility.FormatSizeString(e.FileSizes));
                }
                else
                {
                    Console.WriteLine("{0}\t: {1}", e.Version, e.Time);
                }
            }
        }
        else if (isRequestForFiles)
        {
            // Specific files were requested, and not even the all-versions retry found any
            Console.WriteLine("No files matched expression");
        }
        else
        {
            if (res.Filesets.Count() == 0)
            {
                Console.WriteLine("No time or version matched a fileset");
            }
            else if (res.Files == null || res.Files.Count() == 0)
            {
                Console.WriteLine("Found {0} filesets, but no files matched", res.Filesets.Count());
            }
            else if (res.Filesets.Count() == 1)
            {
                var f = res.Filesets.First();
                Console.WriteLine("Listing contents {0} ({1}):", f.Version, f.Time);
                // Folder entries end with the path separator and have no size shown
                foreach (var e in res.Files)
                {
                    Console.WriteLine("{0} {1}", e.Path, e.Path.EndsWith(System.IO.Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal) ? "" : "(" + Library.Utility.Utility.FormatSizeString(e.Sizes.First()) + ")");
                }
            }
            else
            {
                Console.WriteLine("Listing files and versions:");
                foreach (var e in res.Files)
                {
                    Console.WriteLine(e.Path);
                    // Sizes pair index-wise with the filesets; negative sizes render as " - "
                    foreach (var nx in res.Filesets.Zip(e.Sizes, (a, b) => new { Index = a.Version, Time = a.Time, Size = b }))
                    {
                        Console.WriteLine("{0}\t: {1} {2}", nx.Index, nx.Time, nx.Size < 0 ? " - " : Library.Utility.Utility.FormatSizeString(nx.Size));
                    }
                    Console.WriteLine();
                }
            }
        }
    }
    return (0);
}
/// <summary>
/// Runs a backup/list/db-recreate/restore cycle with the given block size.
/// File sizes are chosen around the critical boundaries (0, 1, the block size
/// and the base data size, each with small offsets) to exercise blocking edge cases.
/// </summary>
/// <param name="blocksize">The deduplication block size, in bytes</param>
/// <param name="basedatasize">The base file size; when &lt;= 0 it defaults to blocksize * 1024</param>
/// <param name="modifyOptions">Optional hook for adjusting the options before the run</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString() + "b";
    if (modifyOptions != null)
    {
        modifyOptions(testopts);
    }
    if (basedatasize <= 0)
    {
        basedatasize = blocksize * 1024;
    }

    // Map of filename suffix -> file size; the suffix encodes the size relation
    // (p/m = base size plus/minus an offset, s/l = smaller/larger than a block,
    //  bm1/b/bp1 = just below / exactly at / just above the block size)
    var filenames = new Dictionary<string, int>();
    filenames[""] = basedatasize;
    filenames["-0"] = 0;
    filenames["-1"] = 1;
    filenames["-p1"] = basedatasize + 1;
    filenames["-p2"] = basedatasize + 2;
    filenames["-p500"] = basedatasize + 500;
    filenames["-m1"] = basedatasize - 1;
    filenames["-m2"] = basedatasize - 2;
    filenames["-m500"] = basedatasize - 500;
    filenames["-s1"] = blocksize / 4 + 6;
    filenames["-s2"] = blocksize / 10 + 6;
    filenames["-l1"] = blocksize * 4 + 6;
    filenames["-l2"] = blocksize * 10 + 6;
    filenames["-bm1"] = blocksize - 1;
    filenames["-b"] = blocksize;
    filenames["-bp1"] = blocksize + 1;

    // First fileset: all-zero data in files prefixed "a"
    var data = new byte[filenames.Select(x => x.Value).Max()];
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Second fileset: the same random data written to files prefixed "b"
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Third fileset: fresh random data per file, prefixed "c"
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Expected counts below are files-per-prefix (filenames.Count) plus 1 for the root folder entry
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest before deleting:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // The same listing must also work without consulting the local database
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest without db:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Recreate the database into a fresh "-recreated" file and verify all
    // three filesets and their contents are still reachable through it
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb))
    {
        File.Delete(newdb);
    }
    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("V2 after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("V1 after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("Newest after delete:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore from the recreated database, then verify against the source
    if (Directory.Exists(RESTOREFOLDER))
    {
        Directory.Delete(RESTOREFOLDER, true);
    }
    Directory.CreateDirectory(RESTOREFOLDER);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, true);

    // Partial restore of only the "a"-prefixed files into a temp folder
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}
/// <summary>
/// Implements the "list" commandline action: lists the filesets, or the files
/// inside one or more filesets, from the backend given as the first argument.
/// </summary>
/// <param name="args">The backend URL followed by optional version/time/file patterns; mutated in place</param>
/// <param name="options">The commandline options; mutated in place to pass derived settings to the controller</param>
/// <param name="filter">An optional file filter; may be null</param>
/// <returns>Always zero</returns>
public static int List(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    filter = filter ?? new Duplicati.Library.Utility.FilterExpression();
    // With --list-sets-only any file filter is ignored; only filesets are reported
    if (Duplicati.Library.Utility.Utility.ParseBoolOption(options, "list-sets-only"))
        filter = new Duplicati.Library.Utility.FilterExpression();

    using (var i = new Library.Main.Controller(args[0], options, new ConsoleOutput(options)))
    {
        var backend = args[0];
        args.RemoveAt(0);
        if (args.Count == 1)
        {
            // A single numeric argument is treated as a version, and a single
            // time-like argument as a timestamp; both are converted into the
            // matching option plus a match-all file pattern
            long v;
            if (long.TryParse(args[0], out v))
            {
                if (!options.ContainsKey("version"))
                {
                    args.RemoveAt(0);
                    args.Add("*");
                    options["version"] = v.ToString();
                }
            }
            else if (args[0].IndexOfAny(new char[] { '*', '?' }) < 0 && !args[0].StartsWith("["))
            {
                try
                {
                    var t = Library.Utility.Timeparser.ParseTimeInterval(args[0], DateTime.Now, true);
                    args.RemoveAt(0);
                    args.Add("*");
                    options["time"] = t.ToString();
                }
                catch
                {
                    // Not a time expression either; fall through and treat it as a filename
                }
            }
        }

        // Prefix all filenames with "*/" so we search all folders
        for (var ix = 0; ix < args.Count; ix++)
            if (args[ix].IndexOfAny(new char[] { '*', '?', System.IO.Path.DirectorySeparatorChar, System.IO.Path.AltDirectorySeparatorChar }) < 0 && !args[ix].StartsWith("["))
                args[ix] = "*" + System.IO.Path.DirectorySeparatorChar.ToString() + args[ix];

        // Support for not adding the --auth-username if possible
        string dbpath;
        options.TryGetValue("dbpath", out dbpath);
        if (string.IsNullOrEmpty(dbpath))
        {
            dbpath = Library.Main.DatabaseLocator.GetDatabasePath(backend, new Duplicati.Library.Main.Options(options), false, true);
            if (dbpath != null)
                options["dbpath"] = dbpath;
        }

        // Don't ask for passphrase if we have a local db
        if (!string.IsNullOrEmpty(dbpath) && System.IO.File.Exists(dbpath) && !options.ContainsKey("no-encryption") && !Duplicati.Library.Utility.Utility.ParseBoolOption(options, "no-local-db"))
        {
            string passphrase;
            options.TryGetValue("passphrase", out passphrase);
            if (string.IsNullOrEmpty(passphrase))
                options["no-encryption"] = "true";
        }

        bool controlFiles = Library.Utility.Utility.ParseBoolOption(options, "control-files");
        options.Remove("control-files");
        var res = controlFiles ? i.ListControlFiles(args, filter) : i.List(args, filter);

        //If there are no files matching, and we are looking for one or more files,
        // try again with all-versions set
        // File patterns may arrive as positional args or as --include filters,
        // so both are joined before testing whether specific files were requested
        var compareFilter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(args), filter);
        var isRequestForFiles = !controlFiles && res.Filesets.Count() != 0 && (res.Files == null || res.Files.Count() == 0) && !compareFilter.Empty;
        if (isRequestForFiles && !Library.Utility.Utility.ParseBoolOption(options, "all-versions"))
        {
            Console.WriteLine("No files matching, looking in all versions");
            options["all-versions"] = "true";
            options.Remove("time");
            options.Remove("version");
            res = i.List(args, filter);
        }

        if (res.Filesets.Count() != 0 && (res.Files == null || res.Files.Count() == 0) && compareFilter.Empty)
        {
            Console.WriteLine("Listing filesets:");
            foreach (var e in res.Filesets)
            {
                if (e.FileCount >= 0)
                    Console.WriteLine("{0}\t: {1} ({2} files, {3})", e.Version, e.Time, e.FileCount, Library.Utility.Utility.FormatSizeString(e.FileSizes));
                else
                    Console.WriteLine("{0}\t: {1}", e.Version, e.Time);
            }
        }
        else if (isRequestForFiles)
        {
            // Specific files were requested, and not even the all-versions retry found any
            Console.WriteLine("No files matched expression");
        }
        else
        {
            if (res.Filesets.Count() == 0)
            {
                Console.WriteLine("No time or version matched a fileset");
            }
            else if (res.Files == null || res.Files.Count() == 0)
            {
                Console.WriteLine("Found {0} filesets, but no files matched", res.Filesets.Count());
            }
            else if (res.Filesets.Count() == 1)
            {
                var f = res.Filesets.First();
                Console.WriteLine("Listing contents {0} ({1}):", f.Version, f.Time);
                // Folder entries end with the path separator and have no size shown
                foreach (var e in res.Files)
                    Console.WriteLine("{0} {1}", e.Path, e.Path.EndsWith(System.IO.Path.DirectorySeparatorChar.ToString()) ? "" : "(" + Library.Utility.Utility.FormatSizeString(e.Sizes.First()) + ")");
            }
            else
            {
                Console.WriteLine("Listing files and versions:");
                foreach (var e in res.Files)
                {
                    Console.WriteLine(e.Path);
                    // Sizes pair index-wise with the filesets; a negative size is
                    // rendered as " - " (presumably: not present in that version — confirm)
                    foreach (var nx in res.Filesets.Zip(e.Sizes, (a, b) => new { Index = a.Version, Time = a.Time, Size = b }))
                        Console.WriteLine("{0}\t: {1} {2}", nx.Index, nx.Time, nx.Size < 0 ? " - " : Library.Utility.Utility.FormatSizeString(nx.Size));
                    Console.WriteLine();
                }
            }
        }
    }
    return 0;
}
/// <summary>
/// Running the unit test confirms the correctness of duplicati
/// </summary>
/// <param name="folders">The folders to backup. Folder at index 0 is the base, all others are incrementals</param>
/// <param name="options">The options passed to all backup and restore operations; mutated with test defaults</param>
/// <param name="target">The target destination for the backups</param>
public static void RunTest(string[] folders, Dictionary<string, string> options, string target)
{
    // Swap the current log for a fresh per-run log file, disposing the old one best-effort
    var oldlog = Log.CurrentLog as IDisposable;
    Log.CurrentLog = null;
    if (oldlog != null)
        try { oldlog.Dispose(); }
        catch { }

    LogHelper log = new LogHelper(string.Format("unittest-{0}.log", Library.Utility.Utility.SerializeDateTime(DateTime.Now)));
    Log.CurrentLog = log;
    Log.LogLevel = Duplicati.Library.Logging.LogMessageType.Profiling;

    string tempdir = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location), "tempdir");

    //Filter empty entries, commonly occuring with copy/paste and newlines
    folders = (from x in folders
               where !string.IsNullOrWhiteSpace(x)
               select Library.Utility.Utility.ExpandEnvironmentVariables(x)).ToArray();

    //Expand the tilde to home folder on Linux/OSX
    if (Utility.IsClientLinux)
        folders = (from x in folders
                   select x.Replace("~", Environment.GetFolderPath(Environment.SpecialFolder.Personal))).ToArray();

    // Each entry may itself be a path-separator joined list of folders; all must exist
    foreach (var f in folders)
        foreach (var n in f.Split(new char[] {System.IO.Path.PathSeparator}, StringSplitOptions.RemoveEmptyEntries))
            if (!System.IO.Directory.Exists(n))
                throw new Exception(string.Format("Missing source folder: {0}", n));

    try
    {
        if (System.IO.Directory.Exists(tempdir))
            System.IO.Directory.Delete(tempdir, true);
        System.IO.Directory.CreateDirectory(tempdir);
    }
    catch (Exception ex)
    {
        // Best-effort cleanup; a stale tempdir only causes extra left-over warnings later
        Log.WriteMessage("Failed to clean tempdir", LogMessageType.Error, ex);
    }
    Duplicati.Library.Utility.TempFolder.SystemTempPath = tempdir;

    //Set some defaults
    if (!options.ContainsKey("passphrase"))
        options["passphrase"] = "secret password!";
    if (!options.ContainsKey("prefix"))
        options["prefix"] = "duplicati_unittest";

    //We want all messages in the log
    options["log-level"] = LogMessageType.Profiling.ToString();
    //We cannot rely on USN numbering, but we can use USN enumeration
    //options["disable-usn-diff-check"] = "true";
    //We use precise times
    options["disable-time-tolerance"] = "true";
    //We need all sets, even if they are unchanged
    options["upload-unchanged-backups"] = "true";

    // "unittest-*" options select which phases (backup/partial restore/full restore/verify)
    // to run and are consumed here so they never reach the controller
    bool skipfullrestore = false;
    bool skippartialrestore = false;
    bool skipverify = false;
    if (Utility.ParseBoolOption(options, "unittest-backuponly"))
    {
        skipfullrestore = true;
        skippartialrestore = true;
        options.Remove("unittest-backuponly");
    }
    if (Utility.ParseBoolOption(options, "unittest-skip-partial-restore"))
    {
        skippartialrestore = true;
        options.Remove("unittest-skip-partial-restore");
    }
    if (Utility.ParseBoolOption(options, "unittest-skip-full-restore"))
    {
        skipfullrestore = true;
        options.Remove("unittest-skip-full-restore");
    }
    if (Utility.ParseBoolOption(options, "unittest-skip-verify"))
    {
        skipverify = true;
        options.Remove("unittest-skip-verify");
    }
    var verifymetadata = !Utility.ParseBoolOption(options, "skip-metadata");

    using (new Timer("Total unittest"))
    using (TempFolder tf = new TempFolder())
    {
        options["dbpath"] = System.IO.Path.Combine(tempdir, "unittest.sqlite");
        if (System.IO.File.Exists(options["dbpath"]))
            System.IO.File.Delete(options["dbpath"]);

        if (string.IsNullOrEmpty(target))
        {
            target = "file://" + tf;
        }
        else
        {
            // A real backend target was given; wipe any previous backup files from it
            Console.WriteLine("Removing old backups");
            Dictionary<string, string> tmp = new Dictionary<string, string>(options);
            tmp["keep-versions"] = "0";
            tmp["force"] = "";
            tmp["allow-full-removal"] = "";
            using (new Timer("Cleaning up any existing backups"))
                try
                {
                    using (var bk = Duplicati.Library.DynamicLoader.BackendLoader.GetBackend(target, options))
                        foreach (var f in bk.List())
                            if (!f.IsFolder)
                                bk.Delete(f.Name);
                }
                catch (Duplicati.Library.Interface.FolderMissingException)
                {
                    // Nothing to clean if the backend folder does not exist yet
                }
        }

        log.Backupset = "Backup " + folders[0];
        string fhtempsource = null;
        // When restores will run, each source folder is copied into one shared temp
        // folder so every fileset appears to come from the same source path
        bool usingFHWithRestore = (!skipfullrestore || !skippartialrestore);

        using (var fhsourcefolder = usingFHWithRestore ? new Library.Utility.TempFolder() : null)
        {
            if (usingFHWithRestore)
            {
                fhtempsource = fhsourcefolder;
                TestUtils.CopyDirectoryRecursive(folders[0], fhsourcefolder);
            }

            RunBackup(usingFHWithRestore ? (string)fhsourcefolder : folders[0], target, options, folders[0]);

            for (int i = 1; i < folders.Length; i++)
            {
                //options["passphrase"] = "bad password";
                //If the backups are too close, we can't pick the right one :(
                System.Threading.Thread.Sleep(1000 * 5);
                log.Backupset = "Backup " + folders[i];

                if (usingFHWithRestore)
                {
                    System.IO.Directory.Delete(fhsourcefolder, true);
                    TestUtils.CopyDirectoryRecursive(folders[i], fhsourcefolder);
                }

                //Call function to simplify profiling
                RunBackup(usingFHWithRestore ? (string)fhsourcefolder : folders[i], target, options, folders[i]);
            }
        }

        // Check that no remote volume exceeds the configured volume size
        Duplicati.Library.Main.Options opts = new Duplicati.Library.Main.Options(options);
        using (Duplicati.Library.Interface.IBackend bk = Duplicati.Library.DynamicLoader.BackendLoader.GetBackend(target, options))
            foreach (Duplicati.Library.Interface.IFileEntry fe in bk.List())
                if (fe.Size > opts.VolumeSize)
                {
                    string msg = string.Format("The file {0} is {1} bytes larger than allowed", fe.Name, fe.Size - opts.VolumeSize);
                    Console.WriteLine(msg);
                    Log.WriteMessage(msg, LogMessageType.Error);
                }

        // One fileset per source folder is expected
        IList<DateTime> entries;
        using (var i = new Duplicati.Library.Main.Controller(target, options, new CommandLine.ConsoleOutput(options)))
            entries = (from n in i.List().Filesets select n.Time.ToLocalTime()).ToList();

        if (entries.Count != folders.Length)
        {
            StringBuilder sb = new StringBuilder();
            sb.AppendLine("Entry count: " + entries.Count.ToString());
            sb.Append(string.Format("Found {0} filelists but there were {1} source folders", entries.Count, folders.Length));
            throw new Exception("Filename parsing problem, or corrupt storage: " + sb.ToString());
        }

        if (!skipfullrestore || !skippartialrestore)
        {
            // Restore every version; entries are newest-first, folders oldest-first,
            // hence the reversed index below
            for (int i = 0; i < entries.Count; i++)
            {
                using (TempFolder ttf = new TempFolder())
                {
                    log.Backupset = "Restore " + folders[i];
                    Console.WriteLine("Restoring the copy: " + folders[i]);

                    options["time"] = entries[entries.Count - i - 1].ToString();
                    string[] actualfolders = folders[i].Split(System.IO.Path.PathSeparator);

                    if (!skippartialrestore)
                    {
                        Console.WriteLine("Partial restore of: " + folders[i]);
                        using (TempFolder ptf = new TempFolder())
                        {
                            // Pick up to 15 random files from the fileset for the partial restore
                            List<string> testfiles = new List<string>();
                            using (new Timer("Extract list of files from" + folders[i]))
                            {
                                List<string> sourcefiles;
                                using (var inst = new Library.Main.Controller(target, options, new CommandLine.ConsoleOutput(options)))
                                    sourcefiles = (from n in inst.List("*").Files select n.Path).ToList();

                                //Remove all folders from list
                                for (int j = 0; j < sourcefiles.Count; j++)
                                    if (sourcefiles[j].EndsWith(System.IO.Path.DirectorySeparatorChar.ToString()))
                                    {
                                        sourcefiles.RemoveAt(j);
                                        j--;
                                    }

                                int testfilecount = 15;
                                Random r = new Random();
                                while (testfilecount-- > 0 && sourcefiles.Count > 0)
                                {
                                    int rn = r.Next(0, sourcefiles.Count);
                                    testfiles.Add(sourcefiles[rn]);
                                    sourcefiles.RemoveAt(rn);
                                }
                            }

                            //Add all folders to avoid warnings in restore log
                            int c = testfiles.Count;
                            Dictionary<string, string> partialFolders = new Dictionary<string, string>(Utility.ClientFilenameStringComparer);
                            for (int j = 0; j < c; j++)
                            {
                                string f = testfiles[j];

                                if (!f.StartsWith(usingFHWithRestore ? fhtempsource : folders[i], Utility.ClientFilenameStringComparision))
                                    throw new Exception(string.Format("Unexpected file found: {0}, path is not a subfolder for {1}", f, folders[i]));

                                // Collect every ancestor folder of the file, relative to the source root
                                f = f.Substring(Utility.AppendDirSeparator(usingFHWithRestore ? fhtempsource : folders[i]).Length);
                                do
                                {
                                    f = System.IO.Path.GetDirectoryName(f);
                                    partialFolders[Utility.AppendDirSeparator(f)] = null;
                                } while (f.IndexOf(System.IO.Path.DirectorySeparatorChar) > 0);
                            }

                            if (partialFolders.ContainsKey(""))
                                partialFolders.Remove("");
                            if (partialFolders.ContainsKey(System.IO.Path.DirectorySeparatorChar.ToString()))
                                partialFolders.Remove(System.IO.Path.DirectorySeparatorChar.ToString());

                            List<string> filterlist;
                            var tfe = Utility.AppendDirSeparator(usingFHWithRestore ? fhtempsource : folders[i]);
                            filterlist = (from n in partialFolders.Keys
                                          where !string.IsNullOrWhiteSpace(n) && n != System.IO.Path.DirectorySeparatorChar.ToString()
                                          select Utility.AppendDirSeparator(System.IO.Path.Combine(tfe, n)))
                                .Union(testfiles) //Add files with full path
                                .Union(new string[] { tfe }) //Ensure root folder is included
                                .Distinct()
                                .ToList();

                            testfiles = (from n in testfiles select n.Substring(tfe.Length)).ToList();

                            //Call function to simplify profiling
                            RunPartialRestore(folders[i], target, ptf, options, filterlist.ToArray());

                            if (!skipverify)
                            {
                                //Call function to simplify profiling
                                Console.WriteLine("Verifying partial restore of: " + folders[i]);
                                VerifyPartialRestore(folders[i], testfiles, actualfolders, ptf, folders[0], verifymetadata);
                            }
                        }
                    }

                    if (!skipfullrestore)
                    {
                        //Call function to simplify profiling
                        RunRestore(folders[i], target, ttf, options);

                        if (!skipverify)
                        {
                            //Call function to simplify profiling
                            Console.WriteLine("Verifying the copy: " + folders[i]);
                            VerifyFullRestore(folders[i], actualfolders, new string[] { ttf }, verifymetadata);
                        }
                    }
                }
            }
        }

        // Report any temp files/folders that the run failed to clean up
        foreach (string s in Utility.EnumerateFiles(tempdir))
        {
            if (s == options["dbpath"])
                continue;
            if (s.StartsWith(Utility.AppendDirSeparator(tf)))
                continue;

            Log.WriteMessage(string.Format("Found left-over temp file: {0}", s.Substring(tempdir.Length)), LogMessageType.Warning);
            Console.WriteLine("Found left-over temp file: {0} -> {1}", s.Substring(tempdir.Length),
#if DEBUG
                TempFile.GetStackTraceForTempFile(System.IO.Path.GetFileName(s))
#else
                System.IO.Path.GetFileName(s)
#endif
            );
        }

        foreach (string s in Utility.EnumerateFolders(tempdir))
            if (!s.StartsWith(Utility.AppendDirSeparator(tf)) && Utility.AppendDirSeparator(s) != Utility.AppendDirSeparator(tf) && Utility.AppendDirSeparator(s) != Utility.AppendDirSeparator(tempdir))
            {
                Log.WriteMessage(string.Format("Found left-over temp folder: {0}", s.Substring(tempdir.Length)), LogMessageType.Warning);
                Console.WriteLine("Found left-over temp folder: {0}", s.Substring(tempdir.Length));
            }
    }

    // NOTE(review): this cast-and-dispose throws NullReferenceException if
    // Log.CurrentLog is not a StreamLog at this point — confirm LogHelper
    // derives from StreamLog and nothing replaced the log during the run
    (Log.CurrentLog as StreamLog).Dispose();
    Log.CurrentLog = null;

    if (LogHelper.ErrorCount > 0)
        Console.WriteLine("Unittest completed, but with {0} errors, see logfile for details", LogHelper.ErrorCount);
    else if (LogHelper.WarningCount > 0)
        Console.WriteLine("Unittest completed, but with {0} warnings, see logfile for details", LogHelper.WarningCount);
    else
        Console.WriteLine("Unittest completed successfully - Have some cake!");
}
public void RunCommands()
{
    // End-to-end smoke test: two backups, listing with and without the local
    // database, then a database repair (recreate) followed by list checks on
    // both versions to confirm the recreated database matches the remote data.
    var testopts = TestOptions;

    var data = new byte[1024 * 1024 * 10];
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("In first backup:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Overwrite the buffer with random bytes so the second backup sees new content
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Newest version should contain the folder plus both files = 3 entries
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // The same listing must also work directly from the remote data
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Drop the local database and rebuild it from the remote volumes
    File.Delete(DBFILE);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        Assert.AreEqual(0, repairResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List();
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        // Fixed argument order: AreEqual(expected, actual) — was reversed,
        // which produces misleading failure messages.
        Assert.AreEqual(2, listResults.Filesets.Count());
    }

    // Oldest version: folder + file "a" = 2 entries
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Oldest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(2, r.Files.Count());
    }

    // Newest version is unchanged by the repair: still 3 entries
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }
}
/// <summary>
/// Runs a full backup/verify/recreate/restore cycle with a configurable block size.
/// Three backup generations are created (file sets "a*", "b*", "c*"); after the
/// first backup the blocksize/hash options are removed to verify they are
/// auto-detected, then the database is recreated into a new file and both
/// listing counts and restored file counts are checked against each version.
/// </summary>
/// <param name="blocksize">Block size in bytes; written as the "blocksize" option with a "b" (bytes) suffix.</param>
/// <param name="basedatasize">Base test-file size passed to WriteTestFilesToFolder; semantics of 0 are decided by that helper.</param>
/// <param name="modifyOptions">Optional hook to tweak the option dictionary before the first backup (e.g. change hash algorithms).</param>
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    if (modifyOptions != null)
    {
        modifyOptions(testopts);
    }

    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    // Listing the newest version here exercises the list path with the
    // auto-detected options; the result itself is intentionally unused.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Do a "touch" on files to trigger a re-scan, which should do nothing
    //foreach (var k in filenames)
    //if (File.Exists(Path.Combine(DATAFOLDER, "a" + k.Key)))
    //File.SetLastWriteTime(Path.Combine(DATAFOLDER, "a" + k.Key), DateTime.Now.AddSeconds(5));

    // Second generation: one "b"-prefixed copy of each test file, filled with
    // fresh random data (buffer sized to the largest test file).
    var data = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var r = c.Backup(new string[] { DATAFOLDER });
        // With filetime checking active, only the new "b*" files should be
        // opened, while all "a*" and "b*" files are examined.
        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            if (r.OpenedFiles != filenames.Count)
            {
                throw new Exception($"Opened {r.OpenedFiles}, but should open {filenames.Count}");
            }
            if (r.ExaminedFiles != filenames.Count * 2)
            {
                throw new Exception($"Examined {r.ExaminedFiles}, but should examine open {filenames.Count * 2}");
            }
        }
    }

    // Third generation: "c"-prefixed copies, each with its own random content
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version holds all three generations plus the folder entry
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest before deleting:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Same count must be reachable straight from the remote data, without the db
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest without db:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Recreate the database into a side-by-side "<name>-recreated.<ext>" file
    // so the original database stays untouched for comparison/debugging.
    var newdb = Path.Combine(Path.GetDirectoryName(DBFILE), Path.ChangeExtension(Path.GetFileNameWithoutExtension(DBFILE) + "-recreated", Path.GetExtension(DBFILE)));
    if (File.Exists(newdb))
    {
        File.Delete(newdb);
    }
    testopts["dbpath"] = newdb;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        c.Repair();

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, c.List().Filesets.Count());

    // Per-version counts against the recreated database:
    // version 2 (oldest) = 1 generation, version 1 = 2, version 0 = 3.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("V2 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("V1 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        //ProgressWriteLine("Newest after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Full restore (forcing remote blocks) must reproduce all 3*N files,
    // and the restored tree must match the source directory.
    if (Directory.Exists(RESTOREFOLDER))
    {
        Directory.Delete(RESTOREFOLDER, true);
    }
    Directory.CreateDirectory(RESTOREFOLDER);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(filenames.Count * 3, r.FilesRestored);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    // Partial restore: only the "a*" generation, into a temporary folder
    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(filenames.Count, r.FilesRestored);
        }
    }
}
/// <summary>
/// Backs up three generations of files whose sizes straddle the block size
/// (exact multiple, +/- 1, 2 and 500 bytes, plus sub-block and multi-block
/// files), then verifies listing counts per version before and after the
/// local database is deleted and recreated with Repair.
/// </summary>
/// <param name="blocksize">Block size in bytes, passed as the "blocksize" option.</param>
/// <param name="basedatasize">Base file size; when not positive, defaults to blocksize * 1024.</param>
private void RunCommands(int blocksize, int basedatasize = 0)
{
    var testopts = TestOptions;
    testopts["verbose"] = "true";
    testopts["blocksize"] = blocksize.ToString();

    if (basedatasize <= 0)
        basedatasize = blocksize * 1024;

    // Filename suffix -> file length. The variants probe block-boundary
    // handling: exact size, slightly over (+1/+2/+500), slightly under
    // (-1/-2/-500), smaller than one block (s1/s2) and spanning several
    // blocks (l1/l2).
    var layout = new KeyValuePair<string, int>[]
    {
        new KeyValuePair<string, int>("", basedatasize),
        new KeyValuePair<string, int>("-p1", basedatasize + 1),
        new KeyValuePair<string, int>("-p2", basedatasize + 2),
        new KeyValuePair<string, int>("-p500", basedatasize + 500),
        new KeyValuePair<string, int>("-m1", basedatasize - 1),
        new KeyValuePair<string, int>("-m2", basedatasize - 2),
        new KeyValuePair<string, int>("-m500", basedatasize - 500),
        new KeyValuePair<string, int>("-s1", blocksize / 4 + 6),
        new KeyValuePair<string, int>("-s2", blocksize / 10 + 6),
        new KeyValuePair<string, int>("-l1", blocksize * 4 + 6),
        new KeyValuePair<string, int>("-l2", blocksize * 10 + 6),
    };

    // First generation ("a*"): all files share the same (zeroed) buffer prefix
    var data = new byte[basedatasize + 500];
    foreach (var entry in layout)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "a" + entry.Key), data.Take(entry.Value).ToArray());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        controller.Backup(new string[] { DATAFOLDER });

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = controller.List("*");
        Console.WriteLine("In first backup:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Second generation ("b*"): one fresh random fill shared by all files
    new Random().NextBytes(data);
    foreach (var entry in layout)
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + entry.Key), data.Take(entry.Value).ToArray());

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        controller.Backup(new string[] { DATAFOLDER });

    // Third generation ("c*"): re-randomize the buffer before every file so
    // no two files share content
    foreach (var entry in layout)
    {
        new Random().NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + entry.Key), data.Take(entry.Value).ToArray());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        controller.Backup(new string[] { DATAFOLDER });

    // Newest version: 3 generations x 11 files + the folder entry = 34
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = controller.List("*");
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(34, r.Files.Count());
    }

    // Same count when listing directly from the remote data
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = controller.List("*");
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(34, r.Files.Count());
    }

    // Rebuild the local database from the backend and re-check every version
    File.Delete(DBFILE);
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        controller.Repair();

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
        Assert.AreEqual(3, controller.List().Filesets.Count());

    // version 2 = oldest (11 files + folder), version 1 = 22 + folder,
    // version 0 = newest (33 + folder)
    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = controller.List("*");
        Console.WriteLine("V2 after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(12, r.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = controller.List("*");
        Console.WriteLine("V1 after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(23, r.Files.Count());
    }

    using (var controller = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = controller.List("*");
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(34, r.Files.Count());
    }
}
public void RunCommands()
{
    // Smoke test: two backups of random data, listings with and without the
    // local database, then a delete + Repair (database recreate) followed by
    // list checks on both remaining versions.
    TestOptions["verbose"] = "true";

    var data = new byte[1024 * 1024 * 10];
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Backup(new string[] { DATAFOLDER });

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("In first backup:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Randomize the buffer so the second backup sees different content
    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b"), data);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Backup(new string[] { DATAFOLDER });

    // Newest version: folder + both files = 3 entries
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Same listing must work straight from the remote data
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Drop the local database and rebuild it from the backend
    File.Delete(DBFILE);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        c.Repair();

    // Fixed argument order: AreEqual(expected, actual) — was reversed, which
    // produces misleading failure messages.
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions, null))
        Assert.AreEqual(2, c.List().Filesets.Count());

    // Oldest version: folder + file "a" = 2 entries
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Oldest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(2, r.Files.Count());
    }

    // Newest version unchanged by the repair: still 3 entries
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, TestOptions.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }
}