public void RepairMissingIndexFiles(string noEncryption)
{
    Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions) { ["no-encryption"] = noEncryption };
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IBackupResults backupResults = c.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Delete all of the dindex files from the target folder.
    string[] dindexFiles = Directory.EnumerateFiles(this.TARGETFOLDER, "*dindex*").ToArray();
    Assert.Greater(dindexFiles.Length, 0);
    foreach (string f in dindexFiles)
    {
        File.Delete(f);
    }

    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        Assert.AreEqual(0, repairResults.Warnings.Count());
    }

    // EnumerateFiles returned full paths, so each deleted file can be checked
    // directly; the repair should have recreated every dindex file.
    foreach (string file in dindexFiles)
    {
        Assert.IsTrue(File.Exists(file));
    }
}
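// Note: RepairMissingIndexFiles takes its "no-encryption" value as a parameter, so it is
// presumably driven as a parameterized NUnit test. The attributes are not part of this
// excerpt; a plausible (hypothetical) form would be:
//
//     [Test]
//     [TestCase("true")]
//     [TestCase("false")]
//     public void RepairMissingIndexFiles(string noEncryption) { ... }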
private void RunCommands(int blocksize, int basedatasize = 0, Action<Dictionary<string, string>> modifyOptions = null)
{
    var testopts = TestOptions;
    testopts["blocksize"] = blocksize.ToString() + "b";
    modifyOptions?.Invoke(testopts);

    var filenames = WriteTestFilesToFolder(DATAFOLDER, blocksize, basedatasize);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    // After the first backup we remove the --blocksize argument as that should be auto-set
    testopts.Remove("blocksize");
    testopts.Remove("block-hash-algorithm");
    testopts.Remove("file-hash-algorithm");

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        IListResults listResults = c.List("*");
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        //Console.WriteLine("In first backup:");
        //Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    // Do a "touch" on files to trigger a re-scan, which should do nothing
    //foreach (var k in filenames)
    //    if (File.Exists(Path.Combine(DATAFOLDER, "a" + k.Key)))
    //        File.SetLastWriteTime(Path.Combine(DATAFOLDER, "a" + k.Key), DateTime.Now.AddSeconds(5));

    // Write a second set of files ("b" prefix) with the same sizes as the first set.
    var data = new byte[filenames.Select(x => x.Value).Max()];
    new Random().NextBytes(data);
    foreach (var k in filenames)
    {
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "b" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        var r = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());

        if (!Library.Utility.Utility.ParseBoolOption(testopts, "disable-filetime-check"))
        {
            if (r.OpenedFiles != filenames.Count)
                throw new Exception($"Opened {r.OpenedFiles}, but should open {filenames.Count}");
            if (r.ExaminedFiles != filenames.Count * 2)
                throw new Exception($"Examined {r.ExaminedFiles}, but should examine {filenames.Count * 2}");
        }
    }

    // Write a third set of files ("c" prefix), again with the same sizes.
    var rn = new Random();
    foreach (var k in filenames)
    {
        rn.NextBytes(data);
        File.WriteAllBytes(Path.Combine(DATAFOLDER, "c" + k.Key), data.Take(k.Value).ToArray());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest before deleting:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest without db:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    // Recreate the local database from the remote files and verify its contents.
    testopts["dbpath"] = this.recreatedDatabaseFile;

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        // TODO: This sometimes results in a "No block hash found for file: C:\projects\duplicati\testdata\backup-data\a-0" warning.
        // Because of this, we don't check for warnings here.
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List();
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        Assert.AreEqual(3, listResults.Filesets.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 2 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("V2 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 1) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("V1 after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 2) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        //ProgressWriteLine("Newest after delete:");
        //ProgressWriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual((filenames.Count * 3) + 1, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = RESTOREFOLDER, no_local_blocks = true }), null))
    {
        var r = c.Restore(null);
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Assert.AreEqual(filenames.Count * 3, r.RestoredFiles);
    }

    TestUtils.VerifyDir(DATAFOLDER, RESTOREFOLDER, !Library.Utility.Utility.ParseBoolOption(testopts, "skip-metadata"));

    using (var tf = new Library.Utility.TempFolder())
    {
        using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { restore_path = (string)tf, no_local_blocks = true }), null))
        {
            var r = c.Restore(new string[] { Path.Combine(DATAFOLDER, "a") + "*" });
            Assert.AreEqual(0, r.Errors.Count());
            Assert.AreEqual(0, r.Warnings.Count());
            Assert.AreEqual(filenames.Count, r.RestoredFiles);
        }
    }
}
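// RunCommands above relies on a WriteTestFilesToFolder helper that is not part of this
// excerpt. The sketch below shows the assumed contract, inferred from the call sites:
// write one file per interesting size into the folder, named with an "a" prefix plus a
// suffix key (so "a-0" matches the warning mentioned in the TODO), and return a map from
// suffix to file length. The exact size list, the fixed seed, and the collection type
// are illustrative assumptions, not the project's actual helper.
private static Dictionary<string, int> WriteTestFilesToFolderSketch(string folder, int blocksize, int basedatasize)
{
    if (basedatasize <= 0)
        basedatasize = blocksize * 10;

    // Sizes chosen to straddle block boundaries: empty, sub-block, exact, off-by-one.
    var filenames = new Dictionary<string, int>();
    foreach (int size in new[] { 0, 1, blocksize - 1, blocksize, blocksize + 1, basedatasize, basedatasize + 1 })
        filenames["-" + size] = size;

    var rng = new Random(42); // fixed seed keeps the sketch deterministic
    foreach (var kvp in filenames)
    {
        var data = new byte[kvp.Value];
        rng.NextBytes(data);
        File.WriteAllBytes(Path.Combine(folder, "a" + kvp.Key), data);
    }

    return filenames;
}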
public async Task FilesetFiles()
{
    // Choose a dblock size that is small enough so that more than one volume is needed.
    Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions)
    {
        ["dblock-size"] = "10mb",

        // This allows us to inspect the dlist files without needing the BackendManager
        // (which is inaccessible here) to decrypt them.
        ["no-encryption"] = "true"
    };

    // Run a full backup.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IBackupResults backupResults = c.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Run a partial backup.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(1, backupResults.Warnings.Count());
    }

    // Parse the dlist files on the remote target and map each fileset time to its
    // backup type (full or partial).
    Dictionary<DateTime, int> GetBackupTypesFromRemoteFiles(Controller c, out List<string> filelistFiles)
    {
        Dictionary<DateTime, int> map = new Dictionary<DateTime, int>();
        filelistFiles = new List<string>();

        IListRemoteResults remoteFiles = c.ListRemote();
        foreach (IFileEntry file in remoteFiles.Files)
        {
            IParsedVolume volume = VolumeBase.ParseFilename(file);
            if (volume != null && volume.FileType == RemoteVolumeType.Files)
            {
                string dlistFile = Path.Combine(this.TARGETFOLDER, volume.File.Name);
                filelistFiles.Add(dlistFile);
                VolumeBase.FilesetData filesetData = VolumeReaderBase.GetFilesetData(volume.CompressionModule, dlistFile, new Options(options));
                map[volume.Time] = filesetData.IsFullBackup ? BackupType.FULL_BACKUP : BackupType.PARTIAL_BACKUP;
            }
        }

        return map;
    }

    // Purge a file and verify that the fileset file exists in the new dlist files.
    List<string> dlistFiles;
    Dictionary<DateTime, int> backupTypeMap;
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IPurgeFilesResults purgeResults = c.PurgeFiles(new Library.Utility.FilterExpression($"*{this.fileSizes[0]}*"));
        Assert.AreEqual(0, purgeResults.Errors.Count());
        Assert.AreEqual(0, purgeResults.Warnings.Count());

        List<IListResultFileset> filesets = c.List().Filesets.ToList();
        Assert.AreEqual(2, filesets.Count);
        Assert.AreEqual(BackupType.FULL_BACKUP, filesets.Single(x => x.Version == 1).IsFullBackup);
        Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets.Single(x => x.Version == 0).IsFullBackup);

        backupTypeMap = GetBackupTypesFromRemoteFiles(c, out dlistFiles);
    }

    int[] backupTypes = backupTypeMap.OrderByDescending(x => x.Key).Select(x => x.Value).ToArray();
    Assert.AreEqual(2, backupTypes.Length);
    Assert.AreEqual(BackupType.FULL_BACKUP, backupTypes[1]);
    Assert.AreEqual(BackupType.PARTIAL_BACKUP, backupTypes[0]);

    // Remove the dlist files.
    foreach (string dlistFile in dlistFiles)
    {
        File.Delete(dlistFile);
    }

    // Run a repair and verify that the fileset file exists in the new dlist files.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        Assert.AreEqual(0, repairResults.Warnings.Count());

        List<IListResultFileset> filesets = c.List().Filesets.ToList();
        Assert.AreEqual(2, filesets.Count);
        Assert.AreEqual(BackupType.FULL_BACKUP, filesets.Single(x => x.Version == 1).IsFullBackup);
        Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets.Single(x => x.Version == 0).IsFullBackup);

        backupTypeMap = GetBackupTypesFromRemoteFiles(c, out _);
    }

    backupTypes = backupTypeMap.OrderByDescending(x => x.Key).Select(x => x.Value).ToArray();
    Assert.AreEqual(2, backupTypes.Length);
    Assert.AreEqual(BackupType.FULL_BACKUP, backupTypes[1]);
    Assert.AreEqual(BackupType.PARTIAL_BACKUP, backupTypes[0]);
}
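// FilesetFiles (and StopAfterCurrentFile below) call a RunPartialBackup helper that is
// not shown in this excerpt. The sketch below illustrates the assumed mechanism: start
// a backup and ask the controller to stop once the file currently being processed is
// done, which leaves a partial fileset behind. The use of Task.Run, the fixed delay,
// and the exact Stop signature are assumptions; the real helper presumably coordinates
// with the backup via controller callbacks rather than a timer.
private async Task<IBackupResults> RunPartialBackupSketch(Controller c)
{
    Task<IBackupResults> backupTask = Task.Run(() => c.Backup(new[] { this.DATAFOLDER }));

    // Give the backup a moment to get started before interrupting it (assumption:
    // one second is enough for at least one file to be in flight).
    await Task.Delay(TimeSpan.FromSeconds(1)).ConfigureAwait(false);
    c.Stop(allowCurrentFileToFinish: true); // assumed API for "stop after current file"

    return await backupTask.ConfigureAwait(false);
}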
public async Task StopAfterCurrentFile()
{
    // Choose a dblock size that is small enough so that more than one volume is needed.
    Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions) { ["dblock-size"] = "10mb" };

    // Run a complete backup.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IBackupResults backupResults = c.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());

        Assert.AreEqual(1, c.List().Filesets.Count());
        Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 0).IsFullBackup);
    }

    // Run a partial backup.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(1, backupResults.Warnings.Count());

        // If we interrupt the backup, the most recent Fileset should be marked as partial.
        Assert.AreEqual(2, c.List().Filesets.Count());
        Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 1).IsFullBackup);
        Assert.AreEqual(BackupType.PARTIAL_BACKUP, c.List().Filesets.Single(x => x.Version == 0).IsFullBackup);
    }

    // Restore files from the partial backup set.
    Dictionary<string, string> restoreOptions = new Dictionary<string, string>(options) { ["restore-path"] = this.RESTOREFOLDER };
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, restoreOptions, null))
    {
        IListResults lastResults = c.List("*");
        string[] partialVersionFiles = lastResults.Files.Select(x => x.Path).Where(x => !Utility.IsFolder(x, File.GetAttributes)).ToArray();
        Assert.GreaterOrEqual(partialVersionFiles.Length, 1);
        c.Restore(partialVersionFiles);

        foreach (string filepath in partialVersionFiles)
        {
            string filename = Path.GetFileName(filepath);
            TestUtils.AssertFilesAreEqual(filepath, Path.Combine(this.RESTOREFOLDER, filename ?? String.Empty), false, filename);
        }
    }

    // Recreating the database should preserve the backup types.
    File.Delete(this.DBFILE);
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        Assert.AreEqual(0, repairResults.Warnings.Count());

        Assert.AreEqual(2, c.List().Filesets.Count());
        Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 1).IsFullBackup);
        Assert.AreEqual(BackupType.PARTIAL_BACKUP, c.List().Filesets.Single(x => x.Version == 0).IsFullBackup);
    }

    // Run a complete backup. Listing the Filesets should include both full and partial backups.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IBackupResults backupResults = c.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());

        Assert.AreEqual(3, c.List().Filesets.Count());
        Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 2).IsFullBackup);
        Assert.AreEqual(BackupType.PARTIAL_BACKUP, c.List().Filesets.Single(x => x.Version == 1).IsFullBackup);
        Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 0).IsFullBackup);
    }

    // Restore files from the full backup set.
    restoreOptions["overwrite"] = "true";
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, restoreOptions, null))
    {
        IListResults lastResults = c.List("*");
        string[] fullVersionFiles = lastResults.Files.Select(x => x.Path).Where(x => !Utility.IsFolder(x, File.GetAttributes)).ToArray();
        Assert.AreEqual(this.fileSizes.Length, fullVersionFiles.Length);

        IRestoreResults restoreResults = c.Restore(fullVersionFiles);
        Assert.AreEqual(0, restoreResults.Errors.Count());
        Assert.AreEqual(0, restoreResults.Warnings.Count());

        foreach (string filepath in fullVersionFiles)
        {
            string filename = Path.GetFileName(filepath);
            TestUtils.AssertFilesAreEqual(filepath, Path.Combine(this.RESTOREFOLDER, filename ?? String.Empty), false, filename);
        }
    }
}
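// Both restore blocks above drop directory entries via Utility.IsFolder(path, File.GetAttributes).
// A minimal sketch of what such a predicate is assumed to do (the real helper lives in
// Duplicati.Library.Utility and may handle more cases, e.g. trailing separators):
private static bool IsFolderSketch(string path, Func<string, FileAttributes> attributeReader)
{
    return attributeReader(path).HasFlag(FileAttributes.Directory);
}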
public void RepairMissingBlocklistHashes()
{
    byte[] data = new byte[150 * 1024];
    Random rng = new Random();
    for (int k = 0; k < 2; k++)
    {
        rng.NextBytes(data);
        File.WriteAllBytes(Path.Combine(this.DATAFOLDER, $"{k}"), data);
    }

    Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions);
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IBackupResults backupResults = c.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Mimic a damaged database that needs to be repaired.
    const string selectStatement = @"SELECT BlocksetID, ""Index"", Hash FROM BlocklistHash ORDER BY Hash ASC";
    List<int> expectedBlocksetIDs = new List<int>();
    List<int> expectedIndexes = new List<int>();
    List<string> expectedHashes = new List<string>();
    using (IDbConnection connection = SQLiteLoader.LoadConnection(options["dbpath"]))
    {
        // Read the contents of the BlocklistHash table so that we can
        // compare them to the contents after the repair operation.
        using (IDbCommand command = connection.CreateCommand())
        {
            using (IDataReader reader = command.ExecuteReader(selectStatement))
            {
                while (reader.Read())
                {
                    expectedBlocksetIDs.Add(reader.GetInt32(0));
                    expectedIndexes.Add(reader.GetInt32(1));
                    expectedHashes.Add(reader.GetString(2));
                }
            }
        }

        // Drop the table contents and verify that they are gone.
        using (IDbCommand command = connection.CreateCommand())
        {
            command.ExecuteNonQuery(@"DELETE FROM BlocklistHash");
            using (IDataReader reader = command.ExecuteReader(selectStatement))
            {
                Assert.IsFalse(reader.Read());
            }
        }
    }

    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        Assert.AreEqual(0, repairResults.Warnings.Count());
    }

    // The repair should have restored the BlocklistHash table to its original contents.
    List<int> repairedBlocksetIDs = new List<int>();
    List<int> repairedIndexes = new List<int>();
    List<string> repairedHashes = new List<string>();
    using (IDbConnection connection = SQLiteLoader.LoadConnection(options["dbpath"]))
    {
        using (IDbCommand command = connection.CreateCommand())
        {
            using (IDataReader reader = command.ExecuteReader(selectStatement))
            {
                while (reader.Read())
                {
                    repairedBlocksetIDs.Add(reader.GetInt32(0));
                    repairedIndexes.Add(reader.GetInt32(1));
                    repairedHashes.Add(reader.GetString(2));
                }
            }
        }
    }

    CollectionAssert.AreEqual(expectedBlocksetIDs, repairedBlocksetIDs);
    CollectionAssert.AreEqual(expectedIndexes, repairedIndexes);
    CollectionAssert.AreEqual(expectedHashes, repairedHashes);

    // A subsequent backup should run without errors.
    using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
    {
        IBackupResults backupResults = c.Backup(new[] { this.DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }
}
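// Note that IDbCommand.ExecuteReader/ExecuteNonQuery do not take a SQL string in plain
// ADO.NET; the calls above rely on convenience overloads. Extension methods along these
// lines are sufficient to replicate them (a sketch, not necessarily the project's own
// helpers, which may also handle parameters and logging):
internal static class DbCommandSketchExtensions
{
    public static IDataReader ExecuteReader(this IDbCommand command, string commandText)
    {
        command.CommandText = commandText;
        return command.ExecuteReader();
    }

    public static int ExecuteNonQuery(this IDbCommand command, string commandText)
    {
        command.CommandText = commandText;
        return command.ExecuteNonQuery();
    }
}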
public void RunCommands()
{
    var testopts = TestOptions;

    var data = new byte[1024 * 1024 * 10];
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "a"), data);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("In first backup:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
    }

    new Random().NextBytes(data);
    File.WriteAllBytes(Path.Combine(DATAFOLDER, "b"), data);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest before deleting:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0, no_local_db = true }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest without db:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }

    // Delete the local database and rebuild it from the remote files.
    File.Delete(DBFILE);

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IRepairResults repairResults = c.Repair();
        Assert.AreEqual(0, repairResults.Errors.Count());
        Assert.AreEqual(0, repairResults.Warnings.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IListResults listResults = c.List();
        Assert.AreEqual(0, listResults.Errors.Count());
        Assert.AreEqual(0, listResults.Warnings.Count());
        Assert.AreEqual(2, listResults.Filesets.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 1 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Oldest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(2, r.Files.Count());
    }

    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        Console.WriteLine("Newest after delete:");
        Console.WriteLine(string.Join(Environment.NewLine, r.Files.Select(x => x.Path)));
        Assert.AreEqual(3, r.Files.Count());
    }
}
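// Several tests above derive option sets with testopts.Expand(new { version = 0 }).
// Expand is not defined in this excerpt; the sketch below captures the assumed
// contract: copy the dictionary and overlay the anonymous object's properties as
// options, mapping '_' in property names to '-' (so no_local_db becomes
// --no-local-db). The reflection details and null handling are assumptions.
internal static class OptionSketchExtensions
{
    public static Dictionary<string, string> Expand(this Dictionary<string, string> self, object extra)
    {
        var res = new Dictionary<string, string>(self);
        foreach (System.Reflection.PropertyInfo prop in extra.GetType().GetProperties())
        {
            // Anonymous-type members surface as read-only properties.
            string key = prop.Name.Replace('_', '-');
            object value = prop.GetValue(extra);
            res[key] = value?.ToString();
        }
        return res;
    }
}

// Usage mirroring the tests above:
//     var opts = testopts.Expand(new { version = 0, no_local_db = true });
//     // opts now contains "version" = "0" and "no-local-db" = "True"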