/// <summary>
/// Creates a snapshot service for the given source folders, honoring the configured
/// snapshot strategy. On failure the error is either rethrown (Required), logged
/// as a warning (On), or ignored, and a platform-specific no-snapshot
/// implementation is returned instead.
/// </summary>
/// <param name="sources">The source paths that the snapshot must cover</param>
/// <param name="options">The active options, supplying the snapshot strategy and raw option map</param>
/// <param name="log">The log target for snapshot-failure warnings</param>
/// <returns>A snapshot service, or a pass-through implementation when snapshots are unavailable</returns>
public static Snapshots.ISnapshotService GetSnapshot(string[] sources, Options options, ILogWriter log)
{
    try
    {
        // Only attempt a real snapshot when the strategy is not explicitly disabled
        if (options.SnapShotStrategy != Options.OptimizationStrategy.Off)
            return Duplicati.Library.Snapshots.SnapshotUtility.CreateSnapshot(sources, options.RawOptions);
    }
    catch (Exception ex)
    {
        // Required: propagate; On: warn and fall through; any other strategy fails silently.
        if (options.SnapShotStrategy == Options.OptimizationStrategy.Required)
            throw;

        if (options.SnapShotStrategy == Options.OptimizationStrategy.On)
            log.AddWarning(Strings.RSyncDir.SnapshotFailedError(ex.ToString()), ex);
    }

    // Snapshots disabled or unavailable: fall back to the non-snapshot implementation
    // matching the current platform.
    if (Library.Utility.Utility.IsClientLinux)
        return new Duplicati.Library.Snapshots.NoSnapshotLinux(sources, options.RawOptions);

    return new Duplicati.Library.Snapshots.NoSnapshotWindows(sources, options.RawOptions);
}
/// <summary>
/// Tests the volume by examining the internal contents
/// </summary>
/// <param name="db">The local test database used to compare the volume contents against</param>
/// <param name="vol">The remote volume being examined</param>
/// <param name="tf">The path to the downloaded copy of the file</param>
/// <param name="options">The active options, supplying hash algorithm, block size and compression settings</param>
/// <param name="log">The log target for warnings about unexpected file types</param>
/// <param name="sample_percent">A value between 0 and 1 that indicates how many blocks are tested in a dblock file</param>
/// <returns>The volume name paired with the list of detected differences (null for unknown file types)</returns>
public static KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> TestVolumeInternals(LocalTestDatabase db, IRemoteVolume vol, string tf, Options options, ILogWriter log, double sample_percent = 0.2)
{
    var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(options.BlockHashAlgorithm);
    if (blockhasher == null)
        throw new Exception(Strings.Foresthash.InvalidHashAlgorithm(options.BlockHashAlgorithm));
    if (!blockhasher.CanReuseTransform)
        throw new Exception(Strings.Foresthash.InvalidCryptoSystem(options.BlockHashAlgorithm));

    var hashsize = blockhasher.HashSize / 8;
    var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
    // Clamp the sampling rate into [0.01, 1]
    sample_percent = Math.Min(1, Math.Max(sample_percent, 0.01));

    if (parsedInfo.FileType == RemoteVolumeType.Files)
    {
        //Compare with db and see if all files are accounted for
        // with correct file hashes and blocklist hashes
        using(var fl = db.CreateFilelist(vol.Name))
        {
            using(var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, options))
                foreach(var f in rd.Files)
                    fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);

            return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, fl.Compare().ToList());
        }
    }
    else if (parsedInfo.FileType == RemoteVolumeType.Index)
    {
        var blocklinks = new List<Tuple<string, string, long>>();
        IEnumerable<KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>> combined = new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0];

        //Compare with db and see that all hashes and volumes are listed
        using(var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, options, hashsize))
            foreach(var v in rd.Volumes)
            {
                blocklinks.Add(new Tuple<string, string, long>(v.Filename, v.Hash, v.Length));
                using(var bl = db.CreateBlocklist(v.Filename))
                {
                    foreach(var h in v.Blocks)
                        bl.AddBlock(h.Key, h.Value);

                    combined = combined.Union(bl.Compare().ToArray());
                }
            }

        using(var il = db.CreateIndexlist(vol.Name))
        {
            foreach(var t in blocklinks)
                il.AddBlockLink(t.Item1, t.Item2, t.Item3);

            combined = combined.Union(il.Compare()).ToList();
        }

        return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, combined.ToList());
    }
    else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
    {
        using(var bl = db.CreateBlocklist(vol.Name))
        using(var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, options))
        {
            //Verify that all blocks are in the file
            foreach(var b in rd.Blocks)
                bl.AddBlock(b.Key, b.Value);

            //Select random blocks and verify their hashes match the filename and size
            var hashsamples = new List<KeyValuePair<string, long>>(rd.Blocks);
            // BUGFIX: the previous expression, Math.Min(Math.Max(0, ...), Count - 1),
            // evaluated to -1 for an empty volume, making the trimming loop below call
            // RemoveAt on an empty list (ArgumentOutOfRangeException). Clamping to zero
            // LAST keeps the old behavior for non-empty volumes and handles empty ones.
            var sampleCount = Math.Max(0, Math.Min((int)(hashsamples.Count * sample_percent), hashsamples.Count - 1));
            var rnd = new Random();

            // Randomly discard entries until only the sample set remains
            while (hashsamples.Count > sampleCount)
                hashsamples.RemoveAt(rnd.Next(hashsamples.Count));

            var blockbuffer = new byte[options.Blocksize];
            var changes = new List<KeyValuePair<Library.Interface.TestEntryStatus, string>>();
            foreach(var s in hashsamples)
            {
                var size = rd.ReadBlock(s.Key, blockbuffer);
                if (size != s.Value)
                    changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                else
                {
                    // The block's hash is its name, so recomputing it verifies the content
                    var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                    if (hash != s.Key)
                        changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                }
            }

            return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, changes.Union(bl.Compare().ToList()));
        }
    }

    log.AddWarning(string.Format("Unexpected file type {0} for {1}", parsedInfo.FileType, vol.Name), null);
    return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, null);
}
/// <summary>
/// Prepares the temporary file- and block-tables that drive a restore operation.
/// Iterates the filesets matching the requested time/versions (most recent first)
/// and stops at the first fileset that contributes at least one file to restore.
/// </summary>
/// <param name="restoretime">The point in time to restore from</param>
/// <param name="versions">Explicit backup version numbers to consider</param>
/// <param name="filter">Optional filter limiting which paths are restored</param>
/// <param name="log">The log target for progress messages and warnings</param>
/// <returns>The number of files to restore and their combined size, or (0, 0) when nothing matches</returns>
public Tuple<long, long> PrepareRestoreFilelist(DateTime restoretime, long[] versions, Library.Utility.IFilter filter, ILogWriter log)
{
    // Unique suffix so the temp tables cannot collide with leftovers from other runs
    var guid = Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray());
    m_tempfiletable = "Fileset-" + guid;
    m_tempblocktable = "Blocks-" + guid;

    using(var cmd = m_connection.CreateCommand())
    {
        var filesetIds = GetFilesetIDs(NormalizeDateTime(restoretime), versions).ToList();
        while(filesetIds.Count > 0)
        {
            var filesetId = filesetIds[0];
            filesetIds.RemoveAt(0);

            m_restoreTime = ParseFromEpochSeconds(cmd.ExecuteScalarInt64(@"SELECT ""Timestamp"" FROM ""Fileset"" WHERE ""ID"" = ?", 0, filesetId));

            // Map the fileset id back to its position in the version list for the log message
            var ix = this.FilesetTimes.Select((value, index) => new { value.Key, index })
                .Where(n => n.Key == filesetId)
                .Select(pair => pair.index + 1)
                .FirstOrDefault() - 1;

            log.AddMessage(string.Format("Searching backup {0} ({1}) ...", ix, m_restoreTime));

            cmd.Parameters.Clear();

            // (Re)create the temp tables; they may exist from a previous loop iteration
            cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", m_tempfiletable));
            cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", m_tempblocktable));
            cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""ID"" INTEGER PRIMARY KEY, ""Path"" TEXT NOT NULL, ""BlocksetID"" INTEGER NOT NULL, ""MetadataID"" INTEGER NOT NULL, ""Targetpath"" TEXT NULL, ""DataVerified"" BOOLEAN NOT NULL) ", m_tempfiletable));
            cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""ID"" INTEGER PRIMARY KEY, ""FileID"" INTEGER NOT NULL, ""Index"" INTEGER NOT NULL, ""Hash"" TEXT NOT NULL, ""Size"" INTEGER NOT NULL, ""Restored"" BOOLEAN NOT NULL, ""Metadata"" BOOLEAN NOT NULL)", m_tempblocktable));
            // NOTE: SQLite identifiers are case-insensitive, so "TargetPath"/"FileId" below
            // resolve to the "Targetpath"/"FileID" columns declared above.
            cmd.ExecuteNonQuery(string.Format(@"CREATE INDEX ""{0}_Index"" ON ""{0}"" (""TargetPath"")", m_tempfiletable));
            cmd.ExecuteNonQuery(string.Format(@"CREATE INDEX ""{0}_HashSizeIndex"" ON ""{0}"" (""Hash"", ""Size"")", m_tempblocktable));
            // better suited to speed up commit on UpdateBlocks
            cmd.ExecuteNonQuery(string.Format(@"CREATE INDEX ""{0}_FileIdIndexIndex"" ON ""{0}"" (""FileId"", ""Index"")", m_tempblocktable));

            if (filter == null || filter.Empty)
            {
                // Simple case, restore everything
                cmd.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"", ""BlocksetID"", ""MetadataID"", ""DataVerified"") SELECT ""File"".""Path"", ""File"".""BlocksetID"", ""File"".""MetadataID"", 0 FROM ""File"", ""FilesetEntry"" WHERE ""File"".""ID"" = ""FilesetEntry"".""FileID"" AND ""FilesetEntry"".""FilesetID"" = ? ", m_tempfiletable);
                cmd.AddParameter(filesetId);
                cmd.ExecuteNonQuery();
            }
            else if (Library.Utility.Utility.IsFSCaseSensitive && filter is Library.Utility.FilterExpression && (filter as Library.Utility.FilterExpression).Type == Duplicati.Library.Utility.FilterType.Simple)
            {
                // If we get a plain list of filenames, a lookup table joined with an
                // exact "IN" compare is faster than evaluating the filter per row.
                // This is only safe on case-SENSITIVE filesystems, because SQLite's
                // string comparison is case-sensitive (NOCASE only covers ASCII), so on
                // case-insensitive filesystems differently-cased paths would be missed.
                using(var tr = m_connection.BeginTransaction())
                {
                    var p = (filter as Library.Utility.FilterExpression).GetSimpleList();
                    var m_filenamestable = "Filenames-" + guid;
                    cmd.Transaction = tr;
                    cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""Path"" TEXT NOT NULL) ", m_filenamestable));
                    cmd.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"") VALUES (?)", m_filenamestable);
                    cmd.AddParameter();
                    foreach(var s in p)
                    {
                        cmd.SetParameterValue(0, s);
                        cmd.ExecuteNonQuery();
                    }

                    cmd.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"", ""BlocksetID"", ""MetadataID"", ""DataVerified"") SELECT ""File"".""Path"", ""File"".""BlocksetID"", ""File"".""MetadataID"", 0 FROM ""File"", ""FilesetEntry"" WHERE ""File"".""ID"" = ""FilesetEntry"".""FileID"" AND ""FilesetEntry"".""FilesetID"" = ? AND ""Path"" IN (SELECT DISTINCT ""Path"" FROM ""{1}"") ", m_tempfiletable, m_filenamestable);
                    cmd.SetParameterValue(0, filesetId);
                    var c = cmd.ExecuteNonQuery();
                    cmd.Parameters.Clear();

                    // Warn about filter entries that did not match any file in this fileset
                    if (c != p.Length && c != 0)
                    {
                        var sb = new StringBuilder();
                        sb.AppendLine();
                        using(var rd = cmd.ExecuteReader(string.Format(@"SELECT ""Path"" FROM ""{0}"" WHERE ""Path"" NOT IN (SELECT ""Path"" FROM ""{1}"")", m_filenamestable, m_tempfiletable)))
                            while (rd.Read())
                                sb.AppendLine(rd.GetValue(0).ToString());

                        var actualrestoretime = ParseFromEpochSeconds(cmd.ExecuteScalarInt64(@"SELECT ""Timestamp"" FROM ""Fileset"" WHERE ""ID"" = ?", 0, filesetId));
                        log.AddWarning(string.Format("{0} File(s) were not found in list of files for backup at {1}, will not be restored: {2}", p.Length - c, actualrestoretime.ToLocalTime(), sb), null);
                        cmd.Parameters.Clear();
                    }

                    cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", m_filenamestable));

                    using(new Logging.Timer("CommitPrepareFileset"))
                        tr.Commit();
                }
            }
            else
            {
                // Restore but filter elements based on the filter expression
                // If this is too slow, we could add a special handler for wildcard searches too
                cmd.CommandText = string.Format(@"SELECT ""File"".""Path"", ""File"".""BlocksetID"", ""File"".""MetadataID"" FROM ""File"", ""FilesetEntry"" WHERE ""File"".""ID"" = ""FilesetEntry"".""FileID"" AND ""FilesetID"" = ?");
                cmd.AddParameter(filesetId);

                object[] values = new object[3];
                using(var cmd2 = m_connection.CreateCommand())
                {
                    cmd2.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"", ""BlocksetID"", ""MetadataID"", ""DataVerified"") VALUES (?,?,?,0)", m_tempfiletable);
                    cmd2.AddParameter();
                    cmd2.AddParameter();
                    cmd2.AddParameter();

                    // Evaluate the filter in-process on each candidate path
                    using(var rd = cmd.ExecuteReader())
                        while (rd.Read())
                        {
                            rd.GetValues(values);
                            if (values[0] != null && values[0] != DBNull.Value && Library.Utility.FilterExpression.Matches(filter, values[0].ToString()))
                            {
                                cmd2.SetParameterValue(0, values[0]);
                                cmd2.SetParameterValue(1, values[1]);
                                cmd2.SetParameterValue(2, values[2]);
                                cmd2.ExecuteNonQuery();
                            }
                        }
                }
            }

            // Count the selected files; if nothing matched, try the next fileset
            using(var rd = cmd.ExecuteReader(string.Format(@"SELECT COUNT(DISTINCT ""{0}"".""Path""), SUM(""Blockset"".""Length"") FROM ""{0}"", ""Blockset"" WHERE ""{0}"".""BlocksetID"" = ""Blockset"".""ID"" ", m_tempfiletable)))
            {
                var filecount = 0L;
                var filesize = 0L;
                if (rd.Read())
                {
                    filecount = rd.ConvertValueToInt64(0, 0);
                    filesize = rd.ConvertValueToInt64(1, 0);
                }

                if (filecount > 0)
                {
                    log.AddVerboseMessage("Needs to restore {0} files ({1})", filecount, Library.Utility.Utility.FormatSizeString(filesize));
                    return new Tuple<long, long>(filecount, filesize);
                }
            }
        }
    }

    return new Tuple<long, long>(0, 0);
}
/// <summary>
/// Marks all blocks that have a matching entry in the staging table as restored,
/// drops the staging table, and commits the underlying transaction. The cached
/// command is disposed, so the instance cannot be used after this call.
/// </summary>
/// <param name="log">The log target used to report count mismatches</param>
public void Commit(ILogWriter log)
{
    m_insertblockCommand.Parameters.Clear();

    // Flag every row in the block table whose (FileID, Index, Hash, Size) matches a staged row
    var updatedRows = m_insertblockCommand.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""Restored"" = 1 WHERE ""ID"" IN (SELECT ""{0}"".""ID"" FROM ""{0}"", ""{1}"" WHERE ""{0}"".""FileID"" = ""{1}"".""FileID"" AND ""{0}"".""Index"" = ""{1}"".""Index"" AND ""{0}"".""Hash"" = ""{1}"".""Hash"" AND ""{0}"".""Size"" = ""{1}"".""Size"" )", m_blocktablename, m_updateTable));

    // Sanity check: every staged row should have updated exactly one block row
    var stagedRows = Convert.ToInt64(m_insertblockCommand.ExecuteScalar(string.Format(@"SELECT COUNT(*) FROM ""{0}"" ", m_updateTable)));
    if (updatedRows != stagedRows)
        log.AddWarning(string.Format("Inconsistency while marking blocks as updated. Updated blocks: {0}, Registered blocks: {1}", updatedRows, stagedRows), null);

    m_insertblockCommand.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", m_updateTable));
    m_updateTable = null;

    // Capture the transaction before disposing the command that holds the reference
    var transaction = m_insertblockCommand.Transaction;
    m_insertblockCommand.Dispose();
    m_insertblockCommand = null;

    using(new Logging.Timer("CommitBlockMarker"))
        transaction.Commit();
    transaction.Dispose();
}
/// <summary>
/// This function will examine all options passed on the commandline, and test for unsupported or deprecated values.
/// Any errors will be logged into the statistics module.
/// </summary>
/// <param name="log">The log target that receives all generated warnings</param>
private void ValidateOptions(ILogWriter log)
{
    //No point in going through with this if we can't report
    if (log == null)
        return;

    // All options supplied by the caller (commandline plus, later, url parameters)
    var suppliedOptions = new Dictionary<string, string>(m_options.RawOptions);

    // All option names recognized in the current configuration, mapped to their argument descriptor
    var supportedOptions = new Dictionary<string, Library.Interface.ICommandLineArgument>();

    //There are a few internal options that are not accessible from outside, and thus not listed
    foreach (var internalName in Options.InternalOptions)
        supportedOptions[internalName] = null;

    //Figure out what module options are supported in the current setup
    var moduleOptions = new List<Duplicati.Library.Interface.ICommandLineArgument>();
    var disabledModuleOptions = new Dictionary<string, string>();

    foreach (var module in m_options.LoadedModules)
    {
        if (module.Value.SupportedCommands == null)
            continue;

        if (module.Key)
        {
            // Enabled module: its options count as supported
            moduleOptions.AddRange(module.Value.SupportedCommands);
        }
        else
        {
            // Disabled module: remember which module owns each option so a
            // better warning can be produced if the option is used anyway
            foreach (var command in module.Value.SupportedCommands)
            {
                disabledModuleOptions[command.Name] = module.Value.DisplayName + " (" + module.Value.Key + ")";
                if (command.Aliases != null)
                    foreach (var alias in command.Aliases)
                        disabledModuleOptions[alias] = disabledModuleOptions[command.Name];
            }
        }
    }

    // Throw url-encoded options into the mix
    //TODO: This can hide values if both commandline and url-parameters supply the same key
    var urlParameters = new Library.Utility.Uri(m_backend).QueryParameters;
    foreach (var key in urlParameters.AllKeys)
        suppliedOptions[key] = urlParameters[key];

    //Now run through all supported options, and look for deprecated options
    var argumentSources = new IList<Library.Interface.ICommandLineArgument>[] {
        m_options.SupportedCommands,
        DynamicLoader.BackendLoader.GetSupportedCommands(m_backend),
        m_options.NoEncryption ? null : DynamicLoader.EncryptionLoader.GetSupportedCommands(m_options.EncryptionModule),
        moduleOptions,
        DynamicLoader.CompressionLoader.GetSupportedCommands(m_options.CompressionModule)
    };

    foreach (var sourceList in argumentSources)
    {
        if (sourceList == null)
            continue;

        foreach (var argument in sourceList)
        {
            // Warn about duplicate registrations unless the name is a known, accepted duplicate
            if (supportedOptions.ContainsKey(argument.Name) && Array.IndexOf(Options.KnownDuplicates, argument.Name.ToLower()) < 0)
                log.AddWarning(string.Format(Strings.Controller.DuplicateOptionNameWarning, argument.Name), null);
            supportedOptions[argument.Name] = argument;

            if (argument.Aliases != null)
                foreach (var alias in argument.Aliases)
                {
                    if (supportedOptions.ContainsKey(alias) && Array.IndexOf(Options.KnownDuplicates, alias.ToLower()) < 0)
                        log.AddWarning(string.Format(Strings.Controller.DuplicateOptionNameWarning, alias), null);
                    supportedOptions[alias] = argument;
                }

            if (argument.Deprecated)
            {
                // Warn if the deprecated option (by any of its names) was actually supplied
                var allNames = new List<string> { argument.Name };
                if (argument.Aliases != null)
                    allNames.AddRange(argument.Aliases);

                foreach (var name in allNames)
                    if (suppliedOptions.ContainsKey(name))
                    {
                        var optname = argument.Name;
                        if (argument.Name != name)
                            optname += " (" + name + ")";
                        log.AddWarning(string.Format(Strings.Controller.DeprecatedOptionUsedWarning, optname, argument.DeprecationMessage), null);
                    }
            }
        }
    }

    //Now look for options that were supplied but not supported
    foreach (var key in suppliedOptions.Keys)
        if (!supportedOptions.ContainsKey(key))
        {
            if (disabledModuleOptions.ContainsKey(key))
                log.AddWarning(string.Format(Strings.Controller.UnsupportedOptionDisabledModuleWarning, key, disabledModuleOptions[key]), null);
            else
                log.AddWarning(string.Format(Strings.Controller.UnsupportedOptionWarning, key), null);
        }

    //Look at the value supplied for each argument and see if it is valid according to its type
    foreach (var key in suppliedOptions.Keys)
    {
        Library.Interface.ICommandLineArgument argument;
        if (supportedOptions.TryGetValue(key, out argument) && argument != null)
        {
            var validationMessage = ValidateOptionValue(argument, key, suppliedOptions[key]);
            if (validationMessage != null)
                log.AddWarning(validationMessage, null);
        }
    }

    //TODO: Based on the action, see if all options are relevant
}
/// <summary>
/// Prepares the temporary file- and block-tables that drive a restore operation.
/// Iterates the filesets matching the requested time/versions and stops at the
/// first fileset that contributes at least one file to restore.
/// NOTE(review): this looks like an older variant of PrepareRestoreFilelist that also
/// appears elsewhere in this file (without the DataVerified column or indexes) —
/// presumably from a different class or version; confirm which one is live.
/// </summary>
/// <param name="restoretime">The point in time to restore from</param>
/// <param name="versions">Explicit backup version numbers to consider</param>
/// <param name="filter">Optional filter limiting which paths are restored</param>
/// <param name="log">The log target for progress messages and warnings</param>
/// <returns>The number of files to restore and their combined size, or (0, 0) when nothing matches</returns>
public Tuple<long, long> PrepareRestoreFilelist(DateTime restoretime, long[] versions, Library.Utility.IFilter filter, ILogWriter log)
{
    // Unique suffix so the temp tables cannot collide with leftovers from other runs
    var guid = Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray());
    m_tempfiletable = "Fileset-" + guid;
    m_tempblocktable = "Blocks-" + guid;

    using (var cmd = m_connection.CreateCommand())
    {
        var filesetIds = GetFilesetIDs(NormalizeDateTime(restoretime), versions).ToList();
        while (filesetIds.Count > 0)
        {
            var filesetId = filesetIds[0];
            filesetIds.RemoveAt(0);

            m_restoreTime = ParseFromEpochSeconds(Convert.ToInt64(cmd.ExecuteScalar(@"SELECT ""Timestamp"" FROM ""Fileset"" WHERE ""ID"" = ?", filesetId)));

            // Map the fileset id back to its position in the version list for the log message
            var ix = this.FilesetTimes.Select((value, index) => new { value.Key, index })
                .Where(n => n.Key == filesetId)
                .Select(pair => pair.index + 1)
                .FirstOrDefault() - 1;

            log.AddMessage(string.Format("Searching backup {0} ({1}) ...", ix, m_restoreTime));

            cmd.Parameters.Clear();

            // (Re)create the temp tables; they may exist from a previous loop iteration
            cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", m_tempfiletable));
            cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", m_tempblocktable));
            cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""ID"" INTEGER PRIMARY KEY, ""Path"" TEXT NOT NULL, ""BlocksetID"" INTEGER NOT NULL, ""MetadataID"" INTEGER NOT NULL, ""Targetpath"" TEXT NULL ) ", m_tempfiletable));
            cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""ID"" INTEGER PRIMARY KEY, ""FileID"" INTEGER NOT NULL, ""Index"" INTEGER NOT NULL, ""Hash"" TEXT NOT NULL, ""Size"" INTEGER NOT NULL, ""Restored"" BOOLEAN NOT NULL)", m_tempblocktable));

            if (filter == null || filter.Empty)
            {
                // Simple case, restore everything
                cmd.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"", ""BlocksetID"", ""MetadataID"") SELECT ""File"".""Path"", ""File"".""BlocksetID"", ""File"".""MetadataID"" FROM ""File"", ""FilesetEntry"" WHERE ""File"".""ID"" = ""FilesetEntry"".""FileID"" AND ""FilesetEntry"".""FilesetID"" = ? ", m_tempfiletable);
                cmd.AddParameter(filesetId);
                cmd.ExecuteNonQuery();
            }
            else if (filter is Library.Utility.FilterExpression && (filter as Library.Utility.FilterExpression).Type == Duplicati.Library.Utility.FilterType.Simple)
            {
                // If we get a list of filenames, the lookup table is faster
                using (var tr = m_connection.BeginTransaction())
                {
                    var p = (filter as Library.Utility.FilterExpression).GetSimpleList();
                    var m_filenamestable = "Filenames-" + guid;
                    cmd.Transaction = tr;
                    cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""Path"" TEXT NOT NULL) ", m_filenamestable));
                    cmd.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"") VALUES (?)", m_filenamestable);
                    cmd.AddParameter();
                    foreach (var s in p)
                    {
                        cmd.SetParameterValue(0, s);
                        cmd.ExecuteNonQuery();
                    }

                    //TODO: Handle case-insensitive filename lookup
                    cmd.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"", ""BlocksetID"", ""MetadataID"") SELECT ""File"".""Path"", ""File"".""BlocksetID"", ""File"".""MetadataID"" FROM ""File"", ""FilesetEntry"" WHERE ""File"".""ID"" = ""FilesetEntry"".""FileID"" AND ""FilesetEntry"".""FilesetID"" = ? AND ""Path"" IN (SELECT DISTINCT ""Path"" FROM ""{1}"") ", m_tempfiletable, m_filenamestable);
                    cmd.SetParameterValue(0, filesetId);
                    var c = cmd.ExecuteNonQuery();
                    cmd.Parameters.Clear();

                    // Warn about filter entries that did not match any file in this fileset
                    if (c != p.Length && c != 0)
                    {
                        var sb = new StringBuilder();
                        sb.AppendLine();
                        using (var rd = cmd.ExecuteReader(string.Format(@"SELECT ""Path"" FROM ""{0}"" WHERE ""Path"" NOT IN (SELECT ""Path"" FROM ""{1}"")", m_filenamestable, m_tempfiletable)))
                            while (rd.Read())
                            {
                                sb.AppendLine(rd.GetValue(0).ToString());
                            }

                        var actualrestoretime = ParseFromEpochSeconds(Convert.ToInt64(cmd.ExecuteScalar(@"SELECT ""Timestamp"" FROM ""Fileset"" WHERE ""ID"" = ?", filesetId)));
                        log.AddWarning(string.Format("{0} File(s) were not found in list of files for backup at {1}, will not be restored: {2}", p.Length - c, actualrestoretime.ToLocalTime(), sb), null);
                        cmd.Parameters.Clear();
                    }

                    cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", m_filenamestable));

                    using (new Logging.Timer("CommitPrepareFileset"))
                        tr.Commit();
                }
            }
            else
            {
                // Restore but filter elements based on the filter expression
                // If this is too slow, we could add a special handler for wildcard searches too
                cmd.CommandText = string.Format(@"SELECT ""File"".""Path"", ""File"".""BlocksetID"", ""File"".""MetadataID"" FROM ""File"", ""FilesetEntry"" WHERE ""File"".""ID"" = ""FilesetEntry"".""FileID"" AND ""FilesetID"" = ?");
                cmd.AddParameter(filesetId);

                object[] values = new object[3];
                using (var cmd2 = m_connection.CreateCommand())
                {
                    cmd2.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"", ""BlocksetID"", ""MetadataID"") VALUES (?,?,?)", m_tempfiletable);
                    cmd2.AddParameter();
                    cmd2.AddParameter();
                    cmd2.AddParameter();

                    // Evaluate the filter in-process on each candidate path
                    using (var rd = cmd.ExecuteReader())
                        while (rd.Read())
                        {
                            rd.GetValues(values);
                            if (values[0] != null && values[0] != DBNull.Value && Library.Utility.FilterExpression.Matches(filter, values[0].ToString()))
                            {
                                cmd2.SetParameterValue(0, values[0]);
                                cmd2.SetParameterValue(1, values[1]);
                                cmd2.SetParameterValue(2, values[2]);
                                cmd2.ExecuteNonQuery();
                            }
                        }
                }
            }

            // Count the selected files; if nothing matched, try the next fileset
            using (var rd = cmd.ExecuteReader(string.Format(@"SELECT COUNT(DISTINCT ""{0}"".""Path""), SUM(""Blockset"".""Length"") FROM ""{0}"", ""Blockset"" WHERE ""{0}"".""BlocksetID"" = ""Blockset"".""ID"" ", m_tempfiletable)))
            {
                var filecount = 0L;
                var filesize = 0L;
                if (rd.Read())
                {
                    // SUM returns NULL over an empty set, so both values may be DBNull
                    var r0 = rd.GetValue(0);
                    var r1 = rd.GetValue(1);
                    if (r0 != null && r0 != DBNull.Value)
                    {
                        filecount = Convert.ToInt64(r0);
                    }
                    if (r1 != null && r1 != DBNull.Value)
                    {
                        filesize = Convert.ToInt64(r1);
                    }
                }

                if (filecount > 0)
                {
                    log.AddVerboseMessage("Needs to restore {0} files ({1})", filecount, Library.Utility.Utility.FormatSizeString(filesize));
                    return (new Tuple<long, long>(filecount, filesize));
                }
            }
        }
    }

    return (new Tuple<long, long>(0, 0));
}
/// <summary>
/// Tests the volume by examining the internal contents
/// </summary>
/// <param name="db">The local test database used to compare the volume contents against</param>
/// <param name="vol">The remote volume being examined</param>
/// <param name="tf">The path to the downloaded copy of the file</param>
/// <param name="options">The active options, supplying hash algorithm, block size and compression settings</param>
/// <param name="log">The log target for warnings about unexpected file types</param>
/// <param name="sample_percent">A value between 0 and 1 that indicates how many blocks are tested in a dblock file</param>
/// <returns>The volume name paired with the list of detected differences (null for unknown file types)</returns>
public static KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> TestVolumeInternals(LocalTestDatabase db, IRemoteVolume vol, string tf, Options options, ILogWriter log, double sample_percent = 0.2)
{
    var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(options.BlockHashAlgorithm);
    if (blockhasher == null)
    {
        throw new Exception(Strings.Foresthash.InvalidHashAlgorithm(options.BlockHashAlgorithm));
    }
    if (!blockhasher.CanReuseTransform)
    {
        throw new Exception(Strings.Foresthash.InvalidCryptoSystem(options.BlockHashAlgorithm));
    }

    var hashsize = blockhasher.HashSize / 8;
    var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
    // Clamp the sampling rate into [0.01, 1]
    sample_percent = Math.Min(1, Math.Max(sample_percent, 0.01));

    if (parsedInfo.FileType == RemoteVolumeType.Files)
    {
        //Compare with db and see if all files are accounted for
        // with correct file hashes and blocklist hashes
        using (var fl = db.CreateFilelist(vol.Name))
        {
            using (var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, options))
                foreach (var f in rd.Files)
                {
                    fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);
                }

            return (new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, fl.Compare().ToList()));
        }
    }
    else if (parsedInfo.FileType == RemoteVolumeType.Index)
    {
        var blocklinks = new List<Tuple<string, string, long>>();
        IEnumerable<KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>> combined = new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0];

        //Compare with db and see that all hashes and volumes are listed
        using (var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, options, hashsize))
            foreach (var v in rd.Volumes)
            {
                blocklinks.Add(new Tuple<string, string, long>(v.Filename, v.Hash, v.Length));
                using (var bl = db.CreateBlocklist(v.Filename))
                {
                    foreach (var h in v.Blocks)
                    {
                        bl.AddBlock(h.Key, h.Value);
                    }
                    combined = combined.Union(bl.Compare().ToArray());
                }
            }

        using (var il = db.CreateIndexlist(vol.Name))
        {
            foreach (var t in blocklinks)
            {
                il.AddBlockLink(t.Item1, t.Item2, t.Item3);
            }
            combined = combined.Union(il.Compare()).ToList();
        }

        return (new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, combined.ToList()));
    }
    else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
    {
        using (var bl = db.CreateBlocklist(vol.Name))
        using (var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, options))
        {
            //Verify that all blocks are in the file
            foreach (var b in rd.Blocks)
            {
                bl.AddBlock(b.Key, b.Value);
            }

            //Select random blocks and verify their hashes match the filename and size
            var hashsamples = new List<KeyValuePair<string, long>>(rd.Blocks);
            // BUGFIX: the previous expression, Math.Min(Math.Max(0, ...), Count - 1),
            // evaluated to -1 for an empty volume, making the trimming loop below call
            // RemoveAt on an empty list (ArgumentOutOfRangeException). Clamping to zero
            // LAST keeps the old behavior for non-empty volumes and handles empty ones.
            var sampleCount = Math.Max(0, Math.Min((int)(hashsamples.Count * sample_percent), hashsamples.Count - 1));
            var rnd = new Random();

            // Randomly discard entries until only the sample set remains
            while (hashsamples.Count > sampleCount)
            {
                hashsamples.RemoveAt(rnd.Next(hashsamples.Count));
            }

            var blockbuffer = new byte[options.Blocksize];
            var changes = new List<KeyValuePair<Library.Interface.TestEntryStatus, string>>();
            foreach (var s in hashsamples)
            {
                var size = rd.ReadBlock(s.Key, blockbuffer);
                if (size != s.Value)
                {
                    changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                }
                else
                {
                    // The block's hash is its name, so recomputing it verifies the content
                    var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                    if (hash != s.Key)
                    {
                        changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                    }
                }
            }

            return (new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, changes.Union(bl.Compare().ToList())));
        }
    }

    log.AddWarning(string.Format("Unexpected file type {0} for {1}", parsedInfo.FileType, vol.Name), null);
    return (new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, null));
}