/// <summary>
/// Creates a bug-report archive containing a privacy-scrubbed copy of the
/// local database plus a plain-text description of the host environment.
/// </summary>
public void Run()
{
    // Make sure the target file name ends with the compression module key
    // (e.g. "zip"). Path.GetExtension() returns the extension WITH the
    // leading dot, so it must be trimmed before comparing against the
    // module key; the original raw comparison (".zip" != "zip") never
    // matched and appended a duplicate extension to names that already
    // carried the correct one.
    var ext = (System.IO.Path.GetExtension(m_targetpath) ?? string.Empty).TrimStart('.');
    var module = m_options.CompressionModule;
    if (!string.Equals(ext, module, StringComparison.OrdinalIgnoreCase))
        m_targetpath = m_targetpath + "." + module;

    if (System.IO.File.Exists(m_targetpath))
        throw new Exception(string.Format("Output file already exists, not overwriting: {0}", m_targetpath));

    if (!System.IO.File.Exists(m_options.Dbpath))
        throw new Exception(string.Format("Database file does not exist: {0}", m_options.Dbpath));

    m_result.AddMessage("Scrubbing filenames from database, this may take a while, please wait");

    using(var tmp = new Library.Utility.TempFile())
    {
        // Operate on a copy so the live database is never modified.
        System.IO.File.Copy(m_options.Dbpath, tmp, true);
        using(var db = new LocalBugReportDatabase(tmp))
        {
            m_result.SetDatabase(db);
            db.Fix(); // scrub private data (paths, names) from the copy
        }

        using(var cm = DynamicLoader.CompressionLoader.GetModule(module, m_targetpath, m_options.RawOptions))
        {
            // Pack the scrubbed database into the archive.
            using(var cs = cm.CreateFile("log-database.sqlite", Duplicati.Library.Interface.CompressionHint.Compressible, DateTime.UtcNow))
            using(var fs = System.IO.File.Open(tmp, System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.ReadWrite))
                Library.Utility.Utility.CopyStream(fs, cs);

            // Add a plain-text description of the host environment.
            using(var cs = new System.IO.StreamWriter(cm.CreateFile("system-info.txt", Duplicati.Library.Interface.CompressionHint.Compressible, DateTime.UtcNow)))
            {
                cs.WriteLine("Duplicati: {0} ({1})", System.Reflection.Assembly.GetEntryAssembly().FullName, System.Reflection.Assembly.GetExecutingAssembly().FullName);
                cs.WriteLine("OS: {0}", Environment.OSVersion);
                cs.WriteLine("Uname: {0}", Duplicati.Library.Utility.Utility.UnameAll);
                cs.WriteLine("64bit: {0} ({1})", Environment.Is64BitOperatingSystem, Environment.Is64BitProcess);
                cs.WriteLine("Machinename: {0}", Environment.MachineName);
                cs.WriteLine("Processors: {0}", Environment.ProcessorCount);
                cs.WriteLine(".Net Version: {0}", Environment.Version);
                cs.WriteLine("Mono: {0} ({1}) ({2})", Duplicati.Library.Utility.Utility.IsMono, Duplicati.Library.Utility.Utility.MonoVersion, Duplicati.Library.Utility.Utility.MonoDisplayVersion);

                // The SQLite provider is loaded dynamically, so its version is
                // fetched via reflection and is strictly best-effort.
                Type sqlite = null;
                string sqliteversion = "";

                try { sqlite = Duplicati.Library.SQLiteHelper.SQLiteLoader.SQLiteConnectionType; }
                catch { }

                if (sqlite != null)
                {
                    try { sqliteversion = (string)sqlite.GetProperty("SQLiteVersion").GetValue(null, null); }
                    catch { }

                    cs.WriteLine("SQLite: {0} - {1}", sqliteversion, sqlite.FullName);
                }
            }
        }
    }
}
/// <summary>
/// Lists the control files (extra files stored alongside a backup) that match
/// the supplied filters. A temporary database is used when no local database
/// exists, since only the remote file list is needed for this operation.
/// </summary>
/// <param name="filterstrings">Plain filter expressions to match control file names against</param>
/// <param name="compositefilter">An additional filter combined with the filter strings</param>
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(System.IO.File.Exists(m_options.Dbpath) ? m_options.Dbpath : (string)tmpdb, "ListControlFiles"))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        // Combine the plain filter strings with the composite filter.
        var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter);

        try
        {
            var filteredList = ListFilesHandler.ParseAndFilterFilesets(backend.List(), m_options);
            if (filteredList.Count == 0)
                throw new Exception("No filesets found on remote target");

            // Probe each candidate fileset in turn; the first one that can be
            // downloaded and parsed wins. Failures are remembered and only
            // surfaced if every candidate fails.
            Exception lastEx = new Exception("No suitable files found on remote target");

            foreach(var fileversion in filteredList)
                try
                {
                    // Honor a pending stop request between volumes.
                    if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                        return;

                    var file = fileversion.Value.File;

                    long size;
                    string hash;
                    RemoteVolumeType type;
                    RemoteVolumeState state;

                    // Prefer the size/hash recorded in the database; fall back
                    // to the size reported by the remote listing.
                    if (!db.GetRemoteVolume(file.Name, out hash, out size, out type, out state))
                        size = file.Size;

                    var files = new List<Library.Interface.IListResultFile>();
                    using (var tmpfile = backend.Get(file.Name, size, hash))
                    using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options))
                        foreach (var cf in tmp.ControlFiles)
                            if (Library.Utility.FilterExpression.Matches(filter, cf.Key))
                                files.Add(new ListResultFile(cf.Key, null));

                    m_result.SetResult(new Library.Interface.IListResultFileset[] { new ListResultFileset(fileversion.Key, fileversion.Value.Time, -1, -1) }, files);
                    lastEx = null;
                    break; // success; stop probing older filesets
                }
                catch(Exception ex)
                {
                    lastEx = ex;
                    // Never swallow thread aborts.
                    if (ex is System.Threading.ThreadAbortException)
                        throw;
                }

            if (lastEx != null)
                throw lastEx;
        }
        finally
        {
            // Flush pending backend operations and record them in the database.
            backend.WaitForComplete(db, null);
        }
    }
}
/// <summary>
/// Releases the compression module and the local temporary file, clearing
/// each field reference even when its Dispose() throws, then drops the
/// cached volume name.
/// </summary>
public virtual void Dispose()
{
    var compression = m_compression;
    if (compression != null)
    {
        try { compression.Dispose(); }
        finally { m_compression = null; }
    }

    var localfile = m_localfile;
    if (localfile != null)
    {
        try { localfile.Dispose(); }
        finally { m_localfile = null; }
    }

    m_volumename = null;
}
/// <summary>
/// Sets up a new volume writer: allocates the local staging file, generates
/// the remote volume name, and opens the compression module over the
/// staging file.
/// </summary>
/// <param name="options">The active backup options</param>
/// <param name="timestamp">The timestamp encoded into the volume filename</param>
/// <exception cref="Exception">Thrown when the configured compression module cannot be loaded</exception>
public VolumeWriterBase(Options options, DateTime timestamp)
    : base(options)
{
    // Writes are staged into a temporary file until the volume is uploaded.
    m_localfile = new Library.Utility.TempFile();

    // The remote name encodes type, prefix, a fresh guid, the timestamp and
    // the compression/encryption extensions (encryption omitted if disabled).
    m_volumename = GenerateFilename(this.FileType, options.Prefix, GenerateGuid(options), timestamp, options.CompressionModule, options.NoEncryption ? null : options.EncryptionModule);

    m_compression = DynamicLoader.CompressionLoader.GetModule(options.CompressionModule, m_localfile, options.RawOptions);
    if(m_compression == null)
        throw new Exception(string.Format("Unsupported compression module: {0}", options.CompressionModule));

    // Index and fileset volumes contain many small entries, so ask the
    // compressor to favor low per-entry overhead when it supports hinting.
    if ((this is IndexVolumeWriter || this is FilesetVolumeWriter) && m_compression is Library.Interface.ICompressionHinting)
        ((Library.Interface.ICompressionHinting)m_compression).LowOverheadMode = true;

    AddManifestfile();
}
/// <summary>
/// Creates a bug-report archive containing a privacy-scrubbed copy of the
/// local database plus a plain-text system description, reporting progress
/// through the operation progress updater.
/// </summary>
public void Run()
{
    // Make sure the target file name ends with the compression module key
    // (e.g. "zip"). Path.GetExtension() returns the extension WITH the
    // leading dot, so it must be trimmed before comparing against the
    // module key; the original raw comparison (".zip" != "zip") never
    // matched and appended a duplicate extension to names that already
    // carried the correct one.
    var ext = (System.IO.Path.GetExtension(m_targetpath) ?? string.Empty).TrimStart('.');
    var module = m_options.CompressionModule;
    if (!string.Equals(ext, module, StringComparison.OrdinalIgnoreCase))
        m_targetpath = m_targetpath + "." + module;

    if (System.IO.File.Exists(m_targetpath))
        throw new Exception(string.Format("Output file already exists, not overwriting: {0}", m_targetpath));

    if (!System.IO.File.Exists(m_options.Dbpath))
        throw new Exception(string.Format("Database file does not exist: {0}", m_options.Dbpath));

    m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.BugReport_Running);
    m_result.OperationProgressUpdater.UpdateProgress(0);

    m_result.AddMessage("Scrubbing filenames from database, this may take a while, please wait");

    using(var tmp = new Library.Utility.TempFile())
    {
        // Operate on a copy so the live database is never modified.
        System.IO.File.Copy(m_options.Dbpath, tmp, true);
        using(var db = new LocalBugReportDatabase(tmp))
        {
            m_result.SetDatabase(db);
            db.Fix(); // scrub private data (paths, names) from the copy
        }

        using(var cm = DynamicLoader.CompressionLoader.GetModule(module, m_targetpath, m_options.RawOptions))
        {
            // Pack the scrubbed database into the archive.
            using(var cs = cm.CreateFile("log-database.sqlite", Duplicati.Library.Interface.CompressionHint.Compressible, DateTime.UtcNow))
            using(var fs = System.IO.File.Open(tmp, System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.ReadWrite))
                Library.Utility.Utility.CopyStream(fs, cs);

            // Add a plain-text description of the host environment.
            using(var cs = new System.IO.StreamWriter(cm.CreateFile("system-info.txt", Duplicati.Library.Interface.CompressionHint.Compressible, DateTime.UtcNow)))
                foreach(var line in SystemInfoHandler.GetSystemInfo())
                    cs.WriteLine(line);
        }

        m_result.TargetPath = m_targetpath;
    }
}
/// <summary>
/// Downloads a remote file into a temporary file, verifies its size and
/// hash, and decrypts it unless encryption is disabled. On success the
/// temp file's ownership is transferred to <c>item.Result</c>; on any
/// failure the temp file is disposed before rethrowing.
/// </summary>
/// <param name="item">The queued entry describing the remote file to fetch</param>
private void DoGet(FileEntryItem item)
{
    Library.Utility.TempFile tmpfile = null;
    m_statwriter.SendEvent(BackendActionType.Get, BackendEventType.Started, item.RemoteFilename, item.Size);

    try
    {
        var begin = DateTime.Now;

        tmpfile = new Library.Utility.TempFile();
        if (m_backend is Library.Interface.IStreamingBackend && !m_options.DisableStreamingTransfers)
        {
            // Streaming path: throttle and report progress while downloading.
            using (var fs = System.IO.File.OpenWrite(tmpfile))
            using (var ts = new ThrottledStream(fs, m_options.MaxUploadPrSecond, m_options.MaxDownloadPrSecond))
            using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, HandleProgress))
                ((Library.Interface.IStreamingBackend)m_backend).Get(item.RemoteFilename, pgs);
        }
        else
            m_backend.Get(item.RemoteFilename, tmpfile);

        var duration = DateTime.Now - begin;
        Logging.Log.WriteMessage(string.Format("Downloaded {0} in {1}, {2}/s", Library.Utility.Utility.FormatSizeString(item.Size), duration, Library.Utility.Utility.FormatSizeString((long)(item.Size / duration.TotalSeconds))), Duplicati.Library.Logging.LogMessageType.Profiling);

        m_db.LogDbOperation("get", item.RemoteFilename, JsonConvert.SerializeObject(new { Size = new System.IO.FileInfo(tmpfile).Length, Hash = CalculateFileHash(tmpfile) }));
        m_statwriter.SendEvent(BackendActionType.Get, BackendEventType.Completed, item.RemoteFilename, new System.IO.FileInfo(tmpfile).Length);

        if (!m_options.SkipFileHashChecks)
        {
            // Verify the download against the expected size and hash; when
            // either is unknown, record the observed value on the item instead.
            var nl = new System.IO.FileInfo(tmpfile).Length;
            if (item.Size >= 0)
            {
                if (nl != item.Size)
                    throw new Exception(Strings.Controller.DownloadedFileSizeError(item.RemoteFilename, nl, item.Size));
            }
            else
                item.Size = nl;

            var nh = CalculateFileHash(tmpfile);
            if (!string.IsNullOrEmpty(item.Hash))
            {
                if (nh != item.Hash)
                    // NOTE(review): "Mismathc" is the exception type's actual
                    // (misspelled) name in this codebase; callers catch it by
                    // this name, so it cannot be renamed here.
                    throw new HashMismathcException(Strings.Controller.HashMismatchError(tmpfile, item.Hash, nh));
            }
            else
                item.Hash = nh;
        }

        if (!item.VerifyHashOnly)
        {
            // Decrypt before returning
            if (!m_options.NoEncryption)
            {
                try
                {
                    // Move the downloaded (encrypted) file into tmpfile2 and
                    // decrypt into a fresh tmpfile; the using-block disposes
                    // the encrypted copy when decryption is done.
                    using(var tmpfile2 = tmpfile)
                    {
                        tmpfile = new Library.Utility.TempFile();
                        lock(m_encryptionLock)
                        {
                            // Auto-guess the encryption module
                            var ext = (System.IO.Path.GetExtension(item.RemoteFilename) ?? "").TrimStart('.');
                            if (!m_encryption.FilenameExtension.Equals(ext, StringComparison.InvariantCultureIgnoreCase))
                            {
                                // Check if the file is encrypted with something else
                                if (DynamicLoader.EncryptionLoader.Keys.Contains(ext, StringComparer.InvariantCultureIgnoreCase))
                                {
                                    m_statwriter.AddVerboseMessage("Filename extension \"{0}\" does not match encryption module \"{1}\", using matching encryption module", ext, m_options.EncryptionModule);
                                    using(var encmodule = DynamicLoader.EncryptionLoader.GetModule(ext, m_options.Passphrase, m_options.RawOptions))
                                        (encmodule ?? m_encryption).Decrypt(tmpfile2, tmpfile);
                                }
                                // Check if the file is not encrypted
                                else if (DynamicLoader.CompressionLoader.Keys.Contains(ext, StringComparer.InvariantCultureIgnoreCase))
                                {
                                    m_statwriter.AddVerboseMessage("Filename extension \"{0}\" does not match encryption module \"{1}\", guessing that it is not encrypted", ext, m_options.EncryptionModule);
                                }
                                // Fallback, lets see what happens...
                                else
                                {
                                    m_statwriter.AddVerboseMessage("Filename extension \"{0}\" does not match encryption module \"{1}\", attempting to use specified encryption module as no others match", ext, m_options.EncryptionModule);
                                    m_encryption.Decrypt(tmpfile2, tmpfile);
                                }
                            }
                            else
                            {
                                m_encryption.Decrypt(tmpfile2, tmpfile);
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    //If we fail here, make sure that we throw a crypto exception
                    if (ex is System.Security.Cryptography.CryptographicException)
                        throw;
                    else
                        throw new System.Security.Cryptography.CryptographicException(ex.Message, ex);
                }
            }

            // Hand the temp file over to the caller; nulling the local
            // reference prevents the catch block below from disposing it.
            item.Result = tmpfile;
            tmpfile = null;
        }
    }
    catch
    {
        if (tmpfile != null)
            tmpfile.Dispose();
        throw;
    }
}
/// <summary>
/// Downloads <paramref name="item"/> into a temp file, computing the
/// downloaded size and hash on the fly, then optionally decrypts it.
/// Returns the temp file the caller now owns; any intermediate temp file
/// is disposed in the finally block on failure.
/// </summary>
/// <param name="item">The remote file entry to download</param>
/// <param name="useDecrypter">Decryption module to apply, or null to skip decryption</param>
/// <param name="retDownloadSize">Receives the number of bytes downloaded</param>
/// <param name="retHashcode">Receives the hash of the downloaded (pre-decryption) data</param>
private TempFile coreDoGetSequential(FileEntryItem item, Interface.IEncryption useDecrypter, out long retDownloadSize, out string retHashcode)
{
    retHashcode = null;
    retDownloadSize = -1;
    TempFile retTarget, dlTarget = null, decryptTarget = null;
    try
    {
        dlTarget = new Library.Utility.TempFile();
        if (m_backend is Library.Interface.IStreamingBackend && !m_options.DisableStreamingTransfers)
        {
            Func<string> getFileHash;
            // extended to use stacked streams
            using (var fs = System.IO.File.OpenWrite(dlTarget))
            using (var hs = GetFileHasherStream(fs, System.Security.Cryptography.CryptoStreamMode.Write, out getFileHash))
            using (var ss = new ShaderStream(hs, true))
            {
                using (var ts = new ThrottledStream(ss, m_options.MaxUploadPrSecond, m_options.MaxDownloadPrSecond))
                using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, HandleProgress))
                {
                    ((Library.Interface.IStreamingBackend)m_backend).Get(item.RemoteFilename, pgs);
                }
                ss.Flush();
                // Size and hash come straight from the stream stack, avoiding
                // a second pass over the downloaded file.
                retDownloadSize = ss.TotalBytesWritten;
                retHashcode = getFileHash();
            }
        }
        else
        {
            // Non-streaming backends write directly to disk; size and hash
            // are computed afterwards from the file.
            m_backend.Get(item.RemoteFilename, dlTarget);
            retDownloadSize = new System.IO.FileInfo(dlTarget).Length;
            retHashcode = CalculateFileHash(dlTarget);
        }

        // Decryption is not placed in the stream stack because there seemed to be an effort
        // to throw a CryptographicException on fail. If in main stack, we cannot differentiate
        // in which part of the stack the source of an exception resides.
        if (useDecrypter != null)
        {
            decryptTarget = new Library.Utility.TempFile();
            lock (m_encryptionLock)
            {
                try
                {
                    useDecrypter.Decrypt(dlTarget, decryptTarget);
                }
                // If we fail here, make sure that we throw a crypto exception
                catch (System.Security.Cryptography.CryptographicException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    throw new System.Security.Cryptography.CryptographicException(ex.Message, ex);
                }
            }
            retTarget = decryptTarget;
            decryptTarget = null; // ownership transferred to retTarget
        }
        else
        {
            retTarget = dlTarget;
            dlTarget = null; // ownership transferred to retTarget
        }
    }
    finally
    {
        // Clean up whatever was not handed over to the caller.
        if (dlTarget != null)
            dlTarget.Dispose();
        if (decryptTarget != null)
            decryptTarget.Dispose();
    }
    return retTarget;
}
/// <summary>
/// Sorts the lines of <paramref name="filein"/> ordinally and writes the
/// result to <paramref name="fileout"/> (the two paths may be the same
/// file). First attempts an in-memory sort; if the file cannot be loaded
/// (e.g. too large), falls back to a disk-based bubble-merge that streams
/// the file repeatedly until no more swaps occur.
/// </summary>
/// <param name="filein">Path of the file to sort</param>
/// <param name="fileout">Path the sorted result is written to</param>
public static void SortFile(string filein, string fileout)
{
    try
    {
        // If the file can fit into memory, this is MUCH faster
        using (var tfout = new Library.Utility.TempFile())
        {
            var data = File.ReadAllLines(filein);
            Array.Sort(data, StringComparer.Ordinal);
            File.WriteAllLines(tfout, data);
            File.Copy(tfout, fileout, true);
            return;
        }
    }
    catch
    {
        // Fall through to the slower disk-based sort below.
    }

    using (var tfin = new Library.Utility.TempFile())
    using (var tfout = new Library.Utility.TempFile())
    {
        long swaps;
        File.Copy(filein, tfin, true);

        do
        {
            swaps = 0L;
            using (var sw = new System.IO.StreamWriter(tfout))
            using (var sr = new System.IO.StreamReader(tfin))
            {
                var c1 = sr.ReadLine();
                var c2 = sr.ReadLine();

                while (c1 != null || c2 != null)
                {
                    // (Removed a leftover debug branch that inspected lines
                    // starting with "a" and wrote an empty string.)
                    var cmp = StringComparer.Ordinal.Compare(c1, c2);
                    if (c2 == null || (c1 != null && cmp < 0))
                    {
                        // Pair already in order; emit the first line and slide.
                        sw.WriteLine(c1);
                        c1 = c2;
                        c2 = sr.ReadLine();
                    }
                    else
                    {
                        // Out of order: emit c2 first (dropping exact
                        // duplicates when cmp == 0) and count a swap so
                        // another pass is scheduled.
                        if (cmp != 0)
                            sw.WriteLine(c2);
                        c2 = sr.ReadLine();
                        swaps++;
                    }
                }
            }

            File.Copy(tfout, tfin, true);
        } while (swaps > 0);

        // NOTE(review): the in-memory path keeps duplicate lines while this
        // disk path drops them (cmp == 0); confirm whether duplicates matter
        // to callers before unifying the behavior.
        File.Copy(tfout, fileout, true);
    }
}
/// <summary>
/// Runs a restore: uses the local database when available, otherwise
/// rebuilds a temporary database from the remote filesets before restoring.
/// </summary>
/// <param name="paths">Target paths to restore; combined with the filter</param>
/// <param name="filter">Optional additional filter expression</param>
public void Run(string[] paths, Library.Utility.IFilter filter = null)
{
    m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Restore_Begin);

    // If we have both target paths and a filter, combine into a single filter
    filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(paths), filter);

    // Fast path: restore directly from the local database when one exists.
    if (!m_options.NoLocalDb && m_systemIO.FileExists(m_options.Dbpath))
    {
        using(var db = new LocalRestoreDatabase(m_options.Dbpath, m_options.Blocksize))
        {
            db.SetResult(m_result);
            DoRun(db, filter, m_result);
            db.WriteResults();
        }
        return;
    }

    // Slow path: rebuild a temporary database from the remote data first.
    m_result.AddMessage("No local database, building a temporary database");
    m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Restore_RecreateDatabase);

    using(var tmpdb = new Library.Utility.TempFile())
    {
        RecreateDatabaseHandler.NumberedFilterFilelistDelegate filelistfilter = FilterNumberedFilelist(m_options.Time, m_options.Version);

        // Simultaneously with downloading blocklists, we patch as much as we can from the blockvolumes
        // This prevents repeated downloads, except for cases where the blocklists refer blocks
        // that have been previously handled. A local blockvolume cache can reduce this issue.
        using(var database = new LocalRestoreDatabase(tmpdb, m_options.Blocksize))
        {
            var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(m_options.BlockHashAlgorithm);
            var filehasher = System.Security.Cryptography.HashAlgorithm.Create(m_options.FileHashAlgorithm);

            // Both hashers must exist and be reusable, as they are applied to
            // many blocks and files in sequence.
            if (blockhasher == null)
                throw new Exception(string.Format(Strings.Foresthash.InvalidHashAlgorithm, m_options.BlockHashAlgorithm));
            if (!blockhasher.CanReuseTransform)
                throw new Exception(string.Format(Strings.Foresthash.InvalidCryptoSystem, m_options.BlockHashAlgorithm));
            if (filehasher == null)
                throw new Exception(string.Format(Strings.Foresthash.InvalidHashAlgorithm, m_options.FileHashAlgorithm));
            if (!filehasher.CanReuseTransform)
                throw new Exception(string.Format(Strings.Foresthash.InvalidCryptoSystem, m_options.FileHashAlgorithm));

            bool first = true;
            RecreateDatabaseHandler.BlockVolumePostProcessor localpatcher = (key, rd) =>
            {
                if (first)
                {
                    //Figure out what files are to be patched, and what blocks are needed
                    PrepareBlockAndFileList(database, m_options, filter, m_result);

                    // Don't run this again
                    first = false;
                }
                else
                {
                    // Patch the missing blocks list to include the newly discovered blocklists
                    //UpdateMissingBlocksTable(key);
                }

                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

                CreateDirectoryStructure(database, m_options, m_result);

                //If we are patching an existing target folder, do not touch stuff that is already updated
                ScanForExistingTargetBlocks(database, m_blockbuffer, blockhasher, filehasher, m_options, m_result);

                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

#if DEBUG
                if (!m_options.NoLocalBlocks)
#endif
                // If other local files already have the blocks we want, we use them instead of downloading
                // (in release builds this scan runs unconditionally; the guard exists only in DEBUG builds)
                ScanForExistingSourceBlocks(database, m_options, m_blockbuffer, blockhasher, m_result);

                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

                //Update files with data
                PatchWithBlocklist(database, rd, m_options, m_result, m_blockbuffer);
            };

            // TODO: When UpdateMissingBlocksTable is implemented, the localpatcher can be activated
            // and this will reduce the need for multiple downloads of the same volume
            // TODO: This will need some work to preserve the missing block list for use with --fh-dryrun
            m_result.RecreateDatabaseResults = new RecreateDatabaseResults(m_result);
            using(new Logging.Timer("Recreate temporary database for restore"))
                new RecreateDatabaseHandler(m_backendurl, m_options, (RecreateDatabaseResults)m_result.RecreateDatabaseResults)
                    .DoRun(database, filter, filelistfilter, /*localpatcher*/null);

            //If we have --version set, we need to adjust, as the db has only the required versions
            //TODO: Bit of a hack to set options that way
            if (m_options.Version != null && m_options.Version.Length > 0)
                m_options.RawOptions["version"] = string.Join(",", Enumerable.Range(0, m_options.Version.Length).Select(x => x.ToString()));

            DoRun(database, filter, m_result);
        }
    }
}
/// <summary>
/// Sorts the lines of <paramref name="filein"/> ordinally and writes the
/// result to <paramref name="fileout"/> (the two paths may be the same
/// file). Tries an in-memory sort first; on failure (e.g. the file is too
/// large) falls back to a disk-based bubble-merge that re-streams the file
/// until a pass completes without swaps.
/// </summary>
/// <param name="filein">Path of the file to sort</param>
/// <param name="fileout">Path the sorted result is written to</param>
public static void SortFile(string filein, string fileout)
{
    try
    {
        // If the file can fit into memory, this is MUCH faster
        using(var tfout = new Library.Utility.TempFile())
        {
            var data = File.ReadAllLines(filein);
            Array.Sort(data, StringComparer.Ordinal);
            File.WriteAllLines(tfout, data);
            File.Copy(tfout, fileout, true);
            return;
        }
    }
    catch
    {
        // Fall through to the slower disk-based sort below.
    }

    using(var tfin = new Library.Utility.TempFile())
    using(var tfout = new Library.Utility.TempFile())
    {
        long swaps;
        File.Copy(filein, tfin, true);

        do
        {
            swaps = 0L;
            using(var sw = new System.IO.StreamWriter(tfout))
            using(var sr = new System.IO.StreamReader(tfin))
            {
                var c1 = sr.ReadLine();
                var c2 = sr.ReadLine();

                while (c1 != null || c2 != null)
                {
                    // (Removed a leftover debug statement that matched lines
                    // starting with "a" and wrote an empty string.)
                    var cmp = StringComparer.Ordinal.Compare(c1, c2);
                    if (c2 == null || (c1 != null && cmp < 0))
                    {
                        // Pair already in order; emit the first line and slide.
                        sw.WriteLine(c1);
                        c1 = c2;
                        c2 = sr.ReadLine();
                    }
                    else
                    {
                        // Out of order: emit c2 first (dropping exact
                        // duplicates when cmp == 0) and record a swap so
                        // another pass is made.
                        if (cmp != 0)
                            sw.WriteLine(c2);
                        c2 = sr.ReadLine();
                        swaps++;
                    }
                }
            }

            File.Copy(tfout, tfin, true);
        } while(swaps > 0);

        File.Copy(tfout, fileout, true);
    }
}
/// <summary>
/// Builds a "block hash, volume file" index over every unencrypted block
/// volume found in the given folder, merging the results into a sorted
/// index file. Returns 0 on success, 100 on usage errors.
/// </summary>
/// <param name="args">Expects exactly two entries; args[1] is the folder to scan</param>
/// <param name="options">Options; "indexfile" overrides the default index path</param>
/// <param name="filter">Unused in this command</param>
public static int Run(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 2)
    {
        Console.WriteLine("Invalid argument count ({0} expected 2): {1}{2}", args.Count, Environment.NewLine, string.Join(Environment.NewLine, args));
        return 100;
    }

    var folder = Path.GetFullPath(args[1]);
    if (!Directory.Exists(folder))
    {
        Console.WriteLine("Folder not found: {0}", folder);
        return 100;
    }

    Directory.SetCurrentDirectory(folder);

    // The index file defaults to "index.txt"; an existing file is re-sorted
    // so the later merge step can rely on sorted input.
    string ixfile;
    options.TryGetValue("indexfile", out ixfile);
    if (string.IsNullOrWhiteSpace(ixfile))
        ixfile = "index.txt";
    ixfile = Path.GetFullPath(ixfile);

    if (!File.Exists(ixfile))
    {
        using(File.Create(ixfile)) { }
    }
    else
    {
        Console.WriteLine("Sorting existing index file");
        SortFile(ixfile, ixfile);
    }

    var filecount = Directory.EnumerateFiles(folder).Count();
    Console.WriteLine("Processing {0} files", filecount);

    var i = 0;
    var errors = 0;
    var totalblocks = 0L;
    var files = 0;

    foreach(var file in Directory.EnumerateFiles(folder))
    {
        Console.Write("{0}: {1}", i, file);
        try
        {
            // Only plain (unencrypted) Duplicati block volumes are indexed;
            // everything else is reported and skipped.
            var p = Duplicati.Library.Main.Volumes.VolumeBase.ParseFilename(file);
            if (p == null)
            {
                Console.WriteLine(" - Not a Duplicati file, ignoring");
                continue;
            }

            if (p.FileType != Duplicati.Library.Main.RemoteVolumeType.Blocks)
            {
                Console.WriteLine(" - Filetype {0}, skipping", p.FileType);
                continue;
            }

            if (!string.IsNullOrWhiteSpace(p.EncryptionModule))
            {
                Console.WriteLine(" - Encrypted, skipping");
                continue;
            }

            var filekey = Path.GetFileName(file);
            var blocks = 0;

            // Write one "hash, volume" line per archive entry, sort them,
            // then merge the sorted batch into the main index file.
            using(var cp = Library.DynamicLoader.CompressionLoader.GetModule(p.CompressionModule, file, options))
            using(var tf = new Library.Utility.TempFile())
            {
                using(var sw = new StreamWriter(tf))
                    foreach(var f in cp.ListFiles(null))
                    {
                        sw.WriteLine("{0}, {1}", Library.Utility.Utility.Base64UrlToBase64Plain(f), filekey);
                        blocks++;
                    }

                files++;
                totalblocks += blocks;

                Console.Write(" {0} hashes found, sorting ...", blocks);
                SortFile(tf, tf);
                Console.WriteLine(" done!");

                Console.Write("Merging {0} hashes ...", totalblocks);
                MergeFiles(ixfile, tf, ixfile);
                Console.WriteLine(" done!");
            }
        }
        catch(Exception ex)
        {
            // Report and continue with the next file.
            Console.WriteLine(" error: {0}", ex.ToString());
            errors++;
        }
        i++;
    }

    Console.WriteLine("Processed {0} files and found {1} hashes", files, totalblocks);
    if (errors > 0)
        Console.WriteLine("Experienced {0} errors", errors);

    return 0;
}
/// <summary>
/// Downloads the update package for <paramref name="version"/>, verifies
/// its size, SHA-256 and MD5, unpacks it into a temp folder, verifies the
/// unpacked content, and installs it under INSTALLDIR. Each candidate URL
/// is tried in turn; errors are reported via OnError and the next URL is
/// attempted. Returns true on successful install.
/// </summary>
/// <param name="version">The update manifest describing the package</param>
/// <param name="progress">Optional callback receiving download progress in [0, 1]</param>
public static bool DownloadAndUnpackUpdate(UpdateInfo version, Action <double> progress = null)
{
    if (INSTALLDIR == null)
    {
        return(false);
    }

    var updates = version.RemoteURLS.ToList();

    // If alternate update URLs are specified,
    // we look for packages there as well
    if (AutoUpdateSettings.UsesAlternateURLs)
    {
        var packagepath = new Library.Utility.Uri(updates[0]).Path;
        var packagename = packagepath.Split('/').Last();

        foreach (var alt_url in AutoUpdateSettings.URLs.Reverse())
        {
            // Rebuild each alternate URL so it points at the same package name.
            var alt_uri = new Library.Utility.Uri(alt_url);
            var path_components = alt_uri.Path.Split('/');
            var path = string.Join("/", path_components.Take(path_components.Count() - 1).Union(new string[] { packagename }));

            var new_path = alt_uri.SetPath(path);
            updates.Insert(0, new_path.ToString());
        }
    }

    using (var tempfile = new Library.Utility.TempFile())
    {
        foreach (var url in updates)
        {
            try
            {
                Action <long> cb = null;
                if (progress != null)
                {
                    // Map bytes-downloaded to a clamped 0..1 fraction of the package size.
                    cb = (s) => { progress(Math.Min(1.0, Math.Max(0.0, (double)s / version.CompressedSize))); };
                }

                var wreq = (System.Net.HttpWebRequest)System.Net.WebRequest.Create(url);
                wreq.UserAgent = string.Format("{0} v{1}", APPNAME, SelfVersion.Version);
                wreq.Headers.Add("X-Install-ID", InstallID);

                using (var resp = wreq.GetResponse())
                using (var rss = resp.GetResponseStream())
                using (var pgs = new Duplicati.Library.Utility.ProgressReportingStream(rss, version.CompressedSize, cb))
                using (var fs = System.IO.File.Open(tempfile, System.IO.FileMode.Create))
                    Duplicati.Library.Utility.Utility.CopyStream(pgs, fs);

                var sha256 = System.Security.Cryptography.SHA256.Create();
                var md5 = System.Security.Cryptography.MD5.Create();

                // Verify exact size and SHA-256 in one pass ...
                using (var s = System.IO.File.OpenRead(tempfile))
                {
                    if (s.Length != version.CompressedSize)
                    {
                        throw new Exception(string.Format("Invalid file size {0}, expected {1} for {2}", s.Length, version.CompressedSize, url));
                    }

                    var sha256hash = Convert.ToBase64String(sha256.ComputeHash(s));
                    if (sha256hash != version.SHA256)
                    {
                        throw new Exception(string.Format("Damaged or corrupted file, sha256 mismatch for {0}", url));
                    }
                }

                // ... and MD5 in a second pass.
                using (var s = System.IO.File.OpenRead(tempfile))
                {
                    var md5hash = Convert.ToBase64String(md5.ComputeHash(s));
                    if (md5hash != version.MD5)
                    {
                        throw new Exception(string.Format("Damaged or corrupted file, md5 mismatch for {0}", url));
                    }
                }

                using (var tempfolder = new Duplicati.Library.Utility.TempFolder())
                using (var zip = new Duplicati.Library.Compression.FileArchiveZip(tempfile, new Dictionary <string, string>()))
                {
                    foreach (var file in zip.ListFilesWithSize(""))
                    {
                        // Guard against zip-slip: refuse absolute paths and
                        // parent-directory escapes inside the archive.
                        if (System.IO.Path.IsPathRooted(file.Key) || file.Key.Trim().StartsWith("..", StringComparison.InvariantCultureIgnoreCase))
                        {
                            throw new Exception(string.Format("Out-of-place file path detected: {0}", file.Key));
                        }

                        var targetpath = System.IO.Path.Combine(tempfolder, file.Key);
                        var targetfolder = System.IO.Path.GetDirectoryName(targetpath);
                        if (!System.IO.Directory.Exists(targetfolder))
                        {
                            System.IO.Directory.CreateDirectory(targetfolder);
                        }

                        using (var zs = zip.OpenRead(file.Key))
                        using (var fs = System.IO.File.Create(targetpath))
                            zs.CopyTo(fs);
                    }

                    if (VerifyUnpackedFolder(tempfolder, version))
                    {
                        // Install into a version-numbered folder, replacing any
                        // previous attempt for the same version.
                        var versionstring = TryParseVersion(version.Version).ToString();
                        var targetfolder = System.IO.Path.Combine(INSTALLDIR, versionstring);
                        if (System.IO.Directory.Exists(targetfolder))
                        {
                            System.IO.Directory.Delete(targetfolder, true);
                        }

                        System.IO.Directory.CreateDirectory(targetfolder);

                        var tempfolderpath = Duplicati.Library.Utility.Utility.AppendDirSeparator(tempfolder);
                        var tempfolderlength = tempfolderpath.Length;

                        // Would be nice, but does not work :(
                        //System.IO.Directory.Move(tempfolder, targetfolder);
                        foreach (var e in Duplicati.Library.Utility.Utility.EnumerateFileSystemEntries(tempfolder))
                        {
                            var relpath = e.Substring(tempfolderlength);
                            if (string.IsNullOrWhiteSpace(relpath))
                            {
                                continue;
                            }

                            var fullpath = System.IO.Path.Combine(targetfolder, relpath);
                            if (relpath.EndsWith(System.IO.Path.DirectorySeparatorChar.ToString()))
                            {
                                System.IO.Directory.CreateDirectory(fullpath);
                            }
                            else
                            {
                                System.IO.File.Copy(e, fullpath);
                            }
                        }

                        // Verification will kick in when we list the installed updates
                        //VerifyUnpackedFolder(targetfolder, version);
                        System.IO.File.WriteAllText(System.IO.Path.Combine(INSTALLDIR, CURRENT_FILE), versionstring);

                        m_hasUpdateInstalled = null;

                        // Prune old version folders, keeping the newest one
                        // besides the just-installed and the running version.
                        var obsolete = (from n in FindInstalledVersions()
                                        where n.Value.Version != version.Version && n.Value.Version != SelfVersion.Version
                                        let x = TryParseVersion(n.Value.Version)
                                        orderby x descending
                                        select n).Skip(1).ToArray();

                        foreach (var f in obsolete)
                        {
                            // Best-effort cleanup; a locked folder is left behind.
                            try { System.IO.Directory.Delete(f.Key, true); }
                            catch { }
                        }

                        return(true);
                    }
                    else
                    {
                        throw new Exception(string.Format("Unable to verify unpacked folder for url: {0}", url));
                    }
                }
            }
            catch (Exception ex)
            {
                // Report the failure and fall through to the next URL.
                if (OnError != null)
                {
                    OnError(ex);
                }
            }
        }
    }

    return(false);
}
/// <summary>
/// Exercises a backend end-to-end: uploads randomly named/sized files,
/// verifies the remote listing, downloads each file again and checks its
/// hash, then deletes everything. Returns false on setup/usage errors.
/// </summary>
/// <param name="args">args[0] is the backend URL to test</param>
/// <param name="options">Test options (file counts, sizes, flags)</param>
/// <param name="first">True on the first iteration; enables the auto-clean pass</param>
static bool Run(List<string> args, Dictionary<string, string> options, bool first)
{
    // Remote filenames are drawn from this character set, optionally extended.
    string allowedChars = ValidFilenameChars;
    if (options.ContainsKey("extended-chars"))
        allowedChars += options["extended-chars"];
    else
        allowedChars += ExtendedChars;

    bool autoCreateFolders = Library.Utility.Utility.ParseBoolOption(options, "auto-create-folder");

    Library.Interface.IBackend backend = Library.DynamicLoader.BackendLoader.GetBackend(args[0], options);
    if (backend == null)
    {
        Console.WriteLine("Unsupported backend");
        Console.WriteLine();
        Console.WriteLine("Supported backends: " + string.Join(",", XervBackup.Library.DynamicLoader.BackendLoader.Keys));
        return false;
    }

    // Load the generic modules that are enabled by default or explicitly
    // requested, skipping any that were explicitly disabled.
    string disabledModulesValue;
    string enabledModulesValue;
    options.TryGetValue("enable-module", out enabledModulesValue);
    options.TryGetValue("disable-module", out disabledModulesValue);
    string[] enabledModules = enabledModulesValue == null ? new string[0] : enabledModulesValue.Trim().ToLower().Split(',');
    string[] disabledModules = disabledModulesValue == null ? new string[0] : disabledModulesValue.Trim().ToLower().Split(',');

    List<Library.Interface.IGenericModule> loadedModules = new List<IGenericModule>();
    foreach (Library.Interface.IGenericModule m in Library.DynamicLoader.GenericLoader.Modules)
        if (Array.IndexOf<string>(disabledModules, m.Key.ToLower()) < 0 && (m.LoadAsDefault || Array.IndexOf<string>(enabledModules, m.Key.ToLower()) >= 0))
        {
            m.Configure(options);
            loadedModules.Add(m);
        }

    try
    {
        List<Library.Interface.IFileEntry> curlist = null;
        try
        {
            curlist = backend.List();
        }
        catch (FolderMissingException fex)
        {
            // Optionally create the remote folder and retry the listing;
            // rethrow the original exception if that did not help.
            if (autoCreateFolders)
            {
                try
                {
                    if (backend is IBackend_v2)
                        ((IBackend_v2)backend).CreateFolder();
                    curlist = backend.List();
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Autocreate folder failed with message: " + ex.Message);
                }
            }

            if (curlist == null)
                throw fex;
        }

        // The test requires an empty remote folder. With --auto-clean and
        // --force, files are removed; otherwise any file aborts the run.
        // (The else below binds to the inner "force" if, so without --force
        // a warning is printed and the run still aborts.)
        foreach (Library.Interface.IFileEntry fe in curlist)
            if (!fe.IsFolder)
            {
                if (Library.Utility.Utility.ParseBoolOption(options, "auto-clean") && first)
                    if (Library.Utility.Utility.ParseBoolOption(options, "force"))
                    {
                        Console.WriteLine("Auto clean, removing file: {0}", fe.Name);
                        backend.Delete(fe.Name);
                        continue;
                    }
                    else
                        Console.WriteLine("Specify the --force flag to actually delete files");

                Console.WriteLine("*** Remote folder is not empty, aborting");
                return false;
            }

        // Test parameters, each overridable via options.
        int number_of_files = 10;
        int min_file_size = 1024;
        int max_file_size = 1024 * 1024 * 50;
        int min_filename_size = 5;
        int max_filename_size = 80;
        bool disableStreaming = Library.Utility.Utility.ParseBoolOption(options, "disable-streaming-transfers");
        bool skipOverwriteTest = Library.Utility.Utility.ParseBoolOption(options, "skip-overwrite-test");

        if (options.ContainsKey("number-of-files"))
            number_of_files = int.Parse(options["number-of-files"]);
        if (options.ContainsKey("min-file-size"))
            min_file_size = (int)XervBackup.Library.Utility.Sizeparser.ParseSize(options["min-file-size"], "mb");
        if (options.ContainsKey("max-file-size"))
            max_file_size = (int)XervBackup.Library.Utility.Sizeparser.ParseSize(options["max-file-size"]);
        if (options.ContainsKey("min-filename-length"))
            min_filename_size = int.Parse(options["min-filename-length"]);
        if (options.ContainsKey("max-filename-length"))
            max_filename_size = int.Parse(options["max-filename-length"]);

        Random rnd = new Random();
        System.Security.Cryptography.SHA256 sha = System.Security.Cryptography.SHA256.Create();

        //Create random files
        using (Library.Utility.TempFolder tf = new XervBackup.Library.Utility.TempFolder())
        {
            List<TempFile> files = new List<TempFile>();
            for (int i = 0; i < number_of_files; i++)
            {
                // Random remote name drawn from the allowed character set.
                StringBuilder filename = new StringBuilder();
                int filenamelen = rnd.Next(min_filename_size, max_filename_size);
                for (int j = 0; j < filenamelen; j++)
                    filename.Append(allowedChars[rnd.Next(0, allowedChars.Length)]);

                string localfilename = CreateRandomFile(tf, i, min_file_size, max_file_size, rnd);

                //Calculate local hash and length
                using (System.IO.FileStream fs = new System.IO.FileStream(localfilename, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                    files.Add(new TempFile(filename.ToString(), localfilename, sha.ComputeHash(fs), fs.Length));
            }

            byte[] dummyFileHash = null;
            if (!skipOverwriteTest)
            {
                Console.WriteLine("Uploading wrong files ...");
                using (Library.Utility.TempFile dummy = new Library.Utility.TempFile(CreateRandomFile(tf, files.Count, 1024, 2048, rnd)))
                {
                    using (System.IO.FileStream fs = new System.IO.FileStream(dummy, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                        dummyFileHash = sha.ComputeHash(fs);

                    //Upload a dummy file for entry 0 and the last one, they will be replaced by the real files afterwards
                    //We upload entry 0 twice just to try to freak any internal cache list
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, files.Count - 1, files[files.Count - 1].remotefilename, backend, disableStreaming);
                }
            }

            Console.WriteLine("Uploading files ...");

            for (int i = 0; i < files.Count; i++)
                Uploadfile(files[i].localfilename, i, files[i].remotefilename, backend, disableStreaming);

            // Cross-check the remote listing against what was uploaded:
            // every uploaded file must appear exactly once with the right size.
            Console.WriteLine("Verifying file list ...");

            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
                if (!fe.IsFolder)
                {
                    bool found = false;
                    foreach (TempFile tx in files)
                        if (tx.remotefilename == fe.Name)
                        {
                            if (tx.found)
                                Console.WriteLine("*** File with name {0} was found more than once", tx.remotefilename);
                            found = true;
                            tx.found = true;

                            if (fe.Size > 0 && tx.length != fe.Size)
                                Console.WriteLine("*** File with name {0} has size {1} but the size was reported as {2}", tx.remotefilename, tx.length, fe.Size);

                            break;
                        }

                    if (!found)
                        Console.WriteLine("*** File with name {0} was found on server but not uploaded!", fe.Name);
                }

            foreach (TempFile tx in files)
                if (!tx.found)
                    Console.WriteLine("*** File with name {0} was uploaded but not found afterwards", tx.remotefilename);

            // Download each file again and compare hashes with the local copy.
            Console.WriteLine("Downloading files");

            for (int i = 0; i < files.Count; i++)
            {
                using (XervBackup.Library.Utility.TempFile cf = new XervBackup.Library.Utility.TempFile())
                {
                    Exception e = null;
                    Console.Write("Downloading file {0} ... ", i);

                    try
                    {
                        if (backend is Library.Interface.IStreamingBackend && !disableStreaming)
                        {
                            using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None))
                            using (NonSeekableStream nss = new NonSeekableStream(fs))
                                (backend as Library.Interface.IStreamingBackend).Get(files[i].remotefilename, nss);
                        }
                        else
                            backend.Get(files[i].remotefilename, cf);

                        e = null;
                    }
                    catch (Exception ex)
                    {
                        e = ex;
                    }

                    if (e != null)
                        Console.WriteLine("failed\n*** Error: {0}", e.ToString());
                    else
                        Console.WriteLine("done");

                    Console.Write("Checking hash ... ");

                    using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                        if (Convert.ToBase64String(sha.ComputeHash(fs)) != Convert.ToBase64String(files[i].hash))
                        {
                            // Distinguish "got the dummy file back" from generic corruption.
                            if (dummyFileHash != null && Convert.ToBase64String(sha.ComputeHash(fs)) == Convert.ToBase64String(dummyFileHash))
                                Console.WriteLine("failed\n*** Downloaded file was the dummy file");
                            else
                                Console.WriteLine("failed\n*** Downloaded file was corrupt");
                        }
                        else
                            Console.WriteLine("done");
                }
            }

            // Clean up the remote folder and report anything left behind.
            Console.WriteLine("Deleting files...");

            foreach (TempFile tx in files)
                try
                {
                    backend.Delete(tx.remotefilename);
                }
                catch (Exception ex)
                {
                    Console.WriteLine("*** Failed to delete file {0}, message: {1}", tx.remotefilename, ex.ToString());
                }

            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
                if (!fe.IsFolder)
                {
                    Console.WriteLine("*** Remote folder contains {0} after cleanup", fe.Name);
                }
        }
    }
    finally
    {
        // Allow loaded generic modules to release any held resources.
        foreach (Library.Interface.IGenericModule m in loadedModules)
            if (m is IDisposable)
                ((IDisposable)m).Dispose();
    }

    return true;
}
/// <summary>
/// Lists the control files stored inside remote fileset (dlist) volumes, restricted
/// by the supplied filters, and publishes the result via <c>m_result.SetResult</c>.
/// Tries each candidate fileset in turn until one can be downloaded and read.
/// </summary>
/// <param name="filterstrings">Optional raw filter strings; combined into a FilterExpression.</param>
/// <param name="compositefilter">Optional pre-built filter joined with the filter strings.</param>
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    // If no local database exists yet, fall back to a throw-away temp database so
    // the BackendManager still has somewhere to record remote volume state.
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(System.IO.File.Exists(m_options.Dbpath) ? m_options.Dbpath : (string)tmpdb, "ListControlFiles", true))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter);

        try
        {
            // Determine which remote filesets match the version/time selection options
            var filteredList = ListFilesHandler.ParseAndFilterFilesets(backend.List(), m_options);
            if (filteredList.Count == 0)
            {
                throw new Exception("No filesets found on remote target");
            }

            // Pre-seeded so that falling through the loop without success throws a sensible message
            Exception lastEx = new Exception("No suitable files found on remote target");

            foreach (var fileversion in filteredList)
            {
                try
                {
                    // Honor a pending stop request before starting a download
                    if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    {
                        return;
                    }

                    var file = fileversion.Value.File;
                    long size;
                    string hash;
                    RemoteVolumeType type;
                    RemoteVolumeState state;

                    // Prefer the size/hash recorded in the local database; if the volume is
                    // unknown, fall back to the size reported by the remote listing (no hash).
                    if (!db.GetRemoteVolume(file.Name, out hash, out size, out type, out state))
                    {
                        size = file.Size;
                    }

                    var files = new List<Library.Interface.IListResultFile>();

                    // Download the dlist volume and enumerate its embedded control files
                    using (var tmpfile = backend.Get(file.Name, size, hash))
                    using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options))
                        foreach (var cf in tmp.ControlFiles)
                        {
                            if (Library.Utility.FilterExpression.Matches(filter, cf.Key))
                            {
                                files.Add(new ListResultFile(cf.Key, null));
                            }
                        }

                    m_result.SetResult(new Library.Interface.IListResultFileset[] { new ListResultFileset(fileversion.Key, fileversion.Value.Time, -1, -1) }, files);

                    // Success: clear the failure marker and stop trying older filesets
                    lastEx = null;
                    break;
                }
                catch (Exception ex)
                {
                    // Remember the failure and try the next fileset; abort requests propagate
                    lastEx = ex;
                    if (ex is System.Threading.ThreadAbortException)
                    {
                        throw;
                    }
                }
            }

            if (lastEx != null)
            {
                throw lastEx;
            }
        }
        finally
        {
            // Always drain pending backend operations, even on failure
            backend.WaitForComplete(db, null);
        }
    }
}
/// <summary>
/// Queries the manifest URLs for a newer version on the given release channel.
/// Returns the update descriptor if a strictly newer, channel-compatible version
/// is found; otherwise null. Errors are reported through <c>OnError</c> and the
/// next manifest URL is tried.
/// </summary>
/// <param name="channel">Release channel to check; Unknown selects the configured default.</param>
/// <returns>The available update, or null when none applies.</returns>
public static UpdateInfo CheckForUpdate(ReleaseType channel = ReleaseType.Unknown)
{
    if (channel == ReleaseType.Unknown)
    {
        channel = AutoUpdateSettings.DefaultUpdateChannel;
    }

    foreach (var rawurl in MANIFEST_URLS)
    {
        var url = rawurl;

        // Attempt to match the url to change the channel if possible
        // This allows overrides to the URLs for deployment of custom builds,
        // but does not require that they adopt the channel system
        var match = AutoUpdateSettings.MATCH_AUTOUPDATE_URL.Match(url);
        if (match.Success)
        {
            var mg = match.Groups[AutoUpdateSettings.MATCH_UPDATE_URL_CHANNEL_GROUP];

            // Replace the channel name with the chosen channel
            url = url.Substring(0, mg.Index) + channel.ToString().ToLowerInvariant() + url.Substring(mg.Index + mg.Length);
        }

        try
        {
            using (var tmpfile = new Library.Utility.TempFile())
            {
                // FIX: WebClient is IDisposable and was previously never disposed;
                // wrap it in a using so the underlying request resources are released.
                using (var wc = new System.Net.WebClient())
                {
                    wc.Headers.Add(System.Net.HttpRequestHeader.UserAgent, string.Format("{0} v{1}{2}", APPNAME, SelfVersion.Version, string.IsNullOrWhiteSpace(InstallID) ? "" : " -" + InstallID));
                    wc.Headers.Add("X-Install-ID", InstallID);
                    wc.DownloadFile(url, tmpfile);
                }

                // The manifest is signature-wrapped JSON; SignatureReadingStream
                // validates it against SIGN_KEY before we deserialize.
                using (var fs = System.IO.File.OpenRead(tmpfile))
                using (var ss = new SignatureReadingStream(fs, SIGN_KEY))
                using (var tr = new System.IO.StreamReader(ss))
                using (var jr = new Newtonsoft.Json.JsonTextReader(tr))
                {
                    var update = new Newtonsoft.Json.JsonSerializer().Deserialize<UpdateInfo>(jr);

                    // Only accept strictly newer versions
                    if (TryParseVersion(update.Version) <= TryParseVersion(SelfVersion.Version))
                    {
                        return null;
                    }

                    // Don't install a debug update on a release build and vice versa
                    // FIX: both halves now use the same comparison; previously one side used
                    // InvariantCultureIgnoreCase and the other CurrentCultureIgnoreCase.
                    if (string.Equals(SelfVersion.ReleaseType, "Debug", StringComparison.InvariantCultureIgnoreCase) && !string.Equals(update.ReleaseType, SelfVersion.ReleaseType, StringComparison.InvariantCultureIgnoreCase))
                    {
                        return null;
                    }

                    ReleaseType rt;
                    if (!Enum.TryParse<ReleaseType>(update.ReleaseType, true, out rt))
                    {
                        rt = ReleaseType.Unknown;
                    }

                    // If the update is too low to be considered, skip it
                    // Should never happen, but protects against mistakes in deployment
                    if (rt > channel)
                    {
                        return null;
                    }

                    LastUpdateCheckVersion = update;
                    return update;
                }
            }
        }
        catch (Exception ex)
        {
            // Report and fall through to the next manifest URL
            if (OnError != null)
            {
                OnError(ex);
            }
        }
    }

    return null;
}
/// <summary>
/// RecoveryTool restore entry point: rebuilds files from a downloaded backup set
/// using the sorted block index. args[1] is the working folder; optional args[2]
/// selects a specific file list. Returns 0 on success, 100 on usage/setup errors.
/// </summary>
/// <param name="args">Command arguments (command name, folder, optional list selector).</param>
/// <param name="options">Parsed command line options.</param>
/// <param name="filter">File inclusion filter.</param>
/// <returns>Process-style exit code.</returns>
public static int Run(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 2 && args.Count != 3)
    {
        Console.WriteLine("Invalid argument count ({0} expected 2 or 3): {1}{2}", args.Count, Environment.NewLine, string.Join(Environment.NewLine, args));
        return 100;
    }

    var folder = Path.GetFullPath(args[1]);
    if (!Directory.Exists(folder))
    {
        Console.WriteLine("Folder not found: {0}", folder);
        return 100;
    }

    Directory.SetCurrentDirectory(folder);

    string targetpath;
    options.TryGetValue("targetpath", out targetpath);

    string ixfile;
    options.TryGetValue("indexfile", out ixfile);
    if (string.IsNullOrWhiteSpace(ixfile))
    {
        ixfile = "index.txt";
    }

    ixfile = Path.GetFullPath(ixfile);
    if (!File.Exists(ixfile))
    {
        Console.WriteLine("Index file not found, perhaps you need to run the index command?");
        return 100;
    }

    // The lookup helper requires a sorted index; sorting is idempotent
    Console.Write("Sorting index file ...");
    Index.SortFile(ixfile, ixfile);
    Console.WriteLine(" done!");

    string filelist;
    if (args.Count == 2)
    {
        // No explicit selection: use the most recent file list
        var time = List.ParseListFiles(folder).First();
        filelist = time.Value;
        Console.WriteLine("Using set 0 with timestamp {0}", time.Key.ToLocalTime());
    }
    else
    {
        filelist = List.SelectListFile(args[2], folder);
    }

    // Pull blocksize/hash settings out of the manifest so they match the backup
    Library.Main.Volumes.VolumeReaderBase.UpdateOptionsFromManifest(Path.GetExtension(filelist).Trim('.'), filelist, new Duplicati.Library.Main.Options(options));

    string blocksize_str;
    options.TryGetValue("blocksize", out blocksize_str);
    string blockhash_str;
    options.TryGetValue("block-hash-algorithm", out blockhash_str);
    string filehash_str;
    // BUGFIX: this previously read "block-hash-algorithm" again (copy/paste error),
    // so the file hash algorithm could never be set independently of the block hash.
    options.TryGetValue("file-hash-algorithm", out filehash_str);

    long blocksize = string.IsNullOrWhiteSpace(blocksize_str) ? 0 : Library.Utility.Sizeparser.ParseSize(blocksize_str);
    if (blocksize <= 0)
    {
        Console.WriteLine("Invalid blocksize: {0}, try setting --blocksize manually", blocksize);
        return 100;
    }

    var blockhasher = string.IsNullOrWhiteSpace(blockhash_str) ? null : Library.Utility.HashAlgorithmHelper.Create(blockhash_str);
    var filehasher = string.IsNullOrWhiteSpace(filehash_str) ? null : Library.Utility.HashAlgorithmHelper.Create(filehash_str);

    if (blockhasher == null)
    {
        throw new Duplicati.Library.Interface.UserInformationException(string.Format("Block hash algorithm not valid: {0}", blockhash_str), "BlockHashAlgorithmNotSupported");
    }
    if (filehasher == null)
    {
        throw new Duplicati.Library.Interface.UserInformationException(string.Format("File hash algorithm not valid: {0}", filehash_str), "FileHashAlgorithmNotSupported");
    }

    // Number of block hashes that fit in one block (used for blocklist offsets)
    var hashesprblock = blocksize / (blockhasher.HashSize / 8);

    using (var mru = new CompressedFileMRUCache(options))
    {
        Console.WriteLine("Building lookup table for file hashes");
        var lookup = new HashLookupHelper(ixfile, mru, (int)blocksize, blockhasher.HashSize / 8);

        var filecount = 0L;
        string largestprefix = null;
        string[] largestprefixparts = null;

        if (!string.IsNullOrWhiteSpace(targetpath))
        {
            Console.WriteLine("Computing restore path");
        }

        // First pass: count files and compute the longest common path prefix,
        // so restores into targetpath do not recreate the full source tree.
        foreach (var f in List.EnumerateFilesInDList(filelist, filter, options))
        {
            if (largestprefix == null)
            {
                largestprefix = f.Path;
                largestprefixparts = largestprefix.Split(new char[] { Path.DirectorySeparatorChar });
            }
            else if (largestprefix.Length > 1)
            {
                var parts = f.Path.Split(new char[] { Path.DirectorySeparatorChar });

                var ni = 0;
                for (; ni < Math.Min(parts.Length, largestprefixparts.Length); ni++)
                {
                    if (!Library.Utility.Utility.ClientFilenameStringComparer.Equals(parts[ni], largestprefixparts[ni]))
                    {
                        break;
                    }
                }

                if (ni != largestprefixparts.Length)
                {
                    if (ni == 0)
                    {
                        largestprefixparts = new string[0];
                        largestprefix = string.Empty;
                    }
                    else
                    {
                        // NOTE(review): trimming to ni - 1 keeps one fewer component than the
                        // measured common run; harmless (a shorter prefix is still common),
                        // but it looks like an off-by-one worth confirming against history.
                        Array.Resize(ref largestprefixparts, ni - 1);
                        largestprefix = string.Join(Path.DirectorySeparatorChar.ToString(), largestprefixparts);
                    }
                }
            }
            filecount++;
        }

        Console.WriteLine("Restoring {0} files to {1}", filecount, string.IsNullOrWhiteSpace(targetpath) ? "original position" : targetpath);

        if (Library.Utility.Utility.IsClientLinux || largestprefix.Length > 0)
        {
            largestprefix = Library.Utility.Utility.AppendDirSeparator(largestprefix);
        }

        if (!string.IsNullOrEmpty(largestprefix))
        {
            Console.WriteLine("Removing common prefix {0} from files", largestprefix);
        }

        var i = 0L;
        var errors = 0L; // counted per-file; currently only used to keep the tally, not reported

        // Second pass: materialize each file from its blocks into a temp file,
        // verify the full-file hash, then move it into place.
        foreach (var f in List.EnumerateFilesInDList(filelist, filter, options))
        {
            try
            {
                var targetfile = MapToRestorePath(f.Path, largestprefix, targetpath);
                if (!Directory.Exists(Path.GetDirectoryName(targetfile)))
                {
                    Directory.CreateDirectory(Path.GetDirectoryName(targetfile));
                }

                Console.Write("{0}: {1} ({2})", i, targetfile, Library.Utility.Utility.FormatSizeString(f.Size));

                using (var tf = new Library.Utility.TempFile())
                {
                    using (var sw = File.OpenWrite(tf))
                    {
                        if (f.BlocklistHashes == null)
                        {
                            // Small file: the file hash is itself the single block hash
                            lookup.WriteHash(sw, f.Hash);
                        }
                        else
                        {
                            // Large file: each blocklist hash expands to a run of block hashes
                            var blhi = 0L;
                            foreach (var blh in f.BlocklistHashes)
                            {
                                Console.Write(" {0}", blhi);
                                var blockhashoffset = blhi * hashesprblock * blocksize;

                                try
                                {
                                    var bi = 0;
                                    foreach (var h in lookup.ReadBlocklistHashes(blh))
                                    {
                                        try
                                        {
                                            sw.Position = blockhashoffset + (bi * blocksize);
                                            lookup.WriteHash(sw, h);
                                        }
                                        catch (Exception ex)
                                        {
                                            Console.WriteLine("Failed to read hash: {0}{1}{2}", h, Environment.NewLine, ex);
                                        }
                                        bi++;
                                    }
                                }
                                catch (Exception ex)
                                {
                                    Console.WriteLine("Failed to read Blocklist hash: {0}{1}{2}", blh, Environment.NewLine, ex);
                                }

                                blhi++;
                            }
                        }
                    }

                    // Verify the reconstructed file against the recorded file hash
                    string fh;
                    using (var fs = File.OpenRead(tf))
                        fh = Convert.ToBase64String(filehasher.ComputeHash(fs));

                    if (fh == f.Hash)
                    {
                        Console.WriteLine(" done!");
                        File.Copy(tf, targetfile, true);
                    }
                    else
                    {
                        Console.Write(" - Restored file hash mismatch");
                        if (File.Exists(targetfile))
                        {
                            Console.WriteLine(" - not overwriting existing file: {0}", targetfile);
                        }
                        else
                        {
                            Console.WriteLine(" - restoring file in damaged condition");
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(" error: {0}", ex);
                errors++;
            }
            i++;
        }
    }

    return 0;
}
/// <summary>
/// Exercises a backend end-to-end: uploads randomly named/sized files, verifies the
/// remote listing, downloads and hash-checks each file, then deletes everything.
/// Returns false on setup failures; per-file problems are reported with "***" lines.
/// </summary>
/// <param name="args">args[0] is the backend URL.</param>
/// <param name="options">Test options (file counts, sizes, module toggles, etc.).</param>
/// <param name="first">True on the first pass; enables the auto-clean behavior.</param>
/// <returns>True when the run completed; false when it could not start.</returns>
static bool Run(List<string> args, Dictionary<string, string> options, bool first)
{
    // Build the alphabet used for generating remote file names
    string allowedChars = ValidFilenameChars;
    if (options.ContainsKey("extended-chars"))
    {
        allowedChars += options["extended-chars"];
    }
    else
    {
        allowedChars += ExtendedChars;
    }

    bool autoCreateFolders = Library.Utility.Utility.ParseBoolOption(options, "auto-create-folder");

    Library.Interface.IBackend backend = Library.DynamicLoader.BackendLoader.GetBackend(args[0], options);
    if (backend == null)
    {
        Console.WriteLine("Unsupported backend");
        Console.WriteLine();
        Console.WriteLine("Supported backends: " + string.Join(",", Duplicati.Library.DynamicLoader.BackendLoader.Keys));
        return false;
    }

    // Resolve which generic modules to configure for this run
    string disabledModulesValue;
    string enabledModulesValue;
    options.TryGetValue("enable-module", out enabledModulesValue);
    options.TryGetValue("disable-module", out disabledModulesValue);
    string[] enabledModules = enabledModulesValue == null ? new string[0] : enabledModulesValue.Trim().ToLower().Split(',');
    string[] disabledModules = disabledModulesValue == null ? new string[0] : disabledModulesValue.Trim().ToLower().Split(',');

    List<Library.Interface.IGenericModule> loadedModules = new List<IGenericModule>();
    foreach (Library.Interface.IGenericModule m in Library.DynamicLoader.GenericLoader.Modules)
    {
        if (Array.IndexOf<string>(disabledModules, m.Key.ToLower()) < 0 && (m.LoadAsDefault || Array.IndexOf<string>(enabledModules, m.Key.ToLower()) >= 0))
        {
            m.Configure(options);
            loadedModules.Add(m);
        }
    }

    try
    {
        List<Library.Interface.IFileEntry> curlist = null;
        try
        {
            curlist = backend.List();
        }
        catch (FolderMissingException)
        {
            if (autoCreateFolders)
            {
                try
                {
                    backend.CreateFolder();
                    curlist = backend.List();
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Autocreate folder failed with message: " + ex.Message);
                }
            }

            if (curlist == null)
            {
                // BUGFIX: was "throw fex;", which resets the original stack trace.
                throw;
            }
        }

        // Refuse to run against a non-empty remote folder (optionally auto-clean it)
        foreach (Library.Interface.IFileEntry fe in curlist)
        {
            if (!fe.IsFolder)
            {
                if (Library.Utility.Utility.ParseBoolOption(options, "auto-clean") && first)
                {
                    if (Library.Utility.Utility.ParseBoolOption(options, "force"))
                    {
                        Console.WriteLine("Auto clean, removing file: {0}", fe.Name);
                        backend.Delete(fe.Name);
                        continue;
                    }
                    else
                    {
                        Console.WriteLine("Specify the --force flag to actually delete files");
                    }
                }

                Console.WriteLine("*** Remote folder is not empty, aborting");
                return false;
            }
        }

        // Test parameters with their defaults
        int number_of_files = 10;
        int min_file_size = 1024;
        int max_file_size = 1024 * 1024 * 50;
        int min_filename_size = 5;
        int max_filename_size = 80;
        bool disableStreaming = Library.Utility.Utility.ParseBoolOption(options, "disable-streaming-transfers");
        bool skipOverwriteTest = Library.Utility.Utility.ParseBoolOption(options, "skip-overwrite-test");

        if (options.ContainsKey("number-of-files"))
        {
            number_of_files = int.Parse(options["number-of-files"]);
        }
        if (options.ContainsKey("min-file-size"))
        {
            min_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["min-file-size"], "mb");
        }
        if (options.ContainsKey("max-file-size"))
        {
            max_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["max-file-size"], "mb");
        }
        if (options.ContainsKey("min-filename-length"))
        {
            min_filename_size = int.Parse(options["min-filename-length"]);
        }
        if (options.ContainsKey("max-filename-length"))
        {
            max_filename_size = int.Parse(options["max-filename-length"]);
        }

        Random rnd = new Random();

        // Dispose the hasher when the test data phase is done
        using (System.Security.Cryptography.SHA256 sha = System.Security.Cryptography.SHA256.Create())
        //Create random files
        using (Library.Utility.TempFolder tf = new Duplicati.Library.Utility.TempFolder())
        {
            List<TempFile> files = new List<TempFile>();
            for (int i = 0; i < number_of_files; i++)
            {
                StringBuilder filename = new StringBuilder();
                int filenamelen = rnd.Next(min_filename_size, max_filename_size);
                for (int j = 0; j < filenamelen; j++)
                {
                    filename.Append(allowedChars[rnd.Next(0, allowedChars.Length)]);
                }

                string localfilename = CreateRandomFile(tf, i, min_file_size, max_file_size, rnd);

                //Calculate local hash and length
                using (System.IO.FileStream fs = new System.IO.FileStream(localfilename, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                    files.Add(new TempFile(filename.ToString(), localfilename, sha.ComputeHash(fs), fs.Length));
            }

            byte[] dummyFileHash = null;
            if (!skipOverwriteTest)
            {
                Console.WriteLine("Uploading wrong files ...");
                using (Library.Utility.TempFile dummy = Library.Utility.TempFile.WrapExistingFile(CreateRandomFile(tf, files.Count, 1024, 2048, rnd)))
                {
                    using (System.IO.FileStream fs = new System.IO.FileStream(dummy, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                        dummyFileHash = sha.ComputeHash(fs);

                    //Upload a dummy file for entry 0 and the last one, they will be replaced by the real files afterwards
                    //We upload entry 0 twice just to try to freak any internal cache list
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, files.Count - 1, files[files.Count - 1].remotefilename, backend, disableStreaming);
                }
            }

            Console.WriteLine("Uploading files ...");
            for (int i = 0; i < files.Count; i++)
            {
                Uploadfile(files[i].localfilename, i, files[i].remotefilename, backend, disableStreaming);
            }

            // Cross-check the remote listing against what we uploaded
            Console.WriteLine("Verifying file list ...");
            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
            {
                if (!fe.IsFolder)
                {
                    bool found = false;
                    foreach (TempFile tx in files)
                    {
                        if (tx.remotefilename == fe.Name)
                        {
                            if (tx.found)
                            {
                                Console.WriteLine("*** File with name {0} was found more than once", tx.remotefilename);
                            }

                            found = true;
                            tx.found = true;

                            if (fe.Size > 0 && tx.length != fe.Size)
                            {
                                Console.WriteLine("*** File with name {0} has size {1} but the size was reported as {2}", tx.remotefilename, tx.length, fe.Size);
                            }

                            break;
                        }
                    }

                    if (!found)
                    {
                        Console.WriteLine("*** File with name {0} was found on server but not uploaded!", fe.Name);
                    }
                }
            }

            foreach (TempFile tx in files)
            {
                if (!tx.found)
                {
                    Console.WriteLine("*** File with name {0} was uploaded but not found afterwards", tx.remotefilename);
                }
            }

            Console.WriteLine("Downloading files");

            for (int i = 0; i < files.Count; i++)
            {
                using (Duplicati.Library.Utility.TempFile cf = new Duplicati.Library.Utility.TempFile())
                {
                    Exception e = null;
                    Console.Write("Downloading file {0} ... ", i);
                    try
                    {
                        if (backend is Library.Interface.IStreamingBackend && !disableStreaming)
                        {
                            using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None))
                            using (NonSeekableStream nss = new NonSeekableStream(fs))
                                (backend as Library.Interface.IStreamingBackend).Get(files[i].remotefilename, nss);
                        }
                        else
                        {
                            backend.Get(files[i].remotefilename, cf);
                        }

                        e = null;
                    }
                    catch (Exception ex)
                    {
                        e = ex;
                    }

                    if (e != null)
                    {
                        Console.WriteLine("failed\n*** Error: {0}", e.ToString());
                    }
                    else
                    {
                        Console.WriteLine("done");
                    }

                    Console.Write("Checking hash ... ");
                    using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                    {
                        // BUGFIX: hash the stream exactly once. The old code called
                        // sha.ComputeHash(fs) a second time for the dummy-file check, but by
                        // then the stream was at EOF, so it hashed zero bytes and the
                        // "dummy file" diagnosis could never trigger.
                        var downloadedHash = Convert.ToBase64String(sha.ComputeHash(fs));
                        if (downloadedHash != Convert.ToBase64String(files[i].hash))
                        {
                            if (dummyFileHash != null && downloadedHash == Convert.ToBase64String(dummyFileHash))
                            {
                                Console.WriteLine("failed\n*** Downloaded file was the dummy file");
                            }
                            else
                            {
                                Console.WriteLine("failed\n*** Downloaded file was corrupt");
                            }
                        }
                        else
                        {
                            Console.WriteLine("done");
                        }
                    }
                }
            }

            Console.WriteLine("Deleting files...");

            foreach (TempFile tx in files)
            {
                try
                {
                    backend.Delete(tx.remotefilename);
                }
                catch (Exception ex)
                {
                    Console.WriteLine("*** Failed to delete file {0}, message: {1}", tx.remotefilename, ex.ToString());
                }
            }

            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
            {
                if (!fe.IsFolder)
                {
                    Console.WriteLine("*** Remote folder contains {0} after cleanup", fe.Name);
                }
            }
        }
    }
    finally
    {
        // Dispose any generic modules we configured
        foreach (Library.Interface.IGenericModule m in loadedModules)
        {
            if (m is IDisposable)
            {
                ((IDisposable)m).Dispose();
            }
        }
    }

    return true;
}
/// <summary>
/// Exercises a backend end-to-end: upload, optional rename, listing verification,
/// download with hash check, deletion, plus quota and DNS-name probes.
/// Returns false on setup failures; per-file problems are reported with "***" lines.
/// </summary>
/// <param name="args">args[0] is the backend URL.</param>
/// <param name="options">Test options (file counts, sizes, module toggles, etc.).</param>
/// <param name="first">True on the first pass; enables the auto-clean behavior.</param>
/// <returns>True when the run completed; false when it could not start.</returns>
static bool Run(List<string> args, Dictionary<string, string> options, bool first)
{
    Library.Interface.IBackend backend = Library.DynamicLoader.BackendLoader.GetBackend(args[0], options);
    if (backend == null)
    {
        Console.WriteLine("Unsupported backend");
        Console.WriteLine();
        Console.WriteLine("Supported backends: " + string.Join(",", Duplicati.Library.DynamicLoader.BackendLoader.Keys));
        return false;
    }

    // Build the alphabet used for generating remote file names
    string allowedChars = ValidFilenameChars;
    if (options.ContainsKey("extended-chars"))
    {
        allowedChars += String.IsNullOrEmpty(options["extended-chars"]) ? ExtendedChars : options["extended-chars"];
    }

    bool autoCreateFolders = Library.Utility.Utility.ParseBoolOption(options, "auto-create-folder");

    // Resolve which generic modules to configure for this run
    string disabledModulesValue;
    string enabledModulesValue;
    options.TryGetValue("enable-module", out enabledModulesValue);
    options.TryGetValue("disable-module", out disabledModulesValue);
    string[] enabledModules = enabledModulesValue == null ? new string[0] : enabledModulesValue.Trim().ToLower().Split(',');
    string[] disabledModules = disabledModulesValue == null ? new string[0] : disabledModulesValue.Trim().ToLower().Split(',');

    List<Library.Interface.IGenericModule> loadedModules = new List<IGenericModule>();
    foreach (Library.Interface.IGenericModule m in Library.DynamicLoader.GenericLoader.Modules)
    {
        if (!disabledModules.Contains(m.Key, StringComparer.OrdinalIgnoreCase) && (m.LoadAsDefault || enabledModules.Contains(m.Key, StringComparer.OrdinalIgnoreCase)))
        {
            m.Configure(options);
            loadedModules.Add(m);
        }
    }

    try
    {
        IEnumerable<Library.Interface.IFileEntry> curlist = null;
        try
        {
            backend.Test();
            curlist = backend.List();
        }
        catch (FolderMissingException)
        {
            if (autoCreateFolders)
            {
                try
                {
                    backend.CreateFolder();
                    curlist = backend.List();
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Autocreate folder failed with message: " + ex.Message);
                }
            }

            if (curlist == null)
            {
                // BUGFIX: was "throw fex;", which resets the original stack trace.
                throw;
            }
        }

        // Refuse to run against a non-empty remote folder (optionally auto-clean it)
        foreach (Library.Interface.IFileEntry fe in curlist)
        {
            if (!fe.IsFolder)
            {
                if (Library.Utility.Utility.ParseBoolOption(options, "auto-clean") && first)
                {
                    if (Library.Utility.Utility.ParseBoolOption(options, "force"))
                    {
                        Console.WriteLine("Auto clean, removing file: {0}", fe.Name);
                        backend.Delete(fe.Name);
                        continue;
                    }
                    else
                    {
                        Console.WriteLine("Specify the --force flag to actually delete files");
                    }
                }

                Console.WriteLine("*** Remote folder is not empty, aborting");
                return false;
            }
        }

        // Test parameters with their defaults
        int number_of_files = 10;
        int min_file_size = 1024;
        int max_file_size = 1024 * 1024 * 50;
        int min_filename_size = 5;
        int max_filename_size = 80;
        bool disableStreaming = Library.Utility.Utility.ParseBoolOption(options, "disable-streaming-transfers");
        bool skipOverwriteTest = Library.Utility.Utility.ParseBoolOption(options, "skip-overwrite-test");
        bool trimFilenameSpaces = Library.Utility.Utility.ParseBoolOption(options, "trim-filename-spaces");

        if (options.ContainsKey("number-of-files"))
        {
            number_of_files = int.Parse(options["number-of-files"]);
        }
        if (options.ContainsKey("min-file-size"))
        {
            min_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["min-file-size"], "mb");
        }
        if (options.ContainsKey("max-file-size"))
        {
            max_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["max-file-size"], "mb");
        }
        if (options.ContainsKey("min-filename-length"))
        {
            min_filename_size = int.Parse(options["min-filename-length"]);
        }
        if (options.ContainsKey("max-filename-length"))
        {
            max_filename_size = int.Parse(options["max-filename-length"]);
        }

        Random rnd = new Random();

        // Dispose the hasher when the test data phase is done
        using (System.Security.Cryptography.SHA256 sha = System.Security.Cryptography.SHA256.Create())
        //Create random files
        using (Library.Utility.TempFolder tf = new Duplicati.Library.Utility.TempFolder())
        {
            List<TempFile> files = new List<TempFile>();
            for (int i = 0; i < number_of_files; i++)
            {
                string filename = CreateRandomRemoteFileName(min_filename_size, max_filename_size, allowedChars, trimFilenameSpaces, rnd);
                string localfilename = CreateRandomFile(tf, i, min_file_size, max_file_size, rnd);

                //Calculate local hash and length
                using (System.IO.FileStream fs = new System.IO.FileStream(localfilename, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                    files.Add(new TempFile(filename, localfilename, sha.ComputeHash(fs), fs.Length));
            }

            byte[] dummyFileHash = null;
            if (!skipOverwriteTest)
            {
                Console.WriteLine("Uploading wrong files ...");
                using (Library.Utility.TempFile dummy = Library.Utility.TempFile.WrapExistingFile(CreateRandomFile(tf, files.Count, 1024, 2048, rnd)))
                {
                    using (System.IO.FileStream fs = new System.IO.FileStream(dummy, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                        dummyFileHash = sha.ComputeHash(fs);

                    //Upload a dummy file for entry 0 and the last one, they will be replaced by the real files afterwards
                    //We upload entry 0 twice just to try to freak any internal cache list
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, files.Count - 1, files[files.Count - 1].remotefilename, backend, disableStreaming);
                }
            }

            Console.WriteLine("Uploading files ...");
            for (int i = 0; i < files.Count; i++)
            {
                Uploadfile(files[i].localfilename, i, files[i].remotefilename, backend, disableStreaming);
            }

            // Exercise the rename operation when the backend supports it
            TempFile originalRenamedFile = null;
            string renamedFileNewName = null;
            IRenameEnabledBackend renameEnabledBackend = backend as IRenameEnabledBackend;
            if (renameEnabledBackend != null)
            {
                // Rename the second file in the list, if there are more than one. If not, just do the first one.
                int renameIndex = files.Count > 1 ? 1 : 0;
                originalRenamedFile = files[renameIndex];

                renamedFileNewName = CreateRandomRemoteFileName(min_filename_size, max_filename_size, allowedChars, trimFilenameSpaces, rnd);
                Console.WriteLine("Renaming file {0} from {1} to {2}", renameIndex, originalRenamedFile.remotefilename, renamedFileNewName);

                renameEnabledBackend.Rename(originalRenamedFile.remotefilename, renamedFileNewName);
                files[renameIndex] = new TempFile(renamedFileNewName, originalRenamedFile.localfilename, originalRenamedFile.hash, originalRenamedFile.length);
            }

            // Cross-check the remote listing against what we uploaded/renamed
            Console.WriteLine("Verifying file list ...");
            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
            {
                if (!fe.IsFolder)
                {
                    bool found = false;
                    foreach (TempFile tx in files)
                    {
                        if (tx.remotefilename == fe.Name)
                        {
                            if (tx.found)
                            {
                                Console.WriteLine("*** File with name {0} was found more than once", tx.remotefilename);
                            }

                            found = true;
                            tx.found = true;

                            if (fe.Size > 0 && tx.length != fe.Size)
                            {
                                Console.WriteLine("*** File with name {0} has size {1} but the size was reported as {2}", tx.remotefilename, tx.length, fe.Size);
                            }

                            break;
                        }
                    }

                    if (!found)
                    {
                        if (originalRenamedFile != null && renamedFileNewName != null && originalRenamedFile.remotefilename == fe.Name)
                        {
                            Console.WriteLine("*** File with name {0} was found on server but was supposed to have been renamed to {1}!", fe.Name, renamedFileNewName);
                        }
                        else
                        {
                            Console.WriteLine("*** File with name {0} was found on server but not uploaded!", fe.Name);
                        }
                    }
                }
            }

            foreach (TempFile tx in files)
            {
                if (!tx.found)
                {
                    Console.WriteLine("*** File with name {0} was uploaded but not found afterwards", tx.remotefilename);
                }
            }

            Console.WriteLine("Downloading files");

            for (int i = 0; i < files.Count; i++)
            {
                using (Duplicati.Library.Utility.TempFile cf = new Duplicati.Library.Utility.TempFile())
                {
                    Exception e = null;
                    Console.Write("Downloading file {0} ... ", i);
                    try
                    {
                        if (backend is Library.Interface.IStreamingBackend && !disableStreaming)
                        {
                            using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None))
                            using (NonSeekableStream nss = new NonSeekableStream(fs))
                                (backend as Library.Interface.IStreamingBackend).Get(files[i].remotefilename, nss);
                        }
                        else
                        {
                            backend.Get(files[i].remotefilename, cf);
                        }

                        e = null;
                    }
                    catch (Exception ex)
                    {
                        e = ex;
                    }

                    if (e != null)
                    {
                        Console.WriteLine("failed\n*** Error: {0}", e);
                    }
                    else
                    {
                        Console.WriteLine("done");
                    }

                    Console.Write("Checking hash ... ");
                    using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                    {
                        // BUGFIX: hash the stream exactly once. The old code called
                        // sha.ComputeHash(fs) a second time for the dummy-file check, but by
                        // then the stream was at EOF, so it hashed zero bytes and the
                        // "dummy file" diagnosis could never trigger.
                        var downloadedHash = Convert.ToBase64String(sha.ComputeHash(fs));
                        if (downloadedHash != Convert.ToBase64String(files[i].hash))
                        {
                            if (dummyFileHash != null && downloadedHash == Convert.ToBase64String(dummyFileHash))
                            {
                                Console.WriteLine("failed\n*** Downloaded file was the dummy file");
                            }
                            else
                            {
                                Console.WriteLine("failed\n*** Downloaded file was corrupt");
                            }
                        }
                        else
                        {
                            Console.WriteLine("done");
                        }
                    }
                }
            }

            Console.WriteLine("Deleting files...");

            foreach (TempFile tx in files)
            {
                try
                {
                    backend.Delete(tx.remotefilename);
                }
                catch (Exception ex)
                {
                    Console.WriteLine("*** Failed to delete file {0}, message: {1}", tx.remotefilename, ex);
                }
            }

            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
            {
                if (!fe.IsFolder)
                {
                    Console.WriteLine("*** Remote folder contains {0} after cleanup", fe.Name);
                }
            }
        }

        // Test quota retrieval
        IQuotaEnabledBackend quotaEnabledBackend = backend as IQuotaEnabledBackend;
        if (quotaEnabledBackend != null)
        {
            Console.WriteLine("Checking quota...");
            IQuotaInfo quota = null;
            bool noException;
            try
            {
                quota = quotaEnabledBackend.Quota;
                noException = true;
            }
            catch (Exception ex)
            {
                Console.WriteLine("*** Checking quota information failed: {0}", ex);
                noException = false;
            }

            if (noException)
            {
                if (quota != null)
                {
                    Console.WriteLine("Free Space: {0}", Library.Utility.Utility.FormatSizeString(quota.FreeQuotaSpace));
                    Console.WriteLine("Total Space: {0}", Library.Utility.Utility.FormatSizeString(quota.TotalQuotaSpace));
                }
                else
                {
                    Console.WriteLine("Unable to retrieve quota information");
                }
            }
        }

        // Test DNSName lookup
        Console.WriteLine("Checking DNS names used by this backend...");
        try
        {
            string[] dnsNames = backend.DNSName;
            if (dnsNames != null)
            {
                foreach (string dnsName in dnsNames)
                {
                    Console.WriteLine(dnsName);
                }
            }
            else
            {
                Console.WriteLine("No DNS names reported");
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("*** Checking DNSName failed: {0}", ex);
        }
    }
    finally
    {
        // Dispose any generic modules we configured
        foreach (Library.Interface.IGenericModule m in loadedModules)
        {
            if (m is IDisposable)
            {
                ((IDisposable)m).Dispose();
            }
        }
    }

    return true;
}
/// <summary>
/// Encrypts the local file with the supplied module, replacing the plaintext
/// file with an encrypted temporary file. No-op if <paramref name="encryption"/>
/// is null or the entry is already marked encrypted.
/// </summary>
/// <param name="encryption">The encryption module to apply; may be null.</param>
/// <param name="stat">Writer used when deleting the plaintext local file.</param>
public void Encrypt(Library.Interface.IEncryption encryption, IBackendWriter stat)
{
    // Guard clause: nothing to do without a module, or if already encrypted
    if (encryption == null || this.Encrypted)
        return;

    // Encrypt into a fresh temp file, then swap it in for the plaintext original
    var encryptedFile = new Library.Utility.TempFile();
    encryption.Encrypt(this.LocalFilename, encryptedFile);
    this.DeleteLocalFile(stat);
    this.LocalTempfile = encryptedFile;

    // The recorded hash/size described the plaintext; clear them so they are recomputed
    this.Hash = null;
    this.Size = 0;
    this.Encrypted = true;
}
/// <summary>
/// Downloads a remote file to a temporary file, logs and verifies it (size and
/// hash), optionally decrypts it, and returns the temp file. Ownership of the
/// returned TempFile passes to the caller; on any failure the temp file is
/// disposed here. Returns null when <c>item.VerifyHashOnly</c> is set.
/// </summary>
/// <param name="item">The queued transfer describing the remote file.</param>
/// <returns>The downloaded (and possibly decrypted) temp file, or null.</returns>
private async Task<Library.Utility.TempFile> DoGet(FileEntryItem item)
{
    Library.Utility.TempFile tmpfile = null;
    await m_stats.SendEventAsync(BackendActionType.Get, BackendEventType.Started, item.RemoteFilename, item.Size);

    try
    {
        var begin = DateTime.Now;

        tmpfile = new Library.Utility.TempFile();
        if (m_backend is Library.Interface.IStreamingBackend && !m_options.DisableStreamingTransfers)
        {
            // Streaming path: throttle and report progress while the backend writes into fs.
            // NOTE(review): ThrottledStream argument order here is (MaxUploadPrSecond,
            // MaxDownloadPrSecond); the sync DoGet elsewhere in this file passes them
            // reversed — confirm against the ThrottledStream constructor signature.
            using (var fs = System.IO.File.OpenWrite(tmpfile))
            using (var ts = new ThrottledStream(fs, m_options.MaxUploadPrSecond, m_options.MaxDownloadPrSecond))
            using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, pg => HandleProgress(ts, pg)))
                ((Library.Interface.IStreamingBackend)m_backend).Get(item.RemoteFilename, pgs);
        }
        else
            m_backend.Get(item.RemoteFilename, tmpfile);

        var duration = DateTime.Now - begin;
        var filehash = FileEntryItem.CalculateFileHash(tmpfile);
        Logging.Log.WriteProfilingMessage(LOGTAG, "DownloadSpeed", "Downloaded {0} in {1}, {2}/s", Library.Utility.Utility.FormatSizeString(item.Size), duration, Library.Utility.Utility.FormatSizeString((long)(item.Size / duration.TotalSeconds)));

        await m_database.LogRemoteOperationAsync("get", item.RemoteFilename, JsonConvert.SerializeObject(new { Size = new System.IO.FileInfo(tmpfile).Length, Hash = filehash }));
        await m_stats.SendEventAsync(BackendActionType.Get, BackendEventType.Completed, item.RemoteFilename, new System.IO.FileInfo(tmpfile).Length);

        if (!m_options.SkipFileHashChecks)
        {
            // Verify downloaded length against the expected size (or learn it)
            var nl = new System.IO.FileInfo(tmpfile).Length;
            if (item.Size >= 0)
            {
                if (nl != item.Size)
                {
                    throw new Exception(Strings.Controller.DownloadedFileSizeError(item.RemoteFilename, nl, item.Size));
                }
            }
            else
            {
                item.Size = nl;
            }

            // Verify the file hash against the expected value (or learn it)
            if (!string.IsNullOrEmpty(item.Hash))
            {
                if (filehash != item.Hash)
                {
                    throw new Duplicati.Library.Main.BackendManager.HashMismatchException(Strings.Controller.HashMismatchError(tmpfile, item.Hash, filehash));
                }
            }
            else
            {
                item.Hash = filehash;
            }
        }

        // Fast exit
        if (item.VerifyHashOnly)
        {
            return (null);
        }

        // Decrypt before returning
        if (!m_options.NoEncryption)
        {
            try
            {
                // tmpfile2 takes ownership of the encrypted download; tmpfile becomes
                // the decryption target and is what we hand back to the caller.
                using (var tmpfile2 = tmpfile)
                {
                    tmpfile = new Library.Utility.TempFile();

                    // Auto-guess the encryption module
                    var ext = (System.IO.Path.GetExtension(item.RemoteFilename) ?? "").TrimStart('.');
                    if (!string.Equals(m_options.EncryptionModule, ext, StringComparison.OrdinalIgnoreCase))
                    {
                        // Check if the file is encrypted with something else
                        if (DynamicLoader.EncryptionLoader.Keys.Contains(ext, StringComparer.OrdinalIgnoreCase))
                        {
                            using (var encmodule = DynamicLoader.EncryptionLoader.GetModule(ext, m_options.Passphrase, m_options.RawOptions))
                                if (encmodule != null)
                                {
                                    Logging.Log.WriteVerboseMessage(LOGTAG, "AutomaticDecryptionDetection", "Filename extension \"{0}\" does not match encryption module \"{1}\", using matching encryption module", ext, m_options.EncryptionModule);
                                    encmodule.Decrypt(tmpfile2, tmpfile);
                                }
                        }
                        // Check if the file is not encrypted
                        else if (DynamicLoader.CompressionLoader.Keys.Contains(ext, StringComparer.OrdinalIgnoreCase))
                        {
                            Logging.Log.WriteVerboseMessage(LOGTAG, "AutomaticDecryptionDetection", "Filename extension \"{0}\" does not match encryption module \"{1}\", guessing that it is not encrypted", ext, m_options.EncryptionModule);
                        }
                        // Fallback, lets see what happens...
                        else
                        {
                            Logging.Log.WriteVerboseMessage(LOGTAG, "AutomaticDecryptionDetection", "Filename extension \"{0}\" does not match encryption module \"{1}\", attempting to use specified encryption module as no others match", ext, m_options.EncryptionModule);
                            using (var encmodule = DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions))
                                encmodule.Decrypt(tmpfile2, tmpfile);
                        }
                    }
                    else
                    {
                        using (var encmodule = DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions))
                            encmodule.Decrypt(tmpfile2, tmpfile);
                    }
                }
            }
            catch (Exception ex)
            {
                //If we fail here, make sure that we throw a crypto exception
                if (ex is System.Security.Cryptography.CryptographicException)
                {
                    throw;
                }
                else
                {
                    throw new System.Security.Cryptography.CryptographicException(ex.Message, ex);
                }
            }
        }

        // Transfer ownership to the caller: clearing tmpfile prevents the
        // finally block from disposing the file we are returning.
        var res = tmpfile;
        tmpfile = null;
        return (res);
    }
    finally
    {
        // Dispose the temp file on any failure path (tmpfile is null on success)
        try
        {
            if (tmpfile != null)
            {
                tmpfile.Dispose();
            }
        }
        catch
        {
        }
    }
}
/// <summary>
/// Downloads a remote file into a temporary file, verifies its size and hash,
/// optionally decrypts it, and transfers ownership of the result to <c>item.Result</c>.
/// </summary>
/// <param name="item">The queue entry describing the remote file to fetch</param>
private void DoGet(FileEntryItem item)
{
    Library.Utility.TempFile tmpfile = null;
    m_statwriter.SendEvent(BackendActionType.Get, BackendEventType.Started, item.RemoteFilename, item.Size);

    try
    {
        tmpfile = new Library.Utility.TempFile();

        if (m_backend is Library.Interface.IStreamingBackend && !m_options.DisableStreamingTransfers)
        {
            // Streaming backends write directly into the temp file,
            // with throttling and progress reporting wrapped around the target stream
            using (var fs = System.IO.File.OpenWrite(tmpfile))
            using (var ts = new ThrottledStream(fs, m_options.MaxDownloadPrSecond, m_options.MaxUploadPrSecond))
            using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, HandleProgress))
                ((Library.Interface.IStreamingBackend)m_backend).Get(item.RemoteFilename, pgs);
        }
        else
            m_backend.Get(item.RemoteFilename, tmpfile);

        // Record the actual downloaded size and hash in the operation log
        m_db.LogDbOperation("get", item.RemoteFilename, JsonConvert.SerializeObject(new { Size = new System.IO.FileInfo(tmpfile).Length, Hash = FileEntryItem.CalculateFileHash(tmpfile) }));
        m_statwriter.SendEvent(BackendActionType.Get, BackendEventType.Completed, item.RemoteFilename, new System.IO.FileInfo(tmpfile).Length);

        if (!m_options.SkipFileHashChecks)
        {
            // Verify the downloaded size against the expected size,
            // or record it when no expected size was known
            var nl = new System.IO.FileInfo(tmpfile).Length;
            if (item.Size >= 0)
            {
                if (nl != item.Size)
                    throw new Exception(string.Format(Strings.Controller.DownloadedFileSizeError, item.RemoteFilename, nl, item.Size));
            }
            else
                item.Size = nl;

            // Same pattern for the file hash: verify when known, record otherwise
            var nh = FileEntryItem.CalculateFileHash(tmpfile);
            if (!string.IsNullOrEmpty(item.Hash))
            {
                if (nh != item.Hash)
                    throw new HashMismathcException(string.Format(Strings.Controller.HashMismatchError, tmpfile, item.Hash, nh));
            }
            else
                item.Hash = nh;
        }

        if (!item.VerifyHashOnly)
        {
            // Decrypt before returning
            if (!m_options.NoEncryption)
            {
                try
                {
                    // The encrypted download is disposed by the using block;
                    // tmpfile is re-pointed at the decrypted copy
                    using(var tmpfile2 = tmpfile)
                    {
                        tmpfile = new Library.Utility.TempFile();

                        // NOTE(review): m_encryption appears to be shared state, hence the lock — confirm
                        lock(m_encryptionLock)
                            m_encryption.Decrypt(tmpfile2, tmpfile);
                    }
                }
                catch (Exception ex)
                {
                    //If we fail here, make sure that we throw a crypto exception
                    if (ex is System.Security.Cryptography.CryptographicException)
                        throw;
                    else
                        throw new System.Security.Cryptography.CryptographicException(ex.Message, ex);
                }
            }

            // Ownership of the temp file transfers to the item; clearing the
            // local reference prevents the catch block below from disposing it
            item.Result = tmpfile;
            tmpfile = null;
        }
    }
    catch
    {
        // Only dispose the temp file if ownership was not handed to the item
        if (tmpfile != null)
            tmpfile.Dispose();

        throw;
    }
}
/// <summary>
/// Downloads all Duplicati block/fileset volumes from a backend to a local folder,
/// decrypting them when a passphrase is supplied.
/// </summary>
/// <param name="args">Command arguments: command name, backend url, target folder</param>
/// <param name="options">Parsed commandline options, may contain "passphrase"</param>
/// <param name="filter">Unused filter argument (kept for a uniform command signature)</param>
/// <returns>0 on success, 100 on usage/configuration errors, 200 when downloads failed</returns>
public static int Run(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 3)
    {
        Console.WriteLine("Invalid argument count ({0} expected 3): {1}{2}", args.Count, Environment.NewLine, string.Join(Environment.NewLine, args));
        return 100;
    }

    using(var backend = Library.DynamicLoader.BackendLoader.GetBackend(args[1], options))
    {
        if (backend == null)
        {
            Console.WriteLine("Backend not found: {0}{1}Backends supported: {2}", args[1], Environment.NewLine, string.Join(", ", Library.DynamicLoader.BackendLoader.Keys));
            return 100;
        }

        var targetfolder = Path.GetFullPath(args[2]);
        if (!Directory.Exists(args[2]))
        {
            Console.WriteLine("Creating target folder: {0}", targetfolder);
            Directory.CreateDirectory(targetfolder);
        }

        Console.WriteLine("Listing files on backend: {0} ...", backend.ProtocolKey);
        var lst = backend.List();
        Console.WriteLine("Found {0} files", lst.Count);

        var i = 0;
        var downloaded = 0;
        var errors = 0;
        var needspass = 0;
        string passphrase;
        options.TryGetValue("passphrase", out passphrase);

        foreach(var file in lst)
        {
            try
            {
                Console.Write("{0}: {1}", i, file.Name);
                var p = Duplicati.Library.Main.Volumes.VolumeBase.ParseFilename(file);
                if (p == null)
                {
                    Console.WriteLine(" - Not a Duplicati file, ignoring");
                    continue;
                }

                var local = Path.Combine(targetfolder, file.Name);
                if (p.EncryptionModule != null)
                {
                    if (string.IsNullOrWhiteSpace(passphrase))
                    {
                        needspass++;
                        Console.WriteLine(" - No passphrase supplied, skipping");
                        continue;
                    }

                    // Strip the encryption extension; the local copy is stored decrypted
                    local = local.Substring(0, local.Length - p.EncryptionModule.Length - 1);
                }

                if (p.FileType != Duplicati.Library.Main.RemoteVolumeType.Blocks && p.FileType != Duplicati.Library.Main.RemoteVolumeType.Files)
                {
                    Console.WriteLine(" - Filetype {0}, skipping", p.FileType);
                    continue;
                }

                if (File.Exists(local))
                {
                    Console.WriteLine(" - Already exists, skipping");
                    continue;
                }

                Console.Write(" - downloading ({0})...", Library.Utility.Utility.FormatSizeString(file.Size));

                using(var tf = new Library.Utility.TempFile())
                {
                    backend.Get(file.Name, tf);
                    if (p.EncryptionModule != null)
                    {
                        Console.Write(" - decrypting ...");
                        using(var m = Library.DynamicLoader.EncryptionLoader.GetModule(p.EncryptionModule, passphrase, options))
                        using(var tf2 = new Library.Utility.TempFile())
                        {
                            m.Decrypt(tf, tf2);
                            File.Copy(tf2, local);
                        }
                    }
                    else
                        File.Copy(tf, local);
                }

                // Fixed: the counter was never incremented, so the summary always
                // reported 0 downloads and could wrongly trigger the error path below
                downloaded++;

                Console.WriteLine(" done!");
            }
            catch(Exception ex)
            {
                Console.WriteLine(" error: {0}", ex.ToString());
                errors++;
            }

            i++;
        }

        if (needspass > 0 && downloaded == 0)
        {
            Console.WriteLine("No files downloaded, try adding --passphrase to decrypt files");
            return 100;
        }

        Console.WriteLine("Download complete, of {0} remote files, {1} were downloaded with {2} errors", lst.Count, downloaded, errors);

        if (needspass > 0)
            // Fixed: the format string contained {0} but no argument was supplied,
            // which throws a FormatException at runtime
            Console.WriteLine("Additonally {0} remote files were skipped because of encryption, supply --passphrase to download those", needspass);

        if (errors > 0)
            return 200;
        else
            return 0;
    }
}
/// <summary>
/// Creates a bug report package: a compressed archive holding a scrubbed
/// copy of the local database plus a system-information text file.
/// </summary>
public void Run()
{
    var fileExtension = System.IO.Path.GetExtension(m_targetpath);
    var compressionModule = m_options.CompressionModule;

    // Make sure the output filename carries the compression module extension
    if (fileExtension != compressionModule)
        m_targetpath = m_targetpath + "." + compressionModule;

    if (System.IO.File.Exists(m_targetpath))
        throw new Exception(string.Format("Output file already exists, not overwriting: {0}", m_targetpath));

    if (!System.IO.File.Exists(m_options.Dbpath))
        throw new Exception(string.Format("Database file does not exist: {0}", m_options.Dbpath));

    m_result.AddMessage("Scrubbing filenames from database, this may take a while, please wait");

    using (var scrubbedDbPath = new Library.Utility.TempFile())
    {
        // Work on a copy so the user's database is left untouched
        System.IO.File.Copy(m_options.Dbpath, scrubbedDbPath, true);
        using (var scrubbedDb = new LocalBugReportDatabase(scrubbedDbPath))
        {
            m_result.SetDatabase(scrubbedDb);
            scrubbedDb.Fix();
        }

        using (var archive = DynamicLoader.CompressionLoader.GetModule(compressionModule, m_targetpath, m_options.RawOptions))
        {
            // First archive entry: the scrubbed database itself
            using (var entry = archive.CreateFile("log-database.sqlite", Duplicati.Library.Interface.CompressionHint.Compressible, DateTime.UtcNow))
            using (var source = System.IO.File.Open(scrubbedDbPath, System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.ReadWrite))
                Library.Utility.Utility.CopyStream(source, entry);

            // Second archive entry: environment and runtime details
            using (var info = new System.IO.StreamWriter(archive.CreateFile("system-info.txt", Duplicati.Library.Interface.CompressionHint.Compressible, DateTime.UtcNow)))
            {
                info.WriteLine("Duplicati: {0} ({1})", Duplicati.Library.Utility.Utility.getEntryAssembly().FullName, System.Reflection.Assembly.GetExecutingAssembly().FullName);
                info.WriteLine("OS: {0}", Environment.OSVersion);
                info.WriteLine("Uname: {0}", Duplicati.Library.Utility.Utility.UnameAll);
                info.WriteLine("64bit: {0} ({1})", Environment.Is64BitOperatingSystem, Environment.Is64BitProcess);
                info.WriteLine("Machinename: {0}", Environment.MachineName);
                info.WriteLine("Processors: {0}", Environment.ProcessorCount);
                info.WriteLine(".Net Version: {0}", Environment.Version);
                info.WriteLine("Mono: {0} ({1}) ({2})", Duplicati.Library.Utility.Utility.IsMono, Duplicati.Library.Utility.Utility.MonoVersion, Duplicati.Library.Utility.Utility.MonoDisplayVersion);
                info.WriteLine("Locale: {0}, {1}", System.Threading.Thread.CurrentThread.CurrentCulture, System.Threading.Thread.CurrentThread.CurrentUICulture);

                // SQLite is loaded dynamically, so version details are read
                // via reflection on a best-effort basis
                Type sqliteType = null;
                string sqliteVersion = "";

                try { sqliteType = Duplicati.Library.SQLiteHelper.SQLiteLoader.SQLiteConnectionType; }
                catch { }

                if (sqliteType != null)
                {
                    try { sqliteVersion = (string)sqliteType.GetProperty("SQLiteVersion").GetValue(null, null); }
                    catch { }

                    info.WriteLine("SQLite: {0} - {1}", sqliteVersion, sqliteType.FullName);
                }
            }
        }

        m_result.TargetPath = m_targetpath;
    }
}
/// <summary>
/// Downloads fileset volumes from the remote target and extracts the
/// control files matching the filter into the configured restore path.
/// </summary>
/// <param name="filterstrings">Optional filter strings selecting the control files to restore</param>
/// <param name="compositefilter">Optional additional filter joined with the parsed filter strings</param>
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    if (string.IsNullOrEmpty(m_options.Restorepath))
    {
        throw new Exception("Cannot restore control files without --restore-path");
    }
    if (!System.IO.Directory.Exists(m_options.Restorepath))
    {
        System.IO.Directory.CreateDirectory(m_options.Restorepath);
    }

    // Use the existing local database when present, otherwise a throw-away temporary one
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(System.IO.File.Exists(m_options.Dbpath) ? m_options.Dbpath : (string)tmpdb, "RestoreControlFiles", true))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter);

        try
        {
            var filteredList = ListFilesHandler.ParseAndFilterFilesets(backend.List(), m_options);
            if (filteredList.Count == 0)
            {
                throw new Exception("No filesets found on remote target");
            }

            // Remembers the most recent failure; cleared when a fileset succeeds
            Exception lastEx = new Exception("No suitable files found on remote target");

            // Try each candidate fileset in order until one yields the control files
            foreach (var fileversion in filteredList)
            {
                try
                {
                    // Honor a stop request before starting another download
                    if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    {
                        backend.WaitForComplete(db, null);
                        return;
                    }

                    var file = fileversion.Value.File;
                    var entry = db.GetRemoteVolume(file.Name);

                    var res = new List<string>();
                    // Fall back to the listed file size when the database has no size recorded
                    using (var tmpfile = backend.Get(file.Name, entry.Size < 0 ? file.Size : entry.Size, entry.Hash))
                    using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options))
                        foreach (var cf in tmp.ControlFiles)
                        {
                            if (Library.Utility.FilterExpression.Matches(filter, cf.Key))
                            {
                                var targetpath = System.IO.Path.Combine(m_options.Restorepath, cf.Key);
                                using (var ts = System.IO.File.Create(targetpath))
                                    Library.Utility.Utility.CopyStream(cf.Value, ts);
                                res.Add(targetpath);
                            }
                        }

                    m_result.SetResult(res);

                    lastEx = null;
                    break;
                }
                catch (Exception ex)
                {
                    lastEx = ex;
                    // Never swallow a thread-abort; let it propagate immediately
                    if (ex is System.Threading.ThreadAbortException)
                    {
                        throw;
                    }
                }
            }

            // All candidate filesets failed; surface the last error seen
            if (lastEx != null)
            {
                throw lastEx;
            }
        }
        finally
        {
            backend.WaitForComplete(db, null);
        }

        db.WriteResults();
    }
}
private static void MergeFiles(string file1, string file2, string fileout) { using(var tf = new Library.Utility.TempFile()) { using(var sw = new System.IO.StreamWriter(tf)) using(var sr1 = new System.IO.StreamReader(file1)) using(var sr2 = new System.IO.StreamReader(file2)) { var c1 = sr1.ReadLine(); var c2 = sr2.ReadLine(); while(c1 != null || c2 != null) { var cmp = StringComparer.Ordinal.Compare(c1, c2); if (c2 == null || (c1 != null && cmp < 0)) { sw.WriteLine(c1); c1 = sr1.ReadLine(); } else { if (cmp != 0) sw.WriteLine(c2); c2 = sr2.ReadLine(); } } } File.Copy(tf, fileout, true); } }
public void SetTempFile(Library.Utility.TempFile tmpfile) { if (m_localfile != null) m_localfile.Dispose(); m_localfile = tmpfile; }
/// <summary>
/// Lists filesets and file versions, preferring the local database and
/// falling back to downloading fileset volumes from the remote store.
/// </summary>
/// <param name="filterstrings">Optional filter strings selecting the files to list</param>
/// <param name="compositefilter">Optional additional filter joined with the parsed filter strings</param>
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    var parsedfilter = new Library.Utility.FilterExpression(filterstrings);
    var simpleList = !(parsedfilter.Type == Library.Utility.FilterType.Simple || m_options.AllVersions);
    var filter = Library.Utility.JoinedFilterExpression.Join(parsedfilter, compositefilter);

    //Use a speedy local query
    if (!m_options.NoLocalDb && System.IO.File.Exists(m_options.Dbpath))
        using(var db = new Database.LocalListDatabase(m_options.Dbpath))
        {
            m_result.SetDatabase(db);
            using(var filesets = db.SelectFileSets(m_options.Time, m_options.Version))
            {
                if (parsedfilter.Type != Library.Utility.FilterType.Empty)
                {
                    // With an active filter, a simple listing only needs the most recent fileset
                    if (simpleList || (m_options.ListFolderContents && !m_options.AllVersions))
                        filesets.TakeFirst();
                }

                // files == null means "list filesets only", no per-file results
                IEnumerable<Database.LocalListDatabase.IFileversion> files;
                if (m_options.ListFolderContents)
                    files = filesets.SelectFolderContents(filter);
                else if (m_options.ListPrefixOnly)
                    files = filesets.GetLargestPrefix(filter);
                else if (parsedfilter.Type == Duplicati.Library.Utility.FilterType.Empty)
                    files = null;
                else
                    files = filesets.SelectFiles(filter);

                if (m_options.ListSetsOnly)
                    m_result.SetResult(
                        filesets.QuickSets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        null
                    );
                else
                    m_result.SetResult(
                        filesets.Sets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        files == null ? null :
                            (from n in files
                             select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Path, n.Sizes.ToArray())))
                            .ToArray()
                    );

                return;
            }
        }

    m_result.AddMessage("No local database, accessing remote store");

    //TODO: Add prefix and foldercontents

    // Otherwise, grab info from remote location
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(tmpdb, "List"))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filteredList = ParseAndFilterFilesets(backend.List(), m_options);
        if (filteredList.Count == 0)
            throw new Exception("No filesets found on remote target");

        var numberSeq = CreateResultSequence(filteredList);
        // Without a filter we only report the fileset versions, no file details
        if (parsedfilter.Type == Library.Utility.FilterType.Empty)
        {
            m_result.SetResult(numberSeq, null);
            m_result.EncryptedFiles = filteredList.Any(x => !string.IsNullOrWhiteSpace(x.Value.EncryptionModule));
            return;
        }

        var firstEntry = filteredList[0].Value;
        filteredList.RemoveAt(0);
        Dictionary<string, List<long>> res;

        if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
            return;

        // Process the first fileset; a simple listing can return right away
        using (var tmpfile = backend.Get(firstEntry.File.Name, firstEntry.File.Size, null))
        using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(firstEntry.File.Name), tmpfile, m_options))
            if (simpleList)
            {
                m_result.SetResult(
                    numberSeq.Take(1),
                    (from n in rd.Files
                     where Library.Utility.FilterExpression.Matches(filter, n.Path)
                     orderby n.Path
                     select new ListResultFile(n.Path, new long[] { n.Size }))
                    .ToArray()
                );

                return;
            }
            else
            {
                // Seed the per-path size lists with the first fileset's entries
                res = rd.Files
                    .Where(x => Library.Utility.FilterExpression.Matches(filter, x.Path))
                    .ToDictionary(
                        x => x.Path,
                        y =>
                        {
                            var lst = new List<long>();
                            lst.Add(y.Size);
                            return lst;
                        },
                        Library.Utility.Utility.ClientFilenameStringComparer
                    );
            }

        // For each remaining fileset, append that version's size to every path;
        // -1 marks versions in which the path does not exist
        long flindex = 1;
        foreach(var flentry in filteredList)
            using(var tmpfile = backend.Get(flentry.Value.File.Name, flentry.Value.File == null ? -1 : flentry.Value.File.Size, null))
            using (var rd = new Volumes.FilesetVolumeReader(flentry.Value.CompressionModule, tmpfile, m_options))
            {
                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

                foreach(var p in from n in rd.Files where Library.Utility.FilterExpression.Matches(filter, n.Path) select n)
                {
                    List<long> lst;
                    if (!res.TryGetValue(p.Path, out lst))
                    {
                        // Path not seen in earlier filesets; pad with -1 for each missed version
                        lst = new List<long>();
                        res[p.Path] = lst;
                        for(var i = 0; i < flindex; i++)
                            lst.Add(-1);
                    }

                    lst.Add(p.Size);
                }

                // Pad paths that are absent from this particular fileset
                foreach(var n in from i in res where i.Value.Count < flindex + 1 select i)
                    n.Value.Add(-1);

                flindex++;
            }

        m_result.SetResult(
            numberSeq,
            from n in res
            orderby n.Key
            select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Key, n.Value))
        );
    }
}
/// <summary>
/// Builds (or updates) a sorted index file mapping block hashes to the
/// block volume archives in the given folder that contain them.
/// </summary>
/// <param name="args">Command arguments: command name and the folder to index</param>
/// <param name="options">Parsed commandline options, may contain "indexfile"</param>
/// <param name="filter">Unused filter argument (kept for a uniform command signature)</param>
/// <returns>0 on completion (individual file errors are reported, not fatal), 100 on usage errors</returns>
public static int Run(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 2)
    {
        Console.WriteLine("Invalid argument count ({0} expected 2): {1}{2}", args.Count, Environment.NewLine, string.Join(Environment.NewLine, args));
        return(100);
    }

    var folder = Path.GetFullPath(args[1]);
    if (!Directory.Exists(folder))
    {
        Console.WriteLine("Folder not found: {0}", folder);
        return(100);
    }

    Directory.SetCurrentDirectory(folder);

    string ixfile;
    options.TryGetValue("indexfile", out ixfile);
    if (string.IsNullOrWhiteSpace(ixfile))
    {
        ixfile = "index.txt";
    }

    ixfile = Path.GetFullPath(ixfile);
    if (!File.Exists(ixfile))
    {
        // Start from an empty index file
        using (File.Create(ixfile)) { }
    }
    else
    {
        // The merge step below requires the existing index to be sorted
        Console.WriteLine("Sorting existing index file");
        SortFile(ixfile, ixfile);
    }

    var filecount = Directory.EnumerateFiles(folder).Count();
    Console.WriteLine("Processing {0} files", filecount);

    var i = 0;
    var errors = 0;
    var totalblocks = 0L;
    var files = 0;

    foreach (var file in Directory.EnumerateFiles(folder))
    {
        Console.Write("{0}: {1}", i, file);
        try
        {
            var p = Duplicati.Library.Main.Volumes.VolumeBase.ParseFilename(file);
            if (p == null)
            {
                Console.WriteLine(" - Not a Duplicati file, ignoring");
                continue;
            }

            // Only unencrypted block volumes can be indexed directly
            if (p.FileType != Duplicati.Library.Main.RemoteVolumeType.Blocks)
            {
                Console.WriteLine(" - Filetype {0}, skipping", p.FileType);
                continue;
            }

            if (!string.IsNullOrWhiteSpace(p.EncryptionModule))
            {
                Console.WriteLine(" - Encrypted, skipping");
                continue;
            }

            var filekey = Path.GetFileName(file);
            var blocks = 0;

            using (var cp = Library.DynamicLoader.CompressionLoader.GetModule(p.CompressionModule, file, options))
            using (var tf = new Library.Utility.TempFile())
            {
                // Dump a "hash, volume-name" line for every block in the archive
                using (var sw = new StreamWriter(tf))
                    foreach (var f in cp.ListFiles(null))
                    {
                        sw.WriteLine("{0}, {1}", Library.Utility.Utility.Base64UrlToBase64Plain(f), filekey);
                        blocks++;
                    }

                files++;
                totalblocks += blocks;

                Console.Write(" {0} hashes found, sorting ...", blocks);

                SortFile(tf, tf);

                Console.WriteLine(" done!");
                Console.Write("Merging {0} hashes ...", totalblocks);

                // Fold this volume's sorted hash list into the master index
                MergeFiles(ixfile, tf, ixfile);

                Console.WriteLine(" done!");
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(" error: {0}", ex.ToString());
            errors++;
        }

        i++;
    }

    Console.WriteLine("Processed {0} files and found {1} hashes", files, totalblocks);
    if (errors > 0)
    {
        Console.WriteLine("Experienced {0} errors", errors);
    }

    return(0);
}
/// <summary>
/// Restores files from a folder of downloaded block volumes, using the
/// sorted hash index file to locate each block.
/// </summary>
/// <param name="args">Command arguments: command name, volume folder, optional dlist selector</param>
/// <param name="options">Parsed commandline options (targetpath, indexfile, blocksize, hash algorithms)</param>
/// <param name="filter">Filter applied to the paths found in the file list</param>
/// <returns>0 on completion (per-file errors are reported, not fatal), 100 on usage/configuration errors</returns>
public static int Run(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 2 && args.Count != 3)
    {
        Console.WriteLine("Invalid argument count ({0} expected 2 or 3): {1}{2}", args.Count, Environment.NewLine, string.Join(Environment.NewLine, args));
        return 100;
    }

    var folder = Path.GetFullPath(args[1]);
    if (!Directory.Exists(folder))
    {
        Console.WriteLine("Folder not found: {0}", folder);
        return 100;
    }

    Directory.SetCurrentDirectory(folder);

    string targetpath;
    options.TryGetValue("targetpath", out targetpath);

    string ixfile;
    options.TryGetValue("indexfile", out ixfile);
    if (string.IsNullOrWhiteSpace(ixfile))
        ixfile = "index.txt";
    ixfile = Path.GetFullPath(ixfile);

    if (!File.Exists(ixfile))
    {
        Console.WriteLine("Index file not found, perhaps you need to run the index command?");
        return 100;
    }

    Console.Write("Sorting index file ...");
    Index.SortFile(ixfile, ixfile);
    Console.WriteLine(" done!");

    // Pick the dlist to restore from: the newest when unspecified, otherwise the requested one
    string filelist;
    if (args.Count == 2)
    {
        var time = List.ParseListFiles(folder).First();
        filelist = time.Value;

        Console.WriteLine("Using set 0 with timestamp {0}", time.Key.ToLocalTime());
    }
    else
    {
        filelist = List.SelectListFile(args[2], folder);
    }

    Library.Main.Volumes.VolumeReaderBase.UpdateOptionsFromManifest(Path.GetExtension(filelist).Trim('.'), filelist, new Duplicati.Library.Main.Options(options));

    string blocksize_str;
    options.TryGetValue("blocksize", out blocksize_str);
    string blockhash_str;
    options.TryGetValue("block-hash-algorithm", out blockhash_str);
    string filehash_str;
    // Fixed: previously read "block-hash-algorithm" again (copy-paste error),
    // so a user-supplied --file-hash-algorithm was silently ignored
    options.TryGetValue("file-hash-algorithm", out filehash_str);

    long blocksize = string.IsNullOrWhiteSpace(blocksize_str) ? 0 : Library.Utility.Sizeparser.ParseSize(blocksize_str);
    if (blocksize <= 0)
    {
        // Fixed: the format string contained {0} but no argument was supplied,
        // which throws a FormatException at runtime
        Console.WriteLine("Invalid blocksize: {0}, try setting --blocksize manually", blocksize_str);
        return 100;
    }

    var blockhasher = string.IsNullOrWhiteSpace(blockhash_str) ? null : System.Security.Cryptography.HashAlgorithm.Create(blockhash_str);
    var filehasher = string.IsNullOrWhiteSpace(filehash_str) ? null : System.Security.Cryptography.HashAlgorithm.Create(filehash_str);

    if (blockhasher == null)
        throw new Exception(string.Format("Block hash algorithm not valid: {0}", blockhash_str));
    if (filehasher == null)
        throw new Exception(string.Format("File hash algorithm not valid: {0}", filehash_str));

    // Number of block hashes that fit inside one blocklist block
    var hashesprblock = blocksize / (blockhasher.HashSize / 8);

    using(var mru = new CompressedFileMRUCache(options))
    {
        Console.WriteLine("Building lookup table for file hashes");
        var lookup = new HashLookupHelper(ixfile, mru, (int)blocksize, blockhasher.HashSize / 8);

        var filecount = 0L;
        string largestprefix = null;
        string[] largestprefixparts = null;

        if (!string.IsNullOrWhiteSpace(targetpath))
            Console.WriteLine("Computing restore path");

        // First pass: count files and compute the longest common directory prefix
        foreach(var f in List.EnumerateFilesInDList(filelist, filter, options))
        {
            if (largestprefix == null)
            {
                largestprefix = f.Path;
                largestprefixparts = largestprefix.Split(new char[] { Path.DirectorySeparatorChar });
            }
            else if (largestprefix.Length > 1)
            {
                var parts = f.Path.Split(new char[] { Path.DirectorySeparatorChar });

                var ni = 0;
                for(; ni < Math.Min(parts.Length, largestprefixparts.Length); ni++)
                    if (!Library.Utility.Utility.ClientFilenameStringComparer.Equals(parts[ni], largestprefixparts[ni]))
                        break;

                if (ni != largestprefixparts.Length)
                {
                    if (ni == 0)
                    {
                        largestprefixparts = new string[0];
                        largestprefix = string.Empty;
                    }
                    else
                    {
                        Array.Resize(ref largestprefixparts, ni - 1);
                        largestprefix = string.Join(Path.DirectorySeparatorChar.ToString(), largestprefixparts);
                    }
                }
            }
            filecount++;
        }

        Console.WriteLine("Restoring {0} files to {1}", filecount, string.IsNullOrWhiteSpace(targetpath) ? "original position" : targetpath);

        // Fixed: guard against an empty file list, which previously left
        // largestprefix null and crashed with a NullReferenceException below
        if (largestprefix == null)
            largestprefix = string.Empty;

        if (Library.Utility.Utility.IsClientLinux || largestprefix.Length > 0)
            largestprefix = Library.Utility.Utility.AppendDirSeparator(largestprefix);

        if (!string.IsNullOrEmpty(largestprefix))
            Console.WriteLine("Removing common prefix {0} from files", largestprefix);

        var i = 0L;
        var errors = 0L;

        // Second pass: rebuild each file block-by-block and verify its hash
        foreach(var f in List.EnumerateFilesInDList(filelist, filter, options))
        {
            try
            {
                var targetfile = MapToRestorePath(f.Path, largestprefix, targetpath);
                if (!Directory.Exists(Path.GetDirectoryName(targetfile)))
                    Directory.CreateDirectory(Path.GetDirectoryName(targetfile));

                Console.Write("{0}: {1} ({2})", i, targetfile, Library.Utility.Utility.FormatSizeString(f.Size));

                using(var tf = new Library.Utility.TempFile())
                {
                    using(var sw = File.OpenWrite(tf))
                    {
                        if (f.BlocklistHashes == null)
                        {
                            // Small file: the file hash is also the single block hash
                            lookup.WriteHash(sw, f.Hash);
                        }
                        else
                        {
                            var blhi = 0L;
                            foreach(var blh in f.BlocklistHashes)
                            {
                                Console.Write(" {0}", blhi);
                                var blockhashoffset = blhi * hashesprblock * blocksize;

                                try
                                {
                                    var bi = 0;
                                    foreach(var h in lookup.ReadBlocklistHashes(blh))
                                    {
                                        try
                                        {
                                            sw.Position = blockhashoffset + (bi * blocksize);
                                            lookup.WriteHash(sw, h);
                                        }
                                        catch(Exception ex)
                                        {
                                            Console.WriteLine("Failed to read hash: {0}{1}{2}", h, Environment.NewLine, ex.ToString());
                                        }

                                        bi++;
                                    }
                                }
                                catch (Exception ex)
                                {
                                    Console.WriteLine("Failed to read Blocklist hash: {0}{1}{2}", blh, Environment.NewLine, ex.ToString());
                                }

                                blhi++;
                            }
                        }
                    }

                    // Verify the reconstructed file before placing it at the target path
                    string fh;
                    using(var fs = File.OpenRead(tf))
                        fh = Convert.ToBase64String(filehasher.ComputeHash(fs));

                    if (fh == f.Hash)
                    {
                        Console.WriteLine(" done!");
                        File.Copy(tf, targetfile, true);
                    }
                    else
                    {
                        Console.Write(" - Restored file hash mismatch");
                        if (File.Exists(targetfile))
                            Console.WriteLine(" - not overwriting existing file: {0}", targetfile);
                        else
                            Console.WriteLine(" - restoring file in damaged condition");
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(" error: {0}", ex.ToString());
                errors++;
            }
            i++;
        }
    }

    return 0;
}
/// <summary>
/// Recompresses remote volumes to a new compression module, optionally
/// re-encrypting the results and re-uploading them to the backend.
/// </summary>
/// <param name="args">Command arguments: command name, target compression module, backend url, target folder</param>
/// <param name="options">Parsed commandline options, may contain "reencrypt" and "reupload"</param>
/// <param name="filter">Unused filter argument (kept for a uniform command signature)</param>
/// <returns>0 on success, 100 on usage/configuration errors, 200 when recompression failed</returns>
public static int Run(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count != 4)
    {
        Console.WriteLine("Invalid argument count ({0} expected 4): {1}{2}", args.Count, Environment.NewLine, string.Join(Environment.NewLine, args));
        return 100;
    }

    string target_compr_module = args[1];
    if (!Library.DynamicLoader.CompressionLoader.Keys.Contains(target_compr_module))
    {
        Console.WriteLine("Target compression module not found: {0}{1}Modules supported: {2}", args[1], Environment.NewLine, string.Join(", ", Library.DynamicLoader.CompressionLoader.Keys));
        return 100;
    }

    var m_Options = new Options(options);
    using (var backend = Library.DynamicLoader.BackendLoader.GetBackend(args[2], options))
    {
        if (backend == null)
        {
            Console.WriteLine("Backend not found: {0}{1}Backends supported: {2}", args[2], Environment.NewLine, string.Join(", ", Library.DynamicLoader.BackendLoader.Keys));
            return 100;
        }

        var targetfolder = Path.GetFullPath(args[3]);
        if (!Directory.Exists(args[3]))
        {
            Console.WriteLine("Creating target folder: {0}", targetfolder);
            Directory.CreateDirectory(targetfolder);
        }

        Console.WriteLine("Listing files on backend: {0} ...", backend.ProtocolKey);
        var rawlist = backend.List();
        Console.WriteLine("Found {0} files", rawlist.Count);

        var i = 0;
        var downloaded = 0;
        var errors = 0;
        var needspass = 0;

        //ToArray() ensures that we do not remote-request it multiple times
        var remotefiles = (from x in rawlist
                           let n = VolumeBase.ParseFilename(x)
                           where n != null && n.Prefix == m_Options.Prefix
                           select n).ToArray();

        if (remotefiles.Length == 0)
        {
            // Nothing matched; try to give a helpful hint about why
            if (rawlist.Count == 0)
                Console.WriteLine("No files were found at the remote location, perhaps the target url is incorrect?");
            else
            {
                var tmp = (from x in rawlist
                           let n = VolumeBase.ParseFilename(x)
                           where n != null
                           select n.Prefix).ToArray();

                var types = tmp.Distinct().ToArray();
                if (tmp.Length == 0)
                    Console.WriteLine("Found {0} files at the remote storage, but none that could be parsed", rawlist.Count);
                else if (types.Length == 1)
                    Console.WriteLine("Found {0} parse-able files with the prefix {1}, did you forget to set the backup-prefix?", tmp.Length, types[0]);
                else
                    Console.WriteLine("Found {0} parse-able files (of {1} files) with different prefixes: {2}, did you forget to set the backup-prefix?", tmp.Length, rawlist.Count, string.Join(", ", types));
            }

            return 100;
        }

        bool reencrypt = Library.Utility.Utility.ParseBoolOption(options, "reencrypt");
        bool reupload = Library.Utility.Utility.ParseBoolOption(options, "reupload");

        foreach (var entry in remotefiles)
        {
            try
            {
                Console.Write("{0}: {1}", i, entry.File.Name);
                var local = Path.Combine(targetfolder, entry.File.Name);
                if (entry.EncryptionModule != null)
                {
                    if (string.IsNullOrWhiteSpace(m_Options.Passphrase))
                    {
                        needspass++;
                        Console.WriteLine(" - No passphrase supplied, skipping");
                        continue;
                    }

                    // Strip the encryption extension; the local copy is handled decrypted
                    local = local.Substring(0, local.Length - entry.EncryptionModule.Length - 1);
                }

                if (entry.CompressionModule == target_compr_module)
                {
                    Console.WriteLine(" - compression types are same");
                    continue;
                }

                string localNew;
                if (entry.CompressionModule != null)
                {
                    localNew = local.Substring(0, local.Length - entry.CompressionModule.Length - 1) + "." + target_compr_module;
                    if (File.Exists(localNew))
                    {
                        Console.WriteLine(" - target file already exist");
                        continue;
                    }
                }
                else
                {
                    Console.WriteLine(" - cannot detect compression type");
                    continue;
                }

                if (File.Exists(local))
                    File.Delete(local);

                Console.Write(" - downloading ({0})...", Library.Utility.Utility.FormatSizeString(entry.File.Size));

                DateTime originLastWriteTime;
                FileInfo destinationFileInfo;

                using (var tf = new Library.Utility.TempFile())
                {
                    backend.Get(entry.File.Name, tf);
                    originLastWriteTime = new FileInfo(tf).LastWriteTime;
                    downloaded++;

                    if (entry.EncryptionModule != null)
                    {
                        Console.Write(" decrypting ...");
                        using (var m = Library.DynamicLoader.EncryptionLoader.GetModule(entry.EncryptionModule, m_Options.Passphrase, options))
                        using (var tf2 = new Library.Utility.TempFile())
                        {
                            m.Decrypt(tf, tf2);
                            File.Copy(tf2, local);
                            File.Delete(tf2);
                        }
                    }
                    else
                        File.Copy(tf, local);

                    File.Delete(tf);

                    // Preserve the original timestamp on the local copy
                    destinationFileInfo = new FileInfo(local);
                    destinationFileInfo.LastWriteTime = originLastWriteTime;
                }

                if (entry.CompressionModule != null)
                {
                    Console.Write(" recompressing ...");

                    using (var cmOld = Library.DynamicLoader.CompressionLoader.GetModule(entry.CompressionModule, local, options))
                    using (var cmNew = Library.DynamicLoader.CompressionLoader.GetModule(target_compr_module, localNew, options))
                        foreach (var cmfile in cmOld.ListFiles(""))
                        {
                            string cmfileNew = cmfile;

                            if (entry.FileType == RemoteVolumeType.Index)
                            {
                                // Index volumes embed volume filenames; rewrite those
                                // to reflect the new compression (and encryption) extensions
                                var cmFileVolume = Library.Main.Volumes.VolumeBase.ParseFilename(cmfileNew);
                                if (cmFileVolume != null)
                                {
                                    cmfileNew = cmfileNew.Replace("." + cmFileVolume.CompressionModule, "." + target_compr_module);
                                    if(!reencrypt)
                                        cmfileNew = cmfileNew.Replace("." + cmFileVolume.EncryptionModule, "");
                                }
                            }

                            using (var sourceStream = cmOld.OpenRead(cmfile))
                            using (var cs = cmNew.CreateFile(cmfileNew, Duplicati.Library.Interface.CompressionHint.Compressible, cmOld.GetLastWriteTime(cmfile)))
                                Library.Utility.Utility.CopyStream(sourceStream, cs);
                        }

                    File.Delete(local);

                    destinationFileInfo = new FileInfo(localNew);
                    destinationFileInfo.LastWriteTime = originLastWriteTime;
                }

                if (reencrypt && entry.EncryptionModule != null)
                {
                    Console.Write(" reencrypting ...");
                    using (var m = Library.DynamicLoader.EncryptionLoader.GetModule(entry.EncryptionModule, m_Options.Passphrase, options))
                    {
                        m.Encrypt(localNew, localNew + "." + m.FilenameExtension);
                        File.Delete(localNew);
                        localNew = localNew + "." + m.FilenameExtension;
                    }

                    destinationFileInfo = new FileInfo(localNew);
                    destinationFileInfo.LastWriteTime = originLastWriteTime;
                }

                if (reupload)
                {
                    backend.Put((new FileInfo(localNew)).Name, localNew);
                    backend.Delete(entry.File.Name);
                    File.Delete(localNew);
                }

                Console.WriteLine(" done!");
            }
            catch (Exception ex)
            {
                Console.WriteLine(" error: {0}", ex.ToString());
                errors++;
            }

            i++;
        }

        if (reupload)
        {
            // The verification file references the old volume names, so drop it
            var remoteverificationfileexist = rawlist.Any(x => x.Name == (m_Options.Prefix + "-verification.json"));
            if (remoteverificationfileexist)
            {
                Console.WriteLine("Found verification file {0} - deleting", m_Options.Prefix + "-verification.json");
                backend.Delete(m_Options.Prefix + "-verification.json");
            }
        }

        if (needspass > 0 && downloaded == 0)
        {
            Console.WriteLine("No files downloaded, try adding --passphrase to decrypt files");
            return 100;
        }

        Console.WriteLine("Download complete, of {0} remote files, {1} were downloaded with {2} errors", remotefiles.Count(), downloaded, errors);

        if (needspass > 0)
            // Fixed: the format string contained {0} but no argument was supplied,
            // which throws a FormatException at runtime
            Console.WriteLine("Additonally {0} remote files were skipped because of encryption, supply --passphrase to download those", needspass);

        if (errors > 0)
        {
            Console.WriteLine("There were errors during recompress of remote backend files!");
            return 200;
        }

        return 0;
    }
}
/// <summary>
/// Queries the manifest urls for an update that is newer than the
/// running version and acceptable for the given release channel.
/// </summary>
/// <param name="channel">The release channel to check; defaults to the configured channel</param>
/// <returns>The update info when a suitable update is found, otherwise null</returns>
public static UpdateInfo CheckForUpdate(ReleaseType channel = ReleaseType.Unknown)
{
    if (channel == ReleaseType.Unknown)
        channel = AutoUpdateSettings.DefaultUpdateChannel;

    foreach(var rawurl in MANIFEST_URLS)
    {
        var url = rawurl;

        // Attempt to match the url to change the channel if possible
        // This allows overrides to the URLs for deployment of custom builds,
        // but does not require that they adopt the channel system
        var match = AutoUpdateSettings.MATCH_AUTOUPDATE_URL.Match(url);
        if (match.Success)
        {
            var mg = match.Groups[AutoUpdateSettings.MATCH_UPDATE_URL_CHANNEL_GROUP];

            // Replace the channel name with the chosen channel
            url = url.Substring(0, mg.Index)
                     + channel.ToString().ToLowerInvariant()
                     + url.Substring(mg.Index + mg.Length);
        }

        try
        {
            using(var tmpfile = new Library.Utility.TempFile())
            {
                // Fixed: WebClient is IDisposable but was never disposed,
                // leaking the client on every manifest url probed
                using(var wc = new System.Net.WebClient())
                {
                    wc.Headers.Add(System.Net.HttpRequestHeader.UserAgent, string.Format("{0} v{1}{2}", APPNAME, SelfVersion.Version, string.IsNullOrWhiteSpace(InstallID) ? "" : " -" + InstallID));
                    wc.Headers.Add("X-Install-ID", InstallID);
                    wc.DownloadFile(url, tmpfile);
                }

                // The manifest is signature-wrapped JSON; verify while reading
                using(var fs = System.IO.File.OpenRead(tmpfile))
                using(var ss = new SignatureReadingStream(fs, SIGN_KEY))
                using(var tr = new System.IO.StreamReader(ss))
                using(var jr = new Newtonsoft.Json.JsonTextReader(tr))
                {
                    var update = new Newtonsoft.Json.JsonSerializer().Deserialize<UpdateInfo>(jr);

                    if (TryParseVersion(update.Version) <= TryParseVersion(SelfVersion.Version))
                        return null;

                    // Don't install a debug update on a release build and vice versa
                    if (string.Equals(SelfVersion.ReleaseType, "Debug", StringComparison.InvariantCultureIgnoreCase) && !string.Equals(update.ReleaseType, SelfVersion.ReleaseType, StringComparison.CurrentCultureIgnoreCase))
                        return null;

                    ReleaseType rt;
                    if (!Enum.TryParse<ReleaseType>(update.ReleaseType, true, out rt))
                        rt = ReleaseType.Unknown;

                    // If the update is too low to be considered, skip it
                    // Should never happen, but protects against mistakes in deployment
                    if (rt > channel)
                        return null;

                    LastUpdateCheckVersion = update;
                    return update;
                }
            }
        }
        catch (Exception ex)
        {
            if (OnError != null)
                OnError(ex);
        }
    }

    return null;
}
public static UpdateInfo CheckForUpdate() { foreach(var url in MANIFEST_URLS) { try { using(var tmpfile = new Library.Utility.TempFile()) { System.Net.WebClient wc = new System.Net.WebClient(); wc.Headers.Add(System.Net.HttpRequestHeader.UserAgent, string.Format("{0} v{1}{2}", APPNAME, SelfVersion.Version, string.IsNullOrWhiteSpace(InstallID) ? "" : " -" + InstallID)); wc.Headers.Add("X-Install-ID", InstallID); wc.DownloadFile(url, tmpfile); using(var fs = System.IO.File.OpenRead(tmpfile)) using(var ss = new SignatureReadingStream(fs, SIGN_KEY)) using(var tr = new System.IO.StreamReader(ss)) using(var jr = new Newtonsoft.Json.JsonTextReader(tr)) { var update = new Newtonsoft.Json.JsonSerializer().Deserialize<UpdateInfo>(jr); if (TryParseVersion(update.Version) <= TryParseVersion(SelfVersion.Version)) return null; if (string.Equals(SelfVersion.ReleaseType, "Debug", StringComparison.InvariantCultureIgnoreCase) && !string.Equals(update.ReleaseType, SelfVersion.ReleaseType, StringComparison.CurrentCultureIgnoreCase)) return null; LastUpdateCheckVersion = update; return update; } } } catch (Exception ex) { if (OnError != null) OnError(ex); } } return null; }
/// <summary>
/// Builds the remote verification file locally and uploads it to the backend,
/// or only reports what would be uploaded when running in dry-run mode.
/// </summary>
/// <param name="backendurl">The backend url</param>
/// <param name="options">The options to use</param>
/// <param name="result">The result writer</param>
/// <param name="db">The attached database</param>
/// <param name="transaction">An optional transaction object</param>
public static void UploadVerificationFile(string backendurl, Options options, IBackendWriter result, LocalDatabase db, System.Data.IDbTransaction transaction)
{
    using(var backendManager = new BackendManager(backendurl, options, result, db))
    using(var verificationFile = new Library.Utility.TempFile())
    {
        var remoteFilename = options.Prefix + "-verification.json";

        // Write the verification data into a local temporary file first
        using(var writer = new System.IO.StreamWriter(verificationFile, false, System.Text.Encoding.UTF8))
            FilelistProcessor.CreateVerificationFile(db, writer);

        if (options.Dryrun)
        {
            // Dry-run: no upload, just report the name and size it would have had
            var localSize = new System.IO.FileInfo(verificationFile).Length;
            result.AddDryrunMessage(string.Format("Would upload verification file: {0}, size: {1}", remoteFilename, Library.Utility.Utility.FormatSizeString(localSize)));
            return;
        }

        // The verification file is uploaded without encryption so it can be
        // consumed directly on the remote side
        backendManager.PutUnencrypted(remoteFilename, verificationFile);
        backendManager.WaitForComplete(db, transaction);
    }
}
/// <summary>
/// Downloads the update package described by <paramref name="version"/>, verifies its
/// size, SHA-256 and MD5 hashes, unpacks it into a temporary folder, verifies the
/// unpacked content, and installs it into a version-named folder under INSTALLDIR.
/// Each remote URL is tried in turn; failures are reported via <c>OnError</c>.
/// </summary>
/// <param name="version">The update descriptor with URLs, sizes and hashes</param>
/// <param name="progress">Optional download-progress callback receiving a value in [0,1]</param>
/// <returns>True if an update was downloaded, verified and installed; false otherwise</returns>
public static bool DownloadAndUnpackUpdate(UpdateInfo version, Action<double> progress = null)
{
    // No install directory means updates cannot be installed at all
    if (INSTALLDIR == null)
        return false;

    var updates = version.RemoteURLS.ToList();

    // If alternate update URLs are specified,
    // we look for packages there as well
    if (AutoUpdateSettings.UsesAlternateURLs)
    {
        // Reuse the package filename from the first stock URL when
        // constructing candidate URLs from the alternate bases
        var packagepath = new Library.Utility.Uri(updates[0]).Path;
        var packagename = packagepath.Split('/').Last();

        // Reverse + Insert(0, ...) keeps the alternates in their configured
        // order at the front of the candidate list
        foreach(var alt_url in AutoUpdateSettings.URLs.Reverse())
        {
            var alt_uri = new Library.Utility.Uri(alt_url);
            var path_components = alt_uri.Path.Split('/');
            // Replace the last path segment of the alternate URL with the package name
            var path = string.Join("/", path_components.Take(path_components.Count() - 1).Union(new string[] { packagename}));
            var new_path = alt_uri.SetPath(path);
            updates.Insert(0, new_path.ToString());
        }
    }

    using(var tempfile = new Library.Utility.TempFile())
    {
        foreach(var url in updates)
        {
            try
            {
                // Only wire up progress reporting if the caller asked for it;
                // progress is clamped to [0,1] based on the expected compressed size
                Action<long> cb = null;
                if (progress != null)
                    cb = (s) => { progress(Math.Min(1.0, Math.Max(0.0, (double)s / version.CompressedSize))); };

                var wreq = (System.Net.HttpWebRequest)System.Net.WebRequest.Create(url);
                wreq.UserAgent = string.Format("{0} v{1}", APPNAME, SelfVersion.Version);
                wreq.Headers.Add("X-Install-ID", InstallID);

                // Stream the package to a temp file, reporting progress along the way
                using(var resp = wreq.GetResponse())
                using(var rss = resp.GetResponseStream())
                using(var pgs = new Duplicati.Library.Utility.ProgressReportingStream(rss, version.CompressedSize, cb))
                using(var fs = System.IO.File.Open(tempfile, System.IO.FileMode.Create))
                    Duplicati.Library.Utility.Utility.CopyStream(pgs, fs);

                // NOTE(review): these hash objects are IDisposable but are never
                // disposed; consider wrapping them in using statements
                var sha256 = System.Security.Cryptography.SHA256.Create();
                var md5 = System.Security.Cryptography.MD5.Create();

                // Verify the exact file size and the SHA-256 hash before trusting the download
                using(var s = System.IO.File.OpenRead(tempfile))
                {
                    if (s.Length != version.CompressedSize)
                        throw new Exception(string.Format("Invalid file size {0}, expected {1} for {2}", s.Length, version.CompressedSize, url));

                    var sha256hash = Convert.ToBase64String(sha256.ComputeHash(s));
                    if (sha256hash != version.SHA256)
                        throw new Exception(string.Format("Damaged or corrupted file, sha256 mismatch for {0}", url));
                }

                // Second pass: verify the MD5 hash as well
                using(var s = System.IO.File.OpenRead(tempfile))
                {
                    var md5hash = Convert.ToBase64String(md5.ComputeHash(s));
                    if (md5hash != version.MD5)
                        throw new Exception(string.Format("Damaged or corrupted file, md5 mismatch for {0}", url));
                }

                using(var tempfolder = new Duplicati.Library.Utility.TempFolder())
                using(var zip = new Duplicati.Library.Compression.FileArchiveZip(tempfile, new Dictionary<string, string>()))
                {
                    // Unpack every archive entry into the temp folder, rejecting
                    // rooted paths and ".." prefixes (zip-slip protection)
                    foreach(var file in zip.ListFilesWithSize(""))
                    {
                        if (System.IO.Path.IsPathRooted(file.Key) || file.Key.Trim().StartsWith("..", StringComparison.InvariantCultureIgnoreCase))
                            throw new Exception(string.Format("Out-of-place file path detected: {0}", file.Key));

                        var targetpath = System.IO.Path.Combine(tempfolder, file.Key);
                        var targetfolder = System.IO.Path.GetDirectoryName(targetpath);
                        if (!System.IO.Directory.Exists(targetfolder))
                            System.IO.Directory.CreateDirectory(targetfolder);

                        using(var zs = zip.OpenRead(file.Key))
                        using(var fs = System.IO.File.Create(targetpath))
                            zs.CopyTo(fs);
                    }

                    // Only install the unpacked tree if it verifies against the manifest
                    if (VerifyUnpackedFolder(tempfolder, version))
                    {
                        // Install target is a folder named after the parsed version number;
                        // any stale folder with the same name is removed first
                        var versionstring = TryParseVersion(version.Version).ToString();
                        var targetfolder = System.IO.Path.Combine(INSTALLDIR, versionstring);
                        if (System.IO.Directory.Exists(targetfolder))
                            System.IO.Directory.Delete(targetfolder, true);

                        System.IO.Directory.CreateDirectory(targetfolder);

                        var tempfolderpath = Duplicati.Library.Utility.Utility.AppendDirSeparator(tempfolder);
                        var tempfolderlength = tempfolderpath.Length;

                        // Would be nice, but does not work :(
                        //System.IO.Directory.Move(tempfolder, targetfolder);

                        // Copy the unpacked tree entry-by-entry; entries ending in a
                        // directory separator are treated as folders
                        foreach(var e in Duplicati.Library.Utility.Utility.EnumerateFileSystemEntries(tempfolder))
                        {
                            var relpath = e.Substring(tempfolderlength);
                            if (string.IsNullOrWhiteSpace(relpath))
                                continue;

                            var fullpath = System.IO.Path.Combine(targetfolder, relpath);
                            if (relpath.EndsWith(System.IO.Path.DirectorySeparatorChar.ToString()))
                                System.IO.Directory.CreateDirectory(fullpath);
                            else
                                System.IO.File.Copy(e, fullpath);
                        }

                        // Verification will kick in when we list the installed updates
                        //VerifyUnpackedFolder(targetfolder, version);

                        // Record the newly installed version as current and invalidate the cache
                        System.IO.File.WriteAllText(System.IO.Path.Combine(INSTALLDIR, CURRENT_FILE), versionstring);
                        m_hasUpdateInstalled = null;

                        // Clean up old installed versions, keeping the one just installed,
                        // the running version, and the single newest of the remainder
                        var obsolete = (from n in FindInstalledVersions()
                                        where n.Value.Version != version.Version && n.Value.Version != SelfVersion.Version
                                        let x = TryParseVersion(n.Value.Version)
                                        orderby x descending
                                        select n).Skip(1).ToArray();

                        // Best-effort deletion; failures are deliberately ignored
                        foreach(var f in obsolete)
                            try { System.IO.Directory.Delete(f.Key, true); }
                            catch { }

                        return true;
                    }
                    else
                    {
                        throw new Exception(string.Format("Unable to verify unpacked folder for url: {0}", url));
                    }
                }
            }
            catch(Exception ex)
            {
                // Report the failure and try the next candidate URL
                if (OnError != null)
                    OnError(ex);
            }
        }
    }

    // All candidate URLs failed
    return false;
}
/// <summary>
/// Restores control files from the newest suitable fileset on the remote target
/// into the configured restore path. Filesets are tried in order until one yields
/// matching control files; the last failure is rethrown if none succeed.
/// </summary>
/// <param name="filterstrings">Optional filter strings selecting which control files to restore</param>
/// <param name="compositefilter">Optional additional filter combined with the filter strings</param>
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    // A restore path is mandatory; create it if it does not exist yet
    if (string.IsNullOrEmpty(m_options.Restorepath))
        throw new Exception("Cannot restore control files without --restore-path");
    if (!System.IO.Directory.Exists(m_options.Restorepath))
        System.IO.Directory.CreateDirectory(m_options.Restorepath);

    // Use the real database when present, otherwise a throwaway temp database
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(System.IO.File.Exists(m_options.Dbpath) ? m_options.Dbpath : (string)tmpdb, "RestoreControlFiles"))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter);

        try
        {
            var filteredList = ListFilesHandler.ParseAndFilterFilesets(backend.List(), m_options);
            if (filteredList.Count == 0)
                throw new Exception("No filesets found on remote target");

            // Seeded with a default error; cleared only when a fileset succeeds
            Exception lastEx = new Exception("No suitable files found on remote target");

            foreach(var fileversion in filteredList)
                try
                {
                    // Honor a stop request before fetching the next volume
                    if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    {
                        backend.WaitForComplete(db, null);
                        return;
                    }

                    var file = fileversion.Value.File;

                    // Prefer size/hash from the local database; fall back to the
                    // listed file size when the volume is unknown locally
                    long size;
                    string hash;
                    RemoteVolumeType type;
                    RemoteVolumeState state;
                    if (!db.GetRemoteVolume(file.Name, out hash, out size, out type, out state))
                        size = file.Size;

                    var res = new List<string>();

                    // Download the fileset volume and copy out every control file
                    // that matches the filter
                    using (var tmpfile = backend.Get(file.Name, size, hash))
                    using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options))
                        foreach (var cf in tmp.ControlFiles)
                            if (Library.Utility.FilterExpression.Matches(filter, cf.Key))
                            {
                                var targetpath = System.IO.Path.Combine(m_options.Restorepath, cf.Key);
                                using (var ts = System.IO.File.Create(targetpath))
                                    Library.Utility.Utility.CopyStream(cf.Value, ts);
                                res.Add(targetpath);
                            }

                    m_result.SetResult(res);

                    // Success: stop trying further filesets
                    lastEx = null;
                    break;
                }
                catch(Exception ex)
                {
                    // Remember the failure and try the next fileset,
                    // but never swallow a thread abort
                    lastEx = ex;
                    if (ex is System.Threading.ThreadAbortException)
                        throw;
                }

            if (lastEx != null)
                throw lastEx;
        }
        finally
        {
            // Always drain pending backend operations, even on failure
            backend.WaitForComplete(db, null);
        }

        db.WriteResults();
    }
}
/// <summary>
/// Lists filesets and files, preferring a fast query against the local database and
/// falling back to downloading and scanning remote fileset volumes when no local
/// database is available (or its use is disabled).
/// </summary>
/// <param name="filterstrings">Optional filter strings selecting which files to list</param>
/// <param name="compositefilter">Optional additional filter combined with the filter strings</param>
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    var parsedfilter = new Library.Utility.FilterExpression(filterstrings);
    var filter = Library.Utility.JoinedFilterExpression.Join(parsedfilter, compositefilter);

    // "Simple" output mode applies unless the filter is a simple expression
    // or all versions were requested
    var simpleList = !((filter is FilterExpression expression && expression.Type == Library.Utility.FilterType.Simple) || m_options.AllVersions);

    //Use a speedy local query
    if (!m_options.NoLocalDb && System.IO.File.Exists(m_options.Dbpath))
    {
        using (var db = new Database.LocalListDatabase(m_options.Dbpath))
        {
            m_result.SetDatabase(db);
            using (var filesets = db.SelectFileSets(m_options.Time, m_options.Version))
            {
                // With a filter, restrict to the first fileset for simple listings
                // and for folder-content listings that are not across all versions
                if (!filter.Empty)
                {
                    if (simpleList || (m_options.ListFolderContents && !m_options.AllVersions))
                    {
                        filesets.TakeFirst();
                    }
                }

                // Pick the file enumeration matching the requested listing mode;
                // null means only fileset info is reported
                IEnumerable<Database.LocalListDatabase.IFileversion> files;
                if (m_options.ListFolderContents)
                {
                    files = filesets.SelectFolderContents(filter);
                }
                else if (m_options.ListPrefixOnly)
                {
                    files = filesets.GetLargestPrefix(filter);
                }
                else if (filter.Empty)
                {
                    files = null;
                }
                else
                {
                    files = filesets.SelectFiles(filter);
                }

                if (m_options.ListSetsOnly)
                {
                    // QuickSets avoids computing per-fileset file details
                    m_result.SetResult(
                        filesets.QuickSets.Select(x => new ListResultFileset(x.Version, x.IsFullBackup, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        null
                    );
                }
                else
                {
                    m_result.SetResult(
                        filesets.Sets.Select(x => new ListResultFileset(x.Version, x.IsFullBackup, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        files == null ? null : (from n in files select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Path, n.Sizes.ToArray()))).ToArray()
                    );
                }

                return;
            }
        }
    }

    Logging.Log.WriteInformationMessage(LOGTAG, "NoLocalDatabase", "No local database, accessing remote store");

    //TODO: Add prefix and foldercontents
    if (m_options.ListFolderContents)
    {
        throw new UserInformationException("Listing folder contents is not supported without a local database, consider using the \"repair\" option to rebuild the database.", "FolderContentListingRequiresLocalDatabase");
    }
    else if (m_options.ListPrefixOnly)
    {
        throw new UserInformationException("Listing prefixes is not supported without a local database, consider using the \"repair\" option to rebuild the database.", "PrefixListingRequiresLocalDatabase");
    }

    // Otherwise, grab info from remote location
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(tmpdb, "List", true))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filteredList = ParseAndFilterFilesets(backend.List(), m_options);
        if (filteredList.Count == 0)
        {
            throw new UserInformationException("No filesets found on remote target", "EmptyRemoteFolder");
        }

        var numberSeq = CreateResultSequence(filteredList);

        // With no filter only the fileset list is reported, plus a flag
        // indicating whether any listed file appears to be encrypted
        if (filter.Empty)
        {
            m_result.SetResult(numberSeq, null);
            m_result.EncryptedFiles = filteredList.Any(x => !string.IsNullOrWhiteSpace(x.Value.EncryptionModule));
            return;
        }

        // The first fileset seeds the result; the remainder are merged below
        var firstEntry = filteredList[0].Value;
        filteredList.RemoveAt(0);

        // Maps file path -> one size entry per fileset (-1 when absent)
        Dictionary<string, List<long>> res;

        if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
        {
            return;
        }

        using (var tmpfile = backend.Get(firstEntry.File.Name, firstEntry.File.Size, null))
        using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(firstEntry.File.Name), tmpfile, m_options))
            if (simpleList)
            {
                // Simple mode: report only the first fileset's matching files, sorted
                m_result.SetResult(
                    numberSeq.Take(1),
                    (from n in rd.Files
                     where Library.Utility.FilterExpression.Matches(filter, n.Path)
                     orderby n.Path
                     select new ListResultFile(n.Path, new long[] { n.Size })).ToArray()
                );

                return;
            }
            else
            {
                // Seed the path -> sizes map from the first fileset
                res = rd.Files
                    .Where(x => Library.Utility.FilterExpression.Matches(filter, x.Path))
                    .ToDictionary(
                        x => x.Path,
                        y => { var lst = new List<long>(); lst.Add(y.Size); return (lst); },
                        Library.Utility.Utility.ClientFilenameStringComparer
                    );
            }

        // Merge each remaining fileset, padding with -1 so every path ends up
        // with exactly one size entry per processed fileset
        long flindex = 1;
        foreach (var flentry in filteredList)
        {
            using (var tmpfile = backend.Get(flentry.Value.File.Name, flentry.Value.File == null ? -1 : flentry.Value.File.Size, null))
            using (var rd = new Volumes.FilesetVolumeReader(flentry.Value.CompressionModule, tmpfile, m_options))
            {
                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                {
                    return;
                }

                foreach (var p in from n in rd.Files where Library.Utility.FilterExpression.Matches(filter, n.Path) select n)
                {
                    List<long> lst;
                    if (!res.TryGetValue(p.Path, out lst))
                    {
                        // New path: backfill -1 for all earlier filesets
                        lst = new List<long>();
                        res[p.Path] = lst;
                        for (var i = 0; i < flindex; i++)
                        {
                            lst.Add(-1);
                        }
                    }

                    lst.Add(p.Size);
                }

                // Pad paths missing from this fileset with -1
                foreach (var n in from i in res where i.Value.Count < flindex + 1 select i)
                {
                    n.Value.Add(-1);
                }

                flindex++;
            }
        }

        m_result.SetResult(
            numberSeq,
            from n in res orderby n.Key select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Key, n.Value))
        );
    }
}