/// <summary>
/// Handles an upload request: writes the "data" query parameter to a
/// temporary file and stores it on the given backend under the name
/// taken from the "filename" query parameter.
/// </summary>
/// <param name="uri">The backend URL to upload to</param>
/// <param name="info">The request carrying "data" and "filename" query parameters</param>
private void UploadFile(string uri, RequestInfo info)
{
    var data = info.Request.QueryString["data"].Value;
    var remotename = info.Request.QueryString["filename"].Value;

    // Fix: removed an unused MemoryStream that was created and disposed
    // without ever being read from or written to.
    using (var b = Duplicati.Library.DynamicLoader.BackendLoader.GetBackend(uri, new Dictionary<string, string>()))
    using (var tf = new Duplicati.Library.Utility.TempFile())
    {
        System.IO.File.WriteAllText(tf, data);
        b.Put(remotename, tf);
    }

    info.OutputOK();
}
/// <summary>
/// Uploads the contents of the "data" query parameter to the backend at
/// <paramref name="uri"/>, stored under the "filename" query parameter name.
/// </summary>
/// <param name="uri">The backend URL to upload to</param>
/// <param name="info">The request carrying "data" and "filename" query parameters</param>
private void UploadFile(string uri, RequestInfo info)
{
    var data = info.Request.QueryString["data"].Value;
    var remotename = info.Request.QueryString["filename"].Value;

    // Fix: dropped a MemoryStream that was allocated and disposed but never used.
    using (var b = Duplicati.Library.DynamicLoader.BackendLoader.GetBackend(uri, new Dictionary<string, string>()))
    using (var tf = new Duplicati.Library.Utility.TempFile())
    {
        System.IO.File.WriteAllText(tf, data);
        b.Put(remotename, tf);
    }

    info.OutputOK();
}
/// <summary>
/// Run the recreate procedure: the database is rebuilt in a temporary file
/// and only moved to <paramref name="path"/> once the rebuild completed.
/// </summary>
/// <param name="path">Path to the database that will be created</param>
/// <param name="filter">Filters the files in a filelist to prevent downloading unwanted data</param>
/// <param name="filelistfilter">A filter that can be used to disregard certain remote files, intended to be used to select a certain filelist</param>
/// <param name="blockprocessor">A callback hook that can be used to work with downloaded block volumes, intended to be use to recover data blocks while processing blocklists</param>
public void Run(string path, Library.Utility.IFilter filter = null, NumberedFilterFilelistDelegate filelistfilter = null, BlockVolumePostProcessor blockprocessor = null)
{
    // Refuse to overwrite an existing database file
    if (System.IO.File.Exists(path))
        throw new Exception(string.Format("Cannot recreate database because file already exists: {0}", path));

    using (var tempdbfile = new Duplicati.Library.Utility.TempFile())
    {
        // Build the database inside the temporary file
        using (var recreatedb = new LocalDatabase(tempdbfile, "Recreate"))
        {
            m_result.SetDatabase(recreatedb);
            DoRun(recreatedb, false, filter, filelistfilter, blockprocessor);
            recreatedb.WriteResults();
        }

        // Publish the finished database at the requested location
        System.IO.File.Move(tempdbfile, path);
    }
}
/// <summary>
/// Estimates the storage overhead added by the encryption, by encrypting
/// a file of random data with the given size and measuring the growth.
/// </summary>
/// <param name="filesize">The size of the input data</param>
/// <returns>The number of extra bytes the encrypted output occupies, never negative</returns>
public virtual long SizeOverhead(long filesize)
{
    using (Utility.TempFile plainfile = new Duplicati.Library.Utility.TempFile())
    using (Utility.TempFile encryptedfile = new Duplicati.Library.Utility.TempFile())
    {
        // Fill the plaintext file with exactly filesize random bytes
        using (var output = System.IO.File.Create(plainfile))
        {
            var generator = new Random();
            var buffer = new byte[1024];
            for (long remaining = filesize; remaining > 0; remaining -= buffer.Length)
            {
                generator.NextBytes(buffer);
                output.Write(buffer, 0, (int)Math.Min(buffer.Length, remaining));
            }
        }

        Encrypt(plainfile, encryptedfile);

        // The overhead is the size growth of the file, clamped at zero
        return Math.Max(0, new System.IO.FileInfo(encryptedfile).Length - filesize);
    }
}
/// <summary>
/// Measures how many extra bytes the encryption adds, by writing a random
/// file of the requested size, encrypting it, and comparing the sizes.
/// </summary>
/// <param name="filesize">The size of the input data</param>
/// <returns>The encrypted size minus the input size, never negative</returns>
public virtual long SizeOverhead(long filesize)
{
    using (Utility.TempFile unencrypted = new Duplicati.Library.Utility.TempFile())
    using (Utility.TempFile encrypted = new Duplicati.Library.Utility.TempFile())
    {
        // Produce filesize bytes of random input
        using (System.IO.Stream stream = System.IO.File.Create(unencrypted))
        {
            var generator = new Random();
            var block = new byte[1024];
            var missing = filesize;
            while (missing > 0)
            {
                generator.NextBytes(block);
                var count = (int)Math.Min(block.Length, missing);
                stream.Write(block, 0, count);
                missing -= block.Length;
            }
        }

        Encrypt(unencrypted, encrypted);

        // Clamp to zero in case the encrypted file is smaller
        var growth = new System.IO.FileInfo(encrypted).Length - filesize;
        return Math.Max(0, growth);
    }
}
/// <summary>
/// Entry point for the update-package builder/signer.
/// Reads options from the command line, loads (or creates) an AES-encrypted RSA
/// signing key, verifies it against the public key compiled into the auto-updater,
/// applies command line overrides to the manifest, builds the signed update
/// package and optionally creates GPG detached signatures for it.
/// Returns 0 on success and a non-zero error code otherwise.
/// </summary>
public static int Main(string[] _args)
{
    var args = new List <string>(_args);
    var opts = Duplicati.Library.Utility.CommandLineParser.ExtractOptions(args);

    string inputfolder;
    string outputfolder;
    string keyfile;
    string manifestfile;
    string keyfilepassword;
    string gpgkeyfile;
    string gpgpath;

    opts.TryGetValue("input", out inputfolder);
    opts.TryGetValue("output", out outputfolder);
    opts.TryGetValue("keyfile", out keyfile);
    opts.TryGetValue("manifest", out manifestfile);
    opts.TryGetValue("keyfile-password", out keyfilepassword);
    opts.TryGetValue("gpgkeyfile", out gpgkeyfile);
    opts.TryGetValue("gpgpath", out gpgpath);

    // Options consumed here; all remaining options are treated as manifest
    // field overrides further down
    var usedoptions = new string[] { "input", "output", "keyfile", "manifest", "keyfile-password", "gpgkeyfile", "gpgpath" };

    if (string.IsNullOrWhiteSpace(inputfolder))
    {
        Console.WriteLine("Missing input folder");
        return(4);
    }

    if (string.IsNullOrWhiteSpace(outputfolder))
    {
        Console.WriteLine("Missing output folder");
        return(4);
    }

    if (string.IsNullOrWhiteSpace(keyfile))
    {
        Console.WriteLine("Missing keyfile");
        return(4);
    }

    if (!System.IO.Directory.Exists(inputfolder))
    {
        Console.WriteLine("Input folder not found");
        return(4);
    }

    // NOTE(review): Console.ReadLine() returns null on closed stdin, which
    // would throw here — confirm the tool is only run interactively
    if (string.IsNullOrWhiteSpace(keyfilepassword))
    {
        Console.WriteLine("Enter keyfile passphrase: ");
        keyfilepassword = Console.ReadLine().Trim();
    }

    // First run: generate a fresh RSA key and store it AES-encrypted with the passphrase
    if (!System.IO.File.Exists(keyfile))
    {
        Console.WriteLine("Keyfile not found, creating new");
        var newkey = System.Security.Cryptography.RSACryptoServiceProvider.Create().ToXmlString(true);
        using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary <string, string>()))
        using (var fs = System.IO.File.OpenWrite(keyfile))
        using (var ms = new System.IO.MemoryStream(System.Text.Encoding.UTF8.GetBytes(newkey)))
            enc.Encrypt(ms, fs);
    }

    if (!System.IO.Directory.Exists(outputfolder))
    {
        System.IO.Directory.CreateDirectory(outputfolder);
    }

    var privkey = (System.Security.Cryptography.RSACryptoServiceProvider)System.Security.Cryptography.RSACryptoServiceProvider.Create();

    // Decrypt the stored keyfile and load the RSA parameters into privkey
    using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary <string, string>()))
    using (var ms = new System.IO.MemoryStream())
    using (var fs = System.IO.File.OpenRead(keyfile))
    {
        enc.Decrypt(fs, ms);
        ms.Position = 0;
        using (var sr = new System.IO.StreamReader(ms))
            privkey.FromXmlString(sr.ReadToEnd());
    }

    // The loaded key must match the public key embedded in the auto-updater,
    // otherwise clients would reject the produced package
    if (Duplicati.Library.AutoUpdater.AutoUpdateSettings.SignKey == null || privkey.ToXmlString(false) != Duplicati.Library.AutoUpdater.AutoUpdateSettings.SignKey.ToXmlString(false))
    {
        Console.WriteLine("The public key in the project is not the same as the public key from the file");
        Console.WriteLine("Try setting the key to: ");
        Console.WriteLine(privkey.ToXmlString(false));
        return(5);
    }

    string gpgkeyid = null;
    string gpgkeypassphrase = null;

    if (string.IsNullOrWhiteSpace(gpgkeyfile))
    {
        Console.WriteLine("No gpgfile, skipping GPG signature files");
    }
    else if (!System.IO.File.Exists(gpgkeyfile))
    {
        Console.WriteLine("Missing gpgfile");
        return(6);
    }
    else
    {
        // The gpg key file is AES-encrypted with the same passphrase as the keyfile
        using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary <string, string>()))
        using (var ms = new System.IO.MemoryStream())
        using (var fs = System.IO.File.OpenRead(gpgkeyfile))
        {
            enc.Decrypt(fs, ms);
            ms.Position = 0;

            // No real format, just two lines: key id first, passphrase second
            using (var sr = new System.IO.StreamReader(ms))
            {
                var lines = sr.ReadToEnd().Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
                gpgkeyid = lines[0];
                gpgkeypassphrase = lines[1];
            }
        }
    }

    // Load the manifest template
    // NOTE(review): manifestfile is not validated above — a missing --manifest
    // option crashes here instead of printing a friendly message; confirm intent
    Duplicati.Library.AutoUpdater.UpdateInfo updateInfo;
    using (var fs = System.IO.File.OpenRead(manifestfile))
    using (var sr = new System.IO.StreamReader(fs))
    using (var jr = new Newtonsoft.Json.JsonTextReader(sr))
        updateInfo = new Newtonsoft.Json.JsonSerializer().Deserialize <Duplicati.Library.AutoUpdater.UpdateInfo>(jr);

    // Any remaining option whose name matches an UpdateInfo field overrides the manifest value
    var isopts = new Dictionary <string, string>(opts, StringComparer.InvariantCultureIgnoreCase);
    foreach (var usedopt in usedoptions)
    {
        isopts.Remove(usedopt);
    }

    foreach (var k in updateInfo.GetType().GetFields())
    {
        if (isopts.ContainsKey(k.Name))
        {
            try
            {
                //Console.WriteLine("Setting {0} to {1}", k.Name, isopts[k.Name]);
                // Convert the string option to the field's declared type
                if (k.FieldType == typeof(string[]))
                {
                    k.SetValue(updateInfo, isopts[k.Name].Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries));
                }
                else if (k.FieldType == typeof(Version))
                {
                    k.SetValue(updateInfo, new Version(isopts[k.Name]));
                }
                else if (k.FieldType == typeof(int))
                {
                    k.SetValue(updateInfo, int.Parse(isopts[k.Name]));
                }
                else if (k.FieldType == typeof(long))
                {
                    k.SetValue(updateInfo, long.Parse(isopts[k.Name]));
                }
                else
                {
                    k.SetValue(updateInfo, isopts[k.Name]);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("Failed setting {0} to {1}: {2}", k.Name, isopts[k.Name], ex.Message);
            }

            isopts.Remove(k.Name);
        }
    }

    foreach (var opt in isopts)
    {
        Console.WriteLine("Warning! unused option: {0} = {1}", opt.Key, opt.Value);
    }

    // Write the patched manifest to a temp file and build the signed package from it
    using (var tf = new Duplicati.Library.Utility.TempFile())
    {
        using (var fs = System.IO.File.OpenWrite(tf))
        using (var tw = new System.IO.StreamWriter(fs))
            new Newtonsoft.Json.JsonSerializer().Serialize(tw, updateInfo);

        Duplicati.Library.AutoUpdater.UpdaterManager.CreateUpdatePackage(privkey, inputfolder, outputfolder, tf);
    }

    // Optionally produce binary and ascii-armored detached GPG signatures,
    // feeding the passphrase to gpg through stdin (--passphrase-fd 0)
    if (gpgkeyid != null)
    {
        gpgpath = gpgpath ?? "gpg";
        var srcfile = System.IO.Path.Combine(outputfolder, "package.zip");

        var proc = System.Diagnostics.Process.Start(new System.Diagnostics.ProcessStartInfo() {
            FileName = gpgpath,
            Arguments = string.Format("--passphrase-fd 0 --batch --yes --default-key={1} --output \"{0}.sig\" --detach-sig \"{0}\"", srcfile, gpgkeyid),
            RedirectStandardInput = true,
            UseShellExecute = false
        });

        proc.StandardInput.WriteLine(gpgkeypassphrase);
        proc.WaitForExit();

        proc = System.Diagnostics.Process.Start(new System.Diagnostics.ProcessStartInfo() {
            FileName = gpgpath,
            Arguments = string.Format("--passphrase-fd 0 --batch --yes --default-key={1} --armor --output \"{0}.sig.asc\" --detach-sig \"{0}\"", srcfile, gpgkeyid),
            RedirectStandardInput = true,
            UseShellExecute = false
        });

        proc.StandardInput.WriteLine(gpgkeypassphrase);
        proc.WaitForExit();
    }

    return(0);
}
/// <summary>
/// Handles POST requests that add or update a backup configuration.
/// The special key "import" delegates to ImportBackup; otherwise the request
/// form must carry a serialized backup (and optional schedule) in "data".
/// </summary>
/// <param name="key">The request sub-path, possibly "import"</param>
/// <param name="info">The request info carrying form data and the response writer</param>
public void POST(string key, RequestInfo info)
{
    if ("import".Equals(key, StringComparison.InvariantCultureIgnoreCase))
    {
        ImportBackup(info);
        return;
    }

    AddOrUpdateBackupData record = null;
    try
    {
        record = Serializer.Deserialize<AddOrUpdateBackupData>(new StringReader(info.Request.Form["data"].Value));
        if (record.Backup == null)
        {
            info.ReportClientError("Data object had no backup entry");
            return;
        }

        // The ID is assigned by the server, never taken from the client
        record.Backup.ID = null;

        if (Duplicati.Library.Utility.Utility.ParseBool(info.Request.Form["temporary"].Value, false))
        {
            // Temporary backups get a throw-away database path; the TempFile
            // is disposed immediately so only the unique file name is kept
            using (var tmpfile = new Duplicati.Library.Utility.TempFile())
                record.Backup.DBPath = tmpfile;

            Program.DataConnection.RegisterTemporaryBackup(record.Backup);
            info.OutputOK(new { status = "OK", ID = record.Backup.ID });
            return;
        }

        if (Library.Utility.Utility.ParseBool(info.Request.Form["existing_db"].Value, false))
        {
            record.Backup.DBPath = Library.Main.DatabaseLocator.GetDatabasePath(record.Backup.TargetURL, null, false, false);
            if (string.IsNullOrWhiteSpace(record.Backup.DBPath))
                throw new Exception("Unable to find remote db path?");
        }

        lock (Program.DataConnection.m_lock)
        {
            // Backup names must be unique (case-insensitive)
            var duplicate = Program.DataConnection.Backups.Any(x => x.Name.Equals(record.Backup.Name, StringComparison.InvariantCultureIgnoreCase));
            if (duplicate)
            {
                info.ReportClientError("There already exists a backup with the name: " + record.Backup.Name);
                return;
            }

            Program.DataConnection.AddOrUpdateBackupAndSchedule(record.Backup, record.Schedule);
        }

        info.OutputOK(new { status = "OK", ID = record.Backup.ID });
    }
    catch (Exception ex)
    {
        // A null record means deserialization itself failed; otherwise saving failed
        var message = record == null
            ? string.Format("Unable to parse backup or schedule object: {0}", ex.Message)
            : string.Format("Unable to save schedule or backup object: {0}", ex.Message);
        info.ReportClientError(message);
    }
}
/// <summary>
/// Handles POST requests of the form "{backup-id}/{operation}" by dispatching
/// to the matching backup operation handler.
/// </summary>
/// <param name="key">The request path: a backup id, optionally followed by an operation</param>
/// <param name="info">The request info used for output and error reporting</param>
public void POST(string key, RequestInfo info)
{
    var segments = (key ?? "").Split(new char[] { '/' }, 2);
    var backup = Program.DataConnection.GetBackup(segments.First());
    if (backup == null)
    {
        info.ReportClientError("Invalid or missing backup id");
        return;
    }

    if (segments.Length > 1)
    {
        // Only the first path element after the id selects the operation
        var operation = segments.Last().Split(new char[] { '/' }).First().ToLowerInvariant();
        switch (operation)
        {
            case "deletedb":
                System.IO.File.Delete(backup.DBPath);
                info.OutputOK();
                return;

            case "movedb":
                UpdateDatabasePath(backup, info, true);
                return;

            case "updatedb":
                UpdateDatabasePath(backup, info, false);
                return;

            case "restore":
                RestoreFiles(backup, info);
                return;

            case "createreport":
                CreateReport(backup, info);
                return;

            case "repair":
                Repair(backup, info);
                return;

            case "repairupdate":
                RepairUpdate(backup, info);
                return;

            case "verify":
                Verify(backup, info);
                return;

            case "compact":
                Compact(backup, info);
                return;

            case "start":
            case "run":
                RunBackup(backup, info);
                return;

            case "report-remote-size":
                ReportRemoteSize(backup, info);
                return;

            case "copytotemp":
                // Clone the backup via a serialization round-trip and register
                // the copy as a temporary backup with a fresh database path
                var clone = Serializer.Deserialize<Database.Backup>(new StringReader(Newtonsoft.Json.JsonConvert.SerializeObject(backup)));
                using (var tmpfile = new Duplicati.Library.Utility.TempFile())
                    clone.DBPath = tmpfile;
                clone.ID = null;
                info.OutputOK(new { status = "OK", ID = Program.DataConnection.RegisterTemporaryBackup(clone) });
                return;
        }
    }

    info.ReportClientError("Invalid request");
}
using Duplicati.Server.Serialization.Interface;
/// <summary>
/// Executes the given runner task. Custom tasks run their own callback;
/// all other tasks are mapped onto the matching Duplicati.Library.Main.Controller
/// operation. Returns the operation result, or null for operations without a
/// result (custom tasks, delete, unknown operations) and on handled failures.
/// </summary>
/// <param name="data">The task to execute, including backup, operation and extra options</param>
/// <param name="fromQueue">True when the task was dequeued by the scheduler;
/// controls progress reporting and whether failures are rethrown</param>
public static Duplicati.Library.Interface.IBasicResults Run(IRunnerData data, bool fromQueue)
{
    // Custom tasks bypass the controller entirely and run their own callback
    if (data is CustomRunnerTask)
    {
        try
        {
            var sink = new MessageSink(data.TaskID, null);
            Program.GenerateProgressState = () => sink.Copy();
            Program.StatusEventNotifyer.SignalNewEvent();

            ((CustomRunnerTask)data).Run(sink);
        }
        catch (Exception ex)
        {
            Program.DataConnection.LogError(string.Empty, "Failed while executing custom task", ex);
        }

        return(null);
    }

    var backup = data.Backup;
    if (backup.Metadata == null)
    {
        backup.Metadata = new Dictionary <string, string>();
    }

    Duplicati.Library.Utility.TempFolder tempfolder = null;

    try
    {
        var sink = new MessageSink(data.TaskID, backup.ID);
        if (fromQueue)
        {
            Program.GenerateProgressState = () => sink.Copy();
            Program.StatusEventNotifyer.SignalNewEvent();
        }

        // Merge backup options, operation defaults and per-task extra options;
        // extra options win on conflicts
        var options = ApplyOptions(backup, data.Operation, GetCommonOptions(backup, data.Operation));
        if (data.ExtraOptions != null)
        {
            foreach (var k in data.ExtraOptions)
            {
                options[k.Key] = k.Value;
            }
        }

        // Pack in the system or task config for easy restore
        if (data.Operation == DuplicatiOperation.Backup && options.ContainsKey("store-task-config"))
        {
            tempfolder = StoreTaskConfigAndGetTempFolder(data, options);
        }

        // Attach a log scope that tags all messages to relay the TaskID and BackupID
        using (Library.Logging.Log.StartScope(log => {
            log[LogWriteHandler.LOG_EXTRA_TASKID] = data.TaskID.ToString();
            log[LogWriteHandler.LOG_EXTRA_BACKUPID] = data.BackupID;
        }))
        using (tempfolder)
        using (var controller = new Duplicati.Library.Main.Controller(backup.TargetURL, options, sink))
        {
            // Record the configured throttle speeds; parse errors are
            // deliberately ignored (best-effort)
            try
            {
                if (options.ContainsKey("throttle-upload"))
                {
                    ((RunnerData)data).OriginalUploadSpeed = Duplicati.Library.Utility.Sizeparser.ParseSize(options["throttle-upload"], "kb");
                }
            }
            catch
            {
            }

            try
            {
                if (options.ContainsKey("throttle-download"))
                {
                    ((RunnerData)data).OriginalDownloadSpeed = Duplicati.Library.Utility.Sizeparser.ParseSize(options["throttle-download"], "kb");
                }
            }
            catch
            {
            }

            ((RunnerData)data).Controller = controller;
            data.UpdateThrottleSpeed();

            // Dispatch the requested operation to the controller
            switch (data.Operation)
            {
                case DuplicatiOperation.Backup:
                {
                    // Expand environment variables in the sources and drop empty entries
                    var filter = ApplyFilter(backup, data.Operation, GetCommonFilter(backup, data.Operation));
                    var sources = (from n in backup.Sources let p = SpecialFolders.ExpandEnvironmentVariables(n) where !string.IsNullOrWhiteSpace(p) select p).ToArray();

                    var r = controller.Backup(sources, filter);
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.List:
                {
                    var r = controller.List(data.FilterStrings);
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.Repair:
                {
                    var r = controller.Repair(data.FilterStrings == null ? null : new Library.Utility.FilterExpression(data.FilterStrings));
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.RepairUpdate:
                {
                    var r = controller.UpdateDatabaseWithVersions();
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.Remove:
                {
                    var r = controller.Delete();
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.Restore:
                {
                    var r = controller.Restore(data.FilterStrings);
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.Verify:
                {
                    var r = controller.Test();
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.Compact:
                {
                    var r = controller.Compact();
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.CreateReport:
                {
                    using (var tf = new Duplicati.Library.Utility.TempFile())
                    {
                        var r = controller.CreateLogDatabase(tf);
                        var tempid = Program.DataConnection.RegisterTempFile("create-bug-report", r.TargetPath, DateTime.Now.AddDays(3));

                        // Keep the temp file alive when the report was written into it
                        if (string.Equals(tf, r.TargetPath, Library.Utility.Utility.ClientFilenameStringComparison))
                        {
                            tf.Protected = true;
                        }

                        Program.DataConnection.RegisterNotification(
                            NotificationType.Information,
                            "Bugreport ready",
                            "Bugreport is ready for download",
                            null,
                            null,
                            "bug-report:created:" + tempid,
                            null,
                            "BugreportCreatedReady",
                            "",
                            (n, a) => n
                            );

                        return(r);
                    }
                }

                case DuplicatiOperation.ListRemote:
                {
                    var r = controller.ListRemote();
                    UpdateMetadata(backup, r);
                    return(r);
                }

                case DuplicatiOperation.Delete:
                {
                    // Optionally remove the remote files and the local database
                    // before deleting the backup configuration itself
                    if (Library.Utility.Utility.ParseBoolOption(data.ExtraOptions, "delete-remote-files"))
                    {
                        controller.DeleteAllRemoteFiles();
                    }

                    if (Library.Utility.Utility.ParseBoolOption(data.ExtraOptions, "delete-local-db"))
                    {
                        string dbpath;
                        options.TryGetValue("db-path", out dbpath);

                        if (!string.IsNullOrWhiteSpace(dbpath) && System.IO.File.Exists(dbpath))
                        {
                            System.IO.File.Delete(dbpath);
                        }
                    }

                    Program.DataConnection.DeleteBackup(backup);
                    Program.Scheduler.Reschedule();
                    return(null);
                }

                case DuplicatiOperation.Vacuum:
                {
                    var r = controller.Vacuum();
                    UpdateMetadata(backup, r);
                    return(r);
                }

                default:
                    //TODO: Log this
                    return(null);
            }
        }
    }
    catch (Exception ex)
    {
        Program.DataConnection.LogError(data.Backup.ID, string.Format("Failed while executing \"{0}\" with id: {1}", data.Operation, data.Backup.ID), ex);
        UpdateMetadataError(data.Backup, ex);
        Library.UsageReporter.Reporter.Report(ex);

        // Queued tasks swallow the failure (it is logged); direct calls rethrow
        if (!fromQueue)
        {
            throw;
        }

        return(null);
    }
    finally
    {
        ((RunnerData)data).Controller = null;
    }
}
/// <summary>
/// Copies an existing backup configuration into a temporary backup entry
/// with a fresh database path, and reports the new temporary ID.
/// </summary>
/// <param name="request">The HTTP request carrying the "id" parameter</param>
/// <param name="response">The HTTP response</param>
/// <param name="session">The HTTP session</param>
/// <param name="bw">The writer used to emit the JSON result</param>
private void CopyBackupToTemporary(HttpServer.IHttpRequest request, HttpServer.IHttpResponse response, HttpServer.Sessions.IHttpSession session, BodyWriter bw)
{
    // POST requests carry their parameters in the form body, others in the query string
    HttpServer.HttpInput input;
    if (request.Method.ToUpper() == "POST")
        input = request.Form;
    else
        input = request.QueryString;

    var source = Program.DataConnection.GetBackup(input["id"].Value);
    if (source == null)
    {
        ReportError(response, bw, "Invalid or missing backup id");
        return;
    }

    // Clone the backup through a serialize/deserialize round-trip
    var clone = Serializer.Deserialize<Database.Backup>(new StringReader(Newtonsoft.Json.JsonConvert.SerializeObject(source)));

    // Borrow a unique temp file name for the database path; the file itself
    // is removed again when the TempFile is disposed
    using (var tmpfile = new Duplicati.Library.Utility.TempFile())
        clone.DBPath = tmpfile;

    clone.ID = null;
    bw.OutputOK(new { status = "OK", ID = Program.DataConnection.RegisterTemporaryBackup(clone) });
}
/// <summary>
/// Entry point for the update-package builder/signer (variant without GPG support).
/// Loads (or creates) an AES-encrypted RSA signing key, verifies it against the
/// public key compiled into the auto-updater, applies command line overrides to
/// the manifest and builds the signed update package.
/// Returns 0 on success and a non-zero error code otherwise.
/// </summary>
public static int Main(string[] _args)
{
    var args = new List <string>(_args);
    var opts = Duplicati.Library.Utility.CommandLineParser.ExtractOptions(args);

    string inputfolder;
    string outputfolder;
    string keyfile;
    string manifestfile;
    string keyfilepassword;

    opts.TryGetValue("input", out inputfolder);
    opts.TryGetValue("output", out outputfolder);
    opts.TryGetValue("keyfile", out keyfile);
    opts.TryGetValue("manifest", out manifestfile);
    opts.TryGetValue("keyfile-password", out keyfilepassword);

    // Options consumed here; anything left over is treated as a manifest field override
    var usedoptions = new string[] { "input", "output", "keyfile", "manifest", "keyfile-password" };

    if (string.IsNullOrWhiteSpace(inputfolder))
    {
        Console.WriteLine("Missing input folder");
        return(4);
    }

    if (string.IsNullOrWhiteSpace(outputfolder))
    {
        Console.WriteLine("Missing output folder");
        return(4);
    }

    if (string.IsNullOrWhiteSpace(keyfile))
    {
        Console.WriteLine("Missing keyfile");
        return(4);
    }

    if (!System.IO.Directory.Exists(inputfolder))
    {
        Console.WriteLine("Input folder not found");
        return(4);
    }

    // NOTE(review): Console.ReadLine() returns null on closed stdin, which
    // would throw here — confirm the tool is only run interactively
    if (string.IsNullOrWhiteSpace(keyfilepassword))
    {
        Console.WriteLine("Enter keyfile passphrase: ");
        keyfilepassword = Console.ReadLine().Trim();
    }

    // First run: generate a fresh RSA key and store it AES-encrypted with the passphrase
    if (!System.IO.File.Exists(keyfile))
    {
        Console.WriteLine("Keyfile not found, creating new");
        var newkey = System.Security.Cryptography.RSACryptoServiceProvider.Create().ToXmlString(true);
        using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary <string, string>()))
        using (var fs = System.IO.File.OpenWrite(keyfile))
        using (var ms = new System.IO.MemoryStream(System.Text.Encoding.UTF8.GetBytes(newkey)))
            enc.Encrypt(ms, fs);
    }

    if (!System.IO.Directory.Exists(outputfolder))
    {
        System.IO.Directory.CreateDirectory(outputfolder);
    }

    var privkey = (System.Security.Cryptography.RSACryptoServiceProvider)System.Security.Cryptography.RSACryptoServiceProvider.Create();

    // Decrypt the stored keyfile and load the RSA parameters into privkey
    using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary <string, string>()))
    using (var ms = new System.IO.MemoryStream())
    using (var fs = System.IO.File.OpenRead(keyfile))
    {
        enc.Decrypt(fs, ms);
        ms.Position = 0;
        using (var sr = new System.IO.StreamReader(ms))
            privkey.FromXmlString(sr.ReadToEnd());
    }

    // The loaded key must match the public key embedded in the auto-updater,
    // otherwise clients would reject the produced package
    if (Duplicati.Library.AutoUpdater.AutoUpdateSettings.SignKey == null || privkey.ToXmlString(false) != Duplicati.Library.AutoUpdater.AutoUpdateSettings.SignKey.ToXmlString(false))
    {
        Console.WriteLine("The public key in the project is not the same as the public key from the file");
        Console.WriteLine("Try setting the key to: ");
        Console.WriteLine(privkey.ToXmlString(false));
        return(5);
    }

    // Load the manifest template
    // NOTE(review): manifestfile is not validated above — a missing --manifest
    // option crashes here instead of printing a friendly message; confirm intent
    Duplicati.Library.AutoUpdater.UpdateInfo updateInfo;
    using (var fs = System.IO.File.OpenRead(manifestfile))
    using (var sr = new System.IO.StreamReader(fs))
    using (var jr = new Newtonsoft.Json.JsonTextReader(sr))
        updateInfo = new Newtonsoft.Json.JsonSerializer().Deserialize <Duplicati.Library.AutoUpdater.UpdateInfo>(jr);

    // Remaining options whose names match UpdateInfo fields override the manifest values
    var isopts = new Dictionary <string, string>(opts, StringComparer.InvariantCultureIgnoreCase);
    foreach (var usedopt in usedoptions)
    {
        isopts.Remove(usedopt);
    }

    foreach (var k in updateInfo.GetType().GetFields())
    {
        if (isopts.ContainsKey(k.Name))
        {
            try
            {
                //Console.WriteLine("Setting {0} to {1}", k.Name, isopts[k.Name]);
                // Convert the string option to the field's declared type
                if (k.FieldType == typeof(string[]))
                {
                    k.SetValue(updateInfo, isopts[k.Name].Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries));
                }
                else if (k.FieldType == typeof(Version))
                {
                    k.SetValue(updateInfo, new Version(isopts[k.Name]));
                }
                else if (k.FieldType == typeof(int))
                {
                    k.SetValue(updateInfo, int.Parse(isopts[k.Name]));
                }
                else if (k.FieldType == typeof(long))
                {
                    k.SetValue(updateInfo, long.Parse(isopts[k.Name]));
                }
                else
                {
                    k.SetValue(updateInfo, isopts[k.Name]);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("Failed setting {0} to {1}: {2}", k.Name, isopts[k.Name], ex.Message);
            }

            isopts.Remove(k.Name);
        }
    }

    foreach (var opt in isopts)
    {
        Console.WriteLine("Warning! unused option: {0} = {1}", opt.Key, opt.Value);
    }

    // Write the patched manifest to a temp file and build the signed package from it
    using (var tf = new Duplicati.Library.Utility.TempFile())
    {
        using (var fs = System.IO.File.OpenWrite(tf))
        using (var tw = new System.IO.StreamWriter(fs))
            new Newtonsoft.Json.JsonSerializer().Serialize(tw, updateInfo);

        Duplicati.Library.AutoUpdater.UpdaterManager.CreateUpdatePackage(privkey, inputfolder, outputfolder, tf);
    }

    return(0);
}
/// <summary>
/// Runs the backend tester against the backend given in args[0]:
/// uploads randomly named and sized files, verifies the remote file listing,
/// downloads everything again, compares SHA-256 hashes and finally deletes
/// the uploaded files. Returns true when the run completed, false when aborted.
/// </summary>
/// <param name="args">Positional arguments; args[0] is the backend URL</param>
/// <param name="options">Parsed command line options controlling the test</param>
/// <param name="first">True on the first of possibly multiple runs; only then is auto-clean honored</param>
static bool Run(List<string> args, Dictionary<string, string> options, bool first)
{
    // Characters allowed in the generated remote file names
    string allowedChars = ValidFilenameChars;
    if (options.ContainsKey("extended-chars"))
        allowedChars += options["extended-chars"];
    else
        allowedChars += ExtendedChars;

    bool autoCreateFolders = Library.Utility.Utility.ParseBoolOption(options, "auto-create-folder");

    Library.Interface.IBackend backend = Library.DynamicLoader.BackendLoader.GetBackend(args[0], options);
    if (backend == null)
    {
        Console.WriteLine("Unsupported backend");
        Console.WriteLine();
        Console.WriteLine("Supported backends: " + string.Join(",", Duplicati.Library.DynamicLoader.BackendLoader.Keys));
        return false;
    }

    // Configure the generic modules that are enabled by default or explicitly requested
    string disabledModulesValue;
    string enabledModulesValue;
    options.TryGetValue("enable-module", out enabledModulesValue);
    options.TryGetValue("disable-module", out disabledModulesValue);
    string[] enabledModules = enabledModulesValue == null ? new string[0] : enabledModulesValue.Trim().ToLower().Split(',');
    string[] disabledModules = disabledModulesValue == null ? new string[0] : disabledModulesValue.Trim().ToLower().Split(',');

    List<Library.Interface.IGenericModule> loadedModules = new List<IGenericModule>();
    foreach (Library.Interface.IGenericModule m in Library.DynamicLoader.GenericLoader.Modules)
        if (Array.IndexOf<string>(disabledModules, m.Key.ToLower()) < 0 && (m.LoadAsDefault || Array.IndexOf<string>(enabledModules, m.Key.ToLower()) >= 0))
        {
            m.Configure(options);
            loadedModules.Add(m);
        }

    try
    {
        List<Library.Interface.IFileEntry> curlist = null;
        try
        {
            curlist = backend.List();
        }
        catch (FolderMissingException fex)
        {
            // Optionally create the remote folder and retry the listing once
            if (autoCreateFolders)
            {
                try
                {
                    backend.CreateFolder();
                    curlist = backend.List();
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Autocreate folder failed with message: " + ex.Message);
                }
            }

            if (curlist == null)
                throw fex;
        }

        // The remote folder must be empty; with --auto-clean and --force on the
        // first run, pre-existing files are deleted instead of aborting.
        // Note: the else binds to the inner if (--force), so without --force the
        // hint is printed and the run still aborts below.
        foreach (Library.Interface.IFileEntry fe in curlist)
            if (!fe.IsFolder)
            {
                if (Library.Utility.Utility.ParseBoolOption(options, "auto-clean") && first)
                    if (Library.Utility.Utility.ParseBoolOption(options, "force"))
                    {
                        Console.WriteLine("Auto clean, removing file: {0}", fe.Name);
                        backend.Delete(fe.Name);
                        continue;
                    }
                    else
                        Console.WriteLine("Specify the --force flag to actually delete files");

                Console.WriteLine("*** Remote folder is not empty, aborting");
                return false;
            }

        // Test parameters, overridable from the command line
        int number_of_files = 10;
        int min_file_size = 1024;
        int max_file_size = 1024 * 1024 * 50;
        int min_filename_size = 5;
        int max_filename_size = 80;
        bool disableStreaming = Library.Utility.Utility.ParseBoolOption(options, "disable-streaming-transfers");
        bool skipOverwriteTest = Library.Utility.Utility.ParseBoolOption(options, "skip-overwrite-test");

        if (options.ContainsKey("number-of-files"))
            number_of_files = int.Parse(options["number-of-files"]);
        if (options.ContainsKey("min-file-size"))
            min_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["min-file-size"], "mb");
        if (options.ContainsKey("max-file-size"))
            max_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["max-file-size"], "mb");
        if (options.ContainsKey("min-filename-length"))
            min_filename_size = int.Parse(options["min-filename-length"]);
        if (options.ContainsKey("max-filename-length"))
            max_filename_size = int.Parse(options["max-filename-length"]);

        Random rnd = new Random();
        System.Security.Cryptography.SHA256 sha = System.Security.Cryptography.SHA256.Create();

        //Create random files
        using (Library.Utility.TempFolder tf = new Duplicati.Library.Utility.TempFolder())
        {
            List<TempFile> files = new List<TempFile>();
            for (int i = 0; i < number_of_files; i++)
            {
                // Random remote name drawn from the allowed character set
                StringBuilder filename = new StringBuilder();
                int filenamelen = rnd.Next(min_filename_size, max_filename_size);
                for (int j = 0; j < filenamelen; j++)
                    filename.Append(allowedChars[rnd.Next(0, allowedChars.Length)]);

                string localfilename = CreateRandomFile(tf, i, min_file_size, max_file_size, rnd);

                //Calculate local hash and length
                using (System.IO.FileStream fs = new System.IO.FileStream(localfilename, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                    files.Add(new TempFile(filename.ToString(), localfilename, sha.ComputeHash(fs), fs.Length));
            }

            byte[] dummyFileHash = null;
            if (!skipOverwriteTest)
            {
                Console.WriteLine("Uploading wrong files ...");
                using (Library.Utility.TempFile dummy = Library.Utility.TempFile.WrapExistingFile(CreateRandomFile(tf, files.Count, 1024, 2048, rnd)))
                {
                    using (System.IO.FileStream fs = new System.IO.FileStream(dummy, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                        dummyFileHash = sha.ComputeHash(fs);

                    //Upload a dummy file for entry 0 and the last one, they will be replaced by the real files afterwards
                    //We upload entry 0 twice just to try to freak any internal cache list
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, files.Count - 1, files[files.Count - 1].remotefilename, backend, disableStreaming);
                }
            }

            Console.WriteLine("Uploading files ...");
            for (int i = 0; i < files.Count; i++)
                Uploadfile(files[i].localfilename, i, files[i].remotefilename, backend, disableStreaming);

            // Cross-check the remote listing against the uploaded files:
            // duplicates, size mismatches, missing and unexpected entries
            Console.WriteLine("Verifying file list ...");
            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
                if (!fe.IsFolder)
                {
                    bool found = false;
                    foreach (TempFile tx in files)
                        if (tx.remotefilename == fe.Name)
                        {
                            if (tx.found)
                                Console.WriteLine("*** File with name {0} was found more than once", tx.remotefilename);
                            found = true;
                            tx.found = true;

                            if (fe.Size > 0 && tx.length != fe.Size)
                                Console.WriteLine("*** File with name {0} has size {1} but the size was reported as {2}", tx.remotefilename, tx.length, fe.Size);

                            break;
                        }

                    if (!found)
                        Console.WriteLine("*** File with name {0} was found on server but not uploaded!", fe.Name);
                }

            foreach (TempFile tx in files)
                if (!tx.found)
                    Console.WriteLine("*** File with name {0} was uploaded but not found afterwards", tx.remotefilename);

            Console.WriteLine("Downloading files");

            for (int i = 0; i < files.Count; i++)
            {
                using (Duplicati.Library.Utility.TempFile cf = new Duplicati.Library.Utility.TempFile())
                {
                    Exception e = null;
                    Console.Write("Downloading file {0} ... ", i);

                    try
                    {
                        // Use the streaming interface when available, unless disabled
                        if (backend is Library.Interface.IStreamingBackend && !disableStreaming)
                        {
                            using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None))
                            using (NonSeekableStream nss = new NonSeekableStream(fs))
                                (backend as Library.Interface.IStreamingBackend).Get(files[i].remotefilename, nss);
                        }
                        else
                            backend.Get(files[i].remotefilename, cf);

                        e = null;
                    }
                    catch (Exception ex)
                    {
                        e = ex;
                    }

                    if (e != null)
                        Console.WriteLine("failed\n*** Error: {0}", e.ToString());
                    else
                        Console.WriteLine("done");

                    // Compare the downloaded content hash against the recorded one;
                    // a match with the dummy hash means the overwrite did not take effect
                    Console.Write("Checking hash ... ");

                    using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                        if (Convert.ToBase64String(sha.ComputeHash(fs)) != Convert.ToBase64String(files[i].hash))
                        {
                            if (dummyFileHash != null && Convert.ToBase64String(sha.ComputeHash(fs)) == Convert.ToBase64String(dummyFileHash))
                                Console.WriteLine("failed\n*** Downloaded file was the dummy file");
                            else
                                Console.WriteLine("failed\n*** Downloaded file was corrupt");
                        }
                        else
                            Console.WriteLine("done");
                }
            }

            Console.WriteLine("Deleting files...");

            foreach (TempFile tx in files)
                try
                {
                    backend.Delete(tx.remotefilename);
                }
                catch (Exception ex)
                {
                    Console.WriteLine("*** Failed to delete file {0}, message: {1}", tx.remotefilename, ex.ToString());
                }

            // Anything still listed after the delete pass is reported
            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
                if (!fe.IsFolder)
                {
                    Console.WriteLine("*** Remote folder contains {0} after cleanup", fe.Name);
                }
        }
    }
    finally
    {
        // Dispose all generic modules that were configured for this run
        foreach (Library.Interface.IGenericModule m in loadedModules)
            if (m is IDisposable)
                ((IDisposable)m).Dispose();
    }

    return true;
}
/// <summary>
/// Entry point for the update package build/sign tool.
/// Reads command line options, loads (or creates) the AES-encrypted RSA signing key,
/// optionally loads a GPG key id/passphrase, patches the update manifest with any
/// remaining command line options, builds the signed package via
/// Duplicati.Library.AutoUpdater.UpdaterManager.CreateUpdatePackage, and finally
/// creates detached GPG signatures for the resulting package.zip.
/// </summary>
/// <param name="_args">Raw command line arguments; --name=value pairs are extracted as options.</param>
/// <returns>0 on success; 4 for missing/invalid arguments, 5 for a signing key mismatch, 6 for a missing gpg key file.</returns>
public static int Main(string[] _args)
{
    var args = new List<string>(_args);
    var opts = Duplicati.Library.Utility.CommandLineParser.ExtractOptions(args);

    string inputfolder;
    string outputfolder;
    string keyfile;
    string manifestfile;
    string keyfilepassword;
    string gpgkeyfile;
    string gpgpath;

    opts.TryGetValue("input", out inputfolder);
    opts.TryGetValue("output", out outputfolder);
    opts.TryGetValue("keyfile", out keyfile);
    opts.TryGetValue("manifest", out manifestfile);
    opts.TryGetValue("keyfile-password", out keyfilepassword);
    opts.TryGetValue("gpgkeyfile", out gpgkeyfile);
    opts.TryGetValue("gpgpath", out gpgpath);

    // Options consumed by the tool itself; every other option is applied to the manifest fields below
    var usedoptions = new string[] { "input", "output", "keyfile", "manifest", "keyfile-password", "gpgkeyfile", "gpgpath" };

    if (string.IsNullOrWhiteSpace(inputfolder))
    {
        Console.WriteLine("Missing input folder");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(outputfolder))
    {
        Console.WriteLine("Missing output folder");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(keyfile))
    {
        Console.WriteLine("Missing keyfile");
        return 4;
    }

    if (!System.IO.Directory.Exists(inputfolder))
    {
        Console.WriteLine("Input folder not found");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(keyfilepassword))
    {
        Console.WriteLine("Enter keyfile passphrase: ");
        keyfilepassword = Console.ReadLine().Trim();
    }

    // First run: generate a new RSA keypair and store it AES-encrypted with the passphrase
    if (!System.IO.File.Exists(keyfile))
    {
        Console.WriteLine("Keyfile not found, creating new");
        var newkey = System.Security.Cryptography.RSACryptoServiceProvider.Create().ToXmlString(true);
        using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary<string, string>()))
        using (var fs = System.IO.File.OpenWrite(keyfile))
        using (var ms = new System.IO.MemoryStream(System.Text.Encoding.UTF8.GetBytes(newkey)))
            enc.Encrypt(ms, fs);
    }

    if (!System.IO.Directory.Exists(outputfolder))
        System.IO.Directory.CreateDirectory(outputfolder);

    // Decrypt the keyfile and load the private signing key
    var privkey = (System.Security.Cryptography.RSACryptoServiceProvider)System.Security.Cryptography.RSACryptoServiceProvider.Create();

    using(var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary<string, string>()))
    using(var ms = new System.IO.MemoryStream())
    using(var fs = System.IO.File.OpenRead(keyfile))
    {
        enc.Decrypt(fs, ms);
        ms.Position = 0;

        using(var sr = new System.IO.StreamReader(ms))
            privkey.FromXmlString(sr.ReadToEnd());
    }

    // Refuse to sign with a key whose public part does not match the one compiled into the updater
    if (Duplicati.Library.AutoUpdater.AutoUpdateSettings.SignKey == null || privkey.ToXmlString(false) != Duplicati.Library.AutoUpdater.AutoUpdateSettings.SignKey.ToXmlString(false))
    {
        Console.WriteLine("The public key in the project is not the same as the public key from the file");
        Console.WriteLine("Try setting the key to: ");
        Console.WriteLine(privkey.ToXmlString(false));
        return 5;
    }

    string gpgkeyid = null;
    string gpgkeypassphrase = null;

    if (string.IsNullOrWhiteSpace(gpgkeyfile))
    {
        Console.WriteLine("No gpgfile, skipping GPG signature files");
    }
    else if (!System.IO.File.Exists(gpgkeyfile))
    {
        Console.WriteLine("Missing gpgfile");
        return 6;
    }
    else
    {
        using(var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary<string, string>()))
        using(var ms = new System.IO.MemoryStream())
        using(var fs = System.IO.File.OpenRead(gpgkeyfile))
        {
            enc.Decrypt(fs, ms);
            ms.Position = 0;

            // No real format, just two lines: the key id followed by the passphrase
            // NOTE(review): fewer than two non-empty lines throws IndexOutOfRangeException here — confirm the file format is guaranteed
            using (var sr = new System.IO.StreamReader(ms))
            {
                var lines = sr.ReadToEnd().Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
                gpgkeyid = lines[0];
                gpgkeypassphrase = lines[1];
            }
        }
    }

    // Load the base manifest
    // NOTE(review): a missing --manifest option makes File.OpenRead(null) throw here — confirm whether the option is effectively mandatory
    Duplicati.Library.AutoUpdater.UpdateInfo updateInfo;
    using (var fs = System.IO.File.OpenRead(manifestfile))
    using (var sr = new System.IO.StreamReader(fs))
    using (var jr = new Newtonsoft.Json.JsonTextReader(sr))
        updateInfo = new Newtonsoft.Json.JsonSerializer().Deserialize<Duplicati.Library.AutoUpdater.UpdateInfo>(jr);

    // Any remaining option whose name matches a public field on UpdateInfo overrides that field
    var isopts = new Dictionary<string, string>(opts, StringComparer.InvariantCultureIgnoreCase);
    foreach (var usedopt in usedoptions)
        isopts.Remove(usedopt);

    foreach (var k in updateInfo.GetType().GetFields())
        if (isopts.ContainsKey(k.Name))
        {
            try
            {
                //Console.WriteLine("Setting {0} to {1}", k.Name, isopts[k.Name]);
                if (k.FieldType == typeof(string[]))
                    k.SetValue(updateInfo, isopts[k.Name].Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries));
                else if (k.FieldType == typeof(Version))
                    k.SetValue(updateInfo, new Version(isopts[k.Name]));
                else if (k.FieldType == typeof(int))
                    k.SetValue(updateInfo, int.Parse(isopts[k.Name]));
                else if (k.FieldType == typeof(long))
                    k.SetValue(updateInfo, long.Parse(isopts[k.Name]));
                else
                    k.SetValue(updateInfo, isopts[k.Name]);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Failed setting {0} to {1}: {2}", k.Name, isopts[k.Name], ex.Message);
            }

            isopts.Remove(k.Name);
        }

    foreach(var opt in isopts)
        Console.WriteLine("Warning! unused option: {0} = {1}", opt.Key, opt.Value);

    // Write the patched manifest to a temp file and build the signed package from it
    using (var tf = new Duplicati.Library.Utility.TempFile())
    {
        using (var fs = System.IO.File.OpenWrite(tf))
        using (var tw = new System.IO.StreamWriter(fs))
            new Newtonsoft.Json.JsonSerializer().Serialize(tw, updateInfo);

        Duplicati.Library.AutoUpdater.UpdaterManager.CreateUpdatePackage(privkey, inputfolder, outputfolder, tf);
    }

    // Create detached GPG signatures (binary .sig and ascii-armored .sig.asc),
    // feeding the passphrase over stdin (--passphrase-fd 0)
    if (gpgkeyid != null)
    {
        gpgpath = gpgpath ?? "gpg";
        var srcfile = System.IO.Path.Combine(outputfolder, "package.zip");

        var proc = System.Diagnostics.Process.Start(new System.Diagnostics.ProcessStartInfo() {
            FileName = gpgpath,
            Arguments = string.Format("--passphrase-fd 0 --batch --yes --default-key={1} --output \"{0}.sig\" --detach-sig \"{0}\"", srcfile, gpgkeyid),
            RedirectStandardInput = true,
            UseShellExecute = false
        });

        proc.StandardInput.WriteLine(gpgkeypassphrase);
        proc.WaitForExit();

        proc = System.Diagnostics.Process.Start(new System.Diagnostics.ProcessStartInfo() {
            FileName = gpgpath,
            Arguments = string.Format("--passphrase-fd 0 --batch --yes --default-key={1} --armor --output \"{0}.sig.asc\" --detach-sig \"{0}\"", srcfile, gpgkeyid),
            RedirectStandardInput = true,
            UseShellExecute = false
        });

        proc.StandardInput.WriteLine(gpgkeypassphrase);
        proc.WaitForExit();
    }

    return 0;
}
/// <summary>
/// Executes the operation described by <paramref name="data"/> against its backup,
/// wiring progress reporting, per-operation log files and (for backups) an optional
/// embedded copy of the task/system configuration, then dispatches to the matching
/// Controller method and records result metadata on the backup.
/// </summary>
/// <param name="data">The task to run; its Controller property is set for the duration of the run.</param>
/// <param name="fromQueue">True when run from the scheduler queue; enables progress state publishing and suppresses rethrow of failures.</param>
/// <returns>The operation result, or null for an unknown operation or a queued run that failed.</returns>
public static Duplicati.Library.Interface.IBasicResults Run(IRunnerData data, bool fromQueue)
{
    var backup = data.Backup;
    Duplicati.Library.Utility.TempFolder tempfolder = null;

    if (backup.Metadata == null)
        backup.Metadata = new Dictionary<string, string>();

    try
    {
        var options = ApplyOptions(backup, data.Operation, GetCommonOptions(backup, data.Operation));
        var sink = new MessageSink(data.TaskID, backup.ID);
        if (fromQueue)
        {
            Program.GenerateProgressState = () => sink.Copy();
            Program.StatusEventNotifyer.SignalNewEvent();
        }

        if (data.ExtraOptions != null)
            foreach(var k in data.ExtraOptions)
                options[k.Key] = k.Value;

        // Log file is using the internal log-handler
        // so we can display output in the GUI as well as log
        // into the given file
        if (options.ContainsKey("log-file"))
        {
            var file = options["log-file"];

            string o;
            Library.Logging.LogMessageType level;
            options.TryGetValue("log-level", out o);
            Enum.TryParse<Library.Logging.LogMessageType>(o, true, out level);

            options.Remove("log-file");
            options.Remove("log-level");

            Program.LogHandler.SetOperationFile(file, level);
        }

        // Pack in the system or task config for easy restore
        if (data.Operation == DuplicatiOperation.Backup && options.ContainsKey("store-task-config"))
        {
            // "all"/"*" stores every non-temporary backup config; a boolean value stores just this task
            var all_tasks = string.Equals(options["store-task-config"], "all", StringComparison.InvariantCultureIgnoreCase) || string.Equals(options["store-task-config"], "*", StringComparison.InvariantCultureIgnoreCase);
            var this_task = Duplicati.Library.Utility.Utility.ParseBool(options["store-task-config"], false);

            options.Remove("store-task-config");

            if (all_tasks || this_task)
            {
                if (tempfolder == null)
                    tempfolder = new Duplicati.Library.Utility.TempFolder();

                var temppath = System.IO.Path.Combine(tempfolder, "task-setup.json");
                using(var tempfile = Duplicati.Library.Utility.TempFile.WrapExistingFile(temppath))
                {
                    object taskdata = null;
                    if (all_tasks)
                        taskdata = Program.DataConnection.Backups.Where(x => !x.IsTemporary).Select(x => Program.DataConnection.PrepareBackupForExport(Program.DataConnection.GetBackup(x.ID)));
                    else
                        taskdata = new [] { Program.DataConnection.PrepareBackupForExport(data.Backup) };

                    using(var fs = System.IO.File.OpenWrite(tempfile))
                    using(var sw = new System.IO.StreamWriter(fs, System.Text.Encoding.UTF8))
                        Serializer.SerializeJson(sw, taskdata, true);

                    // Keep the file on disk after the using block; the controller consumes it via control-files
                    tempfile.Protected = true;

                    string controlfiles = null;
                    options.TryGetValue("control-files", out controlfiles);

                    if (string.IsNullOrWhiteSpace(controlfiles))
                        controlfiles = tempfile;
                    else
                        controlfiles += System.IO.Path.PathSeparator + tempfile;

                    options["control-files"] = controlfiles;
                }
            }
        }

        // The tempfolder (if any) must outlive the controller run, hence the paired using
        using(tempfolder)
        using(var controller = new Duplicati.Library.Main.Controller(backup.TargetURL, options, sink))
        {
            ((RunnerData)data).Controller = controller;

            switch (data.Operation)
            {
                case DuplicatiOperation.Backup:
                    {
                        var filter = ApplyFilter(backup, data.Operation, GetCommonFilter(backup, data.Operation));
                        var sources = (from n in backup.Sources let p = SpecialFolders.ExpandEnvironmentVariables(n) where !string.IsNullOrWhiteSpace(p) select p).ToArray();

                        var r = controller.Backup(sources, filter);
                        UpdateMetadata(backup, r);
                        return r;
                    }

                case DuplicatiOperation.List:
                    {
                        var r = controller.List(data.FilterStrings);
                        UpdateMetadata(backup, r);
                        return r;
                    }

                case DuplicatiOperation.Repair:
                    {
                        var r = controller.Repair(data.FilterStrings == null ? null : new Library.Utility.FilterExpression(data.FilterStrings));
                        UpdateMetadata(backup, r);
                        return r;
                    }

                case DuplicatiOperation.RepairUpdate:
                    {
                        var r = controller.UpdateDatabaseWithVersions();
                        UpdateMetadata(backup, r);
                        return r;
                    }

                case DuplicatiOperation.Remove:
                    {
                        var r = controller.Delete();
                        UpdateMetadata(backup, r);
                        return r;
                    }

                case DuplicatiOperation.Restore:
                    {
                        var r = controller.Restore(data.FilterStrings);
                        UpdateMetadata(backup, r);
                        return r;
                    }

                case DuplicatiOperation.Verify:
                    {
                        var r = controller.Test();
                        UpdateMetadata(backup, r);
                        return r;
                    }

                case DuplicatiOperation.CreateReport:
                    {
                        using(var tf = new Duplicati.Library.Utility.TempFile())
                        {
                            var r = controller.CreateLogDatabase(tf);
                            var tempid = Program.DataConnection.RegisterTempFile("create-bug-report", r.TargetPath, DateTime.Now.AddDays(3));

                            // Only protect the temp file if the controller actually wrote the report there
                            if (string.Equals(tf, r.TargetPath, Library.Utility.Utility.ClientFilenameStringComparision))
                                tf.Protected = true;

                            Program.DataConnection.RegisterNotification(
                                NotificationType.Information,
                                "Bugreport ready",
                                "Bugreport is ready for download",
                                null,
                                null,
                                "bug-report:created:" + tempid,
                                (n, a) => n
                            );

                            return r;
                        }
                    }

                default:
                    //TODO: Log this
                    return null;
            }
        }
    }
    catch (Exception ex)
    {
        Program.DataConnection.LogError(data.Backup.ID, string.Format("Failed while executing \"{0}\" with id: {1}", data.Operation, data.Backup.ID), ex);
        UpdateMetadataError(data.Backup, ex);
        Library.UsageReporter.Reporter.Report(ex);

        // Interactive (non-queued) callers get the exception; queued runs only log it
        if (!fromQueue)
            throw;

        return null;
    }
    finally
    {
        ((RunnerData)data).Controller = null;
        Program.LogHandler.RemoveOperationFile();
    }
}
/// <summary>
/// Prepares a local file for upload (encrypting it unless disabled or it is a
/// verification entry), records its hash, generated remote name and size on the
/// entry, and then either uploads synchronously or enqueues it for the async
/// upload worker, throttling when the pending queue exceeds the configured limit.
/// </summary>
/// <param name="remote">The entry describing the remote file; hash/filename/size fields are filled in here.</param>
/// <param name="filename">Path to the local file to upload; replaced by the encrypted copy when encryption is applied.</param>
/// <param name="forcesync">When true in async mode, drains the pending queue and then uploads synchronously.</param>
private void Put(BackupEntryBase remote, string filename, bool forcesync)
{
    // Encrypt everything except verification entries, unless already encrypted or encryption is off
    if (!remote.IsEncrypted && !m_options.NoEncryption && remote as VerificationEntry == null)
    {
        if (m_encryption == null)
            m_encryption = DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions);

        using (Utility.TempFile raw = new Duplicati.Library.Utility.TempFile(filename))
        using (Utility.TempFile enc = new Duplicati.Library.Utility.TempFile())
        {
            m_encryption.Encrypt(raw, enc);
            filename = enc;
            // Keep the encrypted file after dispose; let the plaintext original be deleted
            enc.Protected = true;
            raw.Protected = false;
        }

        remote.IsEncrypted = true;
    }

    remote.RemoteHash = Utility.Utility.CalculateHash(filename);
    remote.Filename = GenerateFilename(remote);
    remote.Filesize = new System.IO.FileInfo(filename).Length;

    if (!m_async)
        PutInternal(remote, filename);
    else
    {
        if (forcesync)
        {
            // Wait (polling every 5s) until the async queue is empty, then upload inline
            int count;

            lock (m_queuelock)
                count = m_pendingOperations.Count;

            while (count > 0)
            {
                m_asyncItemProcessed.WaitOne(1000 * 5, false);
                lock (m_queuelock)
                    count = m_pendingOperations.Count;
            }

            PutInternal(remote, filename);
        }
        else
        {
            bool waitForCompletion;

            //There are 3 files in a volume (signature, content and manifest) + a verification file
            int uploads_in_set = m_options.CreateVerificationFile ? 4 : 3;

            lock (m_queuelock)
            {
                // Surface any failure from the upload worker on the caller's thread
                if (m_workerException != null)
                    throw m_workerException;

                m_pendingOperations.Enqueue(new KeyValuePair<BackupEntryBase, string>(remote, filename));
                m_asyncItemReady.Set();

                waitForCompletion = m_options.AsynchronousUploadLimit > 0 && m_pendingOperations.Count > (m_options.AsynchronousUploadLimit * uploads_in_set);
            }

            // Back-pressure: block while the queue is over the configured upload limit
            while (waitForCompletion)
            {
                m_asyncItemProcessed.WaitOne(1000 * 5, false);
                lock (m_queuelock)
                {
                    if (m_workerException != null)
                        throw m_workerException;

                    waitForCompletion = m_options.AsynchronousUploadLimit > 0 && m_pendingOperations.Count > (m_options.AsynchronousUploadLimit * uploads_in_set);
                }
            }
        }
    }
}
/// <summary>
/// Downloads <paramref name="remote"/> into <paramref name="filename"/>, serving
/// signature files from the local cache when possible, verifying size and hash
/// against the manifest, decrypting when needed, and refreshing the signature
/// cache. Retries up to m_options.NumberOfRetries times; hash-mismatch and
/// cryptographic failures are rethrown unchanged, any other final failure is
/// wrapped in a generic download exception.
/// </summary>
/// <param name="remote">The remote entry to fetch.</param>
/// <param name="manifest">Manifest used for hash checks; its Version decides whether hashes cover the encrypted (v3+) or decrypted (v1/v2) file. May be null.</param>
/// <param name="filename">Destination path for the final (decrypted) file.</param>
/// <param name="hash">Expected hash/size entry; may be null to skip verification.</param>
private void GetInternal(BackupEntryBase remote, Manifestfile manifest, string filename, Manifestfile.HashEntry hash)
{
    int retries = m_options.NumberOfRetries;
    Exception lastEx = null;
    m_statusmessage = string.Format(Strings.BackendWrapper.StatusMessageDownloading, remote.Filename);

    do
    {
        try
        {
            // Try to satisfy signature downloads from the local signature cache first
            if (manifest != null && !string.IsNullOrEmpty(m_options.SignatureCachePath) && hash != null && remote is SignatureEntry)
            {
                string cachefilename = FindCacheEntry(remote as SignatureEntry);
                if (cachefilename != null && System.IO.File.Exists(cachefilename))
                {
                    // Only use the cache entry when its size and hash match the manifest
                    if ((hash.Size < 0 || new System.IO.FileInfo(cachefilename).Length == hash.Size) && Utility.Utility.CalculateHash(cachefilename) == hash.Hash)
                    {
                        // Manifest v3 caches the ENCRYPTED file, so it must be decrypted on the way out
                        if (manifest.Version > 2 && !string.IsNullOrEmpty(remote.EncryptionMode))
                        {
                            try
                            {
                                using (Library.Interface.IEncryption enc = DynamicLoader.EncryptionLoader.GetModule(remote.EncryptionMode, m_options.Passphrase, m_options.RawOptions))
                                    enc.Decrypt(cachefilename, filename);

                                return;
                            }
                            catch (Exception ex)
                            {
                                // A cache entry that fails to decrypt is discarded; fall through to a real download
                                m_statistics.LogWarning(string.Format(Strings.BackendWrapper.CachedSignatureDecryptWarning, cachefilename, ex.Message), null);
                                try { System.IO.File.Delete(cachefilename); }
                                catch { }
                            }
                        }
                        else
                        {
                            //TODO: Don't copy, but just return it as write protected
                            System.IO.File.Copy(cachefilename, filename, true);
                            return;
                        }
                    }
                    else
                    {
                        // Corrupt cache entry: warn, drop it, and download instead
                        m_statistics.LogWarning(string.Format(Strings.BackendWrapper.CachedSignatureHashMismatchWarning, cachefilename), null);
                        try { System.IO.File.Delete(cachefilename); }
                        catch { }
                    }
                }
            }

            Utility.TempFile tempfile = null;
            try
            {
                // Encrypted files go to a scratch file and are decrypted into place later;
                // plaintext files are downloaded directly to the destination path
                if (!string.IsNullOrEmpty(remote.EncryptionMode))
                    tempfile = new Duplicati.Library.Utility.TempFile();
                else
                    tempfile = new Duplicati.Library.Utility.TempFile(filename);

                ResetBackend();
                m_statistics.AddNumberOfRemoteCalls(1);

                if (m_backend is Duplicati.Library.Interface.IStreamingBackend && !m_options.DisableStreamingTransfers)
                {
                    // Streaming download with progress reporting and throttling
                    using (System.IO.FileStream fs = System.IO.File.Open(tempfile, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None))
                    using (Utility.ProgressReportingStream pgs = new Duplicati.Library.Utility.ProgressReportingStream(fs, remote.Fileentry.Size))
                    using (Utility.ThrottledStream ts = new Duplicati.Library.Utility.ThrottledStream(pgs, m_options.MaxUploadPrSecond, m_options.MaxDownloadPrSecond))
                    {
                        pgs.Progress += new Duplicati.Library.Utility.ProgressReportingStream.ProgressDelegate(pgs_Progress);
                        ts.Callback += new Duplicati.Library.Utility.ThrottledStream.ThrottledStreamCallback(ThrottledStream_Callback);
                        ((Duplicati.Library.Interface.IStreamingBackend)m_backend).Get(remote.Filename, ts);
                    }
                }
                else
                {
                    // Non-streaming backends give no incremental progress, so report 50%/100% around the call
                    if (!m_async && ProgressEvent != null)
                        ProgressEvent(50, m_statusmessage);
                    m_backend.Get(remote.Filename, tempfile);
                    if (!m_async && ProgressEvent != null)
                        ProgressEvent(100, m_statusmessage);
                }

                //This is required so we are sure that the file was downloaded completely and not partially,
                // as any exception here will cause a retry, but using a partial file may cause random errors
                if (remote.Fileentry.Size > 0 && remote.Fileentry.Size != new System.IO.FileInfo(tempfile).Length)
                    throw new Exception(string.Format(Strings.BackendWrapper.DownloadedFileSizeError, remote.Filename, remote.Fileentry.Size, new System.IO.FileInfo(tempfile).Length));

                remote.RemoteHash = Utility.Utility.CalculateHash(tempfile);

                //Manifest version 3 has hashes WITH encryption
                if (manifest != null && manifest.Version > 2)
                {
                    if (hash != null && remote.RemoteHash != hash.Hash)
                        throw new HashMismathcException(string.Format(Strings.BackendWrapper.HashMismatchError, remote.Filename, hash.Hash, Utility.Utility.CalculateHash(tempfile)));

                    // Refresh the signature cache with the verified (still encrypted) file
                    if (!string.IsNullOrEmpty(m_options.SignatureCachePath) && remote is SignatureEntry)
                    {
                        string cachefilename = System.IO.Path.Combine(m_options.SignatureCachePath, m_cachefilenamestrategy.GenerateFilename(remote));
                        try { System.IO.File.Copy(tempfile, cachefilename, true); }
                        catch (Exception ex) { m_statistics.LogWarning(string.Format(Strings.BackendWrapper.SaveCacheFileError, cachefilename), ex); }
                    }
                }

                if (!string.IsNullOrEmpty(remote.EncryptionMode))
                {
                    try
                    {
                        using (Library.Interface.IEncryption enc = DynamicLoader.EncryptionLoader.GetModule(remote.EncryptionMode, m_options.Passphrase, m_options.RawOptions))
                            enc.Decrypt(tempfile, filename);
                    }
                    catch (Exception ex)
                    {
                        //If we fail here, make sure that we throw a crypto exception
                        if (ex is System.Security.Cryptography.CryptographicException)
                            throw;
                        else
                            throw new System.Security.Cryptography.CryptographicException(ex.Message, ex);
                    }

                    tempfile.Dispose(); //Remove the encrypted file
                    //Wrap the new file as a temp file
                    tempfile = new Duplicati.Library.Utility.TempFile(filename);
                }

                //Manifest version 1+2 has hashes WITHOUT encryption
                if (manifest != null && manifest.Version <= 2)
                {
                    if (hash != null && Utility.Utility.CalculateHash(tempfile) != hash.Hash)
                        throw new HashMismathcException(string.Format(Strings.BackendWrapper.HashMismatchError, remote.Filename, hash.Hash, Utility.Utility.CalculateHash(tempfile)));

                    if (!string.IsNullOrEmpty(m_options.SignatureCachePath) && remote is SignatureEntry)
                    {
                        string cachefilename = System.IO.Path.Combine(m_options.SignatureCachePath, m_cachefilenamestrategy.GenerateFilename(remote));
                        try { System.IO.File.Copy(tempfile, cachefilename, true); }
                        catch (Exception ex) { m_statistics.LogWarning(string.Format(Strings.BackendWrapper.SaveCacheFileError, cachefilename), ex); }
                    }
                }

                lastEx = null;
                tempfile.Protected = true; //Don't delete it

                if (m_backendInterfaceLogger != null)
                {
                    if (remote is ManifestEntry)
                        m_backendInterfaceLogger.RegisterGet(remote.Fileentry, true, System.IO.File.ReadAllText(tempfile));
                    else
                        m_backendInterfaceLogger.RegisterGet(remote.Fileentry, true, null);
                }
            }
            finally
            {
                // Deletes the scratch file unless it was marked Protected above
                try
                {
                    if (tempfile != null)
                        tempfile.Dispose();
                }
                catch { }
            }
        }
        catch (System.Threading.ThreadAbortException tex)
        {
            // Never retry on thread abort; log and propagate immediately
            if (m_backendInterfaceLogger != null)
                m_backendInterfaceLogger.RegisterGet(remote.Fileentry, false, tex.ToString());
            throw;
        }
        catch (Exception ex)
        {
            lastEx = ex;
            m_statistics.LogRetryAttempt(ex.Message, ex);
            if (m_backendInterfaceLogger != null)
                m_backendInterfaceLogger.RegisterGet(remote.Fileentry, false, ex.ToString());

            // Drop the backend instance so the next attempt starts from a fresh connection
            DisposeBackend();

            retries--;
            if (retries > 0 && m_options.RetryDelay.Ticks > 0)
                System.Threading.Thread.Sleep(m_options.RetryDelay);
        }
    } while (lastEx != null && retries > 0);

    if (lastEx != null)
        if (lastEx is HashMismathcException)
            throw lastEx;
        else if (lastEx is System.Security.Cryptography.CryptographicException)
            throw lastEx;
        else
            throw new Exception(string.Format(Strings.BackendWrapper.FileDownloadError2, filename, lastEx.Message), lastEx);

    m_statistics.AddBytesDownloaded(new System.IO.FileInfo(filename).Length);
}
/// <summary>
/// Builds a signed update package: zips the contents of <paramref name="inputfolder"/>
/// (embedding an RSA-signed per-file manifest) into "package.zip" in
/// <paramref name="outputfolder"/>, and writes a second signed manifest file next to
/// it carrying the package MD5/SHA256 and sizes for remote distribution.
/// </summary>
/// <param name="key">The RSA private key used to sign both manifests.</param>
/// <param name="inputfolder">Folder whose file tree becomes the package contents.</param>
/// <param name="outputfolder">Destination folder for package.zip and the manifest; must exist.</param>
/// <param name="manifest">Optional path to the base manifest; defaults to UPDATE_MANIFEST_FILENAME inside the input folder.</param>
public static void CreateUpdatePackage(System.Security.Cryptography.RSACryptoServiceProvider key, string inputfolder, string outputfolder, string manifest = null)
{
    // Read the existing manifest
    UpdateInfo remoteManifest;

    var manifestpath = manifest ?? System.IO.Path.Combine(inputfolder, UPDATE_MANIFEST_FILENAME);

    using(var s = System.IO.File.OpenRead(manifestpath))
    using(var sr = new System.IO.StreamReader(s))
    using(var jr = new Newtonsoft.Json.JsonTextReader(sr))
        remoteManifest = new Newtonsoft.Json.JsonSerializer().Deserialize<UpdateInfo>(jr);

    if (remoteManifest.Files == null)
        remoteManifest.Files = new FileEntry[0];

    if (remoteManifest.ReleaseTime.Ticks == 0)
        remoteManifest.ReleaseTime = DateTime.UtcNow;

    // Entries flagged "Ignore" stay listed in the manifest but are excluded from the archive
    var ignoreFiles = (from n in remoteManifest.Files where n.Ignore select n).ToArray();
    var ignoreMap = ignoreFiles.ToDictionary(k => k.Path, k => "", Duplicati.Library.Utility.Utility.ClientFilenameStringComparer);

    // Reset fields that are recomputed below
    remoteManifest.MD5 = null;
    remoteManifest.SHA256 = null;
    remoteManifest.Files = null;
    remoteManifest.UncompressedSize = 0;

    var localManifest = remoteManifest.Clone();
    localManifest.RemoteURLS = null;

    inputfolder = Duplicati.Library.Utility.Utility.AppendDirSeparator(inputfolder);
    var baselen = inputfolder.Length;
    var dirsep = System.IO.Path.DirectorySeparatorChar.ToString();

    // The manifest is never packed as a regular file; a signed copy is inserted explicitly below
    ignoreMap.Add(UPDATE_MANIFEST_FILENAME, "");

    var md5 = System.Security.Cryptography.MD5.Create();
    var sha256 = System.Security.Cryptography.SHA256.Create();

    Func<string, string> computeMD5 = (path) =>
    {
        md5.Initialize();
        using(var fs = System.IO.File.OpenRead(path))
            return Convert.ToBase64String(md5.ComputeHash(fs));
    };

    Func<string, string> computeSHA256 = (path) =>
    {
        sha256.Initialize();
        using(var fs = System.IO.File.OpenRead(path))
            return Convert.ToBase64String(sha256.ComputeHash(fs));
    };

    // Build a zip
    using (var archive_temp = new Duplicati.Library.Utility.TempFile())
    {
        using (var zipfile = new Duplicati.Library.Compression.FileArchiveZip(archive_temp, new Dictionary<string, string>()))
        {
            // Adds the file to the archive and accumulates the uncompressed size;
            // returns true when the entry (file or directory) belongs in the manifest
            Func<string, string, bool> addToArchive = (path, relpath) =>
            {
                if (ignoreMap.ContainsKey(relpath))
                    return false;

                // Directories carry a trailing separator; they get a manifest entry but no archive data
                if (path.EndsWith(dirsep))
                    return true;

                using (var source = System.IO.File.OpenRead(path))
                // Fix: stamp archive entries with the last-WRITE time; previously the last-ACCESS time was used
                using (var target = zipfile.CreateFile(relpath, Duplicati.Library.Interface.CompressionHint.Compressible, System.IO.File.GetLastWriteTimeUtc(path)))
                {
                    source.CopyTo(target);
                    remoteManifest.UncompressedSize += source.Length;
                }

                return true;
            };

            // Build the update manifest
            localManifest.Files =
                (from fse in Duplicati.Library.Utility.Utility.EnumerateFileSystemEntries(inputfolder)
                 let relpath = fse.Substring(baselen)
                 where addToArchive(fse, relpath)
                 select new FileEntry() {
                     Path = relpath,
                     // Fix: this field previously recorded File.GetLastAccessTimeUtc despite being named LastWriteTime
                     LastWriteTime = System.IO.File.GetLastWriteTimeUtc(fse),
                     MD5 = fse.EndsWith(dirsep) ? null : computeMD5(fse),
                     SHA256 = fse.EndsWith(dirsep) ? null : computeSHA256(fse)
                 })
                .Union(ignoreFiles).ToArray();

            // Write a signed manifest with the files into the archive
            using (var ms = new System.IO.MemoryStream())
            using (var sw = new System.IO.StreamWriter(ms))
            {
                new Newtonsoft.Json.JsonSerializer().Serialize(sw, localManifest);
                sw.Flush();

                using (var ms2 = new System.IO.MemoryStream())
                {
                    SignatureReadingStream.CreateSignedStream(ms, ms2, key);
                    ms2.Position = 0;
                    using (var sigfile = zipfile.CreateFile(UPDATE_MANIFEST_FILENAME, Duplicati.Library.Interface.CompressionHint.Compressible, DateTime.UtcNow))
                        ms2.CopyTo(sigfile);
                }
            }
        }

        // Record the finished archive's size and hashes in the remote manifest
        remoteManifest.CompressedSize = new System.IO.FileInfo(archive_temp).Length;
        remoteManifest.MD5 = computeMD5(archive_temp);
        remoteManifest.SHA256 = computeSHA256(archive_temp);

        System.IO.File.Move(archive_temp, System.IO.Path.Combine(outputfolder, "package.zip"));
    }

    // Write a signed manifest for upload, next to the package
    using(var tf = new Duplicati.Library.Utility.TempFile())
    {
        using (var ms = new System.IO.MemoryStream())
        using (var sw = new System.IO.StreamWriter(ms))
        {
            new Newtonsoft.Json.JsonSerializer().Serialize(sw, remoteManifest);
            sw.Flush();

            using (var fs = System.IO.File.Create(tf))
                SignatureReadingStream.CreateSignedStream(ms, fs, key);
        }

        System.IO.File.Move(tf, System.IO.Path.Combine(outputfolder, UPDATE_MANIFEST_FILENAME));
    }
}
/// <summary>
/// HTTP handler that creates a new backup definition from the JSON "data" form
/// field. With "temporary"=true the backup is registered as temporary with a
/// fresh temp-file database path; otherwise it is stored permanently after an
/// (optional) existing-database lookup and a case-insensitive name-uniqueness
/// check. Responds with {status, ID} on success or an error report on failure.
/// </summary>
/// <param name="request">The incoming HTTP request; reads the "data", "temporary" and "existing_db" form values.</param>
/// <param name="response">The HTTP response used for error reporting.</param>
/// <param name="session">The HTTP session (unused).</param>
/// <param name="bw">Writer used to emit the JSON response body.</param>
private void AddBackup(HttpServer.IHttpRequest request, HttpServer.IHttpResponse response, HttpServer.Sessions.IHttpSession session, BodyWriter bw)
{
    var str = request.Form["data"].Value;
    if (string.IsNullOrWhiteSpace(str))
    {
        ReportError(response, bw, "Missing backup object");
        return;
    }

    AddOrUpdateBackupData data = null;
    try
    {
        data = Serializer.Deserialize <AddOrUpdateBackupData>(new StringReader(str));
        if (data.Backup == null)
        {
            ReportError(response, bw, "Data object had no backup entry");
            return;
        }

        // Never trust a client-supplied ID; a new one is assigned on registration
        data.Backup.ID = null;

        if (Duplicati.Library.Utility.Utility.ParseBool(request.Form["temporary"].Value, false))
        {
            // NOTE(review): the TempFile is disposed immediately, so only its unique
            // path string is kept and the file itself is removed — presumably this is
            // just a unique-path generator; confirm the DB file is created lazily later.
            using (var tf = new Duplicati.Library.Utility.TempFile())
                data.Backup.DBPath = tf;

            Program.DataConnection.RegisterTemporaryBackup(data.Backup);

            bw.OutputOK(new { status = "OK", ID = data.Backup.ID });
        }
        else
        {
            if (Library.Utility.Utility.ParseBool(request.Form["existing_db"].Value, false))
            {
                data.Backup.DBPath = Library.Main.DatabaseLocator.GetDatabasePath(data.Backup.TargetURL, null, false, false);
                if (string.IsNullOrWhiteSpace(data.Backup.DBPath))
                {
                    throw new Exception("Unable to find remote db path?");
                }
            }

            // Serialize the name-uniqueness check and the insert to avoid duplicate names
            lock (Program.DataConnection.m_lock)
            {
                if (Program.DataConnection.Backups.Where(x => x.Name.Equals(data.Backup.Name, StringComparison.InvariantCultureIgnoreCase)).Any())
                {
                    ReportError(response, bw, "There already exists a backup with the name: " + data.Backup.Name);
                    return;
                }

                Program.DataConnection.AddOrUpdateBackupAndSchedule(data.Backup, data.Schedule);
            }

            bw.OutputOK(new { status = "OK", ID = data.Backup.ID });
        }
    }
    catch (Exception ex)
    {
        // data == null means the JSON never deserialized; otherwise saving failed
        if (data == null)
        {
            ReportError(response, bw, string.Format("Unable to parse backup or schedule object: {0}", ex.Message));
        }
        else
        {
            ReportError(response, bw, string.Format("Unable to save schedule or backup object: {0}", ex.Message));
        }
    }
}
/// <summary>
/// Runs one backend test pass against the backend URL in args[0]: lists the
/// remote folder (optionally creating or cleaning it), uploads a set of random
/// files with random names (optionally preceded by dummy overwrite uploads),
/// verifies the remote listing, downloads everything back and compares SHA256
/// hashes, then deletes the files. Problems are reported to the console with a
/// "***" prefix rather than thrown.
/// </summary>
/// <param name="args">Positional arguments; args[0] is the backend URL.</param>
/// <param name="options">Parsed --name=value options controlling file counts, sizes, name lengths and behavior flags.</param>
/// <param name="first">True on the first pass; enables the --auto-clean step.</param>
/// <returns>False when the test could not run (unknown backend or non-empty remote folder); true otherwise.</returns>
static bool Run(List <string> args, Dictionary <string, string> options, bool first)
{
    // Remote filenames are drawn from this alphabet; --extended-chars replaces the default extension
    string allowedChars = ValidFilenameChars;
    if (options.ContainsKey("extended-chars"))
    {
        allowedChars += options["extended-chars"];
    }
    else
    {
        allowedChars += ExtendedChars;
    }

    bool autoCreateFolders = Library.Utility.Utility.ParseBoolOption(options, "auto-create-folder");

    Library.Interface.IBackend backend = Library.DynamicLoader.BackendLoader.GetBackend(args[0], options);
    if (backend == null)
    {
        Console.WriteLine("Unsupported backend");
        Console.WriteLine();
        Console.WriteLine("Supported backends: " + string.Join(",", Duplicati.Library.DynamicLoader.BackendLoader.Keys));
        return(false);
    }

    // Load and configure generic modules the same way the main program does
    string disabledModulesValue;
    string enabledModulesValue;
    options.TryGetValue("enable-module", out enabledModulesValue);
    options.TryGetValue("disable-module", out disabledModulesValue);
    string[] enabledModules = enabledModulesValue == null ? new string[0] : enabledModulesValue.Trim().ToLower().Split(',');
    string[] disabledModules = disabledModulesValue == null ? new string[0] : disabledModulesValue.Trim().ToLower().Split(',');

    List <Library.Interface.IGenericModule> loadedModules = new List <IGenericModule>();
    foreach (Library.Interface.IGenericModule m in Library.DynamicLoader.GenericLoader.Modules)
    {
        if (Array.IndexOf <string>(disabledModules, m.Key.ToLower()) < 0 && (m.LoadAsDefault || Array.IndexOf <string>(enabledModules, m.Key.ToLower()) >= 0))
        {
            m.Configure(options);
            loadedModules.Add(m);
        }
    }

    try
    {
        List <Library.Interface.IFileEntry> curlist = null;
        try
        {
            curlist = backend.List();
        }
        catch (FolderMissingException fex)
        {
            // Optionally create the missing remote folder and retry the listing once
            if (autoCreateFolders)
            {
                try
                {
                    if (backend is IBackend_v2)
                    {
                        ((IBackend_v2)backend).CreateFolder();
                    }

                    curlist = backend.List();
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Autocreate folder failed with message: " + ex.Message);
                }
            }

            if (curlist == null)
            {
                throw fex;
            }
        }

        // The test requires an empty remote folder; --auto-clean plus --force empties it on the first pass
        foreach (Library.Interface.IFileEntry fe in curlist)
        {
            if (!fe.IsFolder)
            {
                if (Library.Utility.Utility.ParseBoolOption(options, "auto-clean") && first)
                {
                    if (Library.Utility.Utility.ParseBoolOption(options, "force"))
                    {
                        Console.WriteLine("Auto clean, removing file: {0}", fe.Name);
                        backend.Delete(fe.Name);
                        continue;
                    }
                    else
                    {
                        Console.WriteLine("Specify the --force flag to actually delete files");
                    }
                }

                Console.WriteLine("*** Remote folder is not empty, aborting");
                return(false);
            }
        }

        // Test parameters and their defaults (overridable via options)
        int number_of_files = 10;
        int min_file_size = 1024;
        int max_file_size = 1024 * 1024 * 50;
        int min_filename_size = 5;
        int max_filename_size = 80;
        bool disableStreaming = Library.Utility.Utility.ParseBoolOption(options, "disable-streaming-transfers");
        bool skipOverwriteTest = Library.Utility.Utility.ParseBoolOption(options, "skip-overwrite-test");

        if (options.ContainsKey("number-of-files"))
        {
            number_of_files = int.Parse(options["number-of-files"]);
        }

        if (options.ContainsKey("min-file-size"))
        {
            // NOTE(review): min-file-size parses with "mb" as default unit while max-file-size
            // (below) uses the overload without one — confirm this asymmetry is intended
            min_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["min-file-size"], "mb");
        }

        if (options.ContainsKey("max-file-size"))
        {
            max_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["max-file-size"]);
        }

        if (options.ContainsKey("min-filename-length"))
        {
            min_filename_size = int.Parse(options["min-filename-length"]);
        }

        if (options.ContainsKey("max-filename-length"))
        {
            max_filename_size = int.Parse(options["max-filename-length"]);
        }

        Random rnd = new Random();
        System.Security.Cryptography.SHA256 sha = System.Security.Cryptography.SHA256.Create();

        //Create random files
        using (Library.Utility.TempFolder tf = new Duplicati.Library.Utility.TempFolder())
        {
            List <TempFile> files = new List <TempFile>();
            for (int i = 0; i < number_of_files; i++)
            {
                // Random remote filename from the allowed alphabet
                StringBuilder filename = new StringBuilder();
                int filenamelen = rnd.Next(min_filename_size, max_filename_size);
                for (int j = 0; j < filenamelen; j++)
                {
                    filename.Append(allowedChars[rnd.Next(0, allowedChars.Length)]);
                }

                string localfilename = CreateRandomFile(tf, i, min_file_size, max_file_size, rnd);

                //Calculate local hash and length
                using (System.IO.FileStream fs = new System.IO.FileStream(localfilename, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                    files.Add(new TempFile(filename.ToString(), localfilename, sha.ComputeHash(fs), fs.Length));
            }

            byte[] dummyFileHash = null;
            if (!skipOverwriteTest)
            {
                Console.WriteLine("Uploading wrong files ...");
                using (Library.Utility.TempFile dummy = new Library.Utility.TempFile(CreateRandomFile(tf, files.Count, 1024, 2048, rnd)))
                {
                    using (System.IO.FileStream fs = new System.IO.FileStream(dummy, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                        dummyFileHash = sha.ComputeHash(fs);

                    //Upload a dummy file for entry 0 and the last one, they will be replaced by the real files afterwards
                    //We upload entry 0 twice just to try to freak any internal cache list
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming);
                    Uploadfile(dummy, files.Count - 1, files[files.Count - 1].remotefilename, backend, disableStreaming);
                }
            }

            Console.WriteLine("Uploading files ...");

            for (int i = 0; i < files.Count; i++)
            {
                Uploadfile(files[i].localfilename, i, files[i].remotefilename, backend, disableStreaming);
            }

            Console.WriteLine("Verifying file list ...");

            // Cross-check the remote listing against what was uploaded (both directions)
            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
            {
                if (!fe.IsFolder)
                {
                    bool found = false;
                    foreach (TempFile tx in files)
                    {
                        if (tx.remotefilename == fe.Name)
                        {
                            if (tx.found)
                            {
                                Console.WriteLine("*** File with name {0} was found more than once", tx.remotefilename);
                            }

                            found = true;
                            tx.found = true;

                            // Size 0 or negative means the backend does not report sizes
                            if (fe.Size > 0 && tx.length != fe.Size)
                            {
                                Console.WriteLine("*** File with name {0} has size {1} but the size was reported as {2}", tx.remotefilename, tx.length, fe.Size);
                            }

                            break;
                        }
                    }

                    if (!found)
                    {
                        Console.WriteLine("*** File with name {0} was found on server but not uploaded!", fe.Name);
                    }
                }
            }

            foreach (TempFile tx in files)
            {
                if (!tx.found)
                {
                    Console.WriteLine("*** File with name {0} was uploaded but not found afterwards", tx.remotefilename);
                }
            }

            Console.WriteLine("Downloading files");

            // Download each file (streaming when supported) and verify its hash
            for (int i = 0; i < files.Count; i++)
            {
                using (Duplicati.Library.Utility.TempFile cf = new Duplicati.Library.Utility.TempFile())
                {
                    Exception e = null;
                    Console.Write("Downloading file {0} ... ", i);

                    try
                    {
                        if (backend is Library.Interface.IStreamingBackend && !disableStreaming)
                        {
                            using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None))
                            using (NonSeekableStream nss = new NonSeekableStream(fs))
                                (backend as Library.Interface.IStreamingBackend).Get(files[i].remotefilename, nss);
                        }
                        else
                        {
                            backend.Get(files[i].remotefilename, cf);
                        }

                        e = null;
                    }
                    catch (Exception ex)
                    {
                        e = ex;
                    }

                    if (e != null)
                    {
                        Console.WriteLine("failed\n*** Error: {0}", e.ToString());
                    }
                    else
                    {
                        Console.WriteLine("done");
                    }

                    Console.Write("Checking hash ... ");

                    // Compare against the recorded hash; a dummy-file hash match means the overwrite failed
                    using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Open, System.IO.FileAccess.Read))
                        if (Convert.ToBase64String(sha.ComputeHash(fs)) != Convert.ToBase64String(files[i].hash))
                        {
                            if (dummyFileHash != null && Convert.ToBase64String(sha.ComputeHash(fs)) == Convert.ToBase64String(dummyFileHash))
                            {
                                Console.WriteLine("failed\n*** Downloaded file was the dummy file");
                            }
                            else
                            {
                                Console.WriteLine("failed\n*** Downloaded file was corrupt");
                            }
                        }
                        else
                        {
                            Console.WriteLine("done");
                        }
                }
            }

            Console.WriteLine("Deleting files...");

            foreach (TempFile tx in files)
            {
                try
                {
                    backend.Delete(tx.remotefilename);
                }
                catch (Exception ex)
                {
                    Console.WriteLine("*** Failed to delete file {0}, message: {1}", tx.remotefilename, ex.ToString());
                }
            }

            // The remote folder should be empty again after cleanup
            curlist = backend.List();
            foreach (Library.Interface.IFileEntry fe in curlist)
            {
                if (!fe.IsFolder)
                {
                    Console.WriteLine("*** Remote folder contains {0} after cleanup", fe.Name);
                }
            }
        }
    }
    finally
    {
        foreach (Library.Interface.IGenericModule m in loadedModules)
        {
            if (m is IDisposable)
            {
                ((IDisposable)m).Dispose();
            }
        }
    }

    return(true);
}
/// <summary>
/// Executes the operation described by <paramref name="data"/> against its backup
/// configuration and returns the operation result, updating backup metadata on the way.
/// </summary>
/// <param name="data">The task describing the backup and the operation to run</param>
/// <param name="fromQueue">True when invoked from the scheduler queue; enables progress
/// broadcasting and suppresses exception propagation (errors are logged instead)</param>
/// <returns>The operation result, or null for unknown operations or logged queue failures</returns>
public static Duplicati.Library.Interface.IBasicResults Run(IRunnerData data, bool fromQueue)
{
    var backup = data.Backup;

    // Make sure the metadata dictionary exists so UpdateMetadata can write into it
    if (backup.Metadata == null)
        backup.Metadata = new Dictionary<string, string>();

    try
    {
        var options = ApplyOptions(backup, data.Operation, GetCommonOptions(backup, data.Operation));
        var sink = new MessageSink(data.TaskID, backup.ID);

        // Only queued runs publish live progress to the status event system
        if (fromQueue)
        {
            Program.GenerateProgressState = () => sink.Copy();
            Program.StatusEventNotifyer.SignalNewEvent();
        }

        // Extra options override anything computed from the backup configuration
        if (data.ExtraOptions != null)
            foreach(var k in data.ExtraOptions)
                options[k.Key] = k.Value;

        // Route log output to a per-operation file; both options are consumed here
        // so they are not passed on to the controller.
        if (options.ContainsKey("log-file"))
        {
            var file = options["log-file"];

            string o;
            Library.Logging.LogMessageType level;
            options.TryGetValue("log-level", out o);
            // NOTE(review): if "log-level" is absent or unparsable, level silently falls
            // back to the enum's zero value — presumably intentional; confirm.
            Enum.TryParse<Library.Logging.LogMessageType>(o, true, out level);

            options.Remove("log-file");
            options.Remove("log-level");

            Program.LogHandler.SetOperationFile(file, level);
        }

        using(var controller = new Duplicati.Library.Main.Controller(backup.TargetURL, options, sink))
        {
            // Expose the controller so the running task can be controlled externally
            ((RunnerData)data).Controller = controller;

            switch (data.Operation)
            {
                case DuplicatiOperation.Backup:
                    {
                        var filter = ApplyFilter(backup, data.Operation, GetCommonFilter(backup, data.Operation));
                        // Expand environment variables in each source path and drop empty entries
                        var sources =
                            (from n in backup.Sources
                             let p = SpecialFolders.ExpandEnvironmentVariables(n)
                             where !string.IsNullOrWhiteSpace(p)
                             select p).ToArray();

                        var r = controller.Backup(sources, filter);
                        UpdateMetadata(backup, r);
                        return r;
                    }
                case DuplicatiOperation.List:
                    {
                        var r = controller.List(data.FilterStrings);
                        UpdateMetadata(backup, r);
                        return r;
                    }
                case DuplicatiOperation.Repair:
                    {
                        var r = controller.Repair();
                        UpdateMetadata(backup, r);
                        return r;
                    }
                case DuplicatiOperation.Remove:
                    {
                        var r = controller.Delete();
                        UpdateMetadata(backup, r);
                        return r;
                    }
                case DuplicatiOperation.Restore:
                    {
                        var r = controller.Restore(data.FilterStrings);
                        UpdateMetadata(backup, r);
                        return r;
                    }
                case DuplicatiOperation.Verify:
                    {
                        var r = controller.Test();
                        UpdateMetadata(backup, r);
                        return r;
                    }
                case DuplicatiOperation.CreateReport:
                    {
                        using(var tf = new Duplicati.Library.Utility.TempFile())
                        {
                            var r = controller.CreateLogDatabase(tf);
                            // Make the produced report downloadable for 3 days
                            var tempid = Program.DataConnection.RegisterTempFile("create-bug-report", r.TargetPath, DateTime.Now.AddDays(3));

                            // If the report was written at the temp path, keep the
                            // TempFile from deleting it on dispose
                            if (string.Equals(tf, r.TargetPath, Library.Utility.Utility.ClientFilenameStringComparision))
                                tf.Protected = true;

                            Program.DataConnection.RegisterNotification(
                                NotificationType.Information,
                                "Bugreport ready",
                                "Bugreport is ready for download",
                                null,
                                null,
                                "bug-report:created:" + tempid,
                                (n, a) => n
                            );

                            return r;
                        }
                    }
                default:
                    //TODO: Log this
                    return null;
            }
        }
    }
    catch (Exception ex)
    {
        Program.DataConnection.LogError(data.Backup.ID, string.Format("Failed while executing \"{0}\" with id: {1}", data.Operation, data.Backup.ID), ex);
        UpdateMetadataError(data.Backup, ex);

        // Direct (non-queued) callers get the original exception rethrown
        if (!fromQueue)
            throw;

        return null;
    }
    finally
    {
        // Clear the controller reference and close any per-operation log file
        ((RunnerData)data).Controller = null;
        Program.LogHandler.RemoveOperationFile();
    }
}
/// <summary>
/// Uploads the contents of <paramref name="stream"/> to the remote file
/// <paramref name="remotename"/>, sending an MD5 hash header so the server can
/// verify the content, and validating the XML response for upload success.
/// </summary>
/// <param name="remotename">Name of the file on the remote server</param>
/// <param name="stream">Source data; may be wrapped in override/throttling streams</param>
/// <param name="cancelToken">Token used to cancel the stream copy operations</param>
public async Task PutAsync(string remotename, System.IO.Stream stream, CancellationToken cancelToken)
{
    // Some challenges with uploading to Jottacloud:
    // - Jottacloud supports use of a custom header where we can tell the server the MD5 hash of the file
    //   we are uploading, and then it will verify the content of our request against it. But the HTTP
    //   status code we get back indicates success even if there is a mismatch, so we must dig into the
    //   XML response to see if we were able to correctly upload the new content or not. Another issue
    //   is that if the stream is not seek-able we have a challenge pre-calculating MD5 hash on it before
    //   writing it out on the HTTP request stream. And even if the stream is seek-able it may be throttled.
    //   One way to avoid using the throttled stream for calculating the MD5 is to try to get the
    //   underlying stream from the "m_basestream" field, with fall-back to a temporary file.
    // - We can instead choose to upload the data without setting the MD5 hash header. The server will
    //   calculate the MD5 on its side and return it in the response back to us. We can then compare it
    //   with the MD5 hash of the stream (using a MD5CalculatingStream), and if there is a mismatch we can
    //   request the server to remove the file again and throw an exception. But there is a requirement that
    //   we specify the file size in a custom header. And if the stream is not seek-able we are not able
    //   to use stream.Length, so we are back at square one.

    Duplicati.Library.Utility.TempFile tmpFile = null;

    // Unwrap any OverrideableStream layers via the private "m_basestream" field,
    // so the MD5 is computed on the raw data (and without throttling).
    var baseStream = stream;
    while (baseStream is Duplicati.Library.Utility.OverrideableStream)
    {
        baseStream = typeof(Duplicati.Library.Utility.OverrideableStream).GetField("m_basestream", System.Reflection.BindingFlags.DeclaredOnly | System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.NonPublic).GetValue(baseStream) as System.IO.Stream;
    }

    if (baseStream == null)
    {
        throw new Exception(string.Format("Unable to unwrap stream from: {0}", stream.GetType()));
    }

    string md5Hash;
    if (baseStream.CanSeek)
    {
        // Hash in place, then rewind to the original position for the upload
        var originalPosition = baseStream.Position;
        using (var md5 = System.Security.Cryptography.MD5.Create())
            md5Hash = Library.Utility.Utility.ByteArrayAsHexString(md5.ComputeHash(baseStream));
        baseStream.Position = originalPosition;
    }
    else
    {
        // No seeking possible: spool to a temp file while hashing, then upload from the file
        tmpFile = new Duplicati.Library.Utility.TempFile();
        using (var os = System.IO.File.OpenWrite(tmpFile))
        using (var md5 = new Utility.MD5CalculatingStream(baseStream))
        {
            await Utility.Utility.CopyStreamAsync(md5, os, true, cancelToken, m_copybuffer);
            md5Hash = md5.GetFinalHashString();
        }
        // NOTE(review): this FileStream is never explicitly disposed; the finally block
        // only disposes tmpFile (deleting the file) while the stream may still be open —
        // consider closing the stream first. TODO confirm intended.
        stream = System.IO.File.OpenRead(tmpFile);
    }

    try
    {
        // Create request, with query parameter, and a few custom headers.
        // NB: If we wanted to we could send the same POST request as below but without the file contents
        // and with "cphash=[md5Hash]" as the only query parameter. Then we will get an HTTP 200 (OK) response
        // if an identical file already exists, and we can skip uploading the new file. We will get
        // HTTP 404 (Not Found) if the file does not exist or it exists with a different hash, in which
        // case we must send a new request to upload the new content.
        var fileSize = stream.Length;
        var req = CreateRequest(System.Net.WebRequestMethods.Http.Post, remotename, "umode=nomultipart", true);
        req.Headers.Add("JMd5", md5Hash); // Not required, but it will make the server verify the content and mark the file as corrupt if there is a mismatch.
        req.Headers.Add("JSize", fileSize.ToString()); // Required; the server marks the file as incomplete if the uploaded byte count differs from this size.
        // File time stamp headers: Since we are working with a stream here we do not know the local file's timestamps,
        // and then we can just omit the JCreated and JModified and let the server automatically set the current time.
        //req.Headers.Add("JCreated", timeCreated);
        //req.Headers.Add("JModified", timeModified);
        req.ContentType = "application/octet-stream";
        req.ContentLength = fileSize;

        // Write post data request
        var areq = new Utility.AsyncHttpRequest(req);
        using (var rs = areq.GetRequestStream())
            await Utility.Utility.CopyStreamAsync(stream, rs, true, cancelToken, m_copybuffer);

        // Send request, and check response
        using (var resp = (System.Net.HttpWebResponse)areq.GetResponse())
        {
            if (resp.StatusCode != System.Net.HttpStatusCode.Created)
            {
                throw new System.Net.WebException(Strings.Jottacloud.FileUploadError, null, System.Net.WebExceptionStatus.ProtocolError, resp);
            }

            // Request seems to be successful, but we must verify the response XML content to be sure that the file
            // was correctly uploaded: The server will verify the JSize header and mark the file as incomplete if
            // there was a mismatch, and it will verify the JMd5 header and mark the file as corrupt if there was a
            // hash mismatch. The returned XML contains a "file" element, and if upload was error free it contains a
            // single child element "currentRevision", which has a "state" child element with the string "COMPLETED".
            // If there was a problem we should have a "latestRevision" child element, and this will have state with
            // value "INCOMPLETE" or "CORRUPT". If the file was new or had no previous complete versions the
            // latestRevision will be the only child, but if not there may also be a "currentRevision" representing
            // the previous complete version - and then we need to detect the case where our upload failed but there
            // was an existing complete version!
            using (var rs = areq.GetResponseStream())
            {
                var doc = new System.Xml.XmlDocument();
                try
                {
                    doc.Load(rs);
                }
                catch (System.Xml.XmlException)
                {
                    throw new System.Net.WebException(Strings.Jottacloud.FileUploadError, System.Net.WebExceptionStatus.ProtocolError);
                }

                bool uploadCompletedSuccessfully = false;
                var xFile = doc["file"];
                if (xFile != null)
                {
                    // A "latestRevision" element indicates a failed upload, so only
                    // consult "currentRevision" when it is absent.
                    var xRevState = xFile.SelectSingleNode("latestRevision");
                    if (xRevState == null)
                    {
                        xRevState = xFile.SelectSingleNode("currentRevision/state");
                        if (xRevState != null)
                        {
                            uploadCompletedSuccessfully = xRevState.InnerText == "COMPLETED"; // Success: There is no "latestRevision", only a "currentRevision" (and it specifies the file is complete, but I think it always will).
                        }
                    }
                }

                if (!uploadCompletedSuccessfully) // Report error (and we just let the incomplete/corrupt file revision stay on the server..)
                {
                    throw new System.Net.WebException(Strings.Jottacloud.FileUploadError, System.Net.WebExceptionStatus.ProtocolError);
                }
            }
        }
    }
    finally
    {
        // Remove the spool file, if any; failures here are deliberately ignored
        try
        {
            if (tmpFile != null)
            {
                tmpFile.Dispose();
            }
        }
        catch
        {
        }
    }
}
/// <summary>
/// Handles creation of a new backup configuration from a serialized request body,
/// either as a temporary (unsaved) backup or as a persisted backup with schedule.
/// </summary>
/// <param name="request">The incoming HTTP request</param>
/// <param name="response">The HTTP response being built</param>
/// <param name="session">The current session (unused)</param>
/// <param name="bw">The writer used to emit the response body</param>
private void AddBackup(HttpServer.IHttpRequest request, HttpServer.IHttpResponse response, HttpServer.Sessions.IHttpSession session, BodyWriter bw)
{
    var payload = request.Form["data"].Value;
    if (string.IsNullOrWhiteSpace(payload))
    {
        ReportError(response, bw, "Missing backup object");
        return;
    }

    AddOrUpdateBackupData parsed = null;
    try
    {
        parsed = Serializer.Deserialize<AddOrUpdateBackupData>(new StringReader(payload));
        if (parsed.Backup == null)
        {
            ReportError(response, bw, "Data object had no backup entry");
            return;
        }

        // The ID is always assigned server-side; never trust a client-supplied one
        parsed.Backup.ID = null;

        var isTemporary = Duplicati.Library.Utility.Utility.ParseBool(request.Form["temporary"].Value, false);
        if (isTemporary)
        {
            // The TempFile is only used to obtain a fresh unique path for the
            // database; it is disposed again immediately
            using (var tf = new Duplicati.Library.Utility.TempFile())
                parsed.Backup.DBPath = tf;

            Program.DataConnection.RegisterTemporaryBackup(parsed.Backup);
            bw.OutputOK(new { status = "OK", ID = parsed.Backup.ID });
            return;
        }

        if (Library.Utility.Utility.ParseBool(request.Form["existing_db"].Value, false))
        {
            parsed.Backup.DBPath = Library.Main.DatabaseLocator.GetDatabasePath(parsed.Backup.TargetURL, null, false, false);
            if (string.IsNullOrWhiteSpace(parsed.Backup.DBPath))
                throw new Exception("Unable to find remote db path?");
        }

        // The uniqueness check and the insert must happen atomically
        lock (Program.DataConnection.m_lock)
        {
            if (Program.DataConnection.Backups.Where(x => x.Name.Equals(parsed.Backup.Name, StringComparison.InvariantCultureIgnoreCase)).Any())
            {
                ReportError(response, bw, "There already exists a backup with the name: " + parsed.Backup.Name);
                return;
            }

            Program.DataConnection.AddOrUpdateBackupAndSchedule(parsed.Backup, parsed.Schedule);
        }

        bw.OutputOK(new { status = "OK", ID = parsed.Backup.ID });
    }
    catch (Exception ex)
    {
        // Distinguish parse failures (nothing deserialized yet) from save failures
        ReportError(response, bw, parsed == null
            ? string.Format("Unable to parse backup or schedule object: {0}", ex.Message)
            : string.Format("Unable to save schedule or backup object: {0}", ex.Message));
    }
}
/// <summary>
/// Entry point for the update-package creation tool: validates arguments, loads
/// (or creates) the signing key, reads the base manifest, applies command-line
/// overrides to its fields, and builds the signed package in the output folder.
/// </summary>
/// <param name="_args">Raw command line arguments</param>
/// <returns>0 on success, 4 on missing or invalid arguments</returns>
public static int Main(string[] _args)
{
    var args = new List<string>(_args);
    var opts = Duplicati.Library.Utility.CommandLineParser.ExtractOptions(args);

    opts.TryGetValue("input", out string inputfolder);
    opts.TryGetValue("output", out string outputfolder);
    opts.TryGetValue("allow-new-key", out string allowNewKey);
    opts.TryGetValue("keyfile", out keyfile);
    opts.TryGetValue("manifest", out string manifestfile);
    opts.TryGetValue("keyfile-password", out keyfilepassword);

    // Options consumed by this tool; everything else is treated as a manifest field override
    var usedoptions = new [] { "allow-new-key", "input", "output", "keyfile", "manifest", "keyfile-password" };

    if (string.IsNullOrWhiteSpace(inputfolder))
    {
        Console.WriteLine("Missing input folder");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(outputfolder))
    {
        Console.WriteLine("Missing output folder");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(keyfile))
    {
        Console.WriteLine("Missing keyfile");
        return 4;
    }

    if (!System.IO.Directory.Exists(inputfolder))
    {
        Console.WriteLine("Input folder not found");
        return 4;
    }

    // Fix: previously a missing --manifest crashed later with an opaque exception
    // when opening the file; fail up-front with the tool's usual error pattern.
    if (string.IsNullOrWhiteSpace(manifestfile) || !System.IO.File.Exists(manifestfile))
    {
        Console.WriteLine("Manifest file not found");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(keyfilepassword))
    {
        Console.WriteLine("Enter keyfile passphrase: ");
        // Fix: guard against a closed stdin returning null from ReadLine
        keyfilepassword = (Console.ReadLine() ?? string.Empty).Trim();
    }

    if (!System.IO.File.Exists(keyfile))
    {
        Console.WriteLine("Keyfile not found, creating new");

        // Fix: dispose the RSA instance used to generate the new key
        string newkey;
        using (var rsa = RSA.Create())
            newkey = rsa.ToXmlString(true);

        // Store the new private key AES-encrypted with the supplied passphrase
        using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary<string, string>()))
        using (var fs = System.IO.File.OpenWrite(keyfile))
        using (var ms = new System.IO.MemoryStream(System.Text.Encoding.UTF8.GetBytes(newkey)))
            enc.Encrypt(ms, fs);
    }

    if (!System.IO.Directory.Exists(outputfolder))
    {
        System.IO.Directory.CreateDirectory(outputfolder);
    }

    privkey = (RSACryptoServiceProvider)RSA.Create();
    LoadKeyFromFile();

    // Unless --allow-new-key=true, the loaded key must match the manifest's public key
    if (!Boolean.TryParse(allowNewKey, out Boolean newKeyAllowed) || !newKeyAllowed)
    {
        CompareToManifestPublicKey();
    }

    Duplicati.Library.AutoUpdater.UpdateInfo updateInfo;
    using (var fs = System.IO.File.OpenRead(manifestfile))
    using (var sr = new System.IO.StreamReader(fs))
    using (var jr = new Newtonsoft.Json.JsonTextReader(sr))
        updateInfo = new Newtonsoft.Json.JsonSerializer().Deserialize<Duplicati.Library.AutoUpdater.UpdateInfo>(jr);

    // Remaining options map onto UpdateInfo fields by case-insensitive name
    var isopts = new Dictionary<string, string>(opts, StringComparer.InvariantCultureIgnoreCase);
    foreach (var usedopt in usedoptions)
    {
        isopts.Remove(usedopt);
    }

    foreach (var k in updateInfo.GetType().GetFields())
    {
        if (!isopts.ContainsKey(k.Name))
        {
            continue;
        }

        try
        {
            // Convert the string option to the field's declared type
            if (k.FieldType == typeof(string[]))
            {
                k.SetValue(updateInfo, isopts[k.Name].Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries));
            }
            else if (k.FieldType == typeof(Version))
            {
                k.SetValue(updateInfo, new Version(isopts[k.Name]));
            }
            else if (k.FieldType == typeof(int))
            {
                k.SetValue(updateInfo, int.Parse(isopts[k.Name]));
            }
            else if (k.FieldType == typeof(long))
            {
                k.SetValue(updateInfo, long.Parse(isopts[k.Name]));
            }
            else
            {
                k.SetValue(updateInfo, isopts[k.Name]);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("Failed setting {0} to {1}: {2}", k.Name, isopts[k.Name], ex.Message);
        }

        isopts.Remove(k.Name);
    }

    foreach (var opt in isopts)
    {
        Console.WriteLine("Warning! unused option: {0} = {1}", opt.Key, opt.Value);
    }

    // Serialize the patched manifest to a temp file and build the package from it
    using (var tf = new Duplicati.Library.Utility.TempFile())
    {
        using (var fs = System.IO.File.OpenWrite(tf))
        using (var tw = new System.IO.StreamWriter(fs))
            new Newtonsoft.Json.JsonSerializer().Serialize(tw, updateInfo);

        Duplicati.Library.AutoUpdater.UpdaterManager.CreateUpdatePackage(privkey, inputfolder, outputfolder, tf);
    }

    return 0;
}
/// <summary>
/// Executes the operation described by <paramref name="data"/> against its backup
/// configuration and returns the operation result.
/// NOTE(review): this appears to be a near-duplicate of another Run overload in this
/// file (which additionally initializes backup.Metadata) — confirm which is current.
/// </summary>
/// <param name="data">The task describing the backup and the operation to run</param>
/// <param name="fromQueue">True when invoked from the scheduler queue; enables progress
/// broadcasting and suppresses exception propagation (errors are logged instead)</param>
/// <returns>The operation result, or null for unknown operations or logged queue failures</returns>
public static Duplicati.Library.Interface.IBasicResults Run(IRunnerData data, bool fromQueue)
{
    var backup = data.Backup;

    try
    {
        var options = ApplyOptions(backup, data.Operation, GetCommonOptions(backup, data.Operation));
        var sink = new MessageSink(data.TaskID, backup.ID);

        // Only queued runs publish live progress to the status event system
        if (fromQueue)
        {
            Program.GenerateProgressState = () => sink.Copy();
            Program.StatusEventNotifyer.SignalNewEvent();
        }

        // Extra options override anything computed from the backup configuration
        if (data.ExtraOptions != null)
        {
            foreach (var k in data.ExtraOptions)
            {
                options[k.Key] = k.Value;
            }
        }

        // Route log output to a per-operation file; both options are consumed here
        // so they are not passed on to the controller.
        if (options.ContainsKey("log-file"))
        {
            var file = options["log-file"];

            string o;
            Library.Logging.LogMessageType level;
            options.TryGetValue("log-level", out o);
            // NOTE(review): if "log-level" is absent or unparsable, level silently falls
            // back to the enum's zero value — presumably intentional; confirm.
            Enum.TryParse<Library.Logging.LogMessageType>(o, true, out level);

            options.Remove("log-file");
            options.Remove("log-level");

            Program.LogHandler.SetOperationFile(file, level);
        }

        using (var controller = new Duplicati.Library.Main.Controller(backup.TargetURL, options, sink))
        {
            // Expose the controller so the running task can be controlled externally
            ((RunnerData)data).Controller = controller;

            switch (data.Operation)
            {
            case DuplicatiOperation.Backup:
            {
                var filter = ApplyFilter(backup, data.Operation, GetCommonFilter(backup, data.Operation));
                // Expand environment variables in each source path and drop empty entries
                var sources =
                    (from n in backup.Sources
                     let p = SpecialFolders.ExpandEnvironmentVariables(n)
                     where !string.IsNullOrWhiteSpace(p)
                     select p).ToArray();

                var r = controller.Backup(sources, filter);
                UpdateMetadata(backup, r);
                return(r);
            }

            case DuplicatiOperation.List:
            {
                var r = controller.List(data.FilterStrings);
                UpdateMetadata(backup, r);
                return(r);
            }

            case DuplicatiOperation.Repair:
            {
                var r = controller.Repair();
                UpdateMetadata(backup, r);
                return(r);
            }

            case DuplicatiOperation.Remove:
            {
                var r = controller.Delete();
                UpdateMetadata(backup, r);
                return(r);
            }

            case DuplicatiOperation.Restore:
            {
                var r = controller.Restore(data.FilterStrings);
                UpdateMetadata(backup, r);
                return(r);
            }

            case DuplicatiOperation.Verify:
            {
                var r = controller.Test();
                UpdateMetadata(backup, r);
                return(r);
            }

            case DuplicatiOperation.CreateReport:
            {
                using (var tf = new Duplicati.Library.Utility.TempFile())
                {
                    var r = controller.CreateLogDatabase(tf);
                    // Make the produced report downloadable for 3 days
                    var tempid = Program.DataConnection.RegisterTempFile("create-bug-report", r.TargetPath, DateTime.Now.AddDays(3));

                    // If the report was written at the temp path, keep the
                    // TempFile from deleting it on dispose
                    if (string.Equals(tf, r.TargetPath, Library.Utility.Utility.ClientFilenameStringComparision))
                    {
                        tf.Protected = true;
                    }

                    Program.DataConnection.RegisterNotification(
                        NotificationType.Information,
                        "Bugreport ready",
                        "Bugreport is ready for download",
                        null,
                        null,
                        "bug-report:created:" + tempid,
                        (n, a) => n
                        );

                    return(r);
                }
            }

            default:
                //TODO: Log this
                return(null);
            }
        }
    }
    catch (Exception ex)
    {
        Program.DataConnection.LogError(data.Backup.ID, string.Format("Failed while executing \"{0}\" with id: {1}", data.Operation, data.Backup.ID), ex);
        UpdateMetadataError(data.Backup, ex);

        // Direct (non-queued) callers get the original exception rethrown
        if (!fromQueue)
        {
            throw;
        }

        return(null);
    }
    finally
    {
        // Clear the controller reference and close any per-operation log file
        ((RunnerData)data).Controller = null;
        Program.LogHandler.RemoveOperationFile();
    }
}
/// <summary>
/// Handles POST requests that create a backup configuration: either imports an
/// uploaded file ("import"), registers a temporary in-memory backup, or validates
/// and persists a new backup with its schedule.
/// </summary>
/// <param name="key">The sub-path of the request; "import" selects the upload flow</param>
/// <param name="info">The request context used for input and response output</param>
public void POST(string key, RequestInfo info)
{
    // "import" is handled by a dedicated upload flow
    if ("import".Equals(key, StringComparison.OrdinalIgnoreCase))
    {
        ImportBackup(info);
        return;
    }

    AddOrUpdateBackupData data = null;
    try
    {
        var str = info.Request.Form["data"].Value;
        // Fall back to reading the raw request body when the form field is absent
        if (string.IsNullOrWhiteSpace(str))
        {
            str = new StreamReader(info.Request.Body, System.Text.Encoding.UTF8).ReadToEnd();
        }

        data = Serializer.Deserialize<AddOrUpdateBackupData>(new StringReader(str));

        if (data.Backup == null)
        {
            info.ReportClientError("Data object had no backup entry", System.Net.HttpStatusCode.BadRequest);
            return;
        }

        // The ID is always assigned server-side; never trust a client-supplied one
        data.Backup.ID = null;

        if (Duplicati.Library.Utility.Utility.ParseBool(info.Request.Form["temporary"].Value, false))
        {
            // The TempFile only supplies a fresh unique path for the database;
            // it is disposed again immediately
            using (var tf = new Duplicati.Library.Utility.TempFile())
                data.Backup.DBPath = tf;

            // Normalize missing collections to empty arrays
            data.Backup.Filters = data.Backup.Filters ?? new Duplicati.Server.Serialization.Interface.IFilter[0];
            data.Backup.Settings = data.Backup.Settings ?? new Duplicati.Server.Serialization.Interface.ISetting[0];

            Program.DataConnection.RegisterTemporaryBackup(data.Backup);

            info.OutputOK(new { status = "OK", ID = data.Backup.ID });
        }
        else
        {
            if (Library.Utility.Utility.ParseBool(info.Request.Form["existing_db"].Value, false))
            {
                data.Backup.DBPath = Library.Main.DatabaseLocator.GetDatabasePath(data.Backup.TargetURL, null, false, false);
                if (string.IsNullOrWhiteSpace(data.Backup.DBPath))
                {
                    throw new Exception("Unable to find remote db path?");
                }
            }

            // The uniqueness check, validation, and insert must happen atomically
            lock (Program.DataConnection.m_lock)
            {
                if (Program.DataConnection.Backups.Where(x => x.Name.Equals(data.Backup.Name, StringComparison.OrdinalIgnoreCase)).Any())
                {
                    info.ReportClientError("There already exists a backup with the name: " + data.Backup.Name, System.Net.HttpStatusCode.Conflict);
                    return;
                }

                var err = Program.DataConnection.ValidateBackup(data.Backup, data.Schedule);
                if (!string.IsNullOrWhiteSpace(err))
                {
                    info.ReportClientError(err, System.Net.HttpStatusCode.BadRequest);
                    return;
                }

                Program.DataConnection.AddOrUpdateBackupAndSchedule(data.Backup, data.Schedule);
            }

            info.OutputOK(new { status = "OK", ID = data.Backup.ID });
        }
    }
    catch (Exception ex)
    {
        // Distinguish parse failures (nothing deserialized yet) from save failures
        if (data == null)
        {
            info.ReportClientError(string.Format("Unable to parse backup or schedule object: {0}", ex.Message), System.Net.HttpStatusCode.BadRequest);
        }
        else
        {
            info.ReportClientError(string.Format("Unable to save schedule or backup object: {0}", ex.Message), System.Net.HttpStatusCode.InternalServerError);
        }
    }
}
/// <summary>
/// Builds a signed update package ("package.zip" plus a signed manifest) from the
/// contents of <paramref name="inputfolder"/> and writes both to <paramref name="outputfolder"/>.
/// </summary>
/// <param name="key">The RSA key used to sign both manifests</param>
/// <param name="inputfolder">Folder whose contents are packaged</param>
/// <param name="outputfolder">Destination folder for package.zip and the manifest</param>
/// <param name="manifest">Optional path to the base manifest; defaults to the one inside the input folder</param>
public static void CreateUpdatePackage(System.Security.Cryptography.RSACryptoServiceProvider key, string inputfolder, string outputfolder, string manifest = null)
{
    // Read the existing manifest; it acts as the template for the new package
    UpdateInfo remoteManifest;
    var manifestpath = manifest ?? System.IO.Path.Combine(inputfolder, UPDATE_MANIFEST_FILENAME);

    using (var s = System.IO.File.OpenRead(manifestpath))
    using (var sr = new System.IO.StreamReader(s))
    using (var jr = new Newtonsoft.Json.JsonTextReader(sr))
        remoteManifest = new Newtonsoft.Json.JsonSerializer().Deserialize<UpdateInfo>(jr);

    if (remoteManifest.Files == null)
    {
        remoteManifest.Files = new FileEntry[0];
    }

    if (remoteManifest.ReleaseTime.Ticks == 0)
    {
        remoteManifest.ReleaseTime = DateTime.UtcNow;
    }

    // Entries flagged "Ignore" are excluded from the archive but re-added to the manifest
    var ignoreFiles = (from n in remoteManifest.Files where n.Ignore select n).ToArray();
    var ignoreMap = ignoreFiles.ToDictionary(k => k.Path, k => "", Duplicati.Library.Utility.Utility.ClientFilenameStringComparer);

    // Reset computed fields; they are filled in below
    remoteManifest.MD5 = null;
    remoteManifest.SHA256 = null;
    remoteManifest.Files = null;
    remoteManifest.UncompressedSize = 0;

    // The manifest embedded in the package must not carry the remote URLs
    var localManifest = remoteManifest.Clone();
    localManifest.RemoteURLS = null;

    inputfolder = Duplicati.Library.Utility.Utility.AppendDirSeparator(inputfolder);
    var baselen = inputfolder.Length;
    var dirsep = System.IO.Path.DirectorySeparatorChar.ToString();

    // The manifest itself is written separately, never copied from the input folder
    ignoreMap.Add(UPDATE_MANIFEST_FILENAME, "");

    // Fix: the hash algorithm instances are IDisposable and were previously leaked
    using (var md5 = System.Security.Cryptography.MD5.Create())
    using (var sha256 = System.Security.Cryptography.SHA256.Create())
    {
        Func<string, string> computeMD5 = (path) =>
        {
            md5.Initialize();
            using (var fs = System.IO.File.OpenRead(path))
                return Convert.ToBase64String(md5.ComputeHash(fs));
        };

        Func<string, string> computeSHA256 = (path) =>
        {
            sha256.Initialize();
            using (var fs = System.IO.File.OpenRead(path))
                return Convert.ToBase64String(sha256.ComputeHash(fs));
        };

        // Build the zip archive in a temp file, then move it into place
        using (var archive_temp = new Duplicati.Library.Utility.TempFile())
        {
            using (var zipfile = new Duplicati.Library.Compression.FileArchiveZip(archive_temp, new Dictionary<string, string>()))
            {
                // Copies one entry into the archive; returns false for ignored entries
                // and true otherwise (directory entries are accepted but not stored).
                Func<string, string, bool> addToArchive = (path, relpath) =>
                {
                    if (ignoreMap.ContainsKey(relpath))
                    {
                        return false;
                    }

                    if (path.EndsWith(dirsep))
                    {
                        return true;
                    }

                    using (var source = System.IO.File.OpenRead(path))
                    // Fix: stamp archive entries with the last WRITE time
                    // (previously the last ACCESS time was used by mistake)
                    using (var target = zipfile.CreateFile(relpath, Duplicati.Library.Interface.CompressionHint.Compressible, System.IO.File.GetLastWriteTimeUtc(path)))
                    {
                        source.CopyTo(target);
                        remoteManifest.UncompressedSize += source.Length;
                    }

                    return true;
                };

                // Build the update manifest; note that the where-clause deliberately
                // performs the archive copy as a side effect of the enumeration.
                localManifest.Files =
                    (from fse in Duplicati.Library.Utility.Utility.EnumerateFileSystemEntries(inputfolder)
                     let relpath = fse.Substring(baselen)
                     where addToArchive(fse, relpath)
                     select new FileEntry()
                     {
                         Path = relpath,
                         // Fix: record the last write time, not the last access time
                         LastWriteTime = System.IO.File.GetLastWriteTimeUtc(fse),
                         MD5 = fse.EndsWith(dirsep) ? null : computeMD5(fse),
                         SHA256 = fse.EndsWith(dirsep) ? null : computeSHA256(fse)
                     })
                    .Union(ignoreFiles).ToArray();

                // Write a signed copy of the local manifest into the archive
                using (var ms = new System.IO.MemoryStream())
                using (var sw = new System.IO.StreamWriter(ms))
                {
                    new Newtonsoft.Json.JsonSerializer().Serialize(sw, localManifest);
                    sw.Flush();

                    using (var ms2 = new System.IO.MemoryStream())
                    {
                        SignatureReadingStream.CreateSignedStream(ms, ms2, key);
                        ms2.Position = 0;
                        using (var sigfile = zipfile.CreateFile(UPDATE_MANIFEST_FILENAME, Duplicati.Library.Interface.CompressionHint.Compressible, DateTime.UtcNow))
                            ms2.CopyTo(sigfile);
                    }
                }
            }

            // The archive is complete: record its size and hashes in the remote manifest
            remoteManifest.CompressedSize = new System.IO.FileInfo(archive_temp).Length;
            remoteManifest.MD5 = computeMD5(archive_temp);
            remoteManifest.SHA256 = computeSHA256(archive_temp);

            System.IO.File.Move(archive_temp, System.IO.Path.Combine(outputfolder, "package.zip"));
        }
    }

    // Write a signed manifest for upload next to the package
    using (var tf = new Duplicati.Library.Utility.TempFile())
    {
        using (var ms = new System.IO.MemoryStream())
        using (var sw = new System.IO.StreamWriter(ms))
        {
            new Newtonsoft.Json.JsonSerializer().Serialize(sw, remoteManifest);
            sw.Flush();

            using (var fs = System.IO.File.Create(tf))
                SignatureReadingStream.CreateSignedStream(ms, fs, key);
        }

        System.IO.File.Move(tf, System.IO.Path.Combine(outputfolder, UPDATE_MANIFEST_FILENAME));
    }
}
static bool Run(List <string> args, Dictionary <string, string> options, bool first) { Library.Interface.IBackend backend = Library.DynamicLoader.BackendLoader.GetBackend(args[0], options); if (backend == null) { Console.WriteLine("Unsupported backend"); Console.WriteLine(); Console.WriteLine("Supported backends: " + string.Join(",", Duplicati.Library.DynamicLoader.BackendLoader.Keys)); return(false); } string allowedChars = ValidFilenameChars; if (options.ContainsKey("extended-chars")) { allowedChars += String.IsNullOrEmpty(options["extended-chars"]) ? ExtendedChars : options["extended-chars"]; } bool autoCreateFolders = Library.Utility.Utility.ParseBoolOption(options, "auto-create-folder"); string disabledModulesValue; string enabledModulesValue; options.TryGetValue("enable-module", out enabledModulesValue); options.TryGetValue("disable-module", out disabledModulesValue); string[] enabledModules = enabledModulesValue == null ? new string[0] : enabledModulesValue.Trim().ToLower(CultureInfo.InvariantCulture).Split(','); string[] disabledModules = disabledModulesValue == null ? 
new string[0] : disabledModulesValue.Trim().ToLower(CultureInfo.InvariantCulture).Split(','); List <Library.Interface.IGenericModule> loadedModules = new List <IGenericModule>(); foreach (Library.Interface.IGenericModule m in Library.DynamicLoader.GenericLoader.Modules) { if (!disabledModules.Contains(m.Key, StringComparer.OrdinalIgnoreCase) && (m.LoadAsDefault || enabledModules.Contains(m.Key, StringComparer.OrdinalIgnoreCase))) { m.Configure(options); loadedModules.Add(m); } } try { IEnumerable <Library.Interface.IFileEntry> curlist = null; try { backend.Test(); curlist = backend.List(); } catch (FolderMissingException) { if (autoCreateFolders) { try { backend.CreateFolder(); curlist = backend.List(); } catch (Exception ex) { Console.WriteLine("Autocreate folder failed with message: " + ex.Message); } } if (curlist == null) { throw; } } foreach (Library.Interface.IFileEntry fe in curlist) { if (!fe.IsFolder) { if (Library.Utility.Utility.ParseBoolOption(options, "auto-clean") && first) { if (Library.Utility.Utility.ParseBoolOption(options, "force")) { Console.WriteLine("Auto clean, removing file: {0}", fe.Name); backend.Delete(fe.Name); continue; } else { Console.WriteLine("Specify the --force flag to actually delete files"); } } Console.WriteLine("*** Remote folder is not empty, aborting"); return(false); } } int number_of_files = 10; int min_file_size = 1024; int max_file_size = 1024 * 1024 * 50; int min_filename_size = 5; int max_filename_size = 80; bool disableStreaming = Library.Utility.Utility.ParseBoolOption(options, "disable-streaming-transfers"); bool skipOverwriteTest = Library.Utility.Utility.ParseBoolOption(options, "skip-overwrite-test"); bool trimFilenameSpaces = Library.Utility.Utility.ParseBoolOption(options, "trim-filename-spaces"); if (options.ContainsKey("number-of-files")) { number_of_files = int.Parse(options["number-of-files"]); } if (options.ContainsKey("min-file-size")) { min_file_size = 
(int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["min-file-size"], "mb"); } if (options.ContainsKey("max-file-size")) { max_file_size = (int)Duplicati.Library.Utility.Sizeparser.ParseSize(options["max-file-size"], "mb"); } if (options.ContainsKey("min-filename-length")) { min_filename_size = int.Parse(options["min-filename-length"]); } if (options.ContainsKey("max-filename-length")) { max_filename_size = int.Parse(options["max-filename-length"]); } Random rnd = new Random(); System.Security.Cryptography.SHA256 sha = System.Security.Cryptography.SHA256.Create(); //Create random files using (Library.Utility.TempFolder tf = new Duplicati.Library.Utility.TempFolder()) { List <TempFile> files = new List <TempFile>(); for (int i = 0; i < number_of_files; i++) { string filename = CreateRandomRemoteFileName(min_filename_size, max_filename_size, allowedChars, trimFilenameSpaces, rnd); string localfilename = CreateRandomFile(tf, i, min_file_size, max_file_size, rnd); //Calculate local hash and length using (System.IO.FileStream fs = new System.IO.FileStream(localfilename, System.IO.FileMode.Open, System.IO.FileAccess.Read)) files.Add(new TempFile(filename, localfilename, sha.ComputeHash(fs), fs.Length)); } byte[] dummyFileHash = null; if (!skipOverwriteTest) { Console.WriteLine("Uploading wrong files ..."); using (Library.Utility.TempFile dummy = Library.Utility.TempFile.WrapExistingFile(CreateRandomFile(tf, files.Count, 1024, 2048, rnd))) { using (System.IO.FileStream fs = new System.IO.FileStream(dummy, System.IO.FileMode.Open, System.IO.FileAccess.Read)) dummyFileHash = sha.ComputeHash(fs); //Upload a dummy file for entry 0 and the last one, they will be replaced by the real files afterwards //We upload entry 0 twice just to try to freak any internal cache list Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming); Uploadfile(dummy, 0, files[0].remotefilename, backend, disableStreaming); Uploadfile(dummy, files.Count - 1, files[files.Count - 
1].remotefilename, backend, disableStreaming); } } Console.WriteLine("Uploading files ..."); for (int i = 0; i < files.Count; i++) { Uploadfile(files[i].localfilename, i, files[i].remotefilename, backend, disableStreaming); } TempFile originalRenamedFile = null; string renamedFileNewName = null; IRenameEnabledBackend renameEnabledBackend = backend as IRenameEnabledBackend; if (renameEnabledBackend != null) { // Rename the second file in the list, if there are more than one. If not, just do the first one. int renameIndex = files.Count > 1 ? 1 : 0; originalRenamedFile = files[renameIndex]; renamedFileNewName = CreateRandomRemoteFileName(min_filename_size, max_filename_size, allowedChars, trimFilenameSpaces, rnd); Console.WriteLine("Renaming file {0} from {1} to {2}", renameIndex, originalRenamedFile.remotefilename, renamedFileNewName); renameEnabledBackend.Rename(originalRenamedFile.remotefilename, renamedFileNewName); files[renameIndex] = new TempFile(renamedFileNewName, originalRenamedFile.localfilename, originalRenamedFile.hash, originalRenamedFile.length); } Console.WriteLine("Verifying file list ..."); curlist = backend.List(); foreach (Library.Interface.IFileEntry fe in curlist) { if (!fe.IsFolder) { bool found = false; foreach (TempFile tx in files) { if (tx.remotefilename == fe.Name) { if (tx.found) { Console.WriteLine("*** File with name {0} was found more than once", tx.remotefilename); } found = true; tx.found = true; if (fe.Size > 0 && tx.length != fe.Size) { Console.WriteLine("*** File with name {0} has size {1} but the size was reported as {2}", tx.remotefilename, tx.length, fe.Size); } break; } } if (!found) { if (originalRenamedFile != null && renamedFileNewName != null && originalRenamedFile.remotefilename == fe.Name) { Console.WriteLine("*** File with name {0} was found on server but was supposed to have been renamed to {1}!", fe.Name, renamedFileNewName); } else { Console.WriteLine("*** File with name {0} was found on server but not uploaded!", 
fe.Name); } } } } foreach (TempFile tx in files) { if (!tx.found) { Console.WriteLine("*** File with name {0} was uploaded but not found afterwards", tx.remotefilename); } } Console.WriteLine("Downloading files"); for (int i = 0; i < files.Count; i++) { using (Duplicati.Library.Utility.TempFile cf = new Duplicati.Library.Utility.TempFile()) { Exception e = null; Console.Write("Downloading file {0} ... ", i); try { if (backend is IStreamingBackend streamingBackend && !disableStreaming) { using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None)) using (NonSeekableStream nss = new NonSeekableStream(fs)) streamingBackend.Get(files[i].remotefilename, nss); } else { backend.Get(files[i].remotefilename, cf); } e = null; } catch (Exception ex) { e = ex; } if (e != null) { Console.WriteLine("failed\n*** Error: {0}", e); } else { Console.WriteLine("done"); } Console.Write("Checking hash ... "); using (System.IO.FileStream fs = new System.IO.FileStream(cf, System.IO.FileMode.Open, System.IO.FileAccess.Read)) if (Convert.ToBase64String(sha.ComputeHash(fs)) != Convert.ToBase64String(files[i].hash)) { if (dummyFileHash != null && Convert.ToBase64String(sha.ComputeHash(fs)) == Convert.ToBase64String(dummyFileHash)) { Console.WriteLine("failed\n*** Downloaded file was the dummy file"); } else { Console.WriteLine("failed\n*** Downloaded file was corrupt"); } } else { Console.WriteLine("done"); } }
/// <summary>
/// Handles POST requests of the form "&lt;backup-id&gt;/&lt;operation&gt;" by dispatching
/// to the handler matching the operation name.
/// </summary>
/// <param name="key">The request path, backup id optionally followed by an operation</param>
/// <param name="info">The request context used for reporting results and errors</param>
public void POST(string key, RequestInfo info)
{
    // Split into at most two segments: the backup id and the remainder
    var segments = (key ?? "").Split(new char[] { '/' }, 2);
    var backup = Program.DataConnection.GetBackup(segments.First());
    if (backup == null)
    {
        info.ReportClientError("Invalid or missing backup id");
        return;
    }

    if (segments.Length > 1)
    {
        // Only the first path element after the id names the operation
        var action = segments.Last().Split(new char[] { '/' }).First().ToLowerInvariant();
        switch (action)
        {
            case "deletedb":
                System.IO.File.Delete(backup.DBPath);
                info.OutputOK();
                return;

            case "restore":
                RestoreFiles(backup, info);
                return;

            case "createreport":
                CreateReport(backup, info);
                return;

            case "repair":
                Repair(backup, info);
                return;

            case "repairupdate":
                RepairUpdate(backup, info);
                return;

            case "verify":
                Verify(backup, info);
                return;

            case "start":
            case "run":
                RunBackup(backup, info);
                return;

            case "copytotemp":
                // Deep-clone the backup record via a JSON round-trip
                var clone = Serializer.Deserialize<Database.Backup>(new StringReader(Newtonsoft.Json.JsonConvert.SerializeObject(backup)));

                // Allocate (and immediately delete, via dispose) a temp file so the
                // clone gets a fresh, unused database path
                using (var tf = new Duplicati.Library.Utility.TempFile())
                    clone.DBPath = tf;

                clone.ID = null;
                info.OutputOK(new { status = "OK", ID = Program.DataConnection.RegisterTemporaryBackup(clone) });
                return;
        }
    }

    // No operation segment, or an unrecognized operation name
    info.ReportClientError("Invalid request");
}
/// <summary>
/// Executes the task described by <paramref name="data"/> and returns the operation results.
/// </summary>
/// <param name="data">The task to execute; a <see cref="CustomRunnerTask"/> is run directly and short-circuits the normal flow</param>
/// <param name="fromQueue"><c>true</c> when invoked by the worker queue; progress state is then published, and exceptions are logged instead of rethrown</param>
/// <returns>The operation results, or <c>null</c> for custom tasks, delete operations, unknown operations and failures from the queue</returns>
public static Duplicati.Library.Interface.IBasicResults Run(IRunnerData data, bool fromQueue)
{
    // Custom tasks carry their own Run logic; execute and bail out early
    if (data is CustomRunnerTask)
    {
        try
        {
            var sink = new MessageSink(data.TaskID, null);
            Program.GenerateProgressState = () => sink.Copy();
            Program.StatusEventNotifyer.SignalNewEvent();

            ((CustomRunnerTask)data).Run(sink);
        }
        catch (Exception ex)
        {
            Program.DataConnection.LogError(string.Empty, "Failed while executing custom task", ex);
        }

        return null;
    }

    var backup = data.Backup;
    Duplicati.Library.Utility.TempFolder tempfolder = null;

    if (backup.Metadata == null)
        backup.Metadata = new Dictionary<string, string>();

    try
    {
        var sink = new MessageSink(data.TaskID, backup.ID);
        if (fromQueue)
        {
            Program.GenerateProgressState = () => sink.Copy();
            Program.StatusEventNotifyer.SignalNewEvent();
        }

        // Merge common options, per-operation options and any caller-supplied extras
        var options = ApplyOptions(backup, data.Operation, GetCommonOptions(backup, data.Operation));
        if (data.ExtraOptions != null)
            foreach (var k in data.ExtraOptions)
                options[k.Key] = k.Value;

        // Pack in the system or task config for easy restore
        if (data.Operation == DuplicatiOperation.Backup && options.ContainsKey("store-task-config"))
        {
            var all_tasks = string.Equals(options["store-task-config"], "all", StringComparison.InvariantCultureIgnoreCase) || string.Equals(options["store-task-config"], "*", StringComparison.InvariantCultureIgnoreCase);
            var this_task = Duplicati.Library.Utility.Utility.ParseBool(options["store-task-config"], false);

            // The option is consumed here; it must not reach the controller
            options.Remove("store-task-config");

            if (all_tasks || this_task)
            {
                if (tempfolder == null)
                    tempfolder = new Duplicati.Library.Utility.TempFolder();

                var temppath = System.IO.Path.Combine(tempfolder, "task-setup.json");
                using (var tempfile = Duplicati.Library.Utility.TempFile.WrapExistingFile(temppath))
                {
                    object taskdata = null;
                    if (all_tasks)
                        taskdata = Program.DataConnection.Backups.Where(x => !x.IsTemporary).Select(x => Program.DataConnection.PrepareBackupForExport(Program.DataConnection.GetBackup(x.ID)));
                    else
                        // BUGFIX: the statement terminator was previously misplaced
                        // outside the else-block brace ("} };"), which does not compile
                        taskdata = new[] { Program.DataConnection.PrepareBackupForExport(data.Backup) };

                    using (var fs = System.IO.File.OpenWrite(tempfile))
                    using (var sw = new System.IO.StreamWriter(fs, System.Text.Encoding.UTF8))
                        Serializer.SerializeJson(sw, taskdata, true);

                    // Keep the file from being deleted until the controller has consumed it
                    tempfile.Protected = true;

                    string controlfiles = null;
                    options.TryGetValue("control-files", out controlfiles);
                    if (string.IsNullOrWhiteSpace(controlfiles))
                        controlfiles = tempfile;
                    else
                        controlfiles += System.IO.Path.PathSeparator + tempfile;

                    options["control-files"] = controlfiles;
                }
            }
        }

        // tempfolder may be null; "using (null)" is valid and a no-op
        using (tempfolder)
        using (var controller = new Duplicati.Library.Main.Controller(backup.TargetURL, options, sink))
        {
            // Expose the controller so the task can be paused/stopped from outside
            ((RunnerData)data).Controller = controller;

            switch (data.Operation)
            {
                case DuplicatiOperation.Backup:
                {
                    var filter = ApplyFilter(backup, data.Operation, GetCommonFilter(backup, data.Operation));
                    // Expand environment variables and drop empty source entries
                    var sources = (from n in backup.Sources
                                   let p = SpecialFolders.ExpandEnvironmentVariables(n)
                                   where !string.IsNullOrWhiteSpace(p)
                                   select p).ToArray();

                    var r = controller.Backup(sources, filter);
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.List:
                {
                    var r = controller.List(data.FilterStrings);
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.Repair:
                {
                    var r = controller.Repair(data.FilterStrings == null ? null : new Library.Utility.FilterExpression(data.FilterStrings));
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.RepairUpdate:
                {
                    var r = controller.UpdateDatabaseWithVersions();
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.Remove:
                {
                    var r = controller.Delete();
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.Restore:
                {
                    var r = controller.Restore(data.FilterStrings);
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.Verify:
                {
                    var r = controller.Test();
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.Compact:
                {
                    var r = controller.Compact();
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.CreateReport:
                {
                    using (var tf = new Duplicati.Library.Utility.TempFile())
                    {
                        var r = controller.CreateLogDatabase(tf);
                        // The report is downloadable for 3 days
                        var tempid = Program.DataConnection.RegisterTempFile("create-bug-report", r.TargetPath, DateTime.Now.AddDays(3));

                        // If the report was written to our temp file, keep it alive past the using-block
                        if (string.Equals(tf, r.TargetPath, Library.Utility.Utility.ClientFilenameStringComparision))
                            tf.Protected = true;

                        Program.DataConnection.RegisterNotification(
                            NotificationType.Information,
                            "Bugreport ready",
                            "Bugreport is ready for download",
                            null,
                            null,
                            "bug-report:created:" + tempid,
                            (n, a) => n
                        );

                        return r;
                    }
                }

                case DuplicatiOperation.ListRemote:
                {
                    var r = controller.ListRemote();
                    UpdateMetadata(backup, r);
                    return r;
                }

                case DuplicatiOperation.Delete:
                {
                    if (Library.Utility.Utility.ParseBoolOption(data.ExtraOptions, "delete-remote-files"))
                        controller.DeleteAllRemoteFiles();

                    if (Library.Utility.Utility.ParseBoolOption(data.ExtraOptions, "delete-local-db"))
                    {
                        string dbpath;
                        options.TryGetValue("db-path", out dbpath);

                        if (!string.IsNullOrWhiteSpace(dbpath) && System.IO.File.Exists(dbpath))
                            System.IO.File.Delete(dbpath);
                    }

                    Program.DataConnection.DeleteBackup(backup);
                    Program.Scheduler.Reschedule();
                    return null;
                }

                default:
                    //TODO: Log this
                    return null;
            }
        }
    }
    catch (Exception ex)
    {
        Program.DataConnection.LogError(data.Backup.ID, string.Format("Failed while executing \"{0}\" with id: {1}", data.Operation, data.Backup.ID), ex);
        UpdateMetadataError(data.Backup, ex);
        Library.UsageReporter.Reporter.Report(ex);

        // Interactive callers get the exception; queued runs only log it
        if (!fromQueue)
            throw;

        return null;
    }
    finally
    {
        ((RunnerData)data).Controller = null;
    }
}
/// <summary>
/// Compiles all .resx files for the given culture found under <paramref name="folder"/> into
/// .resources files (via resgen) and links them into a satellite resource assembly (via al).
/// </summary>
/// <param name="folder">Root folder scanned recursively for culture-specific .resx files</param>
/// <param name="excludeFolders">Folders whose .resx files are skipped</param>
/// <param name="namespace">Namespace prefix used for the embedded resource names</param>
/// <param name="assemblyname">Output path of the linked satellite assembly</param>
/// <param name="versionAssembly">Assembly whose version number is copied onto the output</param>
/// <param name="keyfile">Optional strong-name key file used to sign the output</param>
/// <param name="culture">The culture to compile, e.g. "da-DK"</param>
/// <param name="productname">Product/title stamped on the output assembly</param>
/// <param name="fxVer">Framework version string used to select the Windows SDK tool folders</param>
public static void CompileResxFiles(string folder, List<string> excludeFolders, string @namespace, string assemblyname, string versionAssembly, string keyfile, string culture, string productname, string fxVer)
{
    string resgenexe;
    string alexe;
    if (Duplicati.Library.Utility.Utility.IsClientLinux)
    {
        // On Linux/Mono the tools are located via the shell
        resgenexe = ExecuteAndRead("which", "resgen");
        alexe = ExecuteAndRead("which", "al");
    }
    else //Windows
    {
        //Order of these paths is also the search order
        string[] known_sdk_paths = null;
        if (fxVer == "2.0")
        {
            known_sdk_paths = new string[] {
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v2.0.50727\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft.NET\\SDK\\v2.0\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft.NET\\SDK\\v2.0\\bin\\"),
            };
        }
        else if (fxVer == "3.0")
        {
            known_sdk_paths = new string[] {
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v3.0\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v6.0A\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v6.0A\\bin\\"),
            };
        }
        else if (fxVer == "3.5")
        {
            known_sdk_paths = new string[] {
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v3.5\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v7.0A\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v7.0A\\bin\\"),
            };
        }
        else
        {
            // Default: .NET 4.x tool locations
            known_sdk_paths = new string[] {
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v4.0\\"),
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v4.0.30319\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v8.0A\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v8.0A\\bin\\NETFX 4.0 Tools\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v7.1\\Bin\\NETFX 4.0 Tools\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v8.0A\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v8.0A\\bin\\NETFX 4.0 Tools\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v7.1\\Bin\\NETFX 4.0 Tools\\"),
            };
        }

        resgenexe = "resgen.exe";
        alexe = "al.exe";

        // Resolve the bare tool names against the SDK folders; first hit wins
        foreach (var p in known_sdk_paths)
        {
            if (System.IO.File.Exists(System.IO.Path.Combine(p, resgenexe)))
            {
                resgenexe = System.IO.Path.Combine(p, resgenexe);
                break;
            }
        }

        foreach (var p in known_sdk_paths)
        {
            if (System.IO.File.Exists(System.IO.Path.Combine(p, alexe)))
            {
                alexe = System.IO.Path.Combine(p, alexe);
                break;
            }
        }
    }

    if (!System.IO.File.Exists(resgenexe))
    {
        Console.WriteLine("Unable to locate file: {0}", resgenexe);
        Console.WriteLine("This can be fixed by installing a microsoft platform SDK, or visual studio (express is fine)");
        return;
    }

    if (!System.IO.File.Exists(alexe))
    {
        Console.WriteLine("Unable to locate file: {0}", alexe);
        Console.WriteLine("This can be fixed by installing the .Net framework version 2.0");
        return;
    }

    // Normalize Windows-style separators in the keyfile path when running on Linux
    if (!string.IsNullOrEmpty(keyfile) && Duplicati.Library.Utility.Utility.IsClientLinux)
        keyfile = keyfile.Replace("\\", System.IO.Path.DirectorySeparatorChar.ToString());

    List<string> resources = new List<string>();
    folder = Duplicati.Library.Utility.Utility.AppendDirSeparator(folder);

    // Loop-invariant: only files named *.<culture>.resx are compiled
    string culturesuffix = "." + culture.ToLower() + ".resx";

    foreach (string s in Duplicati.Library.Utility.Utility.EnumerateFiles(folder))
    {
        if (s.ToLower().EndsWith(culturesuffix))
        {
            if (excludeFolders.Any(xf => s.ToLower().StartsWith(Duplicati.Library.Utility.Utility.AppendDirSeparator(xf).ToLower())))
                continue;

            string resname = System.IO.Path.ChangeExtension(s, ".resources");

            // Recompile only when the .resources output is missing or stale
            if (!System.IO.File.Exists(resname) || System.IO.File.GetLastWriteTime(resname) < System.IO.File.GetLastWriteTime(s))
            {
                Console.WriteLine("Compiling: " + s);
                System.Diagnostics.ProcessStartInfo pi = new System.Diagnostics.ProcessStartInfo(resgenexe, "\"" + s + "\"");
                pi.CreateNoWindow = true;
                pi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Hidden;
                pi.RedirectStandardOutput = true;
                pi.RedirectStandardError = true;
                pi.UseShellExecute = false;
                pi.WorkingDirectory = System.IO.Path.GetDirectoryName(s);

                using (System.Diagnostics.Process pr = System.Diagnostics.Process.Start(pi))
                {
                    // BUGFIX: drain the redirected streams BEFORE waiting; if the
                    // child fills the pipe buffer while nobody reads it,
                    // WaitForExit() deadlocks (documented Process pitfall)
                    string stdout = pr.StandardOutput.ReadToEnd();
                    string stderr = pr.StandardError.ReadToEnd();
                    pr.WaitForExit();

                    if (pr.ExitCode != 0)
                    {
                        Console.WriteLine("Error");
                        Console.WriteLine(stdout);
                        Console.WriteLine(stderr);
                        throw new Exception("Resgen failure: " + s);
                    }
                }
            }
            else
            {
                Console.WriteLine("Not modified: " + s);
            }

            resources.Add(resname);
        }
    }

    // Nothing to link for this culture
    if (resources.Count == 0)
        return;

    if (!System.IO.File.Exists(versionAssembly) && Duplicati.Library.Utility.Utility.IsClientLinux)
        versionAssembly = versionAssembly.Replace("\\", System.IO.Path.DirectorySeparatorChar.ToString());

    if (!System.IO.File.Exists(versionAssembly))
    {
        Console.WriteLine("Unable to locate file: {0}", versionAssembly);
        Console.WriteLine("This can be fixed by compiling the application or modifying the file configuration.xml");
        return;
    }

    // Write an al.exe response file with all linker arguments, then invoke al with it
    using (Duplicati.Library.Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
    {
        using (System.IO.StreamWriter sw = new System.IO.StreamWriter(tf))
        {
            System.Reflection.Assembly asm = System.Reflection.Assembly.ReflectionOnlyLoadFrom(versionAssembly);
            sw.WriteLine("/t:lib");
            sw.WriteLine("/out:\"" + assemblyname + "\"");
            sw.WriteLine("/product:\"" + productname + "\"");
            sw.WriteLine("/title:\"" + productname + "\"");
            sw.WriteLine("/version:" + asm.GetName().Version.ToString());
            if (!string.IsNullOrEmpty(keyfile))
                sw.WriteLine("/keyfile:\"" + keyfile + "\"");
            sw.WriteLine("/culture:" + culture);

            foreach (string s in resources)
            {
                // Resource name: namespace + relative path with separators and
                // spaces mapped to '.'/'_' -- assumes Windows-style '\' separators
                // in the enumerated paths; TODO confirm behavior on Linux
                string resname = s.Substring(folder.Length);
                resname = resname.Replace("\\", ".");
                resname = resname.Replace(" ", "_");
                resname = @namespace + "." + resname;
                sw.WriteLine("/embed:\"" + s + "\"," + resname);
            }
        }

        Console.WriteLine("Linking ...");
        System.Diagnostics.ProcessStartInfo pi = new System.Diagnostics.ProcessStartInfo(alexe, "@\"" + tf + "\"");
        pi.CreateNoWindow = true;
        pi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Hidden;

        // No stream redirection here, so waiting first is safe; using ensures disposal
        using (System.Diagnostics.Process pr = System.Diagnostics.Process.Start(pi))
        {
            pr.WaitForExit();
            if (pr.ExitCode != 0)
                throw new Exception("Linker failure");
        }
    }
}
/// <summary>
/// Compiles the .resx files for the given culture found under <paramref name="folder"/> into
/// .resources files using resgen, then links them into a satellite resource assembly using al.
/// NOTE(review): this appears to be a duplicate of the other CompileResxFiles overload in this
/// file with identical logic but different brace style -- consider removing one copy.
/// </summary>
/// <param name="folder">Root folder scanned recursively for culture-specific .resx files</param>
/// <param name="excludeFolders">Folders whose .resx files are ignored</param>
/// <param name="namespace">Namespace prefix for the embedded resource names</param>
/// <param name="assemblyname">Output path for the linked satellite assembly</param>
/// <param name="versionAssembly">Assembly whose version number is copied onto the output</param>
/// <param name="keyfile">Optional strong-name key file used to sign the output</param>
/// <param name="culture">The culture to compile, e.g. "da-DK"</param>
/// <param name="productname">Product/title stamped on the output assembly</param>
/// <param name="fxVer">Framework version string used to select the Windows SDK tool folders</param>
public static void CompileResxFiles(string folder, List<string> excludeFolders, string @namespace, string assemblyname, string versionAssembly, string keyfile, string culture, string productname, string fxVer)
{
    string resgenexe;
    string alexe;
    if (Duplicati.Library.Utility.Utility.IsClientLinux)
    {
        // On Linux/Mono the tools are located via the shell; assumes ExecuteAndRead
        // returns a usable path from "which" -- TODO confirm trailing-newline handling
        resgenexe = ExecuteAndRead("which", "resgen");
        alexe = ExecuteAndRead("which", "al");
    }
    else //Windows
    {
        //Order of these paths is also the search order
        string[] known_sdk_paths = null;
        if (fxVer == "2.0")
        {
            known_sdk_paths = new string[] {
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v2.0.50727\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft.NET\\SDK\\v2.0\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft.NET\\SDK\\v2.0\\bin\\"),
            };
        }
        else if (fxVer == "3.0")
        {
            known_sdk_paths = new string[] {
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v3.0\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v6.0A\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v6.0A\\bin\\"),
            };
        }
        else if (fxVer == "3.5")
        {
            known_sdk_paths = new string[] {
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v3.5\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v7.0A\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v7.0A\\bin\\"),
            };
        }
        else
        {
            // Default: .NET 4.x tool locations
            known_sdk_paths = new string[] {
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v4.0\\"),
                Environment.ExpandEnvironmentVariables("%WINDIR%\\Microsoft.Net\\Framework\\v4.0.30319\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v8.0A\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v8.0A\\bin\\NETFX 4.0 Tools\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES%\\Microsoft SDKs\\Windows\\v7.1\\Bin\\NETFX 4.0 Tools\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v8.0A\\bin\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v8.0A\\bin\\NETFX 4.0 Tools\\"),
                Environment.ExpandEnvironmentVariables("%PROGRAMFILES(X86)%\\Microsoft SDKs\\Windows\\v7.1\\Bin\\NETFX 4.0 Tools\\"),
            };
        }

        resgenexe = "resgen.exe";
        alexe = "al.exe";

        // Resolve the bare tool names against the SDK folders; first hit wins
        foreach (var p in known_sdk_paths)
        {
            if (System.IO.File.Exists(System.IO.Path.Combine(p, resgenexe)))
            {
                resgenexe = System.IO.Path.Combine(p, resgenexe);
                break;
            }
        }
        foreach (var p in known_sdk_paths)
        {
            if (System.IO.File.Exists(System.IO.Path.Combine(p, alexe)))
            {
                alexe = System.IO.Path.Combine(p, alexe);
                break;
            }
        }
    }

    if (!System.IO.File.Exists(resgenexe))
    {
        Console.WriteLine("Unable to locate file: {0}", resgenexe);
        Console.WriteLine("This can be fixed by installing a microsoft platform SDK, or visual studio (express is fine)");
        return;
    }
    if (!System.IO.File.Exists(alexe))
    {
        Console.WriteLine("Unable to locate file: {0}", alexe);
        Console.WriteLine("This can be fixed by installing the .Net framework version 2.0");
        return;
    }

    // Normalize Windows-style separators in the keyfile path when running on Linux
    if (!string.IsNullOrEmpty(keyfile) && Duplicati.Library.Utility.Utility.IsClientLinux)
    {
        keyfile = keyfile.Replace("\\", System.IO.Path.DirectorySeparatorChar.ToString());
    }

    List<string> resources = new List<string>();
    folder = Duplicati.Library.Utility.Utility.AppendDirSeparator(folder);
    foreach (string s in Duplicati.Library.Utility.Utility.EnumerateFiles(folder))
    {
        // Only files named *.<culture>.resx are compiled
        if (s.ToLower().EndsWith("." + culture.ToLower() + ".resx"))
        {
            if (excludeFolders.Any(xf => s.ToLower().StartsWith(Duplicati.Library.Utility.Utility.AppendDirSeparator(xf).ToLower())))
            {
                continue;
            }
            string resname = System.IO.Path.ChangeExtension(s, ".resources");
            // Recompile only when the .resources output is missing or stale
            if (!System.IO.File.Exists(resname) || System.IO.File.GetLastWriteTime(resname) < System.IO.File.GetLastWriteTime(s))
            {
                Console.WriteLine("Compiling: " + s);
                System.Diagnostics.ProcessStartInfo pi = new System.Diagnostics.ProcessStartInfo(resgenexe, "\"" + s + "\"");
                pi.CreateNoWindow = true;
                pi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Hidden;
                pi.RedirectStandardOutput = true;
                pi.RedirectStandardError = true;
                pi.UseShellExecute = false;
                pi.WorkingDirectory = System.IO.Path.GetDirectoryName(s);
                System.Diagnostics.Process pr = System.Diagnostics.Process.Start(pi);
                // NOTE(review): stdout/stderr are read only after WaitForExit();
                // with both streams redirected, a chatty resgen can fill the pipe
                // buffer and deadlock here -- consider draining before waiting
                pr.WaitForExit();
                if (pr.ExitCode != 0)
                {
                    Console.WriteLine("Error");
                    Console.WriteLine(pr.StandardOutput.ReadToEnd());
                    Console.WriteLine(pr.StandardError.ReadToEnd());
                    throw new Exception("Resgen failure: " + s);
                }
            }
            else
            {
                Console.WriteLine("Not modified: " + s);
            }
            resources.Add(resname);
        }
    }

    // Nothing to link for this culture
    if (resources.Count == 0)
    {
        return;
    }

    if (!System.IO.File.Exists(versionAssembly) && Duplicati.Library.Utility.Utility.IsClientLinux)
    {
        versionAssembly = versionAssembly.Replace("\\", System.IO.Path.DirectorySeparatorChar.ToString());
    }
    if (!System.IO.File.Exists(versionAssembly))
    {
        Console.WriteLine("Unable to locate file: {0}", versionAssembly);
        Console.WriteLine("This can be fixed by compiling the application or modifying the file configuration.xml");
        return;
    }

    // Write an al.exe response file with all linker arguments, then invoke al with it
    using (Duplicati.Library.Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
    {
        using (System.IO.StreamWriter sw = new System.IO.StreamWriter(tf))
        {
            System.Reflection.Assembly asm = System.Reflection.Assembly.ReflectionOnlyLoadFrom(versionAssembly);
            sw.WriteLine("/t:lib");
            sw.WriteLine("/out:\"" + assemblyname + "\"");
            sw.WriteLine("/product:\"" + productname + "\"");
            sw.WriteLine("/title:\"" + productname + "\"");
            sw.WriteLine("/version:" + asm.GetName().Version.ToString());
            if (!string.IsNullOrEmpty(keyfile))
            {
                sw.WriteLine("/keyfile:\"" + keyfile + "\"");
            }
            sw.WriteLine("/culture:" + culture);
            foreach (string s in resources)
            {
                // Resource name: namespace + relative path with separators and
                // spaces mapped to '.'/'_' -- assumes Windows-style '\' separators
                // in the enumerated paths; TODO confirm behavior on Linux
                string resname = s.Substring(folder.Length);
                resname = resname.Replace("\\", ".");
                resname = resname.Replace(" ", "_");
                resname = @namespace + "." + resname;
                sw.WriteLine("/embed:\"" + s + "\"," + resname);
            }
        }
        Console.WriteLine("Linking ...");
        System.Diagnostics.ProcessStartInfo pi = new System.Diagnostics.ProcessStartInfo(alexe, "@\"" + tf + "\"");
        pi.CreateNoWindow = true;
        pi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Hidden;
        System.Diagnostics.Process pr = System.Diagnostics.Process.Start(pi);
        pr.WaitForExit();
        if (pr.ExitCode != 0)
        {
            throw new Exception("Linker failure");
        }
    }
}
/// <summary>
/// Entry point for the update-package signing tool: loads (or creates) an encrypted RSA
/// key file, verifies it matches the project's public sign key, applies command-line
/// overrides to the manifest, and builds a signed update package.
/// </summary>
/// <param name="_args">Raw command line arguments; options are given as --name=value</param>
/// <returns>0 on success, 4 on invalid/missing arguments, 5 on key mismatch</returns>
public static int Main(string[] _args)
{
    var args = new List<string>(_args);
    var opts = Duplicati.Library.Utility.CommandLineParser.ExtractOptions(args);

    string inputfolder;
    string outputfolder;
    string keyfile;
    string manifestfile;
    string keyfilepassword;

    opts.TryGetValue("input", out inputfolder);
    opts.TryGetValue("output", out outputfolder);
    opts.TryGetValue("keyfile", out keyfile);
    opts.TryGetValue("manifest", out manifestfile);
    opts.TryGetValue("keyfile-password", out keyfilepassword);

    // Options consumed here; everything else is treated as a manifest field override
    var usedoptions = new string[] { "input", "output", "keyfile", "manifest", "keyfile-password" };

    if (string.IsNullOrWhiteSpace(inputfolder))
    {
        Console.WriteLine("Missing input folder");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(outputfolder))
    {
        Console.WriteLine("Missing output folder");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(keyfile))
    {
        Console.WriteLine("Missing keyfile");
        return 4;
    }

    // BUGFIX: the manifest option was previously never validated, so a missing
    // option crashed later at File.OpenRead with an unhelpful exception
    if (string.IsNullOrWhiteSpace(manifestfile))
    {
        Console.WriteLine("Missing manifest file");
        return 4;
    }

    if (!System.IO.Directory.Exists(inputfolder))
    {
        Console.WriteLine("Input folder not found");
        return 4;
    }

    if (!System.IO.File.Exists(manifestfile))
    {
        Console.WriteLine("Manifest file not found");
        return 4;
    }

    if (string.IsNullOrWhiteSpace(keyfilepassword))
    {
        Console.WriteLine("Enter keyfile passphrase: ");
        // BUGFIX: ReadLine returns null at end-of-input; guard before Trim
        keyfilepassword = (Console.ReadLine() ?? string.Empty).Trim();
    }

    if (!System.IO.File.Exists(keyfile))
    {
        // First run: generate a fresh RSA key and store it AES-encrypted with the passphrase
        Console.WriteLine("Keyfile not found, creating new");
        var newkey = System.Security.Cryptography.RSACryptoServiceProvider.Create().ToXmlString(true);
        using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary<string, string>()))
        using (var fs = System.IO.File.OpenWrite(keyfile))
        using (var ms = new System.IO.MemoryStream(System.Text.Encoding.UTF8.GetBytes(newkey)))
            enc.Encrypt(ms, fs);
    }

    if (!System.IO.Directory.Exists(outputfolder))
        System.IO.Directory.CreateDirectory(outputfolder);

    // Load and decrypt the private signing key
    // NOTE(review): the cast assumes RSA.Create() yields an RSACryptoServiceProvider,
    // which holds on .NET Framework/Windows -- confirm before porting
    var privkey = (System.Security.Cryptography.RSACryptoServiceProvider)System.Security.Cryptography.RSACryptoServiceProvider.Create();

    using (var enc = new Duplicati.Library.Encryption.AESEncryption(keyfilepassword, new Dictionary<string, string>()))
    using (var ms = new System.IO.MemoryStream())
    using (var fs = System.IO.File.OpenRead(keyfile))
    {
        enc.Decrypt(fs, ms);
        ms.Position = 0;

        using (var sr = new System.IO.StreamReader(ms))
            privkey.FromXmlString(sr.ReadToEnd());
    }

    // The packager's public key must match the key compiled into the updater
    if (Duplicati.Library.AutoUpdater.AutoUpdateSettings.SignKey == null || privkey.ToXmlString(false) != Duplicati.Library.AutoUpdater.AutoUpdateSettings.SignKey.ToXmlString(false))
    {
        Console.WriteLine("The public key in the project is not the same as the public key from the file");
        Console.WriteLine("Try setting the key to: ");
        Console.WriteLine(privkey.ToXmlString(false));
        return 5;
    }

    Duplicati.Library.AutoUpdater.UpdateInfo updateInfo;
    using (var fs = System.IO.File.OpenRead(manifestfile))
    using (var sr = new System.IO.StreamReader(fs))
    using (var jr = new Newtonsoft.Json.JsonTextReader(sr))
        updateInfo = new Newtonsoft.Json.JsonSerializer().Deserialize<Duplicati.Library.AutoUpdater.UpdateInfo>(jr);

    // Remaining options override UpdateInfo fields by name (case-insensitive)
    var isopts = new Dictionary<string, string>(opts, StringComparer.InvariantCultureIgnoreCase);
    foreach (var usedopt in usedoptions)
        isopts.Remove(usedopt);

    foreach (var k in updateInfo.GetType().GetFields())
        if (isopts.ContainsKey(k.Name))
        {
            try
            {
                //Console.WriteLine("Setting {0} to {1}", k.Name, isopts[k.Name]);
                if (k.FieldType == typeof(string[]))
                    k.SetValue(updateInfo, isopts[k.Name].Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries));
                else if (k.FieldType == typeof(Version))
                    k.SetValue(updateInfo, new Version(isopts[k.Name]));
                else if (k.FieldType == typeof(int))
                    k.SetValue(updateInfo, int.Parse(isopts[k.Name]));
                else if (k.FieldType == typeof(long))
                    k.SetValue(updateInfo, long.Parse(isopts[k.Name]));
                else
                    k.SetValue(updateInfo, isopts[k.Name]);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Failed setting {0} to {1}: {2}", k.Name, isopts[k.Name], ex.Message);
            }

            isopts.Remove(k.Name);
        }

    foreach (var opt in isopts)
        Console.WriteLine("Warning! unused option: {0} = {1}", opt.Key, opt.Value);

    // Serialize the adjusted manifest to a temp file and build the signed package
    using (var tf = new Duplicati.Library.Utility.TempFile())
    {
        using (var fs = System.IO.File.OpenWrite(tf))
        using (var tw = new System.IO.StreamWriter(fs))
            new Newtonsoft.Json.JsonSerializer().Serialize(tw, updateInfo);

        Duplicati.Library.AutoUpdater.UpdaterManager.CreateUpdatePackage(privkey, inputfolder, outputfolder, tf);
    }

    return 0;
}