/// <summary>
/// Saves the package contents to the specified local directory.
/// </summary>
/// <param name="baseUri">The base URI where the package content exists.</param>
/// <param name="localDirectory">The local directory path to save the content to.</param>
/// <param name="progressHandler">The optional handler to report progress to.</param>
/// <param name="progressStatus">The optional progress status description.</param>
public void Save(Path baseUri, Path localDirectory, ProgressChangedEventHandler progressHandler = null, string progressStatus = null)
{
    int processedCount = 0;

    foreach (IAction action in Actions)
    {
        Content content;

        switch (action)
        {
            case AddAction add:
                content = add.Content;
                break;
            case PatchAction patch:
                content = patch.Content;
                break;
            default:
                continue;
        }

        foreach (var part in content.Parts)
        {
            Path repoPath = baseUri + (Id + "/") + (content.Id + "/") + part.Path;
            Path packagePath = localDirectory + (Id + "/") + (content.Id + "/") + part.Path;

            new ReadOnlyFile(repoPath).Copy(packagePath);
        }

        // propagate current progress upstream
        processedCount++;
        int progressPercentage = (int)(processedCount / (float)Actions.Count * 100);
        progressHandler?.Invoke(this, new ProgressChangedEventArgs(progressPercentage, progressStatus));
    }
}
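// Hypothetical usage sketch (not part of the original source): download a package's
// contents into a temporary directory while logging percentage progress. The helper
// name ExampleSavePackage and the idea of passing the content repo base URI in are
// assumptions; only Save's signature above is taken from this file.
private static void ExampleSavePackage(Package package, Path contentRepoBaseUri)
{
    Path downloadDirectory = Utility.GetTempDirectory();

    package.Save(
        contentRepoBaseUri,
        downloadDirectory,
        (sender, args) => Console.WriteLine("{0}% - {1}", args.ProgressPercentage, args.UserState),
        "Downloading package");
}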
private void VersionDetectionWorker_DoWork(object sender, DoWorkEventArgs e)
{
    Logger?.Debug("Version detection request");

    // guard against multiple tasks running at the same time
    if (!TryEnterTask())
    {
        return;
    }

    try
    {
        Logger?.Info("Version detection started");

        // bail out early if the required file is missing; this avoids hashing an entire
        // drive when the tool is run from the wrong directory (e.g. the root of C:\) and
        // ContentPath is not configured in the app.config
        string requiredFilePath = Path.Combine(_contentPath + "\\", Config.FileMustExist);

        if (!string.IsNullOrWhiteSpace(Config.FileMustExist) && !File.Exists(requiredFilePath))
        {
            Logger?.Warn("{0} not found", requiredFilePath);
            return;
        }

        // determine the current version quickly via a targeted file check if possible
        if (!string.IsNullOrWhiteSpace(Config.VersionFilePath))
        {
            var snapshots = PackageRepository.FindSnapshotsFromFile(_contentPath, Config.VersionFilePath);

            if (snapshots?.Count == 1)
            {
                _detectedSnapshot = snapshots.First();
            }
        }

        // otherwise fall back to full hash verification
        if (_detectedSnapshot == null)
        {
            Logger?.Info("Falling back to full hash verification");
            _detectedSnapshot = PackageRepository.FindSnapshotFromDirectory(_contentPath, WorkerOnProgressChanged, "Detecting version");
        }

        Logger?.Info("Version detection completed");
    }
    finally
    {
        // allow other tasks to run
        ExitTask();
    }
}
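// TryEnterTask/ExitTask are not shown in this file. A minimal sketch of the guard they
// presumably implement, assuming a single int field serves as the "busy" flag; the field
// name _taskRunning is an assumption.
private int _taskRunning;

private bool TryEnterTask()
{
    // only the first caller flips the flag from 0 to 1; everyone else backs off
    return Interlocked.CompareExchange(ref _taskRunning, 1, 0) == 0;
}

private void ExitTask()
{
    // clear the flag so the next task may run
    Interlocked.Exchange(ref _taskRunning, 0);
}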
// TODO: subscribe all workers to this and cancel upon app close?
// CancellationTokenSource cancellation;

#endregion

#region Construction & Destruction

public MainWindow()
{
    // catch any unhandled exceptions (in this thread only)
    Application.Current.DispatcherUnhandledException += UnhandledException;

    // initialize the default logging instance used throughout the core library
    LoggerBase.CurrentInstance = new SeriLogger(Config.LogLevel);

    InitializeComponent();

    // TODO: needs more testing
    try
    {
        // HACK: load this WPF dependency from the default system location instead of the
        // current directory, otherwise it may interfere with patching if present
        string path = Path.Combine(Environment.SystemDirectory, "D3DCompiler_47.dll");
        Logger?.Debug("Loading unmanaged library from {0}", path);

        if (LoadUnmanagedLibrary(path) == IntPtr.Zero) // will automatically forward to SysWOW64 if needed
        {
            throw new Win32Exception();
        }
    }
    catch (Exception e)
    {
        Logger?.Warn(e, "Unmanaged library load failure.");
    }

    // update the title with the application version
    Title += " " + Assembly.GetExecutingAssembly().GetName().Version;

    // wire up worker responsible for version detection
    _detectionWorker = Utilities.Utility.CreateBackgroundWorker(VersionDetectionWorker_DoWork, WorkerOnProgressChanged, DetectionCompletedHandler);

    // wire up worker responsible for applying patches
    _patchWorker = Utilities.Utility.CreateBackgroundWorker(PatchWorkerDoWork, WorkerOnProgressChanged, PatchCompletedHandler);

    // wire up worker responsible for creating packages
    _packageWorker = Utilities.Utility.CreateBackgroundWorker(PackageWorker_DoWork, WorkerOnProgressChanged, PackageCompletedHandler);

    // wire up worker responsible for creating snapshots
    _snapshotWorker = Utilities.Utility.CreateBackgroundWorker(SnapshotWorker_DoWork, WorkerOnProgressChanged, SnapshotCompletedHandler);

    // wire up worker responsible for updating the application
    _updateWorker = Utilities.Utility.CreateBackgroundWorker(UpdateWorker_DoWork, WorkerOnProgressChanged, UpdateCompletedHandler);

    // check for updates
    _updateWorker.RunWorkerAsync();
}
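// LoadUnmanagedLibrary is not shown here; it is presumably a thin wrapper over the Win32
// LoadLibrary API, roughly along these lines. The wrapper itself is an assumption and
// requires System.Runtime.InteropServices; SetLastError = true is what lets the
// Win32Exception thrown above carry the real error code.
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
private static extern IntPtr LoadLibrary(string lpFileName);

private static IntPtr LoadUnmanagedLibrary(string path)
{
    return LoadLibrary(path);
}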
/// <summary>
/// Combines files from the specified source directory into the specified output file.
/// Assumes alphabetical sort order indicates assembly order.
/// </summary>
/// <param name="sourcePath">The source path containing parts of a file.</param>
/// <param name="outputFilePath">The combined file path.</param>
public static void JoinFiles(Path sourcePath, Path outputFilePath)
{
    if (!outputFilePath.Uri.IsFile)
    {
        throw new ArgumentException("Output path must be a file.");
    }

    // TODO: support other URIs
    // TODO: use ReadOnlyFile and refactor to use sourceBasePath

    // File.Create truncates any pre-existing output so stale bytes are not left behind
    using (FileStream fs = File.Create(outputFilePath))
    {
        // Directory.GetFiles does not guarantee ordering, so sort explicitly to honor the
        // documented alphabetical assembly order
        foreach (var file in Directory.GetFiles(sourcePath).OrderBy(f => f, StringComparer.Ordinal))
        {
            using (FileStream s = File.OpenRead(file))
            {
                s.CopyTo(fs);
            }
        }
    }
}
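// Hypothetical usage sketch (not part of the original source), assuming the custom Path
// type can be constructed from a local path string: reassemble a file from parts whose
// names sort alphabetically into the original order (e.g. data.bin.000, data.bin.001, ...).
// Zero-padded indices keep the alphabetical sort consistent with the numeric part order.
private static void ExampleJoinFiles()
{
    JoinFiles(new Path(@"C:\staging\data.bin.parts\"), new Path(@"C:\staging\data.bin"));
}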
/// <summary>
/// Generates package content differentials between the source and target snapshots in the specified output path.
/// Note that additional package modifications may be required afterwards to account for content not included in the snapshots.
/// </summary>
/// <param name="repo">The initial repository configuration.</param>
/// <param name="sourcePath">The absolute local source content path.</param>
/// <param name="targetPath">The absolute local target content path.</param>
/// <param name="deltaPath">The delta content path; these files should be uploaded to the repo.</param>
/// <param name="settings">The optional package settings.</param>
/// <param name="progressHandler">The optional handler to report progress to.</param>
/// <param name="progressStatus">The optional progress status description.</param>
/// <returns>The created package information.</returns>
public Package(Repository repo, Path sourcePath, Path targetPath, Path deltaPath, PackageSettings settings = null, ProgressChangedEventHandler progressHandler = null, string progressStatus = null)
{
    Repository = repo ?? throw new ArgumentNullException(nameof(repo));

    if (!Directory.Exists(sourcePath))
    {
        throw new DirectoryNotFoundException("Source path does not exist.");
    }

    if (!Directory.Exists(targetPath))
    {
        throw new DirectoryNotFoundException("Target path does not exist.");
    }

    if (!Directory.Exists(deltaPath))
    {
        throw new DirectoryNotFoundException("Delta path does not exist.");
    }

    // use the specified settings or initialize with defaults if null
    PackageSettings pkgSettings = settings ?? new PackageSettings();

    // generate the empty package and create its content directory based on its ID
    Path packagePath = Path.Combine(deltaPath, Id + "\\");
    Directory.CreateDirectory(packagePath);

    // TODO: automatic detection of relative versus absolute paths (absolute should override and not use base path if specified)
    try
    {
        #region Snapshots

        // generate the source snapshot, reusing an existing one from the repo if the content matches
        var sourceSnapshot = new Snapshot(sourcePath, progressHandler, "Generating source snapshot");

        if (repo.Snapshots.Contains(sourceSnapshot))
        {
            sourceSnapshot = repo.Snapshots[repo.Snapshots.IndexOf(sourceSnapshot)];
        }
        else
        {
            repo.Snapshots.Add(sourceSnapshot);
        }

        // generate the target snapshot, reusing an existing one from the repo if the content matches
        var targetSnapshot = new Snapshot(targetPath, progressHandler, "Generating target snapshot");

        if (repo.Snapshots.Contains(targetSnapshot))
        {
            targetSnapshot = repo.Snapshots[repo.Snapshots.IndexOf(targetSnapshot)];
        }
        else
        {
            repo.Snapshots.Add(targetSnapshot);
        }

        // abort if the package already exists
        if (repo.Packages.Any(pkg => Equals(pkg.SourceSnapshot, sourceSnapshot) && Equals(pkg.TargetSnapshot, targetSnapshot)))
        {
            throw new NotSupportedException("Package already exists for the specified source and target content.");
        }

        // link the snapshots to the package
        SourceSnapshot = sourceSnapshot;
        TargetSnapshot = targetSnapshot;

        #endregion

        #region Lookup Tables

        // generate lookup tables keyed by path, file name, and hash
        var sourcePaths = new Dictionary<Path, FileInformation>();
        var sourceFiles = new Dictionary<Path, List<FileInformation>>();
        var sourceHashes = new Dictionary<string, List<FileInformation>>();

        foreach (var info in sourceSnapshot.Files)
        {
            sourcePaths[info.Path] = info;

            Path fileName = Path.GetFileName(info.Path);

            if (!sourceFiles.ContainsKey(fileName))
            {
                sourceFiles[fileName] = new List<FileInformation>();
            }

            sourceFiles[fileName].Add(info);

            if (info.Hash != null)
            {
                var hash = BitConverter.ToString(info.Hash);

                if (!sourceHashes.ContainsKey(hash))
                {
                    sourceHashes[hash] = new List<FileInformation>();
                }

                sourceHashes[hash].Add(info);
            }
        }

        var targetPaths = new Dictionary<Path, FileInformation>();
        var targetFiles = new Dictionary<Path, List<FileInformation>>();
        var targetHashes = new Dictionary<string, List<FileInformation>>();

        foreach (var info in targetSnapshot.Files)
        {
            targetPaths[info.Path] = info;

            Path fileName = Path.GetFileName(info.Path);

            if (!targetFiles.ContainsKey(fileName))
            {
                targetFiles[fileName] = new List<FileInformation>();
            }

            targetFiles[fileName].Add(info);

            if (info.Hash != null)
            {
                var hash = BitConverter.ToString(info.Hash);

                if (!targetHashes.ContainsKey(hash))
                {
                    targetHashes[hash] = new List<FileInformation>();
                }

                targetHashes[hash].Add(info);
            }
        }

        #endregion

        #region Package Content Generation

        progressHandler?.Invoke(this, new ProgressChangedEventArgs(0, progressStatus));

        // TODO: no good way to get regular progress callbacks from all patchers, so just count files processed instead for now
        int processedCount = 0;

        Parallel.ForEach(targetSnapshot.Files, file =>
        {
            Path sourceFilePath = Path.Combine(sourcePath, file.Path);
            Path targetFilePath = Path.Combine(targetPath, file.Path);
            bool sourcePathMatch = sourcePaths.ContainsKey(file.Path);

            // hash comparisons are only applicable to files
            string hash = !file.Path.IsDirectory && file.Hash != null ? BitConverter.ToString(file.Hash) : null;
            bool sourceHashMatch = !file.Path.IsDirectory && file.Hash != null && sourceHashes.ContainsKey(hash);
            bool sourceHashSingleMatch = sourceHashMatch && sourceHashes[hash].Count == 1;
            bool targetHashSingleMatch = !file.Path.IsDirectory && file.Hash != null && targetHashes[hash].Count == 1;

            // unchanged empty directory
            if (file.Path.IsDirectory && sourcePathMatch)
            {
                // do nothing
            }
            // added empty directory
            else if (file.Path.IsDirectory && !sourcePathMatch)
            {
                lock (((ICollection)Actions).SyncRoot)
                {
                    Actions.Add(new AddAction(file.Path));
                }
            }
            // added optional file (target file with a null hash)
            else if (!file.Path.IsDirectory && file.Hash == null)
            {
                var content = new Content(targetFilePath, packagePath, pkgSettings);

                lock (((ICollection)Actions).SyncRoot)
                {
                    Actions.Add(new AddAction(file.Path, content, false, true));
                }
            }
            // unchanged file
            else if (!file.Path.IsDirectory && sourcePathMatch && sourceHashMatch)
            {
                // do nothing
            }
            // changed file (same path, different hash)
            else if (!file.Path.IsDirectory && sourcePathMatch && !sourceHashMatch)
            {
                Path tempDeltaFile = Utility.GetTempFilePath();

                try
                {
                    // patches should already be compressed, no need to do so again
                    PackageSettings pkgSettingsCopy = (PackageSettings)pkgSettings.Clone(); // deep clone for thread safety
                    pkgSettingsCopy.CompressionEnabled = false;

                    // generate the delta patch in a temp location and generate content from it
                    Utility.GetPatcher(pkgSettingsCopy.PatchAlgorithmType).Create(sourceFilePath, targetFilePath, tempDeltaFile);
                    var content = new Content(tempDeltaFile, packagePath, pkgSettingsCopy);

                    lock (((ICollection)Actions).SyncRoot)
                    {
                        Actions.Add(new PatchAction(file.Path, file.Path, pkgSettingsCopy.PatchAlgorithmType, content));
                    }
                }
                finally
                {
                    File.Delete(tempDeltaFile);
                }
            }
            // moved file (different path, single hash match on both sides)
            else if (!file.Path.IsDirectory && !sourcePathMatch && sourceHashSingleMatch && targetHashSingleMatch)
            {
                lock (((ICollection)Actions).SyncRoot)
                {
                    Actions.Add(new MoveAction(sourceHashes[hash][0].Path, file.Path));
                }
            }
            // copied file (multiple hash matches, different paths)
            else if (!file.Path.IsDirectory && !sourcePathMatch && sourceHashMatch)
            {
                lock (((ICollection)Actions).SyncRoot)
                {
                    Actions.Add(new CopyAction(sourceHashes[hash][0].Path, file.Path));
                }
            }
            // added file (path and hash exist in the target but not the source)
            else if (!file.Path.IsDirectory && !sourcePathMatch && !sourceHashMatch)
            {
                var content = new Content(targetFilePath, packagePath, pkgSettings);

                lock (((ICollection)Actions).SyncRoot)
                {
                    Actions.Add(new AddAction(file.Path, content));
                }
            }
            else
            {
                throw new InvalidOperationException("File action not found.");
            }

            // propagate current progress upstream
            int count = Interlocked.Increment(ref processedCount);
            int progressPercentage = (int)(count / (float)targetSnapshot.Files.Count * 100);
            progressHandler?.Invoke(this, new ProgressChangedEventArgs(progressPercentage, progressStatus));
        });

        // deleted files & directories (path exists in the source but not the target)
        foreach (var file in sourceSnapshot.Files)
        {
            if (!targetPaths.ContainsKey(file.Path))
            {
                Actions.Add(new RemoveAction(file.Path, true));
            }
        }

        // TODO: handle empty directory deletions caused by files getting deleted within
        //       loop through destination directories and delete any that don't exist in source

        // TODO: handle file/directory renames that consist of case changes only
        //       loop through all files (should be 1:1 now) and rename if case mismatches (implies underlying directories are also renamed where applicable)

        #endregion
    }
    catch
    {
        // clean up the partially generated package content and let the caller handle the failure
        Directory.Delete(packagePath, true);
        throw;
    }
}
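// Hypothetical usage sketch (not part of the original source): diff two local content
// folders into a delta folder and record the result in the repository configuration.
// The file locations, the Path-from-string construction, and whether the caller must add
// the package to repo.Packages itself are assumptions; only the constructor signature
// above is taken from this file.
private static Package ExampleCreatePackage(Repository repo)
{
    var package = new Package(
        repo,
        new Path(@"C:\content\1.0.0\"),  // source content
        new Path(@"C:\content\1.1.0\"),  // target content
        new Path(@"C:\delta\"),          // delta output; upload these files to the repo
        progressHandler: (sender, args) => Console.WriteLine("{0}%", args.ProgressPercentage));

    repo.Packages.Add(package);
    return package;
}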
// TODO: will need to disable multi-threaded processing of actions during application if specified
//[JsonProperty(Required = Required.Default)]
//public bool PreserveActionOrder { get; set; }

// TODO: when order is not important, perform multi-threaded execution in order of biggest to smallest

/// <summary>
/// Applies package actions in-order against the target directory using the contents of the package directory.
/// </summary>
/// <param name="packageDirectory">The package contents directory.</param>
/// <param name="targetDirectory">The target base directory.</param>
/// <param name="validateBefore">Indicates whether or not the target directory should be validated before package application.</param>
/// <param name="validateAfter">Indicates whether or not the target directory should be validated after package application.</param>
/// <param name="progressHandler">The optional handler to report progress to.</param>
/// <param name="progressStatus">The optional progress status description.</param>
public void Apply(Path packageDirectory, Path targetDirectory, bool validateBefore = true, bool validateAfter = true, ProgressChangedEventHandler progressHandler = null, string progressStatus = null)
{
    // validate source content
    if (validateBefore)
    {
        Logger?.Info("Validating package source content");

        if (!new Snapshot(targetDirectory, progressHandler, "Validating source content").Contains(SourceSnapshot))
        {
            throw new DataException("Directory contents do not match the source snapshot.");
        }
    }

    // create temporary directory to contain target backup in case of rollback
    Path backupDirectory = Utility.GetTempDirectory();
    int processedCount = 0;

    // TODO: verify backup/rollback logic
    try
    {
        const string backupStepName = "Performing backup";
        progressHandler?.Invoke(this, new ProgressChangedEventArgs(0, backupStepName));

        // backup content to be modified, skipping NoAction types
        foreach (var action in Actions.Where(a => !(a is NoAction)))
        {
            Path targetPath = Path.Combine(targetDirectory, action.TargetPath);

            if (File.Exists(targetPath))
            {
                Path backupPath = Path.Combine(backupDirectory, action.TargetPath);
                Directory.CreateDirectory(Path.GetDirectoryName(backupPath));
                File.Copy(targetPath, backupPath, true);
            }

            // propagate current progress upstream
            processedCount++;
            int progressPercentage = (int)(processedCount / (float)Actions.Count * 100);
            progressHandler?.Invoke(this, new ProgressChangedEventArgs(progressPercentage, backupStepName));
        }

        progressHandler?.Invoke(this, new ProgressChangedEventArgs(0, progressStatus));

        // apply the actions in-order
        processedCount = 0;

        foreach (var action in Actions)
        {
            action.Run(new ActionContext(targetDirectory, packageDirectory));

            // propagate current progress upstream
            processedCount++;
            int progressPercentage = (int)(processedCount / (float)Actions.Count * 100);
            progressHandler?.Invoke(this, new ProgressChangedEventArgs(progressPercentage, progressStatus));
        }

        // TODO: selective file validation fed from list of changed files after patch application
        // validate modified content
        if (validateAfter)
        {
            Logger?.Info("Validating package output content");

            if (!new Snapshot(targetDirectory, progressHandler, "Validating output").Contains(TargetSnapshot))
            {
                throw new DataException("Directory contents do not match the target snapshot.");
            }
        }
    }
    catch (Exception ex)
    {
        Logger?.Error(ex, "Performing rollback.");

        const string rollbackStepName = "Performing rollback";
        progressHandler?.Invoke(this, new ProgressChangedEventArgs(0, rollbackStepName));
        processedCount = 0;

        // if failure is detected, delete any existing targets and restore any backups
        foreach (var action in Actions.Where(a => !(a is NoAction)))
        {
            // log any failures encountered during the rollback, keep chugging through regardless
            try
            {
                File.Delete(Path.Combine(targetDirectory, action.TargetPath));

                Path backupPath = Path.Combine(backupDirectory, action.TargetPath);

                if (File.Exists(backupPath))
                {
                    File.Copy(backupPath, Path.Combine(targetDirectory, action.TargetPath), true);
                }
            }
            catch (Exception e)
            {
                Logger?.Warn(e, "Unknown package restore failure.");
            }

            // propagate current progress upstream
            processedCount++;
            int progressPercentage = (int)(processedCount / (float)Actions.Count * 100);
            progressHandler?.Invoke(this, new ProgressChangedEventArgs(progressPercentage, rollbackStepName));
        }

        // re-throw the original exception
        throw;
    }
    finally
    {
        Directory.Delete(backupDirectory, true);
    }

    // TODO: remove empty directories or leave that up to the package actions?
}
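// Hypothetical usage sketch (not part of the original source): apply a previously
// downloaded package against an install directory with validation on both sides.
// The directory values and helper name are assumptions; PatchWorkerDoWork below shows
// the real call site.
private static void ExampleApplyPackage(Package package, Path downloadDirectory, Path installDirectory)
{
    package.Apply(
        Path.Combine(downloadDirectory, package.Id + "\\"),  // extracted package contents
        installDirectory,                                     // directory to patch
        validateBefore: true,
        validateAfter: true,
        progressHandler: (sender, args) => Console.WriteLine("{0}% - {1}", args.ProgressPercentage, args.UserState));
}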
private void PatchWorkerDoWork(object sender, DoWorkEventArgs e)
{
    Logger?.Debug("Patch request");

    // guard against multiple tasks running at the same time
    if (!TryEnterTask())
    {
        return;
    }

    Logger?.Info("Patch started");

    var packageDirectory = Utility.GetTempDirectory();

    try
    {
        // disable UI
        Dispatcher.Invoke(() =>
        {
            Patch.IsEnabled = false;
            TargetVersion.IsEnabled = false;
        });

        var version = e.Argument as Version;
        Package package = PackageRepository.FindPackage(_detectedSnapshot.Version, version);
        Logger?.Info("Package found for source version {0} to target version {1}", package.SourceSnapshot.Version, package.TargetSnapshot.Version);

        // download package contents locally
        Logger?.Info("Attempting to download package contents from {0}", Config.ContentRepoUri);
        package.Save(Path.GetDirectoryName(Config.ContentRepoUri), packageDirectory, WorkerOnProgressChanged, "Downloading package");

        // apply package
        Logger?.Info("Applying package");
        package.Apply(Path.Combine(packageDirectory, package.Id + "\\"), _contentPath, Config.ValidateBeforePackageApply, Config.ValidateAfterPackageApply, WorkerOnProgressChanged, "Applying package");

        // update detected version
        _detectedSnapshot = package.TargetSnapshot;

        Logger?.Info("Patch completed");
        MessageBox.Show("Patch applied successfully!", "Done", MessageBoxButton.OK, MessageBoxImage.Information);
    }
    catch (Exception ex)
    {
        Logger?.Error(ex, "Patch failed.");
        MessageBox.Show("Patch failed! Make sure the base game files haven't been modified in any way. If unsure, please run the updater against a fresh copy.", "Hold up!", MessageBoxButton.OK, MessageBoxImage.Error);
    }
    finally
    {
        // allow other tasks to run
        ExitTask();

        // cleanup the temp package directory
        Directory.Delete(packageDirectory, true);
    }
}
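// WorkerOnProgressChanged is not shown in this file. A minimal sketch of what it
// presumably does, assuming the window exposes a ProgressBar named Progress and a
// TextBlock named Status (both names are assumptions): marshal progress reports from
// the worker thread onto the UI thread.
private void WorkerOnProgressChanged(object sender, ProgressChangedEventArgs e)
{
    Dispatcher.Invoke(() =>
    {
        Progress.Value = e.ProgressPercentage;
        Status.Text = e.UserState as string ?? string.Empty;
    });
}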
private void UpdateWorker_DoWork(object sender, DoWorkEventArgs e)
{
    Logger?.Debug("Update request");

    // guard against multiple tasks running at the same time
    if (!TryEnterTask())
    {
        return;
    }

    try
    {
        // attempt to open the updater repo config
        _updateWorker.ReportProgress(0, "Checking for updates");
        Logger?.Info("Downloading updater repository configuration from {0}", Config.UpdaterRepoUri);
        var repo = Repository.Deserialize(new ReadOnlyFile(Config.UpdaterRepoUri).ReadAllText());

        // check for the newest version available
        var updateSnapshot = repo.Snapshots.OrderByDescending(t => t.Version).FirstOrDefault();

        if (updateSnapshot == null)
        {
            Logger?.Info("No updates available");
            return;
        }

        // get the current version
        var binPath = Assembly.GetExecutingAssembly().Location;
        var binDir = System.IO.Path.GetDirectoryName(binPath) + "\\";
        var currentVersion = new Version(FileVersionInfo.GetVersionInfo(binPath).FileVersion);

        // abort if no updates available
        if (updateSnapshot.Version <= currentVersion)
        {
            Logger?.Info("Already updated");
            return;
        }

        if (!Config.UpdaterAutoUpdate)
        {
            // give user the choice to accept or decline
            MessageBoxResult choice = MessageBox.Show(
                $"Update {updateSnapshot.Version} is available. Would you like to apply it?",
                "Update Available",
                MessageBoxButton.YesNo,
                MessageBoxImage.Information);

            // abort if declined
            if (choice != MessageBoxResult.Yes)
            {
                Logger?.Debug("Update declined");
                return;
            }
        }

        const string backupExtension = ".backup";
        string remoteFileDirectory = Path.Combine(Path.GetDirectoryName(Config.UpdaterRepoUri) + "/", updateSnapshot.Version.ToString());

        try
        {
            // apply updates
            foreach (var file in updateSnapshot.Files)
            {
                Logger?.Info("Updating {0}", file);

                // build local and remote file paths
                string remoteFilePath = Path.Combine(remoteFileDirectory + "/", file.Path);
                string localFilePath = Path.Combine(binDir, file.Path);

                // rename local file with backup extension if it exists, deleting any that already exist
                if (File.Exists(localFilePath))
                {
                    string backupPath = localFilePath + backupExtension;
                    File.Delete(backupPath);
                    File.Move(localFilePath, backupPath);
                }

                // download remote file
                new ReadOnlyFile(remoteFilePath).Copy(localFilePath); // TODO: validate downloaded content

                Logger?.Info("Finished updating {0}", file);
            }

            Logger?.Info("Update completed, restarting application");

            // start a new instance of the updated application and exit the existing
            // TODO: start cmd with timer to delete backup files
            Process.Start(Assembly.GetExecutingAssembly().Location);
            Dispatcher.Invoke(() => { Application.Current.Shutdown(); });
        }
        catch
        {
            foreach (var file in Directory.GetFiles(binDir, "*" + backupExtension, SearchOption.AllDirectories))
            {
                string originalPath = file.Substring(0, file.Length - backupExtension.Length);
                File.Delete(originalPath);
                File.Move(file, originalPath);
            }

            throw;
        }
    }
    finally
    {
        // allow other tasks to run
        ExitTask();
    }
}