public ActionResult Clone(int id)
{
    var project = _projects.First(ID: id);

    // Cloning creates the target directory itself, so the command must run
    // from the deploy path's parent and clone into the base directory name.
    var deployDir = new DirectoryInfo(project.DeployPath);
    var parentPath = deployDir.Parent.FullName;
    var targetName = deployDir.Name;

    var command = string.Format("cd {0} && git clone {1} {2}", parentPath, project.RepositoryUrl, targetName);
    var result = GitRunner.RunCommand(command);
    result += GitRunner.CleanUpClone(project.DeployPath);

    // Record the freshly cloned tip as the deployed commit.
    var repo = new GitRepo(project.DeployPath);
    project.DeployedID = repo.CurrentCommit.Hash;
    _projects.Update(project, id);

    TempData["message"] = result;
    return RedirectToAction("show", new { id = id });
}
public void Init_CreatesGitFiles()
{
    using (var temp = new TempDir())
    {
        // Initializing a fresh repo should lay down the standard git metadata
        // (a refs directory and a HEAD file) in the repo root.
        var repo = new GitRepo(m_log, temp.Path);
        repo.Init(Enumerable.Empty<GitConfigOption>());

        Assert.IsTrue(Directory.Exists(temp.GetPath("refs")));
        Assert.IsTrue(File.Exists(temp.GetPath("HEAD")));
    }
}
public void Init_CreatesDirectoryIfItDoesNotExist()
{
    using (var temp = new TempDir())
    {
        // Point the repo at a nested path that does not exist yet; Init is
        // expected to create the directory chain before initializing.
        var gitdir = temp.GetPath(@"dir1\dir2");
        var repo = new GitRepo(m_log, gitdir);
        repo.Init(Enumerable.Empty<GitConfigOption>());

        Assert.IsTrue(Directory.Exists(gitdir));
        Assert.IsTrue(Directory.Exists(temp.GetPath(@"dir1\dir2\refs")));
    }
}
public void Init_Option_Set()
{
    using (var temp = new TempDir())
    {
        var repo = new GitRepo(m_log, temp.Path);
        repo.Init(new[] { new GitConfigOption("foo.bar", "blah", add: false) });

        // The option must land under a [foo] section header in the config
        // file, with "bar = blah" on the line immediately after it.
        var configLines = File.ReadAllLines(temp.GetPath("config"));
        var sectionLine = FindSectionHeader(configLines, "foo");

        Assert.IsTrue(sectionLine >= 0);
        Assert.IsTrue(Regex.IsMatch(configLines[sectionLine + 1].Trim(), @"bar\s*=\s*blah"));
    }
}
// Downloads the git objects needed after a dehydrate: resolves HEAD via
// rev-parse, then downloads that commit and the root .gitattributes file
// through the cache server, showing progress while running.
// NOTE: errorMessage is captured by the lambda so failure text set inside
// ShowStatusWhileRunning is visible to WriteMessage afterwards — do not
// convert it to an out parameter of the lambda.
// Returns true on success; on failure writes the captured message and
// returns false.
private bool TryDownloadGitObjects(ITracer tracer, GVFSEnlistment enlistment, RetryConfig retryConfig) { string errorMessage = null; if (!this.ShowStatusWhileRunning( () => { CacheServerInfo cacheServer = new CacheServerInfo(enlistment.RepoUrl, null); using (GitObjectsHttpRequestor objectRequestor = new GitObjectsHttpRequestor(tracer, enlistment, cacheServer, retryConfig)) { PhysicalFileSystem fileSystem = new PhysicalFileSystem(); GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem); GVFSGitObjects gitObjects = new GVFSGitObjects(new GVFSContext(tracer, fileSystem, gitRepo, enlistment), objectRequestor); GitProcess.Result revParseResult = enlistment.CreateGitProcess().RevParse("HEAD"); if (revParseResult.ExitCodeIsFailure) { errorMessage = "Unable to determine HEAD commit id: " + revParseResult.Errors; return(false); } string headCommit = revParseResult.Output.TrimEnd('\n'); if (!this.TryDownloadCommit(headCommit, enlistment, objectRequestor, gitObjects, gitRepo, out errorMessage) || !this.TryDownloadRootGitAttributes(enlistment, gitObjects, gitRepo, out errorMessage)) { return(false); } } return(true); }, "Downloading git objects", suppressGvfsLogMessage: true)) { this.WriteMessage(tracer, errorMessage); return(false); } return(true); }
private bool TryPrefetchCommitsAndTrees(ITracer tracer, GVFSEnlistment enlistment, GitObjectsHttpRequestor objectRequestor)
{
    // Assemble the object store for this enlistment.
    PhysicalFileSystem fileSystem = new PhysicalFileSystem();
    GitRepo repo = new GitRepo(tracer, enlistment, fileSystem);
    GVFSContext context = new GVFSContext(tracer, fileSystem, repo, enlistment);
    GitObjects gitObjects = new GVFSGitObjects(context, objectRequestor);

    // Find the newest timestamp among the prefetch packs already on disk so
    // that only packs newer than it are requested; -1 means "fetch all".
    long max = -1;
    string[] packs = gitObjects.ReadPackFileNames(GVFSConstants.PrefetchPackPrefix);
    foreach (string pack in packs)
    {
        long? timestamp = this.GetTimestamp(pack);
        if (timestamp.HasValue && timestamp.Value > max)
        {
            max = timestamp.Value;
        }
    }

    return gitObjects.TryDownloadPrefetchPacks(max);
}
public void Init_Option_Add()
{
    using (var temp = new TempDir())
    {
        var repo = new GitRepo(m_log, temp.Path);

        // Two add-mode options with the same key should both be written,
        // in order, under a single [foo] section.
        repo.Init(new[]
        {
            new GitConfigOption("foo.bar", "blah1", add: true),
            new GitConfigOption("foo.bar", "blah2", add: true),
        });

        var configLines = File.ReadAllLines(temp.GetPath("config"));
        var sectionLine = FindSectionHeader(configLines, "foo");

        Assert.IsTrue(sectionLine >= 0);
        Assert.IsTrue(Regex.IsMatch(configLines[sectionLine + 1].Trim(), @"bar\s*=\s*blah1"));
        Assert.IsTrue(Regex.IsMatch(configLines[sectionLine + 2].Trim(), @"bar\s*=\s*blah2"));
    }
}
protected bool TryDownloadCommit(
    string commitId,
    GVFSEnlistment enlistment,
    GitObjectsHttpRequestor objectRequestor,
    GVFSGitObjects gitObjects,
    GitRepo repo,
    out string error,
    bool checkLocalObjectCache = true)
{
    // Skip the network round-trip when the local object cache already holds
    // both the commit and its root tree (unless the caller opted out).
    bool presentLocally = checkLocalObjectCache && repo.CommitAndRootTreeExists(commitId);

    if (!presentLocally && !gitObjects.TryDownloadCommit(commitId))
    {
        error = "Could not download commit " + commitId + " from: " + Uri.EscapeUriString(objectRequestor.CacheServer.ObjectsEndpointUrl);
        return false;
    }

    error = null;
    return true;
}
/// <summary>
/// Fetches the commit list for <paramref name="repo"/> from the configured
/// git server and projects it into lightweight <see cref="GitCommit"/> items.
/// </summary>
/// <param name="repo">Repository whose commits are requested.</param>
/// <returns>Message and SHA for each commit returned by the API.</returns>
/// <exception cref="Exception">Thrown when the HTTP request does not succeed.</exception>
private async Task<ICollection<GitCommit>> EnrichRepoWithCommitsAsync(GitRepo repo)
{
    // Fix: build the endpoint once and actually use it — the original
    // computed `url` and then rebuilt the identical string inline.
    var url = $"{option.Endpoint}/repos/{option.UserName}/{repo.Name}/commits";

    var response = await client.GetAsync(url);
    if (!response.IsSuccessStatusCode)
    {
        // Fix: missing space between "for" and the repo name in the message.
        throw new Exception("Unable to fetch commits for " + repo.Name);
    }

    var body = await response.Content.ReadAsStringAsync();
    var commitData = JsonConvert.DeserializeObject<IEnumerable<CommitData>>(body);

    return commitData
        .Select(data => new GitCommit { Message = data.Commit.Message, Sha = data.Sha })
        .ToList();
}
// Entry point for 'gvfs dehydrate': backs up the src folder and recreates a
// fresh virtualized working directory on top of the existing object store.
// Flow: set up tracing/logging; if MaintenanceJob is set (functional tests
// only) run that maintenance step and return; without --confirm print the
// experimental-feature warning and return; otherwise check git status,
// unmount, validate disk layout, load retry config, authenticate, initialize
// cache paths, back up files, re-download git objects, recreate the index,
// convert src to partial (must be last before mount), and remount.
// On backup failure it still attempts a remount so the repo stays usable.
protected override void Execute(GVFSEnlistment enlistment) { using (JsonTracer tracer = new JsonTracer(GVFSConstants.GVFSEtwProviderName, "Dehydrate")) { tracer.AddLogFileEventListener( GVFSEnlistment.GetNewGVFSLogFileName(enlistment.GVFSLogsRoot, GVFSConstants.LogFileTypes.Dehydrate), EventLevel.Informational, Keywords.Any); tracer.WriteStartEvent( enlistment.EnlistmentRoot, enlistment.RepoUrl, CacheServerResolver.GetUrlFromConfig(enlistment), new EventMetadata { { "Confirmed", this.Confirmed }, { "NoStatus", this.NoStatus }, { "NamedPipeName", enlistment.NamedPipeName }, { nameof(this.EnlistmentRootPathParameter), this.EnlistmentRootPathParameter }, }); // This is only intended to be run by functional tests if (this.MaintenanceJob != null) { this.InitializeLocalCacheAndObjectsPaths(tracer, enlistment, retryConfig: null, serverGVFSConfig: null, cacheServer: null); PhysicalFileSystem fileSystem = new PhysicalFileSystem(); using (GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem)) using (GVFSContext context = new GVFSContext(tracer, fileSystem, gitRepo, enlistment)) { switch (this.MaintenanceJob) { case "LooseObjects": (new LooseObjectsStep(context, forceRun: true)).Execute(); return; case "PackfileMaintenance": (new PackfileMaintenanceStep( context, forceRun: true, batchSize: this.PackfileMaintenanceBatchSize ?? PackfileMaintenanceStep.DefaultBatchSize)).Execute(); return; case "PostFetch": (new PostFetchStep(context, new System.Collections.Generic.List <string>(), requireObjectCacheLock: false)).Execute(); return; default: this.ReportErrorAndExit($"Unknown maintenance job requested: {this.MaintenanceJob}"); break; } } } if (!this.Confirmed) { this.Output.WriteLine( @"WARNING: THIS IS AN EXPERIMENTAL FEATURE Dehydrate will back up your src folder, and then create a new, empty src folder with a fresh virtualization of the repo. All of your downloaded objects, branches, and siblings of the src folder will be preserved. 
Your modified working directory files will be moved to the backup, and your new working directory will not have any of your uncommitted changes. Before you dehydrate, make sure you have committed any working directory changes you want to keep. If you choose not to, you can still find your uncommitted changes in the backup folder, but it will be harder to find them because 'git status' will not work in the backup. To actually execute the dehydrate, run 'gvfs dehydrate --confirm' from the parent of your enlistment's src folder. "); return; } this.CheckGitStatus(tracer, enlistment); string backupRoot = Path.GetFullPath(Path.Combine(enlistment.EnlistmentRoot, "dehydrate_backup", DateTime.Now.ToString("yyyyMMdd_HHmmss"))); this.Output.WriteLine(); this.WriteMessage(tracer, "Starting dehydration. All of your existing files will be backed up in " + backupRoot); this.WriteMessage(tracer, "WARNING: If you abort the dehydrate after this point, the repo may become corrupt"); this.Output.WriteLine(); this.Unmount(tracer); string error; if (!DiskLayoutUpgrade.TryCheckDiskLayoutVersion(tracer, enlistment.EnlistmentRoot, out error)) { this.ReportErrorAndExit(tracer, error); } RetryConfig retryConfig; if (!RetryConfig.TryLoadFromGitConfig(tracer, enlistment, out retryConfig, out error)) { this.ReportErrorAndExit(tracer, "Failed to determine GVFS timeout and max retries: " + error); } string errorMessage; if (!this.TryAuthenticate(tracer, enlistment, out errorMessage)) { this.ReportErrorAndExit(tracer, errorMessage); } // Local cache and objects paths are required for TryDownloadGitObjects this.InitializeLocalCacheAndObjectsPaths(tracer, enlistment, retryConfig, serverGVFSConfig: null, cacheServer: null); if (this.TryBackupFiles(tracer, enlistment, backupRoot)) { if (this.TryDownloadGitObjects(tracer, enlistment, retryConfig) && this.TryRecreateIndex(tracer, enlistment)) { // Converting the src folder to partial must be the final step before mount this.PrepareSrcFolder(tracer, 
enlistment); this.Mount(tracer); this.Output.WriteLine(); this.WriteMessage(tracer, "The repo was successfully dehydrated and remounted"); } } else { this.Output.WriteLine(); this.WriteMessage(tracer, "ERROR: Backup failed. We will attempt to mount, but you may need to reclone if that fails"); this.Mount(tracer); this.WriteMessage(tracer, "Dehydrate failed, but remounting succeeded"); } } }
public IRepo CreateSCM(string name, Project project)
{
    // Factory method: git is the only SCM implementation produced here.
    return new GitRepo(name, project);
}
// Entry point for 'gvfs dehydrate', folder-aware variant: either performs a
// full dehydrate (backup src, recreate virtualized working directory) or,
// when --folders is given, dehydrates only the listed folders in place.
// Flow: set up tracing/logging; if MaintenanceJob is set (functional tests
// only) run that step and return; without --confirm print the appropriate
// experimental warning (full vs. folders) and return; reject --no-status
// combined with --folders; check git status; for a full dehydrate load retry
// config, authenticate, initialize cache paths and run RunFullDehydrate;
// for the folder path split this.Folders, require a clean status, and call
// DehydrateFolders. Errors exit via ReportErrorAndExit.
protected override void Execute(GVFSEnlistment enlistment) { using (JsonTracer tracer = new JsonTracer(GVFSConstants.GVFSEtwProviderName, "Dehydrate")) { tracer.AddLogFileEventListener( GVFSEnlistment.GetNewGVFSLogFileName(enlistment.GVFSLogsRoot, GVFSConstants.LogFileTypes.Dehydrate), EventLevel.Informational, Keywords.Any); tracer.WriteStartEvent( enlistment.EnlistmentRoot, enlistment.RepoUrl, CacheServerResolver.GetUrlFromConfig(enlistment), new EventMetadata { { "Confirmed", this.Confirmed }, { "NoStatus", this.NoStatus }, { "NamedPipeName", enlistment.NamedPipeName }, { "Folders", this.Folders }, { nameof(this.EnlistmentRootPathParameter), this.EnlistmentRootPathParameter }, }); // This is only intended to be run by functional tests if (this.MaintenanceJob != null) { this.InitializeLocalCacheAndObjectsPaths(tracer, enlistment, retryConfig: null, serverGVFSConfig: null, cacheServer: null); PhysicalFileSystem fileSystem = new PhysicalFileSystem(); using (GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem)) using (GVFSContext context = new GVFSContext(tracer, fileSystem, gitRepo, enlistment)) { switch (this.MaintenanceJob) { case "LooseObjects": (new LooseObjectsStep(context, forceRun: true)).Execute(); return; case "PackfileMaintenance": (new PackfileMaintenanceStep( context, forceRun: true, batchSize: this.PackfileMaintenanceBatchSize ?? PackfileMaintenanceStep.DefaultBatchSize)).Execute(); return; case "PostFetch": (new PostFetchStep(context, new System.Collections.Generic.List <string>(), requireObjectCacheLock: false)).Execute(); return; default: this.ReportErrorAndExit($"Unknown maintenance job requested: {this.MaintenanceJob}"); break; } } } bool fullDehydrate = string.IsNullOrEmpty(this.Folders); if (!this.Confirmed && fullDehydrate) { this.Output.WriteLine( @"WARNING: THIS IS AN EXPERIMENTAL FEATURE Dehydrate will back up your src folder, and then create a new, empty src folder with a fresh virtualization of the repo. 
All of your downloaded objects, branches, and siblings of the src folder will be preserved. Your modified working directory files will be moved to the backup, and your new working directory will not have any of your uncommitted changes. Before you dehydrate, make sure you have committed any working directory changes you want to keep. If you choose not to, you can still find your uncommitted changes in the backup folder, but it will be harder to find them because 'git status' will not work in the backup. To actually execute the dehydrate, run 'gvfs dehydrate --confirm' from the parent of your enlistment's src folder. "); return; } else if (!this.Confirmed) { this.Output.WriteLine( @"WARNING: THIS IS AN EXPERIMENTAL FEATURE All of your downloaded objects, branches, and siblings of the src folder will be preserved. This will remove the folders specified and any working directory files and folders even if ignored by git similar to 'git clean -xdf <path>'. Before you dehydrate, you will have to commit any working directory changes you want to keep and have a clean 'git status'. To actually execute the dehydrate, run 'gvfs dehydrate --confirm --folders <folder list>' from a parent of the folders list. "); return; } if (this.NoStatus && !fullDehydrate) { this.ReportErrorAndExit(tracer, "Dehydrate --no-status not valid with --folders"); return; } bool cleanStatus = this.CheckGitStatus(tracer, enlistment, fullDehydrate); string backupRoot = Path.GetFullPath(Path.Combine(enlistment.EnlistmentRoot, "dehydrate_backup", DateTime.Now.ToString("yyyyMMdd_HHmmss"))); this.Output.WriteLine(); if (fullDehydrate) { this.WriteMessage(tracer, $"Starting {this.RunningVerbName}. 
All of your existing files will be backed up in " + backupRoot); } this.WriteMessage(tracer, $"WARNING: If you abort the {this.RunningVerbName} after this point, the repo may become corrupt"); this.Output.WriteLine(); this.Unmount(tracer); string error; if (!DiskLayoutUpgrade.TryCheckDiskLayoutVersion(tracer, enlistment.EnlistmentRoot, out error)) { this.ReportErrorAndExit(tracer, error); } if (fullDehydrate) { RetryConfig retryConfig; if (!RetryConfig.TryLoadFromGitConfig(tracer, enlistment, out retryConfig, out error)) { this.ReportErrorAndExit(tracer, "Failed to determine GVFS timeout and max retries: " + error); } string errorMessage; if (!this.TryAuthenticate(tracer, enlistment, out errorMessage)) { this.ReportErrorAndExit(tracer, errorMessage); } // Local cache and objects paths are required for TryDownloadGitObjects this.InitializeLocalCacheAndObjectsPaths(tracer, enlistment, retryConfig, serverGVFSConfig: null, cacheServer: null); this.RunFullDehydrate(tracer, enlistment, backupRoot, retryConfig); } else { string[] folders = this.Folders.Split(new[] { FolderListSeparator }, StringSplitOptions.RemoveEmptyEntries); if (folders.Length > 0) { if (cleanStatus) { this.DehydrateFolders(tracer, enlistment, folders); } else { this.ReportErrorAndExit($"Cannot {this.ActionName}: must have a clean git status."); } } else { this.ReportErrorAndExit($"No folders to {this.ActionName}."); } } } }
public static Uri GetArchiveUrl(GitRepo gitRepo)
{
    // GitHub serves branch snapshots at /{owner}/{name}/archive/{branch}.zip
    var address = string.Concat(
        "https://github.com/",
        gitRepo.Owner, "/",
        gitRepo.Name, "/archive/",
        gitRepo.Branch, ".zip");

    return new Uri(address);
}
// Performs the local half of 'gvfs clone' after the refs have been fetched:
// init the bare repo, configure the alternates file, download the tip commit
// of the requested branch, apply required/optional git config, persist the
// cache server URL, create the local branch with its origin upstream, write
// HEAD, append .gitattributes to the sparse-checkout file, download the root
// .gitattributes, install hooks, force-checkout the branch (filtering
// checkout errors down to real clone failures), initialize and save repo
// metadata, and finally prepare the working directory for kernel-driver
// callbacks — intentionally last so a failed clone cannot be mounted.
// Returns a failed Result with a message at the first step that fails.
private Result CreateClone( ITracer tracer, GVFSEnlistment enlistment, GitObjectsHttpRequestor objectRequestor, GitRefs refs, string branch) { Result initRepoResult = this.TryInitRepo(tracer, refs, enlistment); if (!initRepoResult.Success) { return(initRepoResult); } PhysicalFileSystem fileSystem = new PhysicalFileSystem(); string errorMessage; if (!this.TryCreateAlternatesFile(fileSystem, enlistment, out errorMessage)) { return(new Result("Error configuring alternate: " + errorMessage)); } GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem); GVFSContext context = new GVFSContext(tracer, fileSystem, gitRepo, enlistment); GVFSGitObjects gitObjects = new GVFSGitObjects(context, objectRequestor); if (!this.TryDownloadCommit( refs.GetTipCommitId(branch), enlistment, objectRequestor, gitObjects, gitRepo, out errorMessage)) { return(new Result(errorMessage)); } if (!GVFSVerb.TrySetRequiredGitConfigSettings(enlistment) || !GVFSVerb.TrySetOptionalGitConfigSettings(enlistment)) { return(new Result("Unable to configure git repo")); } CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment); if (!cacheServerResolver.TrySaveUrlToLocalConfig(objectRequestor.CacheServer, out errorMessage)) { return(new Result("Unable to configure cache server: " + errorMessage)); } GitProcess git = new GitProcess(enlistment); string originBranchName = "origin/" + branch; GitProcess.Result createBranchResult = git.CreateBranchWithUpstream(branch, originBranchName); if (createBranchResult.HasErrors) { return(new Result("Unable to create branch '" + originBranchName + "': " + createBranchResult.Errors + "\r\n" + createBranchResult.Output)); } File.WriteAllText( Path.Combine(enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Head), "ref: refs/heads/" + branch); File.AppendAllText( Path.Combine(enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Info.SparseCheckoutPath), GVFSConstants.GitPathSeparatorString + GVFSConstants.SpecialGitFiles.GitAttributes + 
"\n"); if (!this.TryDownloadRootGitAttributes(enlistment, gitObjects, gitRepo, out errorMessage)) { return(new Result(errorMessage)); } this.CreateGitScript(enlistment); string installHooksError; if (!HooksInstaller.InstallHooks(context, out installHooksError)) { tracer.RelatedError(installHooksError); return(new Result(installHooksError)); } GitProcess.Result forceCheckoutResult = git.ForceCheckout(branch); if (forceCheckoutResult.HasErrors) { string[] errorLines = forceCheckoutResult.Errors.Split('\n'); StringBuilder checkoutErrors = new StringBuilder(); foreach (string gitError in errorLines) { if (IsForceCheckoutErrorCloneFailure(gitError)) { checkoutErrors.AppendLine(gitError); } } if (checkoutErrors.Length > 0) { string error = "Could not complete checkout of branch: " + branch + ", " + checkoutErrors.ToString(); tracer.RelatedError(error); return(new Result(error)); } } if (!RepoMetadata.TryInitialize(tracer, enlistment.DotGVFSRoot, out errorMessage)) { tracer.RelatedError(errorMessage); return(new Result(errorMessage)); } try { RepoMetadata.Instance.SaveCloneMetadata(tracer, enlistment); this.LogEnlistmentInfoAndSetConfigValues(tracer, git, enlistment); } catch (Exception e) { tracer.RelatedError(e.ToString()); return(new Result(e.Message)); } finally { RepoMetadata.Shutdown(); } // Prepare the working directory folder for GVFS last to ensure that gvfs mount will fail if gvfs clone has failed string prepFileSystemError; if (!GVFSPlatform.Instance.KernelDriver.TryPrepareFolderForCallbacks(enlistment.WorkingDirectoryRoot, out prepFileSystemError)) { tracer.RelatedError(prepFileSystemError); return(new Result(prepFileSystemError)); } return(new Result(true)); }
// Newer variant of the 'gvfs clone' local setup (uses ExitCodeIsFailure and
// exception-aware TryPrepareFolderForCallbacks). Sequence: init repo,
// configure alternates, download branch tip commit, apply git config, save
// cache server URL, create the local branch with origin upstream, write
// HEAD, download root .gitattributes, install hooks, then force-checkout.
// If checkout fails with "unable to read tree" the tip commit is
// re-downloaded bypassing the local object cache (the read-object hook is
// not available yet at this point) and checkout is retried once. Remaining
// checkout errors are filtered to real clone failures before failing.
// Finishes by saving repo metadata and preparing the working directory for
// kernel callbacks — intentionally last so a failed clone cannot be mounted.
private Result CreateClone( ITracer tracer, GVFSEnlistment enlistment, GitObjectsHttpRequestor objectRequestor, GitRefs refs, string branch) { Result initRepoResult = this.TryInitRepo(tracer, refs, enlistment); if (!initRepoResult.Success) { return(initRepoResult); } PhysicalFileSystem fileSystem = new PhysicalFileSystem(); string errorMessage; if (!this.TryCreateAlternatesFile(fileSystem, enlistment, out errorMessage)) { return(new Result("Error configuring alternate: " + errorMessage)); } GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem); GVFSContext context = new GVFSContext(tracer, fileSystem, gitRepo, enlistment); GVFSGitObjects gitObjects = new GVFSGitObjects(context, objectRequestor); if (!this.TryDownloadCommit( refs.GetTipCommitId(branch), enlistment, objectRequestor, gitObjects, gitRepo, out errorMessage)) { return(new Result(errorMessage)); } if (!GVFSVerb.TrySetRequiredGitConfigSettings(enlistment) || !GVFSVerb.TrySetOptionalGitConfigSettings(enlistment)) { return(new Result("Unable to configure git repo")); } CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment); if (!cacheServerResolver.TrySaveUrlToLocalConfig(objectRequestor.CacheServer, out errorMessage)) { return(new Result("Unable to configure cache server: " + errorMessage)); } GitProcess git = new GitProcess(enlistment); string originBranchName = "origin/" + branch; GitProcess.Result createBranchResult = git.CreateBranchWithUpstream(branch, originBranchName); if (createBranchResult.ExitCodeIsFailure) { return(new Result("Unable to create branch '" + originBranchName + "': " + createBranchResult.Errors + "\r\n" + createBranchResult.Output)); } File.WriteAllText( Path.Combine(enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Head), "ref: refs/heads/" + branch); if (!this.TryDownloadRootGitAttributes(enlistment, gitObjects, gitRepo, out errorMessage)) { return(new Result(errorMessage)); } this.CreateGitScript(enlistment); string installHooksError; if 
(!HooksInstaller.InstallHooks(context, out installHooksError)) { tracer.RelatedError(installHooksError); return(new Result(installHooksError)); } GitProcess.Result forceCheckoutResult = git.ForceCheckout(branch); if (forceCheckoutResult.ExitCodeIsFailure && forceCheckoutResult.Errors.IndexOf("unable to read tree") > 0) { // It is possible to have the above TryDownloadCommit() fail because we // already have the commit and root tree we intend to check out, but // don't have a tree further down the working directory. If we fail // checkout here, its' because we don't have these trees and the // read-object hook is not available yet. Force downloading the commit // again and retry the checkout. if (!this.TryDownloadCommit( refs.GetTipCommitId(branch), enlistment, objectRequestor, gitObjects, gitRepo, out errorMessage, checkLocalObjectCache: false)) { return(new Result(errorMessage)); } forceCheckoutResult = git.ForceCheckout(branch); } if (forceCheckoutResult.ExitCodeIsFailure) { string[] errorLines = forceCheckoutResult.Errors.Split('\n'); StringBuilder checkoutErrors = new StringBuilder(); foreach (string gitError in errorLines) { if (IsForceCheckoutErrorCloneFailure(gitError)) { checkoutErrors.AppendLine(gitError); } } if (checkoutErrors.Length > 0) { string error = "Could not complete checkout of branch: " + branch + ", " + checkoutErrors.ToString(); tracer.RelatedError(error); return(new Result(error)); } } if (!RepoMetadata.TryInitialize(tracer, enlistment.DotGVFSRoot, out errorMessage)) { tracer.RelatedError(errorMessage); return(new Result(errorMessage)); } try { RepoMetadata.Instance.SaveCloneMetadata(tracer, enlistment); this.LogEnlistmentInfoAndSetConfigValues(tracer, git, enlistment); } catch (Exception e) { tracer.RelatedError(e.ToString()); return(new Result(e.Message)); } finally { RepoMetadata.Shutdown(); } // Prepare the working directory folder for GVFS last to ensure that gvfs mount will fail if gvfs clone has failed Exception exception; string 
prepFileSystemError; if (!GVFSPlatform.Instance.KernelDriver.TryPrepareFolderForCallbacks(enlistment.WorkingDirectoryRoot, out prepFileSystemError, out exception)) { EventMetadata metadata = new EventMetadata(); metadata.Add(nameof(prepFileSystemError), prepFileSystemError); if (exception != null) { metadata.Add("Exception", exception.ToString()); } tracer.RelatedError(metadata, $"{nameof(this.CreateClone)}: TryPrepareFolderForCallbacks failed"); return(new Result(prepFileSystemError)); } return(new Result(true)); }
// Entry point for 'gsd mount': installs hooks, sets up tracing, then (unless
// SkipVersionCheck) authenticates — mount proceeds on auth failure with a
// warning, since cached files remain readable — queries the server GSD
// config, validates client versions, and resolves the cache server name.
// It then initializes local cache/objects paths, runs pre-mount validation
// behind a status spinner, logs enlistment info into repo metadata, performs
// the actual mount, and finally (when not unattended) registers the
// enlistment for automount; automount registration failure is a warning,
// not fatal. errorMessage/mountExecutableLocation are captured by the
// status-runner lambdas, so their values survive into the error paths.
protected override void Execute(GSDEnlistment enlistment) { string errorMessage = null; string mountExecutableLocation = null; using (JsonTracer tracer = new JsonTracer(GSDConstants.GSDEtwProviderName, "ExecuteMount")) { PhysicalFileSystem fileSystem = new PhysicalFileSystem(); GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem); GSDContext context = new GSDContext(tracer, fileSystem, gitRepo, enlistment); if (!HooksInstaller.InstallHooks(context, out errorMessage)) { this.ReportErrorAndExit("Error installing hooks: " + errorMessage); } CacheServerInfo cacheServer = this.ResolvedCacheServer ?? CacheServerResolver.GetCacheServerFromConfig(enlistment); tracer.AddLogFileEventListener( GSDEnlistment.GetNewGSDLogFileName(enlistment.GSDLogsRoot, GSDConstants.LogFileTypes.MountVerb), EventLevel.Verbose, Keywords.Any); tracer.WriteStartEvent( enlistment.EnlistmentRoot, enlistment.RepoUrl, cacheServer.Url, new EventMetadata { { "Unattended", this.Unattended }, { "IsElevated", GSDPlatform.Instance.IsElevated() }, { "NamedPipeName", enlistment.NamedPipeName }, { nameof(this.EnlistmentRootPathParameter), this.EnlistmentRootPathParameter }, }); RetryConfig retryConfig = null; ServerGSDConfig serverGSDConfig = this.DownloadedGSDConfig; if (!this.SkipVersionCheck) { string authErrorMessage; if (!this.TryAuthenticate(tracer, enlistment, out authErrorMessage)) { this.Output.WriteLine("    WARNING: " + authErrorMessage); this.Output.WriteLine("    Mount will proceed, but new files cannot be accessed until GSD can authenticate."); } if (serverGSDConfig == null) { if (retryConfig == null) { retryConfig = this.GetRetryConfig(tracer, enlistment); } serverGSDConfig = this.QueryGSDConfig(tracer, enlistment, retryConfig); } this.ValidateClientVersions(tracer, enlistment, serverGSDConfig, showWarnings: true); CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment); cacheServer = cacheServerResolver.ResolveNameFromRemote(cacheServer.Url, serverGSDConfig); 
this.Output.WriteLine("Configured cache server: " + cacheServer); } this.InitializeLocalCacheAndObjectsPaths(tracer, enlistment, retryConfig, serverGSDConfig, cacheServer); if (!this.ShowStatusWhileRunning( () => { return(this.PerformPreMountValidation(tracer, enlistment, out mountExecutableLocation, out errorMessage)); }, "Validating repo")) { this.ReportErrorAndExit(tracer, errorMessage); } if (!this.SkipVersionCheck) { string error; if (!RepoMetadata.TryInitialize(tracer, enlistment.DotGSDRoot, out error)) { this.ReportErrorAndExit(tracer, error); } try { GitProcess git = new GitProcess(enlistment); this.LogEnlistmentInfoAndSetConfigValues(tracer, git, enlistment); } finally { RepoMetadata.Shutdown(); } } if (!this.ShowStatusWhileRunning( () => { return(this.TryMount(tracer, enlistment, mountExecutableLocation, out errorMessage)); }, "Mounting")) { this.ReportErrorAndExit(tracer, errorMessage); } if (!this.Unattended) { tracer.RelatedInfo($"{nameof(this.Execute)}: Registering for automount"); if (this.ShowStatusWhileRunning( () => { return(this.RegisterMount(enlistment, out errorMessage)); }, "Registering for automount")) { tracer.RelatedInfo($"{nameof(this.Execute)}: Registered for automount"); } else { this.Output.WriteLine("    WARNING: " + errorMessage); tracer.RelatedInfo($"{nameof(this.Execute)}: Failed to register for automount"); } } } }
// GET: /<controller>/
public IActionResult Index()
{
    // Render the default view over every known repository.
    var allRepos = GitRepo.GetGitRepos();
    return View(allRepos);
}
// Older CloneVerb variant of the local clone setup. Sequence: init repo,
// configure alternates, ensure the branch tip commit is local (depth 2),
// apply git config, persist the cache server URL, create the local branch
// with its origin upstream, write HEAD, append .gitattributes to the
// sparse-checkout file, hydrate the root .gitattributes, create the git
// script, force-checkout the branch (filtering checkout errors down to real
// clone failures), upgrade the index to version 4, install hooks, save the
// disk layout version, and finally prepare the working directory for GVFlt
// callbacks — intentionally last so a failed clone cannot be mounted.
// Returns a failed CloneVerb.Result at the first step that fails.
public CloneVerb.Result CreateClone(GitRefs refs, string branch) { CloneVerb.Result initRepoResult = this.TryInitRepo(refs, this.enlistment); if (!initRepoResult.Success) { return(initRepoResult); } string errorMessage; if (!this.enlistment.TryConfigureAlternate(out errorMessage)) { return(new CloneVerb.Result("Error configuring alternate: " + errorMessage)); } PhysicalFileSystem fileSystem = new PhysicalFileSystem(); GitRepo gitRepo = new GitRepo(this.tracer, this.enlistment, fileSystem); GVFSGitObjects gitObjects = new GVFSGitObjects(new GVFSContext(this.tracer, fileSystem, gitRepo, this.enlistment), this.objectRequestor); if (!gitObjects.TryEnsureCommitIsLocal(refs.GetTipCommitId(branch), commitDepth: 2)) { return(new CloneVerb.Result("Could not download tip commits from: " + Uri.EscapeUriString(this.objectRequestor.CacheServer.ObjectsEndpointUrl))); } if (!GVFSVerb.TrySetGitConfigSettings(this.enlistment)) { return(new CloneVerb.Result("Unable to configure git repo")); } CacheServerResolver cacheServerResolver = new CacheServerResolver(this.tracer, this.enlistment); if (!cacheServerResolver.TrySaveUrlToLocalConfig(this.objectRequestor.CacheServer, out errorMessage)) { return(new CloneVerb.Result("Unable to configure cache server: " + errorMessage)); } GitProcess git = new GitProcess(this.enlistment); string originBranchName = "origin/" + branch; GitProcess.Result createBranchResult = git.CreateBranchWithUpstream(branch, originBranchName); if (createBranchResult.HasErrors) { return(new CloneVerb.Result("Unable to create branch '" + originBranchName + "': " + createBranchResult.Errors + "\r\n" + createBranchResult.Output)); } File.WriteAllText( Path.Combine(this.enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Head), "ref: refs/heads/" + branch); File.AppendAllText( Path.Combine(this.enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Info.SparseCheckoutPath), GVFSConstants.GitPathSeparatorString + GVFSConstants.SpecialGitFiles.GitAttributes + "\n"); 
CloneVerb.Result hydrateResult = this.HydrateRootGitAttributes(gitObjects, gitRepo, branch); if (!hydrateResult.Success) { return(hydrateResult); } this.CreateGitScript(); GitProcess.Result forceCheckoutResult = git.ForceCheckout(branch); if (forceCheckoutResult.HasErrors) { string[] errorLines = forceCheckoutResult.Errors.Split('\n'); StringBuilder checkoutErrors = new StringBuilder(); foreach (string gitError in errorLines) { if (IsForceCheckoutErrorCloneFailure(gitError)) { checkoutErrors.AppendLine(gitError); } } if (checkoutErrors.Length > 0) { string error = "Could not complete checkout of branch: " + branch + ", " + checkoutErrors.ToString(); this.tracer.RelatedError(error); return(new CloneVerb.Result(error)); } } GitProcess.Result updateIndexresult = git.UpdateIndexVersion4(); if (updateIndexresult.HasErrors) { string error = "Could not update index, error: " + updateIndexresult.Errors; this.tracer.RelatedError(error); return(new CloneVerb.Result(error)); } string installHooksError; if (!HooksInstaller.InstallHooks(this.enlistment, out installHooksError)) { this.tracer.RelatedError(installHooksError); return(new CloneVerb.Result(installHooksError)); } if (!RepoMetadata.TryInitialize(this.tracer, this.enlistment.DotGVFSRoot, out errorMessage)) { this.tracer.RelatedError(errorMessage); return(new CloneVerb.Result(errorMessage)); } try { RepoMetadata.Instance.SaveCurrentDiskLayoutVersion(); } catch (Exception e) { this.tracer.RelatedError(e.ToString()); return(new CloneVerb.Result(e.Message)); } finally { RepoMetadata.Shutdown(); } // Prepare the working directory folder for GVFS last to ensure that gvfs mount will fail if gvfs clone has failed string prepGVFltError; if (!GVFltCallbacks.TryPrepareFolderForGVFltCallbacks(this.enlistment.WorkingDirectoryRoot, out prepGVFltError)) { this.tracer.RelatedError(prepGVFltError); return(new CloneVerb.Result(prepGVFltError)); } return(new CloneVerb.Result(true)); }
/// <summary>
/// Prefetches commit and tree packs for the enlistment, then makes a
/// best-effort request to the mounted GVFS process (over its named pipe)
/// to run MIDX and commit-graph writes for the downloaded pack indexes.
/// Exits the process via ReportErrorAndExit if prefetching fails.
/// </summary>
private void PrefetchCommits(ITracer tracer, GVFSEnlistment enlistment, GitObjectsHttpRequestor objectRequestor, CacheServerInfo cacheServer)
{
    bool success;
    string error = string.Empty;
    PhysicalFileSystem fileSystem = new PhysicalFileSystem();
    GitRepo repo = new GitRepo(tracer, enlistment, fileSystem);
    GVFSContext context = new GVFSContext(tracer, fileSystem, repo, enlistment);
    GitObjects gitObjects = new GVFSGitObjects(context, objectRequestor);
    List<string> packIndexes = null;

    // Verbose mode runs inline; otherwise the fetch runs behind the spinner.
    if (this.Verbose)
    {
        success = this.TryPrefetchCommitsAndTrees(tracer, enlistment, fileSystem, gitObjects, out error, out packIndexes);
    }
    else
    {
        success = this.ShowStatusWhileRunning(
            () => this.TryPrefetchCommitsAndTrees(tracer, enlistment, fileSystem, gitObjects, out error, out packIndexes),
            "Fetching commits and trees " + this.GetCacheServerDisplay(cacheServer));
    }

    if (!success)
    {
        this.ReportErrorAndExit(tracer, "Prefetching commits and trees failed: " + error);
        return;
    }

    if (packIndexes.Count == 0)
    {
        return;
    }

    // We make a best-effort request to run MIDX and commit-graph writes;
    // every failure below is logged as a warning rather than surfaced.
    using (NamedPipeClient pipeClient = new NamedPipeClient(enlistment.NamedPipeName))
    {
        if (!pipeClient.Connect())
        {
            tracer.RelatedWarning(
                metadata: null,
                message: "Failed to connect to GVFS. Skipping post-fetch job request.",
                keywords: Keywords.Telemetry);
            return;
        }

        NamedPipeMessages.RunPostFetchJob.Request request = new NamedPipeMessages.RunPostFetchJob.Request(packIndexes);
        if (!pipeClient.TrySendRequest(request.CreateMessage()))
        {
            tracer.RelatedWarning(
                metadata: null,
                message: "Message to named pipe failed to send, skipping post-fetch job request.",
                keywords: Keywords.Telemetry);
            return;
        }

        NamedPipeMessages.Message response;
        if (pipeClient.TryReadResponse(out response))
        {
            // Fixed typo in trace message: "resonse" -> "response".
            tracer.RelatedInfo("Requested post-fetch job with response '{0}'", response.Header);
        }
        else
        {
            tracer.RelatedWarning(
                metadata: null,
                message: "Requested post-fetch job failed to respond",
                keywords: Keywords.Telemetry);
        }
    }
}
/// <summary>
/// Ensures the root-level .gitattributes blob of HEAD is available locally,
/// downloading it from the object requestor when missing.
/// </summary>
/// <param name="enlistment">Enlistment whose HEAD is inspected.</param>
/// <param name="gitObjects">Used to download the missing blob.</param>
/// <param name="repo">Used to check whether the blob already exists locally.</param>
/// <param name="error">Failure description when false is returned; null on success.</param>
/// <returns>true when the .gitattributes object is available locally; false otherwise.</returns>
protected bool TryDownloadRootGitAttributes(GVFSEnlistment enlistment, GVFSGitObjects gitObjects, GitRepo repo, out string error)
{
    // List only the root tree entries of HEAD (non-recursive) to locate .gitattributes.
    List<DiffTreeResult> rootEntries = new List<DiffTreeResult>();
    GitProcess git = new GitProcess(enlistment);
    GitProcess.Result result = git.LsTree(
        GVFSConstants.DotGit.HeadName,
        line => rootEntries.Add(DiffTreeResult.ParseFromLsTreeLine(line, repoRoot: string.Empty)),
        recursive: false);

    if (result.ExitCodeIsFailure)
    {
        error = "Error returned from ls-tree to find " + GVFSConstants.SpecialGitFiles.GitAttributes + " file: " + result.Errors;
        return false;
    }

    // Explicit ordinal comparison (same semantics as the parameterless
    // string.Equals overload, stated explicitly per CA1307).
    DiffTreeResult gitAttributes = rootEntries.FirstOrDefault(
        entry => entry.TargetPath.Equals(GVFSConstants.SpecialGitFiles.GitAttributes, StringComparison.Ordinal));
    if (gitAttributes == null)
    {
        error = "This branch does not contain a " + GVFSConstants.SpecialGitFiles.GitAttributes + " file in the root folder. This file is required by GVFS clone";
        return false;
    }

    // Only download when the blob is not already in the local object store.
    if (!repo.ObjectExists(gitAttributes.TargetSha))
    {
        if (gitObjects.TryDownloadAndSaveObject(gitAttributes.TargetSha, GVFSGitObjects.RequestSource.GVFSVerb) != GitObjects.DownloadAndSaveObjectResult.Success)
        {
            error = "Could not download " + GVFSConstants.SpecialGitFiles.GitAttributes + " file";
            return false;
        }
    }

    error = null;
    return true;
}
/// <summary>
/// Renders the repository list view backed by all known repositories.
/// </summary>
public async Task<IActionResult> Index()
{
    return this.View(await GitRepo.GetReposAsync());
}
/// <summary>
/// Renders the detail view for a single repository looked up by name.
/// </summary>
/// <param name="repoName">Name of the repository to display.</param>
public async Task<IActionResult> Index(string repoName)
{
    return this.View(await GitRepo.GetRepoAsync(repoName));
}
/// <summary>
/// Entry point for the mount verb: installs hooks, verifies the kernel
/// driver, performs version/config checks against the server, validates the
/// repo, mounts it, and (when supported) registers it for automount.
/// </summary>
/// <param name="enlistment">The enlistment to mount.</param>
protected override void Execute(GVFSEnlistment enlistment)
{
    string errorMessage = null;
    string mountExecutableLocation = null;
    using (JsonTracer tracer = new JsonTracer(GVFSConstants.GVFSEtwProviderName, "PreMount"))
    {
        PhysicalFileSystem fileSystem = new PhysicalFileSystem();
        GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem);
        GVFSContext context = new GVFSContext(tracer, fileSystem, gitRepo, enlistment);

        // Hook installation failure is fatal: git commands would not work once mounted.
        if (!HooksInstaller.InstallHooks(context, out errorMessage))
        {
            this.ReportErrorAndExit("Error installing hooks: " + errorMessage);
        }

        // Prefer an explicitly resolved cache server; fall back to git config.
        CacheServerInfo cacheServer = this.ResolvedCacheServer ?? CacheServerResolver.GetCacheServerFromConfig(enlistment);

        tracer.AddLogFileEventListener(
            GVFSEnlistment.GetNewGVFSLogFileName(enlistment.GVFSLogsRoot, GVFSConstants.LogFileTypes.MountVerb),
            EventLevel.Verbose,
            Keywords.Any);
        tracer.WriteStartEvent(
            enlistment.EnlistmentRoot,
            enlistment.RepoUrl,
            cacheServer.Url,
            new EventMetadata
            {
                { "Unattended", this.Unattended },
                { "IsElevated", GVFSPlatform.Instance.IsElevated() },
                { "NamedPipeName", enlistment.NamedPipeName },
                { nameof(this.EnlistmentRootPathParameter), this.EnlistmentRootPathParameter },
            });

        if (!GVFSPlatform.Instance.KernelDriver.IsReady(tracer, enlistment.EnlistmentRoot, out errorMessage))
        {
            // Driver not ready: when a service is available, ask it to enable and
            // attach the filter driver; otherwise this is fatal.
            if (GVFSPlatform.Instance.UnderConstruction.SupportsGVFSService)
            {
                tracer.RelatedEvent(
                    EventLevel.Informational,
                    $"{nameof(MountVerb)}_{nameof(this.Execute)}_EnablingKernelDriverViaService",
                    new EventMetadata
                    {
                        { "KernelDriver.IsReady_Error", errorMessage },
                        { TracingConstants.MessageKey.InfoMessage, "Service will retry" }
                    });
                if (!this.ShowStatusWhileRunning(
                    () => { return (this.TryEnableAndAttachPrjFltThroughService(enlistment.EnlistmentRoot, out errorMessage)); },
                    $"Attaching ProjFS to volume"))
                {
                    this.ReportErrorAndExit(tracer, ReturnCode.FilterError, errorMessage);
                }
            }
            else
            {
                tracer.RelatedEvent(
                    EventLevel.Informational,
                    $"{nameof(MountVerb)}_{nameof(this.Execute)}",
                    new EventMetadata
                    {
                        { "KernelDriver.IsReady_Error", errorMessage },
                    });
                this.ReportErrorAndExit(tracer, ReturnCode.FilterError, errorMessage);
            }
        }

        RetryConfig retryConfig = null;
        ServerGVFSConfig serverGVFSConfig = this.DownloadedGVFSConfig;
        if (!this.SkipVersionCheck)
        {
            // Authentication failure is not fatal here; mount proceeds degraded.
            string authErrorMessage;
            if (!this.TryAuthenticate(tracer, enlistment, out authErrorMessage))
            {
                this.Output.WriteLine(" WARNING: " + authErrorMessage);
                this.Output.WriteLine(" Mount will proceed, but new files cannot be accessed until GVFS can authenticate.");
            }

            // Query the server's GVFS config only when it wasn't pre-downloaded.
            if (serverGVFSConfig == null)
            {
                if (retryConfig == null)
                {
                    retryConfig = this.GetRetryConfig(tracer, enlistment);
                }

                serverGVFSConfig = this.QueryGVFSConfig(tracer, enlistment, retryConfig);
            }

            this.ValidateClientVersions(tracer, enlistment, serverGVFSConfig, showWarnings: true);

            // Re-resolve the cache server name against the server-supplied config.
            CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment);
            cacheServer = cacheServerResolver.ResolveNameFromRemote(cacheServer.Url, serverGVFSConfig);
            this.Output.WriteLine("Configured cache server: " + cacheServer);
        }

        this.InitializeLocalCacheAndObjectsPaths(tracer, enlistment, retryConfig, serverGVFSConfig, cacheServer);

        if (!this.ShowStatusWhileRunning(
            () => { return (this.PerformPreMountValidation(tracer, enlistment, out mountExecutableLocation, out errorMessage)); },
            "Validating repo"))
        {
            this.ReportErrorAndExit(tracer, errorMessage);
        }

        if (!this.SkipVersionCheck)
        {
            string error;
            if (!RepoMetadata.TryInitialize(tracer, enlistment.DotGVFSRoot, out error))
            {
                this.ReportErrorAndExit(tracer, error);
            }

            try
            {
                GitProcess git = new GitProcess(enlistment);
                this.LogEnlistmentInfoAndSetConfigValues(tracer, git, enlistment);
            }
            finally
            {
                // Always release the metadata singleton, even if logging throws.
                RepoMetadata.Shutdown();
            }
        }

        if (!this.ShowStatusWhileRunning(
            () => { return (this.TryMount(tracer, enlistment, mountExecutableLocation, out errorMessage)); },
            "Mounting"))
        {
            this.ReportErrorAndExit(errorMessage);
        }
    }

    // Automount registration failure is only a warning; the mount itself succeeded.
    if (!this.Unattended && GVFSPlatform.Instance.UnderConstruction.SupportsGVFSService)
    {
        if (!this.ShowStatusWhileRunning(
            () => { return (this.RegisterMount(enlistment, out errorMessage)); },
            "Registering for automount"))
        {
            this.Output.WriteLine(" WARNING: " + errorMessage);
        }
    }
}
/// <summary>
/// Creates the local clone: initializes the repo, configures the alternates
/// file and cache server, downloads the tip commit and root .gitattributes,
/// creates the local branch with upstream, and performs the initial checkout.
/// </summary>
/// <param name="tracer">Tracer for logging.</param>
/// <param name="enlistment">Enlistment being cloned into.</param>
/// <param name="objectRequestor">HTTP requestor used to download git objects.</param>
/// <param name="refs">Remote refs; supplies the tip commit id for the branch.</param>
/// <param name="branch">Branch to create and check out.</param>
/// <returns>A Result describing success or the first failure encountered.</returns>
private Result CreateClone(
    ITracer tracer,
    GSDEnlistment enlistment,
    GitObjectsHttpRequestor objectRequestor,
    GitRefs refs,
    string branch)
{
    Result initRepoResult = this.TryInitRepo(tracer, refs, enlistment);
    if (!initRepoResult.Success)
    {
        return initRepoResult;
    }

    PhysicalFileSystem fileSystem = new PhysicalFileSystem();
    string errorMessage;
    if (!this.TryCreateAlternatesFile(fileSystem, enlistment, out errorMessage))
    {
        return new Result("Error configuring alternate: " + errorMessage);
    }

    GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem);
    GSDContext context = new GSDContext(tracer, fileSystem, gitRepo, enlistment);
    GSDGitObjects gitObjects = new GSDGitObjects(context, objectRequestor);

    if (!this.TryDownloadCommit(
        refs.GetTipCommitId(branch),
        enlistment,
        objectRequestor,
        gitObjects,
        gitRepo,
        out errorMessage))
    {
        return new Result(errorMessage);
    }

    if (!GSDVerb.TrySetRequiredGitConfigSettings(enlistment) ||
        !GSDVerb.TrySetOptionalGitConfigSettings(enlistment))
    {
        return new Result("Unable to configure git repo");
    }

    CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment);
    if (!cacheServerResolver.TrySaveUrlToLocalConfig(objectRequestor.CacheServer, out errorMessage))
    {
        return new Result("Unable to configure cache server: " + errorMessage);
    }

    GitProcess git = new GitProcess(enlistment);
    string originBranchName = "origin/" + branch;
    GitProcess.Result createBranchResult = git.CreateBranchWithUpstream(branch, originBranchName);
    if (createBranchResult.ExitCodeIsFailure)
    {
        return new Result("Unable to create branch '" + originBranchName + "': " + createBranchResult.Errors + "\r\n" + createBranchResult.Output);
    }

    // Point HEAD at the new branch directly, bypassing git on the backing root.
    File.WriteAllText(
        Path.Combine(enlistment.WorkingDirectoryBackingRoot, GSDConstants.DotGit.Head),
        "ref: refs/heads/" + branch);

    if (!this.TryDownloadRootGitAttributes(enlistment, gitObjects, gitRepo, out errorMessage))
    {
        return new Result(errorMessage);
    }

    this.CreateGitScript(enlistment);

    string installHooksError;
    if (!HooksInstaller.InstallHooks(context, out installHooksError))
    {
        tracer.RelatedError(installHooksError);
        return new Result(installHooksError);
    }

    // TODO: Move this to be after the mount?
    GitProcess.Result forceCheckoutResult = git.ForceCheckout(branch);

    // BUGFIX: the old condition used IndexOf("unable to read tree") > 0, which
    // missed a match at index 0 and used culture-sensitive comparison.
    // string.Contains performs an ordinal search and matches at any position.
    if (forceCheckoutResult.ExitCodeIsFailure && forceCheckoutResult.Errors.Contains("unable to read tree"))
    {
        // It is possible to have the above TryDownloadCommit() fail because we
        // already have the commit and root tree we intend to check out, but
        // don't have a tree further down the working directory. If we fail
        // checkout here, it's because we don't have these trees and the
        // read-object hook is not available yet. Force downloading the commit
        // again and retry the checkout.
        if (!this.TryDownloadCommit(
            refs.GetTipCommitId(branch),
            enlistment,
            objectRequestor,
            gitObjects,
            gitRepo,
            out errorMessage,
            checkLocalObjectCache: false))
        {
            return new Result(errorMessage);
        }

        forceCheckoutResult = git.ForceCheckout(branch);
    }

    if (!RepoMetadata.TryInitialize(tracer, enlistment.DotGSDRoot, out errorMessage))
    {
        tracer.RelatedError(errorMessage);
        return new Result(errorMessage);
    }

    try
    {
        RepoMetadata.Instance.SaveCloneMetadata(tracer, enlistment);
        this.LogEnlistmentInfoAndSetConfigValues(tracer, git, enlistment);
    }
    catch (Exception e)
    {
        tracer.RelatedError(e.ToString());
        return new Result(e.Message);
    }
    finally
    {
        // Always release the metadata singleton, success or failure.
        RepoMetadata.Shutdown();
    }

    return new Result(true);
}
/// <summary>
/// Validate the object.
/// </summary>
/// <exception cref="ValidationException">
/// Thrown if validation fails
/// </exception>
public virtual void Validate()
{
    // Name is the only required member; its absence fails validation immediately.
    if (Name == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "Name");
    }

    // Every optional volume-source member validates itself only when set.
    // The order below matches the original sequence so the first validation
    // failure reported is unchanged.
    AwsElasticBlockStore?.Validate();
    AzureDisk?.Validate();
    AzureFile?.Validate();
    Cephfs?.Validate();
    Cinder?.Validate();
    FlexVolume?.Validate();
    GcePersistentDisk?.Validate();
    GitRepo?.Validate();
    Glusterfs?.Validate();
    HostPath?.Validate();
    Iscsi?.Validate();
    Nfs?.Validate();
    PersistentVolumeClaim?.Validate();
    PhotonPersistentDisk?.Validate();
    PortworxVolume?.Validate();
    Projected?.Validate();
    Quobyte?.Validate();
    Rbd?.Validate();
    ScaleIO?.Validate();
    VsphereVolume?.Validate();
}
/// <summary>
/// Verifies existing prefetch packs (regenerating missing idx files, and
/// deleting broken pack/idx pairs) and then downloads new prefetch packs.
/// </summary>
/// <param name="tracer">Tracer for logging.</param>
/// <param name="enlistment">Enlistment whose pack directory is scanned.</param>
/// <param name="objectRequestor">HTTP requestor used to download packs.</param>
/// <param name="error">Failure description when false is returned; null on success.</param>
/// <returns>true on success; false when a delete or download fails.</returns>
private bool TryPrefetchCommitsAndTrees(ITracer tracer, GVFSEnlistment enlistment, GitObjectsHttpRequestor objectRequestor, out string error)
{
    error = null;
    PhysicalFileSystem fileSystem = new PhysicalFileSystem();
    GitRepo repo = new GitRepo(tracer, enlistment, fileSystem);
    GVFSContext context = new GVFSContext(tracer, fileSystem, repo, enlistment);
    GitObjects gitObjects = new GVFSGitObjects(context, objectRequestor);

    gitObjects.DeleteStaleTempPrefetchPackAndIdxs();

    // Order the prefetch packs by their embedded timestamp; packs without a
    // parseable timestamp are ignored.
    string[] packs = gitObjects.ReadPackFileNames(enlistment.GitPackRoot, GVFSConstants.PrefetchPackPrefix);
    List<PrefetchPackInfo> orderedPacks = packs
        .Where(pack => this.GetTimestamp(pack).HasValue)
        .Select(pack => new PrefetchPackInfo(this.GetTimestamp(pack).Value, pack))
        .OrderBy(packInfo => packInfo.Timestamp)
        .ToList();

    // maxGood tracks the newest timestamp whose pack and idx are both usable;
    // firstBadPack marks where verification first failed (-1 = none).
    long maxGood = -1;
    int firstBadPack = -1;
    for (int i = 0; i < orderedPacks.Count; ++i)
    {
        long timestamp = orderedPacks[i].Timestamp;
        string packPath = orderedPacks[i].Path;
        string idxPath = Path.ChangeExtension(packPath, ".idx");
        if (!fileSystem.FileExists(idxPath))
        {
            EventMetadata metadata = new EventMetadata();
            metadata.Add("pack", packPath);
            metadata.Add("idxPath", idxPath);
            metadata.Add("timestamp", timestamp);

            // The idx is missing; try to rebuild it from the pack.
            GitProcess.Result indexResult = gitObjects.IndexPackFile(packPath);
            if (indexResult.HasErrors)
            {
                firstBadPack = i;
                metadata.Add("Errors", indexResult.Errors);
                tracer.RelatedWarning(metadata, $"{nameof(this.TryPrefetchCommitsAndTrees)}: Found pack file that's missing idx file, and failed to regenerate idx");
                break;
            }
            else
            {
                maxGood = timestamp;
                metadata.Add(TracingConstants.MessageKey.InfoMessage, $"{nameof(this.TryPrefetchCommitsAndTrees)}: Found pack file that's missing idx file, and regenerated idx");
                tracer.RelatedEvent(EventLevel.Informational, $"{nameof(this.TryPrefetchCommitsAndTrees)}_RebuildIdx", metadata);
            }
        }
        else
        {
            maxGood = timestamp;
        }
    }

    if (firstBadPack != -1)
    {
        const int MaxDeleteRetries = 200; // 200 * IoFailureRetryDelayMS (50ms) = 10 seconds
        const int RetryLoggingThreshold = 40; // 40 * IoFailureRetryDelayMS (50ms) = 2 seconds

        // Delete packs and indexes in reverse order so that if prefetch is killed, subsequent prefetch commands will
        // find the right starting spot.
        for (int i = orderedPacks.Count - 1; i >= firstBadPack; --i)
        {
            string packPath = orderedPacks[i].Path;
            string idxPath = Path.ChangeExtension(packPath, ".idx");

            // Delete the idx first, then the pack, each with retry/wait.
            EventMetadata metadata = new EventMetadata();
            metadata.Add("path", idxPath);
            metadata.Add(TracingConstants.MessageKey.InfoMessage, $"{nameof(this.TryPrefetchCommitsAndTrees)} deleting bad idx file");
            tracer.RelatedEvent(EventLevel.Informational, $"{nameof(this.TryPrefetchCommitsAndTrees)}_DeleteBadIdx", metadata);
            if (!fileSystem.TryWaitForDelete(tracer, idxPath, IoFailureRetryDelayMS, MaxDeleteRetries, RetryLoggingThreshold))
            {
                error = $"Unable to delete {idxPath}";
                return(false);
            }

            metadata = new EventMetadata();
            metadata.Add("path", packPath);
            metadata.Add(TracingConstants.MessageKey.InfoMessage, $"{nameof(this.TryPrefetchCommitsAndTrees)} deleting bad pack file");
            tracer.RelatedEvent(EventLevel.Informational, $"{nameof(this.TryPrefetchCommitsAndTrees)}_DeleteBadPack", metadata);
            if (!fileSystem.TryWaitForDelete(tracer, packPath, IoFailureRetryDelayMS, MaxDeleteRetries, RetryLoggingThreshold))
            {
                error = $"Unable to delete {packPath}";
                return(false);
            }
        }
    }

    // Presumably downloads packs newer than maxGood — confirm against
    // GitObjects.TryDownloadPrefetchPacks; not visible from this file.
    if (!gitObjects.TryDownloadPrefetchPacks(maxGood))
    {
        error = "Failed to download prefetch packs";
        return(false);
    }

    return(true);
}