/// <summary>
/// Verifies that DiffHelper surfaces a failure (HasFailures == true) when the
/// underlying "ls-tree" git invocation exits with a non-zero exit code.
/// </summary>
public void DetectsFailuresInLsTree()
{
    MockTracer mockTracer = new MockTracer();
    MockGitProcess mockGit = new MockGitProcess(new ConfigurableFileSystem());

    // Simulate git failing: empty stdout/stderr, exit code 1.
    mockGit.SetExpectedCommandResult(
        "ls-tree -r -t sha1",
        () => new GitProcess.Result(string.Empty, string.Empty, 1));

    DiffHelper helper = new DiffHelper(
        mockTracer,
        new MockEnlistment(),
        mockGit,
        new List<string>(),
        new List<string>());

    // No previous commit: forces the ls-tree code path rather than diff-tree.
    helper.PerformDiff(null, "sha1");

    helper.HasFailures.ShouldEqual(true);
}
/// <summary>
/// Verifies that DiffHelper surfaces a failure (HasFailures == true) when the
/// underlying "diff-tree" git invocation exits with a non-zero exit code.
/// </summary>
public void DetectsFailuresInDiffTree()
{
    MockTracer mockTracer = new MockTracer();
    MockGitProcess mockGit = new MockGitProcess();

    // Simulate git failing: empty stdout/stderr, exit code 1.
    mockGit.SetExpectedCommandResult(
        "diff-tree -r -t sha1 sha2",
        () => new GitProcess.Result(string.Empty, string.Empty, 1));

    DiffHelper helper = new DiffHelper(
        mockTracer,
        new Mock.Common.MockGVFSEnlistment(),
        mockGit,
        new List<string>(),
        new List<string>());

    // Both commits supplied: forces the diff-tree code path rather than ls-tree.
    helper.PerformDiff("sha1", "sha2");

    helper.HasFailures.ShouldEqual(true);
}
/// <summary>
/// Verifies that DiffHelper surfaces a failure (HasFailures == true) when the
/// underlying "ls-tree" git invocation exits with a non-zero exit code.
/// </summary>
public void DetectsFailuresInLsTree()
{
    MockTracer mockTracer = new MockTracer();
    MockGitProcess mockGit = new MockGitProcess();

    // Simulate git failing: empty stdout/stderr, exit code 1.
    mockGit.SetExpectedCommandResult(
        "ls-tree -r -t sha1",
        () => new GitProcess.Result(string.Empty, string.Empty, 1));

    DiffHelper helper = new DiffHelper(
        mockTracer,
        new Mock.Common.MockGSDEnlistment(),
        mockGit,
        new List<string>(),
        new List<string>(),
        includeSymLinks: this.IncludeSymLinks);

    // No previous commit: forces the ls-tree code path rather than diff-tree.
    helper.PerformDiff(null, "sha1");

    helper.HasFailures.ShouldEqual(true);
}
/// <summary>
/// Prefetches the blobs reachable from the given branch tip or commit through a
/// multi-stage pipeline (diff -> find blobs -> download -> index pack, plus an
/// optional file-hydration stage), reporting counts via out parameters.
/// Sets this.HasFailures if any stage fails.
/// </summary>
/// <param name="branchOrCommit">Branch name (when <paramref name="isBranch"/> is true) or commit id to prefetch.</param>
/// <param name="isBranch">True to resolve <paramref name="branchOrCommit"/> via info/refs; false to treat it as a commit id.</param>
/// <param name="hydrateFilesAfterDownload">True to also write downloaded blobs into the working directory.</param>
/// <param name="matchedBlobCount">Total blobs required by the diff (locally available + missing).</param>
/// <param name="downloadedBlobCount">Blobs that were missing locally and had to be downloaded.</param>
/// <param name="hydratedFileCount">Files written to disk by the hydration stage (0 when hydration is skipped).</param>
/// <exception cref="FetchException">Thrown when no branch/commit is specified or info/refs cannot be queried.</exception>
public void PrefetchWithStats(
    string branchOrCommit,
    bool isBranch,
    bool hydrateFilesAfterDownload,
    out int matchedBlobCount,
    out int downloadedBlobCount,
    out int hydratedFileCount)
{
    matchedBlobCount = 0;
    downloadedBlobCount = 0;
    hydratedFileCount = 0;

    if (string.IsNullOrWhiteSpace(branchOrCommit))
    {
        throw new FetchException("Must specify branch or commit to fetch");
    }

    GitRefs refs = null;
    string commitToFetch;
    if (isBranch)
    {
        // Resolve the branch name to a tip commit via the remote's info/refs endpoint.
        refs = this.ObjectRequestor.QueryInfoRefs(branchOrCommit);
        if (refs == null)
        {
            throw new FetchException("Could not query info/refs from: {0}", this.Enlistment.RepoUrl);
        }
        else if (refs.Count == 0)
        {
            throw new FetchException("Could not find branch {0} in info/refs from: {1}", branchOrCommit, this.Enlistment.RepoUrl);
        }

        commitToFetch = refs.GetTipCommitId(branchOrCommit);
    }
    else
    {
        commitToFetch = branchOrCommit;
    }

    // Ensure the commit object itself is present locally before diffing against it.
    this.DownloadMissingCommit(commitToFetch, this.GitObjects);

    // For FastFetch only, examine the shallow file to determine the previous commit that had been fetched
    string shallowFile = Path.Combine(this.Enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Shallow);

    string previousCommit = null;

    // Use the shallow file to find a recent commit to diff against to try and reduce the number of SHAs to check.
    if (File.Exists(shallowFile))
    {
        // Last non-blank line is treated as the most recent fetched commit.
        previousCommit = File.ReadAllLines(shallowFile).Where(line => !string.IsNullOrWhiteSpace(line)).LastOrDefault();
        if (string.IsNullOrWhiteSpace(previousCommit))
        {
            this.Tracer.RelatedError("Shallow file exists, but contains no valid SHAs.");
            this.HasFailures = true;
            return;
        }
    }

    // Shared hand-off collection: every stage that makes a blob locally available adds its id here.
    BlockingCollection<string> availableBlobs = new BlockingCollection<string>();

    ////
    // First create the pipeline
    //
    //  diff ---> blobFinder ---> downloader ---> packIndexer
    //     |           |              |               |
    //     ------------------------------------------------------> fileHydrator
    ////

    // diff
    //  Inputs:
    //      * files/folders
    //      * commit id
    //  Outputs:
    //      * RequiredBlobs (property): Blob ids required to satisfy desired paths
    //      * FileAddOperations (property): Repo-relative paths corresponding to those blob ids
    DiffHelper diff = new DiffHelper(this.Tracer, this.Enlistment, this.FileList, this.FolderList, includeSymLinks: false);

    // blobFinder
    //  Inputs:
    //      * requiredBlobs (in param): Blob ids from output of `diff`
    //  Outputs:
    //      * availableBlobs (out param): Locally available blob ids (shared between `blobFinder`, `downloader`, and `packIndexer`, all add blob ids to the list as they are locally available)
    //      * MissingBlobs (property): Blob ids that are missing and need to be downloaded
    //      * AvailableBlobs (property): Same as availableBlobs
    FindBlobsStage blobFinder = new FindBlobsStage(this.SearchThreadCount, diff.RequiredBlobs, availableBlobs, this.Tracer, this.Enlistment);

    // downloader
    //  Inputs:
    //      * missingBlobs (in param): Blob ids from output of `blobFinder`
    //  Outputs:
    //      * availableBlobs (out param): Loose objects that have completed downloading (shared between `blobFinder`, `downloader`, and `packIndexer`, all add blob ids to the list as they are locally available)
    //      * AvailableObjects (property): Same as availableBlobs
    //      * AvailablePacks (property): Packfiles that have completed downloading
    BatchObjectDownloadStage downloader = new BatchObjectDownloadStage(this.DownloadThreadCount, this.ChunkSize, blobFinder.MissingBlobs, availableBlobs, this.Tracer, this.Enlistment, this.ObjectRequestor, this.GitObjects);

    // packIndexer
    //  Inputs:
    //      * availablePacks (in param): Packfiles that have completed downloading from output of `downloader`
    //  Outputs:
    //      * availableBlobs (out param): Blobs that have completed downloading and indexing (shared between `blobFinder`, `downloader`, and `packIndexer`, all add blob ids to the list as they are locally available)
    IndexPackStage packIndexer = new IndexPackStage(this.IndexThreadCount, downloader.AvailablePacks, availableBlobs, this.Tracer, this.GitObjects);

    // fileHydrator
    //  Inputs:
    //      * workingDirectoryRoot (in param): the root of the working directory where hydration takes place
    //      * blobIdsToPaths (in param): paths of all blob ids that need to be hydrated from output of `diff`
    //      * availableBlobs (in param): blobs id that are available locally, from whatever source
    //  Outputs:
    //      * Hydrated files on disk.
    HydrateFilesStage fileHydrator = new HydrateFilesStage(Environment.ProcessorCount * 2, this.Enlistment.WorkingDirectoryRoot, diff.FileAddOperations, availableBlobs, this.Tracer);

    // All the stages of the pipeline are created and wired up, now kick them off in the proper sequence
    ThreadStart performDiff = () =>
    {
        diff.PerformDiff(previousCommit, commitToFetch);
        this.HasFailures |= diff.HasFailures;
    };

    if (hydrateFilesAfterDownload)
    {
        // Call synchronously to ensure that diff.FileAddOperations
        // is completely populated when fileHydrator starts
        performDiff();
    }
    else
    {
        // Diff can run concurrently with the download stages when no hydration is needed.
        new Thread(performDiff).Start();
    }

    blobFinder.Start();
    downloader.Start();

    if (hydrateFilesAfterDownload)
    {
        fileHydrator.Start();
    }

    // If indexing happens during searching, searching progressively gets slower, so wait on searching before indexing.
    blobFinder.WaitForCompletion();
    this.HasFailures |= blobFinder.HasFailures;

    packIndexer.Start();

    downloader.WaitForCompletion();
    this.HasFailures |= downloader.HasFailures;

    packIndexer.WaitForCompletion();
    this.HasFailures |= packIndexer.HasFailures;

    // Signal downstream consumers (fileHydrator) that no more blob ids will arrive.
    availableBlobs.CompleteAdding();

    if (hydrateFilesAfterDownload)
    {
        fileHydrator.WaitForCompletion();
        this.HasFailures |= fileHydrator.HasFailures;
    }

    matchedBlobCount = blobFinder.AvailableBlobCount + blobFinder.MissingBlobCount;
    downloadedBlobCount = blobFinder.MissingBlobCount;
    hydratedFileCount = fileHydrator.ReadFileCount;

    if (!this.SkipConfigUpdate && !this.HasFailures)
    {
        this.UpdateRefs(branchOrCommit, isBranch, refs);

        if (isBranch)
        {
            this.HasFailures |= !this.UpdateRefSpec(this.Tracer, this.Enlistment, branchOrCommit, refs);
        }
    }

    if (!this.HasFailures)
    {
        this.SavePrefetchArgs(commitToFetch, hydrateFilesAfterDownload);
    }
}
/// <summary>
/// Fetches the blobs reachable from the given branch tip or commit through a
/// pipeline of jobs (diff -> find missing blobs -> batch download -> index pack),
/// without hydrating any files. Sets this.HasFailures if any stage fails.
/// </summary>
/// <param name="branchOrCommit">A specific branch to filter for, or null for all branches returned from info/refs</param>
/// <param name="isBranch">True to resolve <paramref name="branchOrCommit"/> via info/refs; false to treat it as a commit id.</param>
/// <exception cref="FetchException">Thrown when no branch/commit is specified or info/refs cannot be queried.</exception>
public virtual void FastFetch(string branchOrCommit, bool isBranch)
{
    if (string.IsNullOrWhiteSpace(branchOrCommit))
    {
        throw new FetchException("Must specify branch or commit to fetch");
    }

    GitRefs refs = null;
    string commitToFetch;
    if (isBranch)
    {
        // Resolve the branch name to a tip commit via the remote's info/refs endpoint.
        refs = this.ObjectRequestor.QueryInfoRefs(branchOrCommit);
        if (refs == null)
        {
            throw new FetchException("Could not query info/refs from: {0}", this.Enlistment.RepoUrl);
        }
        else if (refs.Count == 0)
        {
            throw new FetchException("Could not find branch {0} in info/refs from: {1}", branchOrCommit, this.Enlistment.RepoUrl);
        }

        commitToFetch = refs.GetTipCommitId(branchOrCommit);
    }
    else
    {
        commitToFetch = branchOrCommit;
    }

    // Ensure the commit object itself is present locally before diffing against it.
    this.DownloadMissingCommit(commitToFetch, this.GitObjects);

    // Dummy output queue since we don't need to checkout available blobs
    BlockingCollection<string> availableBlobs = new BlockingCollection<string>();

    // Configure pipeline
    // LsTreeHelper output => FindMissingBlobs => BatchDownload => IndexPack
    string shallowFile = Path.Combine(this.Enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Shallow);

    string previousCommit = null;

    // Use the shallow file to find a recent commit to diff against to try and reduce the number of SHAs to check
    DiffHelper blobEnumerator = new DiffHelper(this.Tracer, this.Enlistment, this.PathWhitelist);
    if (File.Exists(shallowFile))
    {
        // Last non-blank line is treated as the most recent fetched commit.
        previousCommit = File.ReadAllLines(shallowFile).Where(line => !string.IsNullOrWhiteSpace(line)).LastOrDefault();
        if (string.IsNullOrWhiteSpace(previousCommit))
        {
            this.Tracer.RelatedError("Shallow file exists, but contains no valid SHAs.");
            this.HasFailures = true;
            return;
        }
    }

    // Diff runs synchronously here; the jobs below consume its RequiredBlobs output.
    blobEnumerator.PerformDiff(previousCommit, commitToFetch);
    this.HasFailures |= blobEnumerator.HasFailures;

    FindMissingBlobsJob blobFinder = new FindMissingBlobsJob(this.SearchThreadCount, blobEnumerator.RequiredBlobs, availableBlobs, this.Tracer, this.Enlistment);
    BatchObjectDownloadJob downloader = new BatchObjectDownloadJob(this.DownloadThreadCount, this.ChunkSize, blobFinder.DownloadQueue, availableBlobs, this.Tracer, this.Enlistment, this.ObjectRequestor, this.GitObjects);
    IndexPackJob packIndexer = new IndexPackJob(this.IndexThreadCount, downloader.AvailablePacks, availableBlobs, this.Tracer, this.GitObjects);

    blobFinder.Start();
    downloader.Start();

    // If indexing happens during searching, searching progressively gets slower, so wait on searching before indexing.
    blobFinder.WaitForCompletion();
    this.HasFailures |= blobFinder.HasFailures;

    // Index regardless of failures, it'll shorten the next fetch.
    packIndexer.Start();

    downloader.WaitForCompletion();
    this.HasFailures |= downloader.HasFailures;

    packIndexer.WaitForCompletion();
    this.HasFailures |= packIndexer.HasFailures;

    if (!this.SkipConfigUpdate && !this.HasFailures)
    {
        this.UpdateRefs(branchOrCommit, isBranch, refs);

        if (isBranch)
        {
            this.HasFailures |= !this.UpdateRefSpec(this.Tracer, this.Enlistment, branchOrCommit, refs);
        }
    }
}
/// <summary>
/// Fetches the blobs reachable from the given branch tip or commit through a
/// pipeline of jobs (diff -> find missing blobs -> batch download -> index pack,
/// plus an optional file-read/checkout stage), reporting counts via out parameters.
/// Sets this.HasFailures if any stage fails.
/// </summary>
/// <param name="branchOrCommit">Branch name (when <paramref name="isBranch"/> is true) or commit id to fetch.</param>
/// <param name="isBranch">True to resolve <paramref name="branchOrCommit"/> via info/refs; false to treat it as a commit id.</param>
/// <param name="readFilesAfterDownload">True to also materialize downloaded blobs as files via ReadFilesJob.</param>
/// <param name="matchedBlobCount">Total blobs required by the diff (locally available + missing).</param>
/// <param name="downloadedBlobCount">Blobs that were missing locally and had to be downloaded.</param>
/// <param name="readFileCount">Files processed by the read stage (0 when that stage is skipped).</param>
/// <exception cref="FetchException">Thrown when no branch/commit is specified or info/refs cannot be queried.</exception>
public void FastFetchWithStats(
    string branchOrCommit,
    bool isBranch,
    bool readFilesAfterDownload,
    out int matchedBlobCount,
    out int downloadedBlobCount,
    out int readFileCount)
{
    matchedBlobCount = 0;
    downloadedBlobCount = 0;
    readFileCount = 0;

    if (string.IsNullOrWhiteSpace(branchOrCommit))
    {
        throw new FetchException("Must specify branch or commit to fetch");
    }

    GitRefs refs = null;
    string commitToFetch;
    if (isBranch)
    {
        // Resolve the branch name to a tip commit via the remote's info/refs endpoint.
        refs = this.ObjectRequestor.QueryInfoRefs(branchOrCommit);
        if (refs == null)
        {
            throw new FetchException("Could not query info/refs from: {0}", this.Enlistment.RepoUrl);
        }
        else if (refs.Count == 0)
        {
            throw new FetchException("Could not find branch {0} in info/refs from: {1}", branchOrCommit, this.Enlistment.RepoUrl);
        }

        commitToFetch = refs.GetTipCommitId(branchOrCommit);
    }
    else
    {
        commitToFetch = branchOrCommit;
    }

    // Ensure the commit object itself is present locally before diffing against it.
    this.DownloadMissingCommit(commitToFetch, this.GitObjects);

    // Configure pipeline
    // LsTreeHelper output => FindMissingBlobs => BatchDownload => IndexPack
    string shallowFile = Path.Combine(this.Enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Shallow);

    string previousCommit = null;

    // Use the shallow file to find a recent commit to diff against to try and reduce the number of SHAs to check
    DiffHelper blobEnumerator = new DiffHelper(this.Tracer, this.Enlistment, this.FileList, this.FolderList);
    if (File.Exists(shallowFile))
    {
        // Last non-blank line is treated as the most recent fetched commit.
        previousCommit = File.ReadAllLines(shallowFile).Where(line => !string.IsNullOrWhiteSpace(line)).LastOrDefault();
        if (string.IsNullOrWhiteSpace(previousCommit))
        {
            this.Tracer.RelatedError("Shallow file exists, but contains no valid SHAs.");
            this.HasFailures = true;
            return;
        }
    }

    ThreadStart performDiff = () =>
    {
        blobEnumerator.PerformDiff(previousCommit, commitToFetch);
        this.HasFailures |= blobEnumerator.HasFailures;
    };

    if (readFilesAfterDownload)
    {
        // Call synchronously to ensure that blobEnumerator.FileAddOperations
        // is completely populated when ReadFilesJob starts
        performDiff();
    }
    else
    {
        // Diff can run concurrently with the download jobs when no file reads are needed.
        new Thread(performDiff).Start();
    }

    // Shared hand-off collection: every job that makes a blob locally available adds its id here.
    BlockingCollection<string> availableBlobs = new BlockingCollection<string>();

    FindMissingBlobsJob blobFinder = new FindMissingBlobsJob(this.SearchThreadCount, blobEnumerator.RequiredBlobs, availableBlobs, this.Tracer, this.Enlistment);
    BatchObjectDownloadJob downloader = new BatchObjectDownloadJob(this.DownloadThreadCount, this.ChunkSize, blobFinder.MissingBlobs, availableBlobs, this.Tracer, this.Enlistment, this.ObjectRequestor, this.GitObjects);
    IndexPackJob packIndexer = new IndexPackJob(this.IndexThreadCount, downloader.AvailablePacks, availableBlobs, this.Tracer, this.GitObjects);
    ReadFilesJob readFiles = new ReadFilesJob(Environment.ProcessorCount * 2, blobEnumerator.FileAddOperations, availableBlobs, this.Tracer);

    blobFinder.Start();
    downloader.Start();

    if (readFilesAfterDownload)
    {
        readFiles.Start();
    }

    // If indexing happens during searching, searching progressively gets slower, so wait on searching before indexing.
    blobFinder.WaitForCompletion();
    this.HasFailures |= blobFinder.HasFailures;

    packIndexer.Start();

    downloader.WaitForCompletion();
    this.HasFailures |= downloader.HasFailures;

    packIndexer.WaitForCompletion();
    this.HasFailures |= packIndexer.HasFailures;

    // Signal downstream consumers (readFiles) that no more blob ids will arrive.
    availableBlobs.CompleteAdding();

    if (readFilesAfterDownload)
    {
        readFiles.WaitForCompletion();
        this.HasFailures |= readFiles.HasFailures;
    }

    matchedBlobCount = blobFinder.AvailableBlobCount + blobFinder.MissingBlobCount;
    downloadedBlobCount = blobFinder.MissingBlobCount;
    readFileCount = readFiles.ReadFileCount;

    if (!this.SkipConfigUpdate && !this.HasFailures)
    {
        this.UpdateRefs(branchOrCommit, isBranch, refs);

        if (isBranch)
        {
            this.HasFailures |= !this.UpdateRefSpec(this.Tracer, this.Enlistment, branchOrCommit, refs);
        }
    }
}