public void CanParseBackwardsDiff()
{
    MockTracer tracer = new MockTracer();
    DiffHelper diffBackwards = new DiffHelper(
        tracer,
        new Mock.Common.MockGVFSEnlistment(),
        new List<string>(),
        new List<string>(),
        includeSymLinks: this.IncludeSymLinks);
    diffBackwards.ParseDiffFile(GetDataPath("backward.txt"), "xx:\\fakeRepo");

    // File => folder, deleted file, edited file, renamed file, rename-edit file
    // Children of file => folder, renamed folder, deleted folder, recursive delete file, edited folder
    diffBackwards.RequiredBlobs.Count.ShouldEqual(10);

    // File added, folder => file, moved folder, added folder
    diffBackwards.FileDeleteOperations.Count.ShouldEqual(4);

    // Also includes the children of: folder added, folder renamed, file => folder
    diffBackwards.TotalFileDeletes.ShouldEqual(7);

    // Folder created, folder edited, folder deleted, folder renamed (add + delete),
    // folder => file, file => folder, recursive delete (include subfolder)
    diffBackwards.TotalDirectoryOperations.ShouldEqual(9);
}
protected override async Task<Dictionary<string, SlnFile>> ProcessImpl(Task<Dictionary<string, SlnFile>> previousTask)
{
    var fileDictObj = await previousTask;
    var fileDict = (Dictionary<string, SlnFile>)fileDictObj;

    var repoResult = GitRunner.FindRepository(_workingDirectory);
    if (!repoResult.foundRepo)
    {
        Logger.LogError("Unable to find Git repository located in {0}. Shutting down.", _workingDirectory);
        return new Dictionary<string, SlnFile>();
    }

    // validate the target branch
    if (!DiffHelper.HasBranch(repoResult.repo, _targetGitBranch))
    {
        Logger.LogError("Current git repository doesn't have any branch named [{0}]. Shutting down.", _targetGitBranch);
        return new Dictionary<string, SlnFile>();
    }

    var repo = repoResult.repo;
    var affectedFiles = DiffHelper.ChangedFiles(repo, _targetGitBranch);

    // filter out any files that aren't affected by the diff
    var newDict = new Dictionary<string, SlnFile>();
    foreach (var file in affectedFiles)
    {
        Logger.LogDebug("Affected file: {0}", file);

        // this file is in the solution
        if (fileDict.ContainsKey(file))
        {
            newDict[file] = fileDict[file];
        }
    }

    return newDict;
}
public void UpdateCid(int cid)
{
    ChangesetVersionSpec versionSpec = new ChangesetVersionSpec(cid);
    Toplevel.GdkWindow.Cursor = new Gdk.Cursor(Gdk.CursorType.Watch);

    string tname = System.IO.Path.GetTempFileName();
    using (StreamWriter sw = new StreamWriter(tname))
    {
        DiffHelper.ShowChangeset(driver.VersionControlServer, versionSpec, false, GetDiffOptions(sw));
    }

    using (StreamReader sr = new StreamReader(tname))
    {
        textView.Update("temp.diff", sr.ReadToEnd());
    }

    Toplevel.GdkWindow.Cursor = new Gdk.Cursor(Gdk.CursorType.LeftPtr);
    File.Delete(tname);
}
public ActionResult Translate(int id, string culture)
{
    var brand = _brandService.Find(id);
    var compared = new BrandModel
    {
        Id = brand.Id,
        Name = brand.Name,
        Description = brand.Description
    };
    var translated = new BrandModel { Id = brand.Id };
    var diff = new BrandModel { Id = brand.Id };

    var translation = _translationStore.Find(CultureInfo.GetCultureInfo(culture), EntityKey.FromEntity(brand));
    if (translation != null)
    {
        translated.Name = translation.GetTranslatedText("Name");
        translated.Description = translation.GetTranslatedText("Description");
        diff.Name = DiffHelper.GetDiffHtml(translation.GetOriginalText("Name"), brand.Name);
        diff.Description = DiffHelper.GetDiffHtml(translation.GetOriginalText("Description"), brand.Description);
    }

    ViewBag.Difference = diff;
    ViewBag.Compared = compared;

    return View(translated);
}
public void CanParseDiffForwards()
{
    MockTracer tracer = new MockTracer();
    DiffHelper diffForwards = new DiffHelper(tracer, new MockEnlistment(), new List<string>(), new List<string>());
    diffForwards.ParseDiffFile(GetDataPath("forward.txt"), "xx:\\fakeRepo");

    // File added, file edited, file renamed, folder => file, edit-rename file
    // Children of: added folder, renamed folder, edited folder, file => folder
    diffForwards.RequiredBlobs.Count.ShouldEqual(9);

    // File deleted, folder deleted, file => folder, edit-rename
    diffForwards.FileDeleteOperations.Count.ShouldEqual(4);

    // Includes children of: recursive delete folder, deleted folder, renamed folder, and folder => file
    diffForwards.TotalFileDeletes.ShouldEqual(8);

    // Folder created, folder edited, folder deleted, folder renamed (add + delete),
    // folder => file, file => folder, recursive delete (top-level only)
    diffForwards.DirectoryOperations.Count.ShouldEqual(8);

    // Should also include the deleted folder of the recursive delete
    diffForwards.TotalDirectoryOperations.ShouldEqual(9);
}
public void CanParseBackwardsDiff()
{
    MockTracer tracer = new MockTracer();
    DiffHelper diffBackwards = new DiffHelper(tracer, null, null, new List<string>());
    diffBackwards.ParseDiffFile(this.GetDataPath("backward.txt"), "xx:\\fakeRepo");

    // File => folder, deleted file, edited file, renamed file, rename-edit file
    // Children of file => folder, renamed folder, deleted folder, recursive delete file, edited folder
    diffBackwards.RequiredBlobs.Count.ShouldEqual(10);

    // File added, folder => file, moved folder, added folder
    diffBackwards.FileDeleteOperations.Count.ShouldEqual(4);

    // Also includes the children of: folder added, folder renamed, file => folder
    diffBackwards.TotalFileDeletes.ShouldEqual(7);

    // Folder created, folder edited, folder deleted, folder renamed (add + delete),
    // folder => file, file => folder, recursive delete (include subfolder)
    diffBackwards.DirectoryOperations.Count.ShouldEqual(9);

    // Should match the count above since there were no recursive adds to become recursive deletes
    diffBackwards.TotalDirectoryOperations.ShouldEqual(9);
}
protected override async Task<Dictionary<string, SlnFile>> ProcessImpl(Task<Dictionary<string, SlnFile>> previousTask)
{
    var fileDictObj = await previousTask;
    var fileDict = (Dictionary<string, SlnFile>)fileDictObj;

    var repoResult = GitRunner.FindRepository(_workingDirectory);
    if (!repoResult.foundRepo)
    {
        Logger.LogError("Unable to find Git repository located in {0}. Shutting down.", _workingDirectory);
        return new Dictionary<string, SlnFile>();
    }

    // validate the target branch
    if (!DiffHelper.HasBranch(repoResult.repo, _targetGitBranch))
    {
        Logger.LogError("Current git repository doesn't have any branch named [{0}]. Shutting down.", _targetGitBranch);
        return new Dictionary<string, SlnFile>();
    }

    var repo = repoResult.repo;
    var affectedFiles = DiffHelper.ChangedFiles(repo, _targetGitBranch).ToList();
    var projectFiles = fileDict.Where(x => x.Value.FileType == FileType.Project).ToList();
    var projectFolders = projectFiles.ToLookup(x => Path.GetDirectoryName(x.Key), v => Tuple.Create(v.Key, v.Value));
    var projectImports = ProjectImportsFinder.FindProjectImports(projectFiles.Select(pair => new SlnFileWithPath(pair.Key, pair.Value)));

    // filter out any files that aren't affected by the diff
    var newDict = new Dictionary<string, SlnFile>();
    foreach (var file in affectedFiles)
    {
        Logger.LogDebug("Affected file: {0}", file);

        // this file is in the solution
        if (fileDict.ContainsKey(file))
        {
            newDict[file] = fileDict[file];
        }
        else
        {
            // special case - not all of the affected files were in the solution.
            // Check to see if these affected files are in the same folder as any of the projects.
            var directoryName = Path.GetDirectoryName(file);
            if (TryFindSubFolder(projectFolders.Select(c => c.Key), directoryName, out var projectFolder))
            {
                var affectedProjects = projectFolders[projectFolder];
                foreach (var affectedProject in affectedProjects)
                {
                    var project = affectedProject.Item2;
                    var projectPath = affectedProject.Item1;
                    Logger.LogInformation(
                        "Adding project {0} to the set of affected files because non-code file {1}, " +
                        "found inside same directory [{2}], was modified.",
                        projectPath, file, directoryName);
                    newDict[projectPath] = project;
                }
            }
        }

        // special case - if the affected file is imported by other projects, the importing projects must also be marked as affected
        if (projectImports.ContainsKey(file))
        {
            // mark all dependent projects as affected
            foreach (var dependentProject in projectImports[file].DependantProjects)
            {
                newDict[dependentProject.Path] = dependentProject.File;
            }
        }
    }

    return newDict;
}
public void PrefetchWithStats(
    string branchOrCommit,
    bool isBranch,
    bool hydrateFilesAfterDownload,
    out int matchedBlobCount,
    out int downloadedBlobCount,
    out int hydratedFileCount)
{
    matchedBlobCount = 0;
    downloadedBlobCount = 0;
    hydratedFileCount = 0;

    if (string.IsNullOrWhiteSpace(branchOrCommit))
    {
        throw new FetchException("Must specify branch or commit to fetch");
    }

    GitRefs refs = null;
    string commitToFetch;
    if (isBranch)
    {
        refs = this.ObjectRequestor.QueryInfoRefs(branchOrCommit);
        if (refs == null)
        {
            throw new FetchException("Could not query info/refs from: {0}", this.Enlistment.RepoUrl);
        }
        else if (refs.Count == 0)
        {
            throw new FetchException("Could not find branch {0} in info/refs from: {1}", branchOrCommit, this.Enlistment.RepoUrl);
        }

        commitToFetch = refs.GetTipCommitId(branchOrCommit);
    }
    else
    {
        commitToFetch = branchOrCommit;
    }

    this.DownloadMissingCommit(commitToFetch, this.GitObjects);

    // For FastFetch only, examine the shallow file to determine the previous commit that had been fetched
    string shallowFile = Path.Combine(this.Enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Shallow);
    string previousCommit = null;

    // Use the shallow file to find a recent commit to diff against to try and reduce the number of SHAs to check.
    if (File.Exists(shallowFile))
    {
        previousCommit = File.ReadAllLines(shallowFile).Where(line => !string.IsNullOrWhiteSpace(line)).LastOrDefault();
        if (string.IsNullOrWhiteSpace(previousCommit))
        {
            this.Tracer.RelatedError("Shallow file exists, but contains no valid SHAs.");
            this.HasFailures = true;
            return;
        }
    }

    BlockingCollection<string> availableBlobs = new BlockingCollection<string>();

    ////
    // First create the pipeline
    //
    //  diff ---> blobFinder ---> downloader ---> packIndexer
    //    |           |               |               |
    //     -----------------------------------------------> fileHydrator
    ////

    // diff
    //  Inputs:
    //      * files/folders
    //      * commit id
    //  Outputs:
    //      * RequiredBlobs (property): Blob ids required to satisfy desired paths
    //      * FileAddOperations (property): Repo-relative paths corresponding to those blob ids
    DiffHelper diff = new DiffHelper(this.Tracer, this.Enlistment, this.FileList, this.FolderList, includeSymLinks: false);

    // blobFinder
    //  Inputs:
    //      * requiredBlobs (in param): Blob ids from output of `diff`
    //  Outputs:
    //      * availableBlobs (out param): Locally available blob ids (shared between `blobFinder`, `downloader`, and `packIndexer`; all add blob ids to the list as they become locally available)
    //      * MissingBlobs (property): Blob ids that are missing and need to be downloaded
    //      * AvailableBlobs (property): Same as availableBlobs
    FindBlobsStage blobFinder = new FindBlobsStage(this.SearchThreadCount, diff.RequiredBlobs, availableBlobs, this.Tracer, this.Enlistment);

    // downloader
    //  Inputs:
    //      * missingBlobs (in param): Blob ids from output of `blobFinder`
    //  Outputs:
    //      * availableBlobs (out param): Loose objects that have completed downloading (shared between `blobFinder`, `downloader`, and `packIndexer`; all add blob ids to the list as they become locally available)
    //      * AvailableObjects (property): Same as availableBlobs
    //      * AvailablePacks (property): Packfiles that have completed downloading
    BatchObjectDownloadStage downloader = new BatchObjectDownloadStage(this.DownloadThreadCount, this.ChunkSize, blobFinder.MissingBlobs, availableBlobs, this.Tracer, this.Enlistment, this.ObjectRequestor, this.GitObjects);

    // packIndexer
    //  Inputs:
    //      * availablePacks (in param): Packfiles that have completed downloading, from output of `downloader`
    //  Outputs:
    //      * availableBlobs (out param): Blobs that have completed downloading and indexing (shared between `blobFinder`, `downloader`, and `packIndexer`; all add blob ids to the list as they become locally available)
    IndexPackStage packIndexer = new IndexPackStage(this.IndexThreadCount, downloader.AvailablePacks, availableBlobs, this.Tracer, this.GitObjects);

    // fileHydrator
    //  Inputs:
    //      * workingDirectoryRoot (in param): the root of the working directory where hydration takes place
    //      * blobIdsToPaths (in param): paths of all blob ids that need to be hydrated, from output of `diff`
    //      * availableBlobs (in param): blob ids that are available locally, from whatever source
    //  Outputs:
    //      * Hydrated files on disk.
    HydrateFilesStage fileHydrator = new HydrateFilesStage(Environment.ProcessorCount * 2, this.Enlistment.WorkingDirectoryRoot, diff.FileAddOperations, availableBlobs, this.Tracer);

    // All the stages of the pipeline are created and wired up; now kick them off in the proper sequence
    ThreadStart performDiff = () =>
    {
        diff.PerformDiff(previousCommit, commitToFetch);
        this.HasFailures |= diff.HasFailures;
    };

    if (hydrateFilesAfterDownload)
    {
        // Call synchronously to ensure that diff.FileAddOperations
        // is completely populated when fileHydrator starts
        performDiff();
    }
    else
    {
        new Thread(performDiff).Start();
    }

    blobFinder.Start();
    downloader.Start();

    if (hydrateFilesAfterDownload)
    {
        fileHydrator.Start();
    }

    // If indexing happens during searching, searching progressively gets slower, so wait on searching before indexing.
    blobFinder.WaitForCompletion();
    this.HasFailures |= blobFinder.HasFailures;

    packIndexer.Start();

    downloader.WaitForCompletion();
    this.HasFailures |= downloader.HasFailures;

    packIndexer.WaitForCompletion();
    this.HasFailures |= packIndexer.HasFailures;

    availableBlobs.CompleteAdding();

    if (hydrateFilesAfterDownload)
    {
        fileHydrator.WaitForCompletion();
        this.HasFailures |= fileHydrator.HasFailures;
    }

    matchedBlobCount = blobFinder.AvailableBlobCount + blobFinder.MissingBlobCount;
    downloadedBlobCount = blobFinder.MissingBlobCount;
    hydratedFileCount = fileHydrator.ReadFileCount;

    if (!this.SkipConfigUpdate && !this.HasFailures)
    {
        this.UpdateRefs(branchOrCommit, isBranch, refs);

        if (isBranch)
        {
            this.HasFailures |= !this.UpdateRefSpec(this.Tracer, this.Enlistment, branchOrCommit, refs);
        }
    }

    if (!this.HasFailures)
    {
        this.SavePrefetchArgs(commitToFetch, hydrateFilesAfterDownload);
    }
}
/// <param name="branchOrCommit">A specific branch to filter for, or null for all branches returned from info/refs</param>
public virtual void FastFetch(string branchOrCommit, bool isBranch)
{
    if (string.IsNullOrWhiteSpace(branchOrCommit))
    {
        throw new FetchException("Must specify branch or commit to fetch");
    }

    GitRefs refs = null;
    string commitToFetch;
    if (isBranch)
    {
        refs = this.ObjectRequestor.QueryInfoRefs(branchOrCommit);
        if (refs == null)
        {
            throw new FetchException("Could not query info/refs from: {0}", this.Enlistment.RepoUrl);
        }
        else if (refs.Count == 0)
        {
            throw new FetchException("Could not find branch {0} in info/refs from: {1}", branchOrCommit, this.Enlistment.RepoUrl);
        }

        commitToFetch = refs.GetTipCommitId(branchOrCommit);
    }
    else
    {
        commitToFetch = branchOrCommit;
    }

    this.DownloadMissingCommit(commitToFetch, this.GitObjects);

    // Dummy output queue since we don't need to checkout available blobs
    BlockingCollection<string> availableBlobs = new BlockingCollection<string>();

    // Configure pipeline
    // LsTreeHelper output => FindMissingBlobs => BatchDownload => IndexPack
    string shallowFile = Path.Combine(this.Enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Shallow);

    string previousCommit = null;

    // Use the shallow file to find a recent commit to diff against to try and reduce the number of SHAs to check
    DiffHelper blobEnumerator = new DiffHelper(this.Tracer, this.Enlistment, this.PathWhitelist);
    if (File.Exists(shallowFile))
    {
        previousCommit = File.ReadAllLines(shallowFile).Where(line => !string.IsNullOrWhiteSpace(line)).LastOrDefault();
        if (string.IsNullOrWhiteSpace(previousCommit))
        {
            this.Tracer.RelatedError("Shallow file exists, but contains no valid SHAs.");
            this.HasFailures = true;
            return;
        }
    }

    blobEnumerator.PerformDiff(previousCommit, commitToFetch);
    this.HasFailures |= blobEnumerator.HasFailures;

    FindMissingBlobsJob blobFinder = new FindMissingBlobsJob(this.SearchThreadCount, blobEnumerator.RequiredBlobs, availableBlobs, this.Tracer, this.Enlistment);
    BatchObjectDownloadJob downloader = new BatchObjectDownloadJob(this.DownloadThreadCount, this.ChunkSize, blobFinder.DownloadQueue, availableBlobs, this.Tracer, this.Enlistment, this.ObjectRequestor, this.GitObjects);
    IndexPackJob packIndexer = new IndexPackJob(this.IndexThreadCount, downloader.AvailablePacks, availableBlobs, this.Tracer, this.GitObjects);

    blobFinder.Start();
    downloader.Start();

    // If indexing happens during searching, searching progressively gets slower, so wait on searching before indexing.
    blobFinder.WaitForCompletion();
    this.HasFailures |= blobFinder.HasFailures;

    // Index regardless of failures; it'll shorten the next fetch.
    packIndexer.Start();

    downloader.WaitForCompletion();
    this.HasFailures |= downloader.HasFailures;

    packIndexer.WaitForCompletion();
    this.HasFailures |= packIndexer.HasFailures;

    if (!this.SkipConfigUpdate && !this.HasFailures)
    {
        this.UpdateRefs(branchOrCommit, isBranch, refs);

        if (isBranch)
        {
            this.HasFailures |= !this.UpdateRefSpec(this.Tracer, this.Enlistment, branchOrCommit, refs);
        }
    }
}
public ActionResult Translate(int id, string culture)
{
    var product = CurrentInstance.Database.Repository<Product>().Find(id);
    var productType = product.ProductType;
    var controls = FormControls.Controls().ToList();

    var compared = Mapper.Map<Product, ProductModel>(product);
    var translated = Mapper.Map<Product, ProductModel>(product);
    var difference = new ProductModel();

    var productKey = new EntityKey(typeof(Product), product.Id);
    var translation = _translationStore.Find(CultureInfo.GetCultureInfo(culture), productKey);
    if (translation != null)
    {
        translated.Name = translation.GetTranslatedText("Name");
        difference.Name = DiffHelper.GetDiffHtml(translation.GetOriginalText("Name"), product.Name);
    }

    // Custom fields
    // The product type definition might change, so we need to display fields based on the product type definition
    compared.CustomFields.Clear();
    translated.CustomFields.Clear();

    foreach (var definition in productType.CustomFieldDefinitions)
    {
        var control = controls.Find(c => c.Name == definition.ControlType);
        var field = product.CustomFields.FirstOrDefault(f => f.FieldName == definition.Name)
                    ?? new ProductCustomField(definition.Name, null);

        var comparedField = MapOrDefault(definition, field);
        comparedField.FieldText = control.GetFieldDisplayText(definition, field == null ? null : field.FieldValue);
        compared.CustomFields.Add(comparedField);

        var translatedField = MapOrDefault(definition, field);
        translatedField.FieldText = comparedField.FieldText;

        var diffField = new CustomFieldModel { FieldName = definition.Name };

        // If the field value is defined in the product editing page, then it's always the field value that gets translated
        if (translation != null && !control.IsSelectionList && !control.IsValuesPredefined)
        {
            translatedField.FieldText = translation.GetTranslatedText("CustomFields[" + field.FieldName + "]");
            translatedField.FieldValue = translatedField.FieldText;
            diffField.FieldText = DiffHelper.GetDiffHtml(translation.GetOriginalText("CustomFields[" + field.FieldName + "]"), comparedField.FieldText);
        }

        translated.CustomFields.Add(translatedField);
        difference.CustomFields.Add(diffField);
    }

    // Variants
    translated.Variants = GetVariants(product, productType, culture);

    ViewBag.ProductType = productType;
    ViewBag.Compared = compared;
    ViewBag.Difference = difference;

    return View(translated);
}
public void FastFetchWithStats(
    string branchOrCommit,
    bool isBranch,
    bool readFilesAfterDownload,
    out int matchedBlobCount,
    out int downloadedBlobCount,
    out int readFileCount)
{
    matchedBlobCount = 0;
    downloadedBlobCount = 0;
    readFileCount = 0;

    if (string.IsNullOrWhiteSpace(branchOrCommit))
    {
        throw new FetchException("Must specify branch or commit to fetch");
    }

    GitRefs refs = null;
    string commitToFetch;
    if (isBranch)
    {
        refs = this.ObjectRequestor.QueryInfoRefs(branchOrCommit);
        if (refs == null)
        {
            throw new FetchException("Could not query info/refs from: {0}", this.Enlistment.RepoUrl);
        }
        else if (refs.Count == 0)
        {
            throw new FetchException("Could not find branch {0} in info/refs from: {1}", branchOrCommit, this.Enlistment.RepoUrl);
        }

        commitToFetch = refs.GetTipCommitId(branchOrCommit);
    }
    else
    {
        commitToFetch = branchOrCommit;
    }

    this.DownloadMissingCommit(commitToFetch, this.GitObjects);

    // Configure pipeline
    // LsTreeHelper output => FindMissingBlobs => BatchDownload => IndexPack
    string shallowFile = Path.Combine(this.Enlistment.WorkingDirectoryRoot, GVFSConstants.DotGit.Shallow);

    string previousCommit = null;

    // Use the shallow file to find a recent commit to diff against to try and reduce the number of SHAs to check
    DiffHelper blobEnumerator = new DiffHelper(this.Tracer, this.Enlistment, this.FileList, this.FolderList);
    if (File.Exists(shallowFile))
    {
        previousCommit = File.ReadAllLines(shallowFile).Where(line => !string.IsNullOrWhiteSpace(line)).LastOrDefault();
        if (string.IsNullOrWhiteSpace(previousCommit))
        {
            this.Tracer.RelatedError("Shallow file exists, but contains no valid SHAs.");
            this.HasFailures = true;
            return;
        }
    }

    ThreadStart performDiff = () =>
    {
        blobEnumerator.PerformDiff(previousCommit, commitToFetch);
        this.HasFailures |= blobEnumerator.HasFailures;
    };

    if (readFilesAfterDownload)
    {
        // Call synchronously to ensure that blobEnumerator.FileAddOperations
        // is completely populated when ReadFilesJob starts
        performDiff();
    }
    else
    {
        new Thread(performDiff).Start();
    }

    BlockingCollection<string> availableBlobs = new BlockingCollection<string>();

    FindMissingBlobsJob blobFinder = new FindMissingBlobsJob(this.SearchThreadCount, blobEnumerator.RequiredBlobs, availableBlobs, this.Tracer, this.Enlistment);
    BatchObjectDownloadJob downloader = new BatchObjectDownloadJob(this.DownloadThreadCount, this.ChunkSize, blobFinder.MissingBlobs, availableBlobs, this.Tracer, this.Enlistment, this.ObjectRequestor, this.GitObjects);
    IndexPackJob packIndexer = new IndexPackJob(this.IndexThreadCount, downloader.AvailablePacks, availableBlobs, this.Tracer, this.GitObjects);
    ReadFilesJob readFiles = new ReadFilesJob(Environment.ProcessorCount * 2, blobEnumerator.FileAddOperations, availableBlobs, this.Tracer);

    blobFinder.Start();
    downloader.Start();

    if (readFilesAfterDownload)
    {
        readFiles.Start();
    }

    // If indexing happens during searching, searching progressively gets slower, so wait on searching before indexing.
    blobFinder.WaitForCompletion();
    this.HasFailures |= blobFinder.HasFailures;

    packIndexer.Start();

    downloader.WaitForCompletion();
    this.HasFailures |= downloader.HasFailures;

    packIndexer.WaitForCompletion();
    this.HasFailures |= packIndexer.HasFailures;

    availableBlobs.CompleteAdding();

    if (readFilesAfterDownload)
    {
        readFiles.WaitForCompletion();
        this.HasFailures |= readFiles.HasFailures;
    }

    matchedBlobCount = blobFinder.AvailableBlobCount + blobFinder.MissingBlobCount;
    downloadedBlobCount = blobFinder.MissingBlobCount;
    readFileCount = readFiles.ReadFileCount;

    if (!this.SkipConfigUpdate && !this.HasFailures)
    {
        this.UpdateRefs(branchOrCommit, isBranch, refs);

        if (isBranch)
        {
            this.HasFailures |= !this.UpdateRefSpec(this.Tracer, this.Enlistment, branchOrCommit, refs);
        }
    }
}
private static async Task<int> RunIncrementalist(SlnOptions options)
{
    var logger = new ConsoleLogger("Incrementalist", (s, level) => level >= (options.Verbose ? LogLevel.Debug : LogLevel.Information), false);

    try
    {
        var pwd = options.WorkingDirectory ?? Directory.GetCurrentDirectory();
        var insideRepo = Repository.IsValid(pwd);
        if (!insideRepo)
        {
            logger.LogError("Current path {0} is not located inside any known Git repository.", pwd);
            return -2;
        }

        var repoFolder = Repository.Discover(pwd);
        var workingFolder = Directory.GetParent(repoFolder).Parent;

        var repoResult = GitRunner.FindRepository(workingFolder.FullName);
        if (!repoResult.foundRepo)
        {
            Console.WriteLine("Unable to find Git repository located in {0}. Shutting down.", workingFolder.FullName);
            return -3;
        }

        // validate the target branch
        if (!DiffHelper.HasBranch(repoResult.repo, options.GitBranch))
        {
            // work around common CI server issues and check to see if this same branch is located
            // under "origin/{branchname}"
            options.GitBranch = $"origin/{options.GitBranch}";
            if (!DiffHelper.HasBranch(repoResult.repo, options.GitBranch))
            {
                Console.WriteLine("Current git repository doesn't have any branch named [{0}]. Shutting down.", options.GitBranch);
                Console.WriteLine("[Debug] Here are all of the currently known branches in this repository");
                foreach (var b in repoResult.repo.Branches)
                {
                    Console.WriteLine(b.FriendlyName);
                }

                return -4;
            }
        }

        if (!string.IsNullOrEmpty(repoFolder))
        {
            if (options.ListFolders)
            {
                await AnalyzeFolderDiff(options, workingFolder, logger);
            }
            else
            {
                await AnaylzeSolutionDIff(options, workingFolder, logger);
            }
        }

        return 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Error encountered during execution of Incrementalist.");
        return -1;
    }
}
public void ShouldDetectExistingBranch()
{
    Repository.CreateBranch("foo");
    DiffHelper.HasBranch(Repository.Repository, "foo").Should().BeTrue();
}
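A complementary negative check is sketched below. It is an assumption, not source code: it reuses the same fixture as ShouldDetectExistingBranch (the Repository wrapper and FluentAssertions), and "bar" is a hypothetical branch name that was never created.

public void ShouldNotDetectMissingBranch()
{
    // "bar" is a hypothetical branch name that does not exist in the test repository
    DiffHelper.HasBranch(Repository.Repository, "bar").Should().BeFalse();
}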
public Labeler(string repoOwner, string repoName, string secretUri, double threshold, DiffHelper diffHelper, DatasetHelper datasetHelper)
{
    _repoOwner = repoOwner;
    _repoName = repoName;
    _threshold = threshold;
    _secretUri = secretUri;
    _diffHelper = diffHelper;
    _datasetHelper = datasetHelper;
}
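The constructor above only copies its arguments into fields, so wiring up a Labeler is a plain constructor call. The sketch below is an assumption rather than source code: every literal value is a hypothetical placeholder, and diffHelper and datasetHelper are assumed to have been constructed elsewhere.

var labeler = new Labeler(
    repoOwner: "contoso",                                        // hypothetical owner
    repoName: "widgets",                                         // hypothetical repository
    secretUri: "https://example.vault.azure.net/secrets/token",  // hypothetical secret URI
    threshold: 0.6,                                              // hypothetical prediction threshold
    diffHelper: diffHelper,                                      // assumed to be created elsewhere
    datasetHelper: datasetHelper);                               // assumed to be created elsewhere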
protected override async Task<IEnumerable<string>> ProcessImpl(Task<Repository> previousTask)
{
    var repository = await previousTask;
    return DiffHelper.ChangedFiles(repository, _targetBranch);
}
private void FindDiff()
{
    string pageName = PageNameUtil.Decode(WikiPage);

    //Page oldPage = PagesProvider.PagesHistGetByNameVersion(pageName, OldVer, TenantId);
    //Page newPage = PagesProvider.PagesHistGetByNameVersion(pageName, NewVer, TenantId);
    var oldPage = Wiki.GetPage(pageName, OldVer);
    var newPage = Wiki.GetPage(pageName, NewVer);

    string oldVersion, newVersion;
    if (oldPage == null)
    {
        oldVersion = string.Empty;
    }
    else
    {
        oldVersion = oldPage.Body;
    }

    if (newPage == null)
    {
        newVersion = string.Empty;
    }
    else
    {
        newVersion = newPage.Body;
    }

    DiffHelper.Item[] f = DiffHelper.DiffText(oldVersion, newVersion, true, true, false);

    string[] aLines = oldVersion.Split('\n');
    string[] bLines = newVersion.Split('\n');

    int n = 0;
    StringBuilder sb = new StringBuilder();
    foreach (DiffHelper.Item aItem in f)
    {
        // write unchanged lines
        while ((n < aItem.StartB) && (n < bLines.Length))
        {
            WriteLine(n, null, bLines[n], sb);
            n++;
        }

        // write deleted lines
        for (int m = 0; m < aItem.deletedA; m++)
        {
            WriteLine(-1, "d", aLines[aItem.StartA + m], sb);
        }

        // write inserted lines
        while (n < aItem.StartB + aItem.insertedB)
        {
            WriteLine(n, "i", bLines[n], sb);
            n++;
        }
    }

    if (f.Length > 0 || (from bline in bLines where !bline.Trim().Equals(string.Empty) select bline).Count() > 0)
    {
        // write the rest of the unchanged lines
        while (n < bLines.Length)
        {
            WriteLine(n, null, bLines[n], sb);
            n++;
        }
    }

    litDiff.Text = sb.ToString();
}
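FindDiff renders each DiffHelper.Item as HTML-ish output line by line. A smaller sketch of the same API is shown below, summarizing a diff instead of rendering it. It is an assumption, not source code: SummarizeDiff is a hypothetical helper that relies only on the DiffText call and the StartA/StartB/deletedA/insertedB members used above, and it needs using System.Linq; for Sum.

private static string SummarizeDiff(string oldText, string newText)
{
    // Same arguments as FindDiff: trim space, ignore space, do not ignore case
    DiffHelper.Item[] items = DiffHelper.DiffText(oldText, newText, true, true, false);

    int deleted = items.Sum(i => i.deletedA);    // lines removed from the old text
    int inserted = items.Sum(i => i.insertedB);  // lines added in the new text

    return string.Format("{0} change block(s): {1} deleted line(s), {2} inserted line(s)", items.Length, deleted, inserted);
}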