/// <summary>
/// Writes the analysis result in plain text format, one output line per line record.
/// </summary>
/// <param name="writer">Writer into which the output should be written.</param>
/// <param name="revision">The last revision, whose line information is written out.</param>
private static void SaveOutputInText(TextWriter writer, RevisionData revision)
{
    // Relies on LineInfo's ToString() for the textual representation of each line.
    foreach (LineInfo lineEntry in revision.LineInfo)
    {
        writer.WriteLine(lineEntry);
    }
}
/// <summary>
/// Reads the parent hashes of the current revision record and returns them as
/// <see cref="RevisionData"/> instances, optionally shared through <paramref name="cache"/>.
/// </summary>
/// <param name="cache">Optional cache of already-known revisions; may be null.</param>
/// <returns>Array of parent revisions (possibly empty).</returns>
private RevisionData[] ReadRevisionParents(Dictionary<Hash, RevisionData> cache)
{
    // Each parent occupies 40 hash characters plus one separator character → 41 chars.
    int lineEnd = FindNewLineOrEndOfString();
    int parentCount = (lineEnd - Position + 1) / 41;
    var result = new RevisionData[parentCount];
    if (parentCount == 0)
    {
        // No parents: just advance past the (empty) parents line.
        Position = lineEnd + 1;
        return result;
    }
    for (int i = 0; i < parentCount; ++i)
    {
        var sha1 = ReadHash(skip: 1);
        if (cache != null)
        {
            // Reuse a cached instance when available, otherwise create and register one.
            if (!cache.TryGetValue(sha1, out result[i]))
            {
                result[i] = new RevisionData(sha1);
                cache.Add(sha1, result[i]);
            }
        }
        else
        {
            result[i] = new RevisionData(sha1);
        }
    }
    return result;
}
/// <summary>
/// Parses reflog records from git output into a list of <see cref="ReflogRecordData"/>,
/// sharing revision instances through <paramref name="cache"/>.
/// </summary>
/// <param name="output">Raw git command output.</param>
/// <param name="cache">Cache of already-parsed revisions, keyed by hash.</param>
/// <returns>Parsed reflog records in output order.</returns>
private static IList<ReflogRecordData> ParseResult1(GitOutput output, Dictionary<Hash, RevisionData> cache)
{
    // Too short to contain even a single SHA-1 → nothing to parse.
    if (output.Output.Length < 40)
    {
        return new ReflogRecordData[0];
    }
    var parser = new GitParser(output.Output);
    var records = new List<ReflogRecordData>();
    int recordIndex = 0;
    while (!parser.IsAtEndOfString)
    {
        string selector = parser.ReadLine();
        if (selector.Length == 0)
        {
            // Empty selector line terminates the record stream.
            break;
        }
        string message = parser.ReadLine();
        var sha1 = parser.ReadHash(skip: 1);
        RevisionData revision;
        if (!cache.TryGetValue(sha1, out revision))
        {
            revision = new RevisionData(sha1);
            cache.Add(sha1, revision);
        }
        parser.ParseRevisionData(revision, cache);
        records.Add(new ReflogRecordData(recordIndex++, message, revision));
    }
    return records;
}
/// <summary>
/// Maps a <see cref="NodeVersion"/> onto a revision: fills (or updates) the revision item
/// and builds its metadata from the latest node-version status.
/// </summary>
/// <param name="source">Persisted node version to map from.</param>
/// <param name="destination">Revision to map into; its Item is created when null.</param>
/// <param name="lookupHelper">Lookup helper used by the nested mapping calls.</param>
/// <param name="masterMapper">Mapping engine used by the nested mapping calls.</param>
public void MapRevision(NodeVersion source, Revision<TypedEntity> destination, AbstractLookupHelper lookupHelper, AbstractMappingEngine masterMapper)
{
    // Create the item when mapping into an empty revision, otherwise update it in place.
    if (destination.Item == null)
    {
        destination.Item = MapTypedEntityForRevision(source, lookupHelper, masterMapper);
    }
    else
    {
        MapTypedEntityForRevision(source, destination.Item, lookupHelper, masterMapper);
    }

    var latestStatus = GetLatestNodeVersionStatus(source, lookupHelper, masterMapper);
    // No status history yet → fall back to MinValue / a placeholder status type.
    var utcStatusChanged = latestStatus != null ? latestStatus.Date : DateTimeOffset.MinValue;
    var statusType = latestStatus != null
        ? MapEntityStatusType(latestStatus.NodeVersionStatusType, lookupHelper, masterMapper)
        : new RevisionStatusType("unknown", "Unknown / Not specified");

    destination.MetaData = new RevisionData()
    {
        Id = (HiveId)source.Id,
        StatusType = statusType,
        UtcCreated = source.NodeVersionStatuses.First().Date,
        UtcModified = utcStatusChanged,
        UtcStatusChanged = utcStatusChanged
    };
    destination.Item.UtcModified = source.DateCreated;
    destination.Item.UtcStatusChanged = utcStatusChanged;
}
/// <summary>Returns revision with specified SHA1 or <c>null</c> if such revision does not exist.</summary>
/// <param name="sha1">SHA-1 of required revision.</param>
/// <returns>Revision with specified SHA-1 or <c>null</c> if such revision does not exist.</returns>
/// <remarks>If revision is not present in cache, it will be queried from git repo.</remarks>
public Revision TryGetRevision(Hash sha1)
{
    lock (SyncRoot)
    {
        Revision revision;
        if (_revisions.TryGetValue(sha1, out revision))
        {
            // Cached, but may not have its details loaded yet.
            if (!revision.IsLoaded)
            {
                revision.Load();
            }
            return revision;
        }
        RevisionData revisionData;
        try
        {
            revisionData = Repository.Accessor.QueryRevision.Invoke(
                new QueryRevisionParameters(sha1));
        }
        catch (GitException)
        {
            // The revision does not exist in the repository.
            return null;
        }
        return ObjectFactories.CreateRevision(Repository, revisionData);
    }
}
/// <summary>
/// Loads the status-change history for the entity identified by <paramref name="entityUri"/>
/// and converts each history row into a <see cref="RevisionData"/>.
/// </summary>
/// <param name="entityUri">Id of the entity whose revisions are queried; must not be empty.</param>
/// <returns>A set of revision data, one per status-history row.</returns>
private IEnumerable<RevisionData> GetAllRevisionData(HiveId entityUri)
{
    Mandate.ParameterNotEmpty(entityUri, "hiveId");

    //var entityStatusLog = Helper.NhSession.QueryOver<NodeVersionStatusHistory>()
    //    .OrderBy(x => x.Date).Desc
    //    .JoinQueryOver(x => x.NodeVersion).Where(x => x.Node.Id == (Guid)entityUri.Value)
    //    .List()
    //    .DistinctBy(x => x.Id);

    // Aliases used to join history → status type and history → version → node.
    NodeVersionStatusHistory aliasHistory = null;
    NodeVersion aliasVersion = null;
    Node aliasNode = null;
    NodeVersionStatusType aliasType = null;

    // Projection query: select only the needed columns as object[] rows, newest first,
    // then reshape each row into an anonymous type for readable access below.
    var entityStatusLog = Helper.NhSession.QueryOver<NodeVersionStatusHistory>(() => aliasHistory)
        .OrderBy(x => x.Date).Desc
        .JoinQueryOver(x => x.NodeVersionStatusType, () => aliasType)
        .JoinQueryOver(x => aliasHistory.NodeVersion, () => aliasVersion)
        .JoinQueryOver(x => x.Node, () => aliasNode)
        .Where(x => x.Id == (Guid)entityUri.Value)
        .Fetch(x => aliasHistory.NodeVersionStatusType).Eager
        .Select(x => x.Date,
                x => x.Id,
                x => aliasNode.DateCreated,
                x => aliasType.Id,
                x => aliasType.IsSystem,
                x => aliasType.Alias,
                x => aliasType.Name,
                x => aliasVersion.Id)
        .List<object[]>()
        .Select(col => new
        {
            Date = (DateTimeOffset)col[0],
            Id = (Guid)col[1],
            DateCreated = (DateTimeOffset)col[2],
            TypeId = (Guid)col[3],
            TypeIsSystem = (bool)col[4],
            TypeAlias = (string)col[5],
            TypeName = (string)col[6],
            VersionId = (Guid)col[7]
        });

    var otherRevisionData = new HashSet<RevisionData>();
    var changeset = new Changeset(new Branch("default")); // Ignored for the moment in the persistence layer for this provider
    foreach (var statusQueryRow in entityStatusLog)
    {
        // Rebuild the persisted status-type entity from the projected columns and map it
        // to the model-level RevisionStatusType.
        var nodeVersionStatusType = new NodeVersionStatusType
        {
            Alias = statusQueryRow.TypeAlias,
            Name = statusQueryRow.TypeName,
            Id = statusQueryRow.TypeId,
            IsSystem = statusQueryRow.TypeIsSystem
        };
        var revisionStatusType = FrameworkContext.TypeMappers.Map<RevisionStatusType>(nodeVersionStatusType);
        var revisionData = new RevisionData(changeset, (HiveId)statusQueryRow.VersionId, revisionStatusType)
        {
            UtcCreated = statusQueryRow.DateCreated,
            UtcModified = statusQueryRow.Date,
            UtcStatusChanged = statusQueryRow.Date
        };
        otherRevisionData.Add(revisionData);
    }
    return otherRevisionData;
}
/// <summary>
/// Saves the analysis results to the given stream in the selected output format.
/// </summary>
protected override void SaveOutput(Stream outputStream)
{
    if (revisions.Count == 0)
    {
        Console.Error.WriteLine("No revisions to analyze...");
        return;
    }
    if (!quiet)
    {
        Console.Error.Write("Writing resulting data...");
    }
    using (var writer = new StreamWriter(outputStream, Encoding.UTF8))
    {
        // Only the last revision carries the final, fully-merged line information.
        RevisionData lastRevision = revisions[revisions.Count - 1];
        if (selectedOutputFormat == OutputFormat.Text)
        {
            SaveOutputInText(writer, lastRevision);
        }
        else if (selectedOutputFormat == OutputFormat.Html)
        {
            SaveOutputInHtml(writer, lastRevision);
        }
    }
    if (!quiet)
    {
        Console.Error.WriteLine("OK");
    }
}
/// <summary>
/// Parses a single revision record starting at the current position and returns it.
/// </summary>
/// <returns>The parsed revision data.</returns>
public RevisionData ParseRevision()
{
    var revision = new RevisionData(ReadHash(skip: 1));
    ParseRevisionData(revision, null);
    return revision;
}
/// <summary>
/// Maps a <see cref="TypedEntity"/> onto a persisted <see cref="NodeVersion"/>: copies basic
/// fields, maps the schema (including attribute definitions), merges the attribute collection,
/// and appends a 'Created' status history entry.
/// </summary>
/// <param name="source">Entity to map from.</param>
/// <param name="destination">Node version to map into.</param>
/// <param name="masterNode">Node the new version belongs to.</param>
/// <param name="lookupHelper">Lookup helper used by the nested mapping calls.</param>
/// <param name="masterMapper">Mapping engine used by the nested mapping calls.</param>
private static void MapVersion(TypedEntity source, NodeVersion destination, Node masterNode, AbstractLookupHelper lookupHelper, AbstractMappingEngine masterMapper)
{
    destination.DateCreated = source.UtcModified;
    destination.Node = masterNode;

    // First map the schema which will also map the attribute definitions
    var destinationSchema = MapAttributeSchemaDefinition(source.EntitySchema, lookupHelper, masterMapper);
    destination.AttributeSchemaDefinition = destinationSchema;

    //var rdbmsMappedAttributes = MapAttributes(source.Attributes, lookupHelper, masterMapper).ToList();
    //foreach (var rdbmsMappedAttribute in rdbmsMappedAttributes)
    //{
    //    // Ensure attribute has the AttributeDefinition instance created when mapping
    //    // the schema, to avoid saving duplicate definitions
    //    var attribute = rdbmsMappedAttribute;
    //    rdbmsMappedAttribute.AttributeDefinition = rdbmsSchema.AttributeDefinitions.Single(x => x.Alias == attribute.AttributeDefinition.Alias);
    //}

    // Merge source attributes into the destination collection; the last lambda runs for
    // every merged attribute to fix up back-references and definitions.
    MergeMapCollections(
        source.Attributes,
        destination.Attributes,
        (sourceTypedAttribute, dest) => MapAttribute(sourceTypedAttribute, dest, lookupHelper, masterMapper),
        sourceTypedAttribute => MapAttribute(sourceTypedAttribute, lookupHelper, masterMapper),
        (sourceAttrib, destAttrib) =>
        {
            destAttrib.NodeVersion = destination;
            // Support inherited properties: if the attribute definition has a schema on it, it's from another schema
            var inheritedDef = sourceAttrib.AttributeDefinition as InheritedAttributeDefinition;
            if (inheritedDef != null)
            {
                // Inherited definition: resolve it from the lookup helper rather than
                // the freshly-mapped schema.
                var def = lookupHelper.Lookup<AttributeDefinition>(inheritedDef.Id);
                if (def != null)
                {
                    destAttrib.AttributeDefinition = def;
                }
            }
            else
            {
                // Reuse the definition created while mapping the schema (matched by alias)
                // to avoid persisting duplicate definitions.
                destAttrib.AttributeDefinition = destinationSchema.AttributeDefinitions.Single(x => x.Alias == sourceAttrib.AttributeDefinition.Alias);
            }
        });

    //destination.Attributes.EnsureClearedWithProxy();
    //rdbmsMappedAttributes.ForEach(x =>
    //    {
    //        destination.Attributes.Add(x);
    //        x.NodeVersion = destination;
    //    });

    // Every new version starts its status history with a 'Created' entry.
    var revisionData = new RevisionData(FixedStatusTypes.Created);
    var rdbmsStatusHistory = CreateRdbmsStatusHistory(destination, revisionData, lookupHelper, masterMapper);
    destination.NodeVersionStatuses.Add(rdbmsStatusHistory);
}
/// <summary>
/// Checks whether this instance contains the same text as another one.
/// </summary>
/// <param name="obj">Another instance of this class</param>
/// <returns><c>true</c> if <paramref name="obj"/> contains the same text as this instance, <c>false</c> otherwise</returns>
/// <seealso cref="Object.Equals(object)"/>
public override bool Equals(object obj)
{
    // A non-RevisionData (or null) argument is never equal.
    var other = obj as RevisionData;
    return other != null && Text.Equals(other.Text);
}
/// <summary>
/// Executes the task.
/// </summary>
/// <returns>true if the execution was successful; otherwise, false.</returns>
public override bool Execute()
{
    // This task is only required if MSBuild does not generate the assembly info data.
    if (GenerateAssemblyInfo)
    {
        return true;
    }
    logger = new TaskLogger(Log);
    logger.Trace("NetRevisionTask: PatchAssemblyInfo");

    // Analyse working directory
    RevisionData data = Common.ProcessDirectory(ProjectDir, RequiredVcs, TagMatch, logger);
    if (!string.IsNullOrEmpty(RequiredVcs) && data.VcsProvider == null)
    {
        logger.Error($@"Required VCS ""{RequiredVcs}"" not present.");
        return false;
    }

    // Resolve the revision format: explicit property → project configuration → VCS default.
    if (string.IsNullOrEmpty(RevisionFormat))
    {
        RevisionFormat = Common.GetRevisionFormat(ProjectDir, logger, true);
    }
    if (string.IsNullOrEmpty(RevisionFormat))
    {
        RevisionFormat = data.GetDefaultRevisionFormat(logger);
    }

    var formatter = new RevisionFormatter { RevisionData = data, RemoveTagV = RemoveTagV };
    try
    {
        var aih = new AssemblyInfoHelper(ProjectDir, true, logger);
        SourceAssemblyInfo = aih.FileName;
        PatchedAssemblyInfo = aih.PatchFile(
            IntermediateOutputPath, formatter, RevisionFormat,
            ResolveSimpleAttributes, ResolveInformationalAttribute, RevisionNumberOnly,
            ResolveCopyright, ShowRevision, ProcessAnyProperty);
    }
    catch (FormatException ex)
    {
        // The revision format string was invalid.
        logger.Error(ex.Message);
        return false;
    }
    return true;
}
/// <summary>
/// Populates <paramref name="rev"/> from the parser's current position, which must point at
/// the start of a raw revision record: tree hash, parents, commit/author info, then the
/// commit message (subject and optional body) terminated by a NUL or end of string.
/// </summary>
/// <param name="rev">Revision object to fill in.</param>
/// <param name="cache">Optional cache used to share parent revision instances; may be null.</param>
public void ParseRevisionData(RevisionData rev, Dictionary<Hash, RevisionData> cache)
{
    rev.TreeHash = ReadHash(skip: 1);
    rev.Parents = ReadRevisionParents(cache);
    rev.CommitDate = ReadUnixTimestampLine();
    rev.CommitterName = ReadLine();
    rev.CommitterEmail = ReadLine();
    rev.AuthorDate = ReadUnixTimestampLine();
    rev.AuthorName = ReadLine();
    rev.AuthorEmail = ReadLine();

    // Subject + Body
    int eoc = FindNullOrEndOfString();
    int bodyStart;
    int subjectEnd = FindSeparatingEmptyLine(eoc, out bodyStart);
    if (subjectEnd == -1)
    {
        // No empty separator line: the whole message is the subject, there is no body.
        int eos = eoc - 1;
        char c = String[eos];
        // Trim trailing CR/LF characters off the subject.
        while ((c == '\r') || (c == '\n'))
        {
            c = String[--eos];
        }
        if (eos > Position)
        {
            rev.Subject = ReadStringUpToNoAdvance(eos + 1);
        }
        else
        {
            rev.Subject = string.Empty;
        }
        rev.Body = string.Empty;
    }
    else
    {
        rev.Subject = ReadStringUpToNoAdvance(subjectEnd);
        Position = bodyStart;
        int eob = eoc - 1;
        char c = String[eob];
        // Trim trailing CR/LF characters off the body.
        while ((c == '\r') || (c == '\n'))
        {
            c = String[--eob];
        }
        if (eob > Position)
        {
            rev.Body = ReadStringUpToNoAdvance(eob + 1);
        }
        else
        {
            rev.Body = string.Empty;
        }
    }
    // Advance past the record terminator to the start of the next record.
    Position = eoc + 1;
}
/// <summary>
/// Creates (or completes) a repository <see cref="Revision"/> from parsed revision data,
/// copying only the fields that were actually parsed.
/// </summary>
/// <param name="repository">Repository the revision belongs to; must not be null.</param>
/// <param name="revisionData">Parsed revision data; must not be null.</param>
/// <returns>The repository's revision object for the given SHA-1.</returns>
public static Revision CreateRevision(Repository repository, RevisionData revisionData)
{
    Verify.Argument.IsNotNull(repository, "repository");
    Verify.Argument.IsNotNull(revisionData, "revisionData");

    var revisions = repository.Revisions;
    var revision = revisions.GetOrCreateRevision(revisionData.SHA1);
    var fields = revisionData.Fields;
    // Nothing to copy when the revision is already loaded, or only the SHA-1 was parsed.
    if (revision.IsLoaded || fields == RevisionField.SHA1)
    {
        return revision;
    }
    if ((fields & RevisionField.Subject) == RevisionField.Subject)
    {
        revision.Subject = revisionData.Subject;
    }
    if ((fields & RevisionField.Body) == RevisionField.Body)
    {
        revision.Body = revisionData.Body;
    }
    if ((fields & RevisionField.TreeHash) == RevisionField.TreeHash)
    {
        revision.TreeHash = revisionData.TreeHash;
    }
    if ((fields & RevisionField.Parents) == RevisionField.Parents)
    {
        foreach (var parentData in revisionData.Parents)
        {
            revision.Parents.AddInternal(revisions.GetOrCreateRevision(parentData.SHA1));
        }
    }
    if ((fields & RevisionField.CommitDate) == RevisionField.CommitDate)
    {
        revision.CommitDate = revisionData.CommitDate;
    }
    // Committer/author are only set when both the name and the email were parsed.
    const RevisionField committerFields = RevisionField.CommitterName | RevisionField.CommitterEmail;
    if ((fields & committerFields) == committerFields)
    {
        revision.Committer = repository.Users.GetOrCreateUser(revisionData.CommitterName, revisionData.CommitterEmail);
    }
    if ((fields & RevisionField.AuthorDate) == RevisionField.AuthorDate)
    {
        revision.AuthorDate = revisionData.AuthorDate;
    }
    const RevisionField authorFields = RevisionField.AuthorName | RevisionField.AuthorEmail;
    if ((fields & authorFields) == authorFields)
    {
        revision.Author = repository.Users.GetOrCreateUser(revisionData.AuthorName, revisionData.AuthorEmail);
    }
    revision.IsLoaded = true;
    return revision;
}
/// <summary>Solves the filters defined in this class</summary>
/// <param name="linkedTo">This parameter has the related instance to retrieve the requested instances</param>
/// <param name="filters">This parameter has all the filters defined with this class</param>
/// <param name="displaySet">This parameter has the attributes to be returned for every instance</param>
/// <param name="orderCriteria">This parameter has the name of the order criteria to add to SQL statement</param>
/// <param name="startRowOID">This parameter has the OID necessary to start the search</param>
/// <param name="blockSize">This parameter represents the number of instances to be returned</param>
public override ONCollection QueryByFilter(ONLinkedToList linkedTo, ONFilterList filters, ONDisplaySet displaySet, string orderCriteria, ONOid startRowOID, int blockSize)
{
    // OrderCriteria
    ONOrderCriteria lComparer = GetOrderCriteria(orderCriteria);

    // Horizontal visibility
    if (filters == null)
    {
        filters = new ONFilterList();
    }
    filters.Add("HorizontalVisibility", new RevisionHorizontalVisibility());

    // Linked To List
    if (linkedTo == null)
    {
        linkedTo = new ONLinkedToList();
    }

    // Call Data
    try
    {
        RevisionData lData = new RevisionData(OnContext);
        ONCollection lCollection = lData.ExecuteQuery(linkedTo, filters, displaySet, lComparer, startRowOID, blockSize);

        // OrderCriteria
        if (lComparer != null)
        {
            lCollection.Sort(lComparer);
        }
        return lCollection;
    }
    catch (Exception e)
    {
        if (e is ONException)
        {
            // FIX: rethrow with 'throw;' instead of 'throw e;' to preserve the stack trace.
            throw;
        }
        string ltraceItem = "Error in query, Method: ExecuteQuery, Component: RevisionQuery";
        if (e is ONSystemException)
        {
            ONSystemException lException = e as ONSystemException;
            lException.addTraceInformation(ltraceItem);
            // The trace item was added to the same exception instance, so a bare
            // rethrow keeps both the trace info and the original stack trace.
            throw;
        }
        throw new ONSystemException(e, ltraceItem);
    }
}
/// <summary>
/// Creates a new revision wrapping the same item and sets the given status on it.
/// </summary>
/// <param name="revision">Revision whose item is carried over.</param>
/// <param name="revisionStatusType">Status assigned to the new revision.</param>
/// <returns>A new revision with empty id and the supplied status.</returns>
public static Revision<T> CopyToNewRevision<T>(this Revision<T> revision, RevisionStatusType revisionStatusType) where T : class, IVersionableEntity
{
    // The copy shares the item but gets fresh metadata with an empty id.
    return new Revision<T>(revision.Item)
    {
        MetaData = new RevisionData { Id = HiveId.Empty, StatusType = revisionStatusType }
    };
}
/// <summary>
/// Queries the revisions related to this instance through the 'RevisionPasajero' role,
/// applying the horizontal visibility filter.
/// </summary>
/// <returns>The related revisions, or null if the query result is not a RevisionCollection.</returns>
public RevisionCollection RevisionRoleHV()
{
    // Fix related instance
    var linkedTo = new ONLinkedToList();
    linkedTo["RevisionPasajero"] = Oid;

    var filterList = new ONFilterList();
    filterList.Add("HorizontalVisibility", new RevisionHorizontalVisibility());

    // Execute
    var data = new RevisionData(OnContext);
    return data.ExecuteQuery(linkedTo, filterList, null, null, null, 1) as RevisionCollection;
}
/// <summary>
/// Applies the diff between two revisions' data to the running line-info list:
/// deleted lines are removed and inserted lines are attributed to <paramref name="thisRevision"/>.
/// </summary>
/// <param name="previousRevision">Previous revision (kept for the call signature; not read here).</param>
/// <param name="thisRevision">Revision the inserted lines belong to.</param>
/// <param name="lineInfo">Line-info list to modify in place.</param>
/// <param name="prevData">Integer-encoded lines of the previous revision.</param>
/// <param name="thisData">Integer-encoded lines of this revision.</param>
private static void ProcessDiff(RevisionData previousRevision, RevisionData thisRevision, List<LineInfo> lineInfo, int[] prevData, int[] thisData)
{
    foreach (Diff.Item edit in Diff.DiffInt(prevData, thisData))
    {
        // Remove the deleted span first, then insert the new lines at the same spot.
        if (edit.deletedA > 0)
        {
            lineInfo.RemoveRange(edit.StartB, edit.deletedA);
        }
        for (int offset = 0; offset < edit.insertedB; ++offset)
        {
            int target = edit.StartB + offset;
            lineInfo.Insert(target, new LineInfo(thisRevision.Lines[target], thisRevision));
        }
    }
}
/// <summary>
/// Parses stashed states from git output, one revision record per stash entry.
/// </summary>
/// <param name="output">Raw git command output.</param>
/// <param name="cache">Cache of already-parsed revisions, keyed by hash.</param>
/// <returns>Stashed states in output order.</returns>
private static IList<StashedStateData> ParseResult1(GitOutput output, Dictionary<Hash, RevisionData> cache)
{
    var parser = new GitParser(output.Output);
    var states = new List<StashedStateData>();
    for (int index = 0; !parser.IsAtEndOfString; ++index)
    {
        var sha1 = new Hash(parser.String, parser.Position);
        var revision = new RevisionData(sha1);
        // Skip the 40 hash characters plus the separator before the record body.
        parser.Skip(41);
        parser.ParseRevisionData(revision, cache);
        states.Add(new StashedStateData(index, revision));
    }
    return states;
}
/// <summary>
/// Deletes this Revision instance after checking the inverse minimum cardinality of the
/// 'RevisionPasajero' role, removing its relationships first.
/// </summary>
/// <param name="p_thisRevisarAeronaveArg">OID of the Revision instance to delete.</param>
public void Delete_instanceServ(
    [ONInboundArgument("Clas_1348178542592347Ser_2Arg_1_Alias", RevisionClassText.Delete_instance_P_thisRevisarAeronaveArgumentAlias, "", "Clas_1348178542592347Ser_2_Alias", RevisionClassText.Delete_instanceServiceAlias, "Clas_1348178542592347_Alias", RevisionClassText.ClassAlias, AllowsNull = false)]
    RevisionOid p_thisRevisarAeronaveArg)
{
    try
    {
        RevisionData lData = new RevisionData(OnContext);

        #region Cardinality check for role 'RevisionPasajero'
        // Minimum cardinality check (inverse)
        if (Instance.RevisionPasajeroRole.Count > 0)
        {
            throw new ONMinCardinalityException(null, RevisionPasajeroClassText.ClassAlias, "Clas_1348178673664478_Alias", RevisionPasajeroClassText.RevisionRoleAlias, "Agr_1348602167296276Rol_2_Alias", 1);
        }
        #endregion Cardinality check for role 'RevisionPasajero'

        // Delete relationships
        {
            RevisionData lDataRel = new RevisionData(OnContext);
            lDataRel.RevisionPasajeroRoleDelete(Instance.Oid);
        }

        // Delete instance
        lData.UpdateDeleted(Instance);
    }
    catch (Exception e)
    {
        if (e is ONException)
        {
            // FIX: rethrow with 'throw;' instead of 'throw e;' to preserve the stack trace.
            throw;
        }
        string ltraceItem = "Definition class: Revision, Service: delete_instance, Component: RevisionAction, Method: Delete_instanceServ";
        if (e is ONSystemException)
        {
            ONSystemException lException = e as ONSystemException;
            lException.addTraceInformation(ltraceItem);
            // Trace info was added to the same exception instance; bare rethrow keeps the trace.
            throw;
        }
        throw new ONSystemException(e, ltraceItem);
    }
}
/// <summary>
/// Reads the parent hashes for each revision in <paramref name="revs"/> from raw commit
/// records ("parent &lt;sha1&gt;" header lines) and assigns them to rev.Parents.
/// </summary>
/// <param name="revs">Revisions, in the same order as the raw records in the parsed string.</param>
/// <param name="cache">Optional cache used to share parent revision instances; may be null.</param>
public void ParseCommitParentsFromRaw(IEnumerable<RevisionData> revs, Dictionary<Hash, RevisionData> cache)
{
    var parents = new List<RevisionData>();
    foreach (var rev in revs)
    {
        parents.Clear();
        // NOTE(review): 'start' is never read afterwards — confirm it can be removed.
        int start = Position;
        int eoc = FindNullOrEndOfString();
        // Skip the first header line of the record before scanning for parent lines.
        SkipLine();
        while (Position < eoc)
        {
            bool hasParents = false;
            // Consume the run of consecutive "parent <sha1>" lines.
            while (CheckValue("parent ") && Position < eoc)
            {
                Skip(7);
                var p = ReadHash();
                SkipLine();
                RevisionData prd;
                if (cache != null)
                {
                    // Reuse a cached instance when available, otherwise create and register one.
                    if (!cache.TryGetValue(p, out prd))
                    {
                        prd = new RevisionData(p);
                        cache.Add(p, prd);
                    }
                }
                else
                {
                    prd = new RevisionData(p);
                }
                parents.Add(prd);
                hasParents = true;
            }
            SkipLine();
            // Once the parent run has been read, the rest of the record is irrelevant.
            if (hasParents)
            {
                break;
            }
        }
        rev.Parents = parents.ToArray();
        // Jump past the record terminator to the start of the next record.
        Position = eoc + 1;
    }
}
/// <summary>
/// When the Entity is a Revision, this checks the previous revisions committed during this transaction to see if the status has actually changed,
/// if it determines that no previous entry exists in memory for this transaction, it will look up the entity in the index to see if the
/// status has changed. It then sets the status changed date accordingly on the TypedEntity and in the index fields.
/// </summary>
/// <param name="op">Index operation whose Entity is expected to expose Item (TypedEntity) and MetaData (RevisionData).</param>
/// <param name="revisionsCommitted">Revisions already committed during this transaction.</param>
/// <returns>Returns a Tuple of the updated TypedEntity and RevisionData</returns>
private Tuple<TypedEntity, RevisionData> EnsureCorrectStatusChangedDate(LinearHiveIndexOperation op, IEnumerable<Tuple<TypedEntity, RevisionData>> revisionsCommitted)
{
    // op.Entity is treated dynamically: it must expose Item and MetaData.
    dynamic r = op.Entity;
    TypedEntity te = r.Item;
    RevisionData rd = r.MetaData;

    //find all previous TypedEntities in the committed list matching this one
    // (the most recently modified one is the relevant predecessor)
    var previous = revisionsCommitted.Where(x => x.Item1.Id.Value.ToString() == te.Id.Value.ToString())
        .OrderBy(x => x.Item1.UtcModified)
        .LastOrDefault();
    SearchResult latestEntry = GetLatestEntry(r);
    if (previous == null && latestEntry != null
        && latestEntry.Fields.ContainsKey(FixedRevisionIndexFields.RevisionStatusId)
        && latestEntry.Fields[FixedRevisionIndexFields.RevisionStatusId] != rd.StatusType.Id.Value.ToString())
    {
        //if there's nothing in memory but there's a previously saved entry with a different status id, then update the date
        te.UtcStatusChanged = rd.UtcCreated;
        op.Fields[FixedIndexedFields.UtcStatusChanged] = new ItemField(te.UtcStatusChanged.UtcDateTime) { DataType = FieldDataType.DateTime };
    }
    else if (previous != null && previous.Item2.StatusType.Id.Value.ToString() != rd.StatusType.Id.Value.ToString())
    {
        //its changed in memory so update the date
        te.UtcStatusChanged = rd.UtcCreated;
        op.Fields[FixedIndexedFields.UtcStatusChanged] = new ItemField(te.UtcStatusChanged.UtcDateTime) { DataType = FieldDataType.DateTime };
    }
    else if (latestEntry != null)
    {
        //the status hasn't changed and the entity is not new, set to latest entries status changed
        te.UtcStatusChanged = ExamineHelper.FromExamineDateTime(latestEntry.Fields, FixedIndexedFields.UtcStatusChanged).Value;
        op.Fields[FixedIndexedFields.UtcStatusChanged] = new ItemField(te.UtcStatusChanged.UtcDateTime) { DataType = FieldDataType.DateTime };
    }
    return new Tuple<TypedEntity, RevisionData>(te, rd);
}
/// <summary>
/// Diff-helper test: builds two revisions from space-separated words, applies the diff,
/// and asserts that the resulting line list matches the target string word for word.
/// </summary>
/// <param name="from">Source text, one word per line record.</param>
/// <param name="to">Target text the diff must produce.</param>
private static void DoDiffTest(string from, string to)
{
    // Build the first revision with one LineInfo per word of 'from'.
    var lineInfo = new List<LineInfo>();
    var firstRevision = new RevisionData(1, from, null, 1, DateTime.Now, "first!", false, lineInfo);
    foreach (string word in from.Split(' '))
    {
        lineInfo.Add(new LineInfo(word, firstRevision));
    }

    // The second revision starts from a copy of the first revision's line list.
    var secondLineInfo = new List<LineInfo>(lineInfo);
    var secondRevision = new RevisionData(2, to, null, 2, DateTime.Now, "second", false, secondLineInfo);

    ProcessDiff(firstRevision, secondRevision, secondLineInfo, firstRevision.IntData, secondRevision.IntData);

    // After the diff is applied the line list must match 'to' word for word.
    string[] expectedWords = to.Split(' ');
    Assert.AreEqual(expectedWords.Length, secondLineInfo.Count);
    for (int i = 0; i < expectedWords.Length; ++i)
    {
        Assert.AreEqual(expectedWords[i], secondLineInfo[i].Line.Trim(), "Difference at word {0}", i);
    }
}
/// <summary>
/// Locks the current revision, lazily building the cached file list and MD5 response
/// messages, and increments the revision's lock count.
/// </summary>
/// <param name="files">Receives the cached read-only file list of the revision folder.</param>
/// <param name="md5HashesResponseMessages">Receives the cached MD5 response messages.</param>
/// <returns>The latest revision.</returns>
public static int LockRevision(out ReadOnlyCollection<string> files, out List<byte[]> md5HashesResponseMessages)
{
    int revisionNumber = RevisionChecker.revision;

    // Build the file list for the revision folder only once.
    if (sFiles == null)
    {
        var collected = new List<string>();
        string revisionDir = Application.persistentDataPath + "/Revisions/" + revisionNumber.ToString();
        BuildFilesList(revisionDir, revisionDir.Length + 1, ref collected);
        sFiles = collected.AsReadOnly();
    }

    // Build the MD5 response messages only once.
    if (sMD5HashesResponseMessages == null)
    {
        sMD5HashesResponseMessages = Server.BuildMD5HashesResponseMessages();
    }

    RevisionData revisionData;
    if (!sRevisions.TryGetValue(revisionNumber, out revisionData))
    {
        // TODO: Need to lock folder
        revisionData = new RevisionData();
        sRevisions.Add(revisionNumber, revisionData);
    }
    ++revisionData.locks;

    files = sFiles;
    md5HashesResponseMessages = sMD5HashesResponseMessages;

    DebugEx.VerboseFormat("RevisionsCache.LockRevision() = {0}", revisionNumber);
    return revisionNumber;
}
/// <summary>
/// Experimental HITS driver: opens a store, reads a query (id + URL list) from a file,
/// samples backward/forward links around the URL set, runs computeHITS over the combined
/// link table, and prints per-URL hub/authority scores.
/// args: [0]=service, [1]=store GUID, [2]=query file, [3]/[4]=date range for link validation.
/// </summary>
public static void Main(string[] args)
{
    var shs = new Service(args[0]).OpenStore(Guid.Parse(args[1]));
    //using (var rd = new BinaryReader(new BufferedStream(new FileStream(args[2], FileMode.Open, FileAccess.Read))))
    {
        RevisionData info = new RevisionData(args[1]);
        using (var rd = new StreamReader(new BufferedStream(new FileStream(args[2], FileMode.Open, FileAccess.Read))))
        {
            DateTime d1 = Convert.ToDateTime(args[3]);
            DateTime d2 = Convert.ToDateTime(args[4]);
            try
            {
                // Query file layout: query id, URL count, then one URL per line.
                int queryId = Int32.Parse(rd.ReadLine());
                int numUrls = Int32.Parse(rd.ReadLine());
                var urls = new string[numUrls];
                for (int i = 0; i < numUrls; i++)
                {
                    urls[i] = rd.ReadLine();
                }
                var sw = Stopwatch.StartNew();
                var uids = shs.BatchedUrlToUid(urls);
                var tbl = new UidMap(uids, true);
                long[] bwdUids = tbl;
                // Sample backward links of the root set, then validate each backward link
                // by checking its forward links within the given date range.
                var bwdLinks = shs.BatchedSampleLinks(bwdUids, Dir.Bwd, bs, true);
                SortedDictionary<string, long> temp = new SortedDictionary<string, long>();
                for (int i = 0; i < bwdUids.Length; i++)
                {
                    var bwdValidateUids = shs.BatchedSampleLinks(bwdLinks[i], Dir.Fwd, fs, true);
                    for (int j = 0; j < bwdValidateUids.Length; j++)
                    {
                        string[] validateUrls = shs.BatchedUidToUrl(bwdValidateUids[j]);
                        temp = info.getOutlinkInDuration(bwdLinks[i][j], bwdValidateUids[j], validateUrls, d1, d2);
                        // NOTE(review): the validation result is currently unused — the
                        // body of this check is empty. Confirm intended behavior.
                        if (temp.ContainsValue(bwdUids[i]))
                        {
                        }
                    }
                    var bwdValidateUrls = shs.BatchedUidToUrl(bwdLinks[i]);
                    //info.getInlinkInDuration(bwdUids[i], bwdLinks[i], )
                }
                // Extend the UID table with the sampled backward and forward neighborhoods.
                var fwdUids = shs.BatchedSampleLinks(tbl, Dir.Fwd, fs, true);
                var fwdUrls = shs.BatchedUidToUrl(tbl);
                foreach (long[] x in bwdLinks)
                {
                    tbl.Add(x);
                }
                foreach (long[] x in fwdUids)
                {
                    tbl.Add(x);
                }
                long[] srcUids = tbl;
                string[] return_urls = shs.BatchedUidToUrl(srcUids);
                //Console.Error.WriteLine("Length in Archive {0}", tbl.GetSize());
                //var extTbl = tbl.Subtract(new UidMap(uids, true));
                //Console.Error.WriteLine("Length in Archive {0}", extTbl.GetSize());
                //long one_hope_retrieval_time = sw.ElapsedTicks;
                //Console.WriteLine("Retrieve 1-hops nodes: {0} from {1} root_nodes in {2} microseconds", srcUids.Length, uids.Length, one_hope_retrieval_time / 10);
                //sw = Stopwatch.StartNew();
                var dstUids = shs.BatchedGetLinks(srcUids, Dir.Fwd);
                //long forward_link_of_one_hop = sw.ElapsedTicks;
                SortedDictionary<long, KeyValuePair<double, double>> return_score = computeHITS(tbl, srcUids, dstUids);
                //long[] extUids = extTbl;
                //var extUrls = shs.BatchedUidToUrl(extUids);
                long end_time = sw.ElapsedTicks;
                Console.WriteLine("HITS finish in {0} microseconds with {1} links", end_time / 10, tbl.GetSize());
                /*
                 * int menu = 0;
                 *
                 * while ((menu = Int32.Parse(Console.ReadLine())) > 0)
                 * {
                 * try {
                 * Console.WriteLine("You choose {0}.", menu);
                 * switch (menu)
                 * {
                 * case 1:
                 * Console.Error.WriteLine("Num of URLs: {0}", tbl.GetSize());
                 * tbl.PrintList();
                 * break;
                 * case 2:
                 * Console.Error.WriteLine("Num of extend URLs: {0}", extTbl.GetSize());
                 * extTbl.PrintList();
                 * break;
                 * case 3:
                 * for (int i = 0; i < uids.Length; i++)
                 * {
                 * if (uids[i] > -1)
                 * {
                 * int idx = tbl[uids[i]];
                 * Console.WriteLine("{0}\t{1}\t{2}", aut[idx], hub[idx], urls[i]);
                 * }
                 * }
                 * break;
                 * case 4:
                 * Console.Error.WriteLine("Num of extend URLs: {0}", extUids.Length);
                 * for (int i = 0; i < extUrls.Length; i++)
                 * {
                 * if (extUids[i] > -1)
                 * {
                 * int idx = tbl[extUids[i]];
                 * Console.WriteLine("{0}\t{1}\t{2}", aut[idx], hub[idx], extUrls[i]);
                 * }
                 * }
                 * break;
                 * case 5:
                 * Console.Error.WriteLine("Num of UIDS: {0}", uids.Length);
                 * for (int i = 0; i < uids.Length; i++)
                 * {
                 * Console.WriteLine("{0}", uids[i]);
                 * }
                 * break;
                 * case 6:
                 * Console.Error.WriteLine("Mapping UID to URL");
                 * long uid = Int64.Parse(Console.ReadLine());
                 * Console.WriteLine("{0}", shs.UidToUrl(uid));
                 * break;
                 * case 7:
                 * Console.Error.WriteLine("Mapping URL to UID");
                 * string url = Console.ReadLine();
                 * Console.WriteLine("{0}", shs.UrlToUid(url));
                 * break;
                 * default:
                 * Console.WriteLine("What?");
                 * break;
                 * }
                 * }
                 * catch (Exception ex)
                 * {
                 * Console.Error.WriteLine(ex.ToString());
                 * }
                 * }
                 *
                 */
                //Output the result scores to screen
                var result_urls = shs.BatchedUidToUrl(srcUids);
                for (int i = 0; i < srcUids.Length; i++)
                {
                    if (return_score.ContainsKey(srcUids[i]))
                    {
                        KeyValuePair<double, double> score = new KeyValuePair<double, double>();
                        return_score.TryGetValue(srcUids[i], out score);
                        Console.WriteLine("{0}\t{1}\t{2}", score.Key, score.Value, result_urls[i]);
                    }
                }
                //long end_time = sw.ElapsedTicks;
                //Console.WriteLine("SALSA finish in {0} microseconds", end_time / 10);
                //for (int i = 0; i < scores.Length; i++)
                //{
                //    Console.WriteLine("{0}: {1}", urls[i], scores[i]);
                //}
            }
            catch (EndOfStreamException)
            {
            }
        }
    }
}
/// <summary>
/// Creates a new Revision instance: builds the OID, initializes the attributes from the
/// service arguments, resolves the autonumeric id when unassigned, checks for duplicates
/// and persists the new instance.
/// </summary>
/// <returns>The created (and persisted) instance.</returns>
public RevisionInstance Create_instanceServ(
    [ONInboundArgument("Clas_1348178542592347Ser_1Arg_1_Alias", RevisionClassText.Create_instance_P_atrid_RevisarAeronaveArgumentAlias, "autonumeric", "Clas_1348178542592347Ser_1_Alias", RevisionClassText.Create_instanceServiceAlias, "Clas_1348178542592347_Alias", RevisionClassText.ClassAlias, AllowsNull = false)]
    ONInt p_atrid_RevisarAeronaveArg,
    [ONInboundArgument("Clas_1348178542592347Ser_1Arg_3_Alias", RevisionClassText.Create_instance_P_atrFechaRevisionArgumentAlias, "date", "Clas_1348178542592347Ser_1_Alias", RevisionClassText.Create_instanceServiceAlias, "Clas_1348178542592347_Alias", RevisionClassText.ClassAlias, AllowsNull = false)]
    ONDate p_atrFechaRevisionArg,
    [ONInboundArgument("Clas_1348178542592347Ser_1Arg_4_Alias", RevisionClassText.Create_instance_P_atrNombreRevisorArgumentAlias, "string", "Clas_1348178542592347Ser_1_Alias", RevisionClassText.Create_instanceServiceAlias, "Clas_1348178542592347_Alias", RevisionClassText.ClassAlias, Length = 100, AllowsNull = false)]
    ONString p_atrNombreRevisorArg,
    [ONInboundArgument("Clas_1348178542592347Ser_1Arg_5_Alias", RevisionClassText.Create_instance_P_atrId_AeronaveArgumentAlias, "string", "Clas_1348178542592347Ser_1_Alias", RevisionClassText.Create_instanceServiceAlias, "Clas_1348178542592347_Alias", RevisionClassText.ClassAlias, Length = 100, AllowsNull = false)]
    ONString p_atrId_AeronaveArg)
{
    try
    {
        RevisionData lData = new RevisionData(OnContext);

        #region Construct OID
        Instance.Oid = new RevisionOid();
        Instance.Oid.Id_RevisarAeronaveAttr = new ONInt(p_atrid_RevisarAeronaveArg);
        #endregion Construct OID

        #region Argument initialization 'p_atrFechaRevision' (FechaRevision)
        Instance.FechaRevisionAttr = new ONDate(p_atrFechaRevisionArg);
        #endregion Argument initialization 'p_atrFechaRevision' (FechaRevision)

        #region Argument initialization 'p_atrNombreRevisor' (NombreRevisor)
        Instance.NombreRevisorAttr = new ONString(p_atrNombreRevisorArg);
        #endregion Argument initialization 'p_atrNombreRevisor' (NombreRevisor)

        #region Argument initialization 'p_atrId_Aeronave' (Id_Aeronave)
        Instance.Id_AeronaveAttr = new ONString(p_atrId_AeronaveArg);
        #endregion Argument initialization 'p_atrId_Aeronave' (Id_Aeronave)

        #region Autonumeric attribute 'id_RevisarAeronave'
        // A negative id means "not assigned yet" → fetch the next autonumeric value.
        if (Instance.Id_RevisarAeronaveAttr < new ONInt(0))
        {
            RevisionData lAutonumericData = new RevisionData(OnContext);
            lAutonumericData.ClassName = "Revision";
            //Get Autonumeric
            Instance.Oid.Id_RevisarAeronaveAttr = lAutonumericData.GetAutonumericid_RevisarAeronave();
        }
        #endregion Autonumeric attribute 'id_RevisarAeronave'

        //Search if instance exists
        if (lData.Exist(Instance.Oid, null))
        {
            throw new ONInstanceExistException(null, "Clas_1348178542592347_Alias", RevisionClassText.ClassAlias);
        }

        //Update the new instance
        lData.UpdateAdded(Instance);
    }
    catch (Exception e)
    {
        if (e is ONException)
        {
            // FIX: rethrow with 'throw;' instead of 'throw e;' to preserve the stack trace.
            throw;
        }
        string ltraceItem = "Definition class: Revision, Service: create_instance, Component: RevisionAction, Method: Create_instanceServ";
        if (e is ONSystemException)
        {
            ONSystemException lException = e as ONSystemException;
            lException.addTraceInformation(ltraceItem);
            // Trace info was added to the same exception instance; bare rethrow keeps the trace.
            throw;
        }
        throw new ONSystemException(e, ltraceItem);
    }
    return Instance;
}
/// <summary>
/// Collects revision information for a Git working directory by running the git
/// command line and parsing its output: latest commit (hash, commit/author time,
/// committer/author names and e-mails), dirty state, branch name (with a GitLab CI
/// environment-variable fallback for detached HEAD), the most recent matching tag
/// and the linear first-parent revision count.
/// </summary>
/// <param name="path">Working directory to inspect.</param>
/// <returns>Populated revision data; CommitHash stays empty when no commit was found,
/// in which case the remaining queries are skipped.</returns>
public RevisionData ProcessDirectory(string path)
{
    // Initialise data
    RevisionData data = new RevisionData { VcsProvider = this };

    // Queries the commit hash and time from the latest log entry
    string gitLogFormat = "%H %ci %ai%n%cN%n%cE%n%aN%n%aE";
    Program.ShowDebugMessage("Executing: git log -n 1 --format=format:\"" + gitLogFormat + "\"");
    Program.ShowDebugMessage(" WorkingDirectory: " + path);
    ProcessStartInfo psi = new ProcessStartInfo(gitExec, "log -n 1 --format=format:\"" + gitLogFormat + "\"");
    psi.WorkingDirectory = path;
    psi.RedirectStandardOutput = true;
    psi.StandardOutputEncoding = Encoding.Default;
    psi.UseShellExecute = false;
    Process p = Process.Start(psi);
    string line = null;
    int lineCount = 0;
    while (!p.StandardOutput.EndOfStream)
    {
        line = p.StandardOutput.ReadLine();
        lineCount++;
        Program.ShowDebugMessage(line, 4);
        if (lineCount == 1)
        {
            // First line per the format string: "<40-char hash> <commit time> <author time>"
            Match m = Regex.Match(line, @"^([0-9a-fA-F]{40}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5})");
            if (m.Success)
            {
                data.CommitHash = m.Groups[1].Value;
                data.CommitTime = DateTimeOffset.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
                data.AuthorTime = DateTimeOffset.Parse(m.Groups[3].Value, CultureInfo.InvariantCulture);
            }
        }
        else if (lineCount == 2) { data.CommitterName = line.Trim(); }   // %cN
        else if (lineCount == 3) { data.CommitterEMail = line.Trim(); }  // %cE
        else if (lineCount == 4) { data.AuthorName = line.Trim(); }      // %aN
        else if (lineCount == 5) { data.AuthorEMail = line.Trim(); }     // %aE
    }
    if (!p.WaitForExit(1000)) { p.Kill(); }
    if (!string.IsNullOrEmpty(data.CommitHash))
    {
        // Query the working directory state
        Program.ShowDebugMessage("Executing: git status --porcelain");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "status --porcelain");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        while (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
        }
        if (!p.WaitForExit(1000)) { p.Kill(); }
        // Any porcelain output at all means the working directory has local changes.
        data.IsModified = !string.IsNullOrEmpty(line);

        // Query the current branch
        Program.ShowDebugMessage("Executing: git rev-parse --abbrev-ref HEAD");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "rev-parse --abbrev-ref HEAD");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        if (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            data.Branch = line.Trim();
        }
        p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
        if (!p.WaitForExit(1000)) { p.Kill(); }
        if ((data.Branch == "HEAD" || data.Branch.StartsWith("heads/")) && Environment.GetEnvironmentVariable("CI_SERVER") == "yes")
        {
            // GitLab runner uses detached HEAD so the normal Git command will always return
            // "HEAD" instead of the actual branch name.
            // "HEAD" is reported by default with GitLab CI runner.
            // "heads/*" is reported if an explicit 'git checkout -B' command has been issued.
            // Use GitLab CI provided environment variables instead if the available data is
            // plausible.
            if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI_BUILD_REF_NAME")) && string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI_BUILD_TAG")))
            {
                Program.ShowDebugMessage("Reading branch name from CI environment variable");
                data.Branch = Environment.GetEnvironmentVariable("CI_BUILD_REF_NAME");
            }
            else
            {
                Program.ShowDebugMessage("No branch name available in CI environment");
                data.Branch = "";
            }
        }
        // Query the most recent matching tag
        if (!string.IsNullOrWhiteSpace(Program.TagMatch) || Program.TagMatch == null)
        {
            string tagMatchOption = "";
            if (Program.TagMatch != null)
            {
                tagMatchOption = " --match \"" + Program.TagMatch + "\"";
            }
            Program.ShowDebugMessage("Executing: git describe --tags --first-parent --long" + tagMatchOption);
            Program.ShowDebugMessage(" WorkingDirectory: " + path);
            psi = new ProcessStartInfo(gitExec, "describe --tags --first-parent --long" + tagMatchOption);
            psi.WorkingDirectory = path;
            psi.RedirectStandardOutput = true;
            psi.RedirectStandardError = true;
            psi.StandardOutputEncoding = Encoding.Default;
            psi.UseShellExecute = false;
            p = Process.Start(psi);
            line = null;
            if (!p.StandardOutput.EndOfStream)
            {
                line = p.StandardOutput.ReadLine();
                Program.ShowDebugMessage(line, 4);
                line = line.Trim();
                // 'describe --long' output: "<tag>-<commits since tag>-g<abbrev hash>"
                Match m = Regex.Match(line, @"^(.*)-([0-9]+)-g[0-9a-fA-F]+$");
                if (m.Success)
                {
                    data.Tag = m.Groups[1].Value.Trim();
                    data.CommitsAfterTag = int.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
                }
            }
            p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
            if (!p.WaitForExit(1000)) { p.Kill(); }
        }
        // Query the linear revision number of the current branch (first parent)
        Program.ShowDebugMessage("Executing: git rev-list --first-parent --count HEAD");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "rev-list --first-parent --count HEAD");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        if (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            int revNum;
            if (int.TryParse(line.Trim(), out revNum))
            {
                data.RevisionNumber = revNum;
            }
            else
            {
                Program.ShowDebugMessage("Revision count could not be parsed", 2);
            }
        }
        p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
        if (!p.WaitForExit(1000)) { p.Kill(); }
    }
    return data;
}
/// <summary>
/// Collects revision information for a Subversion working directory: revision
/// number and mixed-revision flag via 'svnversion', dirty state via 'svn status',
/// and repository URL, branch, committer and commit time via 'svn info'.
/// </summary>
/// <param name="path">Working directory to inspect; corrected to the exact-cased path first.</param>
/// <param name="tagMatch">Not used by this provider.</param>
/// <returns>Populated revision data; returns early when no revision number was found.</returns>
public RevisionData ProcessDirectory(string path, string tagMatch)
{
    // Initialise data
    var data = new RevisionData { VcsProvider = this };

    // svn assumes case-sensitive path names on Windows, which is... bad.
    string fixedPath = PathUtil.GetExactPath(path);
    if (fixedPath != path)
    {
        Logger?.Warning($"Corrected path to: {fixedPath}");
    }
    path = fixedPath;

    // Get revision number
    Logger?.Trace("Executing: svnversion");
    Logger?.Trace($" WorkingDirectory: {path}");
    var psi = new ProcessStartInfo(svnversionExec)
    {
        WorkingDirectory = path,
        RedirectStandardOutput = true,
        UseShellExecute = false,
        CreateNoWindow = true
    };
    var process = Process.Start(psi);
    string line;
    while (!process.StandardOutput.EndOfStream)
    {
        line = process.StandardOutput.ReadLine();
        Logger?.RawOutput(line);
        // Possible output:
        // 1234          Revision 1234
        // 1100:1234     Mixed revisions 1100 to 1234
        // 1234M         Revision 1234, modified
        // 1100:1234MP   Mixed revisions 1100 to 1234, modified and partial
        Match m = Regex.Match(line, @"^([0-9]+:)?([0-9]+)");
        if (m.Success)
        {
            // A leading "NNNN:" group indicates mixed revisions; the second group
            // is taken as the revision number.
            data.IsMixed = m.Groups[1].Success;
            data.RevisionNumber = int.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
            break;
        }
    }
    process.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
    if (!process.WaitForExit(1000)) { process.Kill(); }
    if (data.RevisionNumber == 0)
    {
        return (data); // Try no more
    }

    // Query the working directory state; any output line means local modifications.
    Logger?.Trace("Executing: svn status");
    Logger?.Trace($" WorkingDirectory: {path}");
    psi = new ProcessStartInfo(svnExec, "status")
    {
        WorkingDirectory = path,
        RedirectStandardOutput = true,
        UseShellExecute = false,
        CreateNoWindow = true
    };
    process = Process.Start(psi);
    line = null;
    while (!process.StandardOutput.EndOfStream)
    {
        line = process.StandardOutput.ReadLine();
        Logger?.RawOutput(line);
    }
    if (!process.WaitForExit(1000)) { process.Kill(); }
    data.IsModified = !string.IsNullOrEmpty(line);

    Logger?.Trace($"Executing: svn info --revision {data.RevisionNumber}");
    Logger?.Trace($" WorkingDirectory: {path}");
    psi = new ProcessStartInfo(svnExec, $"info --revision {data.RevisionNumber}")
    {
        WorkingDirectory = path,
        RedirectStandardOutput = true,
        UseShellExecute = false,
        CreateNoWindow = true,
        //StandardOutputEncoding = Encoding.Default // TODO: Test if it's necessary (Encoding.Default is not supported in .NET Standard 1.6)
    };
    process = Process.Start(psi);
    string workingCopyRootPath = null;
    while (!process.StandardOutput.EndOfStream)
    {
        line = process.StandardOutput.ReadLine();
        Logger?.RawOutput(line);
        // WARNING: This is the info about the commit that has been *last updated to* in the
        //          specified *subdirectory* of the working directory. The revision number
        //          printed here belongs to that commit, but does not necessarily match the
        //          revision number determined above by 'svnversion'.
        //          If you need consistent data on the commit other than the revision
        //          number, be sure to always update the entire working directory and set
        //          the VCS path to its root.
        Match m = Regex.Match(line, @"^Working Copy Root Path: (.+)");
        if (m.Success)
        {
            workingCopyRootPath = m.Groups[1].Value.Trim();
        }
        // Try to be smart and detect the branch from the relative path. This should work
        // fine if the standard SVN repository tree is used.
        m = Regex.Match(line, @"^Relative URL: \^(.+)");
        if (m.Success)
        {
            data.Branch = m.Groups[1].Value.Trim().TrimStart('/');
            if (data.Branch.StartsWith("branches/", StringComparison.Ordinal))
            {
                data.Branch = data.Branch.Substring(9);
            }
            // Cut off the current subdirectory
            if (workingCopyRootPath != null && path.StartsWith(workingCopyRootPath, StringComparison.OrdinalIgnoreCase))
            {
                int subdirLength = path.Length - workingCopyRootPath.Length;
                data.Branch = data.Branch.Substring(0, data.Branch.Length - subdirLength);
            }
        }
        // Use "Repository Root" because "URL" is only the URL where the working directory
        // was checked out from. This can be a subdirectory of the repository if only a part
        // of it was checked out, like "/trunk" or a branch.
        m = Regex.Match(line, @"^Repository Root: (.+)");
        if (m.Success)
        {
            data.RepositoryUrl = m.Groups[1].Value.Trim();
        }
        m = Regex.Match(line, @"^Last Changed Author: (.+)");
        if (m.Success)
        {
            data.CommitterName = m.Groups[1].Value.Trim();
        }
        m = Regex.Match(line, @"^Last Changed Date: ([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2} [+-][0-9]{4})");
        if (m.Success)
        {
            data.CommitTime = DateTimeOffset.Parse(m.Groups[1].Value, CultureInfo.InvariantCulture);
        }
    }
    if (!process.WaitForExit(1000)) { process.Kill(); }
    return (data);
}
/// <summary>
/// Reads the parent-commit hashes from the current line and returns one
/// RevisionData per parent (each entry occupies 41 characters: a 40-character
/// hash plus a separator). Instances are reused from <paramref name="cache"/>
/// when a cache is supplied.
/// </summary>
/// <param name="cache">Optional hash-to-revision cache; may be null.</param>
/// <returns>Parent revisions in the order they appear on the line.</returns>
private RevisionData[] ReadRevisionParents(Dictionary<Hash, RevisionData> cache)
{
    int lineEnd = FindNewLineOrEndOfString();
    int parentCount = (lineEnd - Position + 1) / 41;
    var result = new RevisionData[parentCount];
    if(parentCount == 0)
    {
        // Empty parent list: just step past the line terminator.
        Position = lineEnd + 1;
        return result;
    }
    for(int index = 0; index < parentCount; ++index)
    {
        var hash = ReadHash(skip: 1);
        if(cache != null)
        {
            if(!cache.TryGetValue(hash, out result[index]))
            {
                result[index] = new RevisionData(hash);
                cache.Add(hash, result[index]);
            }
        }
        else
        {
            result[index] = new RevisionData(hash);
        }
    }
    return result;
}
/// <summary>
/// Parses the git output for a single requested revision. A non-zero exit code
/// is translated to <see cref="UnknownRevisionException"/> when the error text
/// indicates an unknown revision, otherwise the output throws its own error.
/// </summary>
/// <param name="parameters">Query parameters carrying the requested SHA-1.</param>
/// <param name="output">Raw git output to parse.</param>
/// <returns>The parsed revision data.</returns>
public RevisionData ParseSingleRevision(QueryRevisionParameters parameters, GitOutput output)
{
    Assert.IsNotNull(parameters);
    Assert.IsNotNull(output);
    if(output.ExitCode != 0)
    {
        var requestedName = parameters.SHA1.ToString();
        if(IsUnknownRevisionError(output.Error, requestedName))
        {
            throw new UnknownRevisionException(requestedName);
        }
        output.Throw();
    }
    var revision = new RevisionData(parameters.SHA1);
    new GitParser(output.Output).ParseRevisionData(revision, null);
    return revision;
}
/// <summary>
/// Parses reference listing output (lines of "&lt;40-char hash&gt;&lt;separator&gt;&lt;ref name&gt;")
/// into local branches, remote branches, tags and the stash, collecting only the
/// reference types requested in <paramref name="parameters"/>. The 'encountered*'
/// flags exploit the ordering of the output (heads, then remotes/stash, then tags)
/// to skip prefix checks that can no longer match.
/// </summary>
/// <param name="parameters">Specifies which reference types to collect.</param>
/// <param name="output">Raw git output to parse.</param>
/// <returns>The collected references; lists for unrequested types are null.</returns>
public ReferencesData ParseReferences(QueryReferencesParameters parameters, GitOutput output)
{
    Assert.IsNotNull(parameters);
    Assert.IsNotNull(output);
    var refTypes = parameters.ReferenceTypes;
    bool needHeads = (refTypes & ReferenceType.LocalBranch) == ReferenceType.LocalBranch;
    bool needRemotes = (refTypes & ReferenceType.RemoteBranch) == ReferenceType.RemoteBranch;
    bool needTags = (refTypes & ReferenceType.Tag) == ReferenceType.Tag;
    bool needStash = (refTypes & ReferenceType.Stash) == ReferenceType.Stash;
    var heads = needHeads ? new List<BranchData>() : null;
    var remotes = needRemotes ? new List<BranchData>() : null;
    var tags = needTags ? new List<TagData>() : null;
    RevisionData stash = null;
    bool encounteredRemoteBranch = false;
    bool encounteredStash = false;
    bool encounteredTag = false;
    var refs = output.Output;
    int pos = 0;
    int l = refs.Length;
    while(pos < l)
    {
        // Each record starts with a 40-character hash followed by one separator char.
        var hash = new Hash(refs, pos);
        pos += 41;
        var end = refs.IndexOf('\n', pos);
        if(end == -1) end = l;
        if(!encounteredRemoteBranch && StringUtility.CheckValue(refs, pos, GitConstants.LocalBranchPrefix))
        {
            if(needHeads)
            {
                pos += GitConstants.LocalBranchPrefix.Length;
                var name = refs.Substring(pos, end - pos);
                var branch = new BranchData(name, hash, false, false, false);
                heads.Add(branch);
            }
        }
        else if(!encounteredStash && StringUtility.CheckValue(refs, pos, GitConstants.RemoteBranchPrefix))
        {
            encounteredRemoteBranch = true;
            if(needRemotes)
            {
                pos += GitConstants.RemoteBranchPrefix.Length;
                var name = refs.Substring(pos, end - pos);
                // Symbolic remote HEAD pointers are not real branches.
                if(!name.EndsWith("/HEAD"))
                {
                    var branch = new BranchData(name, hash, false, true, false);
                    remotes.Add(branch);
                }
            }
        }
        else if(!encounteredTag && !encounteredStash && StringUtility.CheckValue(refs, pos, GitConstants.StashFullName))
        {
            encounteredRemoteBranch = true;
            encounteredStash = true;
            if(needStash)
            {
                stash = new RevisionData(hash);
            }
        }
        else if(StringUtility.CheckValue(refs, pos, GitConstants.TagPrefix))
        {
            encounteredRemoteBranch = true;
            encounteredStash = true;
            encounteredTag = true;
            if(needTags)
            {
                pos += GitConstants.TagPrefix.Length;
                var name = refs.Substring(pos, end - pos);
                var type = TagType.Lightweight;
                if(end < l - 1)
                {
                    // Peek at the next line: an annotated tag is followed by a
                    // dereferenced entry with the same name plus a postfix
                    // (presumably "^{}", hence the +3 length check — confirm
                    // against GitConstants.DereferencedTagPostfix).
                    int s2 = end + 1;
                    int pos2 = s2 + 41 + GitConstants.TagPrefix.Length;
                    var end2 = refs.IndexOf('\n', pos2);
                    if(end2 == -1) end2 = l;
                    if(end2 - pos2 == end - pos + 3)
                    {
                        if(StringUtility.CheckValue(refs, pos2, name) && StringUtility.CheckValue(refs, pos2 + name.Length, GitConstants.DereferencedTagPostfix))
                        {
                            // Use the dereferenced hash and consume both lines.
                            type = TagType.Annotated;
                            hash = new Hash(refs, s2);
                            end = end2;
                        }
                    }
                }
                var tag = new TagData(name, hash, type);
                tags.Add(tag);
            }
            else break;
        }
        pos = end + 1;
    }
    return new ReferencesData(heads, remotes, tags, stash);
}
/// <summary>
/// Collects revision information for a Git working directory by running the git
/// command line and parsing its output: latest commit (hash, commit/author time,
/// committer/author names and e-mails), dirty state, branch name, the most recent
/// matching tag and the linear first-parent revision count.
/// </summary>
/// <param name="path">Working directory to inspect.</param>
/// <returns>Populated revision data; CommitHash stays empty when no commit was found,
/// in which case the remaining queries are skipped.</returns>
public RevisionData ProcessDirectory(string path)
{
    // Initialise data
    RevisionData data = new RevisionData { VcsProvider = this };

    // Queries the commit hash and time from the latest log entry
    string gitLogFormat = "%H %ci %ai%n%cN%n%cE%n%aN%n%aE";
    Program.ShowDebugMessage("Executing: git log -n 1 --format=format:\"" + gitLogFormat + "\"");
    Program.ShowDebugMessage(" WorkingDirectory: " + path);
    ProcessStartInfo psi = new ProcessStartInfo(gitExec, "log -n 1 --format=format:\"" + gitLogFormat + "\"");
    psi.WorkingDirectory = path;
    psi.RedirectStandardOutput = true;
    psi.StandardOutputEncoding = Encoding.Default;
    psi.UseShellExecute = false;
    Process p = Process.Start(psi);
    string line = null;
    int lineCount = 0;
    while (!p.StandardOutput.EndOfStream)
    {
        line = p.StandardOutput.ReadLine();
        lineCount++;
        Program.ShowDebugMessage(line, 4);
        if (lineCount == 1)
        {
            // First line per the format string: "<40-char hash> <commit time> <author time>"
            Match m = Regex.Match(line, @"^([0-9a-fA-F]{40}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5})");
            if (m.Success)
            {
                data.CommitHash = m.Groups[1].Value;
                data.CommitTime = DateTimeOffset.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
                data.AuthorTime = DateTimeOffset.Parse(m.Groups[3].Value, CultureInfo.InvariantCulture);
            }
        }
        else if (lineCount == 2) { data.CommitterName = line.Trim(); }   // %cN
        else if (lineCount == 3) { data.CommitterEMail = line.Trim(); }  // %cE
        else if (lineCount == 4) { data.AuthorName = line.Trim(); }      // %aN
        else if (lineCount == 5) { data.AuthorEMail = line.Trim(); }     // %aE
    }
    if (!p.WaitForExit(1000)) { p.Kill(); }
    if (!string.IsNullOrEmpty(data.CommitHash))
    {
        // Query the working directory state
        Program.ShowDebugMessage("Executing: git status --porcelain");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "status --porcelain");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        while (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
        }
        if (!p.WaitForExit(1000)) { p.Kill(); }
        // Any porcelain output at all means the working directory has local changes.
        data.IsModified = !string.IsNullOrEmpty(line);

        // Query the current branch
        Program.ShowDebugMessage("Executing: git rev-parse --abbrev-ref HEAD");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "rev-parse --abbrev-ref HEAD");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        if (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            data.Branch = line.Trim();
        }
        p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
        if (!p.WaitForExit(1000)) { p.Kill(); }
        // Query the most recent matching tag
        if (!string.IsNullOrWhiteSpace(Program.TagMatch) || Program.TagMatch == null)
        {
            string tagMatchOption = "";
            if (Program.TagMatch != null)
            {
                tagMatchOption = " --match \"" + Program.TagMatch + "\"";
            }
            Program.ShowDebugMessage("Executing: git describe --tags --first-parent --long" + tagMatchOption);
            Program.ShowDebugMessage(" WorkingDirectory: " + path);
            psi = new ProcessStartInfo(gitExec, "describe --tags --first-parent --long" + tagMatchOption);
            psi.WorkingDirectory = path;
            psi.RedirectStandardOutput = true;
            psi.RedirectStandardError = true;
            psi.StandardOutputEncoding = Encoding.Default;
            psi.UseShellExecute = false;
            p = Process.Start(psi);
            line = null;
            if (!p.StandardOutput.EndOfStream)
            {
                line = p.StandardOutput.ReadLine();
                Program.ShowDebugMessage(line, 4);
                line = line.Trim();
                // 'describe --long' output: "<tag>-<commits since tag>-g<abbrev hash>"
                Match m = Regex.Match(line, @"^(.*)-([0-9]+)-g[0-9a-fA-F]+$");
                if (m.Success)
                {
                    data.Tag = m.Groups[1].Value.Trim();
                    data.CommitsAfterTag = int.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
                }
            }
            p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
            if (!p.WaitForExit(1000)) { p.Kill(); }
        }
        // Query the linear revision number of the current branch (first parent)
        Program.ShowDebugMessage("Executing: git rev-list --first-parent --count HEAD");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "rev-list --first-parent --count HEAD");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        if (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            int revNum;
            if (int.TryParse(line.Trim(), out revNum))
            {
                data.RevisionNumber = revNum;
            }
            else
            {
                Program.ShowDebugMessage("Revision count could not be parsed", 2);
            }
        }
        p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
        if (!p.WaitForExit(1000)) { p.Kill(); }
    }
    return (data);
}
/// <summary>
/// Collects revision information for a Git working directory: latest commit
/// (hash, commit/author time, committer/author names and e-mails) via 'git log',
/// a tag via 'git describe', dirty state via 'git status --porcelain', branch
/// name via 'git rev-parse' and the linear first-parent revision count via
/// 'git rev-list'.
/// </summary>
/// <param name="path">Working directory to inspect.</param>
/// <returns>Populated revision data; when no commit was found CommitHash stays
/// empty and the status/branch/rev-list queries are skipped.</returns>
public RevisionData ProcessDirectory(string path)
{
    // Initialise data
    RevisionData data = new RevisionData { VcsProvider = this };

    // Queries the commit hash and time from the latest log entry
    string gitLogFormat = "%H %ci %ai%n%cN%n%cE%n%aN%n%aE";
    Program.ShowDebugMessage("Executing: git log -n 1 --format=format:\"" + gitLogFormat + "\"");
    Program.ShowDebugMessage(" WorkingDirectory: " + path);
    ProcessStartInfo psi = new ProcessStartInfo(gitExec, "log -n 1 --format=format:\"" + gitLogFormat + "\"");
    psi.WorkingDirectory = path;
    psi.RedirectStandardOutput = true;
    psi.StandardOutputEncoding = Encoding.Default;
    psi.UseShellExecute = false;
    Process p = Process.Start(psi);
    string line = null;
    int lineCount = 0;
    while (!p.StandardOutput.EndOfStream)
    {
        line = p.StandardOutput.ReadLine();
        lineCount++;
        Program.ShowDebugMessage(line, 4);
        if (lineCount == 1)
        {
            // First line per the format string: "<40-char hash> <commit time> <author time>"
            Match m = Regex.Match(line, @"^([0-9a-fA-F]{40}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5})");
            if (m.Success)
            {
                data.CommitHash = m.Groups[1].Value;
                // FIX: parse with the invariant culture as every sibling provider
                // does — git emits a fixed machine-readable date format, and the
                // current thread culture must not influence the result.
                data.CommitTime = DateTimeOffset.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
                data.AuthorTime = DateTimeOffset.Parse(m.Groups[3].Value, CultureInfo.InvariantCulture);
            }
        }
        else if (lineCount == 2) { data.CommitterName = line.Trim(); }   // %cN
        else if (lineCount == 3) { data.CommitterEMail = line.Trim(); }  // %cE
        else if (lineCount == 4) { data.AuthorName = line.Trim(); }      // %aN
        else if (lineCount == 5) { data.AuthorEMail = line.Trim(); }     // %aE
    }
    if (!p.WaitForExit(1000)) { p.Kill(); }

    //Try to get tag info for current commit
    if (string.IsNullOrEmpty(data.Tag))
    {
        Program.ShowDebugMessage("Executing: git describe");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "describe");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        while (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            if (!string.IsNullOrEmpty(line)) data.Tag = line;
        }
        if (!p.WaitForExit(1000)) { p.Kill(); }
        // NOTE(review): setting IsModified from the 'git describe' output looks
        // like a copy/paste remnant — describe output does not indicate local
        // modifications, and the value is recomputed from 'git status' below
        // whenever a commit hash was found. Kept for behavior compatibility;
        // confirm intent before removing.
        data.IsModified = !string.IsNullOrEmpty(line);
    }
    if (!string.IsNullOrEmpty(data.CommitHash))
    {
        // Query the working directory state
        Program.ShowDebugMessage("Executing: git status --porcelain");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "status --porcelain");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        while (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
        }
        if (!p.WaitForExit(1000)) { p.Kill(); }
        // Any porcelain output at all means the working directory has local changes.
        data.IsModified = !string.IsNullOrEmpty(line);

        // Query the current branch
        Program.ShowDebugMessage("Executing: git rev-parse --abbrev-ref HEAD");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "rev-parse --abbrev-ref HEAD");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        if (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            data.Branch = line.Trim();
        }
        p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
        if (!p.WaitForExit(1000)) { p.Kill(); }

        // Query the linear revision number of the current branch (first parent)
        Program.ShowDebugMessage("Executing: git rev-list --first-parent --count HEAD");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "rev-list --first-parent --count HEAD");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        if (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            int revNum;
            if (int.TryParse(line.Trim(), out revNum))
            {
                data.RevisionNumber = revNum;
            }
            else
            {
                Program.ShowDebugMessage("Revision count could not be parsed", 2);
            }
        }
        p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
        if (!p.WaitForExit(1000)) { p.Kill(); }
    }
    return data;
}
/// <summary>
/// Builds a persistence-layer status-history entry for the given node version,
/// stamped with the revision's status-change time and its mapped status type.
/// </summary>
/// <param name="destination">Node version the history entry belongs to.</param>
/// <param name="revisionData">Revision carrying the status type and change time.</param>
/// <param name="lookupHelper">Lookup helper forwarded to the status-type mapping.</param>
/// <param name="masterMapper">Mapping engine forwarded to the status-type mapping.</param>
private static NodeVersionStatusHistory CreateRdbmsStatusHistory(NodeVersion destination, RevisionData revisionData, AbstractLookupHelper lookupHelper, AbstractMappingEngine masterMapper)
{
    var mappedStatusType = MapStatusType(revisionData.StatusType, lookupHelper, masterMapper);
    var history = new NodeVersionStatusHistory();
    history.Date = revisionData.UtcStatusChanged;
    history.NodeVersion = destination;
    history.NodeVersionStatusType = mappedStatusType;
    return history;
}
/// <summary>
/// Fills <paramref name="rev"/> from the parser's current position, expecting the
/// fixed field order: tree hash, parent hashes, commit date, committer name and
/// e-mail, author date, author name and e-mail, then the commit subject and an
/// optional body separated by an empty line; the record ends at a NUL character
/// or the end of the string.
/// </summary>
/// <param name="rev">Revision object to populate.</param>
/// <param name="cache">Optional cache used to reuse parent RevisionData instances; may be null.</param>
public void ParseRevisionData(RevisionData rev, Dictionary<Hash, RevisionData> cache)
{
    rev.TreeHash = ReadHash(skip: 1);
    rev.Parents = ReadRevisionParents(cache);
    rev.CommitDate = ReadUnixTimestampLine();
    rev.CommitterName = ReadLine();
    rev.CommitterEmail = ReadLine();
    rev.AuthorDate = ReadUnixTimestampLine();
    rev.AuthorName = ReadLine();
    rev.AuthorEmail = ReadLine();
    // Subject + Body
    int eoc = FindNullOrEndOfString();
    int bodyStart;
    int subjectEnd = FindSeparatingEmptyLine(eoc, out bodyStart);
    if(subjectEnd == -1)
    {
        // No separating empty line: everything up to the record terminator
        // (minus trailing CR/LF characters) is the subject; there is no body.
        int eos = eoc - 1;
        char c = String[eos];
        while((c == '\r') || (c == '\n'))
        {
            c = String[--eos];
        }
        if(eos > Position)
        {
            rev.Subject = ReadStringUpToNoAdvance(eos + 1);
        }
        else
        {
            // Nothing but line terminators left — empty subject.
            rev.Subject = string.Empty;
        }
        rev.Body = string.Empty;
    }
    else
    {
        // Subject runs up to the empty line; the body follows it, again with
        // trailing CR/LF characters stripped.
        rev.Subject = ReadStringUpToNoAdvance(subjectEnd);
        Position = bodyStart;
        int eob = eoc - 1;
        char c = String[eob];
        while((c == '\r') || (c == '\n'))
        {
            c = String[--eob];
        }
        if(eob > Position)
        {
            rev.Body = ReadStringUpToNoAdvance(eob + 1);
        }
        else
        {
            rev.Body = string.Empty;
        }
    }
    // Step past the record terminator so the parser is positioned at the next record.
    Position = eoc + 1;
}
/// <summary>
/// Extracts the parent hashes for each revision in <paramref name="revs"/> from
/// consecutive raw commit records (header lines of the form "parent &lt;hash&gt;"),
/// each record terminated by a NUL character or the end of the string. Parent
/// instances are reused from <paramref name="cache"/> when a cache is supplied.
/// </summary>
/// <param name="revs">Revisions to assign parents to, in record order.</param>
/// <param name="cache">Optional hash-to-revision cache; may be null.</param>
public void ParseCommitParentsFromRaw(IEnumerable<RevisionData> revs, Dictionary<Hash, RevisionData> cache)
{
    var parents = new List<RevisionData>();
    foreach(var rev in revs)
    {
        parents.Clear();
        int start = Position; // NOTE(review): unused; kept as-is
        int eoc = FindNullOrEndOfString();
        // Skip the first header line of the record.
        SkipLine();
        while(Position < eoc)
        {
            bool hasParents = false;
            // Consume the consecutive "parent <hash>" header lines.
            while(CheckValue("parent ") && Position < eoc)
            {
                Skip(7); // length of "parent "
                var p = ReadHash();
                SkipLine();
                RevisionData prd;
                if(cache != null)
                {
                    if(!cache.TryGetValue(p, out prd))
                    {
                        prd = new RevisionData(p);
                        cache.Add(p, prd);
                    }
                }
                else
                {
                    prd = new RevisionData(p);
                }
                parents.Add(prd);
                hasParents = true;
            }
            SkipLine();
            // Once a parent run has been consumed, the rest of the record is irrelevant.
            if(hasParents) break;
        }
        rev.Parents = parents.ToArray();
        // Step past the record terminator to the next record.
        Position = eoc + 1;
    }
}
/// <summary>
/// Reads the revision hash at the current position and parses the full revision
/// record that follows it.
/// </summary>
/// <returns>The newly created, fully populated revision.</returns>
public RevisionData ParseRevision()
{
    var revisionHash = ReadHash(skip: 1);
    var result = new RevisionData(revisionHash);
    ParseRevisionData(result, null);
    return result;
}
/// <summary>
/// Collects revision information for a Git working directory by running the git
/// command line and parsing its output: latest commit (hash, commit/author time,
/// committer/author names and e-mails), dirty state, branch name (with a GitLab CI
/// environment-variable fallback for detached HEAD), the most recent matching tag
/// and the linear first-parent revision count.
/// </summary>
/// <param name="path">Working directory to inspect.</param>
/// <returns>Populated revision data; CommitHash stays empty when no commit was found,
/// in which case the remaining queries are skipped.</returns>
public RevisionData ProcessDirectory(string path)
{
    // Initialise data
    RevisionData data = new RevisionData { VcsProvider = this };

    // Queries the commit hash and time from the latest log entry
    string gitLogFormat = "%H %ci %ai%n%cN%n%cE%n%aN%n%aE";
    Program.ShowDebugMessage("Executing: git log -n 1 --format=format:\"" + gitLogFormat + "\"");
    Program.ShowDebugMessage(" WorkingDirectory: " + path);
    ProcessStartInfo psi = new ProcessStartInfo(gitExec, "log -n 1 --format=format:\"" + gitLogFormat + "\"");
    psi.WorkingDirectory = path;
    psi.RedirectStandardOutput = true;
    psi.StandardOutputEncoding = Encoding.Default;
    psi.UseShellExecute = false;
    Process p = Process.Start(psi);
    string line = null;
    int lineCount = 0;
    while (!p.StandardOutput.EndOfStream)
    {
        line = p.StandardOutput.ReadLine();
        lineCount++;
        Program.ShowDebugMessage(line, 4);
        if (lineCount == 1)
        {
            // First line per the format string: "<40-char hash> <commit time> <author time>"
            Match m = Regex.Match(line, @"^([0-9a-fA-F]{40}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5})");
            if (m.Success)
            {
                data.CommitHash = m.Groups[1].Value;
                data.CommitTime = DateTimeOffset.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
                data.AuthorTime = DateTimeOffset.Parse(m.Groups[3].Value, CultureInfo.InvariantCulture);
            }
        }
        else if (lineCount == 2) { data.CommitterName = line.Trim(); }   // %cN
        else if (lineCount == 3) { data.CommitterEMail = line.Trim(); }  // %cE
        else if (lineCount == 4) { data.AuthorName = line.Trim(); }      // %aN
        else if (lineCount == 5) { data.AuthorEMail = line.Trim(); }     // %aE
    }
    if (!p.WaitForExit(1000)) { p.Kill(); }
    if (!string.IsNullOrEmpty(data.CommitHash))
    {
        // Query the working directory state
        Program.ShowDebugMessage("Executing: git status --porcelain");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "status --porcelain");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        while (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
        }
        if (!p.WaitForExit(1000)) { p.Kill(); }
        // Any porcelain output at all means the working directory has local changes.
        data.IsModified = !string.IsNullOrEmpty(line);

        // Query the current branch
        Program.ShowDebugMessage("Executing: git rev-parse --abbrev-ref HEAD");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "rev-parse --abbrev-ref HEAD");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        if (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            data.Branch = line.Trim();
        }
        p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
        if (!p.WaitForExit(1000)) { p.Kill(); }
        if ((data.Branch == "HEAD" || data.Branch.StartsWith("heads/")) && Environment.GetEnvironmentVariable("CI_SERVER") == "yes")
        {
            // GitLab runner uses detached HEAD so the normal Git command will always return
            // "HEAD" instead of the actual branch name.
            // "HEAD" is reported by default with GitLab CI runner.
            // "heads/*" is reported if an explicit 'git checkout -B' command has been issued.
            // Use GitLab CI provided environment variables instead if the available data is
            // plausible.
            if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI_BUILD_REF_NAME")) && string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI_BUILD_TAG")))
            {
                Program.ShowDebugMessage("Reading branch name from CI environment variable");
                data.Branch = Environment.GetEnvironmentVariable("CI_BUILD_REF_NAME");
            }
            else
            {
                Program.ShowDebugMessage("No branch name available in CI environment");
                data.Branch = "";
            }
        }
        // Query the most recent matching tag
        if (!string.IsNullOrWhiteSpace(Program.TagMatch) || Program.TagMatch == null)
        {
            string tagMatchOption = "";
            if (Program.TagMatch != null)
            {
                tagMatchOption = " --match \"" + Program.TagMatch + "\"";
            }
            Program.ShowDebugMessage("Executing: git describe --tags --first-parent --long" + tagMatchOption);
            Program.ShowDebugMessage(" WorkingDirectory: " + path);
            psi = new ProcessStartInfo(gitExec, "describe --tags --first-parent --long" + tagMatchOption);
            psi.WorkingDirectory = path;
            psi.RedirectStandardOutput = true;
            psi.RedirectStandardError = true;
            psi.StandardOutputEncoding = Encoding.Default;
            psi.UseShellExecute = false;
            p = Process.Start(psi);
            line = null;
            if (!p.StandardOutput.EndOfStream)
            {
                line = p.StandardOutput.ReadLine();
                Program.ShowDebugMessage(line, 4);
                line = line.Trim();
                // 'describe --long' output: "<tag>-<commits since tag>-g<abbrev hash>"
                Match m = Regex.Match(line, @"^(.*)-([0-9]+)-g[0-9a-fA-F]+$");
                if (m.Success)
                {
                    data.Tag = m.Groups[1].Value.Trim();
                    data.CommitsAfterTag = int.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
                }
            }
            p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
            if (!p.WaitForExit(1000)) { p.Kill(); }
        }
        // Query the linear revision number of the current branch (first parent)
        Program.ShowDebugMessage("Executing: git rev-list --first-parent --count HEAD");
        Program.ShowDebugMessage(" WorkingDirectory: " + path);
        psi = new ProcessStartInfo(gitExec, "rev-list --first-parent --count HEAD");
        psi.WorkingDirectory = path;
        psi.RedirectStandardOutput = true;
        psi.StandardOutputEncoding = Encoding.Default;
        psi.UseShellExecute = false;
        p = Process.Start(psi);
        line = null;
        if (!p.StandardOutput.EndOfStream)
        {
            line = p.StandardOutput.ReadLine();
            Program.ShowDebugMessage(line, 4);
            int revNum;
            if (int.TryParse(line.Trim(), out revNum))
            {
                data.RevisionNumber = revNum;
            }
            else
            {
                Program.ShowDebugMessage("Revision count could not be parsed", 2);
            }
        }
        p.StandardOutput.ReadToEnd(); // Kindly eat up the remaining output
        if (!p.WaitForExit(1000)) { p.Kill(); }
    }
    return (data);
}
/// <summary>
/// Queries Git revision information for the specified working directory: commit hash and
/// times, committer/author identity, modified state, branch name, most recent matching tag
/// and the linear (first-parent) revision number.
/// </summary>
/// <param name="path">The Git working directory to process.</param>
/// <param name="tagMatch">Glob pattern for 'git describe --match' to restrict the considered
/// tags; null, whitespace or "*" matches all tags.</param>
/// <returns>The collected revision data. Fields keep their defaults for queries that
/// produced no (parsable) output.</returns>
public RevisionData ProcessDirectory(string path, string tagMatch)
{
    // Initialise data
    var data = new RevisionData
    {
        VcsProvider = this
    };

    // Query the commit hash and times from the latest log entry.
    // Line 1: "<hash> <committer date> <author date>"; lines 2-5: committer name,
    // committer e-mail, author name, author e-mail.
    string gitLogFormat = "%H %ci %ai%n%cN%n%cE%n%aN%n%aE";
    var lines = ExecuteGit(path, $@"log -n 1 --format=format:""{gitLogFormat}""");
    for (int i = 0; i < lines.Count; i++)
    {
        string line = lines[i];
        switch (i)
        {
            case 0:
                Match m = Regex.Match(line, @"^([0-9a-fA-F]{40}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5}) ([0-9-]{10} [0-9:]{8} [0-9+-]{5})");
                if (m.Success)
                {
                    data.CommitHash = m.Groups[1].Value;
                    data.CommitTime = DateTimeOffset.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
                    data.AuthorTime = DateTimeOffset.Parse(m.Groups[3].Value, CultureInfo.InvariantCulture);
                }
                break;
            case 1:
                data.CommitterName = line.Trim();
                break;
            case 2:
                data.CommitterEMail = line.Trim();
                break;
            case 3:
                data.AuthorName = line.Trim();
                break;
            case 4:
                data.AuthorEMail = line.Trim();
                break;
        }
    }

    if (!string.IsNullOrEmpty(data.CommitHash))
    {
        // Query the working directory state: any porcelain output means uncommitted changes.
        lines = ExecuteGit(path, "status --porcelain");
        data.IsModified = lines.Count > 0;

        // Query the current branch
        lines = ExecuteGit(path, "rev-parse --abbrev-ref HEAD");
        if (lines.Count > 0)
        {
            data.Branch = lines[0].Trim();
        }

        // Null-safe check: Branch stays unset if rev-parse produced no output
        // (the previous code would have thrown a NullReferenceException here).
        if ((data.Branch == "HEAD" || data.Branch?.StartsWith("heads/") == true) &&
            Environment.GetEnvironmentVariable("CI_SERVER") == "yes")
        {
            // GitLab runner uses detached HEAD so the normal Git command will always return
            // "HEAD" instead of the actual branch name.
            // "HEAD" is reported by default with GitLab CI runner.
            // "heads/*" is reported if an explicit 'git checkout -B' command has been issued.
            // Use GitLab CI provided environment variables instead if the available data is
            // plausible.
            if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI_COMMIT_REF_NAME")) &&
                string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI_COMMIT_TAG")))
            {
                // GitLab v9
                Logger?.Trace("Reading branch name from CI environment variable: CI_COMMIT_REF_NAME");
                data.Branch = Environment.GetEnvironmentVariable("CI_COMMIT_REF_NAME");
            }
            else if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI_BUILD_REF_NAME")) &&
                string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI_BUILD_TAG")))
            {
                // GitLab v8
                Logger?.Trace("Reading branch name from CI environment variable: CI_BUILD_REF_NAME");
                data.Branch = Environment.GetEnvironmentVariable("CI_BUILD_REF_NAME");
            }
            else
            {
                Logger?.Trace("No branch name available in CI environment");
                data.Branch = "";
            }
        }

        if (data.Branch == "HEAD" && Environment.GetEnvironmentVariable("TF_BUILD") == "True")
        {
            // Azure Pipelines runner uses detached HEAD so the normal Git command will always return
            // "HEAD" instead of the actual branch name.
            // Use Azure Pipelines provided environment variables instead if the available data is
            // plausible.
            string buildSourceBranch = Environment.GetEnvironmentVariable("BUILD_SOURCEBRANCH");
            const string headsPrefix = "refs/heads/";
            if (!string.IsNullOrEmpty(buildSourceBranch) && buildSourceBranch.StartsWith(headsPrefix))
            {
                Logger?.Trace("Reading branch name from environment variable: BUILD_SOURCEBRANCH");
                data.Branch = buildSourceBranch.Substring(headsPrefix.Length);
            }
            else
            {
                Logger?.Trace("No branch name available in CI environment");
                data.Branch = "";
            }
        }

        // Query the most recent matching tag
        string tagMatchOption = "";
        if (!string.IsNullOrWhiteSpace(tagMatch) && tagMatch != "*")
        {
            tagMatchOption = $@" --match ""{tagMatch}""";
        }
        // stderr is redirected (and discarded) so that 'git describe' does not print
        // "fatal: No names found" to the console when no tag exists.
        lines = ExecuteGit(path, "describe --tags --first-parent --long" + tagMatchOption, redirectStandardError: true);
        if (lines.Count > 0)
        {
            // Expected format: <tag>-<commits since tag>-g<abbrev hash>
            Match m = Regex.Match(lines[0].Trim(), @"^(.*)-([0-9]+)-g[0-9a-fA-F]+$");
            if (m.Success)
            {
                data.Tag = m.Groups[1].Value.Trim();
                data.CommitsAfterTag = int.Parse(m.Groups[2].Value, CultureInfo.InvariantCulture);
            }
        }

        // Query the linear revision number of the current branch (first parent)
        lines = ExecuteGit(path, "rev-list --first-parent --count HEAD");
        if (lines.Count > 0)
        {
            if (int.TryParse(lines[0].Trim(), out int revNum))
            {
                data.RevisionNumber = revNum;
            }
            else
            {
                Logger?.Warning("Revision count could not be parsed");
            }
        }
    }
    return data;
}

/// <summary>
/// Runs the Git executable with the specified arguments in the given working directory and
/// returns all lines it wrote to standard output. Every line is also passed to the logger.
/// The process is killed if it has not exited one second after its output was consumed.
/// </summary>
/// <param name="path">The working directory for the Git process.</param>
/// <param name="arguments">The command line arguments for Git.</param>
/// <param name="redirectStandardError">true to redirect (and discard) standard error,
/// suppressing Git's error messages on the console.</param>
/// <returns>The lines of standard output, without trailing newlines.</returns>
private List<string> ExecuteGit(string path, string arguments, bool redirectStandardError = false)
{
    Logger?.Trace("Executing: git " + arguments);
    Logger?.Trace($" WorkingDirectory: {path}");
    var psi = new ProcessStartInfo(gitExec, arguments)
    {
        WorkingDirectory = path,
        RedirectStandardOutput = true,
        RedirectStandardError = redirectStandardError,
        //StandardOutputEncoding = Encoding.Default, // TODO: Test if it's necessary (Encoding.Default is not supported in .NET Standard 1.6)
        UseShellExecute = false,
        CreateNoWindow = true
    };
    var lines = new List<string>();
    // Dispose the Process handle deterministically; the previous code leaked it.
    using (var process = Process.Start(psi))
    {
        while (!process.StandardOutput.EndOfStream)
        {
            string line = process.StandardOutput.ReadLine();
            Logger?.RawOutput(line);
            lines.Add(line);
        }
        if (!process.WaitForExit(1000))
        {
            process.Kill();
        }
    }
    return lines;
}