internal override void ParseHeader()
{
	// Parse "@@@ -55,12 -163,13 +163,15 @@@ protected boolean"
	//
	byte[] buf = file.buf;
	MutableInteger ptr = new MutableInteger();
	// Advance past the leading "@@@ " to the first "-start,count" token.
	ptr.value = RawParseUtils.NextLF(buf, startOffset, ' ');
	// One "-start[,count]" pair per ancestor. The text parses with its
	// leading '-' sign, so negate to recover the positive line number.
	for (int n = 0; n < old.Length; n++)
	{
		old[n].startLine = -RawParseUtils.ParseBase10(buf, ptr.value, ptr);
		if (buf[ptr.value] == ',')
		{
			old[n].lineCount = RawParseUtils.ParseBase10(buf, ptr.value + 1, ptr);
		}
		else
		{
			// git omits ",count" when exactly one line is involved.
			old[n].lineCount = 1;
		}
	}
	// Final "+start[,count]" pair describes the merged (new) file.
	newStartLine = RawParseUtils.ParseBase10(buf, ptr.value + 1, ptr);
	if (buf[ptr.value] == ',')
	{
		newLineCount = RawParseUtils.ParseBase10(buf, ptr.value + 1, ptr);
	}
	else
	{
		newLineCount = 1;
	}
}
internal ReflogEntry(byte[] raw, int pos)
{
	// Reflog line layout: "<old-sha1> SP <new-sha1> SP <personident> TAB <comment> LF"
	oldId = ObjectId.FromString(raw, pos);
	pos += Constants.OBJECT_ID_STRING_LENGTH;
	if (raw[pos++] != ' ')
	{
		throw new ArgumentException(JGitText.Get().rawLogMessageDoesNotParseAsLogEntry);
	}
	newId = ObjectId.FromString(raw, pos);
	pos += Constants.OBJECT_ID_STRING_LENGTH;
	if (raw[pos++] != ' ')
	{
		throw new ArgumentException(JGitText.Get().rawLogMessageDoesNotParseAsLogEntry);
	}
	who = RawParseUtils.ParsePersonIdentOnly(raw, pos);
	int p0 = RawParseUtils.Next(raw, pos, '\t');
	if (p0 >= raw.Length)
	{
		// personident has no \t, no comment present
		comment = string.Empty;
	}
	else
	{
		// Comment runs from just after the tab up to (excluding) the LF;
		// an empty remainder yields an empty comment.
		int p1 = RawParseUtils.NextLF(raw, p0);
		comment = p1 > p0 ? RawParseUtils.Decode(raw, p0, p1 - 1) : string.Empty;
	}
}
internal virtual void ParseHeader()
{
	// Parse "@@ -236,9 +236,9 @@ protected boolean"
	//
	byte[] buf = file.buf;
	MutableInteger ptr = new MutableInteger();
	// Advance past "@@ " to the "-start[,count]" token.
	ptr.value = RawParseUtils.NextLF(buf, startOffset, ' ');
	// The text parses with its leading '-' sign; negate to restore
	// the positive pre-image line number.
	old.startLine = -RawParseUtils.ParseBase10(buf, ptr.value, ptr);
	if (buf[ptr.value] == ',')
	{
		old.lineCount = RawParseUtils.ParseBase10(buf, ptr.value + 1, ptr);
	}
	else
	{
		// git omits ",count" when exactly one line is involved.
		old.lineCount = 1;
	}
	// "+start[,count]" for the post-image side.
	newStartLine = RawParseUtils.ParseBase10(buf, ptr.value + 1, ptr);
	if (buf[ptr.value] == ',')
	{
		newLineCount = RawParseUtils.ParseBase10(buf, ptr.value + 1, ptr);
	}
	else
	{
		newLineCount = 1;
	}
}
/// <summary>
/// Scan traditional ("--- "/"+++ ") patch header lines until the first hunk.
/// </summary>
/// <param name="ptr">offset of the first header line to examine.</param>
/// <param name="end">offset one past the last byte to consider.</param>
/// <returns>offset of the first line that was not consumed as a header.</returns>
internal virtual int ParseTraditionalHeaders(int ptr, int end)
{
	while (ptr < end)
	{
		int eol = RawParseUtils.NextLF(buf, ptr);
		if (IsHunkHdr(buf, ptr, eol) >= 1)
		{
			// First hunk header; break out and parse them later.
			break;
		}
		else if (RawParseUtils.Match(buf, ptr, OLD_NAME) >= 0)
		{
			ParseOldName(ptr, eol);
		}
		else if (RawParseUtils.Match(buf, ptr, NEW_NAME) >= 0)
		{
			ParseNewName(ptr, eol);
		}
		else
		{
			// Possibly an empty patch.
			break;
		}
		ptr = eol;
	}
	return ptr;
}
internal Entry(byte[] raw, int pos)
{
	// Strict reflog line parse: "<old-sha1> SP <new-sha1> SP <personident> TAB <comment> LF";
	// every field is mandatory here and any deviation throws.
	oldId = ObjectId.FromString(raw, pos);
	pos += Constants.OBJECT_ID_STRING_LENGTH;
	if (raw[pos++] != ' ')
	{
		throw new ArgumentException(JGitText.Get().rawLogMessageDoesNotParseAsLogEntry);
	}
	newId = ObjectId.FromString(raw, pos);
	pos += Constants.OBJECT_ID_STRING_LENGTH;
	if (raw[pos++] != ' ')
	{
		throw new ArgumentException(JGitText.Get().rawLogMessageDoesNotParseAsLogEntry);
	}
	who = RawParseUtils.ParsePersonIdentOnly(raw, pos);
	// personident has no
	// \t
	int p0 = RawParseUtils.Next(raw, pos, '\t');
	if (p0 == -1)
	{
		throw new ArgumentException(JGitText.Get().rawLogMessageDoesNotParseAsLogEntry);
	}
	int p1 = RawParseUtils.NextLF(raw, p0);
	if (p1 == -1)
	{
		throw new ArgumentException(JGitText.Get().rawLogMessageDoesNotParseAsLogEntry);
	}
	// Comment is everything after the tab, excluding the trailing LF.
	comment = RawParseUtils.Decode(raw, p0, p1 - 1);
}
/// <summary>Resolve a "gitdir:" style .git file to the directory it names.</summary>
/// <param name="workTree">work tree used to resolve a relative gitdir path.</param>
/// <param name="dotGit">the .git file whose content references the real git directory.</param>
/// <returns>the referenced git directory.</returns>
/// <exception cref="System.IO.IOException">the file is not a valid gitdir reference.</exception>
private static FilePath GetSymRef(FilePath workTree, FilePath dotGit)
{
	byte[] raw = IOUtil.ReadFully(dotGit);
	if (!IsSymRef(raw))
	{
		throw new IOException(MessageFormat.Format(JGitText.Get().invalidGitdirRef, dotGit
			.GetAbsolutePath()));
	}
	// The path starts right after the "gitdir: " prefix (8 bytes).
	int pathStart = 8;
	int end = RawParseUtils.NextLF(raw, pathStart);
	// Exclude a trailing LF from the path text.
	if (raw[end - 1] == '\n')
	{
		end--;
	}
	if (end == pathStart)
	{
		// "gitdir:" with nothing after it is not usable.
		throw new IOException(MessageFormat.Format(JGitText.Get().invalidGitdirRef, dotGit
			.GetAbsolutePath()));
	}
	string gitdirPath = RawParseUtils.Decode(raw, pathStart, end);
	FilePath candidate = new FilePath(gitdirPath);
	// Relative paths are resolved against the work tree.
	return candidate.IsAbsolute()
		? candidate
		: new FilePath(workTree, gitdirPath).GetCanonicalFile();
}
/// <summary>Check an annotated tag for errors.</summary>
/// <remarks>Check an annotated tag for errors.</remarks>
/// <param name="raw">the tag data. The array is never modified.</param>
/// <exception cref="NGit.Errors.CorruptObjectException">if any error was detected.</exception>
public virtual void CheckTag(byte[] raw)
{
	int ptr = 0;
	// "object <sha1>\n" must come first.
	if ((ptr = RawParseUtils.Match(raw, ptr, @object)) < 0)
	{
		throw new CorruptObjectException("no object header");
	}
	if ((ptr = Id(raw, ptr)) < 0 || raw[ptr++] != '\n')
	{
		throw new CorruptObjectException("invalid object");
	}
	// "type <name>\n"; only the header keyword is checked, the value
	// itself is skipped without validation.
	if ((ptr = RawParseUtils.Match(raw, ptr, type)) < 0)
	{
		throw new CorruptObjectException("no type header");
	}
	ptr = RawParseUtils.NextLF(raw, ptr);
	// "tag <name>\n"; value likewise skipped.
	if ((ptr = RawParseUtils.Match(raw, ptr, tag)) < 0)
	{
		throw new CorruptObjectException("no tag header");
	}
	ptr = RawParseUtils.NextLF(raw, ptr);
	// "tagger" is optional; when present its person ident must parse
	// and be LF terminated.
	if ((ptr = RawParseUtils.Match(raw, ptr, tagger)) > 0)
	{
		if ((ptr = PersonIdent(raw, ptr)) < 0 || raw[ptr++] != '\n')
		{
			throw new CorruptObjectException("invalid tagger");
		}
	}
}
/// <summary>
/// Advance past the current line, and past an immediately following
/// "--- " old-name header line if one is present.
/// </summary>
/// <returns>offset of the next line to examine.</returns>
private static int SkipFile(byte[] buf, int ptr)
{
	int next = RawParseUtils.NextLF(buf, ptr);
	if (RawParseUtils.Match(buf, next, FileHeader.OLD_NAME) >= 0)
	{
		next = RawParseUtils.NextLF(buf, next);
	}
	return next;
}
/// <summary>
/// Scan the cached-packs file and build the list of cached pack groups,
/// reusing the previous result when the file is unchanged.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private ObjectDirectory.CachedPackList ScanCachedPacks(ObjectDirectory.CachedPackList
	 old)
{
	FileSnapshot s = FileSnapshot.Save(cachedPacksFile);
	byte[] buf;
	try
	{
		buf = IOUtil.ReadFully(cachedPacksFile);
	}
	catch (FileNotFoundException)
	{
		// A missing file is treated the same as an empty one.
		buf = new byte[0];
	}
	// Reuse the prior scan result when content is byte-identical.
	if (old != null && old.snapshot.Equals(s) && Arrays.Equals(old.raw, buf))
	{
		old.snapshot.SetClean(s);
		return(old);
	}
	AList<LocalCachedPack> list = new AList<LocalCachedPack>(4);
	ICollection<ObjectId> tips = new HashSet<ObjectId>();
	int ptr = 0;
	while (ptr < buf.Length)
	{
		// '#' comment lines and blank lines are skipped.
		if (buf[ptr] == '#' || buf[ptr] == '\n')
		{
			ptr = RawParseUtils.NextLF(buf, ptr);
			continue;
		}
		// "+ <sha1>" lines accumulate tip objects for the next group.
		if (buf[ptr] == '+')
		{
			tips.AddItem(ObjectId.FromString(buf, ptr + 2));
			ptr = RawParseUtils.NextLF(buf, ptr + 2);
			continue;
		}
		// "P <name>" lines list the pack names belonging to those tips.
		IList<string> names = new AList<string>(4);
		while (ptr < buf.Length && buf[ptr] == 'P')
		{
			int end = RawParseUtils.NextLF(buf, ptr);
			if (buf[end - 1] == '\n')
			{
				end--;
			}
			names.AddItem(RawParseUtils.Decode(buf, ptr + 2, end));
			ptr = RawParseUtils.NextLF(buf, end);
		}
		// A complete tips+names group becomes one cached pack; tips are
		// reset so the next group starts fresh.
		if (!tips.IsEmpty() && !names.IsEmpty())
		{
			list.AddItem(new LocalCachedPack(this, tips, names));
			tips = new HashSet<ObjectId>();
		}
	}
	list.TrimToSize();
	return(new ObjectDirectory.CachedPackList(s, Sharpen.Collections.UnmodifiableList
		(list), buf));
}
/// <summary>
/// Render the patch script for this file as a string, decoding each
/// side with its guessed character set when necessary.
/// </summary>
/// <param name="charsetGuess">
/// optional per-version encodings; must supply one guess per parent
/// plus one for the new file when non-null.
/// </param>
internal virtual string GetScriptText(Encoding[] charsetGuess)
{
	if (GetHunks().IsEmpty())
	{
		// If we have no hunks then we can safely assume the entire
		// patch is a binary style patch, or a meta-data only style
		// patch. Either way the encoding of the headers should be
		// strictly 7-bit US-ASCII and the body is either 7-bit ASCII
		// (due to the base 85 encoding used for a BinaryHunk) or is
		// arbitrary noise we have chosen to ignore and not understand
		// (e.g. the message "Binary files ... differ").
		//
		return(RawParseUtils.ExtractBinaryString(buf, startOffset, endOffset));
	}
	if (charsetGuess != null && charsetGuess.Length != GetParentCount() + 1)
	{
		throw new ArgumentException(MessageFormat.Format(JGitText.Get().expectedCharacterEncodingGuesses
			, Sharpen.Extensions.ValueOf(GetParentCount() + 1)));
	}
	if (TrySimpleConversion(charsetGuess))
	{
		// Fast path: one charset covers every version of the file.
		Encoding cs = charsetGuess != null ? charsetGuess[0] : null;
		if (cs == null)
		{
			cs = Constants.CHARSET;
		}
		try
		{
			return(RawParseUtils.DecodeNoFallback(cs, buf, startOffset, endOffset));
		}
		catch (CharacterCodingException)
		{
			// Fall through to the slow per-version conversion below.
		}
	}
	// Try the much slower, more-memory intensive version which
	// can handle a character set conversion patch.
	StringBuilder r = new StringBuilder(endOffset - startOffset);
	// Always treat the headers as US-ASCII; Git file names are encoded
	// in a C style escape if any character has the high-bit set.
	//
	int hdrEnd = GetHunks()[0].GetStartOffset();
	for (int ptr = startOffset; ptr < hdrEnd; )
	{
		int eol = Math.Min(hdrEnd, RawParseUtils.NextLF(buf, ptr));
		r.Append(RawParseUtils.ExtractBinaryString(buf, ptr, eol));
		ptr = eol;
	}
	// Decode each version with its own charset, then let each hunk
	// stitch its lines back together from the converted text.
	string[] files = ExtractFileLines(charsetGuess);
	int[] offsets = new int[files.Length];
	foreach (HunkHeader h in GetHunks())
	{
		h.ExtractFileLines(r, files, offsets);
	}
	return(r.ToString());
}
/// <summary>Parse the footer lines (e.g. "Signed-off-by") for machine processing.</summary>
/// <remarks>
/// Parse the footer lines (e.g. "Signed-off-by") for machine processing.
/// <p>
/// This method splits all of the footer lines out of the last paragraph of
/// the commit message, providing each line as a key-value pair, ordered by
/// the order of the line's appearance in the commit message itself.
/// <p>
/// A footer line's key must match the pattern
/// <code>^[A-Za-z0-9-]+:</code>
/// , while
/// the value is free-form, but must not contain an LF. Very common keys seen
/// in the wild are:
/// <ul>
/// <li>
/// <code>Signed-off-by</code>
/// (agrees to Developer Certificate of Origin)
/// <li>
/// <code>Acked-by</code>
/// (thinks change looks sane in context)
/// <li>
/// <code>Reported-by</code>
/// (originally found the issue this change fixes)
/// <li>
/// <code>Tested-by</code>
/// (validated change fixes the issue for them)
/// <li>
/// <code>CC</code>
/// ,
/// <code>Cc</code>
/// (copy on all email related to this change)
/// <li>
/// <code>Bug</code>
/// (link to project's bug tracking system)
/// </ul>
/// </remarks>
/// <returns>ordered list of footer lines; empty list if no footers found.</returns>
public IList<FooterLine> GetFooterLines()
{
	byte[] raw = buffer;
	int ptr = raw.Length - 1;
	while (raw[ptr] == '\n')
	{
		// trim any trailing LFs, not interesting
		ptr--;
	}
	int msgB = RawParseUtils.CommitMessage(raw, 0);
	AList<FooterLine> r = new AList<FooterLine>(4);
	System.Text.Encoding enc = Encoding;
	// Walk lines backwards from the end of the buffer, collecting
	// footers until the message start or a paragraph break is hit.
	for (; ; )
	{
		ptr = RawParseUtils.PrevLF(raw, ptr);
		if (ptr <= msgB)
		{
			// Don't parse commit headers as footer lines.
			break;
		}
		// keyStart is the first byte after the LF found by PrevLF.
		int keyStart = ptr + 2;
		if (raw[keyStart] == '\n')
		{
			// Stop at first paragraph break, no footers above it.
			break;
		}
		int keyEnd = RawParseUtils.EndOfFooterLineKey(raw, keyStart);
		if (keyEnd < 0)
		{
			// Not a well formed footer line, skip it.
			continue;
		}
		// Skip over the ': *' at the end of the key before the value.
		//
		int valStart = keyEnd + 1;
		while (valStart < raw.Length && raw[valStart] == ' ')
		{
			valStart++;
		}
		// Value ends at the LF, and does not include it.
		//
		int valEnd = RawParseUtils.NextLF(raw, valStart);
		if (raw[valEnd - 1] == '\n')
		{
			valEnd--;
		}
		r.AddItem(new FooterLine(raw, enc, keyStart, keyEnd, valStart, valEnd));
	}
	// Lines were gathered bottom-up; restore top-down order.
	Sharpen.Collections.Reverse(r);
	return(r);
}
// Don't permit us to be created.
/// <summary>
/// Expose the committer line of a commit as a raw character sequence,
/// covering everything up to the closing '>' of the email address.
/// </summary>
internal static RawCharSequence TextFor(RevCommit cmit)
{
	byte[] raw = cmit.RawBuffer;
	int start = RawParseUtils.Committer(raw, 0);
	if (start < 0)
	{
		// No committer line present in this buffer.
		return RawCharSequence.EMPTY;
	}
	int stop = RawParseUtils.NextLF(raw, start, '>');
	return new RawCharSequence(raw, start, stop);
}
/// <summary>
/// Split this hunk's body into two raw streams: index 0 receives the
/// pre-image lines (context and deletions), index 1 the post-image
/// lines (context and additions).
/// </summary>
/// <param name="out">two output streams: old file first, new file second.</param>
/// <exception cref="System.IO.IOException"></exception>
internal virtual void ExtractFileLines(OutputStream[] @out)
{
	byte[] buf = file.buf;
	int ptr = startOffset;
	int eol = RawParseUtils.NextLF(buf, ptr);
	if (endOffset <= eol)
	{
		// Header-only hunk; nothing to emit.
		return;
	}
	// Treat the hunk header as though it were from the ancestor,
	// as it may have a function header appearing after it which
	// was copied out of the ancestor file.
	//
	@out[0].Write(buf, ptr, eol - ptr);
	for (ptr = eol; ptr < endOffset; ptr = eol)
	{
		eol = RawParseUtils.NextLF(buf, ptr);
		switch (buf[ptr])
		{
			case (byte)(' '):
			case (byte)('\n'):
			case (byte)('\\'):
			{
				// Context lines (and "\ No newline" markers) go to both.
				@out[0].Write(buf, ptr, eol - ptr);
				@out[1].Write(buf, ptr, eol - ptr);
				break;
			}

			case (byte)('-'):
			{
				@out[0].Write(buf, ptr, eol - ptr);
				break;
			}

			case (byte)('+'):
			{
				@out[1].Write(buf, ptr, eol - ptr);
				break;
			}

			default:
			{
				// Unrecognized prefix; the hunk ended on the prior line.
				// (The break after goto is an unreachable Sharpen artifact.)
				goto SCAN_break;
				break;
			}
		}
		SCAN_continue: ;
	}
	SCAN_break: ;
}
/// <summary>
/// Parse a "KEY=value" per-line author block (GIT_AUTHOR_NAME,
/// GIT_AUTHOR_EMAIL, GIT_AUTHOR_DATE) into a PersonIdent.
/// </summary>
/// <param name="raw">raw bytes of the author block; may be empty.</param>
/// <returns>the parsed ident, or null when the data is empty or any of
/// the three required keys is missing.</returns>
internal virtual PersonIdent ParseAuthor(byte[] raw)
{
	if (raw.Length == 0)
	{
		return(null);
	}
	IDictionary<string, string> keyValueMap = new Dictionary<string, string>();
	for (int p = 0; p < raw.Length; )
	{
		int end = RawParseUtils.NextLF(raw, p);
		if (end == p)
		{
			// An empty line terminates the block.
			break;
		}
		int equalsIndex = RawParseUtils.Next(raw, p, '=');
		if (equalsIndex == end)
		{
			// Line without '=' cannot be a key/value pair.
			break;
		}
		string key = RawParseUtils.Decode(raw, p, equalsIndex - 1);
		// end - 2 drops the trailing quote/LF pair of the value text.
		string value = RawParseUtils.Decode(raw, equalsIndex + 1, end - 2);
		p = end;
		keyValueMap.Put(key, value);
	}
	string name = keyValueMap.Get(GIT_AUTHOR_NAME);
	string email = keyValueMap.Get(GIT_AUTHOR_EMAIL);
	string time = keyValueMap.Get(GIT_AUTHOR_DATE);
	if (name == null || email == null || time == null)
	{
		// Previously a missing GIT_AUTHOR_DATE line caused a
		// NullReferenceException below; treat any missing field as an
		// unparseable author and return null instead.
		return(null);
	}
	// the time is saved as <seconds since 1970> <timezone offset>
	long when = long.Parse(Sharpen.Runtime.Substring(time, 0, time.IndexOf(' '))) * 1000;
	string tzOffsetString = Sharpen.Runtime.Substring(time, time.IndexOf(' ') + 1);
	// Offset sign: '+' means east of UTC, anything else is negative.
	int multiplier = -1;
	if (tzOffsetString[0] == '+')
	{
		multiplier = 1;
	}
	int hours = System.Convert.ToInt32(Sharpen.Runtime.Substring(tzOffsetString, 1, 3
		));
	int minutes = System.Convert.ToInt32(Sharpen.Runtime.Substring(tzOffsetString, 3,
		 5));
	// this is in format (+/-)HHMM (hours and minutes)
	// we need to convert into minutes
	int tz = (hours * 60 + minutes) * multiplier;
	return(new PersonIdent(name, email, when, tz));
}
/// <summary>
/// Verify Change-Id insertion: appended at the end when no template is
/// present, substituted in place of an all-zero template, and left
/// untouched when insertion is disabled.
/// </summary>
public virtual void TestInsertChangeId()
{
	Git git = new Git(db);
	string messageHeader = "Some header line\n\nSome detail explanation\n";
	string changeIdTemplate = "\nChange-Id: I" + ObjectId.ZeroId.GetName() + "\n";
	string messageFooter = "Some foooter lines\nAnother footer line\n";
	RevCommit commit = git.Commit().SetMessage(messageHeader + messageFooter).SetInsertChangeId
		(true).Call();
	// we should find a real change id (at the end of the file)
	byte[] chars = Sharpen.Runtime.GetBytesForString(commit.GetFullMessage());
	int lastLineBegin = RawParseUtils.PrevLF(chars, chars.Length - 2);
	string lastLine = RawParseUtils.Decode(chars, lastLineBegin + 1, chars.Length);
	NUnit.Framework.Assert.IsTrue(lastLine.Contains("Change-Id:"));
	// ... and it must not be the all-zero placeholder id.
	NUnit.Framework.Assert.IsFalse(lastLine.Contains("Change-Id: I" + ObjectId.ZeroId
		.GetName()));
	commit = git.Commit().SetMessage(messageHeader + changeIdTemplate + messageFooter
		).SetInsertChangeId(true).Call();
	// we should find a real change id (in the line as dictated by the
	// template)
	chars = Sharpen.Runtime.GetBytesForString(commit.GetFullMessage());
	int lineStart = 0;
	int lineEnd = 0;
	// Skip four lines to land where the template placed the id.
	for (int i = 0; i < 4; i++)
	{
		lineStart = RawParseUtils.NextLF(chars, lineStart);
	}
	lineEnd = RawParseUtils.NextLF(chars, lineStart);
	string line = RawParseUtils.Decode(chars, lineStart, lineEnd);
	NUnit.Framework.Assert.IsTrue(line.Contains("Change-Id:"));
	NUnit.Framework.Assert.IsFalse(line.Contains("Change-Id: I" + ObjectId.ZeroId.GetName
		()));
	commit = git.Commit().SetMessage(messageHeader + changeIdTemplate + messageFooter
		).SetInsertChangeId(false).Call();
	// we should find the untouched template
	chars = Sharpen.Runtime.GetBytesForString(commit.GetFullMessage());
	lineStart = 0;
	lineEnd = 0;
	for (int i_1 = 0; i_1 < 4; i_1++)
	{
		lineStart = RawParseUtils.NextLF(chars, lineStart);
	}
	lineEnd = RawParseUtils.NextLF(chars, lineStart);
	// NOTE(review): 'line' is recomputed here but the final assertion
	// checks the full message instead — looks like dead code; confirm.
	line = RawParseUtils.Decode(chars, lineStart, lineEnd);
	NUnit.Framework.Assert.IsTrue(commit.GetFullMessage().Contains("Change-Id: I" + ObjectId
		.ZeroId.GetName()));
}
internal virtual void ParseIndexLine(int ptr, int end)
{
	// "index $asha1..$bsha1[ $mode]" where $asha1 and $bsha1
	// can be unique abbreviations
	//
	// NOTE(review): the index math below treats dot2 as pointing just
	// past the first '.' of "..", so dot2 - 1 ends the old id and
	// dot2 + 1 starts the new id — confirm against NextLF's contract.
	int dot2 = RawParseUtils.NextLF(buf, ptr, '.');
	int mode = RawParseUtils.NextLF(buf, dot2, ' ');
	// Trim one extra byte when the line uses CRLF endings.
	oldId = AbbreviatedObjectId.FromString(buf, ptr, IsWindowsLineEnding(dot2) ? dot2
		 - 2 : dot2 - 1);
	newId = AbbreviatedObjectId.FromString(buf, dot2 + 1, IsWindowsLineEnding(mode) ?
		 mode - 2 : mode - 1);
	// The optional trailing mode applies to both sides, as a simple
	// "index" line never changes the file mode.
	if (mode < end)
	{
		newMode = oldMode = ParseFileMode(mode, end);
	}
}
/// <summary>
/// Append this hunk's lines to the script being rebuilt, pulling the
/// actual line text from the previously converted per-version files.
/// </summary>
internal virtual void ExtractFileLines(StringBuilder sb, string[] text, int[] offsets
	)
{
	byte[] buf = file.buf;
	int ptr = startOffset;
	int eol = RawParseUtils.NextLF(buf, ptr);
	if (endOffset <= eol)
	{
		// Header-only hunk; nothing to copy.
		return;
	}
	// The hunk header line is attributed to the old image (index 0).
	CopyLine(sb, text, offsets, 0);
	for (ptr = eol; ptr < endOffset; ptr = eol)
	{
		eol = RawParseUtils.NextLF(buf, ptr);
		switch (buf[ptr])
		{
			case (byte)(' '):
			case (byte)('\n'):
			case (byte)('\\'):
			{
				// Context lines advance both images: copy from the old
				// one and skip the matching line of the new one.
				CopyLine(sb, text, offsets, 0);
				SkipLine(text, offsets, 1);
				break;
			}

			case (byte)('-'):
			{
				CopyLine(sb, text, offsets, 0);
				break;
			}

			case (byte)('+'):
			{
				CopyLine(sb, text, offsets, 1);
				break;
			}

			default:
			{
				// Not a hunk line; stop scanning this hunk.
				// (The break after goto is an unreachable Sharpen artifact.)
				goto SCAN_break;
				break;
			}
		}
		SCAN_continue: ;
	}
	SCAN_break: ;
}
/// <summary>
/// Parse a combined-diff "deleted file mode $amode,$bmode" line: one
/// mode per ancestor, comma separated, with the last mode running to
/// the end of the line. The post-image mode becomes MISSING.
/// </summary>
private void ParseDeletedFileMode(int ptr, int eol)
{
	changeType = DiffEntry.ChangeType.DELETE;
	int idx = 0;
	while (ptr < eol)
	{
		int sep = RawParseUtils.NextLF(buf, ptr, ',');
		if (sep >= eol)
		{
			// No further comma on this line.
			break;
		}
		oldModes[idx++] = ParseFileMode(ptr, sep);
		ptr = sep;
	}
	// Final (or only) ancestor mode extends to the end of the line.
	oldModes[idx] = ParseFileMode(ptr, eol);
	newMode = FileMode.MISSING;
}
/// <summary>Parse an annotated tag from its canonical byte form.</summary>
/// <exception cref="NGit.Errors.CorruptObjectException"></exception>
internal virtual void ParseCanonical(RevWalk walk, byte[] rawTag)
{
	MutableInteger pos = new MutableInteger();
	int oType;
	// Fixed layout: "object " (7) + 40-char sha1 + "\ntype " puts the
	// type value at offset 53.
	pos.value = 53;
	// "object $sha1\ntype "
	oType = Constants.DecodeTypeString(this, rawTag, unchecked((byte)'\n'), pos);
	// The referenced object id starts right after "object " (offset 7).
	walk.idBuffer.FromString(rawTag, 7);
	@object = walk.LookupAny(walk.idBuffer, oType);
	int p = pos.value += 4;
	// "tag "
	// Tag name runs to just before the LF terminating the tag line.
	int nameEnd = RawParseUtils.NextLF(rawTag, p) - 1;
	tagName = RawParseUtils.Decode(Constants.CHARSET, rawTag, p, nameEnd);
	if (walk.IsRetainBody())
	{
		// Keep the raw buffer so the message body can be read later.
		buffer = rawTag;
	}
	flags |= PARSED;
}
/// <summary>Extract the email address (if present) from the footer.</summary>
/// <remarks>
/// Extract the email address (if present) from the footer.
/// <p>
/// If there is an email address looking string inside of angle brackets
/// (e.g. "<a@b>"), the return value is the part extracted from inside the
/// brackets. If no brackets are found, then
/// <see cref="GetValue()">GetValue()</see>
/// is returned
/// if the value contains an '@' sign. Otherwise, null.
/// </remarks>
/// <returns>email address appearing in the value of this footer, or null.</returns>
public string GetEmailAddress()
{
	int lt = RawParseUtils.NextLF(buffer, valStart, '<');
	if (lt < valEnd)
	{
		// Bracketed form: take the text between '<' and '>'.
		int gt = RawParseUtils.NextLF(buffer, lt, '>');
		return gt <= valEnd ? RawParseUtils.Decode(enc, buffer, lt, gt - 1) : null;
	}
	// No '<' inside the value: accept the whole value only when an '@'
	// appears somewhere within the footer's range.
	int at = RawParseUtils.NextLF(buffer, valStart, '@');
	return valStart < at && at < valEnd ? GetValue() : null;
}
/// <summary>
/// Parse a combined-diff "mode $amode,$bmode..$cmode" line: one mode
/// per ancestor, comma separated, then ".." and the new file's mode.
/// </summary>
private void ParseModeLine(int ptr, int eol)
{
	int idx = 0;
	while (ptr < eol)
	{
		int sep = RawParseUtils.NextLF(buf, ptr, ',');
		if (sep >= eol)
		{
			// No further comma on this line.
			break;
		}
		oldModes[idx++] = ParseFileMode(ptr, sep);
		ptr = sep;
	}
	// Last ancestor mode ends at ".."; the new mode follows it.
	int dot2 = RawParseUtils.NextLF(buf, ptr, '.');
	oldModes[idx] = ParseFileMode(ptr, dot2);
	newMode = ParseFileMode(dot2 + 1, eol);
}
/// <summary>
/// Recognize a binary hunk ("literal N" / "delta N") and skip over its
/// base-85 encoded body.
/// </summary>
/// <param name="ptr">offset of the hunk's first line.</param>
/// <param name="end">offset one past the region to scan.</param>
/// <returns>offset after the hunk, or -1 if this is not a binary hunk.</returns>
internal virtual int ParseHunk(int ptr, int end)
{
	byte[] buf = file.buf;
	if (RawParseUtils.Match(buf, ptr, LITERAL) >= 0)
	{
		type = BinaryHunk.Type.LITERAL_DEFLATED;
		length = RawParseUtils.ParseBase10(buf, ptr + LITERAL.Length, null);
	}
	else
	{
		if (RawParseUtils.Match(buf, ptr, DELTA) >= 0)
		{
			type = BinaryHunk.Type.DELTA_DEFLATED;
			length = RawParseUtils.ParseBase10(buf, ptr + DELTA.Length, null);
		}
		else
		{
			// Not a valid binary hunk. Signal to the caller that
			// we cannot parse any further and that this line should
			// be treated otherwise.
			//
			return(-1);
		}
	}
	ptr = RawParseUtils.NextLF(buf, ptr);
	// Skip until the first blank line; that is the end of the binary
	// encoded information in this hunk. To save time we don't do a
	// validation of the binary data at this point.
	//
	while (ptr < end)
	{
		// A blank line is "\n" or the Windows form "\r\n".
		bool empty = (buf[ptr] == '\n') || (buf[ptr] == '\r' && ptr + 1 < end && buf[ptr
			 + 1] == '\n');
		ptr = RawParseUtils.NextLF(buf, ptr);
		if (empty)
		{
			break;
		}
	}
	return(ptr);
}
/// <summary>
/// Validate a person ident ("name &lt;email&gt; time tz") starting at ptr.
/// </summary>
/// <returns>offset just past the timezone digits, or -1 if malformed.</returns>
private int PersonIdent(byte[] raw, int ptr)
{
	int emailB = RawParseUtils.NextLF(raw, ptr, '<');
	if (emailB == ptr || raw[emailB - 1] != '<')
	{
		return(-1);
	}
	int emailE = RawParseUtils.NextLF(raw, emailB, '>');
	if (emailE == emailB || raw[emailE - 1] != '>')
	{
		return(-1);
	}
	// A space must separate the email from the timestamp.
	if (emailE == raw.Length || raw[emailE] != ' ')
	{
		return(-1);
	}
	// when (seconds since epoch); ptrout receives the stop position.
	RawParseUtils.ParseBase10(raw, emailE + 1, ptrout);
	ptr = ptrout.value;
	if (emailE + 1 == ptr)
	{
		// No digits were consumed: the timestamp is missing.
		return(-1);
	}
	if (ptr == raw.Length || raw[ptr] != ' ')
	{
		return(-1);
	}
	// tz offset
	RawParseUtils.ParseBase10(raw, ptr + 1, ptrout);
	if (ptr + 1 == ptrout.value)
	{
		// No digits: timezone is missing.
		return(-1);
	}
	return(ptrout.value);
}
internal override void ParseIndexLine(int ptr, int eol)
{
	// "index $asha1,$bsha1..$csha1"
	//
	// One abbreviated id per ancestor, comma separated, followed by
	// ".." and the merge result's id.
	IList<AbbreviatedObjectId> ids = new AList<AbbreviatedObjectId>();
	while (ptr < eol)
	{
		int comma = RawParseUtils.NextLF(buf, ptr, ',');
		if (eol <= comma)
		{
			// No further comma; the remainder is handled below.
			break;
		}
		ids.AddItem(AbbreviatedObjectId.FromString(buf, ptr, comma - 1));
		ptr = comma;
	}
	// Reserve one extra slot for the last ancestor id parsed below.
	oldIds = new AbbreviatedObjectId[ids.Count + 1];
	Sharpen.Collections.ToArray(ids, oldIds);
	// Last ancestor id ends at ".."; the new id runs to end of line.
	int dot2 = RawParseUtils.NextLF(buf, ptr, '.');
	oldIds[ids.Count] = AbbreviatedObjectId.FromString(buf, ptr, dot2 - 1);
	newId = AbbreviatedObjectId.FromString(buf, dot2 + 1, eol - 1);
	// Mode slots are allocated here; values are assigned elsewhere.
	oldModes = new FileMode[oldIds.Length];
}
/// <summary>
/// Parse the extended "git diff" header lines that precede the first
/// hunk, recording names, modes, change type and similarity score.
/// </summary>
/// <param name="ptr">offset of the first header line.</param>
/// <param name="end">offset one past the region to scan.</param>
/// <returns>offset of the first line not consumed as a header.</returns>
internal virtual int ParseGitHeaders(int ptr, int end)
{
	while (ptr < end)
	{
		int eol = RawParseUtils.NextLF(buf, ptr);
		if (IsHunkHdr(buf, ptr, eol) >= 1)
		{
			// First hunk header; break out and parse them later.
			break;
		}
		else if (RawParseUtils.Match(buf, ptr, OLD_NAME) >= 0)
		{
			ParseOldName(ptr, eol);
		}
		else if (RawParseUtils.Match(buf, ptr, NEW_NAME) >= 0)
		{
			ParseNewName(ptr, eol);
		}
		else if (RawParseUtils.Match(buf, ptr, OLD_MODE) >= 0)
		{
			oldMode = ParseFileMode(ptr + OLD_MODE.Length, eol);
		}
		else if (RawParseUtils.Match(buf, ptr, NEW_MODE) >= 0)
		{
			newMode = ParseFileMode(ptr + NEW_MODE.Length, eol);
		}
		else if (RawParseUtils.Match(buf, ptr, DELETED_FILE_MODE) >= 0)
		{
			oldMode = ParseFileMode(ptr + DELETED_FILE_MODE.Length, eol);
			newMode = FileMode.MISSING;
			changeType = DiffEntry.ChangeType.DELETE;
		}
		else if (RawParseUtils.Match(buf, ptr, NEW_FILE_MODE) >= 0)
		{
			ParseNewFileMode(ptr, eol);
		}
		else if (RawParseUtils.Match(buf, ptr, COPY_FROM) >= 0)
		{
			oldPath = ParseName(oldPath, ptr + COPY_FROM.Length, eol);
			changeType = DiffEntry.ChangeType.COPY;
		}
		else if (RawParseUtils.Match(buf, ptr, COPY_TO) >= 0)
		{
			newPath = ParseName(newPath, ptr + COPY_TO.Length, eol);
			changeType = DiffEntry.ChangeType.COPY;
		}
		else if (RawParseUtils.Match(buf, ptr, RENAME_OLD) >= 0)
		{
			oldPath = ParseName(oldPath, ptr + RENAME_OLD.Length, eol);
			changeType = DiffEntry.ChangeType.RENAME;
		}
		else if (RawParseUtils.Match(buf, ptr, RENAME_NEW) >= 0)
		{
			newPath = ParseName(newPath, ptr + RENAME_NEW.Length, eol);
			changeType = DiffEntry.ChangeType.RENAME;
		}
		else if (RawParseUtils.Match(buf, ptr, RENAME_FROM) >= 0)
		{
			oldPath = ParseName(oldPath, ptr + RENAME_FROM.Length, eol);
			changeType = DiffEntry.ChangeType.RENAME;
		}
		else if (RawParseUtils.Match(buf, ptr, RENAME_TO) >= 0)
		{
			newPath = ParseName(newPath, ptr + RENAME_TO.Length, eol);
			changeType = DiffEntry.ChangeType.RENAME;
		}
		else if (RawParseUtils.Match(buf, ptr, SIMILARITY_INDEX) >= 0)
		{
			score = RawParseUtils.ParseBase10(buf, ptr + SIMILARITY_INDEX.Length, null);
		}
		else if (RawParseUtils.Match(buf, ptr, DISSIMILARITY_INDEX) >= 0)
		{
			score = RawParseUtils.ParseBase10(buf, ptr + DISSIMILARITY_INDEX.Length, null);
		}
		else if (RawParseUtils.Match(buf, ptr, INDEX) >= 0)
		{
			ParseIndexLine(ptr + INDEX.Length, eol);
		}
		else
		{
			// Probably an empty patch (stat dirty).
			break;
		}
		ptr = eol;
	}
	return ptr;
}
/// <summary>Parse a commit from its canonical byte form.</summary>
internal virtual void ParseCanonical(RevWalk walk, byte[] raw)
{
	MutableObjectId idBuffer = walk.idBuffer;
	// "tree " is 5 bytes; the tree's sha1 follows immediately.
	idBuffer.FromString(raw, 5);
	tree = walk.LookupTree(idBuffer);
	// First "parent " line (if any) starts at offset 46:
	// 5 ("tree ") + 40 (sha1) + 1 (LF).
	int ptr = 46;
	if (parents == null)
	{
		NGit.Revwalk.RevCommit[] pList = new NGit.Revwalk.RevCommit[1];
		int nParents = 0;
		for (; ; )
		{
			if (raw[ptr] != 'p')
			{
				break;
			}
			// "parent " is 7 bytes; the parent's sha1 follows.
			idBuffer.FromString(raw, ptr + 7);
			NGit.Revwalk.RevCommit p = walk.LookupCommit(idBuffer);
			if (nParents == 0)
			{
				pList[nParents++] = p;
			}
			else
			{
				if (nParents == 1)
				{
					// Second parent: common merge case, size the array exactly.
					pList = new NGit.Revwalk.RevCommit[] { pList[0], p };
					nParents = 2;
				}
				else
				{
					// Octopus merge: grow the array in chunks of 32.
					if (pList.Length <= nParents)
					{
						NGit.Revwalk.RevCommit[] old = pList;
						pList = new NGit.Revwalk.RevCommit[pList.Length + 32];
						System.Array.Copy(old, 0, pList, 0, nParents);
					}
					pList[nParents++] = p;
				}
			}
			// Each parent line is 48 bytes: "parent " + sha1 + LF.
			ptr += 48;
		}
		if (nParents != pList.Length)
		{
			// Trim down to the parents actually found.
			NGit.Revwalk.RevCommit[] old = pList;
			pList = new NGit.Revwalk.RevCommit[nParents];
			System.Array.Copy(old, 0, pList, 0, nParents);
		}
		parents = pList;
	}
	// extract time from "committer "
	ptr = RawParseUtils.Committer(raw, ptr);
	if (ptr > 0)
	{
		// The epoch seconds begin after the '>' closing the email.
		ptr = RawParseUtils.NextLF(raw, ptr, '>');
		// In 2038 commitTime will overflow unless it is changed to long.
		commitTime = RawParseUtils.ParseBase10(raw, ptr, null);
	}
	if (walk.IsRetainBody())
	{
		// Keep the raw buffer so the message body can be read later.
		buffer = raw;
	}
	flags |= PARSED;
}
/// <summary>
/// Combined-diff variant: each body line carries one status column per
/// ancestor; copy each line's text from the first ancestor that still
/// contains it, or from the new file when no ancestor does.
/// </summary>
internal override void ExtractFileLines(StringBuilder sb, string[] text, int[] offsets
	)
{
	byte[] buf = file.buf;
	int ptr = startOffset;
	int eol = RawParseUtils.NextLF(buf, ptr);
	if (endOffset <= eol)
	{
		// Header-only hunk; nothing to copy.
		return;
	}
	// The hunk header is attributed to the first ancestor.
	CopyLine(sb, text, offsets, 0);
	for (ptr = eol; ptr < endOffset; ptr = eol)
	{
		eol = RawParseUtils.NextLF(buf, ptr);
		if (eol - ptr < old.Length + 1)
		{
			// Line isn't long enough to mention the state of each
			// ancestor. It must be the end of the hunk.
			goto SCAN_break;
		}
		switch (buf[ptr])
		{
			case (byte)(' '):
			case (byte)('-'):
			case (byte)('+'):
			{
				break;
			}

			default:
			{
				// Line can't possibly be part of this hunk; the first
				// ancestor information isn't recognizable.
				//
				// (The break after goto is an unreachable Sharpen artifact.)
				goto SCAN_break;
				break;
			}
		}
		bool copied = false;
		// Inspect each ancestor's status column in turn.
		for (int ancestor = 0; ancestor < old.Length; ancestor++)
		{
			switch (buf[ptr + ancestor])
			{
				case (byte)(' '):
				case (byte)('-'):
				{
					// Present in this ancestor: copy once, then just
					// advance the other ancestors' cursors.
					if (copied)
					{
						SkipLine(text, offsets, ancestor);
					}
					else
					{
						CopyLine(sb, text, offsets, ancestor);
						copied = true;
					}
					continue;
					goto case (byte)('+');
				}

				case (byte)('+'):
				{
					continue;
					goto default;
				}

				default:
				{
					goto SCAN_break;
					break;
				}
			}
		}
		if (!copied)
		{
			// If none of the ancestors caused the copy then this line
			// must be new across the board, so it only appears in the
			// text of the new file.
			//
			CopyLine(sb, text, offsets, old.Length);
		}
		SCAN_continue: ;
	}
	SCAN_break: ;
}
/// <summary>
/// Combined-diff variant: write each line to the stream of every
/// ancestor that contains it, and to the new file's stream when it was
/// not deleted relative to all ancestors.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
internal override void ExtractFileLines(OutputStream[] @out)
{
	byte[] buf = file.buf;
	int ptr = startOffset;
	int eol = RawParseUtils.NextLF(buf, ptr);
	if (endOffset <= eol)
	{
		// Header-only hunk; nothing to emit.
		return;
	}
	// Treat the hunk header as though it were from the ancestor,
	// as it may have a function header appearing after it which
	// was copied out of the ancestor file.
	//
	@out[0].Write(buf, ptr, eol - ptr);
	for (ptr = eol; ptr < endOffset; ptr = eol)
	{
		eol = RawParseUtils.NextLF(buf, ptr);
		if (eol - ptr < old.Length + 1)
		{
			// Line isn't long enough to mention the state of each
			// ancestor. It must be the end of the hunk.
			goto SCAN_break;
		}
		switch (buf[ptr])
		{
			case (byte)(' '):
			case (byte)('-'):
			case (byte)('+'):
			{
				break;
			}

			default:
			{
				// Line can't possibly be part of this hunk; the first
				// ancestor information isn't recognizable.
				//
				// (The break after goto is an unreachable Sharpen artifact.)
				goto SCAN_break;
				break;
			}
		}
		int delcnt = 0;
		// Route the line to each ancestor stream based on its column.
		for (int ancestor = 0; ancestor < old.Length; ancestor++)
		{
			switch (buf[ptr + ancestor])
			{
				case (byte)('-'):
				{
					delcnt++;
					@out[ancestor].Write(buf, ptr, eol - ptr);
					continue;
					goto case (byte)(' ');
				}

				case (byte)(' '):
				{
					@out[ancestor].Write(buf, ptr, eol - ptr);
					continue;
					goto case (byte)('+');
				}

				case (byte)('+'):
				{
					continue;
					goto default;
				}

				default:
				{
					goto SCAN_break;
					break;
				}
			}
		}
		if (delcnt < old.Length)
		{
			// This line appears in the new file if it wasn't deleted
			// relative to all ancestors.
			//
			@out[old.Length].Write(buf, ptr, eol - ptr);
		}
		SCAN_continue: ;
	}
	SCAN_break: ;
}
/// <summary>
/// Count this combined hunk's context/deleted/added lines per ancestor
/// and report truncation errors against the declared line counts.
/// </summary>
/// <returns>offset just past the hunk body.</returns>
internal override int ParseBody(NGit.Patch.Patch script, int end)
{
	byte[] buf = file.buf;
	int c = RawParseUtils.NextLF(buf, startOffset);
	// Reset counters before scanning the body.
	foreach (CombinedHunkHeader.CombinedOldImage o in old)
	{
		o.nDeleted = 0;
		o.nAdded = 0;
		o.nContext = 0;
	}
	nContext = 0;
	int nAdded = 0;
	for (int eol; c < end; c = eol)
	{
		eol = RawParseUtils.NextLF(buf, c);
		if (eol - c < old.Length + 1)
		{
			// Line isn't long enough to mention the state of each
			// ancestor. It must be the end of the hunk.
			goto SCAN_break;
		}
		switch (buf[c])
		{
			case (byte)(' '):
			case (byte)('-'):
			case (byte)('+'):
			{
				break;
			}

			default:
			{
				// Line can't possibly be part of this hunk; the first
				// ancestor information isn't recognizable.
				//
				// (The break after goto is an unreachable Sharpen artifact.)
				goto SCAN_break;
				break;
			}
		}
		int localcontext = 0;
		// Tally each ancestor's status column on this line.
		for (int ancestor = 0; ancestor < old.Length; ancestor++)
		{
			switch (buf[c + ancestor])
			{
				case (byte)(' '):
				{
					localcontext++;
					old[ancestor].nContext++;
					continue;
					goto case (byte)('-');
				}

				case (byte)('-'):
				{
					old[ancestor].nDeleted++;
					continue;
					goto case (byte)('+');
				}

				case (byte)('+'):
				{
					old[ancestor].nAdded++;
					nAdded++;
					continue;
					goto default;
				}

				default:
				{
					goto SCAN_break;
					break;
				}
			}
		}
		// A line is global context only if every ancestor shows it so.
		if (localcontext == old.Length)
		{
			nContext++;
		}
		SCAN_continue: ;
	}
	SCAN_break: ;
	// Compare observed counts against each ancestor's declared total.
	for (int ancestor_1 = 0; ancestor_1 < old.Length; ancestor_1++)
	{
		CombinedHunkHeader.CombinedOldImage o_1 = old[ancestor_1];
		int cmp = o_1.nContext + o_1.nDeleted;
		if (cmp < o_1.lineCount)
		{
			int missingCnt = o_1.lineCount - cmp;
			script.Error(buf, startOffset, MessageFormat.Format(JGitText.Get().truncatedHunkLinesMissingForAncestor
				, Sharpen.Extensions.ValueOf(missingCnt), Sharpen.Extensions.ValueOf(ancestor_1
				 + 1)));
		}
	}
	// Same check for the merged (new) side of the hunk.
	if (nContext + nAdded < newLineCount)
	{
		int missingCount = newLineCount - (nContext + nAdded);
		script.Error(buf, startOffset, MessageFormat.Format(JGitText.Get().truncatedHunkNewLinesMissing
			, Sharpen.Extensions.ValueOf(missingCount)));
	}
	return(c);
}
/// <summary>
/// Read the git-rebase-todo file and build the ordered list of steps;
/// comment lines and lines with unknown actions are skipped.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private IList<RebaseCommand.Step> LoadSteps()
{
	byte[] buf = IOUtil.ReadFully(new FilePath(rebaseDir, GIT_REBASE_TODO));
	int ptr = 0;
	int tokenBegin = 0;
	AList<RebaseCommand.Step> r = new AList<RebaseCommand.Step>();
	while (ptr < buf.Length)
	{
		tokenBegin = ptr;
		ptr = RawParseUtils.NextLF(buf, ptr);
		int nextSpace = 0;
		int tokenCount = 0;
		RebaseCommand.Step current = null;
		// Each line holds up to three space-separated tokens:
		// action, abbreviated commit id, and the short message.
		while (tokenCount < 3 && nextSpace < ptr)
		{
			switch (tokenCount)
			{
				case 0:
				{
					nextSpace = RawParseUtils.Next(buf, tokenBegin, ' ');
					string actionToken = Sharpen.Runtime.GetStringForBytes(buf, tokenBegin, nextSpace
						 - tokenBegin - 1);
					tokenBegin = nextSpace;
					if (actionToken[0] == '#')
					{
						// Comment line: force the token loop to end.
						tokenCount = 3;
						break;
					}
					RebaseCommand.Action action = RebaseCommand.Action.Parse(actionToken);
					// Unknown actions leave current null, so the rest of
					// the line is ignored by the cases below.
					if (action != null)
					{
						current = new RebaseCommand.Step(RebaseCommand.Action.Parse(actionToken));
					}
					break;
				}

				case 1:
				{
					if (current == null)
					{
						break;
					}
					nextSpace = RawParseUtils.Next(buf, tokenBegin, ' ');
					string commitToken = Sharpen.Runtime.GetStringForBytes(buf, tokenBegin, nextSpace
						 - tokenBegin - 1);
					tokenBegin = nextSpace;
					current.commit = AbbreviatedObjectId.FromString(commitToken);
					break;
				}

				case 2:
				{
					if (current == null)
					{
						break;
					}
					// Remainder of the line is the short commit message.
					nextSpace = ptr;
					int length = ptr - tokenBegin;
					current.shortMessage = new byte[length];
					System.Array.Copy(buf, tokenBegin, current.shortMessage, 0, length);
					r.AddItem(current);
					break;
				}
			}
			tokenCount++;
		}
	}
	return(r);
}