public virtual void parseHeader()
{
    // Decode a hunk line such as "@@ -236,9 +236,9 @@ protected boolean".
    byte[] raw = _file.Buffer;
    var pos = new MutableInteger();
    pos.value = RawParseUtils.nextLF(raw, _startOffset, (byte)' ');

    // Old image "-236,9": parseBase10 consumes the leading '-', so the
    // parsed value comes back negative; flip the sign to recover it.
    _oldImage.StartLine = -RawParseUtils.parseBase10(raw, pos.value, pos);
    if (raw[pos.value] == ',')
    {
        _oldImage.LineCount = RawParseUtils.parseBase10(raw, pos.value + 1, pos);
    }
    else
    {
        // No ",count" suffix means a single line.
        _oldImage.LineCount = 1;
    }

    // New image "+236,9": pos.value + 1 steps over the separating space
    // onto the '+' sign, which parseBase10 accepts.
    NewStartLine = RawParseUtils.parseBase10(raw, pos.value + 1, pos);
    if (raw[pos.value] == ',')
    {
        NewLineCount = RawParseUtils.parseBase10(raw, pos.value + 1, pos);
    }
    else
    {
        NewLineCount = 1;
    }
}
public override void parseHeader()
{
    // Decode a combined (merge) hunk line such as
    // "@@@ -55,12 -163,13 +163,15 @@@ protected boolean".
    byte[] buf = File.Buffer;
    var ptr = new MutableInteger
    {
        value = RawParseUtils.nextLF(buf, StartOffset, (byte)' ')
    };

    // One "-start[,count]" column per ancestor image. parseBase10 consumes
    // the leading '-', so negate to recover the positive start line.
    // FIX: parse from the captured local `buf` — the original read the
    // `Buffer` property here while every other access in this method (and
    // the base-class implementation) uses `buf`, an inconsistency that
    // would silently parse the wrong data if the two ever diverge.
    _old.ForEach(coi =>
    {
        coi.StartLine = -1 * RawParseUtils.parseBase10(buf, ptr.value, ptr);
        coi.LineCount = buf[ptr.value] == ','
            ? RawParseUtils.parseBase10(buf, ptr.value + 1, ptr)
            : 1; // no ",count" suffix means a single line
    });

    // Final "+start[,count]" column describes the merged result;
    // ptr.value + 1 steps over the separator onto the '+' sign.
    NewStartLine = RawParseUtils.parseBase10(buf, ptr.value + 1, ptr);
    NewLineCount = buf[ptr.value] == ','
        ? RawParseUtils.parseBase10(buf, ptr.value + 1, ptr)
        : 1;
}
public int parseHunk(int ptr, int end)
{
    byte[] raw = file.Buffer;

    // Identify the hunk flavor from its leading keyword line and record
    // the inflated length that follows the keyword.
    if (RawParseUtils.match(raw, ptr, LITERAL) >= 0)
    {
        type = Type.LITERAL_DEFLATED;
        length = RawParseUtils.parseBase10(raw, ptr + LITERAL.Length, null);
    }
    else if (RawParseUtils.match(raw, ptr, DELTA) >= 0)
    {
        type = Type.DELTA_DEFLATED;
        length = RawParseUtils.parseBase10(raw, ptr + DELTA.Length, null);
    }
    else
    {
        // Not a valid binary hunk; signal the caller that this line
        // must be handled some other way.
        return -1;
    }

    // The encoded binary data ends at the first blank line. To save time
    // we only skip over it here rather than validating it.
    ptr = RawParseUtils.nextLF(raw, ptr);
    while (ptr < end)
    {
        bool blankLine = raw[ptr] == '\n';
        ptr = RawParseUtils.nextLF(raw, ptr);
        if (blankLine)
        {
            break;
        }
    }
    return ptr;
}
private int personIdent(byte[] raw, int ptr)
{
    // Validate a "Name <email> when tz" identity line and return the
    // offset just past the timezone digits, or -1 if the shape is wrong.
    int emailStart = RawParseUtils.nextLF(raw, ptr, (byte)'<');
    if (emailStart == ptr || raw[emailStart - 1] != '<')
    {
        return -1; // no '<' found before end of line
    }

    int emailEnd = RawParseUtils.nextLF(raw, emailStart, (byte)'>');
    if (emailEnd == emailStart || raw[emailEnd - 1] != '>')
    {
        return -1; // no closing '>'
    }
    if (emailEnd == raw.Length || raw[emailEnd] != ' ')
    {
        return -1; // a space must separate email from the timestamp
    }

    // "when" timestamp; ptrout is advanced past the digits consumed.
    RawParseUtils.parseBase10(raw, emailEnd + 1, ptrout);
    ptr = ptrout.value;
    if (emailEnd + 1 == ptr)
    {
        return -1; // no digits were consumed
    }
    if (ptr == raw.Length || raw[ptr] != ' ')
    {
        return -1; // a space must separate timestamp from timezone
    }

    // Timezone offset.
    RawParseUtils.parseBase10(raw, ptr + 1, ptrout);
    if (ptr + 1 == ptrout.value)
    {
        return -1; // no digits were consumed
    }
    return ptrout.value;
}
public void parseCanonical(RevWalk walk, byte[] raw)
{
    // Parse a canonical commit buffer: "tree <sha1>\n", zero or more
    // "parent <sha1>\n" lines, then the committer line with the timestamp.
    MutableObjectId idBuffer = walk.IdBuffer;
    idBuffer.FromString(raw, 5); // skip "tree " (5 bytes) to the hex id
    _tree = walk.lookupTree(idBuffer);

    int ptr = 46; // "tree " + 40 hex chars + '\n' = start of parent lines

    if (Parents == null)
    {
        var pList = new RevCommit[1];
        int nParents = 0;
        while (true)
        {
            if (raw[ptr] != (byte)'p')
            {
                break; // no more "parent " lines
            }
            idBuffer.FromString(raw, ptr + 7); // skip "parent " (7 bytes)
            RevCommit p = walk.lookupCommit(idBuffer);
            if (nParents == 0)
            {
                // Common case: single parent fits in the initial 1-slot array.
                pList[nParents++] = p;
            }
            else if (nParents == 1)
            {
                // Second parent: replace with an exactly-two-slot array.
                pList = new[] { pList[0], p };
                nParents = 2;
            }
            else
            {
                // Three or more parents (octopus merge): grow in steps of 32.
                if (pList.Length <= nParents)
                {
                    RevCommit[] old = pList;
                    pList = new RevCommit[pList.Length + 32];
                    Array.Copy(old, 0, pList, 0, nParents);
                }
                pList[nParents++] = p;
            }
            ptr += 48; // each parent line is "parent " + 40 hex + '\n'
        }
        if (nParents != pList.Length)
        {
            // Trim unused tail slots so Parents.Length == parent count.
            RevCommit[] old = pList;
            pList = new RevCommit[nParents];
            Array.Copy(old, 0, pList, 0, nParents);
        }
        Parents = pList;
    }

    // extract time from "committer "
    ptr = RawParseUtils.committer(raw, ptr);
    if (ptr > 0)
    {
        // Step past the '>' that closes the committer email; the epoch
        // seconds follow immediately after.
        ptr = RawParseUtils.nextLF(raw, ptr, (byte)'>');

        // In 2038 commitTime will overflow unless it is changed to long.
        CommitTime = RawParseUtils.parseBase10(raw, ptr, null);
    }

    if (walk.isRetainBody())
    {
        // Keep the raw buffer so the message/body can be read later.
        _buffer = raw;
    }

    Flags |= PARSED;
}
public virtual int parseGitHeaders(int ptr, int end)
{
    // Walk the extended "diff --git" header lines one at a time, recording
    // what each one tells us, until the first hunk header or an
    // unrecognized line is reached. Returns the offset where scanning
    // stopped.
    while (ptr < end)
    {
        int lineEnd = RawParseUtils.nextLF(Buffer, ptr);

        if (isHunkHdr(Buffer, ptr, lineEnd) >= 1)
        {
            // First hunk header; break out and parse them later.
            break;
        }

        if (RawParseUtils.match(Buffer, ptr, OLD_NAME) >= 0)
        {
            ParseOldName(ptr, lineEnd);
        }
        else if (RawParseUtils.match(Buffer, ptr, NEW_NAME) >= 0)
        {
            ParseNewName(ptr, lineEnd);
        }
        else if (RawParseUtils.match(Buffer, ptr, OldModeString) >= 0)
        {
            _oldMode = ParseFileMode(ptr + OldModeString.Length, lineEnd);
        }
        else if (RawParseUtils.match(Buffer, ptr, NewModeString) >= 0)
        {
            _newMode = ParseFileMode(ptr + NewModeString.Length, lineEnd);
        }
        else if (RawParseUtils.match(Buffer, ptr, DeletedFileMode) >= 0)
        {
            // A deletion records the old mode; the new side is gone.
            _oldMode = ParseFileMode(ptr + DeletedFileMode.Length, lineEnd);
            _newMode = FileMode.Missing;
            ChangeType = ChangeTypeEnum.DELETE;
        }
        else if (RawParseUtils.match(Buffer, ptr, NewFileMode) >= 0)
        {
            ParseNewFileMode(ptr, lineEnd);
        }
        else if (RawParseUtils.match(Buffer, ptr, CopyFrom) >= 0)
        {
            oldName = ParseName(oldName, ptr + CopyFrom.Length, lineEnd);
            ChangeType = ChangeTypeEnum.COPY;
        }
        else if (RawParseUtils.match(Buffer, ptr, CopyTo) >= 0)
        {
            newName = ParseName(newName, ptr + CopyTo.Length, lineEnd);
            ChangeType = ChangeTypeEnum.COPY;
        }
        else if (RawParseUtils.match(Buffer, ptr, RenameOld) >= 0)
        {
            oldName = ParseName(oldName, ptr + RenameOld.Length, lineEnd);
            ChangeType = ChangeTypeEnum.RENAME;
        }
        else if (RawParseUtils.match(Buffer, ptr, RenameNew) >= 0)
        {
            newName = ParseName(newName, ptr + RenameNew.Length, lineEnd);
            ChangeType = ChangeTypeEnum.RENAME;
        }
        else if (RawParseUtils.match(Buffer, ptr, RenameFrom) >= 0)
        {
            oldName = ParseName(oldName, ptr + RenameFrom.Length, lineEnd);
            ChangeType = ChangeTypeEnum.RENAME;
        }
        else if (RawParseUtils.match(Buffer, ptr, RenameTo) >= 0)
        {
            newName = ParseName(newName, ptr + RenameTo.Length, lineEnd);
            ChangeType = ChangeTypeEnum.RENAME;
        }
        else if (RawParseUtils.match(Buffer, ptr, SimilarityIndex) >= 0)
        {
            _score = RawParseUtils.parseBase10(
                Buffer, ptr + SimilarityIndex.Length, null);
        }
        else if (RawParseUtils.match(Buffer, ptr, DissimilarityIndex) >= 0)
        {
            _score = RawParseUtils.parseBase10(
                Buffer, ptr + DissimilarityIndex.Length, null);
        }
        else if (RawParseUtils.match(Buffer, ptr, Index) >= 0)
        {
            ParseIndexLine(ptr + Index.Length, lineEnd);
        }
        else
        {
            // Probably an empty patch (stat dirty).
            break;
        }

        ptr = lineEnd;
    }
    return ptr;
}
private UnpackedObjectLoader(byte[] compressed, AnyObjectId id)
{
    // Try to determine if this is a legacy format loose object or
    // a new style loose object. The legacy format was completely
    // compressed with zlib so the first byte must be 0x78 (15-bit
    // window size, deflated) and the first 16 bit word must be
    // evenly divisible by 31. Otherwise its a new style loose
    // object.
    //
    Inflater inflater = InflaterCache.Instance.get();
    try
    {
        int fb = compressed[0] & 0xff;
        if (fb == 0x78 && (((fb << 8) | compressed[1] & 0xff) % 31) == 0)
        {
            // Legacy loose object: one zlib stream whose inflated prefix
            // is a "<type> <size>\0" header followed by the object body.
            inflater.SetInput(compressed);
            var hdr = new byte[64];
            int avail = 0;
            // Inflate just enough to read the header (up to 64 bytes).
            while (!inflater.IsFinished && avail < hdr.Length)
            {
                try
                {
                    avail += inflater.Inflate(hdr, avail, hdr.Length - avail);
                }
                catch (IOException dfe)
                {
                    // NOTE(review): catching IOException here assumes the
                    // Inflater wrapper surfaces bad data as IOException —
                    // confirm against the zlib library actually in use.
                    var coe = new CorruptObjectException(id, "bad stream", dfe);
                    //inflater.end();
                    throw coe;
                }
            }
            // Shortest possible header is 5 bytes (e.g. "tag" + ' ' + digit).
            if (avail < 5)
            {
                throw new CorruptObjectException(id, "no header");
            }
            var p = new MutableInteger();
            _objectType = Constants.decodeTypeString(id, hdr, (byte)' ', p);
            _objectSize = RawParseUtils.parseBase10(hdr, p.value, p);
            if (_objectSize < 0)
            {
                throw new CorruptObjectException(id, "negative size");
            }
            // The size must be terminated by a NUL byte.
            if (hdr[p.value++] != 0)
            {
                throw new CorruptObjectException(id, "garbage after size");
            }
            _bytes = new byte[_objectSize];
            // Body bytes already inflated into hdr past the header are
            // copied over before continuing to decompress the rest.
            if (p.value < avail)
            {
                Array.Copy(hdr, p.value, _bytes, 0, avail - p.value);
            }
            Decompress(id, inflater, avail - p.value);
        }
        else
        {
            // New style loose object: an uncompressed header encoding the
            // type (3 bits) and size (4 bits + 7-bit continuation bytes),
            // followed by the deflated body.
            int p = 0;
            int c = compressed[p++] & 0xff;
            int typeCode = (c >> 4) & 7;
            int size = c & 15;
            int shift = 4;
            while ((c & 0x80) != 0)
            {
                // High bit set: another 7 size bits follow.
                c = compressed[p++] & 0xff;
                size += (c & 0x7f) << shift;
                shift += 7;
            }
            switch (typeCode)
            {
                case Constants.OBJ_COMMIT:
                case Constants.OBJ_TREE:
                case Constants.OBJ_BLOB:
                case Constants.OBJ_TAG:
                    _objectType = typeCode;
                    break;
                default:
                    throw new CorruptObjectException(id, "invalid type");
            }
            _objectSize = size;
            _bytes = new byte[_objectSize];
            inflater.SetInput(compressed, p, compressed.Length - p);
            Decompress(id, inflater, 0);
        }
    }
    finally
    {
        // Always return the pooled inflater, even on corrupt input.
        InflaterCache.Instance.release(inflater);
    }
}
public DirCacheTree(byte[] @in, MutableInteger off, DirCacheTree myParent)
{
    // Decode one cache-tree record: "<name>\0<entrySpan> <subcnt>\n",
    // optionally followed by a raw object id, then the child records.
    _parent = myParent;

    int nulPos = RawParseUtils.next(@in, off.value, (byte)'\0');
    int nameLength = nulPos - off.value - 1;
    if (nameLength <= 0)
    {
        // The root tree has an empty name.
        _encodedName = NoName;
    }
    else
    {
        _encodedName = new byte[nameLength];
        Array.Copy(@in, off.value, _encodedName, 0, nameLength);
    }

    _entrySpan = RawParseUtils.parseBase10(@in, nulPos, off);
    int subtreeCount = RawParseUtils.parseBase10(@in, off.value, off);
    off.value = RawParseUtils.next(@in, off.value, (byte)'\n');

    if (_entrySpan >= 0)
    {
        // Valid trees have a positive entry count and an id of a
        // tree object that should exist in the object database.
        _id = ObjectId.FromRaw(@in, off.value);
        off.value += Constants.OBJECT_ID_LENGTH;
    }

    if (subtreeCount <= 0)
    {
        // Leaf level trees have no children, only (file) entries.
        _children = NoChildren;
    }
    else
    {
        _children = new DirCacheTree[subtreeCount];
        bool inOrder = true;
        for (int idx = 0; idx < subtreeCount; idx++)
        {
            _children[idx] = new DirCacheTree(@in, off, this);

            // C Git's ordering differs from our own; it prefers to sort by
            // length first, which sometimes produces a sort we do not
            // desire. On the other hand the data may have been created by
            // us and already be ordered the way we want — detect that
            // while reading so we only re-sort when necessary.
            if (inOrder && idx > 0
                && TreeComparison(_children[idx - 1], _children[idx]) > 0)
            {
                inOrder = false;
            }
        }
        if (!inOrder)
        {
            Array.Sort(_children, TreeComparison);
        }
    }
    _childCount = subtreeCount;
}