/// <summary>
/// Return the ObjectId stored at the given position in this index's
/// lexicographic ordering of object names.
/// </summary>
/// <param name="nthPosition">Zero-based position of the object.</param>
/// <returns>The object id found at that position.</returns>
public override ObjectId GetObjectId(long nthPosition)
{
    // Binary search the cumulative fanout table for the bucket holding
    // the (nthPosition + 1)-th object.
    int bucket = Array.BinarySearch(_fanoutTable, nthPosition + 1);
    if (bucket >= 0)
    {
        // If we hit the bucket exactly the item is in the bucket, or
        // any earlier bucket which carries the same cumulative count;
        // walk backwards to the first such bucket.
        long hitCount = _fanoutTable[bucket];
        while (bucket > 0 && hitCount == _fanoutTable[bucket - 1])
        {
            bucket--;
        }
    }
    else
    {
        // Missed: the item is in the bucket we would insert it into.
        bucket = -(bucket + 1);
    }

    long bucketBase = bucket > 0 ? _fanoutTable[bucket - 1] : 0;
    var indexInBucket = (int)(nthPosition - bucketBase);
    // Names are stored as 5 ints per object, so scale by 5 (4x + x).
    int scaled = (indexInBucket << 2) + indexInBucket;
    return ObjectId.FromRaw(_names[bucket], scaled);
}
/// <summary>
/// Return the ObjectId stored at the given position in this index's
/// lexicographic ordering of object names.
/// </summary>
/// <param name="nthPosition">Zero-based position of the object.</param>
/// <returns>The object id found at that position.</returns>
public override ObjectId GetObjectId(long nthPosition)
{
    // Binary search the cumulative fanout header for the bucket holding
    // the (nthPosition + 1)-th object.
    int bucket = Array.BinarySearch(_idxHeader, nthPosition + 1);
    if (bucket >= 0)
    {
        // If we hit the bucket exactly the item is in the bucket, or
        // any earlier bucket which carries the same cumulative count;
        // walk backwards to the first such bucket.
        long hitCount = _idxHeader[bucket];
        while (bucket > 0 && hitCount == _idxHeader[bucket - 1])
        {
            bucket--;
        }
    }
    else
    {
        // Missed: the item is in the bucket we would insert it into.
        bucket = -(bucket + 1);
    }

    long bucketBase = bucket > 0 ? _idxHeader[bucket - 1] : 0;
    var indexInBucket = (int)(nthPosition - bucketBase);
    // Each entry is a 4 byte offset followed by the raw object name;
    // skip past the offset of the entry we want.
    int dataIdx = ((4 + AnyObjectId.ObjectIdLength) * indexInBucket) + 4;
    return ObjectId.FromRaw(_idxdata[bucket], dataIdx);
}
/// <summary>
/// Compute the name of the pack: a digest over the raw object names of
/// every packed object, taken in sorted (by-name) order.
/// </summary>
/// <returns>The object id naming this pack.</returns>
public ObjectId computeName()
{
    MessageDigest digest = Constants.newMessageDigest();
    foreach (ObjectToPack packedObject in sortByName())
    {
        // Reuse the shared scratch buffer for each raw id.
        packedObject.copyRawTo(_buf, 0);
        digest.Update(_buf, 0, Constants.OBJECT_ID_LENGTH);
    }
    return ObjectId.FromRaw(digest.Digest());
}
/// <summary>
/// Read one index entry from the on-disk dircache format.
/// </summary>
/// <param name="repository">Repository owning this entry.</param>
/// <param name="b">Reader positioned at the start of the entry.</param>
internal Entry(Repository repository, EndianBinaryReader b)
    : this(repository)
{
    long entryStart = b.BaseStream.Position;

    // ctime/mtime are stored as a whole-seconds field followed by a
    // nanoseconds field; both are folded into a single nanosecond value.
    int ctimeSeconds = b.ReadInt32();
    int ctimeNanos = b.ReadInt32();
    Ctime = ctimeSeconds * 1000000000L + (ctimeNanos % 1000000000L);
    int mtimeSeconds = b.ReadInt32();
    int mtimeNanos = b.ReadInt32();
    Mtime = mtimeSeconds * 1000000000L + (mtimeNanos % 1000000000L);

    _dev = b.ReadInt32();
    _ino = b.ReadInt32();
    Mode = b.ReadInt32();
    _uid = b.ReadInt32();
    _gid = b.ReadInt32();
    _size = b.ReadInt32();

    ObjectId = ObjectId.FromRaw(b.ReadBytes(Constants.OBJECT_ID_LENGTH));

    _flags = b.ReadInt16();
    // The low 12 bits of the flags word hold the name length.
    _name = b.ReadBytes(_flags & 0xFFF);

    // Entries are padded out to the next 8 byte boundary; skip any padding.
    b.BaseStream.Position =
        entryStart + ((8 + 8 + 4 + 4 + 4 + 4 + 4 + 4 + 20 + 2 + _name.Length + 8) & ~7);
}
/// <summary>
/// Validate a freshly opened pack file against its index: signature,
/// supported version, object count, and trailer checksum.
/// </summary>
/// <exception cref="IOException">
/// The file is not a pack, or uses an unsupported pack version.
/// </exception>
/// <exception cref="PackMismatchException">
/// The pack and its index disagree on object count or checksum.
/// </exception>
private void OnOpenPack()
{
    PackIndex idx = LoadPackIndex();
    var scratch = new byte[20];

    // The 12 byte header is: 4 byte signature, 4 byte version,
    // 4 byte object count.
    IO.ReadFully(_fd, 0, scratch, 0, 12);
    if (RawParseUtils.match(scratch, 0, Constants.PACK_SIGNATURE) != 4)
    {
        throw new IOException("Not a PACK file.");
    }

    long version = NB.decodeUInt32(scratch, 4);
    long objectCount = NB.decodeUInt32(scratch, 8);
    if (version != 2 && version != 3)
    {
        throw new IOException("Unsupported pack version " + version + ".");
    }

    if (objectCount != idx.ObjectCount)
    {
        throw new PackMismatchException("Pack object count mismatch:"
            + " pack " + objectCount
            + " index " + idx.ObjectCount
            + ": " + File.FullName);
    }

    // The 20 byte trailer checksum must match the copy recorded in the index.
    IO.ReadFully(_fd, Length - 20, scratch, 0, 20);
    if (!scratch.SequenceEqual(_packChecksum))
    {
        throw new PackMismatchException("Pack checksum mismatch:"
            + " pack " + ObjectId.FromRaw(scratch)
            + " index " + ObjectId.FromRaw(idx.PackChecksum)
            + ": " + File.FullName);
    }
}
/// <summary>
/// Parse the raw bytes of a tree object and populate _contents with one
/// TreeEntry per child, in on-disk order.
/// </summary>
/// <param name="raw">
/// Canonical tree body: a sequence of "&lt;octal mode&gt; &lt;name&gt;\0&lt;raw id&gt;" records.
/// </param>
/// <exception cref="CorruptObjectException">
/// An entry has a malformed octal mode, or a mode that maps to no known entry kind.
/// </exception>
private void ReadTree(byte[] raw)
{
    int rawSize = raw.Length;
    int rawPtr = 0;
    int nextIndex = 0;

    // First pass: count entries so the result array is sized exactly.
    // Each record is name bytes up to a NUL, then a raw object id.
    while (rawPtr < rawSize)
    {
        while (rawPtr < rawSize && raw[rawPtr] != 0)
        {
            rawPtr++;
        }
        rawPtr++;
        rawPtr += Constants.OBJECT_ID_LENGTH;
        nextIndex++;
    }

    var temp = new TreeEntry[nextIndex];
    rawPtr = 0;
    nextIndex = 0;

    // Second pass: decode each entry.
    while (rawPtr < rawSize)
    {
        int c = raw[rawPtr++];
        // The mode must begin with an octal digit.
        if (c < '0' || c > '7')
        {
            throw new CorruptObjectException(Id, "invalid entry mode");
        }
        int mode = c - '0';
        // Accumulate ASCII octal digits until the terminating space.
        while (true)
        {
            c = raw[rawPtr++];
            if (' ' == c)
            {
                break;
            }
            if (c < '0' || c > '7')
            {
                throw new CorruptObjectException(Id, "invalid mode");
            }
            mode <<= 3;
            mode += c - '0';
        }

        // Name runs up to (not including) the NUL terminator.
        int nameLen = 0;
        while (raw[rawPtr + nameLen] != 0)
        {
            nameLen++;
        }
        var name = new byte[nameLen];
        Array.Copy(raw, rawPtr, name, 0, nameLen);
        rawPtr += nameLen + 1;

        // The raw object id immediately follows the name's NUL.
        ObjectId id = ObjectId.FromRaw(raw, rawPtr);
        rawPtr += Constants.OBJECT_ID_LENGTH;

        // Map the decoded mode to the concrete entry type.
        TreeEntry ent;
        if (FileMode.RegularFile.Equals(mode))
        {
            ent = new FileTreeEntry(this, id, name, false);
        }
        else if (FileMode.ExecutableFile.Equals(mode))
        {
            ent = new FileTreeEntry(this, id, name, true);
        }
        else if (FileMode.Tree.Equals(mode))
        {
            ent = new Tree(this, id, name);
        }
        else if (FileMode.Symlink.Equals(mode))
        {
            ent = new SymlinkTreeEntry(this, id, name);
        }
        else if (FileMode.GitLink.Equals(mode))
        {
            ent = new GitLinkTreeEntry(this, id, name);
        }
        else
        {
            throw new CorruptObjectException(Id, "Invalid mode: " + Convert.ToString(mode, 8));
        }
        temp[nextIndex++] = ent;
    }

    _contents = temp;
}
/// <summary>
/// Decode the object header at the given pack offset and construct a
/// loader for the object's data (whole object, offset delta, or ref delta).
/// </summary>
/// <param name="curs">Cursor supplying the temporary read buffer.</param>
/// <param name="objOffset">Offset of the object header within the pack.</param>
/// <returns>A loader positioned at the start of the object's data.</returns>
/// <exception cref="IOException">The header's type code is unrecognized.</exception>
private PackedObjectLoader Reader(WindowCursor curs, long objOffset)
{
    long pos = objOffset;
    int p = 0;
    byte[] ib = curs.TempId; // Reader.ReadBytes(ObjectId.ObjectIdLength);
    // Read 20 bytes; more than the variable-length header can need here.
    ReadFully(pos, ib, 0, 20, curs);

    // First byte: bit 7 = continuation, bits 6-4 = type code,
    // bits 3-0 = low nibble of the inflated size.
    int c = ib[p++] & 0xff;
    int typeCode = (c >> 4) & 7;
    long dataSize = c & 15;
    int shift = 4;
    while ((c & 0x80) != 0)
    {
        // Subsequent bytes each contribute 7 more size bits.
        // NOTE(review): (c & 0x7f) << shift is 32-bit arithmetic; a size
        // needing shift >= 32 would overflow — confirm intended limits.
        c = ib[p++] & 0xff;
        dataSize += (c & 0x7f) << shift;
        shift += 7;
    }
    pos += p;

    switch (typeCode)
    {
        case Constants.OBJ_COMMIT:
        case Constants.OBJ_TREE:
        case Constants.OBJ_BLOB:
        case Constants.OBJ_TAG:
            // Non-delta object: data begins right after the header.
            return (new WholePackedObjectLoader(this, pos, objOffset, typeCode, (int)dataSize));

        case Constants.OBJ_OFS_DELTA:
            // Base is identified by a backwards offset, encoded base-128
            // big-endian with an implicit +1 per continuation byte.
            ReadFully(pos, ib, 0, 20, curs);
            p = 0;
            c = ib[p++] & 0xff;
            long ofs = c & 127;
            while ((c & 128) != 0)
            {
                ofs += 1;
                c = ib[p++] & 0xff;
                ofs <<= 7;
                ofs += (c & 127);
            }
            return (new DeltaOfsPackedObjectLoader(this, pos + p, objOffset, (int)dataSize, objOffset - ofs));

        case Constants.OBJ_REF_DELTA:
            // Base is identified by a full 20 byte raw object id.
            ReadFully(pos, ib, 0, 20, curs);
            return (new DeltaRefPackedObjectLoader(this, pos + ib.Length, objOffset, (int)dataSize, ObjectId.FromRaw(ib)));

        default:
            throw new IOException("Unknown object type " + typeCode + ".");
    }
}
/// <summary>
/// Hash an object's content (loose-object header plus payload) and,
/// when <paramref name="store"/> is true, also deflate it into the
/// repository's object database via a temp file that is renamed into
/// place once the id is known.
/// </summary>
/// <param name="type">Type of the object being written.</param>
/// <param name="len">Exact number of payload bytes expected on <paramref name="input"/>.</param>
/// <param name="input">Stream supplying the payload.</param>
/// <param name="store">True to persist the object; false to only compute its id.</param>
/// <returns>The computed object id.</returns>
/// <exception cref="IOException">The stream ended before <paramref name="len"/> bytes were read.</exception>
/// <exception cref="ObjectWritingException">The object could not be moved into its final location.</exception>
internal ObjectId WriteObject(ObjectType type, long len, Stream input, bool store)
{
    FileInfo info;
    DeflaterOutputStream stream;
    FileStream stream2;
    ObjectId objectId = null;

    // When storing, write through a deflater into a temporary file.
    if (store)
    {
        info = _r.ObjectsDirectory.CreateTempFile("noz");
        stream2 = info.OpenWrite();
    }
    else
    {
        info = null;
        stream2 = null;
    }

    _md.Reset();
    if (store)
    {
        _def.Reset();
        stream = new DeflaterOutputStream(stream2, _def);
    }
    else
    {
        stream = null;
    }

    try
    {
        int num;

        // Loose-object header "<type> <length>\0" goes into both the
        // digest and (when storing) the deflated file.
        byte[] bytes = Codec.EncodedTypeString(type);
        _md.Update(bytes);
        if (stream != null)
        {
            stream.Write(bytes, 0, bytes.Length);
        }
        _md.Update(0x20); // ' ' separator
        if (stream != null)
        {
            stream.WriteByte(0x20);
        }
        bytes = Constants.encodeASCII(len.ToString());
        _md.Update(bytes);
        if (stream != null)
        {
            stream.Write(bytes, 0, bytes.Length);
        }
        _md.Update(0); // NUL terminates the header
        if (stream != null)
        {
            stream.WriteByte(0);
        }

        // Copy exactly len payload bytes, hashing (and storing) as we go.
        while ((len > 0L) && ((num = input.Read(_buf, 0, (int)Math.Min(len, _buf.Length))) > 0))
        {
            _md.Update(_buf, 0, num);
            if (stream != null)
            {
                stream.Write(_buf, 0, num);
            }
            len -= num;
        }
        if (len != 0L)
        {
            throw new IOException("Input did not match supplied Length. " + len + " bytes are missing.");
        }

        if (stream != null)
        {
            stream.Close();
            if (info != null)
            {
                info.IsReadOnly = true;
            }
        }
        objectId = ObjectId.FromRaw(_md.Digest());
    }
    finally
    {
        // objectId still null means we failed above: close the deflater
        // and discard the partially written temp file.
        if ((objectId == null) && (stream != null))
        {
            try
            {
                stream.Close();
            }
            finally
            {
                info.DeleteFile();
            }
        }
    }

    if (info != null)
    {
        if (_r.HasObject(objectId))
        {
            // Object is already in the repository so remove
            // the temporary file.
            //
            info.DeleteFile();
        }
        else
        {
            FileInfo info2 = _r.ToFile(objectId);
            if (!info.RenameTo(info2.FullName))
            {
                // Maybe the directory doesn't exist yet as the object
                // directories are always lazily created. Note that we
                // try the rename first as the directory likely does exist.
                //
                if (info2.Directory != null)
                {
                    info2.Directory.Create();
                }
                if (!info.RenameTo(info2.FullName) && !_r.HasObject(objectId))
                {
                    // The object failed to be renamed into its proper
                    // location and it doesn't exist in the repository
                    // either. We really don't know what went wrong, so
                    // fail.
                    //
                    info.DeleteFile();
                    throw new ObjectWritingException("Unable to create new object: " + info2);
                }
            }
        }
    }
    return (objectId);
}