/// <summary>
/// Construct an object writer for the specified repository.
/// </summary>
/// <param name="repo">Repository this writer stores objects into.</param>
public ObjectWriter(Repository repo)
{
    _r = repo;
    _buf = new byte[8192]; // 8 KiB scratch buffer (== 0x2000)
    _md = new MessageDigest();
    // Compression level comes from the repository's core config section.
    _def = new Deflater(_r.Config.getCore().getCompression());
}
/// <summary>
/// Create a fetch connection that walks the remote object database
/// <paramref name="w"/> on behalf of transport <paramref name="t"/>.
/// </summary>
/// <param name="t">Transport driving this connection; must be a <see cref="Transport"/>.</param>
/// <param name="w">Initial remote object database to walk.</param>
public WalkFetchConnection(IWalkTransport t, WalkRemoteObjectDatabase w)
{
    _idBuffer = new MutableObjectId();
    _objectDigest = Constants.newMessageDigest();

    var transport = (Transport)t;
    _local = transport.Local;
    // Only validate downloaded objects when the transport requests it.
    _objCheck = transport.CheckFetchedObjects ? new ObjectChecker() : null;

    _remotes = new List<WalkRemoteObjectDatabase> { w };
    _unfetchedPacks = new LinkedList<RemotePack>();
    _packsConsidered = new List<string>();

    // Seed both "not yet examined" queues with the initial database.
    _noPacksYet = new LinkedList<WalkRemoteObjectDatabase>(new[] { w });
    _noAlternatesYet = new LinkedList<WalkRemoteObjectDatabase>(new[] { w });

    _fetchErrors = new Dictionary<ObjectId, List<Exception>>();
    _packLocks = new List<PackLock>(4);

    _revWalk = new RevWalk.RevWalk(_local);
    _treeWalk = new TreeWalk.TreeWalk(_local);
    COMPLETE = _revWalk.newFlag("COMPLETE");
    IN_WORK_QUEUE = _revWalk.newFlag("IN_WORK_QUEUE");
    LOCALLY_SEEN = _revWalk.newFlag("LOCALLY_SEEN");

    _localCommitQueue = new DateRevQueue();
    _workQueue = new LinkedList<ObjectId>();
}
/// <summary>
/// Create a writer that stores new objects into the given repository.
/// </summary>
/// <param name="repo">Repository that receives the written objects.</param>
public ObjectWriter(Repository repo)
{
    this.r = repo;
    buf = new byte[0x2000]; // 8 KiB scratch buffer
    md = new MessageDigest(); // SHA-1 hash digest generator
    def = new Deflater(r.Config.Core.Compression);
}
/// <summary>
/// Couple an output stream with a message digest.
/// NOTE(review): presumably writes update both the stream and the digest —
/// confirm against this class's Write override.
/// </summary>
/// <param name="stream">Underlying stream that receives the data.</param>
/// <param name="digest">Digest associated with the stream.</param>
public DigestOutputStream(Stream stream, MessageDigest digest)
{
    m_stream = stream;
    m_digest = digest;
}
/// <summary>
/// Lazily allocate the content digest and read buffer. Child iterators share
/// the root iterator's instances instead of allocating their own.
/// </summary>
private void InitializeDigest()
{
    if (_contentDigest != null)
        return; // already initialized

    if (Parent != null)
    {
        // Reuse the parent's pair so the whole iterator tree shares one
        // buffer and one digest. Direct cast: a non-WorkingTreeIterator
        // parent is a programming error and should throw.
        var parentIterator = (WorkingTreeIterator)Parent;
        parentIterator.InitializeDigest();
        _contentReadBuffer = parentIterator._contentReadBuffer;
        _contentDigest = parentIterator._contentDigest;
    }
    else
    {
        _contentReadBuffer = new byte[BufferSize];
        _contentDigest = Constants.newMessageDigest();
    }
}
/// <summary>
/// Create a pack output stream writing to <paramref name="stream"/>,
/// with a CRC-32 and a message digest alongside it.
/// </summary>
/// <param name="stream">Underlying stream that receives pack data.</param>
public PackOutputStream(Stream stream)
{
    _stream = stream;
    _crc = new Crc32();
    _md = Constants.newMessageDigest();
}
/// <summary>
/// Read a single dircache entry from <paramref name="in"/>, feeding every byte
/// consumed into <paramref name="md"/> so the caller can verify the index checksum.
/// </summary>
/// <param name="sharedInfo">Buffer shared by all entries; this entry's fixed-size record is read into it.</param>
/// <param name="infoAt">Offset within <paramref name="sharedInfo"/> where this entry's record begins.</param>
/// <param name="in">Stream positioned at the start of this entry.</param>
/// <param name="md">Digest accumulating all bytes of the index file.</param>
public DirCacheEntry(byte[] sharedInfo, int infoAt, Stream @in, MessageDigest md)
{
    info = sharedInfo;
    infoOffset = infoAt;

    NB.ReadFully(@in, info, infoOffset, INFO_LEN);
    md.Update(info, infoOffset, INFO_LEN);

    int pathLen = NB.decodeUInt16(info, infoOffset + P_FLAGS) & NAME_MASK;
    int skipped = 0;
    if (pathLen < NAME_MASK)
    {
        // Short path: exact length is stored in the flags field.
        path = new byte[pathLen];
        NB.ReadFully(@in, path, 0, pathLen);
        md.Update(path, 0, pathLen);
    }
    else
    {
        // Long path: the length field saturated at NAME_MASK, so the real
        // name continues until a NUL terminator.
        //
        // BUGFIX: the previous implementation buffered the trailing bytes
        // through a BinaryWriter and called Write(c) with an int, which
        // emits FOUR little-endian bytes per character read, corrupting the
        // recovered path. MemoryStream.WriteByte stores exactly one byte.
        var tmp = new MemoryStream();
        {
            byte[] buf = new byte[NAME_MASK];
            NB.ReadFully(@in, buf, 0, NAME_MASK);
            tmp.Write(buf, 0, buf.Length);
        }
        for (; ; )
        {
            int c = @in.ReadByte();
            if (c < 0)
                throw new EndOfStreamException("Short read of block.");
            if (c == 0)
                break;
            tmp.WriteByte((byte)c);
        }
        path = tmp.ToArray();
        pathLen = path.Length;
        skipped = 1; // we already skipped 1 '\0' above to break the loop.
        md.Update(path, 0, pathLen);
        md.Update((byte)0);
    }

    // Index records are padded out to the next 8 byte alignment
    // for historical reasons related to how C Git read the files.
    int actLen = INFO_LEN + pathLen;
    int expLen = (actLen + 8) & ~7;
    int padLen = expLen - actLen - skipped;
    if (padLen > 0)
    {
        NB.skipFully(@in, padLen);
        md.Update(nullpad, 0, padLen);
    }
}
/// <summary>
/// Create an index-pack operation that reads pack data from <paramref name="src"/>.
/// </summary>
/// <param name="db">Repository the pack belongs to.</param>
/// <param name="src">Stream supplying the raw pack data.</param>
/// <param name="dstBase">
/// Base file for the destination pack/index pair, or null when no output
/// files should be created.
/// </param>
public IndexPack(Repository db, Stream src, FileInfo dstBase)
{
    _repo = db;
    _stream = src;
    _crc = new Crc32();
    _inflater = InflaterCache.Instance.get();
    _windowCursor = new WindowCursor();
    _buffer = new byte[BUFFER_SIZE];
    _objectData = new byte[BUFFER_SIZE];
    _objectDigest = Constants.newMessageDigest();
    _tempObjectId = new MutableObjectId();
    _packDigest = Constants.newMessageDigest();

    if (dstBase == null)
    {
        _dstPack = null;
        _dstIdx = null;
    }
    else
    {
        // Derive the .pack/.idx file names from the destination base name
        // and open the pack output immediately.
        DirectoryInfo directory = dstBase.Directory;
        string baseName = dstBase.Name;
        _dstPack = new FileInfo(Path.Combine(directory.ToString(), GetPackFileName(baseName)));
        _dstIdx = new FileInfo(Path.Combine(directory.ToString(), GetIndexFileName(baseName)));
        _packOut = _dstPack.Create();
    }
}
/// <summary>
/// Consume the entire pack stream and build the corresponding index,
/// reporting progress through <paramref name="progress"/>. On any IOException
/// the partially written pack and index files are deleted before rethrowing.
/// </summary>
/// <param name="progress">Monitor receiving progress updates; may cancel the download.</param>
public void index(ProgressMonitor progress)
{
    progress.Start(2 /* tasks */);
    try
    {
        try
        {
            ReadPackHeader();

            _entries = new PackedObjectInfo[(int)_objectCount];
            _baseById = new ObjectIdSubclassMap<DeltaChain>();
            _baseByPos = new Dictionary<long, UnresolvedDelta>();

            // Task 1: read every object record out of the pack stream.
            progress.BeginTask(PROGRESS_DOWNLOAD, (int)_objectCount);
            for (int done = 0; done < _objectCount; done++)
            {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled)
                {
                    throw new IOException("Download cancelled");
                }
            }
            ReadPackFooter();
            EndInput();
            progress.EndTask();

            if (_deltaCount > 0)
            {
                // Deltas can only be resolved when the pack was spooled to disk.
                if (_packOut == null)
                {
                    throw new IOException("need packOut");
                }
                ResolveDeltas(progress);
                if (_entryCount < _objectCount)
                {
                    // A thin pack references bases outside the pack; only
                    // proceed when the caller opted in to fixing it.
                    if (!_fixThin)
                    {
                        throw new IOException("pack has " + (_objectCount - _entryCount) + " unresolved deltas");
                    }
                    FixThinPack(progress);
                }
            }
            if (_packOut != null && (_keepEmpty || _entryCount > 0))
            {
                _packOut.Flush();
            }

            // Release per-run state; it is not needed for index writing.
            _packDigest = null;
            _baseById = null;
            _baseByPos = null;

            if (_dstIdx != null && (_keepEmpty || _entryCount > 0))
            {
                WriteIdx();
            }
        }
        finally
        {
            // Cleanup runs even on failure: return the inflater to the cache,
            // release the window cursor, and close the pack output.
            try
            {
                InflaterCache.Instance.release(_inflater);
            }
            finally
            {
                _inflater = null;
            }
            _windowCursor = WindowCursor.Release(_windowCursor);

            progress.EndTask();
            if (_packOut != null)
            {
                _packOut.Close();
            }
        }

        // Mark the completed files read-only so they are not modified later.
        if (_keepEmpty || _entryCount > 0)
        {
            if (_dstPack != null)
            {
                _dstPack.IsReadOnly = true;
            }

            if (_dstIdx != null)
            {
                _dstIdx.IsReadOnly = true;
            }
        }
    }
    catch (IOException)
    {
        // Remove partial output before propagating the failure.
        if (_dstPack != null) _dstPack.Delete();
        if (_dstIdx != null) _dstIdx.Delete();
        throw;
    }
}
/// <summary>
/// Lazily create the digest and read buffer used for content hashing,
/// sharing the root iterator's instances when a parent exists.
/// </summary>
private void initializeDigest()
{
    if (contentDigest != null)
        return; // already set up

    if (parent != null)
    {
        // Share one buffer/digest pair across the whole iterator chain.
        // Direct cast: a parent of any other type is a programming error.
        WorkingTreeIterator root = (WorkingTreeIterator)parent;
        root.initializeDigest();
        contentReadBuffer = root.contentReadBuffer;
        contentDigest = root.contentDigest;
    }
    else
    {
        contentReadBuffer = new byte[BUFFER_SIZE];
        contentDigest = Constants.newMessageDigest();
    }
}
/// <summary>
/// Read one dircache entry from <paramref name="in"/>, updating
/// <paramref name="md"/> with every byte consumed so the index checksum
/// can be verified by the caller.
/// </summary>
/// <param name="sharedInfo">Buffer shared by all entries; this entry's fixed-size record is read into it.</param>
/// <param name="infoAt">Offset within <paramref name="sharedInfo"/> where this entry's record begins.</param>
/// <param name="in">Stream positioned at the start of this entry.</param>
/// <param name="md">Digest accumulating all bytes of the index file.</param>
public DirCacheEntry(byte[] sharedInfo, int infoAt, Stream @in, MessageDigest md)
{
    _info = sharedInfo;
    _infoOffset = infoAt;

    NB.ReadFully(@in, _info, _infoOffset, INFO_LEN);
    md.Update(_info, _infoOffset, INFO_LEN);

    int pathLen = NB.decodeUInt16(_info, _infoOffset + PFlags) & NameMask;
    int skipped = 0;

    if (pathLen < NameMask)
    {
        // Short path: the exact length is stored in the flags field.
        _path = new byte[pathLen];
        NB.ReadFully(@in, _path, 0, pathLen);
        md.Update(_path, 0, pathLen);
    }
    else
    {
        // Long path: the length field saturated at NameMask, so the real
        // name runs until a NUL terminator. Buffer the first NameMask bytes,
        // then continue byte-by-byte until the terminator.
        var nameBuffer = new MemoryStream();
        var head = new byte[NameMask];
        NB.ReadFully(@in, head, 0, NameMask);
        nameBuffer.Write(head, 0, head.Length);

        for (; ; )
        {
            int c = @in.ReadByte();
            if (c < 0)
            {
                throw new EndOfStreamException("Short Read of block.");
            }
            if (c == 0)
                break;
            nameBuffer.WriteByte((byte)c);
        }

        _path = nameBuffer.ToArray();
        pathLen = _path.Length;
        skipped = 1; // the '\0' terminator consumed above
        md.Update(_path, 0, pathLen);
        md.Update(0);
    }

    // Index records are padded out to the next 8 byte alignment
    // for historical reasons related to how C Git read the files.
    int actLen = INFO_LEN + pathLen;
    int expLen = (actLen + 8) & ~7;
    int padLen = expLen - actLen - skipped;
    if (padLen > 0)
    {
        NB.skipFully(@in, padLen);
        md.Update(NullPad, 0, padLen);
    }
}