/// <summary>
/// Construct an object writer for the specified repository.
/// </summary>
/// <param name="repo">The repository this writer creates objects in.</param>
public ObjectWriter(Repository repo)
{
    _r = repo;
    _buf = new byte[0x2000]; // 8 KiB scratch buffer for streaming object content.
    // Use the shared factory rather than instantiating MessageDigest directly,
    // for consistency with the rest of the code base (every other digest in
    // this library is obtained via Constants.newMessageDigest()).
    _md = Constants.newMessageDigest();
    // Deflater compression level comes from the repository's core config.
    _def = new Deflater(_r.Config.getCore().getCompression());
}
/// <summary>
/// Create a fetch connection that discovers objects by walking one or more
/// remote object databases, starting from the given database.
/// </summary>
/// <param name="t">Transport the connection belongs to.</param>
/// <param name="w">First remote object database to consult.</param>
public WalkFetchConnection(IWalkTransport t, WalkRemoteObjectDatabase w)
{
    var transport = (Transport)t;
    _local = transport.Local;
    _objCheck = transport.CheckFetchedObjects ? new ObjectChecker() : null;

    _idBuffer = new MutableObjectId();
    _objectDigest = Constants.newMessageDigest();

    // Start with the single given database; alternates are added lazily.
    _remotes = new List<WalkRemoteObjectDatabase> { w };
    _unfetchedPacks = new LinkedList<RemotePack>();
    _packsConsidered = new List<string>();

    _noPacksYet = new LinkedList<WalkRemoteObjectDatabase>();
    _noPacksYet.AddFirst(w);
    _noAlternatesYet = new LinkedList<WalkRemoteObjectDatabase>();
    _noAlternatesYet.AddFirst(w);

    _fetchErrors = new Dictionary<ObjectId, List<Exception>>();
    _packLocks = new List<PackLock>(4);

    _revWalk = new RevWalk.RevWalk(_local);
    _treeWalk = new TreeWalk.TreeWalk(_local);
    COMPLETE = _revWalk.newFlag("COMPLETE");
    IN_WORK_QUEUE = _revWalk.newFlag("IN_WORK_QUEUE");
    LOCALLY_SEEN = _revWalk.newFlag("LOCALLY_SEEN");

    _localCommitQueue = new DateRevQueue();
    _workQueue = new LinkedList<ObjectId>();
}
/// <summary>
/// Create a pack output stream wrapping <paramref name="stream"/>, tracking
/// a CRC-32 and a running message digest of the bytes written.
/// </summary>
/// <param name="stream">Destination stream for pack data.</param>
public PackOutputStream(Stream stream)
{
    _stream = stream;
    _md = Constants.newMessageDigest();
    _crc = new Crc32();
}
/// <summary>
/// Construct an object writer for the specified repository.
/// </summary>
/// <param name="repo">The repository to write objects into.</param>
public ObjectWriter(Repository repo)
{
    _r = repo;
    _md = Constants.newMessageDigest();
    _buf = new byte[8192]; // 8 KiB (== 0x2000) scratch buffer.
}
/// <summary>
/// Skip over (and digest) the body of an optional DIRC extension that this
/// reader does not understand, so the running index checksum stays valid.
/// </summary>
/// <param name="inStream">Index stream positioned at the extension body.</param>
/// <param name="md">Digest accumulating the index checksum.</param>
/// <param name="hdr">4-byte extension header, used only for error text.</param>
/// <param name="sz">Number of extension body bytes to skip.</param>
private void skipOptionalExtension(Stream inStream, MessageDigest md, byte[] hdr, long sz)
{
    byte[] b = new byte[4096];
    while (0 < sz)
    {
        int n = inStream.Read(b, 0, (int)Math.Min(b.Length, sz));
        // BUG FIX: .NET Stream.Read returns 0 at end-of-stream, never -1
        // (unlike Java's InputStream.read). The old `n < 0` test could
        // never fire, so a truncated file made this loop spin forever.
        if (n <= 0)
        {
            throw new EndOfStreamException("Short read of optional DIRC extension "
                + formatExtensionName(hdr) + "; expected another " + sz
                + " bytes within the section.");
        }
        md.Update(b, 0, n);
        sz -= n;
    }
}
/// <summary>
/// Create an index-pack session reading pack data from <paramref name="src"/>.
/// </summary>
/// <param name="db">Repository the pack belongs to.</param>
/// <param name="src">Stream supplying the raw pack data.</param>
/// <param name="dstBase">
/// Base path for the destination .pack/.idx files, or null to not keep the
/// pack on disk.
/// </param>
public IndexPack(Repository db, Stream src, FileInfo dstBase)
{
    _repo = db;
    _stream = src;

    _crc = new Crc32();
    _inflater = InflaterCache.Instance.get();
    _windowCursor = new WindowCursor();

    _buffer = new byte[BUFFER_SIZE];
    _objectData = new byte[BUFFER_SIZE];
    _objectDigest = Constants.newMessageDigest();
    _tempObjectId = new MutableObjectId();
    _packDigest = Constants.newMessageDigest();

    if (dstBase == null)
    {
        _dstPack = null;
        _dstIdx = null;
    }
    else
    {
        // Derive the .pack and .idx file names from the common base name
        // and immediately open the pack file for writing.
        string baseName = dstBase.Name;
        string baseDir = dstBase.Directory.FullName;
        _dstPack = new FileInfo(Path.Combine(baseDir, GetPackFileName(baseName)));
        _dstIdx = new FileInfo(Path.Combine(baseDir, GetIndexFileName(baseName)));
        _packOut = _dstPack.Create();
    }
}
/// <summary>
/// Consume the entire pack stream, indexing every object in it. Resolves
/// deltas, optionally fixes a thin pack, writes the .idx file, and marks the
/// finished pack/index files read-only. On any IOException the partially
/// written .pack/.idx files are deleted before rethrowing.
/// </summary>
/// <param name="progress">Receives progress feedback; may cancel the download.</param>
public void index(ProgressMonitor progress)
{
    progress.Start(2 /* tasks */);
    try
    {
        try
        {
            // Phase 1: read every object record out of the incoming stream.
            ReadPackHeader();
            _entries = new PackedObjectInfo[(int)_objectCount];
            _baseById = new ObjectIdSubclassMap<DeltaChain>();
            _baseByPos = new LongMap<UnresolvedDelta>();
            progress.BeginTask(PROGRESS_DOWNLOAD, (int)_objectCount);
            for (int done = 0; done < _objectCount; done++)
            {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled)
                {
                    throw new IOException("Download cancelled");
                }
            }
            ReadPackFooter();
            EndInput();
            progress.EndTask();
            // Phase 2: resolve any deltas seen during phase 1. Deltas can only
            // be resolved when the pack is also being written to disk.
            if (_deltaCount > 0)
            {
                if (_packOut == null)
                {
                    throw new IOException("need packOut");
                }
                ResolveDeltas(progress);
                if (_entryCount < _objectCount)
                {
                    // A thin pack references bases not present in the stream;
                    // only proceed if the caller asked us to append them.
                    if (!_fixThin)
                    {
                        throw new IOException("pack has " + (_objectCount - _entryCount) + " unresolved deltas");
                    }
                    FixThinPack(progress);
                }
            }
            if (_packOut != null && (_keepEmpty || _entryCount > 0))
            {
                _packOut.Flush();
            }
            // Release parsing state that is no longer needed.
            _packDigest = null;
            _baseById = null;
            _baseByPos = null;
            if (_dstIdx != null && (_keepEmpty || _entryCount > 0))
            {
                WriteIdx();
            }
        }
        finally
        {
            // Cleanup runs even on failure: return the inflater to the cache
            // (and null it even if release throws), free the window cursor,
            // and close the pack output file.
            try
            {
                InflaterCache.Instance.release(_inflater);
            }
            finally
            {
                _inflater = null;
            }
            _windowCursor = WindowCursor.Release(_windowCursor);
            progress.EndTask();
            if (_packOut != null)
            {
                _packOut.Close();
            }
        }
        // Finished files are made read-only, matching C Git's pack handling.
        if (_keepEmpty || _entryCount > 0)
        {
            if (_dstPack != null)
            {
                _dstPack.IsReadOnly = true;
            }
            if (_dstIdx != null)
            {
                _dstIdx.IsReadOnly = true;
            }
        }
    }
    catch (IOException)
    {
        // Don't leave a corrupt partial pack/index behind.
        if (_dstPack != null) _dstPack.Delete();
        if (_dstIdx != null) _dstIdx.Delete();
        throw;
    }
}
/// <summary>
/// Wrap <paramref name="stream"/> so that every byte written through this
/// stream is also fed into <paramref name="digest"/>.
/// </summary>
/// <param name="stream">Underlying destination stream.</param>
/// <param name="digest">Digest updated with all written bytes.</param>
public DigestOutputStream(Stream stream, MessageDigest digest)
{
    m_stream = stream;
    m_digest = digest;
}
/// <summary>
/// Read a single dircache entry from an index stream, updating the running
/// index checksum with every byte consumed (fixed header, path, and padding).
/// </summary>
/// <param name="sharedInfo">Shared buffer holding fixed-width entry headers.</param>
/// <param name="infoAt">Offset of this entry's header within sharedInfo.</param>
/// <param name="in">Index stream positioned at this entry's header.</param>
/// <param name="md">Digest accumulating the index checksum.</param>
public DirCacheEntry(byte[] sharedInfo, int infoAt, Stream @in, MessageDigest md)
{
    _info = sharedInfo;
    _infoOffset = infoAt;
    IO.ReadFully(@in, _info, _infoOffset, INFO_LEN);
    md.Update(_info, _infoOffset, INFO_LEN);
    // The low bits of the flags word hold the path length, saturated at
    // NameMask; a saturated value means the real path is longer and must be
    // read up to a NUL terminator instead.
    int pathLen = NB.decodeUInt16(_info, _infoOffset + PFlags) & NameMask;
    int skipped = 0;
    if (pathLen < NameMask)
    {
        // Common case: exact length known, read the path in one go.
        _path = new byte[pathLen];
        IO.ReadFully(@in, _path, 0, pathLen);
        md.Update(_path, 0, pathLen);
    }
    else
    {
        // Long path: the first NameMask bytes are guaranteed present, then
        // read byte-by-byte until the terminating NUL.
        var tmp = new MemoryStream();
        {
            var buf = new byte[NameMask];
            IO.ReadFully(@in, buf, 0, NameMask);
            tmp.Write(buf, 0, buf.Length);
        }
        while (true)
        {
            int c = @in.ReadByte();
            if (c < 0)
            {
                throw new EndOfStreamException("Short Read of block.");
            }
            if (c == 0) break;
            tmp.Write(new[] { (byte)c }, 0, 1);
        }
        _path = tmp.ToArray();
        pathLen = _path.Length;
        skipped = 1; // we already skipped 1 '\0' above to break the loop.
        md.Update(_path, 0, pathLen);
        // The terminating NUL is part of the on-disk record, so it must be
        // digested too.
        md.Update(0);
    }
    // Index records are padded out to the next 8 byte alignment
    // for historical reasons related to how C Git Read the files.
    //
    // (actLen + 8) & ~7 rounds UP to the next multiple of 8 even when
    // already aligned, i.e. 1-8 pad bytes; `skipped` accounts for the NUL
    // already consumed in the long-path case.
    int actLen = INFO_LEN + pathLen;
    int expLen = (actLen + 8) & ~7;
    int padLen = expLen - actLen - skipped;
    if (padLen > 0)
    {
        IO.skipFully(@in, padLen);
        md.Update(NullPad, 0, padLen);
    }
}
/// <summary>
/// Create a pass-through output stream that mirrors every written byte
/// into the supplied message digest.
/// </summary>
/// <param name="stream">Underlying destination stream.</param>
/// <param name="digest">Digest updated with all written bytes.</param>
public DigestOutputStream(Stream stream, MessageDigest digest)
{
    m_stream = stream;
    m_digest = digest;
}
/// <summary>
/// Lazily set up the content digest and read buffer. Child iterators share
/// the root iterator's single buffer/digest pair instead of allocating
/// their own.
/// </summary>
private void InitializeDigest()
{
    if (_contentDigest != null)
    {
        return; // Already initialized.
    }

    if (Parent == null)
    {
        // Root iterator owns the one buffer/digest pair for the tree.
        _contentReadBuffer = new byte[BufferSize];
        _contentDigest = Constants.newMessageDigest();
        return;
    }

    // Delegate to the root (recursively) and borrow its instances.
    var parent = (WorkingTreeIterator)Parent;
    parent.InitializeDigest();
    _contentReadBuffer = parent._contentReadBuffer;
    _contentDigest = parent._contentDigest;
}