/// <summary>Initialize the MD5 message digest used by this instance.</summary>
/// <remarks>
/// NOTE(review): a failure from <c>MessageDigest.GetInstance</c> is swallowed and
/// only printed to stderr, leaving <c>md</c> null; any later use of <c>md</c> would
/// then fail. Confirm this best-effort behavior is intended (MD5 is expected to
/// always be available, so the catch is effectively unreachable in practice).
/// </remarks>
/// <exception cref="System.Exception"></exception>
public virtual void Init()
{
    try
    {
        md = MessageDigest.GetInstance("MD5");
    }
    catch (Exception e)
    {
        // Best-effort: report the failure and continue; md stays unset.
        System.Console.Error.WriteLine(e);
    }
}
/// <summary>
/// Copy <paramref name="cnt"/> bytes starting at pack offset <paramref name="pos"/>
/// from this buffered window into the output stream, optionally folding the bytes
/// into <paramref name="digest"/>.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
internal override void Write(PackOutputStream @out, long pos, int cnt, MessageDigest digest)
{
    // Work on an independent slice so the shared buffer's position is untouched.
    ByteBuffer slice = buffer.Slice();
    slice.Position((int)(pos - start));
    while (cnt > 0)
    {
        // Reuse the output stream's scratch buffer to avoid a per-call allocation.
        byte[] copy = @out.GetCopyBuffer();
        int chunk = Math.Min(cnt, copy.Length);
        slice.Get(copy, 0, chunk);
        @out.Write(copy, 0, chunk);
        if (digest != null)
        {
            digest.Update(copy, 0, chunk);
        }
        cnt -= chunk;
    }
}
/// <summary>
/// Create a filtering input stream that feeds every byte read from
/// <paramref name="stream"/> into <paramref name="digest"/>.
/// </summary>
public DigestInputStream(InputStream stream, MessageDigest digest) : base(stream)
{
    // Expose the digest so callers can retrieve the accumulated hash later.
    this.Digest = digest;
}
/// <summary>Create a new inserter for a database.</summary>
/// <remarks>
/// Allocates a fresh message digest so object ids can be computed as
/// content is inserted.
/// </remarks>
public ObjectInserter()
{
    this.digest = Constants.NewMessageDigest();
}
/// <summary>
/// Create a filtering output stream that feeds every byte written to
/// <paramref name="os"/> into <paramref name="md"/>.
/// </summary>
public DigestOutputStream(OutputStream os, MessageDigest md)
{
    // Assignments are independent; keep both references for later use.
    digest = md;
    this.os = os;
}
/// <summary>
/// Copy <paramref name="cnt"/> bytes starting at pack offset <paramref name="pos"/>
/// from this in-memory window directly to the output stream, optionally folding
/// the bytes into <paramref name="digest"/>.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
internal override void Write(PackOutputStream @out, long pos, int cnt, MessageDigest digest)
{
    // Backing store is a plain array, so a single bulk write suffices.
    int offset = (int)(pos - start);
    @out.Write(array, offset, cnt);
    if (digest != null)
    {
        digest.Update(array, offset, cnt);
    }
}
/// <summary>
/// Stream an object of the given type and length into a new temporary file,
/// compressing it and updating <paramref name="md"/> with the canonical
/// (uncompressed) object representation via the digest stream.
/// </summary>
/// <param name="md">digest updated with the header and content bytes.</param>
/// <param name="type">object type code written into the header.</param>
/// <param name="len">exact number of content bytes expected from <paramref name="is"/>.</param>
/// <param name="is">source of the object content.</param>
/// <returns>the temporary file holding the compressed object.</returns>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="System.IO.FileNotFoundException"></exception>
/// <exception cref="Sharpen.Error"></exception>
private FilePath ToTemp(MessageDigest md, int type, long len, InputStream @is)
{
    // Delete-on-failure flag: cleared only once the file is fully written.
    bool delete = true;
    FilePath tmp = NewTempFile();
    try
    {
        FileOutputStream fOut = new FileOutputStream(tmp);
        try
        {
            OutputStream @out = fOut;
            if (config.GetFSyncObjectFiles())
            {
                // Route writes through the channel so Force(true) below can fsync.
                @out = Channels.NewOutputStream(fOut.GetChannel());
            }
            // Layering: digest sees plain bytes, which are then deflate-compressed.
            DeflaterOutputStream cOut = Compress(@out);
            DigestOutputStream dOut = new DigestOutputStream(cOut, md);
            WriteHeader(dOut, type, len);
            byte[] buf = Buffer();
            while (len > 0)
            {
                int n = @is.Read(buf, 0, (int)Math.Min(len, buf.Length));
                if (n <= 0)
                {
                    // Source ended before the promised length was delivered.
                    throw ShortInput(len);
                }
                dOut.Write(buf, 0, n);
                len -= n;
            }
            // Flush digest layer, then finalize the deflate stream trailer.
            dOut.Flush();
            cOut.Finish();
        }
        finally
        {
            if (config.GetFSyncObjectFiles())
            {
                // Persist to disk before close when configured for durability.
                fOut.GetChannel().Force(true);
            }
            fOut.Close();
        }
        // Success: hand ownership of the temp file to the caller.
        delete = false;
        return tmp;
    }
    finally
    {
        if (delete)
        {
            // Any failure above leaves a partial file; remove it.
            FileUtils.Delete(tmp);
        }
    }
}
/// <summary>Parse the pack stream.</summary>
/// <remarks>Parse the pack stream.</remarks>
/// <param name="receiving">
/// receives progress feedback during the initial receiving
/// objects phase. If null,
/// <see cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</see>
/// will be
/// used.
/// </param>
/// <param name="resolving">receives progress feedback during the resolving objects phase.
/// </param>
/// <returns>
/// the pack lock, if one was requested by setting
/// <see cref="SetLockMessage(string)">SetLockMessage(string)</see>
/// . NOTE(review): this implementation always returns null — confirm
/// subclasses override to provide the lock.
/// </returns>
/// <exception cref="System.IO.IOException">the stream is malformed, or contains corrupt objects.
/// </exception>
public virtual PackLock Parse(ProgressMonitor receiving, ProgressMonitor resolving
    )
{
    if (receiving == null)
    {
        receiving = NullProgressMonitor.INSTANCE;
    }
    if (resolving == null)
    {
        resolving = NullProgressMonitor.INSTANCE;
    }
    if (receiving == resolving)
    {
        // Same monitor for both phases: announce two tasks up front.
        receiving.Start(2);
    }
    try
    {
        ReadPackHeader();
        entries = new PackedObjectInfo[(int)objectCount];
        baseById = new ObjectIdOwnerMap<PackParser.DeltaChain>();
        baseByPos = new LongMap<PackParser.UnresolvedDelta>();
        deferredCheckBlobs = new BlockList<PackedObjectInfo>();
        receiving.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
        try
        {
            // Phase 1: index every object advertised by the pack header.
            for (int done = 0; done < objectCount; done++)
            {
                IndexOneObject();
                receiving.Update(1);
                if (receiving.IsCancelled())
                {
                    throw new IOException(JGitText.Get().downloadCancelled);
                }
            }
            ReadPackFooter();
            EndInput();
        }
        finally
        {
            receiving.EndTask();
        }
        if (!deferredCheckBlobs.IsEmpty())
        {
            DoDeferredCheckBlobs();
        }
        if (deltaCount > 0)
        {
            // Phase 2: resolve delta chains against their bases.
            if (resolving is BatchingProgressMonitor)
            {
                // Delay UI output so fast resolutions stay silent.
                ((BatchingProgressMonitor)resolving).SetDelayStart(1000, TimeUnit.MILLISECONDS);
            }
            resolving.BeginTask(JGitText.Get().resolvingDeltas, deltaCount);
            ResolveDeltas(resolving);
            if (entryCount < objectCount)
            {
                // Deltas remain whose bases are not in this pack (thin pack).
                if (!IsAllowThin())
                {
                    throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
                        , Sharpen.Extensions.ValueOf(objectCount - entryCount)));
                }
                ResolveDeltasWithExternalBases(resolving);
                if (entryCount < objectCount)
                {
                    // Even external bases could not satisfy everything: corrupt.
                    throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
                        , Sharpen.Extensions.ValueOf(objectCount - entryCount)));
                }
            }
            resolving.EndTask();
        }
        // Release parse-time state eagerly.
        packDigest = null;
        baseById = null;
        baseByPos = null;
    }
    finally
    {
        // Release the reader and inflater even when parsing failed.
        try
        {
            if (readCurs != null)
            {
                readCurs.Release();
            }
        }
        finally
        {
            readCurs = null;
        }
        try
        {
            inflater.Release();
        }
        finally
        {
            inflater = null;
        }
    }
    return null;
}
/// <summary>
/// Anonymous-class constructor (Sharpen-generated): captures the enclosing
/// <c>AmazonS3</c> instance plus the upload target and progress state.
/// </summary>
public _LocalFile_455(AmazonS3 _enclosing, string bucket, string key, MessageDigest md5, ProgressMonitor monitor, string monitorTask)
{
    // All assignments are independent; qualify with this. because the
    // parameters shadow the captured fields.
    this._enclosing = _enclosing;
    this.monitor = monitor;
    this.monitorTask = monitorTask;
    this.bucket = bucket;
    this.key = key;
    this.md5 = md5;
}
/// <summary>
/// Install the message digest used for hashing and cache its block-relevant
/// output length.
/// </summary>
/// <param name="md">digest implementation to use from now on.</param>
protected internal virtual void SetH(MessageDigest md)
{
    // Cache the digest length before storing the reference; the two
    // statements are order-independent.
    bsize = md.GetDigestLength();
    this.md = md;
}
/// <summary>
/// Create a pack parser that writes a pack and index into an object
/// directory database.
/// </summary>
internal ObjectDirectoryPackParser(FileObjectDatabase odb, InputStream src) : base
    (odb, src)
{
    // No parameter shadows a field here, so unqualified assignment is fine.
    db = odb;
    crc = new CRC32();
    tailDigest = Constants.NewMessageDigest();
    // Index format version comes from the repository's core configuration.
    indexVersion = db.GetConfig().Get(CoreConfig.KEY).GetPackIndexVersion();
}
/// <summary>Consume data from the input stream until the packfile is indexed.</summary>
/// <remarks>Consume data from the input stream until the packfile is indexed.</remarks>
/// <param name="progress">progress feedback</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public virtual void Index(ProgressMonitor progress)
{
    // Two tasks: receiving objects, then resolving deltas.
    progress.Start(2);
    try
    {
        try
        {
            ReadPackHeader();
            entries = new PackedObjectInfo[(int)objectCount];
            baseById = new ObjectIdSubclassMap<IndexPack.DeltaChain>();
            baseByPos = new LongMap<IndexPack.UnresolvedDelta>();
            deferredCheckBlobs = new AList<PackedObjectInfo>();
            progress.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
            // Phase 1: index every object advertised by the pack header.
            for (int done = 0; done < objectCount; done++)
            {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled())
                {
                    throw new IOException(JGitText.Get().downloadCancelled);
                }
            }
            ReadPackFooter();
            EndInput();
            if (!deferredCheckBlobs.IsEmpty())
            {
                DoDeferredCheckBlobs();
            }
            progress.EndTask();
            if (deltaCount > 0)
            {
                // Phase 2: deltas require random access to the pack on disk.
                if (packOut == null)
                {
                    throw new IOException(JGitText.Get().needPackOut);
                }
                ResolveDeltas(progress);
                if (entryCount < objectCount)
                {
                    // Remaining deltas reference bases outside this pack.
                    if (!fixThin)
                    {
                        throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
                            , (objectCount - entryCount)));
                    }
                    FixThinPack(progress);
                }
            }
            if (packOut != null && (keepEmpty || entryCount > 0))
            {
                // Force pack data to disk before the index references it.
                packOut.GetChannel().Force(true);
            }
            // Release parse-time state eagerly.
            packDigest = null;
            baseById = null;
            baseByPos = null;
            if (dstIdx != null && (keepEmpty || entryCount > 0))
            {
                WriteIdx();
            }
        }
        finally
        {
            // Release reader/inflater and close files even on failure.
            try
            {
                if (readCurs != null)
                {
                    readCurs.Release();
                }
            }
            finally
            {
                readCurs = null;
            }
            try
            {
                inflater.Release();
            }
            finally
            {
                inflater = null;
                objectDatabase.Close();
            }
            progress.EndTask();
            if (packOut != null)
            {
                packOut.Close();
            }
        }
        if (keepEmpty || entryCount > 0)
        {
            // Completed packs are immutable from here on.
            if (dstPack != null)
            {
                dstPack.SetReadOnly();
            }
            if (dstIdx != null)
            {
                dstIdx.SetReadOnly();
            }
        }
    }
    catch (IOException err)
    {
        // Remove partial output so a failed transfer leaves no debris.
        if (dstPack != null)
        {
            FileUtils.Delete(dstPack);
        }
        if (dstIdx != null)
        {
            FileUtils.Delete(dstIdx);
        }
        throw;
    }
}
/// <summary>Create a new pack indexer utility.</summary>
/// <remarks>Create a new pack indexer utility.</remarks>
/// <param name="db"></param>
/// <param name="src">
/// stream to read the pack data from. If the stream is buffered
/// use
/// <see cref="BUFFER_SIZE">BUFFER_SIZE</see>
/// as the buffer size for the stream.
/// </param>
/// <param name="dstBase">
/// base path for the output files; <c>.pack</c> and <c>.idx</c>
/// suffixes are appended. May be null for a streaming-only parse.
/// </param>
/// <exception cref="System.IO.IOException">the output packfile could not be created.
/// </exception>
public IndexPack(Repository db, InputStream src, FilePath dstBase)
{
    repo = db;
    objectDatabase = db.ObjectDatabase.NewCachedDatabase();
    readCurs = objectDatabase.NewReader();
    @in = src;
    inflater = new IndexPack.InflaterStream(this);
    buf = new byte[BUFFER_SIZE];
    readBuffer = new byte[BUFFER_SIZE];
    objectDigest = Constants.NewMessageDigest();
    tempObjectId = new MutableObjectId();
    packDigest = Constants.NewMessageDigest();
    if (dstBase == null)
    {
        // Streaming-only mode: no files are produced.
        dstPack = null;
        dstIdx = null;
    }
    else
    {
        FilePath dir = dstBase.GetParentFile();
        string baseName = dstBase.GetName();
        dstPack = new FilePath(dir, baseName + ".pack");
        dstIdx = new FilePath(dir, baseName + ".idx");
        // Truncate any stale pack file from a previous attempt.
        packOut = new RandomAccessFile(dstPack, "rw");
        packOut.SetLength(0);
    }
}
/// <summary>Initialize a formatter to compute a patch id.</summary>
/// <remarks>
/// The diff output is discarded; only the message digest of the formatted
/// patch is retained, which becomes the patch id.
/// </remarks>
public PatchIdDiffFormatter() : base(new DigestOutputStream(NullOutputStream.INSTANCE
    , Constants.NewMessageDigest()))
{
    // The base class owns the stream we just built; fetch it back to keep a
    // direct handle on its digest.
    DigestOutputStream sink = (DigestOutputStream)GetOutputStream();
    digest = sink.GetMessageDigest();
}
/// <summary>Initialize a pack parser.</summary>
/// <remarks>Initialize a pack parser.</remarks>
/// <param name="odb">database the parser will write its objects into.</param>
/// <param name="src">the stream the parser will read.</param>
protected internal PackParser(ObjectDatabase odb, InputStream src)
{
    objectDatabase = odb.NewCachedDatabase();
    readCurs = objectDatabase.NewReader();
    @in = src;
    inflater = new PackParser.InflaterStream(this);
    // Scratch buffers sized for streaming reads; hdrBuf holds object headers.
    buf = new byte[BUFFER_SIZE];
    tempBuffer = new byte[BUFFER_SIZE];
    hdrBuf = new byte[64];
    // Separate digests: one per object, one over the whole pack stream.
    objectDigest = Constants.NewMessageDigest();
    packDigest = Constants.NewMessageDigest();
    tempObjectId = new MutableObjectId();
}
/// <summary>
/// Copy <paramref name="cnt"/> bytes starting at pack offset <paramref name="pos"/>
/// from this window into the output stream, optionally updating
/// <paramref name="md"/> with the bytes written.
/// </summary>
/// <param name="out">destination stream for the raw pack bytes.</param>
/// <param name="pos">absolute offset within the pack to start copying from.</param>
/// <param name="cnt">number of bytes to copy.</param>
/// <param name="md">digest to update with the copied bytes; may be null.</param>
/// <exception cref="System.IO.IOException"></exception>
internal abstract void Write(PackOutputStream @out, long pos, int cnt, MessageDigest md);
/// <summary>
/// Create a filtering input stream that feeds every byte read from
/// <paramref name="stream"/> into <paramref name="digest"/>.
/// </summary>
public DigestInputStream(InputStream stream, MessageDigest digest) : base(stream)
{
    // Expose the digest so callers can retrieve the accumulated hash later.
    this.Digest = digest;
}
/// <summary>
/// Read one cache entry from the index stream into the shared info buffer,
/// updating <paramref name="md"/> with every byte consumed (fixed-width stat
/// block, path, NUL terminator, and alignment padding) so the index checksum
/// can be verified by the caller.
/// </summary>
/// <param name="sharedInfo">buffer shared by all entries; this entry's stat data lands at <paramref name="infoAt"/>.</param>
/// <param name="infoAt">in/out cursor into <paramref name="sharedInfo"/>; advanced past this entry's stat block.</param>
/// <param name="in">index stream positioned at the start of this entry.</param>
/// <param name="md">digest accumulating all bytes read from the stream.</param>
/// <exception cref="System.IO.IOException"></exception>
internal DirCacheEntry(byte[] sharedInfo, MutableInteger infoAt, InputStream @in,
    MessageDigest md)
{
    // private static final int P_CTIME_NSEC = 4;
    // private static final int P_MTIME_NSEC = 12;
    // private static final int P_DEV = 16;
    // private static final int P_INO = 20;
    // private static final int P_UID = 28;
    // private static final int P_GID = 32;
    info = sharedInfo;
    infoOffset = infoAt.value;
    // Fixed-width portion first; extended entries carry extra flag bytes.
    IOUtil.ReadFully(@in, info, infoOffset, INFO_LEN);
    int len;
    if (IsExtended)
    {
        len = INFO_LEN_EXTENDED;
        IOUtil.ReadFully(@in, info, infoOffset + INFO_LEN, INFO_LEN_EXTENDED - INFO_LEN);
        if ((GetExtendedFlags() & ~EXTENDED_FLAGS) != 0)
        {
            // Unknown extended flag bits: refuse rather than silently drop.
            throw new IOException(MessageFormat.Format(JGitText.Get().DIRCUnrecognizedExtendedFlags
                , GetExtendedFlags().ToString()));
        }
    }
    else
    {
        len = INFO_LEN;
    }
    infoAt.value += len;
    md.Update(info, infoOffset, len);
    // Path length is packed into the low bits of the flags field; NAME_MASK
    // is a sentinel meaning "too long to encode — read until NUL".
    int pathLen = NB.DecodeUInt16(info, infoOffset + P_FLAGS) & NAME_MASK;
    int skipped = 0;
    if (pathLen < NAME_MASK)
    {
        path = new byte[pathLen];
        IOUtil.ReadFully(@in, path, 0, pathLen);
        md.Update(path, 0, pathLen);
    }
    else
    {
        // Long path: the first NAME_MASK bytes are guaranteed present,
        // the remainder is NUL-terminated.
        ByteArrayOutputStream tmp = new ByteArrayOutputStream();
        {
            byte[] buf = new byte[NAME_MASK];
            IOUtil.ReadFully(@in, buf, 0, NAME_MASK);
            tmp.Write(buf);
        }
        for (; ; )
        {
            int c = @in.Read();
            if (c < 0)
            {
                throw new EOFException(JGitText.Get().shortReadOfBlock);
            }
            if (c == 0)
            {
                break;
            }
            tmp.Write(c);
        }
        path = tmp.ToByteArray();
        pathLen = path.Length;
        skipped = 1;
        // we already skipped 1 '\0' above to break the loop.
        md.Update(path, 0, pathLen);
        // The terminating NUL was consumed from the stream, so it must be
        // folded into the digest too.
        md.Update(unchecked((byte)0));
    }
    // Index records are padded out to the next 8 byte alignment
    // for historical reasons related to how C Git read the files.
    //
    int actLen = len + pathLen;
    int expLen = (actLen + 8) & ~7;
    int padLen = expLen - actLen - skipped;
    if (padLen > 0)
    {
        IOUtil.SkipFully(@in, padLen);
        // Padding bytes also count toward the index checksum.
        md.Update(nullpad, 0, padLen);
    }
}