/// <summary>
/// A small blob stored in pack format should load fully cached and stream back
/// the identical bytes.
/// </summary>
public virtual void TestPackFormat_SmallObject()
{
	int type = Constants.OBJ_BLOB;
	byte[] data = GetRng().NextBytes(300);
	byte[] gz = CompressPackFormat(type, data);
	ObjectId id = ObjectId.ZeroId;
	ObjectLoader ol = UnpackedObject.Open(new ByteArrayInputStream(gz), Path(id), id, wc);
	NUnit.Framework.Assert.IsNotNull(ol, "created loader");
	NUnit.Framework.Assert.AreEqual(type, ol.GetType());
	NUnit.Framework.Assert.AreEqual(data.Length, ol.GetSize());
	NUnit.Framework.Assert.IsFalse(ol.IsLarge(), "is not large");
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(data, ol.GetCachedBytes()), "same content"
		);
	ObjectStream @in = ol.OpenStream();
	NUnit.Framework.Assert.IsNotNull(@in, "have stream");
	NUnit.Framework.Assert.AreEqual(type, @in.GetType());
	NUnit.Framework.Assert.AreEqual(data.Length, @in.GetSize());
	byte[] data2 = new byte[data.Length];
	IOUtil.ReadFully(@in, data2, 0, data.Length);
	// BUG FIX: the original re-compared data against ol.GetCachedBytes(),
	// never checking the bytes actually read from the stream into data2.
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(data, data2), "same content");
	// Consistency with the other small-object tests: the stream must be
	// exhausted after reading exactly GetSize() bytes.
	NUnit.Framework.Assert.AreEqual(-1, @in.Read(), "stream at EOF");
	@in.Close();
}
/// <summary>
/// Builds a pack with one whole blob and a three-deep REF_DELTA chain on top
/// of it, then verifies the final delta resolves to the expected content.
/// </summary>
public virtual void TestDelta_SmallObjectChain()
{
	ObjectInserter.Formatter fmt = new ObjectInserter.Formatter();
	// Base object: 512 bytes of 0xf3.
	byte[] data0 = new byte[512];
	Arrays.Fill(data0, unchecked((byte)unchecked((int)(0xf3))));
	ObjectId id0 = fmt.IdFor(Constants.OBJ_BLOB, data0);
	TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(64 * 1024);
	PackHeader(pack, 4);
	ObjectHeader(pack, Constants.OBJ_BLOB, data0.Length);
	Deflate(pack, data0);
	// Chain of three deltas, each produced by mutating one byte of its base.
	byte[] data1 = Clone(unchecked((int)(0x01)), data0);
	byte[] delta1 = Delta(data0, data1);
	ObjectId id1 = fmt.IdFor(Constants.OBJ_BLOB, data1);
	ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta1.Length);
	id0.CopyRawTo(pack);
	Deflate(pack, delta1);
	byte[] data2 = Clone(unchecked((int)(0x02)), data1);
	byte[] delta2 = Delta(data1, data2);
	ObjectId id2 = fmt.IdFor(Constants.OBJ_BLOB, data2);
	ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta2.Length);
	id1.CopyRawTo(pack);
	Deflate(pack, delta2);
	byte[] data3 = Clone(unchecked((int)(0x03)), data2);
	byte[] delta3 = Delta(data2, data3);
	ObjectId id3 = fmt.IdFor(Constants.OBJ_BLOB, data3);
	ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta3.Length);
	id2.CopyRawTo(pack);
	Deflate(pack, delta3);
	Digest(pack);
	PackParser ip = Index(pack.ToByteArray());
	ip.SetAllowThin(true);
	ip.Parse(NullProgressMonitor.INSTANCE);
	NUnit.Framework.Assert.IsTrue(wc.Has(id3), "has blob");
	ObjectLoader ol = wc.Open(id3);
	NUnit.Framework.Assert.IsNotNull(ol, "created loader");
	NUnit.Framework.Assert.AreEqual(Constants.OBJ_BLOB, ol.GetType());
	NUnit.Framework.Assert.AreEqual(data3.Length, ol.GetSize());
	// BUG FIX: the message previously read "is large", contradicting the
	// IsFalse assertion; siblings use "is not large".
	NUnit.Framework.Assert.IsFalse(ol.IsLarge(), "is not large");
	NUnit.Framework.Assert.IsNotNull(ol.GetCachedBytes());
	// BUG FIX: CollectionAssert.AreEquivalent ignores element order, which is
	// too weak for byte content; compare ordered bytes like the other tests.
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(data3, ol.GetCachedBytes()), "same content"
		);
	ObjectStream @in = ol.OpenStream();
	NUnit.Framework.Assert.IsNotNull(@in, "have stream");
	NUnit.Framework.Assert.AreEqual(Constants.OBJ_BLOB, @in.GetType());
	NUnit.Framework.Assert.AreEqual(data3.Length, @in.GetSize());
	byte[] act = new byte[data3.Length];
	IOUtil.ReadFully(@in, act, 0, data3.Length);
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(act, data3), "same content");
	NUnit.Framework.Assert.AreEqual(-1, @in.Read(), "stream at EOF");
	@in.Close();
}
/// <summary>Push a candidate object onto the generator's traversal stack.</summary>
/// <remarks>
/// Push a candidate object onto the generator's traversal stack.
/// <p>
/// Candidates should be pushed in history order from oldest-to-newest.
/// Applications should push the starting commit first, then the index
/// revision (if the index is interesting), and finally the working tree copy
/// (if the working tree is interesting).
/// </remarks>
/// <param name="description">description of the blob revision, such as "Working Tree".
/// </param>
/// <param name="id">may be a commit or a blob.</param>
/// <returns>
/// 
/// <code>this</code>
/// </returns>
/// <exception cref="System.IO.IOException">the repository cannot be read.</exception>
public virtual NGit.Blame.BlameGenerator Push(string description, AnyObjectId id)
{
	ObjectLoader loader = reader.Open(id);
	if (loader.GetType() == Constants.OBJ_BLOB)
	{
		// A raw blob: wrap it in a blob candidate, defaulting the label to
		// the "not committed yet" text when none was supplied.
		string label = description ?? JGitText.Get().blameNotCommittedYet;
		Candidate.BlobCandidate blob = new Candidate.BlobCandidate(label, resultPath);
		blob.sourceBlob = id.ToObjectId();
		blob.sourceText = new RawText(loader.GetCachedBytes(int.MaxValue));
		blob.regionList = new Region(0, 0, blob.sourceText.Size());
		remaining = blob.sourceText.Size();
		Push(blob);
		return this;
	}
	// Otherwise treat the id as a commit and locate resultPath within it;
	// a commit that does not touch the path contributes nothing.
	RevCommit commit = revPool.ParseCommit(id);
	if (!Find(commit, resultPath))
	{
		return this;
	}
	Candidate head = new Candidate(commit, resultPath);
	head.sourceBlob = idBuf.ToObjectId();
	head.LoadText(reader);
	head.regionList = new Region(0, 0, head.sourceText.Size());
	remaining = head.sourceText.Size();
	Push(head);
	return this;
}
/// <summary>
/// A small blob packed whole should be reported as not large, expose cached
/// bytes, and stream back identical content ending at EOF.
/// </summary>
public virtual void TestWhole_SmallObject()
{
	int type = Constants.OBJ_BLOB;
	byte[] expected = GetRng().NextBytes(300);
	RevBlob id = tr.Blob(expected);
	tr.Branch("master").Commit().Add("A", id).Create();
	tr.PackAndPrune();
	NUnit.Framework.Assert.IsTrue(wc.Has(id), "has blob");
	ObjectLoader ol = wc.Open(id);
	NUnit.Framework.Assert.IsNotNull(ol, "created loader");
	NUnit.Framework.Assert.AreEqual(type, ol.GetType());
	NUnit.Framework.Assert.AreEqual(expected.Length, ol.GetSize());
	NUnit.Framework.Assert.IsFalse(ol.IsLarge(), "is not large");
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(expected, ol.GetCachedBytes()), "same content"
		);
	// Streaming access must agree with the cached view.
	ObjectStream @in = ol.OpenStream();
	NUnit.Framework.Assert.IsNotNull(@in, "have stream");
	NUnit.Framework.Assert.AreEqual(type, @in.GetType());
	NUnit.Framework.Assert.AreEqual(expected.Length, @in.GetSize());
	byte[] actual = new byte[expected.Length];
	IOUtil.ReadFully(@in, actual, 0, expected.Length);
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(actual, expected), "same content");
	NUnit.Framework.Assert.AreEqual(-1, @in.Read(), "stream at EOF");
	@in.Close();
}
/// <summary>Read a tree's raw bytes, consulting the per-search cache first.</summary>
/// <exception cref="NGit.Errors.MissingObjectException"></exception>
/// <exception cref="NGit.Errors.IncorrectObjectTypeException"></exception>
/// <exception cref="System.IO.IOException"></exception>
private byte[] ReadTree(AnyObjectId id)
{
	BaseSearch.TreeWithData cached = treeCache.Get(id);
	if (cached != null)
	{
		return cached.buf;
	}
	// Cache miss: load the tree from the reader and remember it.
	byte[] raw = reader.Open(id, Constants.OBJ_TREE).GetCachedBytes(int.MaxValue);
	treeCache.Add(new BaseSearch.TreeWithData(id, raw));
	return raw;
}
/// <summary>
/// Append one whole object to a pack buffer: a variable-length object header
/// (type + size) followed by the deflated content.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void Copy(TemporaryBuffer.Heap tinyPack, ObjectLoader ldr)
{
	byte[] content = ldr.GetCachedBytes();
	byte[] header = new byte[64];
	int idx = 0;
	int len = content.Length;
	int rest = (int)(((uint)len) >> 4);
	// First header byte: continuation flag in the MSB, 3-bit type, and the
	// low 4 bits of the size.
	header[idx++] = unchecked((byte)((rest > 0 ? unchecked((int)(0x80)) : unchecked((int
		)(0x00))) | (ldr.GetType() << 4) | (len & unchecked((int)(0x0F)))));
	len = rest;
	// Remaining size bits go out 7 at a time, little-endian, with the MSB
	// set while more bytes follow.
	while (len > 0)
	{
		rest = (int)(((uint)len) >> 7);
		header[idx++] = unchecked((byte)((rest > 0 ? unchecked((int)(0x80)) : unchecked((
			int)(0x00))) | (len & unchecked((int)(0x7F)))));
		len = rest;
	}
	tinyPack.Write(header, 0, idx);
	Deflate(tinyPack, content);
}
/// <summary>
/// Hash an object's content into the similarity index, streaming when the
/// loader reports it as large.
/// </summary>
/// <exception cref="NGit.Errors.MissingObjectException"></exception>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Diff.SimilarityIndex.TableFullException"></exception>
internal virtual void Hash(ObjectLoader obj)
{
	if (!obj.IsLarge())
	{
		// Small object: the whole content is already in memory.
		byte[] raw = obj.GetCachedBytes();
		SetFileSize(raw.Length);
		Hash(raw, 0, raw.Length);
		return;
	}
	// Large object: stream it, always closing the stream.
	ObjectStream @in = obj.OpenStream();
	try
	{
		SetFileSize(@in.GetSize());
		Hash(@in, fileSize);
	}
	finally
	{
		@in.Close();
	}
}
public virtual void TestObjectMovedToNewPack2()
{
	// Create an object and pack it. Then remove that pack and put the
	// object into a different pack file, with some other object. We
	// still should be able to access the objects.
	//
	Repository eden = CreateBareRepository();
	RevObject o1 = WriteBlob(eden, "o1");
	FilePath[] out1 = Pack(eden, o1);
	NUnit.Framework.Assert.AreEqual(o1.Name, Parse(o1).Name);
	ObjectLoader firstLoader = db.Open(o1, Constants.OBJ_BLOB);
	NUnit.Framework.Assert.IsNotNull(firstLoader);
	RevObject o2 = WriteBlob(eden, "o2");
	Pack(eden, o2, o1);
	// Force close, and then delete, the old pack.
	//
	WhackCache();
	Delete(out1);
	// Now here is the interesting thing... can the loader we made
	// earlier still resolve the object, even though its underlying
	// pack is gone, but the object still exists.
	//
	ObjectLoader secondLoader = db.Open(o1, Constants.OBJ_BLOB);
	NUnit.Framework.Assert.IsNotNull(secondLoader);
	NUnit.Framework.Assert.AreNotSame(firstLoader, secondLoader);
	byte[] secondBytes = secondLoader.GetCachedBytes();
	byte[] firstBytes = firstLoader.GetCachedBytes();
	NUnit.Framework.Assert.IsNotNull(secondBytes);
	NUnit.Framework.Assert.IsNotNull(firstBytes);
	// cache should be per-pack, not per object
	NUnit.Framework.Assert.AreNotSame(firstBytes, secondBytes);
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(firstBytes, secondBytes));
	NUnit.Framework.Assert.AreEqual(secondLoader.GetType(), firstLoader.GetType());
}
/// <summary>
/// A blob above the streaming threshold must refuse GetCachedBytes() with a
/// LargeObjectException, yet still stream its full content.
/// </summary>
public virtual void TestWhole_LargeObject()
{
	int type = Constants.OBJ_BLOB;
	byte[] expected = GetRng().NextBytes(streamThreshold + 5);
	RevBlob id = tr.Blob(expected);
	tr.Branch("master").Commit().Add("A", id).Create();
	tr.PackAndPrune();
	NUnit.Framework.Assert.IsTrue(wc.Has(id), "has blob");
	ObjectLoader ol = wc.Open(id);
	NUnit.Framework.Assert.IsNotNull(ol, "created loader");
	NUnit.Framework.Assert.AreEqual(type, ol.GetType());
	NUnit.Framework.Assert.AreEqual(expected.Length, ol.GetSize());
	NUnit.Framework.Assert.IsTrue(ol.IsLarge(), "is large");
	// Cached access must be rejected for large objects.
	try
	{
		ol.GetCachedBytes();
		NUnit.Framework.Assert.Fail("Should have thrown LargeObjectException");
	}
	catch (LargeObjectException tooBig)
	{
		NUnit.Framework.Assert.AreEqual(MessageFormat.Format(JGitText.Get().largeObjectException
			, id.Name), tooBig.Message);
	}
	// Streaming access must still deliver the full content.
	ObjectStream @in = ol.OpenStream();
	NUnit.Framework.Assert.IsNotNull(@in, "have stream");
	NUnit.Framework.Assert.AreEqual(type, @in.GetType());
	NUnit.Framework.Assert.AreEqual(expected.Length, @in.GetSize());
	byte[] actual = new byte[expected.Length];
	IOUtil.ReadFully(@in, actual, 0, expected.Length);
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(actual, expected), "same content");
	NUnit.Framework.Assert.AreEqual(-1, @in.Read(), "stream at EOF");
	@in.Close();
}
/// <summary>Load this candidate's source blob into <c>sourceText</c>.</summary>
/// <exception cref="System.IO.IOException"></exception>
internal virtual void LoadText(ObjectReader reader)
{
	ObjectLoader blob = reader.Open(sourceBlob, Constants.OBJ_BLOB);
	byte[] raw = blob.GetCachedBytes(int.MaxValue);
	sourceText = new RawText(raw);
}