/// <summary>
/// Reads a small loose object stored in the standard (zlib) format and
/// verifies both the cached-bytes and the streaming access paths.
/// </summary>
public virtual void TestStandardFormat_SmallObject()
{
	int type = Constants.OBJ_BLOB;
	byte[] data = GetRng().NextBytes(300);
	byte[] gz = CompressStandardFormat(type, data);
	ObjectId id = ObjectId.ZeroId;
	ObjectLoader loader = UnpackedObject.Open(new ByteArrayInputStream(gz), Path(id), id, wc);
	NUnit.Framework.Assert.IsNotNull(loader, "created loader");
	NUnit.Framework.Assert.AreEqual(type, loader.GetType());
	NUnit.Framework.Assert.AreEqual(data.Length, loader.GetSize());
	NUnit.Framework.Assert.IsFalse(loader.IsLarge(), "is not large");
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(data, loader.GetCachedBytes()), "same content");
	// Now verify the same bytes come back through the stream interface.
	ObjectStream stream = loader.OpenStream();
	NUnit.Framework.Assert.IsNotNull(stream, "have stream");
	NUnit.Framework.Assert.AreEqual(type, stream.GetType());
	NUnit.Framework.Assert.AreEqual(data.Length, stream.GetSize());
	byte[] roundTrip = new byte[data.Length];
	IOUtil.ReadFully(stream, roundTrip, 0, data.Length);
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(roundTrip, data), "same content");
	NUnit.Framework.Assert.AreEqual(-1, stream.Read(), "stream at EOF");
	stream.Close();
}
/// <summary>
/// Stores a small blob, repacks the repository, and verifies the whole
/// (non-delta) packed object reads back correctly via cache and stream.
/// </summary>
public virtual void TestWhole_SmallObject()
{
	int type = Constants.OBJ_BLOB;
	byte[] data = GetRng().NextBytes(300);
	RevBlob id = tr.Blob(data);
	tr.Branch("master").Commit().Add("A", id).Create();
	tr.PackAndPrune();
	NUnit.Framework.Assert.IsTrue(wc.Has(id), "has blob");
	ObjectLoader loader = wc.Open(id);
	NUnit.Framework.Assert.IsNotNull(loader, "created loader");
	NUnit.Framework.Assert.AreEqual(type, loader.GetType());
	NUnit.Framework.Assert.AreEqual(data.Length, loader.GetSize());
	NUnit.Framework.Assert.IsFalse(loader.IsLarge(), "is not large");
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(data, loader.GetCachedBytes()), "same content");
	// Verify the streaming path yields the identical bytes.
	ObjectStream stream = loader.OpenStream();
	NUnit.Framework.Assert.IsNotNull(stream, "have stream");
	NUnit.Framework.Assert.AreEqual(type, stream.GetType());
	NUnit.Framework.Assert.AreEqual(data.Length, stream.GetSize());
	byte[] roundTrip = new byte[data.Length];
	IOUtil.ReadFully(stream, roundTrip, 0, data.Length);
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(roundTrip, data), "same content");
	NUnit.Framework.Assert.AreEqual(-1, stream.Read(), "stream at EOF");
	stream.Close();
}
/// <summary>
/// Opens a stream over this object's content, preferring an existing loose
/// copy; otherwise streams from the pack while writing a loose copy back.
/// </summary>
/// <exception cref="NGit.Errors.MissingObjectException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public override ObjectStream OpenStream()
{
	// If the object was recently unpacked, its available loose.
	// The loose format is going to be faster to access than a
	// delta applied on top of a base. Use that whenever we can.
	//
	ObjectId myId = GetObjectId();
	WindowCursor wc = new WindowCursor(db);
	ObjectLoader ldr = db.OpenObject2(wc, myId.Name, myId);
	if (ldr != null)
	{
		return(ldr.OpenStream());
	}
	// No loose copy available: read from the pack instead.
	InputStream @in = Open(wc);
	@in = new BufferedInputStream(@in, 8192);
	// While we inflate the object, also deflate it back as a loose
	// object. This will later be cleaned up by a gc pass, but until
	// then we will reuse the loose form by the above code path.
	//
	int myType = GetType();
	long mySize = GetSize();
	ObjectDirectoryInserter odi = ((ObjectDirectoryInserter)db.NewInserter());
	FilePath tmp = odi.NewTempFile();
	DeflaterOutputStream dOut = odi.Compress(new FileOutputStream(tmp));
	odi.WriteHeader(dOut, myType, mySize);
	// Tee: every byte the caller reads is simultaneously deflated into tmp.
	@in = new TeeInputStream(@in, dOut);
	// NOTE(review): _Filter_195 is a generated wrapper; presumably it moves
	// tmp into place as the loose object once the stream is fully consumed
	// and closed — its body is not visible here, confirm before relying on it.
	return(new _Filter_195(this, odi, wc, tmp, myId, myType, mySize, @in));
}
/// <summary>
/// Opens a stream over the loader's content, or returns null when the
/// loader itself is null (per <c>notNull()</c>).
/// </summary>
public static ObjectStream stream(this ObjectLoader objectLoader)
{
	return objectLoader.notNull() ? objectLoader.OpenStream() : null;
}
/// <summary>
/// Builds a pack containing a base blob followed by a chain of three
/// REF_DELTA objects, indexes and parses it, then verifies the fully
/// resolved tip of the chain reads back correctly (cached and streamed).
/// </summary>
public virtual void TestDelta_SmallObjectChain()
{
	ObjectInserter.Formatter fmt = new ObjectInserter.Formatter();
	// Base object: 512 bytes of 0xf3.
	byte[] data0 = new byte[512];
	Arrays.Fill(data0, unchecked((byte)unchecked((int)(0xf3))));
	ObjectId id0 = fmt.IdFor(Constants.OBJ_BLOB, data0);
	TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(64 * 1024);
	PackHeader(pack, 4);
	ObjectHeader(pack, Constants.OBJ_BLOB, data0.Length);
	Deflate(pack, data0);
	// Delta 1: data1 derived from data0.
	byte[] data1 = Clone(unchecked((int)(0x01)), data0);
	byte[] delta1 = Delta(data0, data1);
	ObjectId id1 = fmt.IdFor(Constants.OBJ_BLOB, data1);
	ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta1.Length);
	id0.CopyRawTo(pack);
	Deflate(pack, delta1);
	// Delta 2: data2 derived from data1.
	byte[] data2 = Clone(unchecked((int)(0x02)), data1);
	byte[] delta2 = Delta(data1, data2);
	ObjectId id2 = fmt.IdFor(Constants.OBJ_BLOB, data2);
	ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta2.Length);
	id1.CopyRawTo(pack);
	Deflate(pack, delta2);
	// Delta 3: data3 derived from data2 — the tip of the chain.
	byte[] data3 = Clone(unchecked((int)(0x03)), data2);
	byte[] delta3 = Delta(data2, data3);
	ObjectId id3 = fmt.IdFor(Constants.OBJ_BLOB, data3);
	ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta3.Length);
	id2.CopyRawTo(pack);
	Deflate(pack, delta3);
	Digest(pack);
	PackParser ip = Index(pack.ToByteArray());
	ip.SetAllowThin(true);
	ip.Parse(NullProgressMonitor.INSTANCE);
	NUnit.Framework.Assert.IsTrue(wc.Has(id3), "has blob");
	ObjectLoader ol = wc.Open(id3);
	NUnit.Framework.Assert.IsNotNull(ol, "created loader");
	NUnit.Framework.Assert.AreEqual(Constants.OBJ_BLOB, ol.GetType());
	NUnit.Framework.Assert.AreEqual(data3.Length, ol.GetSize());
	// Fixed: message previously read "is large", the inverse of what is asserted.
	NUnit.Framework.Assert.IsFalse(ol.IsLarge(), "is not large");
	NUnit.Framework.Assert.IsNotNull(ol.GetCachedBytes());
	// Fixed: CollectionAssert.AreEquivalent ignores element order, which is
	// too weak for byte content; compare exactly, as the sibling tests do.
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(data3, ol.GetCachedBytes()), "same content");
	ObjectStream @in = ol.OpenStream();
	NUnit.Framework.Assert.IsNotNull(@in, "have stream");
	NUnit.Framework.Assert.AreEqual(Constants.OBJ_BLOB, @in.GetType());
	NUnit.Framework.Assert.AreEqual(data3.Length, @in.GetSize());
	byte[] act = new byte[data3.Length];
	IOUtil.ReadFully(@in, act, 0, data3.Length);
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(act, data3), "same content");
	NUnit.Framework.Assert.AreEqual(-1, @in.Read(), "stream at EOF");
	@in.Close();
}
/// <summary>
/// Reads the loader's entire content as text and returns it.
/// The StreamReader owns and disposes the underlying object stream.
/// </summary>
public static string ReadFile(ObjectLoader loader)
{
	using (System.IO.StreamReader reader = new System.IO.StreamReader(loader.OpenStream()))
	{
		return reader.ReadToEnd();
	}
}
/// <summary>
/// Hashes the object's content into this index, recording its file size.
/// Small objects are hashed from the cached byte array; large objects are
/// streamed and the stream is always closed afterwards.
/// </summary>
/// <exception cref="NGit.Errors.MissingObjectException"></exception>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Diff.SimilarityIndex.TableFullException"></exception>
internal virtual void Hash(ObjectLoader obj)
{
	if (!obj.IsLarge())
	{
		byte[] cached = obj.GetCachedBytes();
		SetFileSize(cached.Length);
		Hash(cached, 0, cached.Length);
		return;
	}
	ObjectStream stream = obj.OpenStream();
	try
	{
		SetFileSize(stream.GetSize());
		Hash(stream, fileSize);
	}
	finally
	{
		stream.Close();
	}
}
/// <summary>
/// Merges two notes by concatenating their blob contents. When one side is
/// missing, or both carry identical data, the surviving note is returned
/// unchanged; otherwise a new blob is inserted holding ours followed by theirs.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
public virtual Note Merge(Note @base, Note ours, Note theirs, ObjectReader reader
	, ObjectInserter inserter)
{
	if (ours == null)
	{
		return theirs;
	}
	if (theirs == null)
	{
		return ours;
	}
	if (ours.GetData().Equals(theirs.GetData()))
	{
		return ours;
	}
	ObjectLoader ourLoader = reader.Open(ours.GetData());
	ObjectLoader theirLoader = reader.Open(theirs.GetData());
	long combinedSize = ourLoader.GetSize() + theirLoader.GetSize();
	UnionInputStream concatenated = new UnionInputStream(ourLoader.OpenStream(),
		theirLoader.OpenStream());
	ObjectId noteData = inserter.Insert(Constants.OBJ_BLOB, combinedSize, concatenated);
	return new Note(ours, noteData);
}
/// <summary>
/// Stores a blob just over the streaming threshold, repacks, and verifies
/// that cached access is refused with LargeObjectException while the
/// streaming path still delivers the full content.
/// </summary>
public virtual void TestWhole_LargeObject()
{
	int type = Constants.OBJ_BLOB;
	byte[] data = GetRng().NextBytes(streamThreshold + 5);
	RevBlob id = tr.Blob(data);
	tr.Branch("master").Commit().Add("A", id).Create();
	tr.PackAndPrune();
	NUnit.Framework.Assert.IsTrue(wc.Has(id), "has blob");
	ObjectLoader loader = wc.Open(id);
	NUnit.Framework.Assert.IsNotNull(loader, "created loader");
	NUnit.Framework.Assert.AreEqual(type, loader.GetType());
	NUnit.Framework.Assert.AreEqual(data.Length, loader.GetSize());
	NUnit.Framework.Assert.IsTrue(loader.IsLarge(), "is large");
	// Cached access must be rejected for objects above the threshold.
	try
	{
		loader.GetCachedBytes();
		NUnit.Framework.Assert.Fail("Should have thrown LargeObjectException");
	}
	catch (LargeObjectException tooBig)
	{
		NUnit.Framework.Assert.AreEqual(MessageFormat.Format(JGitText.Get().largeObjectException
			, id.Name), tooBig.Message);
	}
	// Streaming access must still succeed.
	ObjectStream stream = loader.OpenStream();
	NUnit.Framework.Assert.IsNotNull(stream, "have stream");
	NUnit.Framework.Assert.AreEqual(type, stream.GetType());
	NUnit.Framework.Assert.AreEqual(data.Length, stream.GetSize());
	byte[] roundTrip = new byte[data.Length];
	IOUtil.ReadFully(stream, roundTrip, 0, data.Length);
	NUnit.Framework.Assert.IsTrue(Arrays.Equals(roundTrip, data), "same content");
	NUnit.Framework.Assert.AreEqual(-1, stream.Read(), "stream at EOF");
	stream.Close();
}
/// <summary>
/// Reads the contents of <paramref name="fileName"/> as it existed in the
/// commit identified by <paramref name="commitId"/>.
/// </summary>
/// <param name="commitId">hex SHA-1 of the commit to read from.</param>
/// <param name="fileName">file path in this application's naming; converted
/// via GetGitFriendlyName before matching against the tree.</param>
/// <returns>
/// The file's text; an empty string when the path is not present in the
/// commit's tree; null when the commit cannot be parsed.
/// </returns>
public string ReadCommit(string commitId, string fileName)
{
	var repo = m_git.GetRepository();
	var id = ObjectId.FromString(commitId);
	RevCommit commit = null;
	try
	{
		commit = ParseCommit(repo, id);
		if (commit == null)
		{
			return null;
		}
	}
	catch (Exception ex)
	{
		// Best effort: an unreadable or missing commit yields null rather
		// than propagating the exception to the caller.
		Trace.WriteLine(ex.Message);
		return null;
	}
	TreeWalk walk = new TreeWalk(repo);
	walk.AddTree(commit.Tree);
	var filter = GetGitFriendlyName(fileName);
	walk.Filter = PathFilterGroup.CreateFromStrings(new string[] { filter });
	while (walk.Next())
	{
		var path = walk.PathString;
		if (walk.IsSubtree)
		{
			// Descend into directories; the path filter prunes siblings.
			walk.EnterSubtree();
			continue;
		}
		if (path == filter)
		{
			var cur = walk.GetObjectId(0);
			ObjectLoader ol = repo.Open(cur);
			using (var stream = ol.OpenStream())
			using (var sr = new System.IO.StreamReader(stream))
			{
				return sr.ReadToEnd();
			}
		}
	}
	// Path not found in this commit's tree.
	return "";
}