/// <summary>
/// A small loose object in standard (zlib-deflated) format must load whole,
/// expose correct type/size, and stream back its exact content.
/// </summary>
public virtual void TestStandardFormat_SmallObject()
{
    int type = Constants.OBJ_BLOB;
    byte[] expected = GetRng().NextBytes(300);
    byte[] compressed = CompressStandardFormat(type, expected);
    ObjectId id = ObjectId.ZeroId;
    ObjectLoader loader = UnpackedObject.Open(new ByteArrayInputStream(compressed), Path(id), id, wc);
    NUnit.Framework.Assert.IsNotNull(loader, "created loader");
    NUnit.Framework.Assert.AreEqual(type, loader.GetType());
    NUnit.Framework.Assert.AreEqual(expected.Length, loader.GetSize());
    // 300 bytes is well below the streaming threshold, so the loader caches it.
    NUnit.Framework.Assert.IsFalse(loader.IsLarge(), "is not large");
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(expected, loader.GetCachedBytes()), "same content"
        );
    // The streaming path must agree with the cached path.
    ObjectStream stream = loader.OpenStream();
    NUnit.Framework.Assert.IsNotNull(stream, "have stream");
    NUnit.Framework.Assert.AreEqual(type, stream.GetType());
    NUnit.Framework.Assert.AreEqual(expected.Length, stream.GetSize());
    byte[] actual = new byte[expected.Length];
    IOUtil.ReadFully(stream, actual, 0, expected.Length);
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(actual, expected), "same content");
    NUnit.Framework.Assert.AreEqual(-1, stream.Read(), "stream at EOF");
    stream.Close();
}
/// <summary>
/// A small blob stored whole in a pack must load as a cached (non-large)
/// object and stream back its exact content.
/// </summary>
public virtual void TestWhole_SmallObject()
{
    int type = Constants.OBJ_BLOB;
    byte[] expected = GetRng().NextBytes(300);
    RevBlob id = tr.Blob(expected);
    // Commit the blob and repack so it is read back from pack storage.
    tr.Branch("master").Commit().Add("A", id).Create();
    tr.PackAndPrune();
    NUnit.Framework.Assert.IsTrue(wc.Has(id), "has blob");
    ObjectLoader loader = wc.Open(id);
    NUnit.Framework.Assert.IsNotNull(loader, "created loader");
    NUnit.Framework.Assert.AreEqual(type, loader.GetType());
    NUnit.Framework.Assert.AreEqual(expected.Length, loader.GetSize());
    NUnit.Framework.Assert.IsFalse(loader.IsLarge(), "is not large");
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(expected, loader.GetCachedBytes()), "same content"
        );
    // The streaming path must agree with the cached path.
    ObjectStream stream = loader.OpenStream();
    NUnit.Framework.Assert.IsNotNull(stream, "have stream");
    NUnit.Framework.Assert.AreEqual(type, stream.GetType());
    NUnit.Framework.Assert.AreEqual(expected.Length, stream.GetSize());
    byte[] actual = new byte[expected.Length];
    IOUtil.ReadFully(stream, actual, 0, expected.Length);
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(actual, expected), "same content");
    NUnit.Framework.Assert.AreEqual(-1, stream.Read(), "stream at EOF");
    stream.Close();
}
/// <summary>
/// Builds a pack containing a base blob plus a chain of three REF_DELTA
/// objects, indexes it, and verifies the deepest delta resolves to the
/// expected fully-inflated content.
/// </summary>
public virtual void TestDelta_SmallObjectChain()
{
    ObjectInserter.Formatter fmt = new ObjectInserter.Formatter();
    // Base object: 512 bytes of 0xf3.
    byte[] data0 = new byte[512];
    Arrays.Fill(data0, unchecked((byte)unchecked((int)(0xf3))));
    ObjectId id0 = fmt.IdFor(Constants.OBJ_BLOB, data0);
    TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(64 * 1024);
    PackHeader(pack, 4);
    ObjectHeader(pack, Constants.OBJ_BLOB, data0.Length);
    Deflate(pack, data0);
    // First delta: data1 derived from data0.
    byte[] data1 = Clone(unchecked((int)(0x01)), data0);
    byte[] delta1 = Delta(data0, data1);
    ObjectId id1 = fmt.IdFor(Constants.OBJ_BLOB, data1);
    ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta1.Length);
    id0.CopyRawTo(pack);
    Deflate(pack, delta1);
    // Second delta: data2 derived from data1.
    byte[] data2 = Clone(unchecked((int)(0x02)), data1);
    byte[] delta2 = Delta(data1, data2);
    ObjectId id2 = fmt.IdFor(Constants.OBJ_BLOB, data2);
    ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta2.Length);
    id1.CopyRawTo(pack);
    Deflate(pack, delta2);
    // Third delta: data3 derived from data2 — a three-deep chain.
    byte[] data3 = Clone(unchecked((int)(0x03)), data2);
    byte[] delta3 = Delta(data2, data3);
    ObjectId id3 = fmt.IdFor(Constants.OBJ_BLOB, data3);
    ObjectHeader(pack, Constants.OBJ_REF_DELTA, delta3.Length);
    id2.CopyRawTo(pack);
    Deflate(pack, delta3);
    Digest(pack);
    PackParser ip = Index(pack.ToByteArray());
    ip.SetAllowThin(true);
    ip.Parse(NullProgressMonitor.INSTANCE);
    NUnit.Framework.Assert.IsTrue(wc.Has(id3), "has blob");
    ObjectLoader ol = wc.Open(id3);
    NUnit.Framework.Assert.IsNotNull(ol, "created loader");
    NUnit.Framework.Assert.AreEqual(Constants.OBJ_BLOB, ol.GetType());
    NUnit.Framework.Assert.AreEqual(data3.Length, ol.GetSize());
    // Fixed: message previously said "is large" on an IsFalse check; siblings say "is not large".
    NUnit.Framework.Assert.IsFalse(ol.IsLarge(), "is not large");
    NUnit.Framework.Assert.IsNotNull(ol.GetCachedBytes());
    // Fixed: CollectionAssert.AreEquivalent ignores byte order; compare content
    // byte-for-byte like every other test in this fixture.
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(data3, ol.GetCachedBytes()), "same content");
    ObjectStream @in = ol.OpenStream();
    NUnit.Framework.Assert.IsNotNull(@in, "have stream");
    NUnit.Framework.Assert.AreEqual(Constants.OBJ_BLOB, @in.GetType());
    NUnit.Framework.Assert.AreEqual(data3.Length, @in.GetSize());
    byte[] act = new byte[data3.Length];
    IOUtil.ReadFully(@in, act, 0, data3.Length);
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(act, data3), "same content");
    NUnit.Framework.Assert.AreEqual(-1, @in.Read(), "stream at EOF");
    @in.Close();
}
/// <summary>
/// Updates the file in the working tree with content and mode from an entry
/// in the index.
/// </summary>
/// <remarks>
/// Updates the file in the working tree with content and mode from an entry
/// in the index. The new content is first written to a new temporary file in
/// the same directory as the real file. Then that new file is renamed to the
/// final filename. If anything fails, the temporary file is removed so no
/// stray "._*" files accumulate in the working tree.
/// TODO: this method works directly on File IO, we may need another
/// abstraction (like WorkingTreeIterator). This way we could tell e.g.
/// Eclipse that Files in the workspace got changed
/// </remarks>
/// <param name="repo"></param>
/// <param name="f">
/// the file to be modified. The parent directory for this file
/// has to exist already
/// </param>
/// <param name="entry">the entry containing new mode and content</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public static void CheckoutEntry(Repository repo, FilePath f, DirCacheEntry entry
    )
{
    ObjectLoader ol = repo.Open(entry.GetObjectId());
    FilePath parentDir = f.GetParentFile();
    FilePath tmpFile = FilePath.CreateTempFile("._" + f.GetName(), null, parentDir);
    try
    {
        FileOutputStream channel = new FileOutputStream(tmpFile);
        try
        {
            ol.CopyTo(channel);
        }
        finally
        {
            channel.Close();
        }
        FS fs = repo.FileSystem;
        WorkingTreeOptions opt = repo.GetConfig().Get(WorkingTreeOptions.KEY);
        if (opt.IsFileMode() && fs.SupportsExecute())
        {
            // Align the execute bit of the temp file with the index entry's mode
            // before it becomes visible under the final name.
            if (FileMode.EXECUTABLE_FILE.Equals(entry.RawMode))
            {
                if (!fs.CanExecute(tmpFile))
                {
                    fs.SetExecute(tmpFile, true);
                }
            }
            else
            {
                if (fs.CanExecute(tmpFile))
                {
                    fs.SetExecute(tmpFile, false);
                }
            }
        }
        if (!tmpFile.RenameTo(f))
        {
            // Rename failed. Let's delete the target file and try again.
            FileUtils.Delete(f);
            if (!tmpFile.RenameTo(f))
            {
                throw new IOException(MessageFormat.Format(JGitText.Get().couldNotWriteFile, tmpFile
                    .GetPath(), f.GetPath()));
            }
        }
    }
    catch (IOException)
    {
        // Fixed: previously the temp file was leaked when CopyTo or the
        // second rename failed. Best-effort cleanup; the original error
        // is what the caller needs to see.
        try
        {
            FileUtils.Delete(tmpFile);
        }
        catch (IOException)
        {
        }
        // ignore cleanup failure, rethrow the original cause
        throw;
    }
    entry.LastModified = f.LastModified();
    // NOTE(review): size is narrowed to int here; files > 2 GiB would
    // overflow — preserved from the original, confirm upstream contract.
    entry.SetLength((int)ol.GetSize());
}
/// <summary>
/// Merges two conflicting notes on the same object by concatenating their
/// blob contents into a single new blob. Trivial cases (one side absent, or
/// both sides pointing at the same blob) resolve without reading content.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
public virtual Note Merge(Note @base, Note ours, Note theirs, ObjectReader reader
    , ObjectInserter inserter)
{
    if (ours == null)
    {
        return theirs;
    }
    if (theirs == null)
    {
        return ours;
    }
    if (ours.GetData().Equals(theirs.GetData()))
    {
        return ours;
    }
    ObjectLoader lo = reader.Open(ours.GetData());
    ObjectLoader lt = reader.Open(theirs.GetData());
    UnionInputStream union = new UnionInputStream(lo.OpenStream(), lt.OpenStream());
    try
    {
        // Insert fully consumes the concatenated stream of both blobs.
        ObjectId noteData = inserter.Insert(Constants.OBJ_BLOB, lo.GetSize() + lt.GetSize
            (), union);
        return new Note(ours, noteData);
    }
    finally
    {
        // Fixed: the union stream (and the two ObjectStreams it wraps) was
        // previously never closed, leaking the underlying readers.
        union.Close();
    }
}
/// <summary>
/// A blob just over the streaming threshold must report IsLarge, refuse
/// GetCachedBytes with LargeObjectException, and stream its exact content.
/// </summary>
public virtual void TestWhole_LargeObject()
{
    int type = Constants.OBJ_BLOB;
    // One past the threshold forces the streaming (large-object) path.
    byte[] expected = GetRng().NextBytes(streamThreshold + 5);
    RevBlob id = tr.Blob(expected);
    tr.Branch("master").Commit().Add("A", id).Create();
    tr.PackAndPrune();
    NUnit.Framework.Assert.IsTrue(wc.Has(id), "has blob");
    ObjectLoader loader = wc.Open(id);
    NUnit.Framework.Assert.IsNotNull(loader, "created loader");
    NUnit.Framework.Assert.AreEqual(type, loader.GetType());
    NUnit.Framework.Assert.AreEqual(expected.Length, loader.GetSize());
    NUnit.Framework.Assert.IsTrue(loader.IsLarge(), "is large");
    // Caching the whole object must be rejected with the proper message.
    try
    {
        loader.GetCachedBytes();
        NUnit.Framework.Assert.Fail("Should have thrown LargeObjectException");
    }
    catch (LargeObjectException tooBig)
    {
        NUnit.Framework.Assert.AreEqual(MessageFormat.Format(JGitText.Get().largeObjectException
            , id.Name), tooBig.Message);
    }
    // Streaming must still deliver the full content.
    ObjectStream stream = loader.OpenStream();
    NUnit.Framework.Assert.IsNotNull(stream, "have stream");
    NUnit.Framework.Assert.AreEqual(type, stream.GetType());
    NUnit.Framework.Assert.AreEqual(expected.Length, stream.GetSize());
    byte[] actual = new byte[expected.Length];
    IOUtil.ReadFully(stream, actual, 0, expected.Length);
    NUnit.Framework.Assert.IsTrue(Arrays.Equals(actual, expected), "same content");
    NUnit.Framework.Assert.AreEqual(-1, stream.Read(), "stream at EOF");
    stream.Close();
}