public virtual void TestContructor()
{
    writer = new PackWriter(config, db.NewObjectReader());
    NUnit.Framework.Assert.AreEqual(false, writer.IsDeltaBaseAsOffset());
    NUnit.Framework.Assert.AreEqual(true, config.IsReuseDeltas());
    NUnit.Framework.Assert.AreEqual(true, config.IsReuseObjects());
    NUnit.Framework.Assert.AreEqual(0, writer.GetObjectCount());
}
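// A minimal usage sketch, not part of NGit itself: it assumes a Repository `db`
// is available (as in the test fixture above) and shows the defaults asserted
// by the test being set explicitly. The class name is illustrative; the
// PackConfig setters are assumed to mirror the IsReuseDeltas/IsReuseObjects
// getters exercised above.
using NGit;
using NGit.Storage.Pack;

public static class PackWriterDefaultsSketch
{
    public static void Configure(Repository db)
    {
        PackConfig config = new PackConfig(db);
        config.SetReuseDeltas(true);        // true by default, per the assertion above
        config.SetReuseObjects(true);       // true by default, per the assertion above
        PackWriter writer = new PackWriter(config, db.NewObjectReader());
        try
        {
            // false by default, per the assertion above; enable it explicitly
            writer.SetDeltaBaseAsOffset(true);
        }
        finally
        {
            writer.Release();               // always release the writer's resources
        }
    }
}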
/// <exception cref="NGit.Errors.MissingObjectException"></exception> /// <exception cref="System.IO.IOException"></exception> private void CreateVerifyOpenPack(IList <RevObject> objectSource) { NullProgressMonitor m = NullProgressMonitor.INSTANCE; writer = new PackWriter(config, db.NewObjectReader()); writer.PreparePack(objectSource.Iterator()); NUnit.Framework.Assert.AreEqual(objectSource.Count, writer.GetObjectCount()); writer.WritePack(m, m, os); writer.Release(); VerifyOpenPack(false); }
/// <exception cref="NGit.Errors.TransportException"></exception> private void Sendpack(IList <RemoteRefUpdate> updates, ProgressMonitor monitor) { string pathPack = null; string pathIdx = null; PackWriter writer = new PackWriter(transport.GetPackConfig(), local.NewObjectReader ()); try { ICollection <ObjectId> need = new HashSet <ObjectId>(); ICollection <ObjectId> have = new HashSet <ObjectId>(); foreach (RemoteRefUpdate r in updates) { need.AddItem(r.GetNewObjectId()); } foreach (Ref r_1 in GetRefs()) { have.AddItem(r_1.GetObjectId()); if (r_1.GetPeeledObjectId() != null) { have.AddItem(r_1.GetPeeledObjectId()); } } writer.PreparePack(monitor, need, have); // We don't have to continue further if the pack will // be an empty pack, as the remote has all objects it // needs to complete this change. // if (writer.GetObjectCount() == 0) { return; } packNames = new LinkedHashMap <string, string>(); foreach (string n in dest.GetPackNames()) { packNames.Put(n, n); } string @base = "pack-" + writer.ComputeName().Name; string packName = @base + ".pack"; pathPack = "pack/" + packName; pathIdx = "pack/" + @base + ".idx"; if (Sharpen.Collections.Remove(packNames, packName) != null) { // The remote already contains this pack. We should // remove the index before overwriting to prevent bad // offsets from appearing to clients. // dest.WriteInfoPacks(packNames.Keys); dest.DeleteFile(pathIdx); } // Write the pack file, then the index, as readers look the // other direction (index, then pack file). // string wt = "Put " + Sharpen.Runtime.Substring(@base, 0, 12); OutputStream os = dest.WriteFile(pathPack, monitor, wt + "..pack"); try { os = new SafeBufferedOutputStream(os); writer.WritePack(monitor, monitor, os); } finally { os.Close(); } os = dest.WriteFile(pathIdx, monitor, wt + "..idx"); try { os = new SafeBufferedOutputStream(os); writer.WriteIndex(os); } finally { os.Close(); } // Record the pack at the start of the pack info list. This // way clients are likely to consult the newest pack first, // and discover the most recent objects there. // AList <string> infoPacks = new AList <string>(); infoPacks.AddItem(packName); Sharpen.Collections.AddAll(infoPacks, packNames.Keys); dest.WriteInfoPacks(infoPacks); } catch (IOException err) { SafeDelete(pathIdx); SafeDelete(pathPack); throw new TransportException(uri, JGitText.Get().cannotStoreObjects, err); } finally { writer.Release(); } }
/// <exception cref="System.IO.IOException"></exception> private PackFile WritePack <_T0, _T1>(ICollection <_T0> want, ICollection <_T1> have , ICollection <ObjectId> tagTargets, IList <PackIndex> excludeObjects) where _T0 : ObjectId where _T1 : ObjectId { FilePath tmpPack = null; FilePath tmpIdx = null; PackWriter pw = new PackWriter(repo); try { // prepare the PackWriter pw.SetDeltaBaseAsOffset(true); pw.SetReuseDeltaCommits(false); if (tagTargets != null) { pw.SetTagTargets(tagTargets); } if (excludeObjects != null) { foreach (PackIndex idx in excludeObjects) { pw.ExcludeObjects(idx); } } pw.PreparePack(pm, want, have); if (pw.GetObjectCount() == 0) { return(null); } // create temporary files string id = pw.ComputeName().GetName(); FilePath packdir = new FilePath(repo.ObjectsDirectory, "pack"); tmpPack = FilePath.CreateTempFile("gc_", ".pack_tmp", packdir); tmpIdx = new FilePath(packdir, Sharpen.Runtime.Substring(tmpPack.GetName(), 0, tmpPack .GetName().LastIndexOf('.')) + ".idx_tmp"); if (!tmpIdx.CreateNewFile()) { throw new IOException(MessageFormat.Format(JGitText.Get().cannotCreateIndexfile, tmpIdx.GetPath())); } // write the packfile FileChannel channel = new FileOutputStream(tmpPack).GetChannel(); OutputStream channelStream = Channels.NewOutputStream(channel); try { pw.WritePack(pm, pm, channelStream); } finally { channel.Force(true); channelStream.Close(); channel.Close(); } // write the packindex FileChannel idxChannel = new FileOutputStream(tmpIdx).GetChannel(); OutputStream idxStream = Channels.NewOutputStream(idxChannel); try { pw.WriteIndex(idxStream); } finally { idxChannel.Force(true); idxStream.Close(); idxChannel.Close(); } // rename the temporary files to real files FilePath realPack = NameFor(id, ".pack"); tmpPack.SetReadOnly(); FilePath realIdx = NameFor(id, ".idx"); realIdx.SetReadOnly(); bool delete = true; try { if (!tmpPack.RenameTo(realPack)) { return(null); } delete = false; if (!tmpIdx.RenameTo(realIdx)) { FilePath newIdx = new FilePath(realIdx.GetParentFile(), realIdx.GetName() + ".new" ); if (!tmpIdx.RenameTo(newIdx)) { newIdx = tmpIdx; } throw new IOException(MessageFormat.Format(JGitText.Get().panicCantRenameIndexFile , newIdx, realIdx)); } } finally { if (delete && tmpPack.Exists()) { tmpPack.Delete(); } if (delete && tmpIdx.Exists()) { tmpIdx.Delete(); } } return(((ObjectDirectory)repo.ObjectDatabase).OpenPack(realPack, realIdx)); } finally { pw.Release(); if (tmpPack != null && tmpPack.Exists()) { tmpPack.Delete(); } if (tmpIdx != null && tmpIdx.Exists()) { tmpIdx.Delete(); } } }
/// <exception cref="NGit.Errors.TransportException"></exception> private void Sendpack(IList<RemoteRefUpdate> updates, ProgressMonitor monitor) { string pathPack = null; string pathIdx = null; PackWriter writer = new PackWriter(transport.GetPackConfig(), local.NewObjectReader ()); try { IList<ObjectId> need = new AList<ObjectId>(); IList<ObjectId> have = new AList<ObjectId>(); foreach (RemoteRefUpdate r in updates) { need.AddItem(r.GetNewObjectId()); } foreach (Ref r_1 in GetRefs()) { have.AddItem(r_1.GetObjectId()); if (r_1.GetPeeledObjectId() != null) { have.AddItem(r_1.GetPeeledObjectId()); } } writer.PreparePack(monitor, need, have); // We don't have to continue further if the pack will // be an empty pack, as the remote has all objects it // needs to complete this change. // if (writer.GetObjectCount() == 0) { return; } packNames = new LinkedHashMap<string, string>(); foreach (string n in dest.GetPackNames()) { packNames.Put(n, n); } string @base = "pack-" + writer.ComputeName().Name; string packName = @base + ".pack"; pathPack = "pack/" + packName; pathIdx = "pack/" + @base + ".idx"; if (Sharpen.Collections.Remove(packNames, packName) != null) { // The remote already contains this pack. We should // remove the index before overwriting to prevent bad // offsets from appearing to clients. // dest.WriteInfoPacks(packNames.Keys); dest.DeleteFile(pathIdx); } // Write the pack file, then the index, as readers look the // other direction (index, then pack file). // string wt = "Put " + Sharpen.Runtime.Substring(@base, 0, 12); OutputStream os = dest.WriteFile(pathPack, monitor, wt + "..pack"); try { os = new BufferedOutputStream(os); writer.WritePack(monitor, monitor, os); } finally { os.Close(); } os = dest.WriteFile(pathIdx, monitor, wt + "..idx"); try { os = new BufferedOutputStream(os); writer.WriteIndex(os); } finally { os.Close(); } // Record the pack at the start of the pack info list. This // way clients are likely to consult the newest pack first, // and discover the most recent objects there. // AList<string> infoPacks = new AList<string>(); infoPacks.AddItem(packName); Sharpen.Collections.AddAll(infoPacks, packNames.Keys); dest.WriteInfoPacks(infoPacks); } catch (IOException err) { SafeDelete(pathIdx); SafeDelete(pathPack); throw new TransportException(uri, JGitText.Get().cannotStoreObjects, err); } finally { writer.Release(); } }