Example #1
			public ProgressReportingFilter(ProgressMonitor monitor, int total)
			{
				this.monitor = monitor;
				this.total = total;
				stepSize = total / 100;
				if (stepSize == 0)
				{
					stepSize = 1000;
				}
			}
Example #2
        public MergeCommandResult Pop(NGit.ProgressMonitor monitor)
        {
            List <Stash>       stashes = ReadStashes();
            Stash              last    = stashes.Last();
            MergeCommandResult res     = last.Apply(monitor);

            if (res.GetMergeStatus() != MergeStatus.FAILED && res.GetMergeStatus() != MergeStatus.NOT_SUPPORTED)
            {
                Remove(stashes, last);
            }
            return(res);
        }
Example #3
			public ProgressReportingFilter(IndexDiff _enclosing, ProgressMonitor monitor, int
				 total)
			{
				this._enclosing = _enclosing;
				this.monitor = monitor;
				this.total = total;
				this.stepSize = total / 100;
				if (this.stepSize == 0)
				{
					this.stepSize = 1000;
				}
			}
Example #4
		internal BaseSearch(ProgressMonitor countingMonitor, ICollection<RevTree> bases, 
			ObjectIdOwnerMap<ObjectToPack> objects, IList<ObjectToPack> edges, ObjectReader 
			or)
		{
			progress = countingMonitor;
			reader = or;
			baseTrees = Sharpen.Collections.ToArray(bases, new ObjectId[bases.Count]);
			objectsMap = objects;
			edgeObjects = edges;
			alreadyProcessed = new IntSet();
			treeCache = new ObjectIdOwnerMap<BaseSearch.TreeWithData>();
			parser = new CanonicalTreeParser();
			idBuf = new MutableObjectId();
		}
Example #5
        internal MergeCommandResult Apply(NGit.ProgressMonitor monitor, Stash stash)
        {
            monitor.Start(1);
            monitor.BeginTask("Applying stash", 100);
            ObjectId  cid     = _repo.Resolve(stash.CommitId);
            RevWalk   rw      = new RevWalk(_repo);
            RevCommit wip     = rw.ParseCommit(cid);
            RevCommit oldHead = wip.Parents.First();

            rw.ParseHeaders(oldHead);
            MergeCommandResult res = GitUtil.MergeTrees(monitor, _repo, oldHead, wip, "Stash", false);

            monitor.EndTask();
            return(res);
        }
Example #6
 /// <summary>The progress monitor associated with the diff operation.</summary>
 /// <remarks>
 /// The progress monitor associated with the diff operation. By default, this
 /// is set to <code>NullProgressMonitor</code>.
 /// </remarks>
 /// <seealso cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</seealso>
 /// <param name="monitor">a progress monitor</param>
 /// <returns>this instance</returns>
 public MyersDiff SetProgressMonitor(ProgressMonitor monitor)
 {
     this.monitor = monitor;
     return(this);
 }
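A minimal hedged usage sketch for the setter above; the MyersDiff instance and the caller-supplied monitor are assumptions, and the only point illustrated is that the method returns the same instance so calls can be chained.
 // 'diff' and 'myMonitor' are assumed to exist in the calling code
 MyersDiff configured = diff.SetProgressMonitor(myMonitor); // same instance returned, so further setters can be chained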
Example #7
        /// <exception cref="System.IO.IOException"></exception>
        internal virtual void Search(ProgressMonitor monitor, ObjectToPack[] toSearch, int
			 off, int cnt)
        {
            try
            {
                for (int end = off + cnt; off < end; off++)
                {
                    res = window[resSlot];
                    if (0 < maxMemory)
                    {
                        Clear(res);
                        int tail = Next(resSlot);
                        long need = EstimateSize(toSearch[off]);
                        while (maxMemory < loaded + need && tail != resSlot)
                        {
                            Clear(window[tail]);
                            tail = Next(tail);
                        }
                    }
                    res.Set(toSearch[off]);
                    if (res.@object.IsEdge())
                    {
                        // We don't actually want to make a delta for
                        // them, just need to push them into the window
                        // so they can be read by other objects.
                        //
                        KeepInWindow();
                    }
                    else
                    {
                        // Search for a delta for the current window slot.
                        //
                        monitor.Update(1);
                        Search();
                    }
                }
            }
            finally
            {
                if (deflater != null)
                {
                    deflater.Finish();
                }
            }
        }
Example #8
		/// <exception cref="System.IO.IOException"></exception>
		private void ResolveDeltas(PackParser.DeltaVisit visit, int type, PackParser.ObjectTypeAndSize
			 info, ProgressMonitor progress)
		{
			do
			{
				progress.Update(1);
				info = OpenDatabase(visit.delta, info);
				switch (info.type)
				{
					case Constants.OBJ_OFS_DELTA:
					case Constants.OBJ_REF_DELTA:
					{
						break;
					}

					default:
					{
						throw new IOException(MessageFormat.Format(JGitText.Get().unknownObjectType, Sharpen.Extensions.ValueOf
							(info.type)));
					}
				}
				byte[] delta = InflateAndReturn(PackParser.Source.DATABASE, info.size);
				CheckIfTooLarge(type, BinaryDelta.GetResultSize(delta));
				visit.data = BinaryDelta.Apply(visit.parent.data, delta);
				delta = null;
				if (!CheckCRC(visit.delta.crc))
				{
					throw new IOException(MessageFormat.Format(JGitText.Get().corruptionDetectedReReadingAt
						, Sharpen.Extensions.ValueOf(visit.delta.position)));
				}
				objectDigest.Update(Constants.EncodedTypeString(type));
				objectDigest.Update(unchecked((byte)' '));
				objectDigest.Update(Constants.EncodeASCII(visit.data.Length));
				objectDigest.Update(unchecked((byte)0));
				objectDigest.Update(visit.data);
				tempObjectId.FromRaw(objectDigest.Digest(), 0);
				VerifySafeObject(tempObjectId, type, visit.data);
				PackedObjectInfo oe;
				oe = NewInfo(tempObjectId, visit.delta, visit.parent.id);
				oe.SetOffset(visit.delta.position);
				OnInflatedObjectData(oe, type, visit.data);
				AddObjectAndTrack(oe);
				visit.id = oe;
				visit.nextChild = FirstChildOf(oe);
				visit = visit.Next();
			}
			while (visit != null);
		}
Example #9
		// By default there is no locking.
		/// <exception cref="System.IO.IOException"></exception>
		private void ResolveDeltas(ProgressMonitor progress)
		{
			int last = entryCount;
			for (int i = 0; i < last; i++)
			{
				ResolveDeltas(entries[i], progress);
				if (progress.IsCancelled())
				{
					throw new IOException(JGitText.Get().downloadCancelledDuringIndexing);
				}
			}
		}
Example #10
		/// <summary>Parse the pack stream.</summary>
		/// <remarks>Parse the pack stream.</remarks>
		/// <param name="progress">
		/// callback to provide progress feedback during parsing. If null,
		/// <see cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</see>
		/// will be used.
		/// </param>
		/// <returns>
		/// the pack lock, if one was requested by setting
		/// <see cref="SetLockMessage(string)">SetLockMessage(string)</see>
		/// .
		/// </returns>
		/// <exception cref="System.IO.IOException">the stream is malformed, or contains corrupt objects.
		/// 	</exception>
		public PackLock Parse(ProgressMonitor progress)
		{
			return Parse(progress, progress);
		}
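A hedged caller-side sketch for the overload above: a PackParser is normally obtained from an ObjectInserter (see the DownloadPack helper in Example #26), and passing a single monitor forwards it to both the receiving and resolving phases. The packStream variable and the surrounding setup are assumptions of this sketch.
		ObjectInserter ins = repository.NewObjectInserter();
		try
		{
			PackParser parser = ins.NewPackParser(packStream); // packStream: an InputStream carrying pack data (assumed)
			parser.SetLockMessage("incoming pack");             // optional; makes Parse return a PackLock
			PackLock keep = parser.Parse(NullProgressMonitor.INSTANCE);
			ins.Flush();
			if (keep != null)
			{
				keep.Unlock(); // release the keep file once the objects are safely stored
			}
		}
		finally
		{
			ins.Release();
		}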
Example #11
        /// <summary>Fetch objects and refs from the remote repository to the local one.</summary>
        /// <remarks>
        /// Fetch objects and refs from the remote repository to the local one.
        /// <p>
        /// This is a utility function providing standard fetch behavior. Local
        /// tracking refs associated with the remote repository are automatically
        /// updated if this transport was created from a
        /// <see cref="RemoteConfig">RemoteConfig</see>
        /// with
        /// fetch RefSpecs defined.
        /// </remarks>
        /// <param name="monitor">
        /// progress monitor to inform the user about our processing
        /// activity. Must not be null. Use
        /// <see cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</see>
        /// if
        /// progress updates are not interesting or necessary.
        /// </param>
        /// <param name="toFetch">
        /// specification of refs to fetch locally. May be null or the
        /// empty collection to use the specifications from the
        /// RemoteConfig. Source for each RefSpec can't be null.
        /// </param>
        /// <returns>information describing the tracking refs updated.</returns>
        /// <exception cref="System.NotSupportedException">
        /// this transport implementation does not support fetching
        /// objects.
        /// </exception>
        /// <exception cref="NGit.Errors.TransportException">
        /// the remote connection could not be established or object
        /// copying (if necessary) failed or update specification was
        /// incorrect.
        /// </exception>
        public virtual FetchResult Fetch(ProgressMonitor monitor, ICollection<RefSpec> toFetch
			)
        {
            if (toFetch == null || toFetch.IsEmpty())
            {
                // If the caller did not ask for anything use the defaults.
                //
                if (fetch.IsEmpty())
                {
                    throw new TransportException(JGitText.Get().nothingToFetch);
                }
                toFetch = fetch;
            }
            else
            {
                if (!fetch.IsEmpty())
                {
                    // If the caller asked for something specific without giving
                    // us the local tracking branch see if we can update any of
                    // the local tracking branches without incurring additional
                    // object transfer overheads.
                    //
                    ICollection<RefSpec> tmp = new AList<RefSpec>(toFetch);
                    foreach (RefSpec requested in toFetch)
                    {
                        string reqSrc = requested.GetSource();
                        foreach (RefSpec configured in fetch)
                        {
                            string cfgSrc = configured.GetSource();
                            string cfgDst = configured.GetDestination();
                            if (cfgSrc.Equals(reqSrc) && cfgDst != null)
                            {
                                tmp.AddItem(configured);
                                break;
                            }
                        }
                    }
                    toFetch = tmp;
                }
            }
            FetchResult result = new FetchResult();
            new FetchProcess(this, toFetch).Execute(monitor, result);
            return result;
        }
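A hedged usage sketch for the method above: passing null for toFetch uses the fetch RefSpecs configured for the remote, as documented. The remote name "origin" and the repository variable are assumptions.
        Transport tn = Transport.Open(repository, "origin"); // "origin": assumed remote name
        try
        {
            FetchResult result = tn.Fetch(NullProgressMonitor.INSTANCE, null);
            foreach (TrackingRefUpdate tru in result.GetTrackingRefUpdates())
            {
                Console.WriteLine(tru.GetLocalName() + " -> " + tru.GetResult());
            }
        }
        finally
        {
            tn.Close();
        }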
Example #12
			// nothing here
			/// <exception cref="NGit.Errors.TransportException"></exception>
			public virtual void Push(ProgressMonitor monitor, IDictionary<string, RemoteRefUpdate
				> refsToUpdate)
			{
				foreach (RemoteRefUpdate rru in refsToUpdate.Values)
				{
					NUnit.Framework.Assert.AreEqual(RemoteRefUpdate.Status.NOT_ATTEMPTED, rru.GetStatus
						());
					rru.SetStatus(this._enclosing.connectionUpdateStatus);
				}
			}
Example #13
		// By default there is no locking.
		/// <exception cref="System.IO.IOException"></exception>
		private void ResolveDeltas(ProgressMonitor progress)
		{
			progress.BeginTask(JGitText.Get().resolvingDeltas, deltaCount);
			int last = entryCount;
			for (int i = 0; i < last; i++)
			{
				int before = entryCount;
				ResolveDeltas(entries[i]);
				progress.Update(entryCount - before);
				if (progress.IsCancelled())
				{
					throw new IOException(JGitText.Get().downloadCancelledDuringIndexing);
				}
			}
			progress.EndTask();
		}
Example #14
		/// <summary>Send this buffer to an output stream.</summary>
		/// <remarks>
		/// Send this buffer to an output stream.
		/// <p>
		/// This method may only be invoked after
		/// <see cref="Close()">Close()</see>
		/// has completed
		/// normally, to ensure all data is completely transferred.
		/// </remarks>
		/// <param name="os">stream to send this buffer's complete content to.</param>
		/// <param name="pm">
		/// if not null, progress updates are sent here. The caller should
		/// initialize the task and the number of work units to <code>
		/// <see cref="Length()">Length()</see>
		/// /1024</code>.
		/// </param>
		/// <exception cref="System.IO.IOException">
		/// an error occurred reading from a temporary file on the local
		/// system, or writing to the output stream.
		/// </exception>
		public virtual void WriteTo(OutputStream os, ProgressMonitor pm)
		{
			if (pm == null)
			{
				pm = NullProgressMonitor.INSTANCE;
			}
			foreach (TemporaryBuffer.Block b in blocks)
			{
				os.Write(b.buffer, 0, b.count);
				pm.Update(b.count / 1024);
			}
		}
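A hedged sketch of the calling convention documented above: the buffer must be closed before WriteTo, and the monitor's work units are initialized to Length()/1024. The destination stream and the caller-supplied monitor are assumptions.
		TemporaryBuffer buffer = new TemporaryBuffer.LocalFile();
		// ... write data into buffer, then close it; WriteTo may only run after Close() ...
		buffer.Close();
		ProgressMonitor pm = monitor; // caller-supplied monitor (assumed); NullProgressMonitor.INSTANCE also works
		pm.BeginTask("Sending buffer", (int)(buffer.Length() / 1024));
		buffer.WriteTo(outStream, pm); // outStream: destination OutputStream (assumed)
		pm.EndTask();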
Example #15
		internal SideBandInputStream(InputStream @in, ProgressMonitor progress, TextWriter
			 messageStream)
		{
			rawIn = @in;
			pckIn = new PacketLineIn(rawIn);
			monitor = progress;
			messages = messageStream;
			currentTask = string.Empty;
		}
		/// <summary>Open a remote file for writing.</summary>
		/// <remarks>
		/// Open a remote file for writing.
		/// <p>
		/// Path may start with <code>../</code> to request writing of a file that
		/// resides in the repository itself.
		/// <p>
		/// The requested path may or may not exist. If the path already exists as a
		/// file, it should be truncated and completely replaced.
		/// <p>
		/// This method creates any missing parent directories, if necessary.
		/// </remarks>
		/// <param name="path">
		/// name of the file to write, relative to the current object
		/// database.
		/// </param>
		/// <returns>
		/// stream to write into this file. Caller must close the stream to
		/// complete the write request. The stream is not buffered and each
		/// write may cause a network request/response so callers should
		/// buffer to smooth out small writes.
		/// </returns>
		/// <param name="monitor">
		/// (optional) progress monitor to post write completion to during
		/// the stream's close method.
		/// </param>
		/// <param name="monitorTask">(optional) task name to display during the close method.
		/// 	</param>
		/// <exception cref="System.IO.IOException">
		/// writing is not supported, or attempting to write the file
		/// failed, possibly due to permissions or remote disk full, etc.
		/// </exception>
		internal virtual OutputStream WriteFile(string path, ProgressMonitor monitor, string
			 monitorTask)
		{
			throw new IOException(MessageFormat.Format(JGitText.Get().writingNotSupported, path
				));
		}
Example #17
 public Stash Create(NGit.ProgressMonitor monitor)
 {
     return(Create(monitor, null));
 }
Example #18
		/// <exception cref="System.IO.IOException"></exception>
		/// <exception cref="NGit.Errors.MissingObjectException"></exception>
		public void SelectObjectRepresentation(PackWriter packer, ProgressMonitor monitor
			, Iterable<ObjectToPack> objects)
		{
			foreach (ObjectToPack otp in objects)
			{
				db.SelectObjectRepresentation(packer, otp, this);
				monitor.Update(1);
			}
		}
Example #19
 public MergeCommandResult Apply(NGit.ProgressMonitor monitor)
 {
     return(StashCollection.Apply(monitor, this));
 }
		/// <summary>Wrap a ProgressMonitor to be thread safe.</summary>
		/// <remarks>Wrap a ProgressMonitor to be thread safe.</remarks>
		/// <param name="pm">the underlying monitor to receive events.</param>
		public ThreadSafeProgressMonitor(ProgressMonitor pm)
		{
			this.pm = pm;
			this.Lock = new ReentrantLock();
			this.mainThread = Sharpen.Thread.CurrentThread();
			this.workers = new AtomicInteger(0);
			this.pendingUpdates = new AtomicInteger(0);
			this.process = Sharpen.Extensions.CreateSemaphore(0);
		}
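A hedged sketch of how the wrapper above is typically driven: the owning thread begins the task and starts workers, worker threads post updates, and the owning thread drains them. StartWorkers, EndWorker and PollForUpdates mirror the JGit API this class was ported from and are assumptions here.
		ThreadSafeProgressMonitor tspm = new ThreadSafeProgressMonitor(baseMonitor); // baseMonitor is assumed
		tspm.BeginTask("Compressing objects", totalWork); // only the owning thread drives the wrapped monitor
		tspm.StartWorkers(threadCount);                   // assumed API
		// In each worker thread:
		//     ... do one unit of work ...
		//     tspm.Update(1);        // safe from any thread
		//     tspm.EndWorker();      // assumed API, called once per worker when done
		// Meanwhile, on the owning thread:
		//     tspm.PollForUpdates(); // assumed API: forwards queued updates to baseMonitor
		tspm.EndTask();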
Example #21
        public Stash Create(NGit.ProgressMonitor monitor, string message)
        {
            if (monitor != null)
            {
                monitor.Start(1);
                monitor.BeginTask("Stashing changes", 100);
            }

            UserConfig config = _repo.GetConfig().Get(UserConfig.KEY);
            RevWalk    rw     = new RevWalk(_repo);
            ObjectId   headId = _repo.Resolve(Constants.HEAD);
            var        parent = rw.ParseCommit(headId);

            PersonIdent author = new PersonIdent(config.GetAuthorName() ?? "unknown", config.GetAuthorEmail() ?? "unknown@(none).");

            if (string.IsNullOrEmpty(message))
            {
                // Use the commit summary as message
                message = parent.Abbreviate(7).ToString() + " " + parent.GetShortMessage();
                int i = message.IndexOfAny(new char[] { '\r', '\n' });
                if (i != -1)
                {
                    message = message.Substring(0, i);
                }
            }

            // Create the index tree commit
            ObjectInserter inserter = _repo.NewObjectInserter();
            DirCache       dc       = _repo.ReadDirCache();

            if (monitor != null)
            {
                monitor.Update(10);
            }

            var tree_id = dc.WriteTree(inserter);

            inserter.Release();

            if (monitor != null)
            {
                monitor.Update(10);
            }

            string   commitMsg   = "index on " + _repo.GetBranch() + ": " + message;
            ObjectId indexCommit = GitUtil.CreateCommit(_repo, commitMsg + "\n", new ObjectId[] { headId }, tree_id, author, author);

            if (monitor != null)
            {
                monitor.Update(20);
            }

            // Create the working dir commit
            tree_id   = WriteWorkingDirectoryTree(parent.Tree, dc);
            commitMsg = "WIP on " + _repo.GetBranch() + ": " + message;
            var wipCommit = GitUtil.CreateCommit(_repo, commitMsg + "\n", new ObjectId[] { headId, indexCommit }, tree_id, author, author);

            if (monitor != null)
            {
                monitor.Update(20);
            }

            string   prevCommit = null;
            FileInfo sf         = StashRefFile;

            if (sf.Exists)
            {
                prevCommit = File.ReadAllText(sf.FullName).Trim(' ', '\t', '\r', '\n');
            }

            Stash s = new Stash(prevCommit, wipCommit.Name, author, commitMsg);

            FileInfo stashLog = StashLogFile;

            File.AppendAllText(stashLog.FullName, s.FullLine + "\n");
            File.WriteAllText(sf.FullName, s.CommitId + "\n");

            if (monitor != null)
            {
                monitor.Update(5);
            }

            // Wipe all local changes
            GitUtil.HardReset(_repo, Constants.HEAD);

            if (monitor != null)
            {
                monitor.EndTask();
            }
            s.StashCollection = this;
            return(s);
        }
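A hedged sketch tying the stash examples together (Create above, Pop in Example #2, Apply in Examples #5 and #19). How the StashCollection instance is obtained depends on the hosting code, so the accessor below is hypothetical.
        StashCollection stashes = GetStashCollection(repo); // hypothetical accessor, see lead-in
        Stash s = stashes.Create(NullProgressMonitor.INSTANCE, "work in progress");
        // ... later ...
        MergeCommandResult res = stashes.Pop(NullProgressMonitor.INSTANCE);
        if (res.GetMergeStatus() == MergeStatus.FAILED)
        {
            // Pop (Example #2) keeps the stash entry when applying it fails, so nothing is lost
        }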
Example #22
			/// <exception cref="System.IO.IOException"></exception>
			public override void WriteTo(OutputStream os, ProgressMonitor pm)
			{
				if (onDiskFile == null)
				{
					base.WriteTo(os, pm);
					return;
				}
				if (pm == null)
				{
					pm = NullProgressMonitor.INSTANCE;
				}
				FileInputStream @in = new FileInputStream(onDiskFile);
				try
				{
					int cnt;
					byte[] buf = new byte[TemporaryBuffer.Block.SZ];
					while ((cnt = @in.Read(buf)) >= 0)
					{
						os.Write(buf, 0, cnt);
						pm.Update(cnt / 1024);
					}
				}
				finally
				{
					@in.Close();
				}
			}
			/// <exception cref="System.IO.IOException"></exception>
			internal override OutputStream WriteFile(string path, ProgressMonitor monitor, string
				 monitorTask)
			{
				return this._enclosing.s3.BeginPut(this._enclosing.bucket, this.ResolveKey(path), 
					monitor, monitorTask);
			}
Example #24
		/// <summary>Run the diff operation.</summary>
		/// <remarks>
		/// Run the diff operation. Until this is called, all lists will be empty.
		/// <p>
		/// The operation may be aborted by the progress monitor. In that event it
		/// will report what was found before the cancel operation was detected.
		/// Callers should ignore the result if monitor.isCancelled() is true. If a
		/// progress monitor is not needed, callers should use
		/// <see cref="Diff()">Diff()</see>
		/// instead. Progress reporting is crude and approximate and only intended
		/// for informing the user.
		/// </remarks>
		/// <param name="monitor">for reporting progress, may be null</param>
		/// <param name="estWorkTreeSize">number of estimated files in the working tree</param>
		/// <param name="estIndexSize">number of estimated entries in the cache</param>
		/// <param name="title"></param>
		/// <returns>if anything is different between index, tree, and workdir</returns>
		/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
		public virtual bool Diff(ProgressMonitor monitor, int estWorkTreeSize, int estIndexSize
			, string title)
		{
			dirCache = repository.ReadDirCache();
			TreeWalk treeWalk = new TreeWalk(repository);
			treeWalk.Recursive = true;
			// add the trees (tree, dirchache, workdir)
			if (tree != null)
			{
				treeWalk.AddTree(tree);
			}
			else
			{
				treeWalk.AddTree(new EmptyTreeIterator());
			}
			treeWalk.AddTree(new DirCacheIterator(dirCache));
			treeWalk.AddTree(initialWorkingTreeIterator);
			ICollection<TreeFilter> filters = new AList<TreeFilter>(4);
			if (monitor != null)
			{
				// Get the maximum size of the work tree and index
				// and add some (quite arbitrary)
				if (estIndexSize == 0)
				{
					estIndexSize = dirCache.GetEntryCount();
				}
				int total = Math.Max(estIndexSize * 10 / 9, estWorkTreeSize * 10 / 9);
				monitor.BeginTask(title, total);
				filters.AddItem(new IndexDiff.ProgressReportingFilter(monitor, total));
			}
			if (filter != null)
			{
				filters.AddItem(filter);
			}
			filters.AddItem(new SkipWorkTreeFilter(INDEX));
			filters.AddItem(new IndexDiffFilter(INDEX, WORKDIR));
			treeWalk.Filter = AndTreeFilter.Create(filters);
			while (treeWalk.Next())
			{
				AbstractTreeIterator treeIterator = treeWalk.GetTree<AbstractTreeIterator>(TREE);
				DirCacheIterator dirCacheIterator = treeWalk.GetTree<DirCacheIterator>(INDEX);
				WorkingTreeIterator workingTreeIterator = treeWalk.GetTree<WorkingTreeIterator>(WORKDIR
					);
				if (treeIterator != null)
				{
					if (dirCacheIterator != null)
					{
						if (!treeIterator.IdEqual(dirCacheIterator) || treeIterator.EntryRawMode != dirCacheIterator
							.EntryRawMode)
						{
							// in repo, in index, content diff => changed
							changed.AddItem(treeWalk.PathString);
						}
					}
					else
					{
						// in repo, not in index => removed
						removed.AddItem(treeWalk.PathString);
						if (workingTreeIterator != null)
						{
							untracked.AddItem(treeWalk.PathString);
						}
					}
				}
				else
				{
					if (dirCacheIterator != null)
					{
						// not in repo, in index => added
						added.AddItem(treeWalk.PathString);
					}
					else
					{
						// not in repo, not in index => untracked
						if (workingTreeIterator != null && !workingTreeIterator.IsEntryIgnored())
						{
							untracked.AddItem(treeWalk.PathString);
						}
					}
				}
				if (dirCacheIterator != null)
				{
					if (workingTreeIterator == null)
					{
						// in index, not in workdir => missing
						missing.AddItem(treeWalk.PathString);
					}
					else
					{
						if (workingTreeIterator.IsModified(dirCacheIterator.GetDirCacheEntry(), true))
						{
							// in index, in workdir, content differs => modified
							modified.AddItem(treeWalk.PathString);
						}
					}
				}
			}
			// consume the remaining work
			if (monitor != null)
			{
				monitor.EndTask();
			}
			if (added.IsEmpty() && changed.IsEmpty() && removed.IsEmpty() && missing.IsEmpty(
				) && modified.IsEmpty() && untracked.IsEmpty())
			{
				return false;
			}
			else
			{
				return true;
			}
		}
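A hedged caller-side sketch for Diff above: both estimates may be passed as zero (the method then derives the index size itself), and the getters mentioned in the comment are assumptions about the surrounding IndexDiff class.
		IndexDiff indexDiff = new IndexDiff(repository, Constants.HEAD, new FileTreeIterator(repository));
		bool dirty = indexDiff.Diff(monitor, 0, 0, "Computing status"); // monitor may also be null, per the docs
		if (dirty)
		{
			// inspect the result lists, e.g. indexDiff.GetAdded(), GetChanged(), GetRemoved(),
			// GetMissing(), GetModified(), GetUntracked() (accessor names assumed from the fields used above)
		}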
			/// <exception cref="System.IO.IOException"></exception>
			internal virtual void OpenIndex(ProgressMonitor pm)
			{
				if (this.index != null)
				{
					return;
				}
				if (this.tmpIdx == null)
				{
					this.tmpIdx = FilePath.CreateTempFile("jgit-walk-", ".idx");
				}
				else
				{
					if (this.tmpIdx.IsFile())
					{
						try
						{
							this.index = PackIndex.Open(this.tmpIdx);
							return;
						}
						catch (FileNotFoundException)
						{
						}
					}
				}
				// Fall through and get the file.
				WalkRemoteObjectDatabase.FileStream s;
				s = this.connection.Open("pack/" + this.idxName);
				pm.BeginTask("Get " + Sharpen.Runtime.Substring(this.idxName, 0, 12) + "..idx", s
					.length < 0 ? ProgressMonitor.UNKNOWN : (int)(s.length / 1024));
				try
				{
					FileOutputStream fos = new FileOutputStream(this.tmpIdx);
					try
					{
						byte[] buf = new byte[2048];
						int cnt;
						while (!pm.IsCancelled() && (cnt = s.@in.Read(buf)) >= 0)
						{
							fos.Write(buf, 0, cnt);
							pm.Update(cnt / 1024);
						}
					}
					finally
					{
						fos.Close();
					}
				}
				catch (IOException err)
				{
					FileUtils.Delete(this.tmpIdx);
					throw;
				}
				finally
				{
					s.@in.Close();
				}
				pm.EndTask();
				if (pm.IsCancelled())
				{
					FileUtils.Delete(this.tmpIdx);
					return;
				}
				try
				{
					this.index = PackIndex.Open(this.tmpIdx);
				}
				catch (IOException e)
				{
					FileUtils.Delete(this.tmpIdx);
					throw;
				}
			}
Example #26
        /// <summary>Push objects and refs from the local repository to the remote one.</summary>
        /// <remarks>
        /// Push objects and refs from the local repository to the remote one.
        /// <p>
        /// This is a utility function providing standard push behavior. It updates
        /// remote refs and sends the necessary objects there according to the remote
        /// ref update specification. After a successful remote ref update, the
        /// associated locally stored tracking branch is updated if set up accordingly.
        /// A detailed operation result is provided after execution.
        /// <p>
        /// For setting up remote ref update specification from ref spec, see helper
        /// method
        /// <see cref="FindRemoteRefUpdatesFor(System.Collections.Generic.ICollection{E})">FindRemoteRefUpdatesFor(System.Collections.Generic.ICollection&lt;E&gt;)
        /// 	</see>
        /// , predefined refspecs
        /// (
        /// <see cref="REFSPEC_TAGS">REFSPEC_TAGS</see>
        /// ,
        /// <see cref="REFSPEC_PUSH_ALL">REFSPEC_PUSH_ALL</see>
        /// ) or consider using
        /// directly
        /// <see cref="RemoteRefUpdate">RemoteRefUpdate</see>
        /// for more possibilities.
        /// <p>
        /// When
        /// <see cref="IsDryRun()">IsDryRun()</see>
        /// is true, result of this operation is just
        /// estimation of real operation result, no real action is performed.
        /// </remarks>
        /// <seealso cref="RemoteRefUpdate">RemoteRefUpdate</seealso>
        /// <param name="monitor">
        /// progress monitor to inform the user about our processing
        /// activity. Must not be null. Use
        /// <see cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</see>
        /// if
        /// progress updates are not interesting or necessary.
        /// </param>
        /// <param name="toPush">
        /// specification of refs to push. May be null or the empty
        /// collection to use the specifications from the RemoteConfig
        /// converted by
        /// <see cref="FindRemoteRefUpdatesFor(System.Collections.Generic.ICollection{E})">FindRemoteRefUpdatesFor(System.Collections.Generic.ICollection&lt;E&gt;)
        /// 	</see>
        /// . No
        /// more than 1 RemoteRefUpdate with the same remoteName is
        /// allowed. These objects are modified during this call.
        /// </param>
        /// <returns>
        /// information about results of remote refs updates, tracking refs
        /// updates and refs advertised by remote repository.
        /// </returns>
        /// <exception cref="System.NotSupportedException">
        /// this transport implementation does not support pushing
        /// objects.
        /// </exception>
        /// <exception cref="NGit.Errors.TransportException">
        /// the remote connection could not be established or object
        /// copying (if necessary) failed at I/O or protocol level or
        /// update specification was incorrect.
        /// </exception>
        public virtual PushResult Push(ProgressMonitor monitor, ICollection<RemoteRefUpdate
			> toPush)
        {
            if (toPush == null || toPush.IsEmpty())
            {
                // If the caller did not ask for anything use the defaults.
                try
                {
                    toPush = FindRemoteRefUpdatesFor(push);
                }
                catch (IOException e)
                {
                    throw new TransportException(MessageFormat.Format(JGitText.Get().problemWithResolvingPushRefSpecsLocally
                        , e.Message), e);
                }
                if (toPush.IsEmpty())
                {
                    throw new TransportException(JGitText.Get().nothingToPush);
                }
            }
            PushProcess pushProcess = new PushProcess(this, toPush);
            return pushProcess.Execute(monitor);
        }
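A hedged usage sketch mirroring the fetch example: passing null for toPush uses the push RefSpecs from the RemoteConfig (converted via FindRemoteRefUpdatesFor), as documented above. The remote name is an assumption.
        Transport tn = Transport.Open(repository, "origin"); // "origin": assumed remote name
        try
        {
            PushResult result = tn.Push(NullProgressMonitor.INSTANCE, null);
            foreach (RemoteRefUpdate rru in result.GetRemoteUpdates())
            {
                Console.WriteLine(rru.GetRemoteName() + ": " + rru.GetStatus());
            }
        }
        finally
        {
            tn.Close();
        }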
			/// <exception cref="System.IO.IOException"></exception>
			internal virtual void DownloadPack(ProgressMonitor monitor)
			{
				string name = "pack/" + this.packName;
				WalkRemoteObjectDatabase.FileStream s = this.connection.Open(name);
				PackParser parser = this._enclosing.inserter.NewPackParser(s.@in);
				parser.SetAllowThin(false);
				parser.SetObjectChecker(this._enclosing.objCheck);
				parser.SetLockMessage(this._enclosing.lockMessage);
				PackLock Lock = parser.Parse(monitor);
				if (Lock != null)
				{
					this._enclosing.packLocks.AddItem(Lock);
				}
				this._enclosing.inserter.Flush();
			}
Example #28
		/// <summary>Execute this batch update.</summary>
		/// <remarks>
		/// Execute this batch update.
		/// <p>
		/// The default implementation of this method performs a sequential reference
		/// update over each reference.
		/// </remarks>
		/// <param name="walk">
		/// a RevWalk to parse tags in case the storage system wants to
		/// store them pre-peeled, a common performance optimization.
		/// </param>
		/// <param name="update">progress monitor to receive update status on.</param>
		/// <exception cref="System.IO.IOException">
		/// the database is unable to accept the update. Individual
		/// command status must be tested to determine if there is a
		/// partial failure, or a total failure.
		/// </exception>
		public virtual void Execute(RevWalk walk, ProgressMonitor update)
		{
			update.BeginTask(JGitText.Get().updatingReferences, commands.Count);
			foreach (ReceiveCommand cmd in commands)
			{
				try
				{
					update.Update(1);
					if (cmd.GetResult() == ReceiveCommand.Result.NOT_ATTEMPTED)
					{
						cmd.UpdateType(walk);
						RefUpdate ru = NewUpdate(cmd);
						switch (cmd.GetType())
						{
							case ReceiveCommand.Type.DELETE:
							{
								cmd.SetResult(ru.Delete(walk));
								continue;
							}

							case ReceiveCommand.Type.CREATE:
							case ReceiveCommand.Type.UPDATE:
							case ReceiveCommand.Type.UPDATE_NONFASTFORWARD:
							{
								cmd.SetResult(ru.Update(walk));
								continue;
							}
						}
					}
				}
				catch (IOException err)
				{
					cmd.SetResult(ReceiveCommand.Result.REJECTED_OTHER_REASON, MessageFormat.Format(JGitText
						.Get().lockError, err.Message));
				}
			}
			update.EndTask();
		}
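A hedged sketch of driving the batch update above: the ref database accessor spelling, the command arguments and the ref name are assumptions of this sketch; checking each command afterwards follows the contract stated in the exception documentation.
		BatchRefUpdate batch = repository.RefDatabase.NewBatchUpdate(); // accessor spelling assumed for this NGit port
		batch.AddCommand(new ReceiveCommand(oldId, newId, "refs/heads/master")); // oldId/newId: assumed ObjectIds
		RevWalk walk = new RevWalk(repository);
		try
		{
			batch.Execute(walk, NullProgressMonitor.INSTANCE);
			foreach (ReceiveCommand cmd in batch.GetCommands())
			{
				Console.WriteLine(cmd.GetRefName() + ": " + cmd.GetResult()); // test each command for partial failure
			}
		}
		finally
		{
			walk.Release();
		}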
		/// <exception cref="NGit.Errors.TransportException"></exception>
		protected internal override void DoFetch(ProgressMonitor monitor, ICollection<Ref
			> want, ICollection<ObjectId> have)
		{
			MarkLocalRefsComplete(have);
			QueueWants(want);
			while (!monitor.IsCancelled() && !workQueue.IsEmpty())
			{
				ObjectId id = workQueue.RemoveFirst();
				if (!(id is RevObject) || !((RevObject)id).Has(COMPLETE))
				{
					DownloadObject(monitor, id);
				}
				Process(id);
			}
		}
Example #30
		/// <summary>Parse the pack stream.</summary>
		/// <remarks>Parse the pack stream.</remarks>
		/// <param name="receiving">
		/// receives progress feedback during the initial receiving
		/// objects phase. If null,
		/// <see cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</see>
		/// will be
		/// used.
		/// </param>
		/// <param name="resolving">receives progress feedback during the resolving objects phase.
		/// 	</param>
		/// <returns>
		/// the pack lock, if one was requested by setting
		/// <see cref="SetLockMessage(string)">SetLockMessage(string)</see>
		/// .
		/// </returns>
		/// <exception cref="System.IO.IOException">the stream is malformed, or contains corrupt objects.
		/// 	</exception>
		public virtual PackLock Parse(ProgressMonitor receiving, ProgressMonitor resolving
			)
		{
			if (receiving == null)
			{
				receiving = NullProgressMonitor.INSTANCE;
			}
			if (resolving == null)
			{
				resolving = NullProgressMonitor.INSTANCE;
			}
			if (receiving == resolving)
			{
				receiving.Start(2);
			}
			try
			{
				ReadPackHeader();
				entries = new PackedObjectInfo[(int)objectCount];
				baseById = new ObjectIdOwnerMap<PackParser.DeltaChain>();
				baseByPos = new LongMap<PackParser.UnresolvedDelta>();
				deferredCheckBlobs = new BlockList<PackedObjectInfo>();
				receiving.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
				try
				{
					for (int done = 0; done < objectCount; done++)
					{
						IndexOneObject();
						receiving.Update(1);
						if (receiving.IsCancelled())
						{
							throw new IOException(JGitText.Get().downloadCancelled);
						}
					}
					ReadPackFooter();
					EndInput();
				}
				finally
				{
					receiving.EndTask();
				}
				if (!deferredCheckBlobs.IsEmpty())
				{
					DoDeferredCheckBlobs();
				}
				if (deltaCount > 0)
				{
					if (resolving is BatchingProgressMonitor)
					{
						((BatchingProgressMonitor)resolving).SetDelayStart(1000, TimeUnit.MILLISECONDS);
					}
					resolving.BeginTask(JGitText.Get().resolvingDeltas, deltaCount);
					ResolveDeltas(resolving);
					if (entryCount < objectCount)
					{
						if (!IsAllowThin())
						{
							throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
								, Sharpen.Extensions.ValueOf(objectCount - entryCount)));
						}
						ResolveDeltasWithExternalBases(resolving);
						if (entryCount < objectCount)
						{
							throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
								, Sharpen.Extensions.ValueOf(objectCount - entryCount)));
						}
					}
					resolving.EndTask();
				}
				packDigest = null;
				baseById = null;
				baseByPos = null;
			}
			finally
			{
				try
				{
					if (readCurs != null)
					{
						readCurs.Release();
					}
				}
				finally
				{
					readCurs = null;
				}
				try
				{
					inflater.Release();
				}
				finally
				{
					inflater = null;
				}
			}
			return null;
		}
		/// <exception cref="NGit.Errors.TransportException"></exception>
		private void DownloadObject(ProgressMonitor pm, AnyObjectId id)
		{
			if (AlreadyHave(id))
			{
				return;
			}
			for (; ; )
			{
				// Try a pack file we know about, but don't have yet. Odds are
				// that if it has this object, it has others related to it so
				// getting the pack is a good bet.
				//
				if (DownloadPackedObject(pm, id))
				{
					return;
				}
				// Search for a loose object over all alternates, starting
				// from the one we last successfully located an object through.
				//
				string idStr = id.Name;
				string subdir = Sharpen.Runtime.Substring(idStr, 0, 2);
				string file = Sharpen.Runtime.Substring(idStr, 2);
				string looseName = subdir + "/" + file;
				for (int i = lastRemoteIdx; i < remotes.Count; i++)
				{
					if (DownloadLooseObject(id, looseName, remotes[i]))
					{
						lastRemoteIdx = i;
						return;
					}
				}
				for (int i_1 = 0; i_1 < lastRemoteIdx; i_1++)
				{
					if (DownloadLooseObject(id, looseName, remotes[i_1]))
					{
						lastRemoteIdx = i_1;
						return;
					}
				}
				// Try to obtain more pack information and search those.
				//
				while (!noPacksYet.IsEmpty())
				{
					WalkRemoteObjectDatabase wrr = noPacksYet.RemoveFirst();
					ICollection<string> packNameList;
					try
					{
						pm.BeginTask("Listing packs", ProgressMonitor.UNKNOWN);
						packNameList = wrr.GetPackNames();
					}
					catch (IOException e)
					{
						// Try another repository.
						//
						RecordError(id, e);
						continue;
					}
					finally
					{
						pm.EndTask();
					}
					if (packNameList == null || packNameList.IsEmpty())
					{
						continue;
					}
					foreach (string packName in packNameList)
					{
						if (packsConsidered.AddItem(packName))
						{
							unfetchedPacks.AddItem(new WalkFetchConnection.RemotePack(this, wrr, packName));
						}
					}
					if (DownloadPackedObject(pm, id))
					{
						return;
					}
				}
				// Try to expand the first alternate we haven't expanded yet.
				//
				ICollection<WalkRemoteObjectDatabase> al = ExpandOneAlternate(id, pm);
				if (al != null && !al.IsEmpty())
				{
					foreach (WalkRemoteObjectDatabase alt in al)
					{
						remotes.AddItem(alt);
						noPacksYet.AddItem(alt);
						noAlternatesYet.AddItem(alt);
					}
					continue;
				}
				// We could not obtain the object. There may be reasons why.
				//
				IList<Exception> failures = fetchErrors.Get((ObjectId)id);
				TransportException te;
				te = new TransportException(MessageFormat.Format(JGitText.Get().cannotGet, id.Name
					));
				if (failures != null && !failures.IsEmpty())
				{
					if (failures.Count == 1)
					{
						Sharpen.Extensions.InitCause(te, failures[0]);
					}
					else
					{
						Sharpen.Extensions.InitCause(te, new CompoundException(failures));
					}
				}
				throw te;
			}
		}
Example #32
		/// <exception cref="System.IO.IOException"></exception>
		private void ResolveDeltas(PackedObjectInfo oe, ProgressMonitor progress)
		{
			PackParser.UnresolvedDelta children = FirstChildOf(oe);
			if (children == null)
			{
				return;
			}
			PackParser.DeltaVisit visit = new PackParser.DeltaVisit();
			visit.nextChild = children;
			PackParser.ObjectTypeAndSize info = OpenDatabase(oe, new PackParser.ObjectTypeAndSize
				());
			switch (info.type)
			{
				case Constants.OBJ_COMMIT:
				case Constants.OBJ_TREE:
				case Constants.OBJ_BLOB:
				case Constants.OBJ_TAG:
				{
					visit.data = InflateAndReturn(PackParser.Source.DATABASE, info.size);
					visit.id = oe;
					break;
				}

				default:
				{
					throw new IOException(MessageFormat.Format(JGitText.Get().unknownObjectType, Sharpen.Extensions.ValueOf
						(info.type)));
				}
			}
			if (!CheckCRC(oe.GetCRC()))
			{
				throw new IOException(MessageFormat.Format(JGitText.Get().corruptionDetectedReReadingAt
					, Sharpen.Extensions.ValueOf(oe.GetOffset())));
			}
			ResolveDeltas(visit.Next(), info.type, info, progress);
		}
		/// <exception cref="NGit.Errors.TransportException"></exception>
		private bool DownloadPackedObject(ProgressMonitor monitor, AnyObjectId id)
		{
			// Search for the object in a remote pack whose index we have,
			// but whose pack we do not yet have.
			//
			Iterator<WalkFetchConnection.RemotePack> packItr = unfetchedPacks.Iterator();
			while (packItr.HasNext() && !monitor.IsCancelled())
			{
				WalkFetchConnection.RemotePack pack = packItr.Next();
				try
				{
					pack.OpenIndex(monitor);
				}
				catch (IOException err)
				{
					// If the index won't open its either not found or
					// its a format we don't recognize. In either case
					// we may still be able to obtain the object from
					// another source, so don't consider it a failure.
					//
					RecordError(id, err);
					packItr.Remove();
					continue;
				}
				if (monitor.IsCancelled())
				{
					// If we were cancelled while the index was opening
					// the open may have aborted. We can't search an
					// unopen index.
					//
					return false;
				}
				if (!pack.index.HasObject(id))
				{
					// Not in this pack? Try another.
					//
					continue;
				}
				// It should be in the associated pack. Download that
				// and attach it to the local repository so we can use
				// all of the contained objects.
				//
				try
				{
					pack.DownloadPack(monitor);
				}
				catch (IOException err)
				{
					// If the pack failed to download, index correctly,
					// or open in the local repository we may still be
					// able to obtain this object from another pack or
					// an alternate.
					//
					RecordError(id, err);
					continue;
				}
				finally
				{
					// If the pack was good its in the local repository
					// and Repository.hasObject(id) will succeed in the
					// future, so we do not need this data anymore. If
					// it failed the index and pack are unusable and we
					// shouldn't consult them again.
					//
					try
					{
						if (pack.tmpIdx != null)
						{
							FileUtils.Delete(pack.tmpIdx);
						}
					}
					catch (IOException e)
					{
						throw new TransportException(e.Message, e);
					}
					packItr.Remove();
				}
				if (!AlreadyHave(id))
				{
					// What the hell? This pack claimed to have
					// the object, but after indexing we didn't
					// actually find it in the pack.
					//
					RecordError(id, new FileNotFoundException(MessageFormat.Format(JGitText.Get().objectNotFoundIn
						, id.Name, pack.packName)));
					continue;
				}
				// Complete any other objects that we can.
				//
				Iterator<ObjectId> pending = SwapFetchQueue();
				while (pending.HasNext())
				{
					ObjectId p = pending.Next();
					if (pack.index.HasObject(p))
					{
						pending.Remove();
						Process(p);
					}
					else
					{
						workQueue.AddItem(p);
					}
				}
				return true;
			}
			return false;
		}
Example #34
		/// <exception cref="System.IO.IOException"></exception>
		private void ResolveDeltasWithExternalBases(ProgressMonitor progress)
		{
			GrowEntries(baseById.Size());
			if (needBaseObjectIds)
			{
				baseObjectIds = new ObjectIdSubclassMap<ObjectId>();
			}
			IList<PackParser.DeltaChain> missing = new AList<PackParser.DeltaChain>(64);
			foreach (PackParser.DeltaChain baseId in baseById)
			{
				if (baseId.head == null)
				{
					continue;
				}
				if (needBaseObjectIds)
				{
					baseObjectIds.Add(baseId);
				}
				ObjectLoader ldr;
				try
				{
					ldr = readCurs.Open(baseId);
				}
				catch (MissingObjectException)
				{
					missing.AddItem(baseId);
					continue;
				}
				PackParser.DeltaVisit visit = new PackParser.DeltaVisit();
				visit.data = ldr.GetCachedBytes(int.MaxValue);
				visit.id = baseId;
				int typeCode = ldr.GetType();
				PackedObjectInfo oe = NewInfo(baseId, null, null);
				if (OnAppendBase(typeCode, visit.data, oe))
				{
					entries[entryCount++] = oe;
				}
				visit.nextChild = FirstChildOf(oe);
				ResolveDeltas(visit.Next(), typeCode, new PackParser.ObjectTypeAndSize(), progress
					);
				if (progress.IsCancelled())
				{
					throw new IOException(JGitText.Get().downloadCancelledDuringIndexing);
				}
			}
			foreach (PackParser.DeltaChain @base in missing)
			{
				if (@base.head != null)
				{
					throw new MissingObjectException(@base, "delta base");
				}
			}
			OnEndThinPack();
		}
		private ICollection<WalkRemoteObjectDatabase> ExpandOneAlternate(AnyObjectId id, 
			ProgressMonitor pm)
		{
			while (!noAlternatesYet.IsEmpty())
			{
				WalkRemoteObjectDatabase wrr = noAlternatesYet.RemoveFirst();
				try
				{
					pm.BeginTask(JGitText.Get().listingAlternates, ProgressMonitor.UNKNOWN);
					ICollection<WalkRemoteObjectDatabase> altList = wrr.GetAlternates();
					if (altList != null && !altList.IsEmpty())
					{
						return altList;
					}
				}
				catch (IOException e)
				{
					// Try another repository.
					//
					RecordError(id, e);
				}
				finally
				{
					pm.EndTask();
				}
			}
			return null;
		}
Example #36
		/// <summary>Generate and write the bundle to the output stream.</summary>
		/// <remarks>
		/// Generate and write the bundle to the output stream.
		/// <p>
		/// This method can only be called once per BundleWriter instance.
		/// </remarks>
		/// <param name="monitor">progress monitor to report bundle writing status to.</param>
		/// <param name="os">
		/// the stream the bundle is written to. The stream should be
		/// buffered by the caller. The caller is responsible for closing
		/// the stream.
		/// </param>
		/// <exception cref="System.IO.IOException">
		/// an error occurred reading a local object's data to include in
		/// the bundle, or writing compressed object data to the output
		/// stream.
		/// </exception>
		public virtual void WriteBundle(ProgressMonitor monitor, OutputStream os)
		{
			PackConfig pc = packConfig;
			if (pc == null)
			{
				pc = new PackConfig(db);
			}
			PackWriter packWriter = new PackWriter(pc, db.NewObjectReader());
			try
			{
				HashSet<ObjectId> inc = new HashSet<ObjectId>();
				HashSet<ObjectId> exc = new HashSet<ObjectId>();
				Sharpen.Collections.AddAll(inc, include.Values);
				foreach (RevCommit r in assume)
				{
					exc.AddItem(r.Id);
				}
				packWriter.SetDeltaBaseAsOffset(true);
				packWriter.SetThin(exc.Count > 0);
				packWriter.SetReuseValidatingObjects(false);
				if (exc.Count == 0)
				{
					packWriter.SetTagTargets(tagTargets);
				}
				packWriter.PreparePack(monitor, inc, exc);
				TextWriter w = new OutputStreamWriter(os, Constants.CHARSET);
				w.Write(NGit.Transport.TransportBundleConstants.V2_BUNDLE_SIGNATURE);
				w.Write('\n');
				char[] tmp = new char[Constants.OBJECT_ID_STRING_LENGTH];
				foreach (RevCommit a in assume)
				{
					w.Write('-');
					a.CopyTo(tmp, w);
					if (a.RawBuffer != null)
					{
						w.Write(' ');
						w.Write(a.GetShortMessage());
					}
					w.Write('\n');
				}
				foreach (KeyValuePair<string, ObjectId> e in include.EntrySet())
				{
					e.Value.CopyTo(tmp, w);
					w.Write(' ');
					w.Write(e.Key);
					w.Write('\n');
				}
				w.Write('\n');
				w.Flush();
				packWriter.WritePack(monitor, monitor, os);
			}
			finally
			{
				packWriter.Release();
			}
		}
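A hedged usage sketch for WriteBundle above: the ref name, the assumed basis commit and the unbuffered output stream are assumptions (the contract above asks the caller to buffer and to close the stream).
		BundleWriter bw = new BundleWriter(repository);
		bw.Include("refs/heads/master", repository.Resolve("refs/heads/master"));
		RevWalk walk = new RevWalk(repository);
		try
		{
			// assume the receiver already has this commit, producing a thin bundle
			bw.Assume(walk.ParseCommit(repository.Resolve("refs/heads/master~10")));
		}
		finally
		{
			walk.Release();
		}
		OutputStream os = new FileOutputStream("repo.bundle"); // caller should buffer and must close, per the docs
		try
		{
			bw.WriteBundle(NullProgressMonitor.INSTANCE, os);
		}
		finally
		{
			os.Close();
		}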
Example #37
        public static MergeCommandResult MergeTrees(NGit.ProgressMonitor monitor, NGit.Repository repo, RevCommit srcBase, RevCommit srcCommit, string sourceDisplayName, bool commitResult)
        {
            RevCommit newHead = null;
            RevWalk   revWalk = new RevWalk(repo);

            try
            {
                // get the head commit
                Ref headRef = repo.GetRef(Constants.HEAD);
                if (headRef == null)
                {
                    throw new NoHeadException(JGitText.Get().commitOnRepoWithoutHEADCurrentlyNotSupported
                                              );
                }
                RevCommit headCommit = revWalk.ParseCommit(headRef.GetObjectId());

                ResolveMerger merger = (ResolveMerger)((ThreeWayMerger)MergeStrategy.RESOLVE.NewMerger
                                                           (repo));

                merger.SetWorkingTreeIterator(new FileTreeIterator(repo));

                merger.SetBase(srcBase);

                bool noProblems;
                IDictionary <string, MergeResult <NGit.Diff.Sequence> > lowLevelResults = null;
                IDictionary <string, ResolveMerger.MergeFailureReason>  failingPaths    = null;
                IList <string> modifiedFiles = null;

                ResolveMerger resolveMerger = (ResolveMerger)merger;
                resolveMerger.SetCommitNames(new string[] { "BASE", "HEAD", sourceDisplayName });
                noProblems      = merger.Merge(headCommit, srcCommit);
                lowLevelResults = resolveMerger.GetMergeResults();
                modifiedFiles   = resolveMerger.GetModifiedFiles();
                failingPaths    = resolveMerger.GetFailingPaths();

                if (monitor != null)
                {
                    monitor.Update(50);
                }

                if (noProblems)
                {
                    if (modifiedFiles != null && modifiedFiles.Count == 0)
                    {
                        return(new MergeCommandResult(headCommit, null, new ObjectId[] { headCommit.Id, srcCommit
                                                                                         .Id }, MergeStatus.ALREADY_UP_TO_DATE, MergeStrategy.RESOLVE, null, null));
                    }
                    DirCacheCheckout dco = new DirCacheCheckout(repo, headCommit.Tree, repo.LockDirCache
                                                                    (), merger.GetResultTreeId());
                    dco.SetFailOnConflict(true);
                    dco.Checkout();
                    if (commitResult)
                    {
                        newHead = new NGit.Api.Git(repo).Commit().SetMessage(srcCommit.GetFullMessage()
                                                                             ).SetAuthor(srcCommit.GetAuthorIdent()).Call();
                        return(new MergeCommandResult(newHead.Id, null, new ObjectId[] { headCommit.Id, srcCommit
                                                                                         .Id }, MergeStatus.MERGED, MergeStrategy.RESOLVE, null, null));
                    }
                    else
                    {
                        return(new MergeCommandResult(headCommit, null, new ObjectId[] { headCommit.Id, srcCommit
                                                                                         .Id }, MergeStatus.MERGED, MergeStrategy.RESOLVE, null, null));
                    }
                }
                else
                {
                    if (failingPaths != null)
                    {
                        return(new MergeCommandResult(null, merger.GetBaseCommit(0, 1), new ObjectId[] {
                            headCommit.Id, srcCommit.Id
                        }, MergeStatus.FAILED, MergeStrategy.RESOLVE, lowLevelResults
                                                      , failingPaths, null));
                    }
                    else
                    {
                        return(new MergeCommandResult(null, merger.GetBaseCommit(0, 1), new ObjectId[] {
                            headCommit.Id, srcCommit.Id
                        }, MergeStatus.CONFLICTING, MergeStrategy.RESOLVE, lowLevelResults
                                                      , null));
                    }
                }
            }
            finally
            {
                revWalk.Release();
            }
        }