Example No. 1
 public override void BeginTask(string title, int totalWork)
 {
     if (!IsMainThread())
     {
         throw new InvalidOperationException();
     }
     pm.BeginTask(title, totalWork);
 }
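All of the samples on this page drive the same contract: BeginTask opens a task with a number of work units, Update advances it, and EndTask closes it. A minimal consumer-side sketch of that pairing, assuming only the ProgressMonitor members shown in these examples (the method itself and its arguments are hypothetical):

 // Hedged sketch of the BeginTask/Update/EndTask pairing used throughout these examples.
 public static void ReportWork(NGit.ProgressMonitor monitor, int items)
 {
     monitor.BeginTask("Processing items", items);
     for (int i = 0; i < items; i++)
     {
         if (monitor.IsCancelled())
         {
             break;             // stop early if the user cancelled
         }
         monitor.Update(1);     // one completed unit of work
     }
     monitor.EndTask();
 }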
Example No. 2
        internal MergeCommandResult Apply(NGit.ProgressMonitor monitor, Stash stash)
        {
            monitor.Start(1);
            monitor.BeginTask("Applying stash", 100);
            ObjectId  cid     = _repo.Resolve(stash.CommitId);
            RevWalk   rw      = new RevWalk(_repo);
            RevCommit wip     = rw.ParseCommit(cid);
            RevCommit oldHead = wip.Parents.First();

            rw.ParseHeaders(oldHead);
            MergeCommandResult res = GitUtil.MergeTrees(monitor, _repo, oldHead, wip, "Stash", false);

            monitor.EndTask();
            return(res);
        }
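Note the Start(1) call before BeginTask: Start declares how many top-level tasks will be reported, while BeginTask opens one of them with its own work-unit total (100 here, so any later Update calls read as percentages). A hedged sketch of that two-level structure, with placeholder task names:

        // Hedged sketch: Start declares the number of tasks, BeginTask sizes each one.
        internal static void ReportTwoTasks(NGit.ProgressMonitor monitor)
        {
            monitor.Start(2);                      // two tasks will follow
            monitor.BeginTask("First task", 100);  // 100 units, percentage-style
            monitor.Update(100);
            monitor.EndTask();
            monitor.BeginTask("Second task", NGit.ProgressMonitor.UNKNOWN);
            monitor.EndTask();
        }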
Example No. 3
        /// <summary>Execute this batch update.</summary>
        /// <remarks>
        /// Execute this batch update.
        /// <p>
        /// The default implementation of this method performs a sequential reference
        /// update over each reference.
        /// </remarks>
        /// <param name="walk">
        /// a RevWalk to parse tags in case the storage system wants to
        /// store them pre-peeled, a common performance optimization.
        /// </param>
        /// <param name="update">progress monitor to receive update status on.</param>
        /// <exception cref="System.IO.IOException">
        /// the database is unable to accept the update. Individual
        /// command status must be tested to determine if there is a
        /// partial failure, or a total failure.
        /// </exception>
        public virtual void Execute(RevWalk walk, ProgressMonitor update)
        {
            update.BeginTask(JGitText.Get().updatingReferences, commands.Count);
            foreach (ReceiveCommand cmd in commands)
            {
                try
                {
                    update.Update(1);
                    if (cmd.GetResult() == ReceiveCommand.Result.NOT_ATTEMPTED)
                    {
                        cmd.UpdateType(walk);
                        RefUpdate ru = NewUpdate(cmd);
                        switch (cmd.GetType())
                        {
                        case ReceiveCommand.Type.DELETE:
                        {
                            cmd.SetResult(ru.Delete(walk));
                            continue;
                        }

                        case ReceiveCommand.Type.CREATE:
                        case ReceiveCommand.Type.UPDATE:
                        case ReceiveCommand.Type.UPDATE_NONFASTFORWARD:
                        {
                            cmd.SetResult(ru.Update(walk));
                            continue;
                        }
                        }
                    }
                }
                catch (IOException err)
                {
                    cmd.SetResult(ReceiveCommand.Result.REJECTED_OTHER_REASON, MessageFormat.Format(JGitText
                                                                                                    .Get().lockError, err.Message));
                }
            }
            update.EndTask();
        }
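A hedged sketch of how a caller might drive the Execute overload above; BatchRefUpdate and Repository are assumed type names (the sample does not show its enclosing class), and the RevWalk handling mirrors the Release pattern used elsewhere on this page:

        // Hedged usage sketch; "batch" and "repository" are supplied by the caller.
        internal static void RunBatch(BatchRefUpdate batch, Repository repository)
        {
            RevWalk walk = new RevWalk(repository);
            try
            {
                // NullProgressMonitor.INSTANCE silently discards all progress reports.
                batch.Execute(walk, NullProgressMonitor.INSTANCE);
            }
            finally
            {
                walk.Release();
            }
        }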
Example No. 4
		/// <summary>Run the diff operation.</summary>
		/// <remarks>
		/// Run the diff operation. Until this is called, all lists will be empty.
		/// <p>
		/// The operation may be aborted by the progress monitor. In that event it
		/// will report what was found before the cancel operation was detected.
		/// Callers should ignore the result if monitor.isCancelled() is true. If a
		/// progress monitor is not needed, callers should use
		/// <see cref="Diff()">Diff()</see>
		/// instead. Progress reporting is crude and approximate and only intended
		/// for informing the user.
		/// </remarks>
		/// <param name="monitor">for reporting progress, may be null</param>
		/// <param name="estWorkTreeSize">number of estimated files in the working tree</param>
		/// <param name="estIndexSize">number of estimated entries in the cache</param>
		/// <param name="title">title of the progress task reported to the monitor</param>
		/// <returns>true if anything is different between index, tree, and workdir</returns>
		/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
		public virtual bool Diff(ProgressMonitor monitor, int estWorkTreeSize, int estIndexSize
			, string title)
		{
			dirCache = repository.ReadDirCache();
			TreeWalk treeWalk = new TreeWalk(repository);
			treeWalk.Recursive = true;
			// add the trees (tree, dircache, workdir)
			if (tree != null)
			{
				treeWalk.AddTree(tree);
			}
			else
			{
				treeWalk.AddTree(new EmptyTreeIterator());
			}
			treeWalk.AddTree(new DirCacheIterator(dirCache));
			treeWalk.AddTree(initialWorkingTreeIterator);
			ICollection<TreeFilter> filters = new AList<TreeFilter>(4);
			if (monitor != null)
			{
				// Get the maximum size of the work tree and index
				// and add some (quite arbitrary)
				if (estIndexSize == 0)
				{
					estIndexSize = dirCache.GetEntryCount();
				}
				int total = Math.Max(estIndexSize * 10 / 9, estWorkTreeSize * 10 / 9);
				monitor.BeginTask(title, total);
				filters.AddItem(new IndexDiff.ProgressReportingFilter(monitor, total));
			}
			if (filter != null)
			{
				filters.AddItem(filter);
			}
			filters.AddItem(new SkipWorkTreeFilter(INDEX));
			filters.AddItem(new IndexDiffFilter(INDEX, WORKDIR));
			treeWalk.Filter = AndTreeFilter.Create(filters);
			while (treeWalk.Next())
			{
				AbstractTreeIterator treeIterator = treeWalk.GetTree<AbstractTreeIterator>(TREE);
				DirCacheIterator dirCacheIterator = treeWalk.GetTree<DirCacheIterator>(INDEX);
				WorkingTreeIterator workingTreeIterator = treeWalk.GetTree<WorkingTreeIterator>(WORKDIR
					);
				if (treeIterator != null)
				{
					if (dirCacheIterator != null)
					{
						if (!treeIterator.IdEqual(dirCacheIterator) || treeIterator.EntryRawMode != dirCacheIterator
							.EntryRawMode)
						{
							// in repo, in index, content diff => changed
							changed.AddItem(treeWalk.PathString);
						}
					}
					else
					{
						// in repo, not in index => removed
						removed.AddItem(treeWalk.PathString);
						if (workingTreeIterator != null)
						{
							untracked.AddItem(treeWalk.PathString);
						}
					}
				}
				else
				{
					if (dirCacheIterator != null)
					{
						// not in repo, in index => added
						added.AddItem(treeWalk.PathString);
					}
					else
					{
						// not in repo, not in index => untracked
						if (workingTreeIterator != null && !workingTreeIterator.IsEntryIgnored())
						{
							untracked.AddItem(treeWalk.PathString);
						}
					}
				}
				if (dirCacheIterator != null)
				{
					if (workingTreeIterator == null)
					{
						// in index, not in workdir => missing
						missing.AddItem(treeWalk.PathString);
					}
					else
					{
						if (workingTreeIterator.IsModified(dirCacheIterator.GetDirCacheEntry(), true))
						{
							// in index, in workdir, content differs => modified
							modified.AddItem(treeWalk.PathString);
						}
					}
				}
			}
			// consume the remaining work
			if (monitor != null)
			{
				monitor.EndTask();
			}
			if (added.IsEmpty() && changed.IsEmpty() && removed.IsEmpty() && missing.IsEmpty(
				) && modified.IsEmpty() && untracked.IsEmpty())
			{
				return false;
			}
			else
			{
				return true;
			}
		}
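Per the remarks, the result should be discarded when the monitor reports cancellation, and passing 0 for estIndexSize lets the method fall back to the index entry count. A hedged caller-side sketch; the IndexDiff instance and monitor are assumed to be configured elsewhere:

		// Hedged sketch of calling the overload above with zero size estimates.
		internal static bool DiffWithProgress(IndexDiff indexDiff, ProgressMonitor monitor)
		{
			bool different = indexDiff.Diff(monitor, 0, 0, "Comparing index and work tree");
			if (monitor != null && monitor.IsCancelled())
			{
				return false;   // per the remarks, results are incomplete after a cancel
			}
			return different;
		}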
Example No. 5
		private ICollection<WalkRemoteObjectDatabase> ExpandOneAlternate(AnyObjectId id, 
			ProgressMonitor pm)
		{
			while (!noAlternatesYet.IsEmpty())
			{
				WalkRemoteObjectDatabase wrr = noAlternatesYet.RemoveFirst();
				try
				{
					pm.BeginTask(JGitText.Get().listingAlternates, ProgressMonitor.UNKNOWN);
					ICollection<WalkRemoteObjectDatabase> altList = wrr.GetAlternates();
					if (altList != null && !altList.IsEmpty())
					{
						return altList;
					}
				}
				catch (IOException e)
				{
					// Try another repository.
					//
					RecordError(id, e);
				}
				finally
				{
					pm.EndTask();
				}
			}
			return null;
		}
Example No. 6
		/// <exception cref="NGit.Errors.TransportException"></exception>
		private void DownloadObject(ProgressMonitor pm, AnyObjectId id)
		{
			if (AlreadyHave(id))
			{
				return;
			}
			for (; ; )
			{
				// Try a pack file we know about, but don't have yet. Odds are
				// that if it has this object, it has others related to it so
				// getting the pack is a good bet.
				//
				if (DownloadPackedObject(pm, id))
				{
					return;
				}
				// Search for a loose object over all alternates, starting
				// from the one we last successfully located an object through.
				//
				string idStr = id.Name;
				string subdir = Sharpen.Runtime.Substring(idStr, 0, 2);
				string file = Sharpen.Runtime.Substring(idStr, 2);
				string looseName = subdir + "/" + file;
				for (int i = lastRemoteIdx; i < remotes.Count; i++)
				{
					if (DownloadLooseObject(id, looseName, remotes[i]))
					{
						lastRemoteIdx = i;
						return;
					}
				}
				for (int i_1 = 0; i_1 < lastRemoteIdx; i_1++)
				{
					if (DownloadLooseObject(id, looseName, remotes[i_1]))
					{
						lastRemoteIdx = i_1;
						return;
					}
				}
				// Try to obtain more pack information and search those.
				//
				while (!noPacksYet.IsEmpty())
				{
					WalkRemoteObjectDatabase wrr = noPacksYet.RemoveFirst();
					ICollection<string> packNameList;
					try
					{
						pm.BeginTask("Listing packs", ProgressMonitor.UNKNOWN);
						packNameList = wrr.GetPackNames();
					}
					catch (IOException e)
					{
						// Try another repository.
						//
						RecordError(id, e);
						continue;
					}
					finally
					{
						pm.EndTask();
					}
					if (packNameList == null || packNameList.IsEmpty())
					{
						continue;
					}
					foreach (string packName in packNameList)
					{
						if (packsConsidered.AddItem(packName))
						{
							unfetchedPacks.AddItem(new WalkFetchConnection.RemotePack(this, wrr, packName));
						}
					}
					if (DownloadPackedObject(pm, id))
					{
						return;
					}
				}
				// Try to expand the first alternate we haven't expanded yet.
				//
				ICollection<WalkRemoteObjectDatabase> al = ExpandOneAlternate(id, pm);
				if (al != null && !al.IsEmpty())
				{
					foreach (WalkRemoteObjectDatabase alt in al)
					{
						remotes.AddItem(alt);
						noPacksYet.AddItem(alt);
						noAlternatesYet.AddItem(alt);
					}
					continue;
				}
				// We could not obtain the object. There may be reasons why.
				//
				IList<Exception> failures = fetchErrors.Get((ObjectId)id);
				TransportException te;
				te = new TransportException(MessageFormat.Format(JGitText.Get().cannotGet, id.Name
					));
				if (failures != null && !failures.IsEmpty())
				{
					if (failures.Count == 1)
					{
						Sharpen.Extensions.InitCause(te, failures[0]);
					}
					else
					{
						Sharpen.Extensions.InitCause(te, new CompoundException(failures));
					}
				}
				throw te;
			}
		}
Example No. 7
			/// <exception cref="System.IO.IOException"></exception>
			internal virtual void OpenIndex(ProgressMonitor pm)
			{
				if (this.index != null)
				{
					return;
				}
				if (this.tmpIdx == null)
				{
					this.tmpIdx = FilePath.CreateTempFile("jgit-walk-", ".idx");
				}
				else
				{
					if (this.tmpIdx.IsFile())
					{
						try
						{
							this.index = PackIndex.Open(this.tmpIdx);
							return;
						}
						catch (FileNotFoundException)
						{
						}
					}
				}
				// Fall through and get the file.
				WalkRemoteObjectDatabase.FileStream s;
				s = this.connection.Open("pack/" + this.idxName);
				pm.BeginTask("Get " + Sharpen.Runtime.Substring(this.idxName, 0, 12) + "..idx", s
					.length < 0 ? ProgressMonitor.UNKNOWN : (int)(s.length / 1024));
				try
				{
					FileOutputStream fos = new FileOutputStream(this.tmpIdx);
					try
					{
						byte[] buf = new byte[2048];
						int cnt;
						while (!pm.IsCancelled() && (cnt = s.@in.Read(buf)) >= 0)
						{
							fos.Write(buf, 0, cnt);
							pm.Update(cnt / 1024);
						}
					}
					finally
					{
						fos.Close();
					}
				}
				catch (IOException)
				{
					FileUtils.Delete(this.tmpIdx);
					throw;
				}
				finally
				{
					s.@in.Close();
				}
				pm.EndTask();
				if (pm.IsCancelled())
				{
					FileUtils.Delete(this.tmpIdx);
					return;
				}
				try
				{
					this.index = PackIndex.Open(this.tmpIdx);
				}
				catch (IOException)
				{
					FileUtils.Delete(this.tmpIdx);
					throw;
				}
			}
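Two details in OpenIndex are worth noting: when the content length is unknown the task is opened with ProgressMonitor.UNKNOWN, and when it is known the work units are kilobytes, so Update(cnt / 1024) stays in the same scale as the BeginTask total. A hedged sketch of that download-progress idiom over plain .NET streams (everything except the ProgressMonitor calls is an assumption):

			// Hedged sketch of kilobyte-scaled progress for a stream of known or unknown length.
			internal static void CopyWithProgress(System.IO.Stream source, System.IO.Stream sink,
				long length, NGit.ProgressMonitor pm)
			{
				pm.BeginTask("Downloading", length < 0
					? NGit.ProgressMonitor.UNKNOWN : (int)(length / 1024));
				byte[] buf = new byte[2048];
				int cnt;
				while (!pm.IsCancelled() && (cnt = source.Read(buf, 0, buf.Length)) > 0)
				{
					sink.Write(buf, 0, cnt);
					pm.Update(cnt / 1024);  // report in the same kilobyte units
				}
				pm.EndTask();
			}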
Example No. 8
		// By default there is no locking.
		/// <exception cref="System.IO.IOException"></exception>
		private void ResolveDeltas(ProgressMonitor progress)
		{
			progress.BeginTask(JGitText.Get().resolvingDeltas, deltaCount);
			int last = entryCount;
			for (int i = 0; i < last; i++)
			{
				int before = entryCount;
				ResolveDeltas(entries[i]);
				progress.Update(entryCount - before);
				if (progress.IsCancelled())
				{
					throw new IOException(JGitText.Get().downloadCancelledDuringIndexing);
				}
			}
			progress.EndTask();
		}
Example No. 9
        /// <summary>Run the diff operation.</summary>
        /// <remarks>
        /// Run the diff operation. Until this is called, all lists will be empty.
        /// <p>
        /// The operation may be aborted by the progress monitor. In that event it
        /// will report what was found before the cancel operation was detected.
        /// Callers should ignore the result if monitor.isCancelled() is true. If a
        /// progress monitor is not needed, callers should use
        /// <see cref="Diff()">Diff()</see>
        /// instead. Progress reporting is crude and approximate and only intended
        /// for informing the user.
        /// </remarks>
        /// <param name="monitor">for reporting progress, may be null</param>
        /// <param name="estWorkTreeSize">number of estimated files in the working tree</param>
        /// <param name="estIndexSize">number of estimated entries in the cache</param>
        /// <param name="title">title of the progress task reported to the monitor</param>
        /// <returns>true if anything is different between index, tree, and workdir</returns>
        /// <exception cref="System.IO.IOException">System.IO.IOException</exception>
        public virtual bool Diff(ProgressMonitor monitor, int estWorkTreeSize, int estIndexSize
                                 , string title)
        {
            dirCache = repository.ReadDirCache();
            TreeWalk treeWalk = new TreeWalk(repository);

            treeWalk.Recursive = true;
            // add the trees (tree, dircache, workdir)
            if (tree != null)
            {
                treeWalk.AddTree(tree);
            }
            else
            {
                treeWalk.AddTree(new EmptyTreeIterator());
            }
            treeWalk.AddTree(new DirCacheIterator(dirCache));
            treeWalk.AddTree(initialWorkingTreeIterator);
            ICollection <TreeFilter> filters = new AList <TreeFilter>(4);

            if (monitor != null)
            {
                // Get the maximum size of the work tree and index
                // and add some (quite arbitrary)
                if (estIndexSize == 0)
                {
                    estIndexSize = dirCache.GetEntryCount();
                }
                int total = Math.Max(estIndexSize * 10 / 9, estWorkTreeSize * 10 / 9);
                monitor.BeginTask(title, total);
                filters.AddItem(new IndexDiff.ProgressReportingFilter(monitor, total));
            }
            if (filter != null)
            {
                filters.AddItem(filter);
            }
            filters.AddItem(new SkipWorkTreeFilter(INDEX));
            filters.AddItem(new IndexDiffFilter(INDEX, WORKDIR));
            treeWalk.Filter = AndTreeFilter.Create(filters);
            while (treeWalk.Next())
            {
                AbstractTreeIterator treeIterator        = treeWalk.GetTree <AbstractTreeIterator>(TREE);
                DirCacheIterator     dirCacheIterator    = treeWalk.GetTree <DirCacheIterator>(INDEX);
                WorkingTreeIterator  workingTreeIterator = treeWalk.GetTree <WorkingTreeIterator>(WORKDIR
                                                                                                  );
                if (dirCacheIterator != null)
                {
                    DirCacheEntry dirCacheEntry = dirCacheIterator.GetDirCacheEntry();
                    if (dirCacheEntry != null && dirCacheEntry.Stage > 0)
                    {
                        conflicts.AddItem(treeWalk.PathString);
                        continue;
                    }
                }
                if (treeIterator != null)
                {
                    if (dirCacheIterator != null)
                    {
                        if (!treeIterator.IdEqual(dirCacheIterator) || treeIterator.EntryRawMode != dirCacheIterator
                            .EntryRawMode)
                        {
                            // in repo, in index, content diff => changed
                            changed.AddItem(treeWalk.PathString);
                        }
                    }
                    else
                    {
                        // in repo, not in index => removed
                        removed.AddItem(treeWalk.PathString);
                        if (workingTreeIterator != null)
                        {
                            untracked.AddItem(treeWalk.PathString);
                        }
                    }
                }
                else
                {
                    if (dirCacheIterator != null)
                    {
                        // not in repo, in index => added
                        added.AddItem(treeWalk.PathString);
                    }
                    else
                    {
                        // not in repo, not in index => untracked
                        if (workingTreeIterator != null && !workingTreeIterator.IsEntryIgnored())
                        {
                            untracked.AddItem(treeWalk.PathString);
                        }
                    }
                }
                if (dirCacheIterator != null)
                {
                    if (workingTreeIterator == null)
                    {
                        // in index, not in workdir => missing
                        missing.AddItem(treeWalk.PathString);
                    }
                    else
                    {
                        if (workingTreeIterator.IsModified(dirCacheIterator.GetDirCacheEntry(), true))
                        {
                            // in index, in workdir, content differs => modified
                            modified.AddItem(treeWalk.PathString);
                        }
                    }
                }
            }
            // consume the remaining work
            if (monitor != null)
            {
                monitor.EndTask();
            }
            if (added.IsEmpty() && changed.IsEmpty() && removed.IsEmpty() && missing.IsEmpty(
                    ) && modified.IsEmpty() && untracked.IsEmpty())
            {
                return(false);
            }
            else
            {
                return(true);
            }
        }
Example No. 10
		/// <summary>
		/// Perform push operation between local and remote repository - set remote
		/// refs appropriately, send needed objects and update local tracking refs.
		/// </summary>
		/// <remarks>
		/// Perform push operation between local and remote repository - set remote
		/// refs appropriately, send needed objects and update local tracking refs.
		/// <p>
		/// When
		/// <see cref="Transport.IsDryRun()">Transport.IsDryRun()</see>
		/// is true, result of this operation is
		/// just estimation of real operation result, no real action is performed.
		/// </remarks>
		/// <param name="monitor">progress monitor used for feedback about operation.</param>
		/// <returns>result of push operation with complete status description.</returns>
		/// <exception cref="System.NotSupportedException">when push operation is not supported by provided transport.
		/// 	</exception>
		/// <exception cref="NGit.Errors.TransportException">
		/// when some error occurred during operation, like I/O, protocol
		/// error, or local database consistency error.
		/// </exception>
		internal virtual PushResult Execute(ProgressMonitor monitor)
		{
			try
			{
				monitor.BeginTask(PROGRESS_OPENING_CONNECTION, ProgressMonitor.UNKNOWN);
				PushResult res = new PushResult();
				connection = transport.OpenPush();
				try
				{
					res.SetAdvertisedRefs(transport.GetURI(), connection.GetRefsMap());
					res.SetRemoteUpdates(toPush);
					monitor.EndTask();
					IDictionary<string, RemoteRefUpdate> preprocessed = PrepareRemoteUpdates();
					if (transport.IsDryRun())
					{
						ModifyUpdatesForDryRun();
					}
					else
					{
						if (!preprocessed.IsEmpty())
						{
							connection.Push(monitor, preprocessed);
						}
					}
				}
				finally
				{
					connection.Close();
					res.AddMessages(connection.GetMessages());
				}
				if (!transport.IsDryRun())
				{
					UpdateTrackingRefs();
				}
				foreach (RemoteRefUpdate rru in toPush.Values)
				{
					TrackingRefUpdate tru = rru.GetTrackingRefUpdate();
					if (tru != null)
					{
						res.Add(tru);
					}
				}
				return res;
			}
			finally
			{
				walker.Release();
			}
		}
Example No. 11
 /// <exception cref="System.NotSupportedException"></exception>
 /// <exception cref="NGit.Errors.TransportException"></exception>
 private void ExecuteImp(ProgressMonitor monitor, FetchResult result)
 {
     conn = transport.OpenFetch();
     try
     {
         result.SetAdvertisedRefs(transport.GetURI(), conn.GetRefsMap());
         ICollection<Ref> matched = new HashSet<Ref>();
         foreach (RefSpec spec in toFetch)
         {
             if (spec.GetSource() == null)
             {
                 throw new TransportException(MessageFormat.Format(JGitText.Get().sourceRefNotSpecifiedForRefspec
                     , spec));
             }
             if (spec.IsWildcard())
             {
                 ExpandWildcard(spec, matched);
             }
             else
             {
                 ExpandSingle(spec, matched);
             }
         }
         ICollection<Ref> additionalTags = Sharpen.Collections.EmptyList<Ref>();
         TagOpt tagopt = transport.GetTagOpt();
         if (tagopt == TagOpt.AUTO_FOLLOW)
         {
             additionalTags = ExpandAutoFollowTags();
         }
         else
         {
             if (tagopt == TagOpt.FETCH_TAGS)
             {
                 ExpandFetchTags();
             }
         }
         bool includedTags;
         if (!askFor.IsEmpty() && !AskForIsComplete())
         {
             FetchObjects(monitor);
             includedTags = conn.DidFetchIncludeTags();
             // Connection was used for object transfer. If we
             // do another fetch we must open a new connection.
             //
             CloseConnection(result);
         }
         else
         {
             includedTags = false;
         }
         if (tagopt == TagOpt.AUTO_FOLLOW && !additionalTags.IsEmpty())
         {
             // There are more tags that we want to follow, but
             // not all were asked for on the initial request.
             //
             Sharpen.Collections.AddAll(have, askFor.Keys);
             askFor.Clear();
             foreach (Ref r in additionalTags)
             {
                 ObjectId id = r.GetPeeledObjectId();
                 if (id == null)
                 {
                     id = r.GetObjectId();
                 }
                 if (transport.local.HasObject(id))
                 {
                     WantTag(r);
                 }
             }
             if (!askFor.IsEmpty() && (!includedTags || !AskForIsComplete()))
             {
                 ReopenConnection();
                 if (!askFor.IsEmpty())
                 {
                     FetchObjects(monitor);
                 }
             }
         }
     }
     finally
     {
         CloseConnection(result);
     }
     RevWalk walk = new RevWalk(transport.local);
     try
     {
         if (monitor is BatchingProgressMonitor)
         {
             ((BatchingProgressMonitor)monitor).SetDelayStart(250, TimeUnit.MILLISECONDS);
         }
         monitor.BeginTask(JGitText.Get().updatingReferences, localUpdates.Count);
         if (transport.IsRemoveDeletedRefs())
         {
             DeleteStaleTrackingRefs(result, walk);
         }
         foreach (TrackingRefUpdate u in localUpdates)
         {
             try
             {
                 monitor.Update(1);
                 u.Update(walk);
                 result.Add(u);
             }
             catch (IOException err)
             {
                 throw new TransportException(MessageFormat.Format(JGitText.Get().failureUpdatingTrackingRef
                     , u.GetLocalName(), err.Message), err);
             }
         }
         monitor.EndTask();
     }
     finally
     {
         walk.Release();
     }
     if (!fetchHeadUpdates.IsEmpty())
     {
         try
         {
             UpdateFETCH_HEAD(result);
         }
         catch (IOException err)
         {
             throw new TransportException(MessageFormat.Format(JGitText.Get().failureUpdatingFETCH_HEAD
                 , err.Message), err);
         }
     }
 }
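The SetDelayStart call above is an anti-flicker measure: when the monitor happens to be a BatchingProgressMonitor, the first output is postponed so that very short reference updates never display progress at all. A hedged sketch of that guard in isolation (the task title and count are placeholders):

     // Hedged sketch of the delayed-start guard used above.
     internal static void BeginWithDelay(NGit.ProgressMonitor monitor, int updateCount)
     {
         if (monitor is BatchingProgressMonitor)
         {
             // Suppress output for the first 250 ms; trivial updates finish before it shows.
             ((BatchingProgressMonitor)monitor).SetDelayStart(250, TimeUnit.MILLISECONDS);
         }
         monitor.BeginTask("Updating references", updateCount);
     }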
Example No. 12
		/// <exception cref="System.IO.IOException"></exception>
		private void PutImpl(string bucket, string key, byte[] csum, TemporaryBuffer buf, 
			ProgressMonitor monitor, string monitorTask)
		{
			if (monitor == null)
			{
				monitor = NullProgressMonitor.INSTANCE;
			}
			if (monitorTask == null)
			{
				monitorTask = MessageFormat.Format(JGitText.Get().progressMonUploading, key);
			}
			string md5str = Base64.EncodeBytes(csum);
			long len = buf.Length();
			string lenstr = len.ToString();
			for (int curAttempt = 0; curAttempt < maxAttempts; curAttempt++)
			{
				HttpURLConnection c = Open("PUT", bucket, key);
				c.SetRequestProperty("Content-Length", lenstr);
				c.SetRequestProperty("Content-MD5", md5str);
				c.SetRequestProperty(X_AMZ_ACL, acl);
				encryption.Request(c, X_AMZ_META);
				Authorize(c);
				c.SetDoOutput(true);
				c.SetFixedLengthStreamingMode((int)len);
				monitor.BeginTask(monitorTask, (int)(len / 1024));
				OutputStream os = c.GetOutputStream();
				try
				{
					buf.WriteTo(os, monitor);
				}
				finally
				{
					monitor.EndTask();
					os.Close();
				}
				switch (HttpSupport.Response(c))
				{
					case HttpURLConnection.HTTP_OK:
					{
						return;
					}

					case HttpURLConnection.HTTP_INTERNAL_ERROR:
					{
						continue;
					}

					default:
					{
						throw Error("Writing", key, c);
					}
				}
			}
			throw MaxAttempts("Writing", key);
		}
Example No. 13
        public Stash Create(NGit.ProgressMonitor monitor, string message)
        {
            if (monitor != null)
            {
                monitor.Start(1);
                monitor.BeginTask("Stashing changes", 100);
            }

            UserConfig config = _repo.GetConfig().Get(UserConfig.KEY);
            RevWalk    rw     = new RevWalk(_repo);
            ObjectId   headId = _repo.Resolve(Constants.HEAD);
            var        parent = rw.ParseCommit(headId);

            PersonIdent author = new PersonIdent(config.GetAuthorName() ?? "unknown", config.GetAuthorEmail() ?? "unknown@(none).");

            if (string.IsNullOrEmpty(message))
            {
                // Use the commit summary as message
                message = parent.Abbreviate(7).ToString() + " " + parent.GetShortMessage();
                int i = message.IndexOfAny(new char[] { '\r', '\n' });
                if (i != -1)
                {
                    message = message.Substring(0, i);
                }
            }

            // Create the index tree commit
            ObjectInserter inserter = _repo.NewObjectInserter();
            DirCache       dc       = _repo.ReadDirCache();

            if (monitor != null)
            {
                monitor.Update(10);
            }

            var tree_id = dc.WriteTree(inserter);

            inserter.Release();

            if (monitor != null)
            {
                monitor.Update(10);
            }

            string   commitMsg   = "index on " + _repo.GetBranch() + ": " + message;
            ObjectId indexCommit = GitUtil.CreateCommit(_repo, commitMsg + "\n", new ObjectId[] { headId }, tree_id, author, author);

            if (monitor != null)
            {
                monitor.Update(20);
            }

            // Create the working dir commit
            tree_id   = WriteWorkingDirectoryTree(parent.Tree, dc);
            commitMsg = "WIP on " + _repo.GetBranch() + ": " + message;
            var wipCommit = GitUtil.CreateCommit(_repo, commitMsg + "\n", new ObjectId[] { headId, indexCommit }, tree_id, author, author);

            if (monitor != null)
            {
                monitor.Update(20);
            }

            string   prevCommit = null;
            FileInfo sf         = StashRefFile;

            if (sf.Exists)
            {
                prevCommit = File.ReadAllText(sf.FullName).Trim(' ', '\t', '\r', '\n');
            }

            Stash s = new Stash(prevCommit, wipCommit.Name, author, commitMsg);

            FileInfo stashLog = StashLogFile;

            File.AppendAllText(stashLog.FullName, s.FullLine + "\n");
            File.WriteAllText(sf.FullName, s.CommitId + "\n");

            if (monitor != null)
            {
                monitor.Update(5);
            }

            // Wipe all local changes
            GitUtil.HardReset(_repo, Constants.HEAD);

            if (monitor != null)
            {
                monitor.EndTask();
            }
            s.StashCollection = this;
            return(s);
        }
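Create guards almost every monitor call with a null check; the PutImpl example above takes the simpler route of substituting NullProgressMonitor.INSTANCE once at the top, which removes the need for per-call guards. A hedged sketch of that substitution:

        // Hedged sketch: normalise a possibly-null monitor once instead of guarding
        // every call site, mirroring the PutImpl example above.
        internal static NGit.ProgressMonitor NonNull(NGit.ProgressMonitor monitor)
        {
            return monitor ?? NGit.NullProgressMonitor.INSTANCE;
        }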
Example No. 14
		/// <summary>Execute this batch update.</summary>
		/// <remarks>
		/// Execute this batch update.
		/// <p>
		/// The default implementation of this method performs a sequential reference
		/// update over each reference.
		/// </remarks>
		/// <param name="walk">
		/// a RevWalk to parse tags in case the storage system wants to
		/// store them pre-peeled, a common performance optimization.
		/// </param>
		/// <param name="update">progress monitor to receive update status on.</param>
		/// <exception cref="System.IO.IOException">
		/// the database is unable to accept the update. Individual
		/// command status must be tested to determine if there is a
		/// partial failure, or a total failure.
		/// </exception>
		public virtual void Execute(RevWalk walk, ProgressMonitor update)
		{
			update.BeginTask(JGitText.Get().updatingReferences, commands.Count);
			foreach (ReceiveCommand cmd in commands)
			{
				try
				{
					update.Update(1);
					if (cmd.GetResult() == ReceiveCommand.Result.NOT_ATTEMPTED)
					{
						cmd.UpdateType(walk);
						RefUpdate ru = NewUpdate(cmd);
						switch (cmd.GetType())
						{
							case ReceiveCommand.Type.DELETE:
							{
								cmd.SetResult(ru.Delete(walk));
								continue;
							}

							case ReceiveCommand.Type.CREATE:
							case ReceiveCommand.Type.UPDATE:
							case ReceiveCommand.Type.UPDATE_NONFASTFORWARD:
							{
								cmd.SetResult(ru.Update(walk));
								continue;
							}
						}
					}
				}
				catch (IOException err)
				{
					cmd.SetResult(ReceiveCommand.Result.REJECTED_OTHER_REASON, MessageFormat.Format(JGitText
						.Get().lockError, err.Message));
				}
			}
			update.EndTask();
		}
Example No. 15
		/// <exception cref="System.IO.IOException"></exception>
		internal virtual void Compute(ProgressMonitor pm)
		{
			if (pm == null)
			{
				pm = NullProgressMonitor.INSTANCE;
			}
			pm.BeginTask(JGitText.Get().renamesFindingByContent, 2 * srcs.Count * dsts.Count);
			//
			int mNext = BuildMatrix(pm);
			@out = new AList<DiffEntry>(Math.Min(mNext, dsts.Count));
			// Match rename pairs on a first come, first serve basis until
			// we have looked at everything that is above our minimum score.
			//
			for (--mNext; mNext >= 0; mNext--)
			{
				long ent = matrix[mNext];
				int sIdx = SrcFile(ent);
				int dIdx = DstFile(ent);
				DiffEntry s = srcs[sIdx];
				DiffEntry d = dsts[dIdx];
				if (d == null)
				{
					pm.Update(1);
					continue;
				}
				// was already matched earlier
				DiffEntry.ChangeType type;
				if (s.changeType == DiffEntry.ChangeType.DELETE)
				{
					// First use of this source file. Tag it as a rename so we
				// later know it has already been used as a rename; other
					// matches (if any) will claim themselves as copies instead.
					//
					s.changeType = DiffEntry.ChangeType.RENAME;
					type = DiffEntry.ChangeType.RENAME;
				}
				else
				{
					type = DiffEntry.ChangeType.COPY;
				}
				@out.AddItem(DiffEntry.Pair(type, s, d, Score(ent)));
				dsts.Set(dIdx, null);
				// Claim the destination was matched.
				pm.Update(1);
			}
			srcs = CompactSrcList(srcs);
			dsts = CompactDstList(dsts);
			pm.EndTask();
		}
Example No. 16
		/// <summary>Parse the pack stream.</summary>
		/// <remarks>Parse the pack stream.</remarks>
		/// <param name="receiving">
		/// receives progress feedback during the initial receiving
		/// objects phase. If null,
		/// <see cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</see>
		/// will be
		/// used.
		/// </param>
		/// <param name="resolving">receives progress feedback during the resolving objects phase.
		/// 	</param>
		/// <returns>
		/// the pack lock, if one was requested by setting
		/// <see cref="SetLockMessage(string)">SetLockMessage(string)</see>
		/// .
		/// </returns>
		/// <exception cref="System.IO.IOException">the stream is malformed, or contains corrupt objects.
		/// 	</exception>
		public virtual PackLock Parse(ProgressMonitor receiving, ProgressMonitor resolving
			)
		{
			if (receiving == null)
			{
				receiving = NullProgressMonitor.INSTANCE;
			}
			if (resolving == null)
			{
				resolving = NullProgressMonitor.INSTANCE;
			}
			if (receiving == resolving)
			{
				receiving.Start(2);
			}
			try
			{
				ReadPackHeader();
				entries = new PackedObjectInfo[(int)objectCount];
				baseById = new ObjectIdOwnerMap<PackParser.DeltaChain>();
				baseByPos = new LongMap<PackParser.UnresolvedDelta>();
				deferredCheckBlobs = new BlockList<PackedObjectInfo>();
				receiving.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
				try
				{
					for (int done = 0; done < objectCount; done++)
					{
						IndexOneObject();
						receiving.Update(1);
						if (receiving.IsCancelled())
						{
							throw new IOException(JGitText.Get().downloadCancelled);
						}
					}
					ReadPackFooter();
					EndInput();
				}
				finally
				{
					receiving.EndTask();
				}
				if (!deferredCheckBlobs.IsEmpty())
				{
					DoDeferredCheckBlobs();
				}
				if (deltaCount > 0)
				{
					if (resolving is BatchingProgressMonitor)
					{
						((BatchingProgressMonitor)resolving).SetDelayStart(1000, TimeUnit.MILLISECONDS);
					}
					resolving.BeginTask(JGitText.Get().resolvingDeltas, deltaCount);
					ResolveDeltas(resolving);
					if (entryCount < objectCount)
					{
						if (!IsAllowThin())
						{
							throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
								, Sharpen.Extensions.ValueOf(objectCount - entryCount)));
						}
						ResolveDeltasWithExternalBases(resolving);
						if (entryCount < objectCount)
						{
							throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
								, Sharpen.Extensions.ValueOf(objectCount - entryCount)));
						}
					}
					resolving.EndTask();
				}
				packDigest = null;
				baseById = null;
				baseByPos = null;
			}
			finally
			{
				try
				{
					if (readCurs != null)
					{
						readCurs.Release();
					}
				}
				finally
				{
					readCurs = null;
				}
				try
				{
					inflater.Release();
				}
				finally
				{
					inflater = null;
				}
			}
			return null;
		}
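Parse takes separate monitors for the receiving and resolving phases; passing the same instance twice is fine because, as shown above, the method then calls Start(2) itself. A hedged caller-side sketch, assuming an already-configured PackParser:

		// Hedged sketch of calling Parse with one monitor covering both phases.
		internal static PackLock ParseWithProgress(PackParser parser, ProgressMonitor monitor)
		{
			return parser.Parse(monitor, monitor);
		}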
Example No. 17
		/// <summary>Consume data from the input stream until the packfile is indexed.</summary>
		/// <remarks>Consume data from the input stream until the packfile is indexed.</remarks>
		/// <param name="progress">progress feedback</param>
		/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
		public virtual void Index(ProgressMonitor progress)
		{
			progress.Start(2);
			try
			{
				try
				{
					ReadPackHeader();
					entries = new PackedObjectInfo[(int)objectCount];
					baseById = new ObjectIdSubclassMap<IndexPack.DeltaChain>();
					baseByPos = new LongMap<IndexPack.UnresolvedDelta>();
					deferredCheckBlobs = new AList<PackedObjectInfo>();
					progress.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
					for (int done = 0; done < objectCount; done++)
					{
						IndexOneObject();
						progress.Update(1);
						if (progress.IsCancelled())
						{
							throw new IOException(JGitText.Get().downloadCancelled);
						}
					}
					ReadPackFooter();
					EndInput();
					if (!deferredCheckBlobs.IsEmpty())
					{
						DoDeferredCheckBlobs();
					}
					progress.EndTask();
					if (deltaCount > 0)
					{
						if (packOut == null)
						{
							throw new IOException(JGitText.Get().needPackOut);
						}
						ResolveDeltas(progress);
						if (entryCount < objectCount)
						{
							if (!fixThin)
							{
								throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
									, (objectCount - entryCount)));
							}
							FixThinPack(progress);
						}
					}
					if (packOut != null && (keepEmpty || entryCount > 0))
					{
						packOut.GetChannel().Force(true);
					}
					packDigest = null;
					baseById = null;
					baseByPos = null;
					if (dstIdx != null && (keepEmpty || entryCount > 0))
					{
						WriteIdx();
					}
				}
				finally
				{
					try
					{
						if (readCurs != null)
						{
							readCurs.Release();
						}
					}
					finally
					{
						readCurs = null;
					}
					try
					{
						inflater.Release();
					}
					finally
					{
						inflater = null;
						objectDatabase.Close();
					}
					progress.EndTask();
					if (packOut != null)
					{
						packOut.Close();
					}
				}
				if (keepEmpty || entryCount > 0)
				{
					if (dstPack != null)
					{
						dstPack.SetReadOnly();
					}
					if (dstIdx != null)
					{
						dstIdx.SetReadOnly();
					}
				}
			}
			catch (IOException)
			{
				if (dstPack != null)
				{
					FileUtils.Delete(dstPack);
				}
				if (dstIdx != null)
				{
					FileUtils.Delete(dstIdx);
				}
				throw;
			}
		}