public void GetVersion_PathFilterPlusMerge()
    {
        // Verifies that version-height calculation counts commits reachable through a
        // merge commit when a path filter ("." = whole repo) is configured.
        this.InitializeSourceControl(withInitialCommit: false);
        this.WriteVersionFile(new VersionOptions
        {
            Version     = new SemanticVersion("1.0"),
            PathFilters = new FilterPath[] { new FilterPath(".", string.Empty) },
        });

        string conflictedFilePath = Path.Combine(this.RepoPath, "foo.txt");

        // Commit foo.txt with "foo" content on the original branch.
        File.WriteAllText(conflictedFilePath, "foo");
        Commands.Stage(this.LibGit2Repository, conflictedFilePath);
        this.LibGit2Repository.Commit("Add foo.txt with foo content.", this.Signer, this.Signer);
        Branch originalBranch = this.LibGit2Repository.Head;

        // Create a topic branch off the parent commit and commit a conflicting "bar" version.
        Branch topicBranch = this.LibGit2Repository.Branches.Add("topic", "HEAD~1");

        Commands.Checkout(this.LibGit2Repository, topicBranch);
        File.WriteAllText(conflictedFilePath, "bar");
        Commands.Stage(this.LibGit2Repository, conflictedFilePath);
        this.LibGit2Repository.Commit("Add foo.txt with bar content.", this.Signer, this.Signer);

        // Merge the topic branch back; the conflict is resolved by keeping "ours".
        Commands.Checkout(this.LibGit2Repository, originalBranch);
        MergeResult result = this.LibGit2Repository.Merge(topicBranch, this.Signer, new MergeOptions {
            FileConflictStrategy = CheckoutFileConflictStrategy.Ours
        });

        Assert.Equal(MergeStatus.Conflicts, result.Status);
        Commands.Stage(this.LibGit2Repository, conflictedFilePath);
        this.LibGit2Repository.Commit("Merge two branches", this.Signer, this.Signer);

        // Expected height 3 — presumably version commit + foo commit + merge commit;
        // the topic-branch commit is reachable only through the merge.
        Assert.Equal(3, this.GetVersionHeight());
    }
        public void CanMergeFetchedRefs()
        {
            // Clones a public test repository, rewinds the local branch one commit,
            // fetches from the remote, and merges the fetched refs with fast-forward
            // disabled; expects a NonFastForward (merge-commit) result.
            // NOTE(review): requires network access to github.com.
            string url = "https://github.com/libgit2/TestGitRepository";

            var    scd            = BuildSelfCleaningDirectory();
            string clonedRepoPath = Repository.Clone(url, scd.DirectoryPath);

            using (var repo = new Repository(clonedRepoPath))
            {
                // Step back one commit so there is something to merge after the fetch.
                repo.Reset(ResetMode.Hard, "HEAD~1");

                Assert.False(repo.RetrieveStatus().Any());
                Assert.Equal(repo.Lookup <Commit>("refs/remotes/origin/master~1"), repo.Head.Tip);

                // NOTE(review): repo.Network.Fetch(Remote) is the older LibGit2Sharp
                // fetch API — confirm against the library version in use.
                repo.Network.Fetch(repo.Head.Remote);

                MergeOptions mergeOptions = new MergeOptions()
                {
                    FastForwardStrategy = FastForwardStrategy.NoFastForward
                };

                MergeResult mergeResult = repo.MergeFetchedRefs(Constants.Signature, mergeOptions);
                Assert.Equal(mergeResult.Status, MergeStatus.NonFastForward);
            }
        }
Example #3
0
 /// <summary>
 /// Initializes a new instance of the <see cref="AssetToImportMerge"/> class.
 /// </summary>
 /// <param name="previousItem">The previously imported item this merge is based on.</param>
 /// <param name="diff">The computed difference between the assets.</param>
 /// <param name="mergePreviewResult">The result of previewing the merge.</param>
 internal AssetToImportMerge(AssetItem previousItem, AssetDiff diff, MergeResult mergePreviewResult)
 {
     this.PreviousItem = previousItem;
     this.Diff = diff;
     this.MergePreviewResult = mergePreviewResult;
     this.DependencyGroups = new List<AssetToImportMergeGroup>();
 }
Example #4
0
        public void PullRemoteChanges(string org, string repository)
        {
            // Pull (fetch + merge) the remote changes into the local clone of org/repository.
            using (var repo = new Repository(FindLocalRepoLocation(org, repository)))
            {
                var fetchOptions = new FetchOptions
                {
                    // Authenticate using the app token as the user name; the password is unused.
                    CredentialsProvider = (url, user, cred) => new UsernamePasswordCredentials
                    {
                        Username = GetAppToken(),
                        Password = ""
                    }
                };

                var pullOptions = new PullOptions
                {
                    FetchOptions = fetchOptions,
                    MergeOptions = new MergeOptions
                    {
                        FastForwardStrategy = FastForwardStrategy.Default
                    }
                };

                // The signature is only used when a merge commit must be created.
                Commands.Pull(
                    repo,
                    new Signature("my name", "my email", DateTimeOffset.Now),
                    pullOptions);
            }
        }
            public string Update()
            {
                // Fetches all refs and tags from "origin" with default (integrated)
                // credentials, then merges the fetched refs into the current branch.
                // Returns "Success", or error details ("Failed" for remote callers).
                try
                {
                    GitCredentials = new DefaultCredentials();

                    using (var repo = new Repository(Path))
                    {
                        var gitSignature = new Signature(GitUsername, GitEmail, DateTimeOffset.Now);

                        // FETCH: all tags are fetched along with the branch refs.
                        Commands.Fetch(repo, "origin", new string[0], new FetchOptions {
                            CredentialsProvider = (_url, _user, _cred) => GitCredentials, TagFetchMode = TagFetchMode.All
                        }, null);

                        // MERGE: default merge options; the result is not inspected here.
                        MergeResult result = repo.MergeFetchedRefs(gitSignature, new MergeOptions());
                    }
                }
                catch (Exception e)
                {
                    // Expose full exception details only to local requests.
                    return(System.Web.HttpContext.Current.Request.IsLocal ? e.ToString() : "Failed");
                }

                return("Success");
            }
        /// <summary>
        /// Downloads ("descargar") the latest views for the given project by pulling
        /// its local repository under Views/&lt;proyecto&gt;. Failures are swallowed so
        /// the existing views remain usable.
        /// </summary>
        /// <param name="proyecto">Name of the project whose views folder is updated.</param>
        internal static void DescargarVistas(string proyecto)
        {
            string rutaDirectorio = $"{AppContext.BaseDirectory}Views/{proyecto}";

            using (var repo = new Repository(rutaDirectorio))
            {
                try
                {
                    // Resolve the branch currently checked out for the project.
                    // NOTE(review): the resolved branch was never used afterwards; the
                    // calls are kept because they may throw and abort the pull on a
                    // bad repository state.
                    string ramaActual = GetActualBranchRepository(proyecto);
                    FindBranch(ramaActual, repo);

                    var fetchOptions = new FetchOptions()
                    {
                        CredentialsProvider = Credential
                    };

                    var options = new PullOptions { FetchOptions = fetchOptions };
                    var signature = new LibGit2Sharp.Signature(
                        new Identity("MERGE_USER_NAME", "MERGE_USER_EMAIL"), DateTimeOffset.Now);

                    Commands.Pull(repo, signature, options);
                }
                catch (Exception)
                {
                    // Best-effort update: a failed pull leaves the current views in place.
                    // TODO(review): log the failure instead of swallowing it silently.
                }
            }
        }
        // Resolves a conflicted (sub)document via the advisor and streams the resolved
        // object into the appropriate writer: metadata resolvers write into the
        // metadata builder, everything else into the document builder.
        private static void WriteConflictResolver(string name, ManualBlittableJsonDocumentBuilder <UnmanagedWriteBuffer> documentWriter,
                                                  ManualBlittableJsonDocumentBuilder <UnmanagedWriteBuffer> metadataWriter, ConflictResolverAdvisor resolver, int indent)
        {
            MergeResult result = resolver.Resolve(indent);

            if (resolver.IsMetadataResolver)
            {
                // Nested metadata properties get their own named, wrapped object;
                // the "@metadata" root itself is streamed without a wrapper.
                if (name != "@metadata")
                {
                    metadataWriter.WritePropertyName(name);
                    metadataWriter.StartWriteObject();
                    result.Document.AddItemsToStream(metadataWriter);
                    metadataWriter.WriteObjectEnd();
                    return;
                }
                result.Document.AddItemsToStream(metadataWriter);
            }
            else
            {
                documentWriter.WritePropertyName(name);
                documentWriter.StartWriteObject();
                result.Document.AddItemsToStream(documentWriter);
                documentWriter.WriteObjectEnd();
            }
        }
 public MergeResult Merge(IEnumerable<SubscriptionItem> oldItems, IEnumerable<FeedItem> newItems)
 {
     // Convert every incoming feed item into a subscription item and record each
     // one in the result as a new item.
     // NOTE(review): oldItems is currently ignored — every incoming item is
     // treated as new; confirm whether de-duplication was intended.
     var result = new MergeResult();
     foreach (var feedItem in newItems)
     {
         var subscriptionItem = new SubscriptionItem(Subscription)
         {
             Id = feedItem.Id,
             PublishDate = feedItem.PublishDate,
             LastUpdatedTime = feedItem.LastUpdatedTime,
             Summary = feedItem.Summary,
             Title = feedItem.Title,
             Categories = feedItem.Categories
         };

         var links = new List<SubscriptionItemLink>();
         foreach (var feedLink in feedItem.Links)
         {
             links.Add(new SubscriptionItemLink(subscriptionItem)
             {
                 Length = feedLink.Length,
                 MediaType = feedLink.MediaType,
                 RelationshipType = feedLink.RelationshipType,
                 Title = feedLink.Title,
                 Uri = feedLink.Uri
             });
         }
         subscriptionItem.Links = links;

         result.AddItem(subscriptionItem, ItemMergeStatus.NewItem);
     }
     return result;
 }
Example #9
0
        // Clones the game client repository if the local copy is missing or invalid,
        // then hard-resets to master and pulls the latest changes, reporting progress
        // through the supplied BackgroundWorker. Progress strings are Portuguese UI text.
        public void initGit(BackgroundWorker worker)
        {
            Worker = worker;
            // Fresh install: clone the whole client ("Baixando Cliente do Zero" = downloading from scratch).
            if (!Repository.IsValid(git))
            {
                Worker.ReportProgress(25, "Baixando Cliente do Zero ... 25%");
                Repository.Clone(gitRepositorio, git);
            }
            using (var repo = new Repository(git))
            {
                Worker.ReportProgress(50, "Verificando Integridade dos Arquivos ... 50%");
                Thread.Sleep(100);
                // Discard any local modifications before updating.
                repo.Reset(ResetMode.Hard, "master");
                Worker.ReportProgress(65, "Atualizando Arquivos ... 65%");
                // Ensure HEAD tracks origin/master so the pull knows its upstream.
                repo.Branches.Update(
                    repo.Head,
                    b => b.Remote         = "origin",
                    b => b.UpstreamBranch = "refs/heads/master"
                    );
                // NOTE(review): Network.Pull is the older LibGit2Sharp pull API —
                // confirm against the library version in use.
                MergeResult mergeResult = repo.Network.Pull(new Signature(Nome, Email, new DateTimeOffset(DateTime.Now)), new PullOptions());

                // Extract the compressed sprite archive, if one exists.
                if (sprite.existe())
                {
                    Worker.ReportProgress(85, "Extraindo Arquivos Compactados ... 85%");
                    sprite.extrair();
                }

                Worker.ReportProgress(100, "Atualização Finalizada ...  100%");
            }
        }
Example #10
0
 /// <summary>
 /// Gets the latest and greatest from remote by pulling into the local password store.
 /// </summary>
 /// <returns>false when the pull failed; true when it succeeded or remote use is disabled/offline.</returns>
 public bool GitFetch()
 {
     // Only touch the network when remote use is enabled and we are not flagged offline.
     if (cfg["UseGitRemote"] == true && GITRepoOffline == false)
     {
         toolStripOffline.Visible = false;
         using (var repo = new LibGit2Sharp.Repository(cfg["PassDirectory"]))
         {
             // Fixed signature; only used if the pull creates a merge commit.
             LibGit2Sharp.Signature Signature    = new LibGit2Sharp.Signature("pass4win", "*****@*****.**", new DateTimeOffset(2011, 06, 16, 10, 58, 27, TimeSpan.FromHours(2)));
             FetchOptions           fetchOptions = new FetchOptions();
             // Credentials come from config; the stored password is decrypted first.
             fetchOptions.CredentialsProvider = (_url, _user, _cred) => new UsernamePasswordCredentials
             {
                 Username = cfg["GitUser"],
                 Password = DecryptConfig(cfg["GitPass"], "pass4win")
             };
             MergeOptions mergeOptions = new MergeOptions();
             PullOptions  pullOptions  = new PullOptions();
             pullOptions.FetchOptions = fetchOptions;
             pullOptions.MergeOptions = mergeOptions;
             try
             {
                 MergeResult mergeResult = repo.Network.Pull(Signature, pullOptions);
             }
             catch
             {
                 // Deliberate best-effort: any pull failure (network, auth, merge)
                 // is reported as false rather than surfaced to the caller.
                 return(false);
             }
         }
     }
     return(true);
 }
        public override void TestFixtureSetUp()
        {
            // Builds the fixture's index map with two successive PTable additions
            // (maxTablesPerLevel: 2), so the second add presumably triggers a merge;
            // the resulting _result/_map state is inspected by the tests.
            base.TestFixtureSetUp();

            _filename  = GetTempFilePath();
            _mergeFile = GetTempFilePath();

            _map = IndexMapTestFactory.FromFile(_filename, maxTablesPerLevel: 2);
            var memtable = new HashListMemTable(_ptableVersion, maxSize: 10);

            memtable.Add(0, 1, 0);

            // First PTable: checkpoints 123/321.
            _result = _map.AddPTable(
                PTable.FromMemtable(memtable, GetTempFilePath(), skipIndexVerify: _skipIndexVerify),
                123, 321, (streamId, hash) => hash, _ => true, _ => new System.Tuple <string, bool>("", true),
                new GuidFilenameProvider(PathName), _ptableVersion, _maxAutoMergeIndexLevel, 0,
                skipIndexVerify: _skipIndexVerify);
            // Tables superseded by the add/merge are marked for destruction.
            _result.ToDelete.ForEach(x => x.MarkForDestruction());
            // Second PTable: checkpoints 100/400, added on top of the merged map.
            _result = _result.MergedMap.AddPTable(
                PTable.FromMemtable(memtable, GetTempFilePath(), skipIndexVerify: _skipIndexVerify),
                100, 400, (streamId, hash) => hash, _ => true, _ => new System.Tuple <string, bool>("", true),
                new FakeFilenameProvider(_mergeFile), _ptableVersion, _maxAutoMergeIndexLevel, 0,
                skipIndexVerify: _skipIndexVerify);
            _result.ToDelete.ForEach(x => x.MarkForDestruction());
        }
Example #12
0
        /// <summary>
        /// Pulls (fetch + merge) the given remote branch into the repository at <paramref name="path"/>.
        /// </summary>
        /// <param name="path">Path to the local repository.</param>
        /// <param name="branchMane">Remote branch name. NOTE(review): looks like a typo for
        /// "branchName", but renaming would break named-argument callers, so it is kept.</param>
        /// <param name="authorName">Name used for the merge-commit signature.</param>
        /// <param name="authorEmail">Email used for the merge-commit signature.</param>
        /// <exception cref="InvalidOperationException">The remote branch does not exist.</exception>
        public void Pull(string path, string branchMane, string authorName, string authorEmail)
        {
            using (var repo = new Repository(path))
            {
                var trackingBranch = repo.Branches[$"remotes/origin/{branchMane}"];

                // Guard: the branch indexer returns null for an unknown name, which
                // previously caused a NullReferenceException on the IsRemote access.
                if (trackingBranch == null)
                {
                    throw new InvalidOperationException(
                        $"Remote branch 'origin/{branchMane}' was not found in the repository at '{path}'.");
                }

                if (trackingBranch.IsRemote)
                {
                    // Make HEAD track the remote branch so the pull knows its upstream.
                    var branch = repo.Head;
                    repo.Branches.Update(branch, b => b.TrackedBranch = trackingBranch.CanonicalName);
                }

                PullOptions pullOptions = new PullOptions()
                {
                    MergeOptions = new MergeOptions()
                    {
                        FastForwardStrategy = FastForwardStrategy.Default
                    }
                };

                MergeResult mergeResult = Commands.Pull(
                    repo,
                    new Signature(authorName, authorEmail, DateTimeOffset.Now),
                    pullOptions
                    );
            }
        }
        public override void TestFixtureSetUp()
        {
            // Builds an index map with four successive PTable additions at
            // maxTablesPerLevel: 2, so intermediate adds presumably trigger merges;
            // the final merge target file is _mergeFile. Each AddPTable returns a new
            // result whose superseded tables are marked for destruction.
            base.TestFixtureSetUp();

            _mergeFile = GetTempFilePath();
            _filename  = GetTempFilePath();

            _map = IndexMap.FromFile(_filename, _ptableVersion, maxTablesPerLevel: 2);
            var memtable = new HashListMemTable(_ptableVersion, maxSize: 10);

            memtable.Add(0, 1, 0);

            _result = _map.AddPTable(PTable.FromMemtable(memtable, GetTempFilePath()),
                                     10, 20, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new GuidFilenameProvider(PathName), _ptableVersion);
            _result.ToDelete.ForEach(x => x.MarkForDestruction());
            _result = _result.MergedMap.AddPTable(PTable.FromMemtable(memtable, GetTempFilePath()),
                                                  20, 30, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new GuidFilenameProvider(PathName), _ptableVersion);
            _result.ToDelete.ForEach(x => x.MarkForDestruction());
            _result = _result.MergedMap.AddPTable(PTable.FromMemtable(memtable, GetTempFilePath()),
                                                  30, 40, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new GuidFilenameProvider(PathName), _ptableVersion);
            _result.ToDelete.ForEach(x => x.MarkForDestruction());
            // Final add uses a fixed filename provider so the merged file lands at _mergeFile.
            _result = _result.MergedMap.AddPTable(PTable.FromMemtable(memtable, GetTempFilePath()),
                                                  50, 60, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new FakeFilenameProvider(_mergeFile + ".firstmerge", _mergeFile), _ptableVersion);
            _result.ToDelete.ForEach(x => x.MarkForDestruction());
        }
        // Adds PTables to the index map: one initial add plus one per loop iteration,
        // continuing from any existing _result. Commit checkpoints grow with i so each
        // table covers a distinct range. Superseded tables are marked for destruction.
        protected void AddTables(int count)
        {
            var memtable = new HashListMemTable(_ptableVersion, maxSize: 10);

            memtable.Add(0, 1, 0);
            // Continue from the previous result's merged map when one exists.
            var first = _map;

            if (_result != null)
            {
                first = _result.MergedMap;
            }
            var pTable = PTable.FromMemtable(memtable, GetTempFilePath(), skipIndexVerify: _skipIndexVerify);

            _result = first.AddPTable(pTable,
                                      10, 20, UpgradeHash, ExistsAt, RecordExistsAt, _fileNameProvider, _ptableVersion,
                                      0, 0, skipIndexVerify: _skipIndexVerify);
            // NOTE(review): the first add's ToDelete list is not destroyed here,
            // unlike the ones in the loop — confirm whether that is intentional.
            for (int i = 3; i <= count * 2; i += 2)
            {
                pTable  = PTable.FromMemtable(memtable, GetTempFilePath(), skipIndexVerify: _skipIndexVerify);
                _result = _result.MergedMap.AddPTable(
                    pTable,
                    i * 10, (i + 1) * 10, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true),
                    _fileNameProvider, _ptableVersion, 0, 0, skipIndexVerify: _skipIndexVerify);
                _result.ToDelete.ForEach(x => x.MarkForDestruction());
            }
        }
        // Merges two items of the same type on their common set of Update values.
        // Produces a merged item carrying the union of excludes and metadata, and
        // mutates both inputs by removing the common updates; an input whose Update
        // becomes empty is reported as null in the result.
        private MergeResult MergeItemsOnUpdates(ProjectItemElement item, ProjectItemElement existingItem)
        {
            if (!string.Equals(item.ItemType, existingItem.ItemType, StringComparison.Ordinal))
            {
                throw new InvalidOperationException(LocalizableStrings.CannotMergeItemsOfDifferentTypesError);
            }

            var commonUpdates = item.IntersectUpdates(existingItem).ToList();
            // "placeholder" Include is immediately cleared; the merged item is
            // identified by its Update attribute instead.
            var mergedItem    = _projectElementGenerator.AddItem(item.ItemType, "placeholder");

            mergedItem.Include = string.Empty;
            mergedItem.Update  = string.Join(";", commonUpdates);

            mergedItem.UnionExcludes(existingItem.Excludes());
            mergedItem.UnionExcludes(item.Excludes());

            mergedItem.AddMetadata(MergeMetadata(existingItem.Metadata, item.Metadata), MigrationTrace.Instance);

            // Mutation of the inputs is part of the contract: the common subset now
            // lives on the merged item only.
            item.RemoveUpdates(commonUpdates);
            existingItem.RemoveUpdates(commonUpdates);

            var mergeResult = new MergeResult
            {
                InputItem    = string.IsNullOrEmpty(item.Update) ? null : item,
                ExistingItem = string.IsNullOrEmpty(existingItem.Update) ? null : existingItem,
                MergedItem   = mergedItem
            };

            return(mergeResult);
        }
Example #16
0
        public void BasicInsertTest()
        {
            // Inserts one item under several search keys plus two other items, then
            // verifies prefix lookups return the expected results.
            SearchNode root = new SearchNode()
            {
                Label = "chrome", Results = new MergedResultList()
            };
            var item = new Item(new DirectoryNodeMock("chrome.exe"))
            {
                Priority = 2
            };

            root.Items.Add(item);
            root.Results.Add(item);
            // Accumulates merge bookkeeping across the inserts below.
            MergeResult result = new MergeResult();

            // Same item registered under three different keys.
            root.Insert(".exe", item, ref result);
            root.Insert("google chrome", item, ref result);
            root.Insert("chrome", item, ref result);
            root.Insert("firefox", new Item(new DirectoryNodeMock("firefox.exe"))
            {
                Priority = 3
            }, ref result);
            root.Insert("filezilla", new Item(new DirectoryNodeMock("filezilla.exe"))
            {
                Priority = 4
            }, ref result);
            // "fi" matches both firefox and filezilla.
            Assert.IsTrue(root.Find("fi").ResultItems.Count == 2);
            Assert.IsTrue(root.Find(".exe").ResultItems[0] == item);
            Assert.IsTrue(root.Find("google chrome").ResultItems[0] == item);
            Assert.IsTrue(root.Find("chrome").ResultItems[0] == item);
        }
Example #17
0
        public void CanMergeIntoOrphanedBranch()
        {
            // Points HEAD at an unborn ("orphan") branch, empties the index and
            // working directory, then merges master; since the orphan branch has no
            // commits, the merge fast-forwards to master's initial commit.
            string path = CloneMergeTestRepo();

            using (var repo = new Repository(path))
            {
                repo.Refs.Add("HEAD", "refs/heads/orphan", true);

                // Remove entries from the working directory
                foreach (var entry in repo.Index.RetrieveStatus())
                {
                    repo.Index.Unstage(entry.FilePath);
                    repo.Index.Remove(entry.FilePath, true);
                }

                // Assert that we have an empty working directory.
                Assert.False(repo.Index.RetrieveStatus().Any());

                MergeResult result = repo.Merge("master", Constants.Signature);

                Assert.Equal(MergeStatus.FastForward, result.Status);
                Assert.Equal(masterBranchInitialId, result.Commit.Id.Sha);
                Assert.False(repo.Index.RetrieveStatus().Any());
            }
        }
Example #18
0
        /// <summary>
        /// Merges the given branch into the current branch using the current signature.
        /// </summary>
        /// <param name="merged">The branch to merge in.</param>
        /// <returns>true when the merge completed without conflicts.</returns>
        public bool Merge(BranchLabelModel merged)
        {
            var signature = GetCurrentSignature();
            var mergeResult = Repository.Merge(merged.Branch, signature);
            return mergeResult.Status != MergeStatus.Conflicts;
        }
        public override void SetUp()
        {
            // Builds an index map by adding the same PTable six times at
            // maxTablesPerLevel: 4, capturing the intermediate result after the
            // fourth add (`merged`), saving the final map to _filename, and then
            // disposing every table so only the on-disk state remains.
            base.SetUp();

            _filename  = GetFilePathFor("indexfile");
            _tablename = GetTempFilePath();
            _mergeFile = GetFilePathFor("outfile");

            _map = IndexMap.FromFile(_filename, maxTablesPerLevel: 4);
            var memtable = new HashListMemTable(_ptableVersion, maxSize: 10);

            memtable.Add(0, 2, 123);
            var table = PTable.FromMemtable(memtable, _tablename);

            _result = _map.AddPTable(table, 0, 0, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new FakeFilenameProvider(_mergeFile), _ptableVersion);
            _result = _result.MergedMap.AddPTable(table, 0, 0, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new FakeFilenameProvider(_mergeFile), _ptableVersion);
            _result = _result.MergedMap.AddPTable(table, 0, 0, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new FakeFilenameProvider(_mergeFile), _ptableVersion);
            // Fourth add is kept separately; its map/tables are disposed below.
            var merged = _result.MergedMap.AddPTable(table, 0, 0, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new FakeFilenameProvider(_mergeFile), _ptableVersion);

            _result = merged.MergedMap.AddPTable(table, 0, 0, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new FakeFilenameProvider(_mergeFile), _ptableVersion);
            _result = _result.MergedMap.AddPTable(table, 7, 11, (streamId, hash) => hash, _ => true, _ => new Tuple <string, bool>("", true), new FakeFilenameProvider(_mergeFile), _ptableVersion);
            _result.MergedMap.SaveToFile(_filename);

            table.Dispose();

            merged.MergedMap.InOrder().ToList().ForEach(x => x.Dispose());
            merged.ToDelete.ForEach(x => x.Dispose());

            _result.MergedMap.InOrder().ToList().ForEach(x => x.Dispose());
            _result.ToDelete.ForEach(x => x.Dispose());
        }
Example #20
0
        /// <summary>
        /// Merges two items of the same type on their common set of includes.
        ///
        /// Three items result: the two (mutated) inputs and the merged item. When the
        /// common includes cover all of an input item's includes, that input is
        /// reported as null in the result.
        ///
        /// The merged item carries the union of both items' excludes and metadata;
        /// conflicting metadata causes a throw (inside MergeMetadata).
        ///
        /// Note: this mutates the Include property of both input items, removing the
        /// common subset from each.
        /// </summary>
        private MergeResult MergeItems(ProjectItemElement item, ProjectItemElement existingItem)
        {
            if (!string.Equals(item.ItemType, existingItem.ItemType, StringComparison.Ordinal))
            {
                throw new InvalidOperationException("Cannot merge items of different types.");
            }

            var sharedIncludes = item.IntersectIncludes(existingItem).ToList();
            if (sharedIncludes.Count == 0)
            {
                throw new InvalidOperationException("Cannot merge items without a common include.");
            }

            var mergedItem = _projectElementGenerator.AddItem(item.ItemType, string.Join(";", sharedIncludes));

            mergedItem.UnionExcludes(existingItem.Excludes());
            mergedItem.UnionExcludes(item.Excludes());
            mergedItem.AddMetadata(MergeMetadata(existingItem.Metadata, item.Metadata));

            // The common subset now lives only on the merged item.
            item.RemoveIncludes(sharedIncludes);
            existingItem.RemoveIncludes(sharedIncludes);

            return new MergeResult
            {
                InputItem    = string.IsNullOrEmpty(item.Include) ? null : item,
                ExistingItem = string.IsNullOrEmpty(existingItem.Include) ? null : existingItem,
                MergedItem   = mergedItem
            };
        }
Example #21
0
        public override void TestFixtureSetUp()
        {
            // Builds an index map with four PTable additions at maxTablesPerLevel: 4;
            // the last add uses a fixed filename provider so a resulting merge file
            // (if any) lands at _mergeFile. Superseded tables are destroyed each step.
            base.TestFixtureSetUp();

            _mergeFile = GetTempFilePath();
            _filename  = GetTempFilePath();

            _map = IndexMap.FromFile(_filename, maxTablesPerLevel: 4);
            var memtable = new HashListMemTable(maxSize: 10);

            memtable.Add(0, 1, 0);

            _result = _map.AddPTable(PTable.FromMemtable(memtable, GetTempFilePath()), 1, 2,
                                     _ => true, new GuidFilenameProvider(PathName));
            _result.ToDelete.ForEach(x => x.MarkForDestruction());

            _result = _result.MergedMap.AddPTable(PTable.FromMemtable(memtable, GetTempFilePath()), 3, 4,
                                                  _ => true, new GuidFilenameProvider(PathName));
            _result.ToDelete.ForEach(x => x.MarkForDestruction());

            _result = _result.MergedMap.AddPTable(PTable.FromMemtable(memtable, GetTempFilePath()), 4, 5,
                                                  _ => true, new GuidFilenameProvider(PathName));
            _result.ToDelete.ForEach(x => x.MarkForDestruction());

            _result = _result.MergedMap.AddPTable(PTable.FromMemtable(memtable, GetTempFilePath()), 0, 1,
                                                  _ => true, new FakeFilenameProvider(_mergeFile));
            _result.ToDelete.ForEach(x => x.MarkForDestruction());
        }
Example #22
0
        /// <summary>
        /// Merges either the whole merge set or only the selected change sets,
        /// combining the individual results into one.
        /// </summary>
        public MergeResult Merge()
        {
            var combined = MergeResult.Empty();

            // A brand-new merge set is always merged wholesale.
            if (this.MergeSet.IsNew)
            {
                return combined.Combine(this.MergeSet.Merge());
            }

            var selectedChangeSets = this.ChangeSets
                                     .Where(cs => cs.IsSelected)
                                     .OrderBy(cs => cs.Id)
                                     .ToArray();

            // When every change set is selected, merging the whole set is equivalent.
            if (this.ChangeSets.Count() == selectedChangeSets.Length)
            {
                return combined.Combine(this.MergeSet.Merge());
            }

            // Otherwise merge only the selected change sets, in id order.
            foreach (var changeSet in selectedChangeSets)
            {
                combined = combined.Combine(changeSet.Merge());
            }

            return combined;
        }
Example #23
0
        // Surfaces the outcome of a merge-like operation in the appropriate Unity
        // editor window and logs the final status. `mergeType` is a display word
        // (e.g. "Merge" or "Pull") interpolated into the notifications.
        protected void OnMergeComplete(MergeResult result, string mergeType)
        {
            switch (result.Status)
            {
            case MergeStatus.UpToDate:
                GitHistoryWindow.GetWindow(true).ShowNotification(new GUIContent(string.Format("Everything is Up to date. Nothing to {0}.", mergeType)));
                break;

            case MergeStatus.FastForward:
                GitHistoryWindow.GetWindow(true).ShowNotification(new GUIContent(mergeType + " Complete with Fast Forwarding."));
                break;

            case MergeStatus.NonFastForward:
                // A merge commit is still pending: prompt the user and pre-fill the
                // commit message from the repository state.
                GitDiffWindow.GetWindow(true).ShowNotification(new GUIContent("Do a merge commit in order to push changes."));
                GitDiffWindow.GetWindow(false).commitMessage = GitManager.Repository.Info.Message;
                Debug.Log(mergeType + " Complete without Fast Forwarding.");
                break;

            case MergeStatus.Conflicts:
                // Conflicts need manual resolution; warn via the diff window.
                GUIContent content = EditorGUIUtility.IconContent("console.warnicon");
                content.text = "There are merge conflicts!";
                GitDiffWindow.GetWindow(true).ShowNotification(content);
                GitDiffWindow.GetWindow(false).commitMessage = GitManager.Repository.Info.Message;
                break;
            }
            GitManager.Update();
            Debug.LogFormat("{0} Status: {1}", mergeType, result.Status);
        }
Example #24
0
        // Pulls the configured repository using default (integrated) credentials and
        // reports whether new commits were fast-forwarded in.
        public static GitStatus PullRepo()
        {
            using (var repo = new Repository(FileUtils.RepoFolder))
            {
                PullOptions options = new PullOptions
                {
                    FetchOptions = new FetchOptions
                    {
                        // Integrated/default credentials — no secrets embedded here.
                        CredentialsProvider = new CredentialsHandler((_, __, ___)
                                                                     => new DefaultCredentials())
                    }
                };

                // Signature comes from resources; only used if a merge commit is created.
                MergeResult result = Commands.Pull(repo,
                                                   new Signature(Resources.GitName, Resources.GitEmail, new DateTimeOffset(DateTime.Now)), options);

                // NOTE(review): NonFastForward and Conflicts are also reported as
                // NoUpdates — confirm that collapsing them is intended.
                if (result.Status == MergeStatus.FastForward)
                {
                    return(GitStatus.Updated);
                }

                return(GitStatus.NoUpdates);
            }
        }
Example #25
0
        public override void SetUp()
        {
            // Builds an index map by adding the same file six times at
            // maxTablesPerLevel: 4, capturing the intermediate result after the
            // fourth add (`merged`), saving the final map to _filename, then
            // disposing all in-memory tables so only the on-disk state remains.
            base.SetUp();

            _filename  = GetFilePathFor("indexfile");
            _tablename = GetTempFilePath();
            _mergeFile = GetFilePathFor("outfile");

            _map = IndexMap.FromFile(_filename, x => false, maxTablesPerLevel: 4);
            var memtable = new HashListMemTable(maxSize: 10);

            memtable.Add(0, 2, 123);
            var table = PTable.FromMemtable(memtable, _tablename);

            _result = _map.AddFile(table, 0, 0, new FakeFilenameProvider(_mergeFile));
            _result = _result.MergedMap.AddFile(table, 0, 0, new FakeFilenameProvider(_mergeFile));
            _result = _result.MergedMap.AddFile(table, 0, 0, new FakeFilenameProvider(_mergeFile));
            // Fourth add is kept separately; its map/tables are disposed below.
            var merged = _result.MergedMap.AddFile(table, 0, 0, new FakeFilenameProvider(_mergeFile));

            _result = merged.MergedMap.AddFile(table, 0, 0, new FakeFilenameProvider(_mergeFile));
            _result = _result.MergedMap.AddFile(table, 7, 11, new FakeFilenameProvider(_mergeFile));
            _result.MergedMap.SaveToFile(_filename);

            table.Dispose();

            merged.MergedMap.InOrder().ToList().ForEach(x => x.Dispose());
            merged.ToDelete.ForEach(x => x.Dispose());

            _result.MergedMap.InOrder().ToList().ForEach(x => x.Dispose());
            _result.ToDelete.ForEach(x => x.Dispose());
        }
        public void MergeCanDetectRenames()
        {
            // The environment is set up such that:
            // file b.txt is edited in the "rename" branch and
            // edited and renamed in the "rename_source" branch.
            // The edits are automergable.
            // We can merge "rename" into "rename_source"
            // if rename detection is enabled,
            // but the merge will fail with conflicts if this
            // change is not detected as a rename.

            string repoPath = SandboxMergeTestRepo();

            using (var repo = new Repository(repoPath))
            {
                Branch currentBranch = Commands.Checkout(repo, "rename_source");
                Assert.NotNull(currentBranch);

                Branch branchToMerge = repo.Branches["rename"];

                MergeResult result = repo.Merge(branchToMerge, Constants.Signature);

                // A clean (non-fast-forward) merge proves the rename was detected.
                Assert.Equal(MergeStatus.NonFastForward, result.Status);
            }
        }
Example #27
0
        public void Equals_NonGeneric_ObjIsNull_ReturnsFalse(
            int rowsInserted,
            int rowsUpdated)
        {
            // A MergeResult must never compare equal to a null reference via the
            // non-generic Equals(object) overload.
            var uut = new MergeResult(rowsInserted, rowsUpdated);

            object other = null;

            uut.Equals(other).ShouldBeFalse();
        }
Beispiel #28
0
        public MergeResult Merge()
        {
            // Default to an empty result; only a source component that supports
            // branching contributes an actual merge outcome.
            var result = MergeResult.Empty();

            this.SourceComponent.As <ISupportBranching>().Do(c => result = c.Merge(_targetComponentsFolder));

            return result;
        }
 public void Filter(MergeResult mergeResult)
 {
     // Mark as downloadable each link, on items belonging to the target feed,
     // whose media-type/relationship pair appears in the allowed set.
     var itemsToFilter = mergeResult.AllItemsBelongsTo(ApplyTo);
     var candidateLinks = itemsToFilter.SelectMany(item => item.Links);

     foreach (var link in candidateLinks)
     {
         var pair = new Pair() { MediaType = link.MediaType, RelationshipType = link.RelationshipType };
         if (AllowerPairs.Contains(pair))
             link.Downloadable = true;
     }
 }
Beispiel #30
0
        /// <summary>
        /// Records the conflicting ranges of a low-level content merge for the
        /// given path so they can be reported on this merge result.
        /// Each conflict is stored as a row of begin indices (one per merged
        /// commit) with the end of the conflict in the final slot.
        /// </summary>
        /// <param name="path">the path of the file that contains conflicts</param>
        /// <param name="lowLevelResult">the textual merge result for that file</param>
        public virtual void AddConflict <_T0>(string path, MergeResult <_T0> lowLevelResult
                                              ) where _T0 : Sequence
        {
            if (!lowLevelResult.ContainsConflicts())
            {
                return;
            }
            if (conflicts == null)
            {
                conflicts = new Dictionary <string, int[][]>();
            }
            int nrOfConflicts = 0;

            // First pass: just counting the conflicts.
            foreach (MergeChunk mergeChunk in lowLevelResult)
            {
                if (mergeChunk.GetConflictState().Equals(MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE
                                                         ))
                {
                    nrOfConflicts++;
                }
            }
            int currentConflict = -1;

            int[][] ret = new int[nrOfConflicts][];
            for (int n = 0; n < nrOfConflicts; n++)
            {
                ret[n] = new int[mergedCommits.Length + 1];
            }
            // Tracks the end of the current conflict (end of its last conflicting
            // range). BUGFIX: this must survive across loop iterations - it was
            // previously declared inside the loop, so it was reset to 0 on every
            // chunk and the end recorded for each finished conflict was always 0.
            int endOfChunk = 0;
            foreach (MergeChunk mergeChunk_1 in lowLevelResult)
            {
                if (mergeChunk_1.GetConflictState().Equals(MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE
                                                           ))
                {
                    if (currentConflict > -1)
                    {
                        // there was a previous conflicting range for which the end
                        // is not set yet - set it!
                        ret[currentConflict][mergedCommits.Length] = endOfChunk;
                    }
                    currentConflict++;
                    endOfChunk = mergeChunk_1.GetEnd();
                    ret[currentConflict][mergeChunk_1.GetSequenceIndex()] = mergeChunk_1.GetBegin();
                }
                if (mergeChunk_1.GetConflictState().Equals(MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE
                                                           ))
                {
                    // Extend the current conflict to cover this range as well.
                    if (mergeChunk_1.GetEnd() > endOfChunk)
                    {
                        endOfChunk = mergeChunk_1.GetEnd();
                    }
                    ret[currentConflict][mergeChunk_1.GetSequenceIndex()] = mergeChunk_1.GetBegin();
                }
            }
            // BUGFIX: close the last conflict - its end is only written inside the
            // loop when a following FIRST_CONFLICTING_RANGE is seen, so the final
            // conflict's end slot was previously left at its default of 0.
            if (currentConflict > -1)
            {
                ret[currentConflict][mergedCommits.Length] = endOfChunk;
            }
            conflicts.Put(path, ret);
        }
        /// <summary>
        /// Pulls every git repository found (recursively) under the configured
        /// root directory, authenticating with the credentials from the git
        /// settings. A failure in one repository is reported to the console
        /// and does not stop the remaining pulls.
        /// </summary>
        public static void PullAll()
        {
            var settings = GetGitSettings();
            // Find every ".git" directory and operate on its parent (the work tree).
            var dirs     = Directory.EnumerateDirectories(settings.RootDir, ".git", SearchOption.AllDirectories);

            foreach (var dir in dirs.Select(d => new DirectoryInfo(d).Parent.FullName))
            {
                using (var repo = new LibGit2Sharp.Repository(dir, new RepositoryOptions()))
                {
                    // Echo the repository name in green, then restore the color.
                    var color = Console.ForegroundColor;
                    Console.ForegroundColor = ConsoleColor.Green;
                    Console.WriteLine(Path.GetFileName(dir));
                    Console.ForegroundColor = color;


                    PullOptions pullOptions = new PullOptions()
                    {
                        MergeOptions = new MergeOptions()
                        {
                            FastForwardStrategy = FastForwardStrategy.Default
                        },
                        FetchOptions = new FetchOptions()
                        {
                            CredentialsProvider = new CredentialsHandler((url, usernameFromUrl, types) =>
                                                                         new UsernamePasswordCredentials()
                            {
                                Username = settings.Username,
                                Password = settings.Password
                            }),
                            OnTransferProgress = OnCloneProgress
                        }
                    };
                    try
                    {
                        // Commands.Pull is synchronous; the previous
                        // Task.Run(...).Wait() wrapper only blocked the caller
                        // anyway, so the call is now made directly.
                        MergeResult mergeResult = Commands.Pull(
                            repo,
                            new LibGit2Sharp.Signature(settings.Username, settings.Username, DateTimeOffset.Now),
                            pullOptions
                            );
                        if (mergeResult.Commit != null)
                        {
                            Console.WriteLine();
                            Console.WriteLine(mergeResult.Status);
                            Console.WriteLine(mergeResult.Commit.ToString());
                        }
                    }
                    catch (Exception ex)
                    {
                        Console.ForegroundColor = ConsoleColor.Red;
                        Console.WriteLine(ex.Message);
                        // BUGFIX: restore the color captured above instead of
                        // forcing White, which clobbered the user's console scheme.
                        Console.ForegroundColor = color;
                    }
                }
            }
        }
Beispiel #32
0
        public void Constructor_Otherwise_RowsInsertedAndRowsUpdatedAreGiven(
            int rowsInserted,
            int rowsUpdated)
        {
            // The constructor should surface both counters unchanged.
            var sut = new MergeResult(rowsInserted, rowsUpdated);

            sut.RowsInserted.ShouldBe(rowsInserted);
            sut.RowsUpdated.ShouldBe(rowsUpdated);
        }
Beispiel #33
0
        /// <summary>
        /// Formats the results of a merge of <see cref="RawText"/> objects in a Git
        /// conformant way. This method also assumes that the <see cref="RawText"/> objects
        /// being merged are line oriented files which use LF as delimiter. This
        /// method will also use LF to separate chunks and conflict metadata,
        /// therefore it fits only to texts that are LF-separated lines.
        /// </summary>
        /// <param name="out">the outputstream where to write the textual presentation</param>
        /// <param name="res">the merge result which should be presented</param>
        /// <param name="seqName">
        /// When a conflict is reported each conflicting range will get a
        /// name. This name is following the "&lt;&lt;&lt;&lt;&lt;&lt;&lt; " or "&gt;&gt;&gt;&gt;&gt;&gt;&gt; "
        /// conflict markers. The names for the sequences are given in
        /// this list
        /// </param>
        /// <param name="charsetName">
        /// the name of the characterSet used when writing conflict
        /// metadata
        /// </param>
        public void formatMerge(BinaryWriter @out, MergeResult res,
                List<String> seqName, string charsetName)
        {
            String lastConflictingName = null; // is set to non-null whenever we are
            // in a conflict
            // In a three-way merge the separator is a bare "======="; otherwise
            // the conflicting chunk's name is appended after the equal signs.
            bool threeWayMerge = (res.getSequences().Count == 3);
            foreach (MergeChunk chunk in res)
            {
                // The sequence this chunk's lines come from.
                RawText seq = (RawText)res.getSequences()[
                        chunk.getSequenceIndex()];
                if (lastConflictingName != null
                        && chunk.getConflictState() != MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE)
                {
                    // found the end of an conflict
                    @out.Write((">>>>>>> " + lastConflictingName + "\n").getBytes(charsetName));
                    lastConflictingName = null;
                }
                if (chunk.getConflictState() == MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE)
                {
                    // found the start of an conflict
                    @out.Write(("<<<<<<< " + seqName[chunk.getSequenceIndex()] +
                            "\n").getBytes(charsetName));
                    lastConflictingName = seqName[chunk.getSequenceIndex()];
                }
                else if (chunk.getConflictState() == MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE)
                {
                    // found another conflicting chunk

                    /*
                     * In case of a non-three-way merge I'll add the name of the
                     * conflicting chunk behind the equal signs. I also append the
                     * name of the last conflicting chunk after the ending
                     * greater-than signs. If somebody knows a better notation to
                     * present non-three-way merges - feel free to correct here.
                     */
                    lastConflictingName = seqName[chunk.getSequenceIndex()];
                    @out.Write((threeWayMerge ? "=======\n" : "======= "
                            + lastConflictingName + "\n").getBytes(charsetName));
                }
                // the lines with conflict-metadata are written. Now write the chunk
                // NOTE(review): the separating '\n' is written BEFORE each line and
                // only when the line's index within its sequence is > 0, so a chunk
                // starting at line 0 gets no leading newline - confirm this
                // produces the intended chunk separation for multi-chunk output.
                for (int i = chunk.getBegin(); i < chunk.getEnd(); i++)
                {
                    if (i > 0)
                        @out.Write('\n');
                    seq.writeLine(@out.BaseStream, i);

                }
            }
            // one possible leftover: if the merge result ended with a conflict we
            // have to close the last conflict here
            if (lastConflictingName != null)
            {
                @out.Write('\n');
                @out.Write((">>>>>>> " + lastConflictingName + "\n").getBytes(charsetName));
            }
        }
Beispiel #34
0
		/// <summary>
		/// Formats the results of a merge of
		/// <see cref="NGit.Diff.RawText">NGit.Diff.RawText</see>
		/// objects in a Git
		/// conformant way. This method also assumes that the
		/// <see cref="NGit.Diff.RawText">NGit.Diff.RawText</see>
		/// objects
		/// being merged are line oriented files which use LF as delimiter. This
		/// method will also use LF to separate chunks and conflict metadata,
		/// therefore it fits only to texts that are LF-separated lines.
		/// </summary>
		/// <param name="out">the outputstream where to write the textual presentation</param>
		/// <param name="res">the merge result which should be presented</param>
		/// <param name="seqName">
		/// When a conflict is reported each conflicting range will get a
		/// name. This name is following the "&lt;&lt;&lt;&lt;&lt;&lt;&lt; " or "&gt;&gt;&gt;&gt;&gt;&gt;&gt; "
		/// conflict markers. The names for the sequences are given in
		/// this list
		/// </param>
		/// <param name="charsetName">
		/// the name of the characterSet used when writing conflict
		/// metadata
		/// </param>
		/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
		public virtual void FormatMerge(OutputStream @out, MergeResult<RawText> res, IList
			<string> seqName, string charsetName)
		{
			string lastConflictingName = null;
			// is set to non-null whenever we are
			// in a conflict
			// In a three-way merge the separator is a bare "======="; otherwise
			// the conflicting chunk's name is appended after the equal signs.
			bool threeWayMerge = (res.GetSequences().Count == 3);
			foreach (MergeChunk chunk in res)
			{
				// The sequence this chunk's lines come from.
				RawText seq = res.GetSequences()[chunk.GetSequenceIndex()];
				if (lastConflictingName != null && chunk.GetConflictState() != MergeChunk.ConflictState
					.NEXT_CONFLICTING_RANGE)
				{
					// found the end of an conflict
					@out.Write(Sharpen.Runtime.GetBytesForString((">>>>>>> " + lastConflictingName + 
						"\n"), charsetName));
					lastConflictingName = null;
				}
				if (chunk.GetConflictState() == MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE)
				{
					// found the start of an conflict
					@out.Write(Sharpen.Runtime.GetBytesForString(("<<<<<<< " + seqName[chunk.GetSequenceIndex
						()] + "\n"), charsetName));
					lastConflictingName = seqName[chunk.GetSequenceIndex()];
				}
				else
				{
					if (chunk.GetConflictState() == MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE)
					{
						// found another conflicting chunk
						lastConflictingName = seqName[chunk.GetSequenceIndex()];
						@out.Write(Sharpen.Runtime.GetBytesForString((threeWayMerge ? "=======\n" : "======= "
							 + lastConflictingName + "\n"), charsetName));
					}
				}
				// the lines with conflict-metadata are written. Now write the chunk
				for (int i = chunk.GetBegin(); i < chunk.GetEnd(); i++)
				{
					seq.WriteLine(@out, i);
					@out.Write('\n');
				}
			}
			// one possible leftover: if the merge result ended with a conflict we
			// have to close the last conflict here
			if (lastConflictingName != null)
			{
				@out.Write(Sharpen.Runtime.GetBytesForString((">>>>>>> " + lastConflictingName + 
					"\n"), charsetName));
			}
		}
 private void ChooseMergeItem(string id, KeyedCollection<string, SubscriptionItem> previousAuxList, KeyedCollection<string, FeedItem> currentAuxList, MergeResult result)
 {
     // An item present in both lists is either unchanged (same timestamp) or
     // must absorb the feed's newer data before being kept.
     var previousItem = previousAuxList.Take(id);
     var currentItem = currentAuxList.Take(id);

     if (previousItem.LastUpdatedTime != currentItem.LastUpdatedTime)
     {
         previousItem.Update(currentItem);
         result.AddItem(previousItem, ItemMergeStatus.UpdatedItem);
     }
     else
     {
         result.AddItem(previousItem, ItemMergeStatus.NoChangedItem);
     }
 }
 public void Filter(MergeResult mergeResult)
 {
     // On the very first merge (no previous items) cap how many downloads are
     // kicked off: once either the maximum count is reached or items become
     // too old, every later (older) downloadable link is flagged to be ignored.
     if (!mergeResult.ThereWereNoItems)
         return;

     var itemsNewestFirst = mergeResult.Reverse().ToArray();
     var ignoring = false;
     var accepted = 0;

     foreach (var item in itemsNewestFirst)
     {
         if (!ignoring)
             ignoring = accepted == MaxInitialDownloads || DateTimeOffset.Now - item.PublishDate > MaxOldInitialDownloads;

         if (ignoring)
         {
             foreach (var link in item.Links.Where(l => l.Downloadable))
                 link.IgnoreIt = true;
         }
         else if (item.Links.Any(l => l.Downloadable && !l.IgnoreIt && !l.Deleted))
         {
             accepted++;
         }
     }
 }
        public MergeResult Merge(IEnumerable<SubscriptionItem> previousItems, IEnumerable<FeedItem> currentItems)
        {
            // Classify every item as removed, merged (kept/updated) or new by
            // matching ids between the previous snapshot and the current feed.
            var result = new MergeResult();
            var previousById = new KeyedCollection<string, SubscriptionItem>(item => item.Id, previousItems);
            var currentById = new KeyedCollection<string, FeedItem>(item => item.Id, currentItems);

            // Snapshot the ids first: the Choose* helpers consume entries from
            // the keyed collections while we iterate.
            foreach (var id in previousById.Select(item => item.Id).ToArray())
            {
                if (currentById.Contains(id))
                    ChooseMergeItem(id, previousById, currentById, result);
                else
                    ChooseRemovedItem(id, previousById, result);
            }

            // Whatever is still in the current list was not matched above: new items.
            foreach (var id in currentById.Select(item => item.Id).ToArray())
                ChooseNewItem(id, currentById, result);

            return result;
        }
Beispiel #38
0
 /// <summary>Writes merged file content to the working tree.</summary>
 /// <remarks>
 /// Writes merged file content to the working tree. In case
 /// <see cref="inCore">inCore</see>
 /// is set and we don't have a working tree the content is written to a
 /// temporary file
 /// </remarks>
 /// <param name="result">the result of the content merge</param>
 /// <returns>the file to which the merged content was written</returns>
 /// <exception cref="System.IO.FileNotFoundException">System.IO.FileNotFoundException
 /// 	</exception>
 /// <exception cref="System.IO.IOException">System.IO.IOException</exception>
 private FilePath WriteMergedFile(MergeResult<RawText> result)
 {
     MergeFormatter fmt = new MergeFormatter();
     // Stays null if nothing is written (inCore with conflicts).
     FilePath of = null;
     FileOutputStream fos;
     if (!inCore)
     {
         FilePath workTree = db.WorkTree;
         if (workTree == null)
         {
             // TODO: This should be handled by WorkingTreeIterators which
             // support write operations
             throw new NGit.Errors.NotSupportedException();
         }
         // Write the merged (possibly conflict-marked) content to the
         // file's path inside the working tree.
         of = new FilePath(workTree, tw.PathString);
         fos = new FileOutputStream(of);
         try
         {
             fmt.FormatMerge(fos, result, Arrays.AsList(commitNames), Constants.CHARACTER_ENCODING
                 );
         }
         finally
         {
             // Always release the stream, even if formatting throws.
             fos.Close();
         }
     }
     else
     {
         if (!result.ContainsConflicts())
         {
             // When working inCore, only trivial merges can be handled,
             // so we generate objects only in conflict free cases
             of = FilePath.CreateTempFile("merge_", "_temp", null);
             fos = new FileOutputStream(of);
             try
             {
                 fmt.FormatMerge(fos, result, Arrays.AsList(commitNames), Constants.CHARACTER_ENCODING
                     );
             }
             finally
             {
                 fos.Close();
             }
         }
     }
     return of;
 }
Beispiel #39
0
 /// <summary>
 /// Formats the results of a merge of exactly two <see cref="RawText"/> objects in
 /// a Git conformant way. This convenience overload accepts the names of the
 /// three sequences (the base plus the two merged sequences) as explicit
 /// parameters, so the caller does not have to build a List.
 /// </summary>
 /// <param name="out">
 /// the <see cref="BinaryWriter"/> where to write the textual
 /// presentation
 /// </param>
 /// <param name="res">the merge result which should be presented</param>
 /// <param name="baseName">the name ranges from the base should get</param>
 /// <param name="oursName">the name ranges from ours should get</param>
 /// <param name="theirsName">the name ranges from theirs should get</param>
 /// <param name="charsetName">
 /// the name of the characterSet used when writing conflict
 /// metadata
 /// </param>
 public void formatMerge(BinaryWriter @out, MergeResult res, String baseName,
     String oursName, String theirsName, string charsetName)
 {
     // Delegate to the list-based overload; order is base, ours, theirs.
     var names = new List<String> { baseName, oursName, theirsName };
     formatMerge(@out, res, names, charsetName);
 }
Beispiel #40
0
        /// <summary>Updates the index after a content merge has happened.</summary>
        /// <remarks>
        /// Updates the index after a content merge has happened. If no conflict has
        /// occurred this includes persisting the merged content to the object
        /// database. In case of conflicts this method takes care to write the
        /// correct stages to the index.
        /// </remarks>
        /// <param name="base">the tree parser for the common base</param>
        /// <param name="ours">the tree parser for our side</param>
        /// <param name="theirs">the tree parser for their side</param>
        /// <param name="result">the result of the content merge</param>
        /// <param name="of">the file the merged content was written to</param>
        /// <exception cref="System.IO.FileNotFoundException">System.IO.FileNotFoundException
        /// 	</exception>
        /// <exception cref="System.IO.IOException">System.IO.IOException</exception>
        private void UpdateIndex(CanonicalTreeParser @base, CanonicalTreeParser ours, CanonicalTreeParser
			 theirs, MergeResult<RawText> result, FilePath of)
        {
            if (result.ContainsConflicts())
            {
                // a conflict occurred, the file will contain conflict markers
                // the index will be populated with the three stages and only the
                // workdir (if used) contains the halfways merged content
                Add(tw.RawPath, @base, DirCacheEntry.STAGE_1);
                Add(tw.RawPath, ours, DirCacheEntry.STAGE_2);
                Add(tw.RawPath, theirs, DirCacheEntry.STAGE_3);
                // Remember the low-level result so conflicts can be reported later.
                mergeResults.Put(tw.PathString, result.Upcast ());
            }
            else
            {
                // no conflict occurred, the file will contain fully merged content.
                // the index will be populated with the new merged version
                DirCacheEntry dce = new DirCacheEntry(tw.PathString);
                int newMode = MergeFileModes(tw.GetRawMode(0), tw.GetRawMode(1), tw.GetRawMode(2)
                    );
                // set the mode for the new content. Fall back to REGULAR_FILE if
                // you can't merge modes of OURS and THEIRS
                dce.FileMode = (newMode == FileMode.MISSING.GetBits()) ? FileMode.REGULAR_FILE :
                    FileMode.FromBits(newMode);
                dce.LastModified = of.LastModified();
                dce.SetLength((int)of.Length());
                // Persist the merged content as a blob and point the entry at it.
                InputStream @is = new FileInputStream(of);
                try
                {
                    dce.SetObjectId(oi.Insert(Constants.OBJ_BLOB, of.Length(), @is));
                }
                finally
                {
                    @is.Close();
                    // inCore merges wrote to a temp file; clean it up once inserted.
                    if (inCore)
                    {
                        FileUtils.Delete(of);
                    }
                }
                builder.Add(dce);
            }
        }
 private void ChooseRemovedItem(string id, KeyedCollection<string, SubscriptionItem> previousAuxList, MergeResult result)
 {
     // The item no longer exists in the current feed: record it as removed.
     var removedItem = previousAuxList.Take(id);
     result.AddItem(removedItem, ItemMergeStatus.RemovedItem);
 }
 private void ChooseNewItem(string id, KeyedCollection<string, FeedItem> currentAuxList, MergeResult result)
 {
     // A feed item with no previous counterpart becomes a new subscription item.
     var feedItem = currentAuxList.Take(id);
     result.AddItem(new SubscriptionItem(feedItem), ItemMergeStatus.NewItem);
 }
 // Forwards straight to the wrapped filter.
 public void Filter(MergeResult mergeResult) => BaseFilter.Filter(mergeResult);
        /// <summary>
        /// Calculates a solution by starting from one route per customer and
        /// repeatedly applying the cheapest feasible route merge until no
        /// merge improves the solution.
        /// </summary>
        /// <param name="problem">the max-time problem instance to solve</param>
        /// <returns>the constructed solution</returns>
        internal override MaxTimeSolution Solve(MaxTimeProblem problem)
        {
            // create the calculator.
            MaxTimeCalculator calculator = new MaxTimeCalculator(problem);

            // create the solution.
            MaxTimeSolution solution = new MaxTimeSolution(problem.Size, true);

            double max = problem.Max.Value;

            // keep placing customer until none are left.
            List<int> customers = new List<int>(problem.Customers);

            // create n routes.
            // NOTE(review): each initial route's weight is computed with
            // CalculateOneRouteIncrease(0, 0) regardless of the customer - confirm
            // this is the intended per-route base weight.
            for (int customer = 0; customer < customers.Count; customer++)
            {
                solution.Add(customer);
                solution[solution.Count - 1] = calculator.CalculateOneRouteIncrease(
                    0, 0);
            }

            // creates a result.
            MergeResult result = new MergeResult();
            result.Weight = double.MaxValue;

            // loop over all route pairs and merge the smallest merge.
            while (result != null)
            { // keep looping until there is no result anymore.
                result = new MergeResult();
                result.Weight = double.MaxValue;

                for (int route1_idx = 1; route1_idx < solution.Count; route1_idx++)
                { // keep looping over all routes.
                    for (int route2_idx = 0; route2_idx < solution.Count; route2_idx++)
                    { // keep looping over all routes.
                        if (route1_idx == route2_idx)
                        { // only consider different routes.
                            // NOTE(review): 'break' stops scanning the remaining
                            // route2 candidates for this route1, so only pairs with
                            // route2_idx < route1_idx are evaluated - confirm this
                            // asymmetric enumeration is intended (vs. 'continue').
                            break;
                        }

                        // calculate the merge result.
                        MergeResult current_result = this.TryMerge(problem, solution,
                            route1_idx, route2_idx, problem.Max.Value);

                        // evaluate the current result.
                        if (current_result != null && current_result.Weight < result.Weight)
                        { // current result is best.
                            result = current_result;
                        }
                    }
                }

                // evaluate the result.
                if (result.Weight < double.MaxValue)
                { // there is a result; apply it!
                    IRoute source = solution.Route(result.RouteSourceId);
                    IRoute target = solution.Route(result.RouteTargetId);

                    //string source_string = source.ToString();
                    //string target_string = target.ToString();

                    if (target.Count > 1 && target.First == target.GetNeigbours(result.CustomerTargetSource)[0])
                    {
                        //throw new Exception();
                    }

                    // create an enumeration of all customers of source in the correct order.
                    IEnumerable<int> source_between = new List<int>(
                        source.Between(result.CustomerSourceSource, result.CustomerSourceTarget));

                    // insert after the complete source.
                    // Splice the source customers into the target route between
                    // CustomerTargetSource and its current successor.
                    int previous = result.CustomerTargetSource;
                    int next = target.GetNeigbours(result.CustomerTargetSource)[0];
                    foreach (int source_customer in source_between)
                    {
                        // insert.
                        target.ReplaceEdgeFrom(previous, source_customer);

                        previous = source_customer; // update previous.
                    }
                    target.ReplaceEdgeFrom(previous, next);

                    // remove the source route.
                    solution.Remove(result.RouteSourceId);
                    solution.RemoveWeight(result.RouteTargetId);

                    // calculate the weight of the new route.
                    solution[result.RouteTargetId] = solution[result.RouteTargetId] + result.Weight +
                        solution[result.RouteSourceId];

                    if (!solution.IsValid())
                    {
                        throw new Exception();
                    }
                }
                else
                { // set the result null.
                    result = null;
                }
            }

            return solution;
        }
        /// <summary>
        /// Try and merge route2 into route1, returning the cheapest feasible
        /// merge found (its Weight stays double.MaxValue when no edge pairing
        /// satisfies the max-time bound).
        /// </summary>
        /// <param name="problem">the problem instance providing the weight matrix</param>
        /// <param name="solution">the current solution the routes belong to</param>
        /// <param name="route1_idx">index of the target route (merged into)</param>
        /// <param name="route2_idx">index of the source route (merged from)</param>
        /// <param name="max">the maximum allowed route weight</param>
        /// <returns>the best merge found for this route pair</returns>
        private MergeResult TryMerge(MaxTimeProblem problem, MaxTimeSolution solution, 
            int route1_idx, int route2_idx, double max)
        {
            // get the route weights.
            double route1_weight = solution[route1_idx];
            double route2_weight = solution[route2_idx];

            // creates a result.
            MergeResult result = new MergeResult();
            result.Weight = double.MaxValue;

            // get the two routes.
            IRoute route1 = solution.Route(route1_idx);
            IRoute route2 = solution.Route(route2_idx);

            // just first do the case where both routes are of zero length.
            if (route1.Count == 1 && route2.Count == 1)
            { // calculate the increase when joining the two points.
                // NOTE(review): this branch returns the FIRST feasible pairing it
                // finds rather than the cheapest - confirm that is intended.
                foreach (int customer1 in route1)
                {
                    foreach (int customer2 in route2)
                    {
                        double difference = problem.WeightMatrix[customer1][customer2] +
                                problem.WeightMatrix[customer2][customer1];
                        double new_route_weight = route1_weight + difference + route2_weight;
                        if (new_route_weight < max)
                        {
                            result.Weight = difference;
                            result.RouteSourceId = route2_idx;
                            result.RouteTargetId = route1_idx;
                            result.CustomerSourceSource = customer2;
                            result.CustomerSourceTarget = customer2;
                            result.CustomerTargetSource = customer1;

                            return result;
                        }
                    }
                }
            }

            foreach (Edge route1_edge in route1.Edges())
            { // loop over all route1 edges.
                // calculate weight of the current edge.
                double route1_edge_weight = problem.WeightMatrix[route1_edge.From][route1_edge.To];
                double route1_edge_without = route1_weight - route1_edge_weight;

                if (route2.Count == 1)
                { // there is only one customer.
                    foreach (int customer2 in route2)
                    {
                        //// calculate weight of the current edge.
                        //double route2_edge_weight = problem.WeightMatrix[route2_edge.From][route2_edge.To];
                        //double route2_edge_without = route2_weight - route2_edge_weight;

                        // Cost of detouring route1's edge through the lone customer.
                        double new_edges_weight = problem.WeightMatrix[route1_edge.From][customer2] +
                            problem.WeightMatrix[customer2][route1_edge.To];

                        double difference = problem.WeightDifferenceAfterMerge(solution,
                            new_edges_weight - (route1_edge_weight));

                        // check if the max bound is not violated.
                        double new_route_weight = route1_edge_without + difference + route2_weight; // the customer remain the same.
                        if (new_route_weight < max)
                        {
                            // the difference is smaller than the current result.
                            if (difference < result.Weight)
                            {
                                result.Weight = difference;
                                result.RouteSourceId = route2_idx;
                                result.RouteTargetId = route1_idx;
                                result.CustomerSourceSource = customer2;
                                result.CustomerSourceTarget = customer2;
                                result.CustomerTargetSource = route1_edge.From;
                            }
                        }
                    }
                }
                else
                { // there is at least one edge.
                    foreach (Edge route2_edge in route2.Edges())
                    { // loop over all route2 edges.
                        // calculate weight of the current edge.
                        double route2_edge_weight = problem.WeightMatrix[route2_edge.From][route2_edge.To];
                        double route2_edge_without = route2_weight - route2_edge_weight;

                        // Cost of the two cross edges created by splicing the routes.
                        double new_edges_weight = problem.WeightMatrix[route1_edge.From][route2_edge.To] +
                            problem.WeightMatrix[route2_edge.From][route1_edge.To];

                        double difference = problem.WeightDifferenceAfterMerge(solution,
                            new_edges_weight - (route1_edge_weight + route2_edge_weight));

                        // check if the max bound is not violated.
                        // NOTE(review): this bound formula (route1_edge_weight +
                        // route2_edge_without + new_edges_weight) differs in shape
                        // from the single-customer branch above
                        // (route1_edge_without + difference + route2_weight) -
                        // confirm both compute the intended merged-route weight.
                        double new_route_weight = route1_edge_weight + route2_edge_without + new_edges_weight; // the customer remain the same.
                        if (new_route_weight < max)
                        {
                            // the difference is smaller than the current result.
                            if (difference < result.Weight)
                            {
                                result.Weight = difference;
                                result.RouteSourceId = route2_idx;
                                result.RouteTargetId = route1_idx;
                                result.CustomerSourceSource = route2_edge.To;
                                result.CustomerSourceTarget = route2_edge.From;
                                result.CustomerTargetSource = route1_edge.From;
                            }
                        }
                    }
                }
            }
            return result;
        }
Beispiel #46
0
        /// <summary>
        /// Does the three way merge between a common base and two sequences.
        /// </summary>
        /// <param name="base">base the common base sequence</param>
        /// <param name="ours">ours the first sequence to be merged</param>
        /// <param name="theirs">theirs the second sequence to be merged</param>
        /// <returns>the resulting content</returns>
        public static MergeResult merge(Sequence @base, Sequence ours,
            Sequence theirs)
        {
            List<Sequence> sequences = new List<Sequence>(3);
            sequences.Add(@base);
            sequences.Add(ours);
            sequences.Add(theirs);
            MergeResult result = new MergeResult(sequences);
            EditList oursEdits = new MyersDiff(@base, ours).getEdits();
            IteratorBase<Edit> baseToOurs = oursEdits.iterator();
            EditList theirsEdits = new MyersDiff(@base, theirs).getEdits();
            IteratorBase<Edit> baseToTheirs = theirsEdits.iterator();
            int current = 0; // points to the next line (first line is 0) of base
            // which was not handled yet
            Edit oursEdit = nextEdit(baseToOurs);
            Edit theirsEdit = nextEdit(baseToTheirs);

            // iterate over all edits from base to ours and from base to theirs
            // leave the loop when there are no edits more for ours or for theirs
            // (or both)
            while (theirsEdit != END_EDIT || oursEdit != END_EDIT)
            {
                if (oursEdit.EndA <= theirsEdit.BeginA)
                {
                    // something was changed in ours not overlapping with any change
                    // from theirs. First add the common part in front of the edit
                    // then the edit.
                    if (current != oursEdit.BeginA)
                    {
                        result.add(0, current, oursEdit.BeginA,
                            MergeChunk.ConflictState.NO_CONFLICT);
                    }
                    result.add(1, oursEdit.BeginB, oursEdit.EndB,
                        MergeChunk.ConflictState.NO_CONFLICT);
                    current = oursEdit.EndA;
                    oursEdit = nextEdit(baseToOurs);
                }
                else if (theirsEdit.EndA <= oursEdit.BeginA)
                {
                    // something was changed in theirs not overlapping with any
                    // from ours. First add the common part in front of the edit
                    // then the edit.
                    if (current != theirsEdit.BeginA)
                    {
                        result.add(0, current, theirsEdit.BeginA,
                            MergeChunk.ConflictState.NO_CONFLICT);
                    }
                    result.add(2, theirsEdit.BeginB, theirsEdit.EndB,
                        MergeChunk.ConflictState.NO_CONFLICT);
                    current = theirsEdit.EndA;
                    theirsEdit = nextEdit(baseToTheirs);
                }
                else
                {
                    // here we found a real overlapping modification

                    // if there is a common part in front of the conflict add it
                    if (oursEdit.BeginA != current
                        && theirsEdit.BeginA != current)
                    {
                        result.add(0, current, Math.Min(oursEdit.BeginA,
                            theirsEdit.BeginA), MergeChunk.ConflictState.NO_CONFLICT);
                    }

                    // set some initial values for the ranges in A and B which we
                    // want to handle
                    int oursBeginB = oursEdit.BeginB;
                    int theirsBeginB = theirsEdit.BeginB;
                    // harmonize the start of the ranges in A and B
                    if (oursEdit.BeginA < theirsEdit.BeginA)
                    {
                        theirsBeginB -= theirsEdit.BeginA
                            - oursEdit.BeginA;
                    }
                    else
                    {
                        oursBeginB -= oursEdit.BeginA - theirsEdit.BeginA;
                    }

                    // combine edits:
                    // Maybe an Edit on one side corresponds to multiple Edits on
                    // the other side. Then we have to combine the Edits of the
                    // other side - so in the end we can merge together two single
                    // edits.
                    //
                    // It is important to notice that this combining will extend the
                    // ranges of our conflict always downwards (towards the end of
                    // the content). The starts of the conflicting ranges in ours
                    // and theirs are not touched here.
                    //
                    // This combining is an iterative process: after we have
                    // combined some edits we have to do the check again. The
                    // combined edits could now correspond to multiple edits on the
                    // other side.
                    //
                    // Example: when this combining algorithm works on the following
                    // edits
                    // oursEdits=((0-5,0-5),(6-8,6-8),(10-11,10-11)) and
                    // theirsEdits=((0-1,0-1),(2-3,2-3),(5-7,5-7))
                    // it will merge them into
                    // oursEdits=((0-8,0-8),(10-11,10-11)) and
                    // theirsEdits=((0-7,0-7))
                    //
                    // Since the only interesting thing to us is how in ours and
                    // theirs the end of the conflicting range is changing we let
                    // oursEdit and theirsEdit point to the last conflicting edit
                    Edit nextOursEdit = nextEdit(baseToOurs);
                    Edit nextTheirsEdit = nextEdit(baseToTheirs);
                    for (; ; )
                    {
                        if (oursEdit.EndA > nextTheirsEdit.BeginA)
                        {
                            theirsEdit = nextTheirsEdit;
                            nextTheirsEdit = nextEdit(baseToTheirs);
                        }
                        else if (theirsEdit.EndA > nextOursEdit.BeginA)
                        {
                            oursEdit = nextOursEdit;
                            nextOursEdit = nextEdit(baseToOurs);
                        }
                        else
                        {
                            break;
                        }
                    }

                    // harmonize the end of the ranges in A and B
                    int oursEndB = oursEdit.EndB;
                    int theirsEndB = theirsEdit.EndB;
                    if (oursEdit.EndA < theirsEdit.EndA)
                    {
                        oursEndB += theirsEdit.EndA - oursEdit.EndA;
                    }
                    else
                    {
                        theirsEndB += oursEdit.EndA - theirsEdit.EndA;
                    }

                    // Add the conflict
                    result.add(1, oursBeginB, oursEndB,
                        MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE);
                    result.add(2, theirsBeginB, theirsEndB,
                        MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE);

                    current = Math.Max(oursEdit.EndA, theirsEdit.EndA);
                    oursEdit = nextOursEdit;
                    theirsEdit = nextTheirsEdit;
                }
            }
            // maybe we have a common part behind the last edit: copy it to the
            // result
            if (current < @base.size())
            {
                result.add(0, current, @base.size(), MergeChunk.ConflictState.NO_CONFLICT);
            }
            return result;
        }
Beispiel #47
0
		/// <summary>
		/// Convenience overload for presenting the merge of exactly two
		/// <see cref="NGit.Diff.RawText">NGit.Diff.RawText</see>
		/// sequences in Git conflict-marker format. The three sequence names
		/// (base plus the two merged sides) are taken as explicit parameters,
		/// so the caller does not have to build a name list; this method
		/// assembles the list and delegates to the list-based
		/// <c>FormatMerge</c> overload.
		/// </summary>
		/// <param name="out">
		/// the
		/// <see cref="Sharpen.OutputStream">Sharpen.OutputStream</see>
		/// receiving the textual presentation
		/// </param>
		/// <param name="res">the merge result to present</param>
		/// <param name="baseName">name used for ranges coming from the base</param>
		/// <param name="oursName">name used for ranges coming from ours</param>
		/// <param name="theirsName">name used for ranges coming from theirs</param>
		/// <param name="charsetName">
		/// name of the character set used when writing conflict metadata
		/// </param>
		/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
		public virtual void FormatMerge(OutputStream @out, MergeResult<RawText> res, string baseName
			, string oursName, string theirsName, string charsetName)
		{
			// Order is significant: base, ours, theirs — matching the sequence
			// indices used by the merge result.
			IList<string> sequenceNames = new AList<string>(3);
			foreach (string sequenceName in new[] { baseName, oursName, theirsName })
			{
				sequenceNames.AddItem(sequenceName);
			}
			FormatMerge(@out, res, sequenceNames, charsetName);
		}