public void The_Removed_Columns_Are_In_The_ResultSet()
{
    // Arrange: db1 gains an extra column and db2 drops one, so two columns
    // present in db1 are absent from db2 and should surface as removals.
    var db1 = TestDatabaseLoader.TestDatabase();
    db1.Tables[0].AddColumn(new Column("aaaaaa"));
    var db2 = TestDatabaseLoader.TestDatabase();
    db2.Tables[0].RemoveColumn(db1.Tables[0].Columns[2]);

    // Act
    var result = new DatabaseProcessor().MergeDatabases(db1, db2);

    // Assert: only column operations are produced — one removal per missing column.
    Assert.That(result.TableOperations, Is.Empty);
    Assert.That(result.IndexOperations, Is.Empty);
    Assert.That(result.KeyOperations, Is.Empty);
    Assert.That(result.ColumnOperations, Is.Not.Empty);
    Assert.That(result.ColumnOperations, Has.Count(2));

    IMergeOperation<IColumn> firstOp = result.ColumnOperations.ElementAt(0);
    Assert.That(firstOp.Object, Is.SameAs(db1.Tables[0].Columns[2]));
    Assert.That(firstOp, Is.TypeOf(typeof(ColumnRemovalOperation)));

    IMergeOperation<IColumn> secondOp = result.ColumnOperations.ElementAt(1);
    Assert.That(secondOp.Object, Is.SameAs(db1.Tables[0].Columns[3]));
    Assert.That(secondOp, Is.TypeOf(typeof(ColumnRemovalOperation)));
}
public void The_New_Column_And_The_Changed_Key_Are_In_The_ResultSet()
{
    // Arrange: db2 gains a primary-key column that also joins the existing
    // key and index, so a column addition plus key/index changes are expected.
    var db1 = TestDatabaseLoader.TestDatabase();
    var db2 = TestDatabaseLoader.TestDatabase();
    db2.Tables[0].AddColumn(new Column("Column4") { InPrimaryKey = true });
    db2.Tables[0].Keys[0].AddColumn("Column4");
    db2.Tables[0].Indexes[0].AddColumn("Column4");

    // Act
    var result = new DatabaseProcessor().MergeDatabases(db1, db2);

    // Assert
    Assert.That(result.TableOperations, Is.Empty);

    Assert.That(result.IndexOperations, Has.Count(1));
    var indexOp = result.IndexOperations.ElementAt(0);
    Assert.That(indexOp, Is.TypeOf(typeof(IndexChangeOperation)));
    Assert.That(indexOp.Object, Is.SameAs(db1.Tables[0].Indexes[0]), "Changed index should be equal to PK_Table1 from db1");

    Assert.That(result.KeyOperations, Has.Count(1));
    var keyOp = result.KeyOperations.ElementAt(0);
    Assert.That(keyOp, Is.TypeOf(typeof(KeyChangeOperation)));
    Assert.That(keyOp.Object, Is.SameAs(db1.Tables[0].Keys[0]), "Changed Key should be equal to PK_Table1 from db1");

    Assert.That(result.ColumnOperations, Has.Count(1));
    IMergeOperation<IColumn> columnOp = result.ColumnOperations.ElementAt(0);
    Assert.That(columnOp, Is.TypeOf(typeof(ColumnAdditionOperation)));
    Assert.That(columnOp.Object, Is.SameAs(db2.Tables[0].Columns[3]), "Added column should be equal to Table1.Column4 from db2");
}
public void The_Removed_Column_And_The_Changed_Key_Are_In_The_ResultSet()
{
    // Arrange: db2 loses its key, index and first column, so a removal
    // operation of each kind is expected against the db1 objects.
    var db1 = TestDatabaseLoader.TestDatabase();
    var db2 = TestDatabaseLoader.TestDatabase();
    db2.Tables[0].RemoveKey(db2.Tables[0].Keys[0]);
    db2.Tables[0].RemoveIndex(db2.Tables[0].Indexes[0]);
    db2.Tables[0].RemoveColumn(db2.Tables[0].Columns[0]);

    // Act
    var result = new DatabaseProcessor().MergeDatabases(db1, db2);

    // Assert
    Assert.That(result.TableOperations, Is.Empty);

    Assert.That(result.IndexOperations, Has.Count(1), "There should be one Index operation");
    var indexOp = result.IndexOperations.ElementAt(0);
    Assert.That(indexOp, Is.TypeOf(typeof(IndexRemovalOperation)));
    Assert.That(indexOp.Object, Is.SameAs(db1.Tables[0].Indexes[0]), "Removed index should be equal to PK_Table1 from db1");

    Assert.That(result.KeyOperations, Has.Count(1), "There should be one Key operation");
    var keyOp = result.KeyOperations.ElementAt(0);
    Assert.That(keyOp, Is.TypeOf(typeof(KeyRemovalOperation)));
    Assert.That(keyOp.Object, Is.SameAs(db1.Tables[0].Keys[0]), "Removed Key should be equal to PK_Table1 from db1");

    Assert.That(result.ColumnOperations, Has.Count(1), "There should be one Column operation");
    IMergeOperation<IColumn> columnOp = result.ColumnOperations.ElementAt(0);
    Assert.That(columnOp, Is.TypeOf(typeof(ColumnRemovalOperation)));
    Assert.That(columnOp.Object, Is.SameAs(db1.Tables[0].Columns[0]), "Removed column should be equal to Table1.Column1 from db1");
}
public void The_Removed_Keys_Are_In_The_ResultSet()
{
    // Arrange
    var db1 = TestDatabaseLoader.TestDatabase();
    // This key doesn't exist in the second db, so is effectively "removed".
    db1.Tables[0].AddKey(new Key("aaaaaa"));
    var db2 = TestDatabaseLoader.TestDatabase();
    db2.Tables[0].RemoveKey(db2.Tables[0].Keys[0]);

    // Act
    var result = new DatabaseProcessor().MergeDatabases(db1, db2);

    // Assert: only key operations are produced — both keys of db1 are gone from db2.
    Assert.That(result.TableOperations, Is.Empty);
    Assert.That(result.IndexOperations, Is.Empty);
    Assert.That(result.ColumnOperations, Is.Empty);
    Assert.That(result.KeyOperations, Has.Count(2));

    IMergeOperation<IKey> firstOp = result.KeyOperations.ElementAt(0);
    Assert.That(firstOp.Object, Is.SameAs(db1.Tables[0].Keys[0]));
    Assert.That(firstOp, Is.TypeOf(typeof(KeyRemovalOperation)));

    IMergeOperation<IKey> secondOp = result.KeyOperations.ElementAt(1);
    Assert.That(secondOp.Object, Is.SameAs(db1.Tables[0].Keys[1]));
    Assert.That(secondOp, Is.TypeOf(typeof(KeyRemovalOperation)));
}
public void Init()
{
    // Fresh mocks for every collaborator, rebuilt before each test.
    dataProvider = new Mock<IDataProvider>();
    commitOperation = new Mock<ICommitOperation>();
    treeOperation = new Mock<ITreeOperation>();
    diff = new Mock<IDiffOperation>();

    // System under test, wired against the mocked dependencies.
    mergeOperation = new MergeOperation(
        dataProvider.Object,
        commitOperation.Object,
        treeOperation.Object,
        diff.Object);
}
public TeamMergeViewModel2017(ITeamService teamService, IMergeOperation mergeOperation, IConfigManager configManager, ILogger logger, ISolutionService solutionService)
{
    _logger = logger;

    // Delegate the shared merge commands to the common view model;
    // SetBusyWhileExecutingAsync keeps the busy indicator in sync.
    TeamMergeCommandsViewModel = new TeamMergeCommonCommandsViewModel(
        teamService, mergeOperation, configManager, logger, solutionService, SetBusyWhileExecutingAsync);

    ViewChangesetDetailsCommand = new RelayCommand(ViewChangeset, CanViewChangeset);

    Title = Resources.TeamMerge;
}
public void Initialize()
{
    // Strict mocks: any call that was not explicitly expected fails the test.
    _logger = MockRepository.GenerateStrictMock<ILogger>();
    _solutionService = MockRepository.GenerateStrictMock<ISolutionService>();
    _serviceProvider = MockRepository.GenerateStrictMock<IServiceProvider>();
    _teamService = MockRepository.GenerateStrictMock<ITeamService>();
    _mergeOperation = MockRepository.GenerateStrictMock<IMergeOperation>();
    _configHelper = MockRepository.GenerateStrictMock<IConfigHelper>();

    // System under test.
    _sut = new TeamMergeViewModel(_teamService, _mergeOperation, _configHelper, _logger, _solutionService);
}
public void Initialize()
{
    // Strict mocks: any call that was not explicitly expected fails the test.
    _logger = MockRepository.GenerateStrictMock<ILogger>();
    _solutionService = MockRepository.GenerateStrictMock<ISolutionService>();
    _serviceProvider = MockRepository.GenerateStrictMock<IServiceProvider>();
    _teamService = MockRepository.GenerateStrictMock<ITeamService>();
    _mergeOperation = MockRepository.GenerateStrictMock<IMergeOperation>();
    _configManager = MockRepository.GenerateStrictMock<IConfigManager>();

    // System under test, with the busy-state callback supplied by this fixture.
    _sut = new TeamMergeCommonCommandsViewModel(_teamService, _mergeOperation, _configManager, _logger, _solutionService, SetThingsBusyAndStuffAsync);
}
static Program()
{
    // Composition root: builds the whole operation graph once per process.
    // Assignment order matters — later operations consume earlier ones.
    FileSystem = new FileSystem();
    FileOperator = new PhysicalFileOperator(FileSystem);
    DataProvider = new LocalDataProvider(FileOperator);
    // Every higher-level operation shares the single DataProvider instance.
    Diff = new DiffOperation(DataProvider, new DiffProxy());
    TreeOperation = new TreeOperation(DataProvider);
    CommitOperation = new CommitOperation(DataProvider, TreeOperation);
    TagOperation = new TagOperation(DataProvider);
    ResetOperation = new ResetOperation(DataProvider);
    MergeOperation = new MergeOperation(DataProvider, CommitOperation, TreeOperation, Diff);
    InitOperation = new DefaultInitOperation(DataProvider);
    BranchOperation = new BranchOperation(DataProvider);
    CheckoutOperation = new CheckoutOperation(DataProvider, TreeOperation, CommitOperation, BranchOperation);
    AddOperation = new AddOperation(DataProvider);
    // Method-group shortcut used wherever an id needs resolving.
    OidConverter = DataProvider.GetOid;
}
public MergeInterface Initialize(IMergeOperation op)
{
    // Bind the operation's change list to the grid, restoring any
    // resolution statuses recorded earlier for the same paths.
    _mergeOp = op;
    _mergeData = new FullBindingList<FileCompare>(_mergeOp.GetChanges());

    var savedStatuses = GetExisting();
    MergeStatus savedStatus;
    foreach (var compare in _mergeData)
    {
        if (savedStatuses.TryGetValue(compare.Path, out savedStatus))
        {
            compare.ResolutionStatus = savedStatus;
        }
    }

    lblFilter.Text = string.Format("Filter: ({0} row(s))", _mergeData.Count);

    _mergeData.ListChanged += _mergeData_ListChanged;
    _mergeData.SortChanging += _mergeData_SortChanging;
    grid.DataSource = _mergeData;

    return this;
}
public void The_Removed_Index_Is_In_The_ResultSet()
{
    // Arrange: db2 drops its only index.
    var db1 = TestDatabaseLoader.TestDatabase();
    var db2 = TestDatabaseLoader.TestDatabase();
    db2.Tables[0].RemoveIndex(db2.Tables[0].Indexes[0]);

    // Act
    var result = new DatabaseProcessor().MergeDatabases(db1, db2);

    // Assert: exactly one index-removal operation, nothing else.
    Assert.That(result.TableOperations, Is.Empty);
    Assert.That(result.KeyOperations, Is.Empty);
    Assert.That(result.ColumnOperations, Is.Empty);
    Assert.That(result.IndexOperations, Has.Count(1));

    IMergeOperation<IIndex> removal = result.IndexOperations.ElementAt(0);
    Assert.That(removal.Object, Is.SameAs(db1.Tables[0].Indexes[0]));
    Assert.That(removal, Is.TypeOf(typeof(IndexRemovalOperation)));
}
public void The_New_Key_Is_In_The_ResultSet()
{
    // Arrange: db2 gains a key that db1 does not have.
    var db1 = TestDatabaseLoader.TestDatabase();
    var db2 = TestDatabaseLoader.TestDatabase();
    db2.Tables[0].AddKey(new Key("PK_Table1_1"));

    // Act
    var result = new DatabaseProcessor().MergeDatabases(db1, db2);

    // Assert: exactly one key-addition operation, nothing else.
    Assert.That(result.TableOperations, Is.Empty);
    Assert.That(result.IndexOperations, Is.Empty);
    Assert.That(result.ColumnOperations, Is.Empty);
    Assert.That(result.KeyOperations, Has.Count(1));

    IMergeOperation<IKey> addition = result.KeyOperations.ElementAt(0);
    Assert.That(addition.Object, Is.SameAs(db2.Tables[0].Keys[1]));
    Assert.That(addition, Is.TypeOf(typeof(KeyAdditionOperation)));
}
public TeamMergeCommonCommandsViewModel(ITeamService teamService, IMergeOperation mergeOperation, IConfigManager configManager, ILogger logger, ISolutionService solutionService, Func<Func<Task>, Task> setBusyWhileExecutingAsync)
{
    // Capture collaborators.
    _teamService = teamService;
    _mergeOperation = mergeOperation;
    _configManager = configManager;
    _logger = logger;
    _solutionService = solutionService;
    _setBusyWhileExecutingAsync = setBusyWhileExecutingAsync;

    // Backing collections the view binds to.
    SourcesBranches = new ObservableCollection<string>();
    TargetBranches = new ObservableCollection<string>();
    ProjectNames = new ObservableCollection<string>();
    Changesets = new ObservableCollection<Changeset>();
    SelectedChangesets = new ObservableCollection<Changeset>();

    // Commands exposed to the view.
    MergeCommand = new AsyncRelayCommand(MergeAsync, CanMerge);
    FetchChangesetsCommand = new AsyncRelayCommand(FetchChangesetsAsync, CanFetchChangesets);
    SelectWorkspaceCommand = new RelayCommand<Workspace>(SelectWorkspace);
    OpenSettingsCommand = new RelayCommand(OpenSettings);
}
public MergeInterface Initialize(IMergeOperation op)
{
    // Bind the operation's change list to the grid. When resuming an
    // earlier merge, previously recorded resolution statuses are restored.
    _mergeOp = op;
    _mergeData = new FullBindingList<FileCompare>(_mergeOp.GetChanges());

    if (ContinueLastMerge)
    {
        var savedStatuses = GetExisting();
        MergeStatus savedStatus;
        foreach (var compare in _mergeData)
        {
            if (savedStatuses.TryGetValue(compare.Path, out savedStatus))
            {
                compare.ResolutionStatus = savedStatus;
            }
        }
    }

    lblFilter.Text = string.Format("Filter: ({0} row(s))", _mergeData.Count);

    _mergeData.ListChanged += _mergeData_ListChanged;
    _mergeData.SortChanging += _mergeData_SortChanging;
    grid.DataSource = _mergeData;

    return this;
}
public void The_Changed_Key_Is_In_The_ResultSet()
{
    // Arrange: both keys share the same UID so they are treated as the
    // same key; db2's copy differs only in its key type.
    var db1 = TestDatabaseLoader.TestDatabase();
    db1.Tables[0].Keys[0].UID = new Guid(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1);
    var db2 = TestDatabaseLoader.TestDatabase();
    db2.Tables[0].Keys[0].Keytype = DatabaseKeyType.None;
    db2.Tables[0].Keys[0].UID = new Guid(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1);

    // Act
    var result = new DatabaseProcessor().MergeDatabases(db1, db2);

    // Assert: exactly one key-change operation, nothing else.
    Assert.That(result.TableOperations, Is.Empty);
    Assert.That(result.IndexOperations, Is.Empty);
    Assert.That(result.ColumnOperations, Is.Empty);
    Assert.That(result.KeyOperations, Has.Count(1));

    IMergeOperation<IKey> change = result.KeyOperations.ElementAt(0);
    Assert.That(change.Object, Is.SameAs(db1.Tables[0].Keys[0]));
    Assert.That(change, Is.TypeOf(typeof(KeyChangeOperation)));
}
public TeamMergeViewModel(ITeamService teamService, IMergeOperation mergeOperation, IConfigHelper configHelper, ILogger logger, ISolutionService solutionService)
    : base(logger)
{
    // Capture collaborators (logger is handled by the base class).
    _teamService = teamService;
    _mergeOperation = mergeOperation;
    _configHelper = configHelper;
    _solutionService = solutionService;

    // Backing collections the view binds to.
    SourcesBranches = new ObservableCollection<string>();
    TargetBranches = new ObservableCollection<string>();
    ProjectNames = new ObservableCollection<string>();
    Changesets = new ObservableCollection<ChangesetModel>();
    SelectedChangesets = new ObservableCollection<ChangesetModel>();

    // Commands exposed to the view.
    ViewChangesetDetailsCommand = new RelayCommand(ViewChangeset, CanViewChangeset);
    MergeCommand = new AsyncRelayCommand(MergeAsync, CanMerge);
    FetchChangesetsCommand = new AsyncRelayCommand(FetchChangesetsAsync, CanFetchChangesets);
    SelectWorkspaceCommand = new RelayCommand<WorkspaceModel>(SelectWorkspace);
    OpenSettingsCommand = new RelayCommand(OpenSettings);

    Title = Resources.TeamMerge;
}
/// <summary>Records a key-level merge operation in the result set.</summary>
public void AddKeyOperation(IMergeOperation<IKey> operation) => keyOperations.Add(operation);
/// <summary>Records a relationship-level merge operation in the result set.</summary>
public void AddRelationshipOperation(IMergeOperation<Relationship> operation) => relationshipOperations.Add(operation);
/// <summary>Records an index-level merge operation in the result set.</summary>
public void AddIndexOperation(IMergeOperation<IIndex> operation) => indexOperations.Add(operation);
/// <summary>Records a table-level merge operation in the result set.</summary>
public void AddTableOperation(IMergeOperation<ITable> op) => tableOperations.Add(op);
/// <summary>Records a view-level merge operation in the result set.</summary>
public void AddViewOperation(IMergeOperation<ITable> op) => viewOperations.Add(op);
/// <summary>
/// Applies the user's TakeLocal / TakeRemote resolutions to the merge
/// working directory. Progress is reported via OnProgressChanged; success
/// or failure is signalled via OnActionComplete (exceptions are not thrown).
/// </summary>
public void Execute(IEnumerable<FileCompare> files, IMergeOperation mergeOp)
{
    try
    {
        // Ask mercurial which paths in the merge directory have pending changes.
        var output = new StringBuilder();
        ExecCmd("hg", "status", mergeOp.MergePath(""), output);
        var statuses = GetStatus(output.ToString());

        // Only files the user explicitly resolved are processed.
        var toProcess = files.Where(f => f.ResolutionStatus == MergeStatus.TakeLocal ||
                                         f.ResolutionStatus == MergeStatus.TakeRemote)
                             .ToArray();

        for (var i = 0; i < toProcess.Length; i++)
        {
            // Declared inside the loop (was hoisted out); unused
            // WorkingDirStatus local removed.
            var file = toProcess[i];
            var path = mergeOp.MergePath(file.Path);

            if (file.ResolutionStatus == MergeStatus.TakeLocal)
            {
                if (file.InLocal == FileStatus.DoesntExist)
                {
                    // Local side deleted the file: remove it from the merge dir.
                    if (File.Exists(path)) File.Delete(path);
                }
                else if (statuses.ContainsKey(file.Path))
                {
                    // NOTE(review): only paths hg reports as changed are
                    // overwritten with the local copy — confirm this is intended.
                    Directory.CreateDirectory(Path.GetDirectoryName(path));
                    using (var write = new FileStream(path, FileMode.Create, FileAccess.Write))
                    {
                        mergeOp.GetLocal(file.Path).CopyTo(write);
                    }
                }
            }
            else
            {
                // TakeRemote: replace whatever is on disk with the remote copy.
                if (File.Exists(path)) File.Delete(path);

                if (file.InRemote != FileStatus.DoesntExist)
                {
                    Directory.CreateDirectory(Path.GetDirectoryName(path));
                    using (var write = new FileStream(path, FileMode.Create, FileAccess.Write))
                    {
                        mergeOp.GetRemote(file.Path).CopyTo(write);
                    }
                }
            }

            OnProgressChanged("Processing files...", ((i + 1) * 100) / toProcess.Length);
        }

        OnActionComplete();
    }
    catch (Exception ex)
    {
        // Deliberate catch-all: the failure is surfaced to the caller via the
        // completion callback instead of an unhandled exception.
        OnActionComplete(ex);
    }
}
/// <summary>
/// Applies the user's TakeLocal / TakeRemote resolutions to the merge
/// working directory. Progress is reported via OnProgressChanged; success
/// or failure is signalled via OnActionComplete (exceptions are not thrown).
/// </summary>
public void Execute(IEnumerable<FileCompare> files, IMergeOperation mergeOp)
{
    try
    {
        // Ask mercurial which paths in the merge directory have pending changes.
        var output = new StringBuilder();
        ExecCmd("hg", "status", mergeOp.MergePath(""), output);
        var statuses = GetStatus(output.ToString());

        // Only files the user explicitly resolved are processed.
        var toProcess = files.Where(f => f.ResolutionStatus == MergeStatus.TakeLocal ||
                                         f.ResolutionStatus == MergeStatus.TakeRemote)
                             .ToArray();

        for (var i = 0; i < toProcess.Length; i++)
        {
            // Declared inside the loop (was hoisted out); unused
            // WorkingDirStatus local removed.
            var file = toProcess[i];
            var path = mergeOp.MergePath(file.Path);

            if (file.ResolutionStatus == MergeStatus.TakeLocal)
            {
                if (file.InLocal == FileStatus.DoesntExist)
                {
                    // Local side deleted the file: remove it from the merge dir.
                    if (File.Exists(path))
                    {
                        File.Delete(path);
                    }
                }
                else if (statuses.ContainsKey(file.Path))
                {
                    // NOTE(review): only paths hg reports as changed are
                    // overwritten with the local copy — confirm this is intended.
                    Directory.CreateDirectory(Path.GetDirectoryName(path));
                    using (var write = new FileStream(path, FileMode.Create, FileAccess.Write))
                    {
                        mergeOp.GetLocal(file.Path).CopyTo(write);
                    }
                }
            }
            else
            {
                // TakeRemote: replace whatever is on disk with the remote copy.
                if (File.Exists(path))
                {
                    File.Delete(path);
                }

                if (file.InRemote != FileStatus.DoesntExist)
                {
                    Directory.CreateDirectory(Path.GetDirectoryName(path));
                    using (var write = new FileStream(path, FileMode.Create, FileAccess.Write))
                    {
                        mergeOp.GetRemote(file.Path).CopyTo(write);
                    }
                }
            }

            OnProgressChanged("Processing files...", ((i + 1) * 100) / toProcess.Length);
        }

        OnActionComplete();
    }
    catch (Exception ex)
    {
        // Deliberate catch-all: the failure is surfaced to the caller via the
        // completion callback instead of an unhandled exception.
        OnActionComplete(ex);
    }
}
/// <summary>Records a column-level merge operation in the result set.</summary>
public void AddColumnOperation(IMergeOperation<IColumn> op) => columnOperations.Add(op);