/// <summary>
/// Merges the currently selected shapes and reports the outcome to the user.
/// </summary>
public static void MergeShapes()
{
    var result = MergeOperation.Merge();

    if (result == MergeResult.Ok)
    {
        // Redraw without reloading data layers, then refresh the rest of the UI.
        App.Map.Redraw2(tkRedrawType.RedrawSkipDataLayers);
        App.RefreshUI();
        MessageHelper.Info("Shapes were merged successfully.");
        return;
    }

    // Map each failure code to its user-facing message.
    string message;
    switch (result)
    {
        case MergeResult.TooManyShapes:
            message = "Too many shapes. The number of shapes for operation is limited to 50.";
            break;
        case MergeResult.Failed:
            message = "Failed to merge.";
            break;
        case MergeResult.NoInput:
            message = "No input for operation was found.";
            break;
        default:
            return;
    }

    MessageHelper.Info(message);
}
/// <summary>
/// Forwards a DML event to the registered log callback, if one is set.
/// </summary>
/// <param name="operation">The merge operation being logged.</param>
/// <param name="oldEntity">The entity state before the operation.</param>
/// <param name="newEntity">The entity state after the operation.</param>
private void LogDml(MergeOperation operation, TEntity oldEntity, TEntity newEntity)
{
    // Null-conditional invoke reads the delegate reference once, so it cannot
    // throw if another thread clears _dmlLogCallback between check and call
    // (the original check-then-invoke pattern could).
    _dmlLogCallback?.Invoke(operation, oldEntity, newEntity);
}
/// <summary>
/// Processes a delete operation: when the target index holds an array, enqueues
/// delete operations for every child index (and the unknown index) so the whole
/// subtree is removed, then delegates the index itself to the worker's custom
/// delete handling.
/// </summary>
/// <param name="operation">The delete operation to process.</param>
/// <param name="operationAccessor">The operation accessor of the concrete merge worker.</param>
private void processDeleteOperation(MergeOperation operation, TrackingMergeWorkerOperationAccessor operationAccessor)
{
    MemoryIndex targetIndex = operation.TargetIndex;
    IIndexDefinition targetDefinition;
    if (targetStructure.TryGetIndexDefinition(targetIndex, out targetDefinition))
    {
        // Index is set in target snapshot
        if (targetDefinition.Array != null)
        {
            // Target contains array - continue deleting its children
            AssociativeArray targetArray = targetDefinition.Array;
            IArrayDescriptor targetArrayDescriptor = targetStructure.GetDescriptor(targetArray);
            foreach (var index in targetArrayDescriptor.Indexes)
            {
                // Enqueue delete operation for every child index
                MemoryIndex childIndex = index.Value;
                MergeOperation childOperation = new MergeOperation();
                childOperation.SetTargetIndex(childIndex);
                childOperation.SetDeleteOperation();
                operationQueue.AddLast(childOperation);
            }

            // Enqueue delete operation for the unknown (any-key) index as well
            MergeOperation unknownOperation = new MergeOperation();
            unknownOperation.SetTargetIndex(targetArrayDescriptor.UnknownIndex);
            unknownOperation.SetUndefined();
            unknownOperation.SetDeleteOperation();
            operationQueue.AddLast(unknownOperation);
        }

        // Let the concrete worker perform its own delete handling for this index.
        operationAccessor.provideCustomDeleteOperation(targetIndex, targetDefinition);
    }
    // NOTE(review): indexes missing from the target structure are silently skipped -
    // presumably there is nothing to delete in that case; confirm with callers.
}
/// <summary>
/// Merges the currently selected shapes and reports the outcome to the user.
/// </summary>
public void MergeShapes()
{
    var result = MergeOperation.Run(_context);

    if (result == MergeResult.Ok)
    {
        // Redraw without reloading data layers.
        _context.Map.Redraw(RedrawType.SkipDataLayers);
        MessageService.Current.Info("Shapes were merged successfully.");
    }
    else if (result == MergeResult.TooManyShapes)
    {
        MessageService.Current.Info("Too many shapes. The number of shapes for operation is limited to 50.");
    }
    else if (result == MergeResult.Failed)
    {
        MessageService.Current.Info("Failed to merge.");
    }
    else if (result == MergeResult.NoInput)
    {
        MessageService.Current.Info("No input for operation was found.");
    }
}
/// <summary>
/// Builds the SQL merge command that updates (and optionally inserts) per-user
/// category settings, driven by the flags on <paramref name="parameters"/>.
/// </summary>
/// <param name="parameters">Flags controlling which columns are updated and whether missing rows are inserted.</param>
/// <param name="context">The database context the merge runs against.</param>
/// <param name="prefix">Optional prefix for the table-valued-parameter type name.</param>
/// <returns>The constructed merge command text.</returns>
public string CreateCategoryQuery(UpdateParameters parameters, DbContext context, string prefix = null)
{
    string typeName = prefix + CoreTVP.UPDATE_USER_TYPE;
    var operation = new MergeOperation<UserCategorySettings<Guid>>(
        context, null, typeName, CoreTVP.UPDATE_USERS_PARAMETER_NAME);

    // Rows match on (user, delivery type, category).
    operation.Compare
        .IncludeProperty(p => p.UserID)
        .IncludeProperty(p => p.DeliveryType)
        .IncludeProperty(p => p.CategoryID);

    // Only explicitly assigned columns participate in the UPDATE.
    operation.Update.ExcludeAllPropertiesByDefault = true;

    if (parameters.UpdateCategorySendCount)
    {
        operation.Update.Assign(t => t.SendCount, (t, s) => t.SendCount + s.SendCount);
    }

    if (parameters.UpdateCategoryLastSendDateUtc)
    {
        operation.Update.Assign(t => t.LastSendDateUtc, (t, s) => DateTime.UtcNow);
    }

    if (parameters.CreateCategoryIfNotExist)
    {
        operation.Insert
            .IncludeDefault(t => t.IsEnabled, true)
            .IncludeDefault(t => t.LastSendDateUtc, DateTime.UtcNow);
    }

    var mode = parameters.CreateCategoryIfNotExist ? MergeType.Upsert : MergeType.Update;
    return operation.ConstructCommand(mode);
}
/// <summary>
/// Bulk-upserts generated blogs inside a single read-committed transaction and
/// prints the elapsed time.
/// </summary>
private static async Task Exec()
{
    Console.WriteLine("----------------------" + "------------");
    var timer = Stopwatch.StartNew();

    using (var db = new MyContext())
    using (var transaction = db.Database.BeginTransaction(IsolationLevel.ReadCommitted))
    {
        var upsert = new MergeOperation<Blog>(db);
        upsert.WithData(() => GetBlogToInsert(100500));
        // On key conflict (BlogId), overwrite the Url from the incoming row.
        upsert.OnConflictDoUpdate(
            (Blog b) => new { b.BlogId },
            (target, excluded) => new Blog { Url = excluded.Url });

        await upsert.ExecuteAsync();
        transaction.Commit();
    }

    timer.Stop();
    Console.WriteLine("-------------- DONE --------------");
    Console.WriteLine(timer.Elapsed);
}
/// <summary>
/// Smoke test: merges three dummy entities to exercise the perf-counter code path.
/// </summary>
public void PerfCounters()
{
    var operation = new MergeOperation();
    var bfim = A.Dummy<MergeEntity>();
    var cfim = A.Dummy<MergeEntity>();
    var afim = A.Dummy<MergeEntity>();

    operation.Merge(MergeKind.Auto, bfim, cfim, afim);
}
// NOTE: when editing this method, reset the whole VS experimental instance by deleting
// everything under 'C:\Users\YOUR_NAME\AppData\Local\Microsoft\VisualStudio\NUMBER_WITH_EXP_IN',
// or delete only this extension's folder inside it to reset just the extension.
protected override ITeamExplorerSection CreateViewModel(SectionInitializeEventArgs e)
{
    var logger = new Logger();
    var configHelper = new ConfigHelper();

    // Wire the service chain bottom-up: solution -> TFVC -> team/merge services.
    var solutionService = new SolutionService(ServiceProvider, configHelper);
    var tfvcService = new TFVCService(ServiceProvider, solutionService);
    var teamService = new TeamService(ServiceProvider, tfvcService);
    var mergeService = new MergeService(ServiceProvider, tfvcService);
    var mergeOperation = new MergeOperation(mergeService, configHelper);

    // Prefer a view model supplied by the base class; build our own otherwise.
    var baseViewModel = base.CreateViewModel(e);
    return baseViewModel
        ?? new TeamMergeViewModel(teamService, mergeOperation, configHelper, logger, solutionService);
}
/// <summary>
/// Test setup: creates strict mocks for the merge dependencies, the subject
/// under test, and the fixture data used by the merge tests.
/// </summary>
public void Initialize()
{
    // Strict mocks: any call that was not explicitly expected fails the test.
    _mergeService = MockRepository.GenerateStrictMock<IMergeService>();
    _configHelper = MockRepository.GenerateStrictMock<IConfigHelper>();

    _sut = new MergeOperation(_mergeService, _configHelper);

    _currentWorkspaceModel = new WorkspaceModel
    {
        OwnerName = "MyOwnerName",
        Name = "WorkspaceName"
    };
    _sourceBranchName = "SourceBranchName";
    _targetbranchName = "TargetBranchName";
}
/// <summary>
/// Verifies that merging entities with invalid merge definitions throws a
/// TypeInitializationException wrapping a MergeException.
/// </summary>
public void Bad_entity_configurations_throw()
{
    var subject = new MergeOperation();

    ExceptionAssert.Throws<TypeInitializationException>(
        _ => _.InnerException != null && _.InnerException is MergeException,
        () => subject.Merge(
            MergeKind.Auto,
            A.Dummy<InvalidMergeDefEntityBadPropertyImpl>(),
            A.Dummy<InvalidMergeDefEntityBadPropertyImpl>(),
            A.Dummy<InvalidMergeDefEntityBadPropertyImpl>()));

    ExceptionAssert.Throws<TypeInitializationException>(
        _ => _.InnerException != null && _.InnerException is MergeException,
        () => subject.Merge(
            MergeKind.Auto,
            A.Dummy<InvalidMergeDefEntityBadDecoration>(),
            A.Dummy<InvalidMergeDefEntityBadDecoration>(),
            A.Dummy<InvalidMergeDefEntityBadDecoration>()));

    ExceptionAssert.Throws<TypeInitializationException>(
        _ => _.InnerException != null && _.InnerException is MergeException,
        () => subject.Merge(
            MergeKind.Auto,
            A.Dummy<InvalidMergeDefEntityBadTimeStampType>(),
            A.Dummy<InvalidMergeDefEntityBadTimeStampType>(),
            A.Dummy<InvalidMergeDefEntityBadTimeStampType>()));

    // NOTE(review): an InvalidMergeDefEntityOverloadedDecoration assertion was
    // commented out in the original and remains disabled.
}
/// <summary>
/// Drains the operation queue, dispatching each dequeued operation to either the
/// delete or the merge handler until no operations remain. Handlers may enqueue
/// further operations, which are processed in the same loop.
/// </summary>
protected void processMergeOperations()
{
    while (operationQueue.Count > 0)
    {
        // Pop the next operation from the front of the queue.
        MergeOperation current = operationQueue.First.Value;
        operationQueue.RemoveFirst();

        var accessor = createNewOperationAccessor(current);

        if (current.IsDeleteOperation)
        {
            processDeleteOperation(current, accessor);
        }
        else
        {
            processMergeOperation(current, accessor);
        }
    }
}
/// <summary>
/// Builds a merging spec from its YAML node: optional 'path' and 'extension',
/// 'overwrite' key list, an 'operation' (defaulting to JSON merge), and an
/// optional 'move_keys' mapping.
/// </summary>
/// <param name="node">The YAML mapping describing the spec.</param>
public MergingSpec(YamlMappingNode node)
{
    var pathNode = node.TryGet("path");
    Path = pathNode == null ? null : Util.Split((string)pathNode);

    var extNode = node.TryGet("extension");
    if (extNode == null)
    {
        Extension = null;
    }
    else
    {
        // Normalize so the extension always carries a leading dot.
        var ext = (string)extNode;
        Extension = ext.StartsWith('.') ? ext : "." + ext;
    }

    OverwriteKeys = node.Go("overwrite").ToStringList() ?? new List<string>();

    var operationNode = node.TryGet("operation");
    Operation = operationNode == null
        ? MergeOperation.MergeJson
        : StringUtils.ParseUnderscoredEnum<MergeOperation>((string)operationNode);

    KeyMover = node.Go("move_keys").NullableParse(x => new KeyMover((YamlMappingNode)x));
}
/// <summary>
/// Repeatedly evaluates implicit changes for a CUD result until a fixed point is
/// reached (no merge extension or listener produces further implicit changes).
/// The final merge operation sequence is appended to
/// <paramref name="mergeOperationSequence"/> for the subsequent real merge.
/// </summary>
/// <param name="cudResult">The explicit changes to start from.</param>
/// <param name="methodDescription">Description of the calling method (passed through).</param>
/// <param name="mergeOperationSequence">Receives the final ordered merge operations.</param>
/// <param name="incrementalState">Incremental merge state providing the state cache.</param>
/// <returns>The CUD result including all implicit changes.</returns>
protected ICUDResult WhatIfMerged(ICUDResult cudResult, IMethodDescription methodDescription, List<MergeOperation> mergeOperationSequence, IncrementalMergeState incrementalState)
{
    IList<MergeOperation> lastMergeOperationSequence;
    while (true)
    {
        // Re-bucket and re-sequence on every pass: implicit changes may add
        // entities of new types.
        IMap<Type, IList<IChangeContainer>> sortedChanges = BucketSortChanges(cudResult.AllChanges);
        lastMergeOperationSequence = CreateMergeOperationSequence(sortedChanges);
        ParamHolder<bool> hasAtLeastOneImplicitChange = new ParamHolder<bool>(false);
        // Local copy captured by the delegate below.
        IList<MergeOperation> fLastMergeOperationSequence = lastMergeOperationSequence;
        cudResult = CacheContext.ExecuteWithCache(incrementalState.GetStateCache(), delegate(ICUDResult cudResult2)
        {
            // Let every merge extension contribute implicit changes, folding
            // each contribution back into the accumulated result.
            for (int a = 0, size = fLastMergeOperationSequence.Count; a < size; a++)
            {
                MergeOperation mergeOperation = fLastMergeOperationSequence[a];
                IMergeServiceExtension mergeServiceExtension = mergeOperation.MergeServiceExtension;
                ICUDResult explAndImplCudResult = mergeServiceExtension.EvaluateImplicitChanges(cudResult2, incrementalState);
                cudResult2 = MergeCudResult(cudResult2, explAndImplCudResult, mergeServiceExtension, hasAtLeastOneImplicitChange, incrementalState);
            }
            return (cudResult2);
        }, cudResult);
        // Merge listeners get a pre-merge hook and may also add implicit changes.
        foreach (IMergeListener mergeListener in mergeListeners.GetExtensions())
        {
            ICUDResult explAndImplCudResult = mergeListener.PreMerge(cudResult, incrementalState.GetStateCache());
            cudResult = MergeCudResult(cudResult, explAndImplCudResult, mergeListener, hasAtLeastOneImplicitChange, incrementalState);
        }
        // Fixed point: nothing produced an implicit change this pass.
        if (!hasAtLeastOneImplicitChange.Value)
        {
            break;
        }
    }
    mergeOperationSequence.AddRange(lastMergeOperationSequence);
    return (cudResult);
}
/// <summary>
/// Gathers, from every source container, the memory index for the given child
/// name and records it on the merge operation. Containers that lack the child
/// contribute their unknown index instead (whole subtree merge), which also
/// marks the operation as possibly undefined.
/// </summary>
/// <param name="childName">Name of the child to look up.</param>
/// <param name="sourceContainers">The source containers to search.</param>
/// <param name="operation">Operation that receives the collected source contexts.</param>
/// <returns><c>true</c> when at least one container defines the child directly.</returns>
private bool collectIndexes(string childName, List<ContainerContext> sourceContainers, MergeOperation operation)
{
    bool foundInAnyContainer = false;

    foreach (ContainerContext container in sourceContainers)
    {
        MemoryIndex directIndex;
        if (container.IndexContainer.TryGetIndex(childName, out directIndex))
        {
            // This container defines the child - merge directly from it.
            operation.Add(new MergeOperationContext(directIndex, container.SnapshotContext));
            foundInAnyContainer = true;
            continue;
        }

        // Child missing here - fall back to the unknown index as the source.
        // The whole subtree then has to be merged in, and the result may be undefined.
        operation.Add(
            new MergeOperationContext(
                container.IndexContainer.UnknownIndex,
                container.SnapshotContext,
                MergeOperationType.WholeSubtree));
        operation.SetUndefined();
    }

    return foundInAnyContainer;
}
/// <summary>
/// A dirty update on a type where not every property can merge still resolves:
/// the after-image wins for the merged state, while each image keeps its own
/// timestamp and the before/current images keep their original names.
/// </summary>
public void Dirty_update_merges_on_type_where_not_all_properties_can_merge_resolves()
{
    // Arrange: before-, current-, and after-images of the same entity (Id 1).
    var bfim = new MergeTypeEntity { Timestamp = _ts1, Name = "foo", Id = 1 };
    var cfim = new MergeTypeEntity { Timestamp = _ts2, Name = "bar", Id = 1 };
    var afim = new MergeTypeEntity { Timestamp = _ts1, Name = "baz", Id = 1 };
    var operation = new MergeOperation<MergeTypeEntity>(bfim, cfim, afim);

    // Act
    var outcome = operation.Merge();

    // Assert
    Assert.IsTrue(outcome.IsResolved());
    Assert.AreEqual("foo", bfim.Name);
    Assert.AreEqual("baz", cfim.Name);
    Assert.AreEqual("baz", afim.Name);
    Assert.AreEqual(_ts1, bfim.Timestamp);
    Assert.AreEqual(_ts2, cfim.Timestamp);
    Assert.AreEqual(_ts1, afim.Timestamp);
}
/// <summary>
/// Initializes a matched-row merge clause.
/// </summary>
/// <param name="predicate">Optional filter restricting which matched rows the clause applies to.</param>
/// <param name="operation">The operation to perform; must not be null.</param>
public MergeWhenMatched(Predicate predicate, MergeOperation operation)
{
    Debug.Assert(operation != null);

    this.operation = operation;
    this.predicate = predicate;
}
/// <summary>
/// Processes a single merge operation: feeds every source index to the worker's
/// accessor, determines the target array (reusing, adopting, or creating one),
/// and enqueues follow-up operations for all array children.
/// </summary>
/// <param name="operation">The operation.</param>
/// <param name="operationAccessor">The operation accessor of the concrete merge worker.</param>
/// <exception cref="System.Exception">
/// Error merging structure in readonly mode - undefined index + targetIndex
/// or
/// Error merging structure in readonly mode - target descriptor for + targetIndex
/// </exception>
private void processMergeOperation(MergeOperation operation, TrackingMergeWorkerOperationAccessor operationAccessor)
{
    MemoryIndex targetIndex = operation.TargetIndex;
    AssociativeArray targetArray = null;
    List<ContainerContext> sourceArrays = new List<ContainerContext>();
    // Array is only "always defined" if the operation itself cannot be undefined.
    bool arrayAlwaysDefined = !operation.IsUndefined;
    bool cotainsArray = false;  // (sic - original local name kept)

    // Iterate sources
    foreach (MergeOperationContext operationContext in operation.Indexes)
    {
        // Retrieve source context and definition
        MemoryIndex sourceIndex = operationContext.Index;
        SnapshotContext context = operationContext.SnapshotContext;
        IIndexDefinition sourceDefinition = context.SourceStructure.GetIndexDefinition(sourceIndex);

        // Provide custom operation for merge algorithm
        operationAccessor.addSource(operationContext, sourceDefinition);

        // Source array
        if (sourceDefinition.Array != null)
        {
            // Becomes target array when not set and the source index IS the target index
            if (targetArray == null && sourceIndex.Equals(targetIndex))
            {
                targetArray = sourceDefinition.Array;
            }
            cotainsArray = true;

            // Save source array to merge descriptors
            IArrayDescriptor descriptor = context.SourceStructure.GetDescriptor(sourceDefinition.Array);
            sourceArrays.Add(new ContainerContext(context, descriptor, operationContext.OperationType));

            // Enqueue all array indexes when the whole subtree should be merged
            if (operationContext.OperationType == MergeOperationType.WholeSubtree)
            {
                foreach (var index in descriptor.Indexes)
                {
                    operation.TreeNode.GetOrCreateChild(index.Key);
                }
                operation.TreeNode.GetOrCreateAny();
            }
        }
        else
        {
            // Source does not contain an array - at least one source is empty
            arrayAlwaysDefined = false;
        }
    }

    IIndexDefinition targetDefinition;
    IArrayDescriptor targetArrayDescriptor = null;
    if (targetStructure.TryGetIndexDefinition(targetIndex, out targetDefinition))
    {
        // Index is set in target snapshot
        if (targetDefinition.Array != null)
        {
            // Target contains array - continue merging into it
            targetArray = targetDefinition.Array;
            targetArrayDescriptor = targetStructure.GetDescriptor(targetArray);
        }
    }
    else
    {
        // Index is not set in target snapshot - create it (only legal when writeable)
        if (isStructureWriteable)
        {
            writeableTargetStructure.NewIndex(targetIndex);
        }
        else
        {
            throw new Exception("Error merging structure in readonly mode - undefined index " + targetIndex);
        }
    }

    // Provide custom operation for merge algorithm
    operationAccessor.provideCustomOperation(targetIndex);

    // Process next array
    if (cotainsArray)
    {
        if (targetArray == null)
        {
            targetArray = targetSnapshot.CreateArray();
        }
        if (targetArrayDescriptor == null)
        {
            // Target does not contain array - create and add new in target snapshot
            if (isStructureWriteable)
            {
                targetArrayDescriptor = Factories.StructuralContainersFactories.ArrayDescriptorFactory.CreateArrayDescriptor(writeableTargetStructure, targetArray, targetIndex);
                writeableTargetStructure.SetDescriptor(targetArray, targetArrayDescriptor);
                writeableTargetStructure.NewIndex(targetArrayDescriptor.UnknownIndex);
                writeableTargetStructure.SetArray(targetIndex, targetArray);
            }
            else
            {
                throw new Exception("Error merging structure in readonly mode - target descriptor for " + targetIndex);
            }
        }

        // Create context and merge descriptors; enqueues child operations
        var arrayContext = new ArrayTargetContainerContext(writeableTargetStructure, targetArrayDescriptor);
        createAndEnqueueOperations(arrayContext, operation.TreeNode, sourceArrays, arrayAlwaysDefined);

        if (isStructureWriteable)
        {
            // Update current descriptor when changed by the enqueued operations
            IArrayDescriptor currentDescriptor = arrayContext.getCurrentDescriptor();
            if (currentDescriptor != targetArrayDescriptor)
            {
                writeableTargetStructure.SetDescriptor(targetArray, currentDescriptor);
            }
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="MergePoint" /> struct with no target.
/// </summary>
/// <param name="operation">The operation.</param>
public MergePoint(MergeOperation operation)
{
    Target = null;
    Operation = operation;
}
/// <summary>
/// Builds the ordered sequence of merge operations from type-bucketed changes:
/// deletes are emitted in reverse persist order, batches containing new entities
/// in forward persist order, and everything left over is batched per merge
/// service extension without any global ordering.
/// </summary>
/// <param name="sortedChanges">Changes bucketed by entity type; entries are consumed/rewritten as they are sequenced.</param>
/// <returns>The ordered list of merge operations.</returns>
protected IList<MergeOperation> CreateMergeOperationSequence(IMap<Type, IList<IChangeContainer>> sortedChanges)
{
    Type[] entityPersistOrder = EntityMetaDataProvider.GetEntityPersistOrder();
    IList<MergeOperation> mergeOperations = new List<MergeOperation>();
    if (entityPersistOrder != null)
    {
        // Pass 1 (reverse persist order): pull out all deletes per type.
        for (int a = entityPersistOrder.Length; a-- > 0;)
        {
            Type orderedEntityType = entityPersistOrder[a];
            IList<IChangeContainer> changes = sortedChanges.Get(orderedEntityType);
            if (changes == null)
            {
                // No changes of current type found. Nothing to do here
                continue;
            }
            List<IChangeContainer> removes = new List<IChangeContainer>(changes.Count);
            List<IChangeContainer> insertsAndUpdates = new List<IChangeContainer>(changes.Count);
            for (int b = changes.Count; b-- > 0;)
            {
                IChangeContainer change = changes[b];
                if (change is DeleteContainer)
                {
                    removes.Add(change);
                }
                else
                {
                    insertsAndUpdates.Add(change);
                }
            }
            if (removes.Count == 0)
            {
                // Nothing to do. Ordering is not necessary here
                continue;
            }
            if (insertsAndUpdates.Count == 0)
            {
                sortedChanges.Remove(orderedEntityType);
            }
            else
            {
                // Keep the non-delete remainder for later passes.
                sortedChanges.Put(orderedEntityType, insertsAndUpdates);
            }
            IMergeServiceExtension mergeServiceExtension = GetServiceForType(orderedEntityType);
            MergeOperation mergeOperation = new MergeOperation();
            mergeOperation.MergeServiceExtension = mergeServiceExtension;
            mergeOperation.ChangeContainer = removes;
            mergeOperations.Add(mergeOperation);
        }
        // Pass 2 (forward persist order): batches that contain at least one new
        // entity must be inserted in the global order.
        for (int a = 0, size = entityPersistOrder.Length; a < size; a++)
        {
            Type orderedEntityType = entityPersistOrder[a];
            IList<IChangeContainer> changes = sortedChanges.Get(orderedEntityType);
            if (changes == null)
            {
                // No changes of current type found. Nothing to do here
                continue;
            }
            bool containsNew = false;
            for (int b = changes.Count; b-- > 0;)
            {
                // A null Id marks a not-yet-persisted (new) entity.
                if (changes[b].Reference.Id == null)
                {
                    containsNew = true;
                    break;
                }
            }
            if (!containsNew)
            {
                // Nothing to do. Ordering is not necessary here
                continue;
            }
            // Remove batch of changes where at least 1 new entity occured and
            // this type of entity has to be inserted in a global order
            sortedChanges.Remove(orderedEntityType);
            IMergeServiceExtension mergeServiceExtension = GetServiceForType(orderedEntityType);
            MergeOperation mergeOperation = new MergeOperation();
            mergeOperation.MergeServiceExtension = mergeServiceExtension;
            mergeOperation.ChangeContainer = changes;
            mergeOperations.Add(mergeOperation);
        }
    }
    // Everything which is left in the sortedChanges map can be merged without global order, so batch together as much as possible
    foreach (Entry<Type, IList<IChangeContainer>> entry in sortedChanges)
    {
        Type type = entry.Key;
        IList<IChangeContainer> unorderedChanges = entry.Value;
        IMergeServiceExtension mergeServiceExtension = GetServiceForType(type);
        if (mergeServiceExtension == null)
        {
            throw new Exception("No extension found to merge entities of type '" + type.FullName + "'");
        }
        bool cont = false;
        // Append to an existing operation that already targets the same extension.
        foreach (MergeOperation existingMergeOperation in mergeOperations)
        {
            if (Object.ReferenceEquals(existingMergeOperation.MergeServiceExtension, mergeServiceExtension))
            {
                IList<IChangeContainer> orderedChanges = existingMergeOperation.ChangeContainer;
                for (int b = unorderedChanges.Count; b-- > 0;)
                {
                    orderedChanges.Add(unorderedChanges[b]);
                }
                cont = true;
                break;
            }
        }
        if (cont)
        {
            continue;
        }
        MergeOperation mergeOperation = new MergeOperation();
        mergeOperation.MergeServiceExtension = mergeServiceExtension;
        mergeOperation.ChangeContainer = unorderedChanges;
        mergeOperations.Add(mergeOperation);
    }
    ; // (stray empty statement kept as-is)
    return (mergeOperations);
}
/// <summary>
/// Initializes a not-matched-by-target merge clause.
/// </summary>
/// <param name="predicate">Optional filter restricting which rows the clause applies to; may be null.</param>
/// <param name="operation">The operation to perform for rows not matched by the target.</param>
public MergeWhenNotMatchedByTarget(Predicate predicate, MergeOperation operation): base(predicate, operation) {}
/// <summary>
/// Initializes a not-matched-by-target merge clause with no predicate (applies to all such rows).
/// </summary>
/// <param name="operation">The operation to perform for rows not matched by the target.</param>
public MergeWhenNotMatchedByTarget(MergeOperation operation): this(null, operation) {}
/// <summary>
/// Initializes a not-matched-by-source merge clause with no predicate (applies to all such rows).
/// </summary>
/// <param name="operation">The operation to perform for rows not matched by the source.</param>
public MergeWhenNotMatchedBySource(MergeOperation operation): this(null, operation) {}
/// <summary>
/// Executes the prepared merge operation sequence against the respective merge
/// service extensions and assembles one combined ORI collection, mapping every
/// per-service result back to its original change index. Changed-on/changed-by
/// values are collapsed to a single value when uniform across all changes.
/// </summary>
/// <param name="cudResult">The full set of changes being merged.</param>
/// <param name="methodDescription">Description of the calling method (passed through).</param>
/// <param name="mergeOperationSequence">Ordered operations, each bound to a merge service extension.</param>
/// <param name="state">Incremental merge state providing the state cache.</param>
/// <returns>The combined ORI collection for all changes.</returns>
protected IOriCollection Intern(ICUDResult cudResult, IMethodDescription methodDescription, IList<MergeOperation> mergeOperationSequence, IncrementalMergeState state)
{
    IList<IChangeContainer> allChanges = cudResult.AllChanges;
    IList<Object> originalRefs = cudResult.GetOriginalRefs();
    // Identity map: change container -> its position in the overall change list.
    IdentityHashMap<IChangeContainer, int> changeToChangeIndexDict = new IdentityHashMap<IChangeContainer, int>();
    for (int a = allChanges.Count; a-- > 0;)
    {
        changeToChangeIndexDict.Put(allChanges[a], a);
    }
    IObjRef[] objRefs = new IObjRef[allChanges.Count];
    long[] allChangedOn = new long[allChanges.Count];
    String[] allChangedBy = new String[allChanges.Count];
    CHashSet<long> changedOnSet = new CHashSet<long>();
    CHashSet<String> changedBySet = new CHashSet<String>();
    for (int a = 0, size = mergeOperationSequence.Count; a < size; a++)
    {
        MergeOperation mergeOperation = mergeOperationSequence[a];
        IMergeServiceExtension mergeServiceExtension = mergeOperation.MergeServiceExtension;
        IList<IChangeContainer> changesForMergeService = mergeOperation.ChangeContainer;
        ICUDResult msCudResult = BuildCUDResult(changesForMergeService, changeToChangeIndexDict, originalRefs);
        // Delegate this batch to its merge service and fold the results back.
        IOriCollection msOriCollection = mergeServiceExtension.Merge(msCudResult, methodDescription);
        MergeController.ApplyChangesToOriginals(msCudResult, msOriCollection, state.GetStateCache());
        IList<IObjRef> allChangeORIs = msOriCollection.AllChangeORIs;
        long? msDefaultChangedOn = msOriCollection.ChangedOn;
        String msDefaultChangedBy = msOriCollection.ChangedBy;
        long[] msAllChangedOn = msOriCollection.AllChangedOn;
        String[] msAllChangedBy = msOriCollection.AllChangedBy;
        for (int b = changesForMergeService.Count; b-- > 0;)
        {
            // Scatter this batch's results back to the global index positions.
            int index = changeToChangeIndexDict.Get(changesForMergeService[b]);
            objRefs[index] = allChangeORIs[b];
            if (msAllChangedOn != null)
            {
                long msChangedOn = msAllChangedOn[b];
                allChangedOn[index] = msChangedOn;
                changedOnSet.Add(msChangedOn);
            }
            else
            {
                allChangedOn[index] = msDefaultChangedOn.Value;
            }
            if (msAllChangedBy != null)
            {
                String msChangedBy = msAllChangedBy[b];
                allChangedBy[index] = msChangedBy;
                changedBySet.Add(msChangedBy);
            }
            else
            {
                allChangedBy[index] = msDefaultChangedBy;
            }
        }
        if (msDefaultChangedOn != null)
        {
            changedOnSet.Add(msDefaultChangedOn.Value);
        }
        if (msDefaultChangedBy != null)
        {
            changedBySet.Add(msDefaultChangedBy);
        }
    }
    OriCollection oriCollection = new OriCollection();
    oriCollection.AllChangeORIs = new List<IObjRef>(objRefs);
    // If every change shares one changed-by/changed-on value, publish the single
    // value instead of the per-change arrays.
    if (changedBySet.Count == 1)
    {
        Iterator<String> iter = changedBySet.Iterator();
        iter.MoveNext();
        oriCollection.ChangedBy = iter.Current;
    }
    else
    {
        oriCollection.AllChangedBy = allChangedBy;
    }
    if (changedOnSet.Count == 1)
    {
        Iterator<long> iter = changedOnSet.Iterator();
        iter.MoveNext();
        oriCollection.ChangedOn = iter.Current;
    }
    else
    {
        oriCollection.AllChangedOn = allChangedOn;
    }
    foreach (IMergeListener mergeListener in mergeListeners.GetExtensions())
    {
        mergeListener.PostMerge(cudResult, objRefs);
    }
    if (originalRefs != null)
    {
        // Set each original ref to null in order to suppress a post-processing in a potentially calling IMergeProcess
        for (int a = originalRefs.Count; a-- > 0;)
        {
            originalRefs[a] = null;
        }
    }
    // TODO DCE must be fired HERE <---
    return (oriCollection);
}
/// <summary>
/// Merging a non-optimistic-concurrency entity is a no-op: the merge resolves
/// to the after-image and never reads any entity property.
/// </summary>
public void Non_OC_results_in_noop()
{
    // Arrange: three dummy images of a chaos (non-OC) entity.
    var bfim = A.Dummy<ChaosEntity>();
    var cfim = A.Dummy<ChaosEntity>();
    var afim = A.Dummy<ChaosEntity>();
    var operation = new MergeOperation<ChaosEntity>(bfim, cfim, afim);

    // Act
    var outcome = operation.Merge();

    // Assert: resolved to the after-image without touching any property.
    Assert.IsTrue(outcome.IsResolved());
    Assert.AreEqual(afim, operation.AFIM);
    A.CallTo(() => bfim.Id).MustNotHaveHappened();
    A.CallTo(() => bfim.Name).MustNotHaveHappened();
    A.CallTo(() => cfim.Id).MustNotHaveHappened();
    A.CallTo(() => cfim.Name).MustNotHaveHappened();
    A.CallTo(() => afim.Id).MustNotHaveHappened();
    A.CallTo(() => afim.Name).MustNotHaveHappened();
}
/// <summary>
/// Creates and enqueues a merge (or delete) operation for every child node of
/// the given tree node, plus one for the unknown (any-key) index when present.
/// Children defined in at least one source become merge operations; children
/// missing from all sources but present in the target become delete operations.
/// </summary>
/// <param name="targetContainerContext">The target container context.</param>
/// <param name="treeNode">The tree node whose children are processed.</param>
/// <param name="sourceContainers">The source containers to collect indexes from.</param>
/// <param name="alwaysDefined">if set to <c>true</c>, the children are defined in every source.</param>
private void createAndEnqueueOperations(
    ITargetContainerContext targetContainerContext,
    MemoryIndexTreeNode treeNode,
    List<ContainerContext> sourceContainers,
    bool alwaysDefined)
{
    IReadonlyIndexContainer targetContainer = targetContainerContext.getSourceContainer();

    // Creates and enqueues merge operations for all child nodes of given node
    foreach (var childNode in treeNode.ChildNodes)
    {
        string childName = childNode.Key;
        MemoryIndexTreeNode childTreeNode = childNode.Value;

        MergeOperation operation = new MergeOperation();
        bool isChildDefined = collectIndexes(childName, sourceContainers, operation);

        if (isChildDefined)
        {
            // Child is defined at least in one collection - enqueue merge operation
            MemoryIndex targetIndex;

            // Use index from target collection or create and add it to the target collection
            if (!targetContainer.TryGetIndex(childName, out targetIndex))
            {
                targetIndex = createNewTargetIndex(targetContainerContext, childName);

                // Index could not be created (e.g. read-only target) - skip the child.
                if (targetIndex == null)
                {
                    continue;
                }
            }

            // Set parameters and add it to collection
            operation.TreeNode = childTreeNode;
            operation.SetTargetIndex(targetIndex);
            if (!alwaysDefined)
            {
                operation.SetUndefined();
            }
            operationQueue.AddLast(operation);
        }
        else
        {
            // Child is not defined - enqueue delete operation
            MemoryIndex targetIndex;
            if (targetContainer.TryGetIndex(childName, out targetIndex))
            {
                // Enqueue delete operation only if target index exists in parent snapshot
                operation.TreeNode = childTreeNode;
                operation.SetTargetIndex(targetIndex);
                operation.SetDeleteOperation();
                operationQueue.AddLast(operation);

                deleteChild(targetContainerContext, childName);
            }
        }
    }

    // Enqueue merge operation for the unknown index if it is defined
    if (treeNode.AnyChild != null)
    {
        MergeOperation unknownOperation = new MergeOperation();
        // Every source contributes its unknown index as a source for the target's unknown index.
        foreach (ContainerContext containerContext in sourceContainers)
        {
            unknownOperation.Add(new MergeOperationContext(
                containerContext.IndexContainer.UnknownIndex, containerContext.SnapshotContext));
        }

        unknownOperation.TreeNode = treeNode.AnyChild;
        unknownOperation.SetTargetIndex(targetContainer.UnknownIndex);
        unknownOperation.SetUndefined();
        operationQueue.AddLast(unknownOperation);
    }
}
/// <summary>
/// Creating an OCC entity (no before-image) resolves to the after-image, with
/// current and after images left untouched.
/// </summary>
public void OC_create_resolves()
{
    // Arrange: a create has no before-image.
    OccEntity bfim = null;
    var cfim = new OccEntity { Timestamp = _ts1, Name = "bar", Id = 1 };
    var afim = new OccEntity { Timestamp = _ts2, Name = "baz", Id = 1 };
    var operation = new MergeOperation<OccEntity>(bfim, cfim, afim);

    // Act
    var outcome = operation.Merge();

    // Assert
    Assert.IsTrue(outcome.IsResolved());
    Assert.AreEqual(afim, operation.AFIM);
    Assert.AreEqual("bar", cfim.Name);
    Assert.AreEqual("baz", afim.Name);
    Assert.AreEqual(_ts1, cfim.Timestamp);
    Assert.AreEqual(_ts2, afim.Timestamp);
}
/// <summary>
/// Initializes a new instance of the <see cref="MergePoint" /> struct with an
/// explicit target.
/// </summary>
/// <param name="operation">The operation.</param>
/// <param name="target">The target. Can be <see langword="null"/>.</param>
public MergePoint(MergeOperation operation, string target)
{
    Operation = operation;
    Target = target;
}
/// <summary>
/// When all three images of an OCC entity carry the same (current) timestamp,
/// the merge resolves and no image is modified.
/// </summary>
public void OC_is_current_resolves()
{
    // Arrange: all images share timestamp _ts1, i.e. nothing is stale.
    var bfim = new OccEntity { Timestamp = _ts1, Name = "foo", Id = 1 };
    var cfim = new OccEntity { Timestamp = _ts1, Name = "bar", Id = 1 };
    var afim = new OccEntity { Timestamp = _ts1, Name = "baz", Id = 1 };
    var operation = new MergeOperation<OccEntity>(bfim, cfim, afim);

    // Act
    var outcome = operation.Merge();

    // Assert: resolved, every image untouched.
    Assert.IsTrue(outcome.IsResolved());
    Assert.AreEqual("foo", bfim.Name);
    Assert.AreEqual("bar", cfim.Name);
    Assert.AreEqual("baz", afim.Name);
    Assert.AreEqual(_ts1, bfim.Timestamp);
    Assert.AreEqual(_ts1, cfim.Timestamp);
    Assert.AreEqual(_ts1, afim.Timestamp);
}
/// <summary>
/// Creates the operation accessor used to process a single merge operation.
/// Implemented by concrete tracking merge workers to supply their own
/// source/target handling.
/// </summary>
/// <param name="operation">The operation the accessor will process.</param>
/// <returns>A new accessor bound to the given operation.</returns>
protected abstract TrackingMergeWorkerOperationAccessor createNewOperationAccessor(MergeOperation operation);
/// <summary>
/// Initializes a matched-row merge clause with no predicate (applies to all matched rows).
/// </summary>
/// <param name="operation">The operation to perform; must not be null.</param>
public MergeWhenMatched(MergeOperation operation): this(null, operation) {}