/// <summary>Posts the current batch to child views as the insert stream (and the prior
/// batch as the remove stream), then rolls the batches over.</summary>
protected void SendBatch()
{
    // Only convert and post when somebody is actually listening
    if (HasViews)
    {
        EventBean[] insertStream = null;
        EventBean[] removeStream = null;
        if (CurrentBatch.IsNotEmpty())
        {
            insertStream = CurrentBatch.ToArray();
        }
        if (LastBatch != null && LastBatch.IsNotEmpty())
        {
            removeStream = LastBatch.ToArray();
        }

        // Post new data (current batch) and old data (prior batch)
        if (insertStream != null || removeStream != null)
        {
            if (InstrumentationHelper.ENABLED)
            {
                InstrumentationHelper.Get().QViewIndicate(this, _lengthBatchViewFactory.ViewName, insertStream, removeStream);
            }
            UpdateChildren(insertStream, removeStream);
            if (InstrumentationHelper.ENABLED)
            {
                InstrumentationHelper.Get().AViewIndicate();
            }
        }
    }

    // Current batch becomes the prior batch; start a fresh one
    LastBatch = CurrentBatch;
    CurrentBatch = new LinkedHashSet<EventBean>();
}
/// <summary>Validate the variant stream definition.</summary>
/// <param name="variantStreamname">the stream name</param>
/// <param name="variantStreamConfig">the configuration information</param>
/// <param name="eventAdapterService">the event adapters</param>
/// <returns>specification for variant streams</returns>
public static VariantSpec ValidateVariantStream(String variantStreamname, ConfigurationVariantStream variantStreamConfig, EventAdapterService eventAdapterService)
{
    // Predefined type variance requires at least one configured type name
    if (variantStreamConfig.TypeVariance == TypeVarianceEnum.PREDEFINED &&
        variantStreamConfig.VariantTypeNames.IsEmpty())
    {
        throw new ConfigurationException(
            "Invalid variant stream configuration, no event type name has been added and default type variance requires at least one type, for name '" +
            variantStreamname + "'");
    }

    // Resolve every configured type name, preserving configuration order
    ICollection<EventType> resolved = new LinkedHashSet<EventType>();
    foreach (String name in variantStreamConfig.VariantTypeNames)
    {
        EventType eventType = eventAdapterService.GetEventTypeByName(name);
        if (eventType == null)
        {
            throw new ConfigurationException(
                "Event type by name '" + name + "' could not be found for use in variant stream configuration by name '" +
                variantStreamname + "'");
        }
        resolved.Add(eventType);
    }
    return new VariantSpec(variantStreamname, resolved.ToArray(), variantStreamConfig.TypeVariance);
}
/// <summary>
/// Validate the variant stream definition.
/// </summary>
/// <param name="variantStreamname">the stream name</param>
/// <param name="variantStreamConfig">the configuration information</param>
/// <param name="repo">the event types</param>
/// <returns>specification for variant streams</returns>
private static VariantSpec ValidateVariantStream(
    string variantStreamname,
    ConfigurationCommonVariantStream variantStreamConfig,
    EventTypeRepositoryImpl repo)
{
    // Predefined type variance requires at least one configured type name
    if (variantStreamConfig.TypeVariance == TypeVariance.PREDEFINED &&
        variantStreamConfig.VariantTypeNames.IsEmpty())
    {
        throw new ConfigurationException(
            "Invalid variant stream configuration, no event type name has been added and default type variance requires at least one type, for name '" +
            variantStreamname + "'");
    }

    // Resolve every configured type name, preserving configuration order
    ISet<EventType> resolved = new LinkedHashSet<EventType>();
    foreach (var name in variantStreamConfig.VariantTypeNames)
    {
        var eventType = repo.GetTypeByName(name);
        if (eventType == null)
        {
            throw new ConfigurationException(
                "Event type by name '" + name + "' could not be found for use in variant stream configuration by name '" +
                variantStreamname + "'");
        }
        resolved.Add(eventType);
    }
    return new VariantSpec(resolved.ToArray(), variantStreamConfig.TypeVariance);
}
/// <summary>
/// Posts the batched events to the child view (current batch as insert stream,
/// prior batch as remove stream) and starts a new batch.
/// </summary>
protected void SendBatch()
{
    if (child != null)
    {
        // Convert the batches to arrays; null signals "no data" downstream
        EventBean[] insertStream = currentBatch.IsEmpty() ? null : currentBatch.ToArray();
        EventBean[] removeStream = (lastBatch == null || lastBatch.IsEmpty()) ? null : lastBatch.ToArray();

        // Post new data (current batch) and old data (prior batch)
        if (insertStream != null || removeStream != null)
        {
            agentInstanceContext.InstrumentationProvider.QViewIndicate(lengthBatchViewFactory, insertStream, removeStream);
            child.Update(insertStream, removeStream);
            agentInstanceContext.InstrumentationProvider.AViewIndicate();
        }
    }

    // Roll the batches: current becomes prior, start a fresh one
    lastBatch = currentBatch;
    currentBatch = new LinkedHashSet<EventBean>();
}
public void ShouldPreserveOrderingOnExcept()
{
    var subject = new LinkedHashSet<int> { 1, 10, 5, 7, 8, 9 };

    subject.ExceptWith(new[] { 7, 10, 9, 18 });

    // Surviving elements keep their original insertion order.
    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 5, 8 }));
}
public void ShouldPreserveOrderingOnUnion()
{
    var subject = new LinkedHashSet<int> { 1, 10, 5 };

    subject.UnionWith(new[] { 10, 30, 15 });

    // Existing members stay put; new members append in encounter order.
    Assert.That(subject, Has.Count.EqualTo(5));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 10, 5, 30, 15 }));
}
public void CanIterateInInsertionOrder()
{
    // Deliberately add in an order different from the natural ordering.
    var subject = new LinkedHashSet<int> { 1, 10, 5 };

    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 10, 5 }));
}
public void CanIterateInInsertionOrder()
{
    // Deliberately insert out of natural order to prove iteration follows insertion.
    var subject = new LinkedHashSet<int>();
    subject.Add(1);
    subject.Add(10);
    subject.Add(5);

    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 10, 5 }));
}
public void ReinsertShouldNotAffectOrdering()
{
    // Deliberately add in an order different from the natural ordering.
    var subject = new LinkedHashSet<int> { 1, 10, 5 };

    var wasAdded = subject.Add(1);

    // The re-added element must remain first and the add must report a no-op.
    Assert.That(wasAdded, Is.False);
    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 10, 5 }));
}
/// <summary>
/// Builds and registers a variant event type from a create-schema specification.
/// A type entry of "*" selects ANY-type variance; otherwise each named type is
/// resolved and the variance is PREDEFINED.
/// </summary>
private EventType HandleVariantType(
    CreateSchemaDesc spec,
    StatementCompileTimeServices services)
{
    // Variant types cannot inherit properties via copy-from
    if (spec.CopyFrom != null && !spec.CopyFrom.IsEmpty())
    {
        throw new ExprValidationException("Copy-from types are not allowed with variant types");
    }

    var eventTypeName = spec.SchemaName;

    // Determine typing: "*" means any type, otherwise resolve each named type
    var isAny = false;
    ISet<EventType> resolvedTypes = new LinkedHashSet<EventType>();
    foreach (var typeName in spec.Types)
    {
        if (typeName.Trim().Equals("*"))
        {
            isAny = true;
        }
        else
        {
            var eventType = services.EventTypeCompileTimeResolver.GetTypeByName(typeName);
            if (eventType == null)
            {
                throw new ExprValidationException(
                    "Event type by name '" + typeName + "' could not be found for use in variant stream by name '" + eventTypeName + "'");
            }
            resolvedTypes.Add(eventType);
        }
    }

    var variantSpec = new VariantSpec(resolvedTypes.ToArray(), isAny ? TypeVariance.ANY : TypeVariance.PREDEFINED);

    // Visibility is governed by the module's access/bus modifier rules
    var visibility = services.ModuleVisibilityRules.GetAccessModifierEventType(@base.StatementRawInfo, spec.SchemaName);
    var eventBusVisibility = services.ModuleVisibilityRules.GetBusModifierEventType(@base.StatementRawInfo, eventTypeName);
    EventTypeUtility.ValidateModifiers(spec.SchemaName, eventBusVisibility, visibility);

    var metadata = new EventTypeMetadata(
        eventTypeName,
        @base.ModuleName,
        EventTypeTypeClass.VARIANT,
        EventTypeApplicationType.VARIANT,
        visibility,
        eventBusVisibility,
        false,
        EventTypeIdPair.Unassigned());
    var variantEventType = new VariantEventType(metadata, variantSpec);
    services.EventTypeCompileTimeRegistry.NewType(variantEventType);
    return variantEventType;
}
public void ShouldPreserveOrderingOnUnion()
{
    var numbers = new LinkedHashSet<int> { 1, 10, 5 };

    numbers.UnionWith(new[] { 10, 30, 15 });

    // Duplicates are ignored; genuinely new items go to the tail in order.
    Assert.That(numbers, Has.Count.EqualTo(5));
    Assert.That(numbers.ToArray(), Is.EqualTo(new[] { 1, 10, 5, 30, 15 }));
}
public void ShouldPreserveOrderingOnSymmetricExcept()
{
    var numbers = new LinkedHashSet<int> { 1, 10, 5 };

    numbers.SymmetricExceptWith(new[] { 1, 10, 3, 9 });

    // Shared items drop out; items unique to the argument append in order.
    Assert.That(numbers, Has.Count.EqualTo(3));
    Assert.That(numbers.ToArray(), Is.EqualTo(new[] { 5, 3, 9 }));
}
public void ShouldPreserveOrderingOnExcept()
{
    var numbers = new LinkedHashSet<int> { 1, 10, 5, 7, 8, 9 };

    numbers.ExceptWith(new[] { 7, 10, 9, 18 });

    // Remaining members keep their relative insertion order.
    Assert.That(numbers, Has.Count.EqualTo(3));
    Assert.That(numbers.ToArray(), Is.EqualTo(new[] { 1, 5, 8 }));
}
public void ReinsertShouldNotAffectOrdering()
{
    // Deliberately insert out of natural order.
    var numbers = new LinkedHashSet<int> { 1, 10, 5 };

    var changed = numbers.Add(1);

    // Re-adding the head element is rejected and leaves ordering untouched.
    Assert.That(changed, Is.False);
    Assert.That(numbers, Has.Count.EqualTo(3));
    Assert.That(numbers.ToArray(), Is.EqualTo(new[] { 1, 10, 5 }));
}
public void TestAddRemove()
{
    var subject = new LinkedHashSet<int>(EqualityComparer<int>.Default);

    // First insertion of each value succeeds and keeps insertion order.
    Assert.That(subject.Add(11), Is.True);
    Assert.That(subject.Add(21), Is.True);
    Assert.That(subject.Add(31), Is.True);
    Assert.That(subject.Count, Is.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 11, 21, 31 }));

    // Re-adding existing values is a no-op that reports false.
    Assert.That(subject.Add(31), Is.False);
    Assert.That(subject.Add(21), Is.False);
    Assert.That(subject.Add(11), Is.False);
    Assert.That(subject.Count, Is.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 11, 21, 31 }));

    // New values append at the tail.
    Assert.That(subject.Add(42), Is.True);
    Assert.That(subject.Count, Is.EqualTo(4));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 11, 21, 31, 42 }));

    // Removal drops present values and reports false for absent ones.
    Assert.That(subject.Remove(21), Is.True);
    Assert.That(subject.Remove(31), Is.True);
    Assert.That(subject.Remove(52), Is.False);
    Assert.That(subject.Count, Is.EqualTo(2));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 11, 42 }));

    // Additions after removals still append at the tail.
    Assert.That(subject.Add(13), Is.True);
    Assert.That(subject.Add(23), Is.True);
    Assert.That(subject.Count, Is.EqualTo(4));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 11, 42, 13, 23 }));
}
/// <summary>
/// Tears down the current test context and rebuilds it for the given test method:
/// resets and reloads the bootstrap properties, collects the framework-level and
/// application-level module types declared for the method, boots a bootstrap bean
/// context, layers child service contexts for those module lists (framework first),
/// and finally registers the original test instance with the lifecycle.
/// NOTE(review): on failure only the bootstrap context's thread-locals are cleaned
/// up here; the partially built context itself is presumably disposed by the next
/// DisposeContext() call — confirm.
/// </summary>
public void RebuildContext(MethodInfo frameworkMethod) { DisposeContext(); Properties.ResetApplication(); Properties.LoadBootstrapPropertyFile(); Properties baseProps = new Properties(Properties.Application); ExtendProperties(frameworkMethod, baseProps); LinkedHashSet <Type> testClassLevelTestFrameworkModulesList = new LinkedHashSet <Type>(); LinkedHashSet <Type> testClassLevelTestModulesList = new LinkedHashSet <Type>(); testClassLevelTestModulesList.AddAll(BuildTestModuleList(frameworkMethod)); testClassLevelTestFrameworkModulesList.AddAll(BuildFrameworkTestModuleList(frameworkMethod)); Type[] frameworkModules = testClassLevelTestFrameworkModulesList.ToArray(); Type[] applicationModules = testClassLevelTestModulesList.ToArray(); testClassLevelContext = BeanContextFactory.CreateBootstrap(baseProps); bool success = false; try { IServiceContext currentBeanContext = testClassLevelContext; if (frameworkModules.Length > 0) { currentBeanContext = currentBeanContext.CreateService(delegate(IBeanContextFactory childContextFactory) { RebuildContextDetails(childContextFactory); }, frameworkModules); } if (applicationModules.Length > 0) { currentBeanContext = currentBeanContext.CreateService(applicationModules); } currentBeanContext.RegisterWithLifecycle(originalTestInstance).Finish(); beanContext = currentBeanContext; success = true; } finally { if (!success && testClassLevelContext != null) { testClassLevelContext.GetService <IThreadLocalCleanupController>().CleanupThreadLocal(); } } }
/// <summary>
/// Posts batched events to the child views, clears the batch, and re-schedules the
/// timer callback when there is (or was) data, or when forced output is enabled.
/// </summary>
protected void SendBatch()
{
    _isCallbackScheduled = false;

    if (HasViews)
    {
        // Convert the batches to arrays; null signals "no data" downstream
        EventBean[] insertStream = _currentBatch.IsEmpty() ? null : _currentBatch.ToArray();
        EventBean[] removeStream = (_lastBatch == null || _lastBatch.IsEmpty()) ? null : _lastBatch.ToArray();

        // Force-output posts even when both streams are empty
        if (insertStream != null || removeStream != null || _isForceOutput)
        {
            if (InstrumentationHelper.ENABLED)
            {
                InstrumentationHelper.Get().QViewIndicate(this, _timeBatchViewFactory.ViewName, insertStream, removeStream);
            }
            UpdateChildren(insertStream, removeStream);
            if (InstrumentationHelper.ENABLED)
            {
                InstrumentationHelper.Get().AViewIndicate();
            }
        }
    }

    // Only if forceOutput is enabled or there have been any events in this or the
    // last interval do we schedule a callback, so as not to waste resources when
    // no events arrive.
    var hadEvents = !_currentBatch.IsEmpty() || (_lastBatch != null && !_lastBatch.IsEmpty());
    if (hadEvents || _isForceOutput)
    {
        ScheduleCallback();
        _isCallbackScheduled = true;
    }

    _lastBatch = _currentBatch;
    _currentBatch = new LinkedHashSet<EventBean>();
}
public override void Visit(TypeAttributes access, String name, Type superName, Type[] interfaces)
{
    // Merge the declared interfaces with the ones this visitor introduces
    LinkedHashSet<Type> merged = new LinkedHashSet<Type>(interfaces);
    merged.AddAll(newInterfaces);

    // Drop any interface already implemented somewhere up the inheritance chain
    for (Type current = State.CurrentType; current != null && current != typeof(Object); current = current.BaseType)
    {
        foreach (Type implemented in current.GetInterfaces())
        {
            merged.Remove(implemented);
        }
    }

    base.Visit(access, name, superName, merged.ToArray());
}
/// <summary>
/// This method updates child views and clears the batch of events. We schedule a
/// new callback at this time if there were events in the batch.
/// </summary>
protected void SendBatch()
{
    IsCallbackScheduled = false;

    if (HasViews)
    {
        EventBean[] insertStream = null;
        EventBean[] removeStream = null;
        if (CurrentBatch.IsNotEmpty())
        {
            insertStream = CurrentBatch.ToArray();
        }
        if (LastBatch != null && LastBatch.IsNotEmpty())
        {
            removeStream = LastBatch.ToArray();
        }

        // Force-output posts even when both streams are empty
        if (insertStream != null || removeStream != null || _isForceOutput)
        {
            Instrument.With(
                i => i.QViewIndicate(this, _timeBatchViewFactory.ViewName, insertStream, removeStream),
                i => i.AViewIndicate(),
                () => UpdateChildren(insertStream, removeStream));
        }
    }

    // Only reschedule when forced output is on or either interval held events,
    // so no resources are wasted while nothing arrives.
    var hadEvents = CurrentBatch.IsNotEmpty() || (LastBatch != null && LastBatch.IsNotEmpty());
    if (hadEvents || _isForceOutput)
    {
        ScheduleCallback();
        IsCallbackScheduled = true;
    }

    LastBatch = CurrentBatch;
    CurrentBatch = new LinkedHashSet<EventBean>();
}
public void TestGetSuper()
{
    var superTypes = new LinkedHashSet<Type>();

    // A type deep in the hierarchy reports every supertype, interfaces included.
    BeanEventType.GetSuper(typeof(ISupportAImplSuperGImplPlus), superTypes);
    Assert.AreEqual(6, superTypes.Count);
    EPAssertionUtil.AssertEqualsAnyOrder(
        superTypes.ToArray(),
        new[]
        {
            typeof(ISupportAImplSuperG), typeof(ISupportBaseAB),
            typeof(ISupportA), typeof(ISupportB), typeof(ISupportC),
            typeof(Object)
        });

    // Object itself has no supertypes.
    superTypes.Clear();
    BeanEventType.GetSuper(typeof(Object), superTypes);
    Assert.AreEqual(0, superTypes.Count);
}
/// <summary>Merges two module type lists into one, preserving first-seen order and
/// removing duplicates.</summary>
/// <param name="leftModules">modules that come first; may be null</param>
/// <param name="rightModules">modules appended after the left ones; may be null</param>
/// <returns>the merged, de-duplicated array, or the non-null argument unchanged when
/// the other is null</returns>
public static Type[] MergeModules(Type[] leftModules, params Type[] rightModules)
{
    if (leftModules == null)
    {
        return rightModules;
    }
    if (rightModules == null)
    {
        return leftModules;
    }
    // LinkedHashSet keeps insertion order while dropping duplicates
    LinkedHashSet<Type> modules = new LinkedHashSet<Type>(leftModules.Length + rightModules.Length);
    foreach (Type module in leftModules)
    {
        modules.Add(module);
    }
    foreach (Type module in rightModules)
    {
        modules.Add(module);
    }
    return modules.ToArray();
}
/// <summary>
/// This method updates child views and clears the batch of events.
/// We schedule a new callback at this time if there were events in the batch.
/// </summary>
protected void SendBatch()
{
    isCallbackScheduled = false;

    if (Child != null)
    {
        // Convert the batches to arrays; null signals "no data" downstream
        EventBean[] insertStream = currentBatch.IsEmpty() ? null : currentBatch.ToArray();
        EventBean[] removeStream = (lastBatch == null || lastBatch.IsEmpty()) ? null : lastBatch.ToArray();

        // Force-update posts even when there is nothing to deliver
        if (insertStream != null || removeStream != null || factory.isForceUpdate)
        {
            agentInstanceContext.InstrumentationProvider.QViewIndicate(factory, insertStream, removeStream);
            Child.Update(insertStream, removeStream);
            agentInstanceContext.InstrumentationProvider.AViewIndicate();
        }
    }

    // Reschedule only when forced output is on or either interval held events,
    // so no resources are wasted while nothing arrives.
    var hadEvents = !currentBatch.IsEmpty() || (lastBatch != null && !lastBatch.IsEmpty());
    if (hadEvents || factory.isForceUpdate)
    {
        ScheduleCallback();
        isCallbackScheduled = true;
    }

    lastBatch = currentBatch;
    currentBatch = new LinkedHashSet<EventBean>();
}
public void EmptySetToArray()
{
    // A freshly constructed set yields an empty array.
    Assert.That(new LinkedHashSet<int>().ToArray(), Is.Empty);
}
public void ShouldPreserveOrderingOnSymmetricExcept()
{
    var subject = new LinkedHashSet<int> { 1, 10, 5 };

    subject.SymmetricExceptWith(new[] { 1, 10, 3, 9 });

    // Common elements are removed; elements only in the argument append in order.
    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 5, 3, 9 }));
}
/// <summary>
/// Builds change-tracking metadata for one property of <paramref name="type"/>:
/// collects the property name plus every read-only property whose value depends on it
/// (transitively, via repeated EvaluateDependentProperties passes until the set stops
/// growing), precomputes a PropertyChangedEventArgs per affected property, inspects
/// annotations (IgnoreToBeUpdated, ParentChild), and resolves get/set delegates —
/// preferring the no-init relation accessor variants when available.
/// NOTE(review): firesToBeCreatedPCE is set when any affected property is literally
/// named "ToBeCreated" — presumably a marker property; confirm against the
/// property-change dispatch code.
/// NOTE(review): isAddedRemovedCheckNecessary approximates "is a reference-typed,
/// non-string, non-immutable member" — confirm ImmutableTypeSet covers all intended types.
/// </summary>
public PropertyEntry(Type type, String propertyName) { this.propertyName = propertyName; LinkedHashSet <String> propertyNames = new LinkedHashSet <String>(); propertyNames.Add(propertyName); PropertyInfo prop = type.GetProperty(propertyName); doesModifyToBeUpdated = !AnnotationUtil.IsAnnotationPresent <IgnoreToBeUpdated>(prop, false); isParentChildSetter = AnnotationUtil.IsAnnotationPresent <ParentChild>(prop, false); isAddedRemovedCheckNecessary = !prop.PropertyType.IsPrimitive && ImmutableTypeSet.GetUnwrappedType(prop.PropertyType) == null && !typeof(String).Equals(prop.PropertyType) && !prop.PropertyType.IsValueType; EvaluateDependentProperties(type, prop, propertyNames); while (true) { int startCount = propertyNames.Count; foreach (String currPropertyName in new List <String>(propertyNames)) { PropertyInfo currProp = type.GetProperty(currPropertyName); if (currProp.CanWrite) { continue; } // Is is just an evaluating property which has to be re-evaluated because of the change on the current property EvaluateDependentProperties(type, currProp, propertyNames); } if (startCount == propertyNames.Count) { break; } } this.propertyNames = propertyNames.ToArray(); bool firesToBeCreatedPCE = false; unknownValues = CreateArrayOfValues(UNKNOWN_VALUE, this.propertyNames.Length); pceArgs = new PropertyChangedEventArgs[propertyNames.Count]; int index = 0; foreach (String invokedPropertyName in propertyNames) { pceArgs[index] = new PropertyChangedEventArgs(invokedPropertyName); index++; firesToBeCreatedPCE |= "ToBeCreated".Equals(invokedPropertyName); } this.firesToBeCreatedPCE = firesToBeCreatedPCE; if (prop.CanRead) { getDelegate = TypeUtility.GetMemberGetDelegate(type, ValueHolderIEC.GetGetterNameOfRelationPropertyWithNoInit(prop.Name), true); if (getDelegate == null) { getDelegate = TypeUtility.GetMemberGetDelegate(type, prop.Name); } } if (prop.CanWrite) { setDelegate = TypeUtility.GetMemberSetDelegate(type, 
ValueHolderIEC.GetSetterNameOfRelationPropertyWithNoInit(prop.Name), true); if (setDelegate == null) { setDelegate = TypeUtility.GetMemberSetDelegate(type, prop.Name); } } }
/// <summary>
/// Applies the given relation update items to the entity's value-holder object
/// references: computes the new ORI set per relation from the existing ORIs plus the
/// added/removed ones, detecting concurrent modification (a remove that misses or an
/// add that duplicates throws via OptimisticLockUtil). For updates the ORIs are set on
/// the entity and uninitialized holders are queued for prefetch; otherwise setter
/// commands are built via BuildSetterCommands.
/// </summary>
/// <param name="entity">the entity whose relations are updated</param>
/// <param name="ruis">the relation update items to apply</param>
/// <param name="isUpdate">true to set ORIs directly, false to build setter commands</param>
/// <param name="metadata">metadata used to resolve relation indices/members</param>
/// <param name="reader">the reader receiving prefetch commands</param>
protected void ApplyRelationUpdateItems(IObjRefContainer entity, IRelationUpdateItem[] ruis, bool isUpdate, IEntityMetaData metadata, IReader reader)
{
    List<Object> toPrefetch = new List<Object>();
    RelationMember[] relationMembers = metadata.RelationMembers;
    foreach (IRelationUpdateItem rui in ruis)
    {
        String memberName = rui.MemberName;
        int relationIndex = metadata.GetIndexByRelationName(memberName);
        if (ValueHolderState.INIT == entity.Get__State(relationIndex))
        {
            throw new Exception("ValueHolder already initialized for property '" + memberName + "'");
        }
        IObjRef[] existingORIs = entity.Get__ObjRefs(relationIndex);
        IObjRef[] addedORIs = rui.AddedORIs;
        IObjRef[] removedORIs = rui.RemovedORIs;
        IObjRef[] newORIs;
        if (existingORIs.Length == 0)
        {
            // BUGFIX: the guard previously tested addedORIs.Length, which both missed
            // genuine removals from an empty member and could NRE when addedORIs was
            // null while removedORIs was not (cf. the sibling ApplyRelationUpdateItem).
            if (removedORIs != null && removedORIs.Length > 0)
            {
                throw new ArgumentException("Removing from empty member");
            }
            newORIs = addedORIs != null && addedORIs.Length > 0 ? addedORIs : ObjRef.EMPTY_ARRAY;
        }
        else
        {
            // Set to efficiently remove entries
            LinkedHashSet<IObjRef> existingORIsSet = new LinkedHashSet<IObjRef>(existingORIs);
            if (removedORIs != null && removedORIs.Length > 0)
            {
                foreach (IObjRef removedORI in removedORIs)
                {
                    // A miss means someone else already changed the relation
                    if (!existingORIsSet.Remove(removedORI))
                    {
                        throw OptimisticLockUtil.ThrowModified(OriHelper.EntityToObjRef(entity), null, entity);
                    }
                }
            }
            if (addedORIs != null && addedORIs.Length > 0)
            {
                foreach (IObjRef addedORI in addedORIs)
                {
                    // A duplicate add likewise signals concurrent modification
                    if (!existingORIsSet.Add(addedORI))
                    {
                        throw OptimisticLockUtil.ThrowModified(OriHelper.EntityToObjRef(entity), null, entity);
                    }
                }
            }
            newORIs = existingORIsSet.Count == 0 ? ObjRef.EMPTY_ARRAY : existingORIsSet.ToArray();
        }
        RelationMember member = relationMembers[relationIndex];
        if (isUpdate)
        {
            entity.Set__ObjRefs(relationIndex, newORIs);
            if (!entity.Is__Initialized(relationIndex))
            {
                // Queue uninitialized holders so their values get prefetched in one batch
                DirectValueHolderRef dvhr = new DirectValueHolderRef(entity, member);
                toPrefetch.Add(dvhr);
            }
        }
        else
        {
            BuildSetterCommands(entity, newORIs, member, reader);
        }
    }
    if (toPrefetch.Count > 0)
    {
        IObjectFuture objectFuture = new PrefetchFuture(toPrefetch);
        IObjectCommand command = CommandBuilder.Build(reader.CommandTypeRegistry, objectFuture, null);
        reader.AddObjectCommand(command);
    }
}
// Returns a snapshot of the currently supported enhancement types.
public Type[] GetEnhancements()
{
    return supportedEnhancements.ToArray();
}
// NOTE(review): intricate matching/constraint-generation logic left byte-identical; review notes only.
// - Each distinct pcore query-atom symbol is SMARTS-matched once against the prepared input
//   (the 'matched' set prevents re-matching repeated pcore atoms); each unique atom mapping
//   becomes a PharmacophoreAtom, de-duplicated via the insertion-ordered set before SetAtoms.
// - When the query has distance constraints, a PharmacophoreBond is added between every pair
//   of pcore atoms (O(n^2) over pcore atoms — presumably small; confirm).
// - When the query has angle constraints, (start, middle, end) triples are enumerated by symbol
//   match, degenerate triples skipped, and reversed duplicates filtered before adding
//   PharmacophoreAngleBonds.
// - The reversed-repeat filter compares tmpl[i] against unique[j] but skips when i == j even
//   though the indices refer to different lists — that skip looks suspicious; TODO confirm
//   intended behavior before touching it.
/// <summary> /// Convert the input into a pcore molecule. /// </summary> /// <param name="input">the compound being converted from</param> /// <returns>pcore molecule </returns> /// <exception cref="CDKException">match failed</exception> private IAtomContainer GetPharmacophoreMolecule(IAtomContainer input) { // XXX: prepare query, to be moved PrepareInput(input); var pharmacophoreMolecule = input.Builder.NewAtomContainer(); var matched = new HashSet <string>(); var uniqueAtoms = new LinkedHashSet <PharmacophoreAtom>(); Debug.WriteLine($"Converting [{input.Title}] to a pcore molecule"); // lets loop over each pcore query atom foreach (var atom in pharmacophoreQuery.Atoms) { var qatom = (PharmacophoreQueryAtom)atom; var smarts = qatom.Smarts; // a pcore query might have multiple instances of a given pcore atom (say // 2 hydrophobic groups separated by X unit). In such a case we want to find // the atoms matching the pgroup SMARTS just once, rather than redoing the // matching for each instance of the pcore query atom. if (!matched.Add(qatom.Symbol)) { continue; } // see if the smarts for this pcore query atom gets any matches // in our query molecule. If so, then collect each set of // matching atoms and for each set make a new pcore atom and // add it to the pcore atom container object int count = 0; foreach (var query in qatom.CompiledSmarts) { // create the lazy mappings iterator var mappings = query.MatchAll(input).GetUniqueAtoms(); foreach (var mapping in mappings) { uniqueAtoms.Add(NewPCoreAtom(input, qatom, smarts, mapping)); count++; } } Debug.WriteLine($"\tFound {count} unique matches for {smarts}"); } pharmacophoreMolecule.SetAtoms(uniqueAtoms.ToArray()); // now that we have added all the pcore atoms to the container // we need to join all atoms with pcore bonds (i.e. 
distance constraints) if (HasDistanceConstraints(pharmacophoreQuery)) { var npatom = pharmacophoreMolecule.Atoms.Count; for (int i = 0; i < npatom - 1; i++) { for (int j = i + 1; j < npatom; j++) { var atom1 = PharmacophoreAtom.Get(pharmacophoreMolecule.Atoms[i]); var atom2 = PharmacophoreAtom.Get(pharmacophoreMolecule.Atoms[j]); var bond = new PharmacophoreBond(atom1, atom2); pharmacophoreMolecule.Bonds.Add(bond); } } } // if we have angle constraints, generate only the valid // possible angle relationships, rather than all possible if (HasAngleConstraints(pharmacophoreQuery)) { int nangleDefs = 0; foreach (var bond in pharmacophoreQuery.Bonds) { if (!(bond is PharmacophoreQueryAngleBond)) { continue; } var startQAtom = bond.Atoms[0]; var middleQAtom = bond.Atoms[1]; var endQAtom = bond.Atoms[2]; // make a list of the patoms in the target that match // each type of angle atom var startl = new List <IAtom>(); var middlel = new List <IAtom>(); var endl = new List <IAtom>(); foreach (var tatom in pharmacophoreMolecule.Atoms) { if (tatom.Symbol.Equals(startQAtom.Symbol, StringComparison.Ordinal)) { startl.Add(tatom); } if (tatom.Symbol.Equals(middleQAtom.Symbol, StringComparison.Ordinal)) { middlel.Add(tatom); } if (tatom.Symbol.Equals(endQAtom.Symbol, StringComparison.Ordinal)) { endl.Add(tatom); } } // now we form the relevant angles, but we will // have reversed repeats var tmpl = new List <IAtom[]>(); foreach (var middle in middlel) { foreach (var start in startl) { if (middle.Equals(start)) { continue; } foreach (var end in endl) { if (start.Equals(end) || middle.Equals(end)) { continue; } tmpl.Add(new IAtom[] { start, middle, end }); } } } // now clean up reversed repeats var unique = new List <IAtom[]>(); for (int i = 0; i < tmpl.Count; i++) { var seq1 = tmpl[i]; bool isRepeat = false; for (int j = 0; j < unique.Count; j++) { if (i == j) { continue; } var seq2 = unique[j]; if (Compares.AreDeepEqual(seq1[1], seq2[1]) && Compares.AreDeepEqual(seq1[0], seq2[2]) && 
Compares.AreDeepEqual(seq1[2], seq2[0])) { isRepeat = true; } } if (!isRepeat) { unique.Add(seq1); } } // finally we can add the unique angle to the target foreach (var seq in unique) { var pbond = new PharmacophoreAngleBond(PharmacophoreAtom.Get(seq[0]), PharmacophoreAtom.Get(seq[1]), PharmacophoreAtom.Get(seq[2])); pharmacophoreMolecule.Bonds.Add(pbond); nangleDefs++; } } Debug.WriteLine($"Added {nangleDefs} defs to the target pcore molecule"); } return(pharmacophoreMolecule); }
/// <summary>
/// Applies a single relation-update item to the relation of <paramref name="entity"/>
/// named by rui.MemberName: computes the new ORI set from the existing ORIs plus the
/// added/removed ones (each cloned via CloneObjRef), detecting concurrent modification
/// when <paramref name="checkBaseState"/> is set (a remove that misses or an add that
/// duplicates throws via OptimisticLockUtil.ThrowModified).
/// If the holder is uninitialized and its ORIs are not yet known, the work is deferred:
/// a prefetch ref is queued and this method re-enqueues itself in
/// <paramref name="runnables"/> for a later pass. For uninitialized holders with known
/// ORIs the new ORIs are set directly; for initialized holders the new ORIs are queued
/// in <paramref name="toFetchFromCache"/> and a deferred delegate later materializes
/// the entities from the incremental state cache and assigns the member value
/// (observable collection for to-many, single entity or null for to-one).
/// NOTE(review): the deferred delegate resolves with CacheDirective.FailEarly —
/// presumably the toFetchFromCache pass guarantees availability by then; confirm.
/// </summary>
protected void ApplyRelationUpdateItem(IObjRefContainer entity, IRelationUpdateItem rui, bool isUpdate, IEntityMetaData metaData, IList <DirectValueHolderRef> toPrefetch, List <IObjRef> toFetchFromCache, bool checkBaseState, IList <IBackgroundWorkerDelegate> runnables) { IObjRefHelper objRefHelper = this.ObjRefHelper; String memberName = rui.MemberName; int relationIndex = metaData.GetIndexByRelationName(memberName); RelationMember relationMember = metaData.RelationMembers[relationIndex]; IObjRef[] existingORIs; if (entity.Is__Initialized(relationIndex)) { existingORIs = ListUtil.ToArray(ObjRefHelper.ExtractObjRefList(relationMember.GetValue(entity), null)); } else { existingORIs = entity.Get__ObjRefs(relationIndex); if (existingORIs == null) { toPrefetch.Add(new DirectValueHolderRef(entity, relationMember, true)); runnables.Add(new IBackgroundWorkerDelegate(delegate() { ApplyRelationUpdateItem(entity, rui, isUpdate, metaData, toPrefetch, toFetchFromCache, checkBaseState, runnables); })); return; } } IObjRef[] addedORIs = rui.AddedORIs; IObjRef[] removedORIs = rui.RemovedORIs; IObjRef[] newORIs; if (existingORIs.Length == 0) { if (checkBaseState && removedORIs != null) { throw new Exception("Removing from empty member"); } newORIs = addedORIs != null ? 
(IObjRef[])addedORIs.Clone() : ObjRef.EMPTY_ARRAY; for (int a = newORIs.Length; a-- > 0;) { newORIs[a] = CloneObjRef(newORIs[a], false); } } else { // Set to efficiently remove entries LinkedHashSet <IObjRef> existingORIsSet = new LinkedHashSet <IObjRef>(existingORIs); if (removedORIs != null) { foreach (IObjRef removedORI in removedORIs) { IObjRef clonedObjRef = CloneObjRef(removedORI, false); if (existingORIsSet.Remove(clonedObjRef) || !checkBaseState) { continue; } throw OptimisticLockUtil.ThrowModified(objRefHelper.EntityToObjRef(entity), null, entity); } } if (addedORIs != null) { foreach (IObjRef addedORI in addedORIs) { IObjRef clonedObjRef = CloneObjRef(addedORI, false); if (existingORIsSet.Add(clonedObjRef) || !checkBaseState) { continue; } throw OptimisticLockUtil.ThrowModified(objRefHelper.EntityToObjRef(entity), null, entity); } } if (existingORIsSet.Count == 0) { newORIs = ObjRef.EMPTY_ARRAY; } else { newORIs = existingORIsSet.ToArray(); } } if (!entity.Is__Initialized(relationIndex)) { entity.Set__ObjRefs(relationIndex, newORIs); return; } toFetchFromCache.AddRange(newORIs); runnables.Add(new IBackgroundWorkerDelegate(delegate() { ICache stateCache = cloneStateTL.Value.incrementalState.GetStateCache(); IList <Object> objects = stateCache.GetObjects(newORIs, CacheDirective.FailEarly); Object value; if (relationMember.IsToMany) { // To-many relation Object coll = ListUtil.CreateObservableCollectionOfType(relationMember.RealType, objects.Count); ListUtil.FillList(coll, objects); value = coll; } else { // To-one relation value = objects.Count > 0 ? objects[0] : null; } relationMember.SetValue(entity, value); })); }