// PRIVATE METHODS

/// <summary>
/// Builds the internal chain index from the set-of-support and background chains.
/// </summary>
/// <param name="sos">the set-of-support chains to index</param>
/// <param name="background">the background-knowledge chains to index</param>
private void constructInternalDataStructures(ICollection <Chain> sos, ICollection <Chain> background)
{
    // Gather every chain that needs indexing into one queue, then index each.
    ICollection <Chain> toIndex = CollectionFactory.CreateQueue <Chain>();
    toIndex.AddAll(sos);
    toIndex.AddAll(background);
    foreach (Chain chain in toIndex)
    {
        addToIndex(chain);
    }
}
// function REPRODUCE(x, y) returns an individual
// inputs: x, y, parent individuals
/// <summary>
/// Single-point crossover: splices the head of parent x (up to a random cut
/// point) onto the tail of parent y to produce a child individual.
/// </summary>
protected virtual Individual <A> reproduce(Individual <A> x, Individual <A> y)
{
    // n <- LENGTH(x); here n is always this.individualLength.
    // c <- random number from 1 to n
    int crossoverPoint = randomOffset(individualLength);
    // return APPEND(SUBSTRING(x, 1, c), SUBSTRING(y, c+1, n))
    ICollection <A> childRepresentation = CollectionFactory.CreateQueue <A>();
    childRepresentation.AddAll(x.getRepresentation().subList(0, crossoverPoint));
    childRepresentation.AddAll(y.getRepresentation().subList(crossoverPoint, individualLength));
    return new Individual <A>(childRepresentation);
}
/// <summary>
/// Constructor which allows injecting a custom
/// <see cref="ISvgNodeRendererMapper"/>
/// implementation.
/// </summary>
/// <param name="mapper">
/// the custom mapper implementation - if null, then we fall back to the
/// <see cref="DefaultSvgNodeRendererMapper"/>
/// </param>
public DefaultSvgNodeRendererFactory(ISvgNodeRendererMapper mapper)
{
    // The two branches of the original differed only in which mapper they
    // consulted; resolve the fallback once and register from a single place.
    ISvgNodeRendererMapper effectiveMapper = mapper != null ? mapper : new DefaultSvgNodeRendererMapper();
    rendererMap.AddAll(effectiveMapper.GetMapping());
    ignoredTags.AddAll(effectiveMapper.GetIgnoredTags());
}
/// <summary>Adds the full parent chain of the given marked-content reference to the set.</summary>
/// <returns>the topmost parent added to set. If encountered flushed element - stops and returns this flushed element.
/// </returns>
private static PdfDictionary AddAllParentsToSet(PdfMcr mcr, ICollection <PdfObject> set)
{
    IList <PdfDictionary> parents = RetrieveParents(mcr, true);
    set.AddAll(parents);
    // The topmost parent is the last element of the retrieved chain, if any.
    if (parents.IsEmpty())
    {
        return null;
    }
    return parents[parents.Count - 1];
}
/// <summary>
/// Constructs a dynamic Bayesian network from a prior network, a mapping of
/// time-slice-0 variables to their time-slice-1 counterparts, and the set of
/// slice-1 evidence variables. Validates that X_0, X_1, and E_1 together
/// cover exactly the variables of the supplied nodes.
/// </summary>
/// <param name="priorNetwork">the prior (slice-0) network</param>
/// <param name="X_0_to_X_1">mapping from each slice-0 variable to its slice-1 variable</param>
/// <param name="E_1">the evidence variables of slice 1</param>
/// <param name="rootNodes">the root nodes describing this network's topology</param>
public DynamicBayesNet(IBayesianNetwork priorNetwork, IMap <IRandomVariable, IRandomVariable> X_0_to_X_1, ISet <IRandomVariable> E_1, params INode[] rootNodes)
    : base(rootNodes)
{
    // Record the slice-0/slice-1 variables and the bidirectional mapping
    // between them (note the parameter X_0_to_X_1 shadows the field of the
    // same name; the field is updated via this.X_0_to_X_1).
    foreach (var x0_x1 in X_0_to_X_1)
    {
        IRandomVariable x0 = x0_x1.GetKey();
        IRandomVariable x1 = x0_x1.GetValue();
        this.X_0.Add(x0);
        this.X_1.Add(x1);
        this.X_0_to_X_1.Put(x0, x1);
        this.X_1_to_X_0.Put(x1, x0);
    }
    this.E_1.AddAll(E_1);
    // Assert the X_0, X_1, and E_1 sets are of expected sizes
    ISet <IRandomVariable> combined = CollectionFactory.CreateSet <IRandomVariable>();
    combined.AddAll(X_0);
    combined.AddAll(X_1);
    combined.AddAll(E_1);
    // Every variable known to the network's nodes must appear in the union
    // X_0 + X_1 + E_1; otherwise the supplied sets are inconsistent.
    if (SetOps.difference(CollectionFactory.CreateSet <IRandomVariable>(varToNodeMap.GetKeys()), combined).Size() != 0)
    {
        throw new IllegalArgumentException("X_0, X_1, and E_1 do not map correctly to the Nodes describing this Dynamic Bayesian Network.");
    }
    this.priorNetwork = priorNetwork;
    // Cache the slice-1 (non-evidence) variables in topological order:
    // start from all variables, then drop slice-0 and evidence variables.
    X_1_VariablesInTopologicalOrder.AddAll(GetVariablesInTopologicalOrder());
    X_1_VariablesInTopologicalOrder.RemoveAll(X_0);
    X_1_VariablesInTopologicalOrder.RemoveAll(E_1);
}
/// <summary>
/// Evaluates each key expression against the single-property index and
/// returns the union of all matching events, or null when nothing matched.
/// </summary>
public static ICollection <EventBean> SingleIndexLookup(ExprEvaluator[] evaluators, EventBean[] eventsPerStream, ExprEvaluatorContext exprEvaluatorContext, PropertyIndexedEventTableSingle index)
{
    var first = true;
    ICollection <EventBean> result = null;
    var evaluateParams = new EvaluateParams(eventsPerStream, true, exprEvaluatorContext);
    foreach (var evaluator in evaluators)
    {
        var key = evaluator.Evaluate(evaluateParams);
        ICollection <EventBean> found = index.Lookup(key);
        if (found != null && !found.IsEmpty())
        {
            if (result == null)
            {
                // First hit: alias the collection returned by the index;
                // no copy is made yet.
                result = found;
            }
            else if (first)
            {
                // Second hit: copy into a fresh set so the index's internal
                // collection is never mutated, then merge both hits.
                var copy = new LinkedHashSet <EventBean>();
                copy.AddAll(result);
                copy.AddAll(found);
                result = copy;
                first = false;
            }
            else
            {
                // Third and later hits: result is already a private copy,
                // so it is safe to mutate in place.
                result.AddAll(found);
            }
        }
    }
    return result;
}
/// <summary>Add a list of allowed attributes to a tag.</summary>
/// <remarks>
/// Add a list of allowed attributes to a tag. (If an attribute is not allowed on an element, it will be removed.)
/// <para />
/// E.g.: <code>addAttributes("a", "href", "class")</code> allows <code>href</code> and <code>class</code> attributes
/// on <code>a</code> tags.
/// <para />
/// To make an attribute valid for <b>all tags</b>, use the pseudo tag <code>:all</code>, e.g.
/// <code>addAttributes(":all", "class")</code>.
/// </remarks>
/// <param name="tag">The tag the attributes are for. The tag will be added to the allowed tag list if necessary.
/// </param>
/// <param name="keys">List of valid attributes for the tag</param>
/// <returns>this (for chaining)</returns>
public virtual iText.StyledXmlParser.Jsoup.Safety.Whitelist AddAttributes(String tag, params String[] keys
    )
{
    Validate.NotEmpty(tag);
    Validate.NotNull(keys);
    Validate.IsTrue(keys.Length > 0, "No attributes supplied.");
    Whitelist.TagName tagName = Whitelist.TagName.ValueOf(tag);
    // Allowing attributes on a tag implicitly allows the tag itself.
    if (!tagNames.Contains(tagName))
    {
        tagNames.Add(tagName);
    }
    // Convert the raw key strings into attribute keys, validating each.
    ICollection <Whitelist.AttributeKey> newKeys = new HashSet <Whitelist.AttributeKey>();
    foreach (String key in keys)
    {
        Validate.NotEmpty(key);
        newKeys.Add(Whitelist.AttributeKey.ValueOf(key));
    }
    // Merge with any attributes already registered for this tag.
    if (attributes.ContainsKey(tagName))
    {
        attributes.Get(tagName).AddAll(newKeys);
    }
    else
    {
        attributes.Put(tagName, newKeys);
    }
    return this;
}
/// <summary>Merges text decoration.</summary>
/// <param name="firstValue">the first value</param>
/// <param name="secondValue">the second value</param>
/// <returns>the merged value</returns>
public static String MergeTextDecoration(String firstValue, String secondValue)
{
    // A missing operand means the other one wins outright.
    if (firstValue == null)
    {
        return secondValue;
    }
    if (secondValue == null)
    {
        return firstValue;
    }
    // Normalize both values and take their union of decoration properties.
    ICollection <String> combined = NormalizeTextDecoration(firstValue);
    combined.AddAll(NormalizeTextDecoration(secondValue));
    // Join the properties with single spaces.
    StringBuilder result = new StringBuilder();
    foreach (String prop in combined)
    {
        if (result.Length > 0)
        {
            result.Append(" ");
        }
        result.Append(prop);
    }
    // An empty union degenerates to "none".
    return result.Length > 0 ? result.ToString() : CommonCssConstants.NONE;
}
/// <summary>
/// Evaluates the hash key(s) for the current event(s), records them into
/// <paramref name="keys"/>, and recurses into the next composite index level.
/// Returns null when no entry exists for the computed key.
/// </summary>
public ICollection<EventBean> GetCollectKeys(
    EventBean[] eventsPerStream,
    IDictionary<object, CompositeIndexEntry> parent,
    ExprEvaluatorContext context,
    ICollection<object> keys,
    CompositeIndexQueryResultPostProcessor postProcessor)
{
    EventBean[] eventsToUse;
    if (_isNwOnTrigger)
    {
        // Named-window-on-trigger: evaluate against the events as supplied.
        eventsToUse = eventsPerStream;
    }
    else
    {
        // Otherwise shift the triggering events into slot 1 of the
        // pre-allocated prototype array before evaluating.
        Array.Copy(eventsPerStream, 0, _events, 1, eventsPerStream.Length);
        eventsToUse = _events;
    }
    var mk = _hashGetter.Evaluate(eventsToUse, true, context);
    // A multi-key contributes each of its component keys; a plain key is
    // recorded as a single value.
    if (mk is MultiKeyArrayOfKeys<object> mkArray)
    {
        keys.AddAll(mkArray.Array);
    }
    else
    {
        keys.Add(mk);
    }
    var innerEntry = parent.Get(mk);
    if (innerEntry == null)
    {
        return null;
    }
    var innerIndex = innerEntry.AssertIndex();
    // Descend into the next level of the composite index.
    return _next.GetCollectKeys(eventsPerStream, innerIndex, context, keys, postProcessor);
}
/// <summary>
/// Interprets the value as an object dictionary and collects its keys into
/// the constants collection.
/// </summary>
public void Add(
    ICollection <object> constants,
    object value)
{
    var asDictionary = value.AsObjectDictionary(MagicMarker.SingletonInstance);
    constants.AddAll(asDictionary.Keys);
}
/// <summary>
/// Unwraps the value as an enumerable and collects all of its elements into
/// the constants collection.
/// </summary>
public void Add(
    ICollection <object> constants,
    object value)
{
    var elements = value.UnwrapEnumerable <object>();
    constants.AddAll(elements);
}
/// <summary>
/// Sorts the collection in place using the given comparer: snapshot to an
/// array, sort the array, then rebuild the collection in sorted order.
/// </summary>
private static void SortCollection <T>(ICollection <T> list, IComparer <T> comparator)
{
    var snapshot = list.ToArray();
    Array.Sort(snapshot, 0, snapshot.Length, comparator);
    list.Clear();
    list.AddAll(snapshot);
}
/// <summary>
/// Adds all multi-key events to the target collection; a null source is a no-op.
/// </summary>
public static void AddToCollection(ISet <MultiKey <EventBean> > toAdd, ICollection <MultiKey <EventBean> > events)
{
    if (toAdd != null)
    {
        events.AddAll(toAdd);
    }
}
/// <summary>
/// Adds all events from the array to the target collection; a null source is a no-op.
/// </summary>
public static void AddToCollection(EventBean[] toAdd, ICollection <EventBean> events)
{
    if (toAdd != null)
    {
        events.AddAll(toAdd);
    }
}
/// <summary>
/// Replaces all items in an <see cref="ICollection{T}"/> source by the specified items.
/// </summary>
/// <typeparam name="T">Item type</typeparam>
/// <param name="src">Source collection</param>
/// <param name="newItems">Items to add</param>
public static void ReplaceAll <T>(this ICollection <T> src, IEnumerable <T> newItems)
{
    Contract.Requires(src != null);
    // Also require the replacement sequence: without this a null newItems
    // would clear the collection and then fail, leaving it emptied.
    Contract.Requires(newItems != null);
    src.Clear();
    src.AddAll(newItems);
}
public void Format() { Assert.AreEqual("{ }", coll.ToString()); coll.AddAll(new int[] { -4, 28, 129, 65530 }); Assert.AreEqual("{ 65530, -4, 28, 129 }", coll.ToString()); Assert.AreEqual("{ FFFA, -4, 1C, 81 }", coll.ToString(null, rad16)); Assert.AreEqual("{ 65530, -4, ... }", coll.ToString("L14", null)); Assert.AreEqual("{ FFFA, -4, ... }", coll.ToString("L14", rad16)); }
/**
 * A contrapositive of a chain is a permutation in which a different literal
 * is placed at the front. The contrapositives of a chain are logically
 * equivalent to the original chain.
 *
 * @return a list of contrapositives for this chain.
 */
public ICollection <Chain> getContrapositives()
{
    ICollection <Chain> result = CollectionFactory.CreateQueue <Chain>();
    ICollection <Literal> reordered = CollectionFactory.CreateQueue <Literal>();
    for (int front = 1; front < literals.Size(); ++front)
    {
        // Rebuild the literal list with literal 'front' moved to the head,
        // followed by the remaining literals in their original order.
        reordered.Clear();
        reordered.Add(literals.Get(front));
        reordered.AddAll(literals.subList(0, front));
        reordered.AddAll(literals.subList(front + 1, literals.Size()));
        Chain contrapositive = new Chain(reordered);
        contrapositive.setProofStep(new ProofStepChainContrapositive(contrapositive, this));
        result.Add(contrapositive);
    }
    return result;
}
/// <summary>
/// Collects the multi-typed value into the result: a value that is itself a
/// collection is flattened element-wise, anything else is added as a single item.
/// </summary>
public void CollectInto(ICollection<object> result)
{
    if (Multityped is ICollection<object> multiple)
    {
        result.AddAll(multiple);
    }
    else
    {
        result.Add(Multityped);
    }
}
public void Format() { Assert.AreEqual("{{ }}", coll.ToString()); coll.AddAll(new int[] { -4, 28, 129, 65530, -4, 28 }); Assert.AreEqual("{{ 65530(*1), -4(*2), 28(*2), 129(*1) }}", coll.ToString()); Assert.AreEqual("{{ FFFA(*1), -4(*2), 1C(*2), 81(*1) }}", coll.ToString(null, rad16)); Assert.AreEqual("{{ 65530(*1), -4(*2)... }}", coll.ToString("L18", null)); Assert.AreEqual("{{ FFFA(*1), -4(*2)... }}", coll.ToString("L18", rad16)); }
/// <summary>
/// Depth-first collection of all transitive stream dependencies of the given
/// stream into <paramref name="deepDependencies"/>.
/// NOTE(review): assumes the stream dependency graph is acyclic — a cycle
/// would recurse forever; confirm with the graph builder.
/// </summary>
private void RecursivePopulateDependencies(int navigableStream, ICollection <int> deepDependencies)
{
    var directDependencies = GetDependenciesForStream(navigableStream);
    deepDependencies.AddAll(directDependencies);
    foreach (int dependentStream in directDependencies)
    {
        RecursivePopulateDependencies(dependentStream, deepDependencies);
    }
}
/**
 * Makes a CSP consisting of binary constraints arc-consistent.
 *
 * @return An object which indicates success/failure and contains data to
 *         undo the operation.
 */
public IInferenceLog <VAR, VAL> apply(CSP <VAR, VAL> csp)
{
    // Seed the work queue with every variable of the CSP; the queue rejects
    // duplicates so re-queued variables are only processed once at a time.
    ICollection <VAR> agenda = CollectionFactory.CreateFifoQueueNoDuplicates <VAR>();
    agenda.AddAll(csp.getVariables());
    DomainLog <VAR, VAL> log = new DomainLog <VAR, VAL>();
    reduceDomains(agenda, csp, log);
    return log.compactify();
}
/// <summary>
/// Visits a quantified sentence, accumulating its quantified variables into
/// the collection passed as the visitor argument before descending into the body.
/// </summary>
public object visitQuantifiedSentence(QuantifiedSentence sentence, object arg)
{
    var collected = (ICollection <Variable>)arg;
    // The quantified variables themselves count, not just those in the body.
    collected.AddAll(sentence.getVariables());
    sentence.getQuantified().accept(this, arg);
    return sentence;
}
/// <summary>
/// Returns the labels of those vertices which can be obtained by following
/// the edges starting at the specified vertex.
/// </summary>
/// <param name="v">the vertex whose successors are requested</param>
/// <returns>the successor vertex labels (empty when the vertex has no outgoing edges)</returns>
public ICollection <VertexLabelType> GetSuccessors(VertexLabelType v)
{
    ICollection <VertexLabelType> successors = CollectionFactory.CreateQueue <VertexLabelType>();
    // Each outgoing edge is keyed by its target vertex in the local lookup.
    IMap <VertexLabelType, EdgeLabelType> outgoingEdges = globalEdgeLookup.Get(v);
    if (outgoingEdges != null)
    {
        successors.AddAll(outgoingEdges.GetKeys());
    }
    return successors;
}
/// <summary>
/// Collects the keys of a generic dictionary value into the constants collection.
/// </summary>
/// <param name="constants">target collection receiving the keys</param>
/// <param name="value">a generic dictionary instance; must not be null</param>
/// <exception cref="ArgumentNullException">when value is null</exception>
/// <exception cref="ArgumentException">when value is not a generic dictionary</exception>
public void Add(ICollection<Object> constants, Object value)
{
    // BUG FIX: the original called value.GetType() without a null check, so a
    // null value surfaced as NullReferenceException instead of a clear error.
    if (value == null)
        throw new ArgumentNullException(nameof(value));
    IEnumerable<object> mapKeys;
    if (value.GetType().IsGenericDictionary())
        mapKeys = MagicMarker.GetDictionaryFactory(value.GetType()).Invoke(value).Keys;
    else
        throw new ArgumentException("invalid value", nameof(value));
    constants.AddAll(mapKeys);
}
/// <summary>
/// Flattens the terminal rules registered under every key into a single list.
/// </summary>
public ICollection <Rule> getAllTerminalRules()
{
    ICollection <Rule> result = CollectionFactory.CreateQueue <Rule>();
    foreach (string key in this.GetKeys())
    {
        result.AddAll(this.getTerminalRules(key));
    }
    return result;
}
/// <summary>
/// Warns about files that exist on disk inside the project's source folders
/// but are not included in the project ("orphaned" files).
/// </summary>
/// <param name="element">the tree node whose containing project is inspected</param>
private void CheckForProjectFilesNotInProjectAndWarn(ITreeNode element)
{
    // BUG FIX: string.Split never returns a zero-length array (an empty
    // settings string yields [""]), so the previous Length == 0 fallback was
    // dead code and an empty pattern could reach EnumerateFiles. Strip empty
    // entries and fall back to "*.cs" when nothing usable remains.
    string[] filesToFind = Settings.OrphanedFilesPatterns.Split(new[] { '|' }, StringSplitOptions.RemoveEmptyEntries);
    if (filesToFind.Length == 0)
    {
        filesToFind = new[] { "*.cs" };
    }

    var currentProject = element.GetProject();
    // Output and intermediate directories hold generated files, not orphans.
    ICollection <VirtualFileSystemPath> directoriesToSkip = currentProject.GetOutputDirectories();
    directoriesToSkip.AddAll(currentProject.GetIntermidiateDirectories());

    var allProjectFileLocations = currentProject.GetAllProjectFiles().Select(p => p.Location).ToList();
    var allProjectFiles = allProjectFileLocations.Select(loc => loc.FullPath).ToList();
    var allProjectFolders = allProjectFileLocations
        .Select(loc => loc.Directory)
        .Distinct()
        .Where(x => !InDirectory(directoriesToSkip, x));

    // Collect files on disk matching each pattern. Note: these are wildcard
    // search patterns for EnumerateFiles, not regular expressions.
    var filesOnDisk = new List <FileInfo>();
    foreach (string pattern in filesToFind)
    {
        filesOnDisk.AddRange(
            allProjectFolders.SelectMany(
                directory => new System.IO.DirectoryInfo(directory.FullPath)
                    .EnumerateFiles(pattern, System.IO.SearchOption.TopDirectoryOnly)));
    }

    // A file is orphaned when it exists on disk but is not part of the project.
    var orphanedFiles = new List <FileInfo>();
    foreach (var fileOnDisk in filesOnDisk)
    {
        // Case-insensitive path comparison (Windows file-system semantics).
        if (allProjectFiles.Any(x => string.Equals(x, fileOnDisk.FullName, StringComparison.OrdinalIgnoreCase)))
        {
            continue;
        }
        orphanedFiles.Add(fileOnDisk);
    }

    if (orphanedFiles.Count > 0)
    {
        IHighlighting highlighting = new FilesNotPartOfProjectWarning(currentProject, orphanedFiles);
        AddHighlighting(element.GetDocumentRange(), highlighting);
    }
}
/// <summary>
/// Merges a child node's payload into the target: the payload is either
/// absent (null), a single rectangle, or a collection of rectangles.
/// Returns the number of rectangles merged.
/// </summary>
private static int MergeChildNodes(ICollection<XYWHRectangleWValue<TL>> target, object data)
{
    if (data == null)
    {
        return 0;
    }
    if (data is XYWHRectangleWValue<TL> single)
    {
        target.Add(single);
        return 1;
    }
    var rectangles = (ICollection<XYWHRectangleWValue<TL>>) data;
    target.AddAll(rectangles);
    return rectangles.Count;
}
/// <summary>
/// Constructs an agent that runs the given search on the problem up front and
/// caches the resulting action sequence together with the search metrics.
/// </summary>
public SearchAgent(IProblem <S, A> p, ISearchForActions <S, A> search)
{
    ICollection <A> actions = search.findActions(p);
    actionList = CollectionFactory.CreateQueue <A>();
    // The search may legitimately find no solution and return null.
    if (actions != null)
    {
        actionList.AddAll(actions);
    }
    searchMetrics = search.getMetrics();
}
/// <summary>
/// Collects the elements of a collection value into the constants collection.
/// </summary>
/// <param name="constants">target collection receiving the elements</param>
/// <param name="value">an <see cref="ICollection{T}"/> or generic collection instance; must not be null</param>
/// <exception cref="ArgumentNullException">when value is null</exception>
/// <exception cref="ArgumentException">when value is not a collection</exception>
public void Add(ICollection<Object> constants, Object value)
{
    // BUG FIX: a null value fails the 'is' test and then NREs on
    // value.GetType(); report it as a clear argument error instead.
    if (value == null)
        throw new ArgumentNullException(nameof(value));
    ICollection<object> collection;
    if (value is ICollection<object> asCollection)
        collection = asCollection;
    else if (value.GetType().IsGenericCollection())
        collection = MagicMarker.GetCollectionFactory(value.GetType()).Invoke(value);
    else
        throw new ArgumentException("invalid value", nameof(value));
    constants.AddAll(collection);
}
/// <summary>
/// Standardizes apart the variables of two literal collections in place:
/// every variable occurring in either collection is consistently renamed to a
/// fresh indexical variable, and the rename substitution is returned.
/// Both input collections are cleared and refilled with the renamed literals.
/// </summary>
public IMap <Variable, Term> standardizeApart(ICollection <Literal> l1Literals, ICollection <Literal> l2Literals, StandardizeApartIndexical standardizeApartIndexical)
{
    // Collect every variable mentioned in either literal set.
    ISet <Variable> toRename = CollectionFactory.CreateSet <Variable>();
    foreach (Literal pl in l1Literals)
    {
        toRename.AddAll(variableCollector.collectAllVariables(pl.getAtomicSentence()));
    }
    foreach (Literal nl in l2Literals)
    {
        toRename.AddAll(variableCollector.collectAllVariables(nl.getAtomicSentence()));
    }
    // Build the substitution mapping each original variable to a fresh one.
    IMap <Variable, Term> renameSubstitution = CollectionFactory.CreateInsertionOrderedMap <Variable, Term>();
    foreach (Variable var in toRename)
    {
        Variable v = null;
        do
        {
            v = new Variable(standardizeApartIndexical.getPrefix() + standardizeApartIndexical.getNextIndex());
            // Ensure the new variable name is not already
            // accidentally used in the sentence
        } while (toRename.Contains(v));
        renameSubstitution.Put(var, v);
    }
    // Apply the substitution to every literal of both collections.
    ICollection <Literal> posLits = CollectionFactory.CreateQueue <Literal>();
    ICollection <Literal> negLits = CollectionFactory.CreateQueue <Literal>();
    foreach (Literal pl in l1Literals)
    {
        posLits.Add(substVisitor.subst(renameSubstitution, pl));
    }
    foreach (Literal nl in l2Literals)
    {
        negLits.Add(substVisitor.subst(renameSubstitution, nl));
    }
    // Replace the contents of the input collections with the renamed literals.
    l1Literals.Clear();
    l1Literals.AddAll(posLits);
    l2Literals.Clear();
    l2Literals.AddAll(negLits);
    return renameSubstitution;
}