static void Main(string[] args)
{
    var table = new HashDictionary<string, int>();
    table.Add("P", 2);
    table.Add("A", 3);
    table.Add("B", 4);

    Console.WriteLine(table.ContainsKey("P"));
    Console.WriteLine(table.ContainsKey("P2"));

    foreach (var pair in table)
    {
        Console.WriteLine(pair.Key + " " + pair.Value);
    }

    Console.WriteLine();
    //Console.WriteLine(table.Find("B"));
    //Console.WriteLine(table["P"]);
    //table["P"] = 5;
    //Console.WriteLine(table["P"]);
    //Console.WriteLine(table["D"]);
    //Console.WriteLine();
    Console.WriteLine("!!!!!!!!!!!!!!!!!");

    table.Remove("P");

    foreach (var pair in table)
    {
        Console.WriteLine(pair.Key + " " + pair.Value);
    }
}
public void TestCompositeAggregator()
{
    IEntryAggregator agg1 = CompositeAggregator.CreateInstance(
        new IEntryAggregator[] {
            GroupAggregator.CreateInstance(IdentityExtractor.Instance, new Count()),
            new LongMax(IdentityExtractor.Instance)
        });

    ArrayList al = new ArrayList();
    al.Add(new TestInvocableCacheEntry("key1", 173));
    al.Add(new TestInvocableCacheEntry("key2", 173));
    al.Add(new TestInvocableCacheEntry("key3", 185));
    al.Add(new TestInvocableCacheEntry("key4", 164));
    al.Add(new TestInvocableCacheEntry("key5", 164));
    al.Add(new TestInvocableCacheEntry("key6", 164));

    object result = agg1.Aggregate(al);
    if (result is IList)
    {
        IDictionary results = (IDictionary)((IList)result)[0];
        Assert.AreEqual(results[185], 1);
        Assert.AreEqual(results[164], 3);
        Assert.AreEqual(((IList)result)[1], 185);
    }

    // aggregation on a remote cache
    INamedCache cache = CacheFactory.GetCache(CacheName);
    cache.Clear();

    HashDictionary hd = new HashDictionary();
    hd.Add("Key1", 435);
    hd.Add("Key2", 253);
    hd.Add("Key3", 435);
    hd.Add("Key4", 435);
    hd.Add(null, -3);
    cache.InsertAll(hd);

    IEntryAggregator aggregator = CompositeAggregator.CreateInstance(
        new IEntryAggregator[] {
            GroupAggregator.CreateInstance(IdentityExtractor.Instance, new Count()),
            new LongMax(IdentityExtractor.Instance)
        });

    result = cache.Aggregate(cache.Keys, aggregator);
    if (result is IList)
    {
        IDictionary results = (IDictionary)((IList)result)[0];
        Assert.AreEqual(results[435], 3);
        Assert.AreEqual(results[-3], 1);
        Assert.AreEqual(((IList)result)[1], 435);
    }

    CacheFactory.Shutdown();
}
public static void SetValue(string name, ConsoleVarable value)
{
    lock (_varables)
    {
        if (_varables.Contains(name))
        {
            _varables[name] = value;
        }
        else
        {
            _varables.Add(name, value);
        }
    }
}
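// Note: if _varables is a C5 HashDictionary, the Contains/Add split
// above is not strictly necessary, because C5's indexer setter already
// inserts the pair when the key is absent and overwrites it when
// present. A minimal equivalent sketch under that assumption (the
// method name SetValueCompact is made up for illustration):
public static void SetValueCompact(string name, ConsoleVarable value)
{
    lock (_varables)
    {
        _varables[name] = value; // add-or-update in one call
    }
}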
private static void addNodeToMap(Node node)
{
    // implement Dijkstra's algorithm:
    IntervalHeap<NodeEntry> sortedNodes = new IntervalHeap<NodeEntry>();
    HashDictionary<Node, NodeEntry> hashedNodes = new HashDictionary<Node, NodeEntry>();

    NodeEntry thisNode = new NodeEntry();
    thisNode.Node = node;
    bool firstAdded = sortedNodes.Add(ref thisNode.Handle, thisNode);
    Debug.Assert(firstAdded);
    hashedNodes.Add(node, thisNode);

    while (sortedNodes.Count != 0)
    {
        NodeEntry currentNode = sortedNodes.DeleteMin();
        foreach (Link link in currentNode.Node.NetworkInterfaces.Interfaces.Keys)
        {
            // get the node on the other side of the link
            Node secondNode = (link.LinkSides[0].ConnectedNode == currentNode.Node)
                ? link.LinkSides[1].ConnectedNode
                : link.LinkSides[0].ConnectedNode;

            double distance = link.Metric + currentNode.Distance;
            if (hashedNodes.Contains(secondNode))
            {
                NodeEntry entry = hashedNodes[secondNode];
                if (entry.Distance > distance)
                {
                    entry.Distance = distance;
                    sortedNodes.Replace(entry.Handle, entry);
                }
            }
            else
            {
                NodeEntry newEntry = new NodeEntry();
                newEntry.Node = secondNode;
                newEntry.Distance = distance;
                hashedNodes.Add(secondNode, newEntry);
                bool added = sortedNodes.Add(ref newEntry.Handle, newEntry);
                Debug.Assert(added);
            }
        }
    }

    //hashedNodes.Remove(node);
    HashDictionary<Node, double> finalDistances = new HashDictionary<Node, double>();
    foreach (NodeEntry entry in hashedNodes.Values)
    {
        finalDistances.Add(entry.Node, entry.Distance);
    }
    distances.Add(node, finalDistances);
}
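// NodeEntry is not shown above. Given how it is used with C5's
// IntervalHeap (Add(ref Handle, ...) to capture a handle, Replace for
// decrease-key), a plausible minimal sketch is the following; the
// exact fields of the original class are an assumption:
class NodeEntry : IComparable<NodeEntry>
{
    public Node Node;
    public double Distance;                        // best known cost from the source
    public IPriorityQueueHandle<NodeEntry> Handle; // set by IntervalHeap.Add(ref ...)

    public int CompareTo(NodeEntry other)
    {
        return Distance.CompareTo(other.Distance); // heap orders by distance
    }
}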
/// <summary>
/// Synchronously invoke the specified task on each of the specified
/// members.
/// </summary>
/// <remarks>
/// <p>
/// This method will not return until the specified members have
/// completed their processing, failed in their processing, or died
/// trying.</p>
/// <p>
/// <b>IMember</b>s that are specified but are not currently running
/// the <b>IInvocationService</b> will not invoke the specified
/// <see cref="IInvocable"/> object.</p>
/// <p>
/// <b>IMember</b>s that leave (gracefully or otherwise) before the
/// invocation completes will not register a result, and the amount
/// of processing that completed is indeterminate. <b>IMember</b>s
/// that encounter an exception during invocation will not be
/// retried; whatever result has been registered by that point by
/// that member for that <b>IInvocable</b> object will be returned.
/// Specifically, the result for a given member will be null under
/// the following conditions:</p>
/// <list type="bullet">
/// <item>if the member did not exist</item>
/// <item>if the member was not running the service at the time that
/// the query method was invoked</item>
/// <item>if the member left (via the shutdown or stop methods, or
/// unexpectedly) before responding</item>
/// <item>if the member encountered an exception while processing
/// and had not registered a non-null result</item>
/// <item>if the member completed successfully but registered no
/// result</item>
/// <item>if the member completed successfully but explicitly
/// registered a result of null</item>
/// </list>
/// </remarks>
/// <param name="task">
/// The <b>IInvocable</b> object to distribute to the specified
/// members in order to be invoked on those members.
/// </param>
/// <param name="col">
/// Optional collection of cluster members to which the
/// <b>IInvocable</b> object will be distributed; if <c>null</c>, the
/// <b>IInvocable</b> object will be distributed to all cluster
/// members that are running this service.
/// </param>
/// <returns>
/// An <b>IDictionary</b> of result objects keyed by <see cref="IMember"/>
/// object.
/// </returns>
public virtual IDictionary Query(IInvocable task, ICollection col)
{
    if (task == null)
    {
        throw new ArgumentNullException("task", "task cannot be null.");
    }
    if (col != null)
    {
        throw new ArgumentException("directed query not supported; "
                + "the specified IMember collection must be null.");
    }

    IChannel channel = EnsureChannel();
    IMessageFactory factory = channel.MessageFactory;
    InvocationRequest request =
            (InvocationRequest)factory.CreateMessage(InvocationRequest.TYPE_ID);

    request.Task = task;
    object result = channel.Request(request);

    IMember member = OperationalContext.LocalMember;
    IDictionary resultDictionary = new HashDictionary();
    resultDictionary.Add(member, result);
    return resultDictionary;
}
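// A hedged usage sketch for the proxy above. The service name and the
// PrintTask invocable are hypothetical; any IInvocable implementation
// works here, and col must be null because directed queries are
// unsupported by this implementation.
IInvocationService service =
        (IInvocationService)CacheFactory.GetService("RemoteInvocationService");
IDictionary results = service.Query(new PrintTask("hello"), null);
foreach (DictionaryEntry entry in results)
{
    // a single result, keyed by the local member
    Console.WriteLine(entry.Key + " -> " + entry.Value);
}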
public void Add(IPatch patch)
{
    PatchContainer pc = new PatchContainer(patch);
    mapping.Add(patch, pc);
    rootSet.Add(pc);
}
public void Configure()
{
    if (nodes == null)
    {
        nodes = new HashDictionary<Node, NodeEntry>();
        foreach (Identificable identificable in Network.Identificables)
        {
            if (identificable is Node)
            {
                Node newNode = (Node)identificable;
                nodes.Add(newNode, new NodeEntry(newNode));
            }
        }
    }

    foreach (Identificable identificable in Network.Identificables)
    {
        if (identificable is Link)
        {
            Link link = (Link)identificable;
            links.Add(link, new LinkEntry(link));
        }
    }

    foreach (NetworkInterface netInt in node.NetworkInterfaces.Interfaces.Values)
    {
        netInt.LinkSide.OnBroken += onLinkBreakChange;
        netInt.LinkSide.OnRepaired += onLinkBreakChange;
    }

    Timer.Schedule(Timer.CurrentTime + Configuration.Protocols.Dijkstra.StateSendingPeriod,
        onSendState, null);
}
public static void DikstrasSearch(Node[] nodes, Node source)
{
    var table = new HashDictionary<Node, Entry>();
    foreach (var node in nodes)
    {
        table.Add(node, new Entry(false, float.MaxValue, null));
    }

    var sourceEntry = table[source];
    sourceEntry.cost = 0;
    table[source] = sourceEntry;

    var priorityQueue = new IntervalHeap<NodeAndCost>(
        new DelegateComparer<NodeAndCost>(
            (nodeAndCost1, nodeAndCost2) =>
                nodeAndCost1.cost.CompareTo(nodeAndCost2.cost)))
    {
        new NodeAndCost(source, 0)
    };

    while (!priorityQueue.IsEmpty)
    {
        var nodeAndCost = priorityQueue.DeleteMin();
        var currentNode = nodeAndCost.node;
        if (table[currentNode].known)
        {
            continue;
        }

        var currentNodeEntry = table[currentNode];
        currentNodeEntry.known = true;
        table[currentNode] = currentNodeEntry;

        foreach (var edge in currentNode.outEdges)
        {
            var toNode = edge.ToNode;
            var toNodeCost = table[currentNode].cost + edge.Cost;
            if (!(table[toNode].cost > toNodeCost))
            {
                continue;
            }

            var toNodeEntry = table[toNode];
            toNodeEntry.cost = toNodeCost;
            toNodeEntry.predecessor = currentNode;
            table[toNode] = toNodeEntry;
            priorityQueue.Add(new NodeAndCost(toNode, toNodeCost));
        }
    }

    foreach (var node in nodes)
    {
        _NEXT_NODE_AND_COST_TABLE[new NodePair(source, node)] =
            ExtractNextNodeFromTable(table, source, node);
    }
}
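// Entry and NodeAndCost are not shown above. The read-modify-write
// pattern (var e = table[x]; e.cost = ...; table[x] = e;) suggests
// they are value types; a plausible sketch follows, though the field
// layout is an assumption rather than the original definition:
struct Entry
{
    public bool known;       // true once the node's distance is finalized
    public float cost;       // best known distance from the source
    public Node predecessor; // previous hop on the best path

    public Entry(bool known, float cost, Node predecessor)
    {
        this.known = known;
        this.cost = cost;
        this.predecessor = predecessor;
    }
}

struct NodeAndCost
{
    public Node node;
    public float cost;

    public NodeAndCost(Node node, float cost)
    {
        this.node = node;
        this.cost = cost;
    }
}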
// HELPERS
protected override NetworkInterface getRoute(Node destination)
{
    IntervalHeap<NodeEntry> sortedNodes = new IntervalHeap<NodeEntry>();
    HashDictionary<Node, NodeEntry> hashedNodes = new HashDictionary<Node, NodeEntry>();

    NodeEntry thisNode = new NodeEntry();
    thisNode.Node = node;
    thisNode.Time = Timer.CurrentTime;

    bool added = sortedNodes.Add(ref thisNode.Handle, thisNode);
    NodeEntry temp;
    bool found = sortedNodes.Find(thisNode.Handle, out temp);
    Debug.Assert(found);
    Debug.Assert(added);
    hashedNodes.Add(node, thisNode);

    double currentTime = -1;
    while (sortedNodes.Count > 0)
    {
        NodeEntry current = sortedNodes.DeleteMin();
        Debug.Assert(current.Time >= currentTime);
        currentTime = current.Time;
        if (current.Node == destination)
        {
            return extractInterface(current, hashedNodes);
        }
        nextMove(current, sortedNodes, hashedNodes);
        Debug.Assert(sortedNodes.Check());
    }

    // route not found
    return null;
}
public BipartiteMatching(SCG.IEnumerable<Rec<TLeftLabel, TRightLabel>> graph)
{
    HashDictionary<TRightLabel, RightNode> rdict =
        new HashDictionary<TRightLabel, RightNode>();
    HashDictionary<TLeftLabel, HashSet<RightNode>> edges =
        new HashDictionary<TLeftLabel, HashSet<RightNode>>();
    HashSet<RightNode> newrnodes = new HashSet<RightNode>();

    foreach (Rec<TLeftLabel, TRightLabel> edge in graph)
    {
        var x2 = edge.X2;
        if (!rdict.Find(ref x2, out RightNode rnode))
        {
            rdict.Add(edge.X2, rnode = new RightNode(edge.X2));
        }

        HashSet<RightNode> ledges = newrnodes;
        if (!edges.FindOrAdd(edge.X1, ref ledges))
        {
            newrnodes = new HashSet<RightNode>();
        }
        ledges.Add(rnode);
    }

    rightNodes = rdict.Values.ToArray();
    leftNodes = new LeftNode[edges.Count];
    int li = 0;
    foreach (KeyValuePair<TLeftLabel, HashSet<RightNode>> les in edges)
    {
        leftNodes[li++] = new LeftNode(les.Key, les.Value.ToArray());
    }
    Compute();
}
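// C5's FindOrAdd(key, ref value) returns true when the key already
// existed (overwriting `value` with the stored one) and false when it
// inserted the supplied value; that is why the constructor above only
// allocates a fresh set after an insert. A small stand-alone
// illustration of those semantics:
var d = new HashDictionary<string, int>();
int v = 42;
bool existed = d.FindOrAdd("a", ref v); // false: "a" inserted with 42
v = 7;
existed = d.FindOrAdd("a", ref v);      // true: v is set back to the stored 42
Console.WriteLine(existed + " " + v);   // prints "True 42"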
public bool Add(T item)
{
    if (!_dict.Contains(item))
    {
        _dict.Add(item, new HashSet<T>(EqualityComparer<T>.Default));
    }
    return _dict[item].Add(item);
}
public bool Add(T item)
{
    if (!dict.Contains(item))
    {
        dict.Add(item, new HashSet<T>(ReferenceEqualityComparer<T>.Default));
    }
    return dict[item].Add(item);
}
// Construct the transition relation of a composite-state DFA from
// an NFA with start state s0 and transition relation trans (a
// dictionary mapping int to arraylist of Transition). The start
// state of the constructed DFA is the epsilon closure of s0, and
// its transition relation is a dictionary mapping a composite state
// (a set of ints) to a dictionary mapping a label (a string) to a
// composite state (a set of ints).
private static IDictionary<HashSet<int>, IDictionary<string, HashSet<int>>>
    CompositeDfaTrans(int s0, IDictionary<int, ArrayList<Transition>> trans)
{
    var S0 = EpsilonClose(new HashSet<int> { s0 }, trans);
    var worklist = new CircularQueue<HashSet<int>>();
    worklist.Enqueue(S0);

    // The transition relation of the DFA
    var res = new HashDictionary<HashSet<int>, IDictionary<string, HashSet<int>>>();

    while (!worklist.IsEmpty)
    {
        HashSet<int> S = worklist.Dequeue();
        if (!res.Contains(S))
        {
            // The S -lab-> T transition relation being constructed for a given S
            IDictionary<string, HashSet<int>> STrans =
                new HashDictionary<string, HashSet<int>>();

            // For all s in S, consider all transitions s -lab-> t
            foreach (int s in S)
            {
                // For all non-epsilon transitions s -lab-> t, add t to T
                foreach (Transition tr in trans[s])
                {
                    if (tr.Lab != null) // Non-epsilon transition
                    {
                        HashSet<int> toState;
                        if (STrans.Contains(tr.Lab)) // Already a transition on lab
                        {
                            toState = STrans[tr.Lab];
                        }
                        else // No transitions on lab yet
                        {
                            toState = new HashSet<int>();
                            STrans.Add(tr.Lab, toState);
                        }
                        toState.Add(tr.Target);
                    }
                }
            }

            // Epsilon-close all T such that S -lab-> T, and put on worklist
            var STransClosed = new HashDictionary<string, HashSet<int>>();
            foreach (var entry in STrans)
            {
                var Tclose = EpsilonClose(entry.Value, trans);
                STransClosed.Add(entry.Key, Tclose);
                worklist.Enqueue(Tclose);
            }
            res.Add(S, STransClosed);
        }
    }
    return res;
}
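// EpsilonClose is used above but not shown. A minimal sketch that is
// consistent with its use here (breadth-first closure over epsilon,
// i.e. null-labelled, transitions); the original may differ in detail:
static HashSet<int> EpsilonClose(HashSet<int> S, IDictionary<int, ArrayList<Transition>> trans)
{
    var worklist = new CircularQueue<int>();
    foreach (int s in S)
    {
        worklist.Enqueue(s);
    }
    var res = new HashSet<int>();
    res.AddAll(S);
    while (!worklist.IsEmpty)
    {
        int s = worklist.Dequeue();
        foreach (Transition tr in trans[s])
        {
            // follow only epsilon transitions to states not yet seen
            if (tr.Lab == null && !res.Contains(tr.Target))
            {
                res.Add(tr.Target);
                worklist.Enqueue(tr.Target);
            }
        }
    }
    return res;
}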
public void CountTest()
{
    IDictionary<int, int> dictionary = new HashDictionary<int, int>();
    Assert.AreEqual(0, dictionary.Count);

    dictionary.Add(1, 10);
    Assert.AreEqual(1, dictionary.Count);

    dictionary.Add(2, 11);
    Assert.AreEqual(2, dictionary.Count);

    dictionary[3] = 20;
    Assert.AreEqual(3, dictionary.Count);

    dictionary[3] = 40;
    Assert.AreEqual(3, dictionary.Count);
}
public void AddKeyTwiceTest()
{
    IDictionary<int, int> dictionary = new HashDictionary<int, int>()
    {
        { 1, 10 },
        { 2, 20 }
    };
    dictionary.Add(2, 30);
}
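// Note: Add with an already-present key throws (in C5's HashDictionary
// it is DuplicateNotAllowedException), so this test presumably carries
// an expected-exception attribute that the snippet does not show.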
static void Main(string[] args)
{
    HashDictionary<int, string> dict = new HashDictionary<int, string>();
    dict[3] = "WILLI";
    dict[4] = "hugo";
    dict.Add(1, "franz");
    Console.WriteLine("[1] = {0}", dict[1]);
}
public void Add(T item)
{
    Q key = toKey(item);
    if (!dict.Contains(key))
    {
        dict.Add(key, new HashSet<T>(ReferenceEqualityComparer<T>.Default));
    }
    dict[key].Add(item);
}
// Compute a renamer, which is a dictionary mapping set of int to int
static IDictionary<Set<int>, int> MkRenamer(ICollectionValue<Set<int>> states)
{
    IDictionary<Set<int>, int> renamer = new HashDictionary<Set<int>, int>();
    int count = 0;
    foreach (Set<int> k in states)
    {
        renamer.Add(k, count++);
    }
    return renamer;
}
public void Add(T item)
{
    var key = _toKey(item);
    if (!_dictionary.Contains(key))
    {
        _dictionary.Add(key, new HashSet<T>(EqualityComparer<T>.Default));
    }
    _dictionary[key].Add(item);
}
// Compute a renamer, which is a dictionary mapping set of int to int
private static IDictionary<HashSet<int>, int> MkRenamer(ICollectionValue<HashSet<int>> states)
{
    var renamer = new HashDictionary<HashSet<int>, int>();
    var count = 0;
    foreach (var k in states)
    {
        renamer.Add(k, count++);
    }
    return renamer;
}
/// <summary>
/// Aggregate the results of the parallel aggregations.
/// </summary>
/// <param name="results">
/// Results to aggregate.
/// </param>
/// <returns>
/// The aggregation of the parallel aggregation results.
/// </returns>
public virtual object AggregateResults(ICollection results)
{
    IParallelAwareAggregator aggregator = (IParallelAwareAggregator)m_aggregator;

    IDictionary dictionaryResult = new HashDictionary();
    foreach (IDictionary dictPart in results)
    {
        // partial aggregation results are maps with distinct values
        // as keys and partial aggregation results as values
        foreach (DictionaryEntry entry in dictPart)
        {
            object distinct = entry.Key;
            object result = entry.Value;

            // collect all the aggregation results per group
            ICollection group = (ICollection)dictionaryResult[distinct];
            if (group == null)
            {
                dictionaryResult.Add(distinct, group = new ArrayList());
            }
            CollectionUtils.Add(group, result);
        }
    }

    IDictionary newResult = new HashDictionary(dictionaryResult);
    if (dictionaryResult.Count == 0)
    {
        // we need to call "AggregateResults" on the underlying
        // aggregator to fulfill our contract, even though any result
        // will be discarded
        aggregator.AggregateResults(NullImplementation.GetCollection());
    }
    else
    {
        IFilter filter = m_filter;
        foreach (DictionaryEntry entry in dictionaryResult)
        {
            ICollection group = (ICollection)entry.Value;
            object result = aggregator.AggregateResults(group);
            if (filter == null || filter.Evaluate(result))
            {
                newResult[entry.Key] = result;
            }
            else
            {
                newResult.Remove(entry.Key);
            }
        }
    }
    return newResult;
}
public void HashDictionaryTest()
{
    HashDictionary<Types.Transaction> dict = new HashDictionary<Types.Transaction>();

    byte[] key1 = Merkle.transactionHasher.Invoke(Util.GetNewTransaction(1));
    byte[] key2 = Merkle.transactionHasher.Invoke(Util.GetNewTransaction(1));

    dict.Add(key1, Util.GetNewTransaction(1));

    Assert.IsTrue(dict.ContainsKey(key1));
    Assert.IsTrue(dict.ContainsKey(key2));
}
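// The assertion on key2 only holds if this HashDictionary compares
// byte[] keys by content rather than by reference (key1 and key2 are
// distinct arrays holding the same hash). A sketch of the kind of
// comparer such a dictionary presumably plugs in; the original
// implementation is not shown, so this is an assumption:
class ByteArrayComparer : System.Collections.Generic.IEqualityComparer<byte[]>
{
    public bool Equals(byte[] x, byte[] y)
    {
        if (ReferenceEquals(x, y)) return true;
        if (x == null || y == null || x.Length != y.Length) return false;
        for (int i = 0; i < x.Length; i++)
        {
            if (x[i] != y[i]) return false;
        }
        return true;
    }

    public int GetHashCode(byte[] a)
    {
        // keys here are already uniform cryptographic hashes, so the
        // first four bytes make an adequate bucket hash
        return a.Length >= 4 ? System.BitConverter.ToInt32(a, 0) : a.Length;
    }
}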
public LanguageStrings(string fileName)
{
    Language = new C5.HashDictionary<string, string>();

    string type, text, line;
    int pos;
    foreach (string l in File.ReadAllLines(fileName))
    {
        line = l.Trim();
        if (line.StartsWith("#"))
        {
            continue;
        }
        if (!line.Contains(" ") && !line.Contains("\t"))
        {
            continue;
        }

        // We need to stop at the first space or \t.
        for (pos = 0; pos < line.Length; pos++)
        {
            if (line[pos] == ' ' || line[pos] == '\t')
            {
                break;
            }
        }

        type = line.Substring(0, pos);
        if (Language.Contains(type))
        {
            continue;
        }

        text = line.Substring(pos);
        text = text.Trim();
        text = text.Replace("\\n", "\n");
        text = text.Replace("\\t", "\t");
        Language.Add(type, text);
    }

    if (Language.Contains("Error_No_Local"))
    {
        _errorNoLocal = Language["Error_No_Local"];
    }
    if (Language.Contains("Language_Name"))
    {
        _languageName = Language["Language_Name"];
    }
    if (Language.Contains("Language_DisplayName"))
    {
        _languageDisplayName = Language["Language_DisplayName"];
    }
}
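/* A hypothetical input file in the format the parser above accepts:
   a key, then the first space or tab as separator, then the text
   (with literal \n and \t escapes expanded on load). '#' lines and
   lines without a separator are skipped; repeated keys keep the
   first occurrence. The contents below are made up for illustration:

   # sample strings file
   Language_Name         English
   Language_DisplayName  English (United States)
   Error_No_Local        Error:\nNo local string found.
*/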
public static void SetFunc(string name, ConsoleFunction func)
{
    lock (_varables)
    {
        if (_functions.Contains(name))
        {
            _functions[name] = func;
        }
        else
        {
            _functions.Add(name, func);
        }
    }
}
static void Main(string[] args)
{
    // To use the hash map you have to reference it first in the project:
    // right-click References -> Add Reference -> Projects -> HashDictionary.
    // Alternatively use Ctrl + . and select "add reference".
    var dict = new HashDictionary<int, string>();
    dict[3] = "Willi";
    dict[5] = "Andi";
    dict.Add(1, "Franz");

    // V1
    //Console.WriteLine($"[1] = {dict[1]}");
    //Console.WriteLine($"[3] = {dict[3]}");
    //Console.WriteLine($"[5] = {dict[5]}");
    //if (dict.ContainsKey(7))
    //    Console.WriteLine($"[7] = {dict[7]}");
    //else
    //    Console.WriteLine("[7] does not exist");

    // V2
    string result;
    if (dict.TryGetValue(7, out result))
    {
        Console.WriteLine($"[7] = {result}");
    }
    else
    {
        Console.WriteLine("[7] does not exist");
    }

    // V3
    if (dict.TryGetValue(7, out string result2))
    {
        Console.WriteLine($"[7] = {result2}");
    }
    else
    {
        Console.WriteLine("[7] does not exist");
    }

    // iterate
    foreach (KeyValuePair<int, string> pair in dict)
    {
        Console.WriteLine(pair);
    }

    Console.WriteLine("Press any key to exit....");
    Console.ReadLine();
}
void AddTx(byte[] txHash, Consensus.Types.Transaction tx, TxStateEnum txState)
{
    Add(txHash, new GraphNode(GraphNodeTypeEnum.Tx, txState.ToString(), txHash));

    uint i = 0;
    foreach (var output in tx.outputs)
    {
        var outputHash = Consensus.Merkle.outputHasher.Invoke(output);

        string text = output.@lock is Consensus.Types.OutputLock.ContractLock ? "C" : "P";
        text += " " + Convert.ToBase64String(output.spend.asset);
        text += " " + output.spend.amount;

        if (output.spend.asset.SequenceEqual(Consensus.Tests.zhash))
        {
            text += " Kalapas";
        }

        if (output.@lock is Consensus.Types.OutputLock.ContractLock)
        {
            var data = ((Consensus.Types.OutputLock.ContractLock)output.@lock).data;
            if (data != null)
            {
                text += " " + Convert.ToBase64String(data);
            }
        }

        var outputGraphNode = new GraphNode(GraphNodeTypeEnum.Output, text);
        Add(outputHash, outputGraphNode, txHash);

        var outpointHash = Consensus.Merkle.outpointHasher.Invoke(
            new Consensus.Types.Outpoint(txHash, i));
        _Nodes[outpointHash] = outputHash;
        i++;
    }

    foreach (var outpoint in tx.inputs)
    {
        var outpointHash = Consensus.Merkle.outpointHasher.Invoke(outpoint);
        Add(outpointHash, new GraphNode(GraphNodeTypeEnum.Outpoint), txHash);
    }

    if (txState == TxStateEnum.Unconfirmed)
    {
        Link(txHash, memPool);
    }

    _Txs.Add(txHash, tx);
}
void Add(byte[] key, GraphNode graphNode, byte[] parent = null)
{
    if (!_Keys.ContainsKey(key))
    {
        _Indexes.Add(key, _Indexes.Count);
        _Keys.Add(key, graphNode);
        _Graph.GraphNodes.Add(graphNode);
    }

    if (parent != null)
    {
        Link(parent, key);
    }
}
/// <summary>
/// Parse and configure serializer information.
/// </summary>
private void ParseSerializerConfig()
{
    IDictionary serializerMap = new HashDictionary();
    var config = Config.FindElement("cluster-config/serializers");
    if (config != null)
    {
        for (var serializers = config.GetElements("serializer"); serializers.MoveNext(); )
        {
            var xmlSerializer = (IXmlElement)serializers.Current;
            var name = xmlSerializer.GetAttribute("id").GetString();

            ConfigurableSerializerFactory factory = new ConfigurableSerializerFactory();
            factory.Config = xmlSerializer;
            serializerMap.Add(name, factory);
        }
    }

    // check that the well-known pof serializer is present
    String serializerName = "pof";
    if (!serializerMap.Contains(serializerName))
    {
        IXmlElement pofSerializer = new SimpleElement("serializer");
        IXmlElement xmlInstance = pofSerializer.EnsureElement("instance");
        xmlInstance.EnsureElement("class-name")
            .SetString("Tangosol.IO.Pof.ConfigurablePofContext, Coherence");

        ConfigurableSerializerFactory factory = new ConfigurableSerializerFactory();
        factory.Config = pofSerializer;
        serializerMap.Add(serializerName, factory);
    }
    SerializerMap = serializerMap;
}
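/* A hypothetical configuration fragment in the shape this parser
   walks; the element and attribute names follow the code above, and
   the class name shown is the well-known POF context the method
   falls back to anyway:

   <cluster-config>
     <serializers>
       <serializer id="pof">
         <instance>
           <class-name>Tangosol.IO.Pof.ConfigurablePofContext, Coherence</class-name>
         </instance>
       </serializer>
     </serializers>
   </cluster-config>
*/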
public static void Main()
{
    // var p1 = new KeyValuePair<int, int>(1, 3);
    // var p2 = new KeyValuePair<int, int>(1, 2);
    // Console.WriteLine(p1.Equals(p2));

    // int[] values = { 1, 2, 4, 5 };
    // var cachedValues = values;
    // values = new int[] { 5, 6, 7, 8 };
    // Console.WriteLine(string.Join(", ", cachedValues));

    var table = new HashDictionary<string, int>();
    table.Add("Pesho", 5);
    table.Add("Gosho", 5);
    //table.Add("Pesho", 5);
    //table.Add("Pesho", 5);

    Console.WriteLine(table.ContainsKey("Pesho"));
    Console.WriteLine(table.ContainsKey("Pesho2"));

    //foreach (var pair in table)
    //{
    //    Console.WriteLine(pair.Key + " -> " + pair.Value);
    //}
}
public void ContainsTest()
{
    IDictionary<int, int> dictionary = new HashDictionary<int, int>();
    for (int i = 1; i < 20; i++)
    {
        dictionary.Add(i, 10 * i);
    }

    for (int i = 1; i < 20; i++)
    {
        Assert.IsTrue(dictionary.ContainsKey(i));
    }

    Assert.IsFalse(dictionary.ContainsKey(0));
    Assert.IsFalse(dictionary.ContainsKey(21));
}
// Using a renamer (a dictionary mapping set of int to int), replace
// composite (set of int) states with simple (int) states in the
// transition relation trans, which is a dictionary mapping set of
// int to a dictionary mapping from string to set of int. The
// result is a dictionary mapping from int to a dictionary mapping
// from string to int.
private static IDictionary<int, IDictionary<string, int>> Rename(
    IDictionary<HashSet<int>, int> renamer,
    IDictionary<HashSet<int>, IDictionary<string, HashSet<int>>> trans)
{
    var newtrans = new HashDictionary<int, IDictionary<string, int>>();
    foreach (var entry in trans)
    {
        var k = entry.Key;
        var newktrans = new HashDictionary<string, int>();
        foreach (var tr in entry.Value)
        {
            newktrans.Add(tr.Key, renamer[tr.Value]);
        }
        newtrans.Add(renamer[k], newktrans);
    }
    return newtrans;
}
/// <summary>
/// Process a collection of <see cref="IInvocableCacheEntry"/>
/// objects using the underlying extractor to split the entries
/// into non-intersecting (distinct) groups and then apply the
/// underlying aggregator separately to each group.
/// </summary>
/// <param name="entries">
/// A collection of read-only <b>IInvocableCacheEntry</b>
/// objects to aggregate.
/// </param>
/// <returns>
/// A dictionary that has the unique tuples as keys and results of
/// the corresponding subset aggregation as values.
/// </returns>
public virtual object Aggregate(ICollection entries)
{
    IValueExtractor extractor = m_extractor;
    IEntryAggregator aggregator = m_aggregator;
    IFilter filter = m_filter;

    // create non-intersecting groups of entry sets
    IDictionary result = new HashDictionary();
    foreach (IInvocableCacheEntry entry in entries)
    {
        if (entry.IsPresent)
        {
            // extract a distinct value (or a tuple)
            object distinct = entry.Extract(extractor);

            // add the entry to the corresponding group
            ICollection group = (ICollection)result[distinct];
            if (group == null)
            {
                result.Add(distinct, group = new ArrayList());
            }
            CollectionUtils.Add(group, entry);
        }
    }

    // run the aggregation
    IDictionary newResult = new HashDictionary(result);
    foreach (DictionaryEntry entry in result)
    {
        ICollection group = (ICollection)entry.Value;
        object res = aggregator.Aggregate(group);
        if (filter == null || filter.Evaluate(res))
        {
            newResult[entry.Key] = res;
        }
        else
        {
            newResult.Remove(entry.Key);
        }
    }
    return newResult;
}
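/* Worked example (illustrative): with IdentityExtractor as the
   extractor and Count as the underlying aggregator, entries with
   values { 173, 173, 185, 164, 164, 164 } are grouped into
   { 173 -> [e1, e2], 185 -> [e3], 164 -> [e4, e5, e6] }, and the
   returned dictionary is { 173 -> 2, 185 -> 1, 164 -> 3 }, matching
   the assertions in the TestCompositeAggregator snippet above. */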