/// <summary>
/// Verify that an <b>ExtractorEventTransformer</b> configured with a
/// null key extractor and an <b>IdentityExtractor</b> value extractor
/// strips the old value from a transformed event while preserving the
/// cache, type, key, new value and synthetic flag, and that it also
/// accepts a converter-wrapped event.
/// </summary>
public void TestTransform()
{
    ExtractorEventTransformer transformer =
            new ExtractorEventTransformer(null, IdentityExtractor.Instance);

    LocalCache     cache    = new LocalCache();
    CacheEventArgs original = new CacheEventArgs(cache, CacheEventType.Inserted,
            "inserted", "old value", "new value", false);

    CacheEventArgs transformed = transformer.Transform(original);
    Assert.IsNotNull(transformed);

    // everything except the old value must carry over unchanged
    Assert.AreEqual(original.Cache, transformed.Cache);
    Assert.AreEqual(original.EventType, transformed.EventType);
    Assert.AreEqual(original.Key, transformed.Key);
    Assert.AreNotEqual(original.OldValue, transformed.OldValue);
    Assert.IsNotNull(original.OldValue);
    Assert.IsNull(transformed.OldValue);
    Assert.AreEqual(original.NewValue, transformed.NewValue);
    Assert.AreEqual(original.IsSynthetic, transformed.IsSynthetic);

    // wrap the event with no-op converters and transform it again
    CacheEventArgs wrapped = ConverterCollections.GetCacheEventArgs(
            cache, original,
            NullImplementation.GetConverter(),
            NullImplementation.GetConverter());
    Assert.IsNotNull(wrapped);
    Assert.IsInstanceOf(typeof(ConverterCollections.ConverterCacheEventArgs), wrapped);

    ConverterCollections.ConverterCacheEventArgs convEvt =
            wrapped as ConverterCollections.ConverterCacheEventArgs;
    Assert.IsNotNull(convEvt);

    transformed = transformer.Transform(convEvt);
    Assert.IsNotNull(transformed);
}
/// <summary>
/// Aggregate the results of the parallel aggregations.
/// </summary>
/// <param name="results">
/// Results to aggregate.
/// </param>
/// <returns>
/// The aggregation of the parallel aggregation results.
/// </returns>
public virtual object AggregateResults(ICollection results)
{
    IParallelAwareAggregator aggregator = (IParallelAwareAggregator) m_aggregator;

    // each partial result is a dictionary keyed by distinct group value;
    // gather all partial results belonging to the same group into one list
    IDictionary groupedResults = new HashDictionary();
    foreach (IDictionary partial in results)
    {
        foreach (DictionaryEntry entry in partial)
        {
            ICollection group = (ICollection) groupedResults[entry.Key];
            if (group == null)
            {
                group = new ArrayList();
                groupedResults.Add(entry.Key, group);
            }
            CollectionUtils.Add(group, entry.Value);
        }
    }

    IDictionary finalResults = new HashDictionary(groupedResults);
    if (groupedResults.Count == 0)
    {
        // we need to call "AggregateResults" on the underlying
        // aggregator to fulfill our contract, even though any result
        // will be discarded
        aggregator.AggregateResults(NullImplementation.GetCollection());
    }
    else
    {
        IFilter filter = m_filter;
        foreach (DictionaryEntry entry in groupedResults)
        {
            // reduce each group to a single value and keep it only if it
            // passes the (optional) result filter
            object aggregated = aggregator.AggregateResults((ICollection) entry.Value);
            if (filter == null || filter.Evaluate(aggregated))
            {
                finalResults[entry.Key] = aggregated;
            }
            else
            {
                finalResults.Remove(entry.Key);
            }
        }
    }
    return finalResults;
}
/// <summary>
/// Process a collection of <see cref="IInvocableCacheEntry"/>
/// objects.
/// </summary>
/// <param name="entries">
/// A read-only collection of <b>IInvocableCacheEntry</b>
/// objects to process.
/// </param>
/// <returns>
/// An empty, immutable dictionary.
/// </returns>
public override IDictionary ProcessAll(ICollection entries)
{
    IDictionary values = m_dictionary;
    IFilter     filter = m_filter;

    foreach (IInvocableCacheEntry entry in entries)
    {
        object key = entry.Key;

        // skip entries we have no replacement value for
        if (!values.Contains(key))
        {
            continue;
        }

        // update only entries that satisfy the (optional) filter
        if (InvocableCacheHelper.EvaluateEntry(filter, entry))
        {
            entry.SetValue(values[key], false);
        }
    }
    return NullImplementation.GetDictionary();
}
/// <summary>
/// Obtain the result of the aggregation.
/// </summary>
/// <remarks>
/// If the <paramref name="isFinal"/> parameter is <b>true</b>, the
/// returned object must be the final result of the aggregation;
/// otherwise, the returned object will be treated as a partial
/// result that should be incorporated into the final result.
/// </remarks>
/// <param name="isFinal">
/// <b>true</b> to indicate that the final result of the aggregation
/// process should be returned; this will only be <b>false</b> if a
/// parallel approach is being used.
/// </param>
/// <returns>
/// The result of the aggregation process.
/// </returns>
protected override object FinalizeResult(bool isFinal)
{
    // detach the accumulated data so this aggregator can be reused
    ICollection partial = m_coll;
    m_coll = null; // COHNET-181

    if (!isFinal)
    {
        // partial aggregation data, to be merged by the caller
        return partial;
    }

    // final result: never hand back null, substitute an empty collection
    if (partial == null)
    {
        return NullImplementation.GetCollection();
    }
    return partial;
}
/// <summary>
/// Obtain the result of the aggregation.
/// </summary>
/// <remarks>
/// If the <paramref name="isFinal"/> parameter is <b>true</b>, the
/// returned object must be the final result of the aggregation;
/// otherwise, the returned object will be treated as a partial
/// result that should be incorporated into the final result.
/// </remarks>
/// <param name="isFinal">
/// <b>true</b> to indicate that the final result of the aggregation
/// process should be returned; this will only be <b>false</b> if a
/// parallel approach is being used.
/// </param>
/// <returns>
/// The result of the aggregation process.
/// </returns>
protected override object FinalizeResult(bool isFinal)
{
    // detach the accumulated data so this aggregator can be reused
    IDictionary partial = m_map;
    m_map = null;

    if (!isFinal)
    {
        // partial aggregation data, to be merged by the caller
        return partial;
    }

    // final result: never hand back null, substitute an empty dictionary
    if (partial == null)
    {
        return NullImplementation.GetDictionary();
    }
    return partial;
}
/// <summary>
/// Invoke the passed <see cref="IEntryProcessor"/> against the
/// entries specified by the passed cache and entries.
/// </summary>
/// <remarks>
/// The invocation is made thread safe by locking the corresponding
/// keys on the cache. If an attempt to lock all the entries at once
/// fails, they will be processed individually one-by-one.
/// </remarks>
/// <param name="cache">
/// The <see cref="IConcurrentCache"/> that the
/// <b>IEntryProcessor</b> works against.
/// </param>
/// <param name="entries">
/// A collection of <see cref="IInvocableCacheEntry"/> objects to
/// process.
/// </param>
/// <param name="agent">
/// The <b>IEntryProcessor</b> to use to process the specified keys.
/// </param>
/// <returns>
/// An <b>IDictionary</b> containing the results of invoking the
/// <b>IEntryProcessor</b> against each of the specified entry.
/// </returns>
public static IDictionary InvokeAllLocked(IConcurrentCache cache,
        ICollection entries, IEntryProcessor agent)
{
    ICollection keys = ConverterCollections.GetCollection(entries,
            ENTRY_TO_KEY_CONVERTER, NullImplementation.GetConverter());

    // optimistic path: try to lock every key in a single shot
    var lockedKeys = LockAll(cache, keys, 0);
    if (lockedKeys != null)
    {
        try
        {
            return agent.ProcessAll(entries);
        }
        finally
        {
            UnlockAll(cache, lockedKeys);
        }
    }

    // the bulk lock attempt failed; fall back to processing the
    // entries individually, locking one key at a time
    var results = new HashDictionary(entries.Count);
    foreach (IInvocableCacheEntry entry in entries)
    {
        results[entry.Key] = InvokeLocked(cache, entry, agent);
    }
    return results;
}
/// <summary>
/// Process a collection of <see cref="IInvocableCacheEntry"/>
/// objects.
/// </summary>
/// <param name="entries">
/// A read-only collection of <b>IInvocableCacheEntry</b>
/// objects to process.
/// </param>
/// <returns>
/// An empty, immutable dictionary.
/// </returns>
public override IDictionary ProcessAll(ICollection entries)
{
    // delegate the actual processing to the base implementation,
    // discarding whatever it returns
    base.ProcessAll(entries);

    // this processor's contract is to always report an empty result
    return NullImplementation.GetDictionary();
}
/// <summary>
/// Exercise the <b>TopNAggregator</b> against the named cache: empty
/// cache, populated cache (distinct values), and a cache containing
/// duplicate values.
/// </summary>
/// <param name="sCache">
/// The name of the cache to run the aggregations against.
/// </param>
protected void DoTestTopN(string sCache)
{
    INamedCache cache = CacheFactory.GetCache(sCache);
    cache.Clear();

    TopNAggregator agent = new TopNAggregator(
            IdentityExtractor.Instance, SafeComparer.Instance, 10);

    object[] emptyResult = new object[0];
    object[] result;

    // against an empty cache every invocation style yields no results
    result = (object[]) cache.Aggregate(NullImplementation.GetCollection(), agent);
    AssertArrayEquals(emptyResult, result, "null collection");

    result = (object[]) cache.Aggregate(new ArrayList(new object[] { "1" }), agent);
    AssertArrayEquals(emptyResult, result, "singleton collection");

    result = (object[]) cache.Aggregate((IFilter) null, agent);
    AssertArrayEquals(emptyResult, result, "null filter");

    result = (object[]) cache.Aggregate(AlwaysFilter.Instance, agent);
    AssertArrayEquals(emptyResult, result, "AlwaysFilter");

    // populate the cache with values 1..count keyed by their string form
    int       count   = 10000;
    Hashtable entries = new Hashtable();
    for (int i = 1; i <= count; i++)
    {
        entries.Add(i.ToString(), i);
    }
    cache.InsertAll(entries);

    // the expected top ten are the ten largest values, descending
    object[] expectedTop = new object[10];
    for (int i = 0; i < 10; i++)
    {
        expectedTop[i] = count - i;
    }

    result = (object[]) cache.Aggregate(NullImplementation.GetCollection(), agent);
    AssertArrayEquals(emptyResult, result);

    result = (object[]) cache.Aggregate(new ArrayList(new object[] { "1" }), agent);
    AssertArrayEquals(new object[] { 1 }, result);

    result = (object[]) cache.Aggregate(new ArrayList(new object[] { "1" }), agent);
    AssertArrayEquals(new object[] { 1 }, result);

    result = (object[]) cache.Aggregate((IFilter) null, agent);
    AssertArrayEquals(expectedTop, result);

    result = (object[]) cache.Aggregate(AlwaysFilter.Instance, agent);
    AssertArrayEquals(expectedTop, result);

    // test duplicate values
    cache.Clear();
    count   = 100;
    entries = new Hashtable(count);
    for (int i = 1; i <= count; ++i)
    {
        entries.Add(i.ToString(), i / 2);
    }
    cache.InsertAll(entries);

    expectedTop = new object[10];
    for (int i = 0; i < 10; ++i)
    {
        expectedTop[i] = (count - i) / 2;
    }

    result = (object[]) cache.Aggregate((IFilter) null, agent);
    AssertArrayEquals(expectedTop, result);

    CacheFactory.Shutdown();
}