Example #1
            /// <summary>
            /// Aggregate the results of the parallel aggregations.
            /// </summary>
            /// <param name="results">
            /// Results to aggregate.
            /// </param>
            /// <returns>
            /// The aggregation of the parallel aggregation results.
            /// </returns>
            public virtual object AggregateResults(ICollection results)
            {
                IParallelAwareAggregator aggregator = (IParallelAwareAggregator)m_aggregator;

                IDictionary dictionaryResult = new HashDictionary();

                foreach (IDictionary dictPart in results)
                {
                    // partial aggregation results are maps with distinct values
                    // as keys and partial aggregation results as values
                    foreach (DictionaryEntry entry in dictPart)
                    {
                        object distinct = entry.Key;
                        object result   = entry.Value;

                        // collect all the aggregation results per group
                        ICollection group = (ICollection)dictionaryResult[distinct];
                        if (group == null)
                        {
                            dictionaryResult.Add(distinct, group = new ArrayList());
                        }
                        CollectionUtils.Add(group, result);
                    }
                }

                // start from a copy of the per-group collections; each entry is
                // replaced below with the aggregated result for its group, or
                // removed if the filter rejects that result
                IDictionary newResult = new HashDictionary(dictionaryResult);

                if (dictionaryResult.Count == 0)
                {
                    // we need to call "AggregateResults" on the underlying
                    // aggregator to fulfill our contract, even though any result
                    // will be discarded
                    aggregator.AggregateResults(NullImplementation.GetCollection());
                }
                else
                {
                    IFilter filter = m_filter;
                    foreach (DictionaryEntry entry in dictionaryResult)
                    {
                        ICollection group  = (ICollection)entry.Value;
                        object      result = aggregator.AggregateResults(group);
                        if (filter == null || filter.Evaluate(result))
                        {
                            newResult[entry.Key] = result;
                        }
                        else
                        {
                            newResult.Remove(entry.Key);
                        }
                    }
                }
                return(newResult);
            }
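
To make the data flow concrete, here is a minimal sketch of what this method consumes and produces, assuming the enclosing class is a group-by aggregator whose underlying m_aggregator is a parallel-aware Count; the variable groupAggregator stands for an instance of that class, and the group keys and counts are invented:

            // hypothetical partial results from two parallel members: each is a
            // map of distinct group key -> partial result for that key
            IDictionary part1 = new HashDictionary();
            part1["red"]  = 3;
            part1["blue"] = 1;

            IDictionary part2 = new HashDictionary();
            part2["red"]  = 2;

            // the merged map has one entry per distinct key ("red" -> 5,
            // "blue" -> 1 when the underlying aggregator is Count), minus any
            // entries whose aggregated result is rejected by m_filter
            ICollection parts  = new ArrayList(new object[] { part1, part2 });
            IDictionary merged = (IDictionary) groupAggregator.AggregateResults(parts);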
        /// <summary>
        /// Obtain the result of the aggregation.
        /// </summary>
        /// <remarks>
        /// If the <paramref name="isFinal"/> parameter is <b>true</b>, the
        /// returned object must be the final result of the aggregation;
        /// otherwise, the returned object will be treated as a partial
        /// result that should be incorporated into the final result.
        /// </remarks>
        /// <param name="isFinal">
        /// <b>true</b> to indicate that the final result of the aggregation
        /// process should be returned; this will only be <b>false</b> if a
        /// parallel approach is being used.
        /// </param>
        /// <returns>
        /// The result of the aggregation process.
        /// </returns>
        protected override object FinalizeResult(bool isFinal)
        {
            ICollection coll = m_coll;

            m_coll = null; // COHNET-181

            if (isFinal)
            {
                // return the final aggregated result
                return(coll == null ? NullImplementation.GetCollection() : coll);
            }
            else
            {
                // return partial aggregation data
                return(coll);
            }
        }
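
A brief usage sketch for context, assuming this method belongs to a collection-producing aggregator along the lines of Tangosol.Util.Aggregator.DistinctValues (the constructor shown and the cache name are assumptions): during a parallel aggregation each storage member invokes FinalizeResult with isFinal set to false and ships back its partial collection, while the final client-side call passes true and therefore yields an empty collection rather than null when nothing was aggregated.

        INamedCache cache = CacheFactory.GetCache("people");

        // members return partial collections (isFinal == false); the final
        // merge calls FinalizeResult(true), so an empty cache produces an
        // empty collection instead of null
        ICollection distinct = (ICollection) cache.Aggregate(
            AlwaysFilter.Instance, new DistinctValues(IdentityExtractor.Instance));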
        protected void DoTestTopN(string sCache)
        {
            INamedCache cache = CacheFactory.GetCache(sCache);

            cache.Clear();

            TopNAggregator agent = new TopNAggregator(
                IdentityExtractor.Instance, SafeComparer.Instance, 10);

            object[] aoEmpty = new Object[0];
            object[] aoResult;

            aoResult = (object[])cache.Aggregate(
                NullImplementation.GetCollection(), agent);
            AssertArrayEquals(aoEmpty, aoResult, "null collection");

            aoResult = (object[])cache.Aggregate(
                new ArrayList(new object[] { "1" }), agent);
            AssertArrayEquals(aoEmpty, aoResult, "singleton collection");

            aoResult = (object[])cache.Aggregate((IFilter)null, agent);
            AssertArrayEquals(aoEmpty, aoResult, "null filter");

            aoResult = (object[])cache.Aggregate(AlwaysFilter.Instance, agent);
            AssertArrayEquals(aoEmpty, aoResult, "AlwaysFilter");

            // populate the cache with 10,000 entries whose values are the
            // integers 1 through 10,000, keyed by their string form
            Hashtable ht    = new Hashtable();
            int       cKeys = 10000;

            for (int i = 1; i <= cKeys; i++)
            {
                ht.Add(i.ToString(), i);
            }
            cache.InsertAll(ht);

            // the expected result is the ten largest values: 10000 down to 9991
            object[] aoTop10 = new object[10];
            for (int i = 0; i < 10; i++)
            {
                aoTop10[i] = cKeys - i;
            }

            aoResult = (object[])cache.Aggregate(
                NullImplementation.GetCollection(), agent);
            AssertArrayEquals(aoEmpty, aoResult);

            aoResult = (object[])cache.Aggregate(
                new ArrayList(new object[] { "1" }), agent);
            AssertArrayEquals(new object[] { 1 }, aoResult);

            aoResult = (object[])cache.Aggregate(
                new ArrayList(new object[] { "1" }), agent);
            AssertArrayEquals(new object[] { 1 }, aoResult);

            aoResult = (object[])cache.Aggregate((IFilter)null, agent);
            AssertArrayEquals(aoTop10, aoResult);

            aoResult = (object[])cache.Aggregate(AlwaysFilter.Instance, agent);
            AssertArrayEquals(aoTop10, aoResult);

            // test duplicate values
            cache.Clear();

            cKeys = 100;
            ht    = new Hashtable(cKeys);
            for (int i = 1; i <= cKeys; ++i)
            {
                ht.Add(i.ToString(), i / 2);
            }
            cache.InsertAll(ht);

            aoTop10 = new object[10];
            for (int i = 0; i < 10; ++i)
            {
                aoTop10[i] = (cKeys - i) / 2;
            }

            aoResult = (object[])cache.Aggregate((IFilter)null, agent);
            AssertArrayEquals(aoTop10, aoResult);

            CacheFactory.Shutdown();
        }
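
Outside of the test harness, the same aggregator can be used directly. The sketch below follows the same API as the test above; the cache name, the data, and the result size of 3 are illustrative:

        INamedCache cache = CacheFactory.GetCache("numbers");
        for (int i = 1; i <= 100; i++)
        {
            cache.Insert(i.ToString(), i);
        }

        // keep the three largest extracted values, ordered by the comparer
        TopNAggregator top3 = new TopNAggregator(
            IdentityExtractor.Instance, SafeComparer.Instance, 3);

        object[] result = (object[]) cache.Aggregate(AlwaysFilter.Instance, top3);
        // expected: { 100, 99, 98 }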