/// <summary> Internal helper method used by check that iterates over
        /// valMismatchKeys and generates a Collection of Insanity
        /// instances accordingly.  The MapOfSets are used to populate
        /// the Insanity objects.
        /// </summary>
        /// <seealso cref="InsanityType.VALUEMISMATCH">
        /// </seealso>
        private List <Insanity> CheckValueMismatch(MapOfSets <int, CacheEntry> valIdToItems,
                                                   MapOfSets <ReaderField, int> readerFieldToValIds,
                                                   HashSet <ReaderField> valMismatchKeys)
        {
            List <Insanity> insanity = new List <Insanity>(valMismatchKeys.Count * 3);

            if (valMismatchKeys.Count != 0)
            {
                // we have multiple values for some ReaderFields

                IDictionary <ReaderField, HashSet <int> > rfMap  = readerFieldToValIds.Map;
                IDictionary <int, HashSet <CacheEntry> >  valMap = valIdToItems.Map;
                foreach (ReaderField rf in valMismatchKeys)
                {
                    List <CacheEntry> badEntries = new List <CacheEntry>(valMismatchKeys.Count * 2);
                    foreach (int val in rfMap[rf])
                    {
                        foreach (CacheEntry entry in valMap[val])
                        {
                            badEntries.Add(entry);
                        }
                    }

                    insanity.Add(new Insanity(InsanityType.VALUEMISMATCH, "Multiple distinct value objects for " + rf.ToString(), badEntries.ToArray()));
                }
            }
            return(insanity);
        }
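        // Illustrative walkthrough (not part of the library source): suppose the
        // maps built by Check contain, for a hypothetical ReaderField rf1,
        //
        //     readerFieldToValIds : rf1 -> { 101, 202 }            // two distinct value ids
        //     valIdToItems        : 101 -> { entryA }, 202 -> { entryB }
        //
        // Then rf1 appears in valMismatchKeys, the nested loops above flatten both
        // sets into badEntries = { entryA, entryB }, and a single
        // InsanityType.VALUEMISMATCH Insanity is reported for rf1.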
Example #2
        /// <summary>
        /// Internal helper method used by check that iterates over
        /// <paramref name="valMismatchKeys"/> and generates a <see cref="ICollection{T}"/> of <see cref="Insanity"/>
        /// instances accordingly.  The <see cref="MapOfSets{TKey, TValue}"/> are used to populate
        /// the <see cref="Insanity"/> objects. </summary>
        /// <seealso cref="InsanityType.VALUEMISMATCH"/>
        private static ICollection <Insanity> CheckValueMismatch( // LUCENENET: CA1822: Mark members as static
            MapOfSets <int, FieldCache.CacheEntry> valIdToItems,
            MapOfSets <ReaderField, int> readerFieldToValIds,
            ISet <ReaderField> valMismatchKeys)
        {
            List <Insanity> insanity = new List <Insanity>(valMismatchKeys.Count * 3);

            if (valMismatchKeys.Count != 0)
            {
                // we have multiple values for some ReaderFields

                IDictionary <ReaderField, ISet <int> >           rfMap  = readerFieldToValIds.Map;
                IDictionary <int, ISet <FieldCache.CacheEntry> > valMap = valIdToItems.Map;
                foreach (ReaderField rf in valMismatchKeys)
                {
                    IList <FieldCache.CacheEntry> badEntries = new List <FieldCache.CacheEntry>(valMismatchKeys.Count * 2);
                    foreach (int value in rfMap[rf])
                    {
                        foreach (FieldCache.CacheEntry cacheEntry in valMap[value])
                        {
                            badEntries.Add(cacheEntry);
                        }
                    }

                    FieldCache.CacheEntry[] badness = new FieldCache.CacheEntry[badEntries.Count];
                    badEntries.CopyTo(badness, 0);

                    insanity.Add(new Insanity(InsanityType.VALUEMISMATCH, "Multiple distinct value objects for " + rf.ToString(), badness));
                }
            }
            return(insanity);
        }
Example #3
        /// <summary> Tests a CacheEntry[] for indication of "insane" cache usage.
        /// <p/>
        /// NOTE: FieldCache CreationPlaceholder objects are ignored.
        /// (:TODO: is this a bad idea? are we masking a real problem?)
        /// <p/>
        /// </summary>
        public Insanity[] Check(params CacheEntry[] cacheEntries)
        {
            if (null == cacheEntries || 0 == cacheEntries.Length)
            {
                return(new Insanity[0]);
            }

            if (null != ramCalc)
            {
                for (int i = 0; i < cacheEntries.Length; i++)
                {
                    cacheEntries[i].EstimateSize(ramCalc);
                }
            }

            // the indirect mapping lets MapOfSet dedup identical valIds for us
            //
            // maps the (valId) identityhashCode of cache values to
            // sets of CacheEntry instances
            MapOfSets <int, CacheEntry> valIdToItems = new MapOfSets <int, CacheEntry>(new Dictionary <int, HashSet <CacheEntry> >(17));
            // maps ReaderField keys to Sets of ValueIds
            MapOfSets <ReaderField, int> readerFieldToValIds = new MapOfSets <ReaderField, int>(new Dictionary <ReaderField, HashSet <int> >(17));
            //

            // any keys that we know result in more than one valId
            HashSet <ReaderField> valMismatchKeys = new HashSet <ReaderField>();

            // iterate over all the cacheEntries to get the mappings we'll need
            for (int i = 0; i < cacheEntries.Length; i++)
            {
                CacheEntry    item = cacheEntries[i];
                System.Object val  = item.Value;

                if (val.GetType().IsGenericType &&
                    val.GetType().GetGenericTypeDefinition() == typeof(Lazy <>))
                {
                    continue;
                }

                ReaderField rf = new ReaderField(item.ReaderKey, item.FieldName);

                System.Int32 valId = val.GetHashCode();

                // indirect mapping, so the MapOfSet will dedup identical valIds for us
                valIdToItems.Put(valId, item);
                if (1 < readerFieldToValIds.Put(rf, valId))
                {
                    valMismatchKeys.Add(rf);
                }
            }

            List <Insanity> insanity = new List <Insanity>(valMismatchKeys.Count * 3);

            insanity.AddRange(CheckValueMismatch(valIdToItems, readerFieldToValIds, valMismatchKeys));
            insanity.AddRange(CheckSubreaders(valIdToItems, readerFieldToValIds));

            return(insanity.ToArray());
        }
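        // Minimal usage sketch for the Check method above (hypothetical caller, not
        // part of this class). The checker is normally fed the entries currently
        // held by the FieldCache, e.g. from a test asserting sane cache usage; the
        // exact way to obtain those entries varies between Lucene.NET versions.
        //
        //     CacheEntry[] entries = ...; // e.g. the active FieldCache's GetCacheEntries()
        //     Insanity[] problems = new FieldCacheSanityChecker().Check(entries);
        //     foreach (Insanity problem in problems)
        //     {
        //         System.Console.WriteLine(problem); // VALUEMISMATCH or SUBREADER diagnostics
        //     }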
        /// <summary> Internal helper method used by check that iterates over
        /// the keys of readerFieldToValIds and generates a Collection
        /// of Insanity instances whenever two (or more) ReaderField instances are
        /// found that have an ancestry relationship.
        ///
        /// </summary>
        /// <seealso cref="InsanityType.SUBREADER">
        /// </seealso>
        private List <Insanity> CheckSubreaders(MapOfSets <int, CacheEntry> valIdToItems,
                                                MapOfSets <ReaderField, int> readerFieldToValIds)
        {
            List <Insanity> insanity = new List <Insanity>(23);

            Dictionary <ReaderField, HashSet <ReaderField> > badChildren = new Dictionary <ReaderField, HashSet <ReaderField> >(17);
            MapOfSets <ReaderField, ReaderField>             badKids     = new MapOfSets <ReaderField, ReaderField>(badChildren); // wrapper

            IDictionary <int, HashSet <CacheEntry> >  viToItemSets  = valIdToItems.Map;
            IDictionary <ReaderField, HashSet <int> > rfToValIdSets = readerFieldToValIds.Map;

            HashSet <ReaderField> seen = new HashSet <ReaderField>();

            foreach (ReaderField rf in rfToValIdSets.Keys)
            {
                if (seen.Contains(rf))
                {
                    continue;
                }

                System.Collections.IList kids = GetAllDecendentReaderKeys(rf.readerKey);
                foreach (Object kidKey in kids)
                {
                    ReaderField kid = new ReaderField(kidKey, rf.fieldName);

                    if (badChildren.ContainsKey(kid))
                    {
                        // we've already processed this kid as RF and found other problems
                        // track those problems as our own
                        badKids.Put(rf, kid);
                        badKids.PutAll(rf, badChildren[kid]);
                        badChildren.Remove(kid);
                    }
                    else if (rfToValIdSets.ContainsKey(kid))
                    {
                        // we have cache entries for the kid
                        badKids.Put(rf, kid);
                    }
                    seen.Add(kid);
                }
                seen.Add(rf);
            }

            // every mapping in badKids represents an Insanity
            foreach (ReaderField parent in badChildren.Keys)
            {
                HashSet <ReaderField> kids = badChildren[parent];

                List <CacheEntry> badEntries = new List <CacheEntry>(kids.Count * 2);

                // put parent entr(ies) in first
                {
                    foreach (int val in rfToValIdSets[parent])
                    {
                        badEntries.AddRange(viToItemSets[val]);
                    }
                }

                // now the entries for the descendants
                foreach (ReaderField kid in kids)
                {
                    foreach (int val in rfToValIdSets[kid])
                    {
                        badEntries.AddRange(viToItemSets[val]);
                    }
                }

                insanity.Add(new Insanity(InsanityType.SUBREADER, "Found caches for decendents of " + parent.ToString(), badEntries.ToArray()));
            }

            return(insanity);
        }
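        // Illustrative walkthrough (not part of the library source): the SUBREADER
        // check fires when both a composite reader and one of its segment readers
        // have cached values for the same field. With a hypothetical field "f":
        //
        //     parent = ReaderField(topReaderKey, "f")      // cache entry on the top-level reader
        //     kid    = ReaderField(segmentReaderKey, "f")  // cache entry on a descendant segment
        //
        // GetAllDecendentReaderKeys(topReaderKey) yields segmentReaderKey, the kid is
        // found in rfToValIdSets, so badKids.Put(parent, kid) records the pair, and
        // the reporting loop emits one InsanityType.SUBREADER Insanity bundling the
        // parent's and the kid's cache entries.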
Example #5
        /// <summary>
        /// Internal helper method used by check that iterates over
        /// the keys of <paramref name="readerFieldToValIds"/> and generates a <see cref="ICollection{T}"/>
        /// of <see cref="Insanity"/> instances whenever two (or more) <see cref="ReaderField"/> instances are
        /// found that have an ancestry relationship.
        /// </summary>
        /// <seealso cref="InsanityType.SUBREADER"/>
        private static ICollection <Insanity> CheckSubreaders(MapOfSets <int, FieldCache.CacheEntry> valIdToItems, MapOfSets <ReaderField, int> readerFieldToValIds) // LUCENENET: CA1822: Mark members as static
        {
            List <Insanity> insanity = new List <Insanity>(23);

            Dictionary <ReaderField, ISet <ReaderField> > badChildren = new Dictionary <ReaderField, ISet <ReaderField> >(17);
            MapOfSets <ReaderField, ReaderField>          badKids     = new MapOfSets <ReaderField, ReaderField>(badChildren); // wrapper

            IDictionary <int, ISet <FieldCache.CacheEntry> > viToItemSets  = valIdToItems.Map;
            IDictionary <ReaderField, ISet <int> >           rfToValIdSets = readerFieldToValIds.Map;

            HashSet <ReaderField> seen = new HashSet <ReaderField>();

            //IDictionary<ReaderField, ISet<int>>.KeyCollection readerFields = rfToValIdSets.Keys;
            foreach (ReaderField rf in rfToValIdSets.Keys)
            {
                if (seen.Contains(rf))
                {
                    continue;
                }

                IList <object> kids = GetAllDescendantReaderKeys(rf.ReaderKey);
                foreach (object kidKey in kids)
                {
                    ReaderField kid = new ReaderField(kidKey, rf.FieldName);

                    // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
                    if (badChildren.TryGetValue(kid, out ISet <ReaderField> badKid))
                    {
                        // we've already processed this kid as RF and found other problems
                        // track those problems as our own
                        badKids.Put(rf, kid);
                        badKids.PutAll(rf, badKid);
                        badChildren.Remove(kid);
                    }
                    else if (rfToValIdSets.ContainsKey(kid))
                    {
                        // we have cache entries for the kid
                        badKids.Put(rf, kid);
                    }
                    seen.Add(kid);
                }
                seen.Add(rf);
            }

            // every mapping in badKids represents an Insanity
            foreach (ReaderField parent in badChildren.Keys)
            {
                ISet <ReaderField> kids = badChildren[parent];

                List <FieldCache.CacheEntry> badEntries = new List <FieldCache.CacheEntry>(kids.Count * 2);

                // put parent entr(ies) in first
                {
                    foreach (int value in rfToValIdSets[parent])
                    {
                        badEntries.AddRange(viToItemSets[value]);
                    }
                }

                // now the entries for the descendants
                foreach (ReaderField kid in kids)
                {
                    foreach (int value in rfToValIdSets[kid])
                    {
                        badEntries.AddRange(viToItemSets[value]);
                    }
                }

                FieldCache.CacheEntry[] badness = badEntries.ToArray();

                insanity.Add(new Insanity(InsanityType.SUBREADER, "Found caches for descendants of " + parent.ToString(), badness));
            }

            return(insanity);
        }
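        // Note on the badKids/badChildren pairing above (illustrative sketch, not a
        // code change): MapOfSets is a thin wrapper around the dictionary handed to
        // its constructor, so writes through the wrapper are visible in the backing
        // map. Roughly:
        //
        //     var backing = new Dictionary<ReaderField, ISet<ReaderField>>();
        //     var wrapper = new MapOfSets<ReaderField, ReaderField>(backing);
        //     wrapper.Put(parentRf, kidRf);                  // adds kidRf to backing[parentRf]
        //     bool visible = backing.ContainsKey(parentRf);  // true
        //
        // That is why the reporting loop can iterate badChildren.Keys even though all
        // insertions went through badKids.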
Example #6
        /// <summary>
        /// Tests a CacheEntry[] for indication of "insane" cache usage.
        /// <para>
        /// <b>NOTE:</b> FieldCache CreationPlaceholder objects are ignored.
        /// (:TODO: is this a bad idea? are we masking a real problem?)
        /// </para>
        /// </summary>
        public Insanity[] Check(params FieldCache.CacheEntry[] cacheEntries)
        {
            if (null == cacheEntries || 0 == cacheEntries.Length)
            {
                return(Arrays.Empty <Insanity>());
            }

            if (estimateRam)
            {
                for (int i = 0; i < cacheEntries.Length; i++)
                {
                    cacheEntries[i].EstimateSize();
                }
            }

            // the indirect mapping lets MapOfSet dedup identical valIds for us
            // maps the (valId) identityhashCode of cache values to
            // sets of CacheEntry instances
            MapOfSets <int, FieldCache.CacheEntry> valIdToItems = new MapOfSets <int, FieldCache.CacheEntry>(new Dictionary <int, ISet <FieldCache.CacheEntry> >(17));
            // maps ReaderField keys to Sets of ValueIds
            MapOfSets <ReaderField, int> readerFieldToValIds = new MapOfSets <ReaderField, int>(new Dictionary <ReaderField, ISet <int> >(17));

            // any keys that we know result in more than one valId
            ISet <ReaderField> valMismatchKeys = new JCG.HashSet <ReaderField>();

            // iterate over all the cacheEntries to get the mappings we'll need
            for (int i = 0; i < cacheEntries.Length; i++)
            {
                FieldCache.CacheEntry item = cacheEntries[i];
                object val = item.Value;

                // It's OK to have dup entries, where one is eg
                // float[] and the other is the Bits (from
                // getDocWithField())
                if (val is IBits)
                {
                    continue;
                }

                if (val is FieldCache.ICreationPlaceholder)
                {
                    continue;
                }

                ReaderField rf = new ReaderField(item.ReaderKey, item.FieldName);

                int valId = RuntimeHelpers.GetHashCode(val);

                // indirect mapping, so the MapOfSet will dedup identical valIds for us
                valIdToItems.Put(valId, item);
                if (1 < readerFieldToValIds.Put(rf, valId))
                {
                    valMismatchKeys.Add(rf);
                }
            }

            List <Insanity> insanity = new List <Insanity>(valMismatchKeys.Count * 3);

            insanity.AddRange(CheckValueMismatch(valIdToItems, readerFieldToValIds, valMismatchKeys));
            insanity.AddRange(CheckSubreaders(valIdToItems, readerFieldToValIds));

            return(insanity.ToArray());
        }
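        // Illustrative sketch (not library code): RuntimeHelpers.GetHashCode, used as
        // the valId in the Check method above, hashes by reference identity. Two
        // CacheEntry items wrapping the *same* value object therefore collapse to one
        // valId, while a second, distinct value object cached for the same
        // reader/field yields a second valId and flags that key as a mismatch.
        //
        //     int[] a = new int[] { 1, 2, 3 };
        //     int[] b = new int[] { 1, 2, 3 };  // equal contents, but a different object
        //     bool same     = RuntimeHelpers.GetHashCode(a) == RuntimeHelpers.GetHashCode(a); // true
        //     bool distinct = RuntimeHelpers.GetHashCode(a) != RuntimeHelpers.GetHashCode(b); // true in practice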
Example #7
        /// <summary> Tests a CacheEntry[] for indication of "insane" cache usage.
        /// <p/>
        /// NOTE: FieldCache CreationPlaceholder objects are ignored.
        /// (:TODO: is this a bad idea? are we masking a real problem?)
        /// <p/>
        /// </summary>
        public Insanity[] Check(params CacheEntry[] cacheEntries)
        {
            if (null == cacheEntries || 0 == cacheEntries.Length)
            {
                return(new Insanity[0]);
            }

            if (null != ramCalc)
            {
                for (int i = 0; i < cacheEntries.Length; i++)
                {
                    cacheEntries[i].EstimateSize(ramCalc);
                }
            }

            // the indirect mapping lets MapOfSet dedup identical valIds for us
            //
            // maps the (valId) identityhashCode of cache values to
            // sets of CacheEntry instances
            MapOfSets <int, CacheEntry> valIdToItems = new MapOfSets <int, CacheEntry>(new Dictionary <int, Dictionary <CacheEntry, CacheEntry> >(17));
            // maps ReaderField keys to Sets of ValueIds
            MapOfSets <ReaderField, int> readerFieldToValIds = new MapOfSets <ReaderField, int>(new Dictionary <ReaderField, Dictionary <int, int> >(17));
            //

            // any keys that we know result in more than one valId
            // TODO: This will be a HashSet<T> when we start using .NET Framework 3.5
            Dictionary <ReaderField, ReaderField> valMismatchKeys = new Dictionary <ReaderField, ReaderField>();

            // iterate over all the cacheEntries to get the mappings we'll need
            for (int i = 0; i < cacheEntries.Length; i++)
            {
                CacheEntry    item = cacheEntries[i];
                System.Object val  = item.GetValue();

                if (val is Lucene.Net.Search.CreationPlaceholder)
                {
                    continue;
                }

                ReaderField rf = new ReaderField(item.GetReaderKey(), item.GetFieldName());

                System.Int32 valId = val.GetHashCode();

                // indirect mapping, so the MapOfSet will dedup identical valIds for us
                valIdToItems.Put(valId, item);
                if (1 < readerFieldToValIds.Put(rf, valId))
                {
                    if (!valMismatchKeys.ContainsKey(rf))
                    {
                        valMismatchKeys.Add(rf, rf);
                    }
                }
            }

            List <Insanity> insanity = new List <Insanity>(valMismatchKeys.Count * 3);

            insanity.AddRange(CheckValueMismatch(valIdToItems, readerFieldToValIds, valMismatchKeys));
            insanity.AddRange(CheckSubreaders(valIdToItems, readerFieldToValIds));

            return(insanity.ToArray());
        }
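        // Illustrative note (not library code): before HashSet<T> was available to
        // this port (see the TODO above), a Dictionary<ReaderField, ReaderField>
        // keyed and valued by the same object served as a set. Membership is a
        // ContainsKey check and "add" stores the key as its own value:
        //
        //     var set = new Dictionary<ReaderField, ReaderField>();
        //     if (!set.ContainsKey(rf))   // rf is some ReaderField key
        //     {
        //         set.Add(rf, rf);        // equivalent to HashSet<ReaderField>.Add(rf)
        //     }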