        /// <summary>
        /// Internal helper method used by <see cref="Check(FieldCache.CacheEntry[])"/> that iterates over
        /// the keys of <paramref name="readerFieldToValIds"/> and generates an <see cref="ICollection{T}"/>
        /// of <see cref="Insanity"/> instances whenever two (or more) <see cref="ReaderField"/> instances are
        /// found that have an ancestry relationship.
        /// </summary>
        /// <seealso cref="InsanityType.SUBREADER"/>
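        /// <remarks>
        /// Ancestry is determined by walking the descendant reader keys of each
        /// <see cref="ReaderField"/> (via <c>GetAllDescendantReaderKeys</c>) and checking whether a
        /// descendant of the same field also has cache entries.
        /// </remarks>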
        private ICollection<Insanity> CheckSubreaders(MapOfSets<int, FieldCache.CacheEntry> valIdToItems, MapOfSets<ReaderField, int> readerFieldToValIds)
        {
            List<Insanity> insanity = new List<Insanity>(23);

            Dictionary<ReaderField, ISet<ReaderField>> badChildren = new Dictionary<ReaderField, ISet<ReaderField>>(17);
            MapOfSets<ReaderField, ReaderField> badKids = new MapOfSets<ReaderField, ReaderField>(badChildren); // wrapper

            IDictionary<int, ISet<FieldCache.CacheEntry>> viToItemSets = valIdToItems.Map;
            IDictionary<ReaderField, ISet<int>> rfToValIdSets = readerFieldToValIds.Map;

            HashSet<ReaderField> seen = new HashSet<ReaderField>();
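            // 'seen' tracks ReaderField instances already examined (as a parent in the loop
            // below, or as a descendant of an earlier parent) so that no pair is processed twice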

            //IDictionary<ReaderField, ISet<int>>.KeyCollection readerFields = rfToValIdSets.Keys;
            foreach (ReaderField rf in rfToValIdSets.Keys)
            {
                if (seen.Contains(rf))
                {
                    continue;
                }

                IList<object> kids = GetAllDescendantReaderKeys(rf.ReaderKey);
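                // 'kids' holds the cache keys of this reader's descendant (sub)readers;
                // check whether any of them also has cache entries for the same field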
                foreach (object kidKey in kids)
                {
                    ReaderField kid = new ReaderField(kidKey, rf.FieldName);

                    // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
                    if (badChildren.TryGetValue(kid, out ISet<ReaderField> badKid))
                    {
                        // we've already processed this kid as an RF and found other problems
                        // track those problems as our own
                        badKids.Put(rf, kid);
                        badKids.PutAll(rf, badKid);
                        badChildren.Remove(kid);
                    }
                    else if (rfToValIdSets.ContainsKey(kid))
                    {
                        // we have cache entries for the kid
                        badKids.Put(rf, kid);
                    }
                    seen.Add(kid);
                }
                seen.Add(rf);
            }

            // every mapping in badKids (i.e. in the wrapped badChildren dictionary) represents an Insanity
            foreach (ReaderField parent in badChildren.Keys)
            {
                ISet<ReaderField> kids = badChildren[parent];

                List<FieldCache.CacheEntry> badEntries = new List<FieldCache.CacheEntry>(kids.Count * 2);

                // put parent entr(ies) in first
                {
                    foreach (int value in rfToValIdSets[parent])
                    {
                        badEntries.AddRange(viToItemSets[value]);
                    }
                }

                // now the entries for the descendants
                foreach (ReaderField kid in kids)
                {
                    foreach (int value in rfToValIdSets[kid])
                    {
                        badEntries.AddRange(viToItemSets[value]);
                    }
                }

                FieldCache.CacheEntry[] badness = badEntries.ToArray();

                insanity.Add(new Insanity(InsanityType.SUBREADER, "Found caches for descendants of " + parent.ToString(), badness));
            }

            return insanity;
        }
        /// <summary>
        /// Tests a <see cref="FieldCache.CacheEntry"/>[] for indication of "insane" cache usage.
        /// <para>
        /// <b>NOTE:</b> FieldCache <c>CreationPlaceholder</c> objects are ignored.
        /// (:TODO: is this a bad idea? are we masking a real problem?)
        /// </para>
        /// </summary>
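        /// <example>
        /// A minimal usage sketch (variable names are illustrative; assumes the entries are
        /// obtained from <c>FieldCache.DEFAULT.GetCacheEntries()</c>, as the static
        /// <c>CheckSanity</c> helper does):
        /// <code>
        /// FieldCacheSanityChecker checker = new FieldCacheSanityChecker();
        /// Insanity[] problems = checker.Check(FieldCache.DEFAULT.GetCacheEntries());
        /// foreach (Insanity problem in problems)
        /// {
        ///     Console.WriteLine(problem); // Insanity.ToString() describes the conflicting cache entries
        /// }
        /// </code>
        /// </example>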
        public Insanity[] Check(params FieldCache.CacheEntry[] cacheEntries)
        {
            if (null == cacheEntries || 0 == cacheEntries.Length)
            {
                return new Insanity[0];
            }

            if (estimateRam)
            {
                for (int i = 0; i < cacheEntries.Length; i++)
                {
                    cacheEntries[i].EstimateSize();
                }
            }

            // the indirect mapping lets MapOfSets dedup identical valIds for us
            // maps the (valId) identity hash code of cache values to
            // sets of CacheEntry instances
            MapOfSets<int, FieldCache.CacheEntry> valIdToItems = new MapOfSets<int, FieldCache.CacheEntry>(new Dictionary<int, ISet<FieldCache.CacheEntry>>(17));
            // maps ReaderField keys to Sets of ValueIds
            MapOfSets<ReaderField, int> readerFieldToValIds = new MapOfSets<ReaderField, int>(new Dictionary<ReaderField, ISet<int>>(17));

            // any keys that we know result in more than one valId
            ISet<ReaderField> valMismatchKeys = new HashSet<ReaderField>();

            // iterate over all the cacheEntries to get the mappings we'll need
            for (int i = 0; i < cacheEntries.Length; i++)
            {
                FieldCache.CacheEntry item = cacheEntries[i];
                object val = item.Value;

                // It's OK to have dup entries, where one is e.g.
                // a float[] and the other is the IBits (from
                // GetDocsWithField())
                if (val is IBits)
                {
                    continue;
                }

                if (val is Lucene.Net.Search.FieldCache.CreationPlaceholder)
                {
                    continue;
                }

                ReaderField rf = new ReaderField(item.ReaderKey, item.FieldName);

                // RuntimeHelpers.GetHashCode is identity-based (the .NET analogue of Java's
                // System.identityHashCode), so valId identifies the cached value instance itself
                int valId = RuntimeHelpers.GetHashCode(val);

                // indirect mapping, so the MapOfSets will dedup identical valIds for us
                valIdToItems.Put(valId, item);
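                // MapOfSets.Put returns the size of the value set after the add; more than one
                // distinct valId for the same reader/field means conflicting cache values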
                if (1 < readerFieldToValIds.Put(rf, valId))
                {
                    valMismatchKeys.Add(rf);
                }
            }

            List<Insanity> insanity = new List<Insanity>(valMismatchKeys.Count * 3);

            insanity.AddRange(CheckValueMismatch(valIdToItems, readerFieldToValIds, valMismatchKeys));
            insanity.AddRange(CheckSubreaders(valIdToItems, readerFieldToValIds));

            return insanity.ToArray();
        }