/// <summary> Internal helper method used by Check that iterates over
/// valMismatchKeys and generates a Collection of Insanity
/// instances accordingly.  The MapOfSets are used to populate
/// the Insanity objects.
/// </summary>
/// <seealso cref="InsanityType.VALUEMISMATCH">
/// </seealso>
private List<Insanity> CheckValueMismatch(MapOfSets<int, CacheEntry> valIdToItems, MapOfSets<ReaderField, int> readerFieldToValIds, Dictionary<ReaderField, ReaderField> valMismatchKeys)
{
    List<Insanity> insanity = new List<Insanity>(valMismatchKeys.Count * 3);

    if (valMismatchKeys.Count != 0)
    {
        // we have multiple values for some ReaderFields

        IDictionary<ReaderField, Dictionary<int, int>> rfMap = readerFieldToValIds.GetMap();
        IDictionary<int, Dictionary<CacheEntry, CacheEntry>> valMap = valIdToItems.GetMap();
        foreach (ReaderField rf in valMismatchKeys.Keys)
        {
            List<CacheEntry> badEntries = new List<CacheEntry>(valMismatchKeys.Count * 2);
            foreach (int val in rfMap[rf].Keys)
            {
                foreach (CacheEntry entry in valMap[val].Keys)
                {
                    badEntries.Add(entry);
                }
            }

            insanity.Add(new Insanity(InsanityType.VALUEMISMATCH, "Multiple distinct value objects for " + rf.ToString(), badEntries.ToArray()));
        }
    }
    return insanity;
}
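// Illustrative sketch (not part of the original source): one way a
// VALUEMISMATCH arises is asking the FieldCache for the same reader/field
// with two different native types, which leaves two distinct cached value
// objects behind for a single ReaderField.  The field name is hypothetical,
// and the FieldCache_Fields.DEFAULT singleton with GetInts/GetFloats is
// assumed to match this port's FieldCache API.
//
//     // given any open IndexReader "reader" over the index:
//     int[] asInts = Mono.Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(reader, "id");
//     float[] asFloats = Mono.Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetFloats(reader, "id");
//     // Two valIds now map to the same ReaderField for "id", so Check(...)
//     // puts that ReaderField into valMismatchKeys and this method reports an
//     // InsanityType.VALUEMISMATCH covering both CacheEntry objects.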
/// <summary> Tests a CacheEntry[] for indication of "insane" cache usage.
/// <p/>
/// NOTE: FieldCache CreationPlaceholder objects are ignored.
/// (:TODO: is this a bad idea? are we masking a real problem?)
/// <p/>
/// </summary>
public Insanity[] Check(CacheEntry[] cacheEntries)
{
    if (null == cacheEntries || 0 == cacheEntries.Length)
    {
        return new Insanity[0];
    }

    if (null != ramCalc)
    {
        for (int i = 0; i < cacheEntries.Length; i++)
        {
            cacheEntries[i].EstimateSize(ramCalc);
        }
    }

    // the indirect mapping lets MapOfSets dedup identical valIds for us
    //
    // maps the hash code (valId) of cache values to
    // sets of CacheEntry instances
    MapOfSets<int, CacheEntry> valIdToItems = new MapOfSets<int, CacheEntry>(new Dictionary<int, Dictionary<CacheEntry, CacheEntry>>(17));
    // maps ReaderField keys to Sets of ValueIds
    MapOfSets<ReaderField, int> readerFieldToValIds = new MapOfSets<ReaderField, int>(new Dictionary<ReaderField, Dictionary<int, int>>(17));
    // any keys that we know result in more than one valId
    // TODO: This will be a HashSet<T> when we start using .NET Framework 3.5
    Dictionary<ReaderField, ReaderField> valMismatchKeys = new Dictionary<ReaderField, ReaderField>();

    // iterate over all the cacheEntries to get the mappings we'll need
    for (int i = 0; i < cacheEntries.Length; i++)
    {
        CacheEntry item = cacheEntries[i];
        System.Object val = item.GetValue();

        if (val is Mono.Lucene.Net.Search.CreationPlaceholder)
        {
            continue;
        }

        ReaderField rf = new ReaderField(item.GetReaderKey(), item.GetFieldName());

        System.Int32 valId = val.GetHashCode();

        // indirect mapping, so the MapOfSets will dedup identical valIds for us
        valIdToItems.Put(valId, item);
        if (1 < readerFieldToValIds.Put(rf, valId))
        {
            if (!valMismatchKeys.ContainsKey(rf))
            {
                valMismatchKeys.Add(rf, rf);
            }
        }
    }

    List<Insanity> insanity = new List<Insanity>(valMismatchKeys.Count * 3);

    insanity.AddRange(CheckValueMismatch(valIdToItems, readerFieldToValIds, valMismatchKeys));
    insanity.AddRange(CheckSubreaders(valIdToItems, readerFieldToValIds));

    return insanity.ToArray();
}
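// Illustrative usage sketch (not part of the original source): a caller
// typically pulls the current CacheEntry[] from the FieldCache and hands it
// to Check.  GetCacheEntries() and SetRamUsageEstimator(...) are assumed
// from this port's FieldCache/FieldCacheSanityChecker API; the console
// output is illustrative only.
//
//     CacheEntry[] entries = Mono.Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetCacheEntries();
//     FieldCacheSanityChecker checker = new FieldCacheSanityChecker();
//     checker.SetRamUsageEstimator(new RamUsageEstimator()); // optional; enables the EstimateSize pass above
//     Insanity[] problems = checker.Check(entries);
//     for (int i = 0; i < problems.Length; i++)
//     {
//         System.Console.WriteLine(problems[i].ToString());
//     }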
/// <summary> Internal helper method used by Check that iterates over
/// the keys of readerFieldToValIds and generates a Collection
/// of Insanity instances whenever two (or more) ReaderField instances are
/// found that have an ancestry relationship.
///
/// </summary>
/// <seealso cref="InsanityType.SUBREADER">
/// </seealso>
private List<Insanity> CheckSubreaders(MapOfSets<int, CacheEntry> valIdToItems, MapOfSets<ReaderField, int> readerFieldToValIds)
{
    List<Insanity> insanity = new List<Insanity>(23);

    Dictionary<ReaderField, Dictionary<ReaderField, ReaderField>> badChildren = new Dictionary<ReaderField, Dictionary<ReaderField, ReaderField>>(17);
    MapOfSets<ReaderField, ReaderField> badKids = new MapOfSets<ReaderField, ReaderField>(badChildren); // wrapper

    IDictionary<int, Dictionary<CacheEntry, CacheEntry>> viToItemSets = valIdToItems.GetMap();
    IDictionary<ReaderField, Dictionary<int, int>> rfToValIdSets = readerFieldToValIds.GetMap();

    Dictionary<ReaderField, ReaderField> seen = new Dictionary<ReaderField, ReaderField>(17);

    foreach (ReaderField rf in rfToValIdSets.Keys)
    {
        if (seen.ContainsKey(rf))
        {
            continue;
        }

        System.Collections.IList kids = GetAllDecendentReaderKeys(rf.readerKey);
        for (int i = 0; i < kids.Count; i++)
        {
            ReaderField kid = new ReaderField(kids[i], rf.fieldName);

            if (badChildren.ContainsKey(kid))
            {
                // we've already processed this kid as a ReaderField and found other problems;
                // track those problems as our own
                badKids.Put(rf, kid);
                badKids.PutAll(rf, badChildren[kid]);
                badChildren.Remove(kid);
            }
            else if (rfToValIdSets.ContainsKey(kid))
            {
                // we have cache entries for the kid
                badKids.Put(rf, kid);
            }
            if (!seen.ContainsKey(kid))
            {
                seen.Add(kid, kid);
            }
        }
        if (!seen.ContainsKey(rf))
        {
            seen.Add(rf, rf);
        }
    }

    // every mapping in badKids represents an Insanity
    foreach (ReaderField parent in badChildren.Keys)
    {
        Dictionary<ReaderField, ReaderField> kids = badChildren[parent];

        List<CacheEntry> badEntries = new List<CacheEntry>(kids.Count * 2);

        // put parent entr(ies) in first
        foreach (int val in rfToValIdSets[parent].Keys)
        {
            badEntries.AddRange(viToItemSets[val].Keys);
        }

        // now the entries for the descendants
        foreach (ReaderField kid in kids.Keys)
        {
            foreach (int val in rfToValIdSets[kid].Keys)
            {
                badEntries.AddRange(viToItemSets[val].Keys);
            }
        }

        insanity.Add(new Insanity(InsanityType.SUBREADER, "Found caches for descendants of " + parent.ToString(), badEntries.ToArray()));
    }

    return insanity;
}
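// Illustrative sketch (not part of the original source): the SUBREADER case
// this method detects is the same field being cached against both a composite
// reader and one of its segment (sub) readers.  GetSequentialSubReaders() and
// the FieldCache_Fields.DEFAULT calls are assumed from this port's API; the
// field name is hypothetical.
//
//     // given any open composite IndexReader "top" over the index:
//     IndexReader[] segments = top.GetSequentialSubReaders();
//     int[] fromTop = Mono.Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(top, "price");
//     int[] fromSegment = Mono.Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(segments[0], "price");
//     // "price" now has cache entries for both the parent reader key and a
//     // descendant reader key, so this method reports InsanityType.SUBREADER
//     // with the parent's entries listed before the descendants'.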