Example No. 1
		/// <summary> Internal helper method used by check that iterates over 
		/// the keys of readerFieldToValIds and generates a Collection 
		/// of Insanity instances whenever two (or more) ReaderField instances are 
		/// found that have an ancestry relationship.
		/// 
		/// </summary>
		/// <seealso cref="InsanityType.SUBREADER">
		/// </seealso>
		private List<Insanity> CheckSubreaders(MapOfSets<int,CacheEntry> valIdToItems, MapOfSets<ReaderField,int> readerFieldToValIds)
		{
			
            List<Insanity> insanity = new List<Insanity>(23);

            Dictionary<ReaderField, Dictionary<ReaderField, ReaderField>> badChildren = new Dictionary<ReaderField, Dictionary<ReaderField, ReaderField>>(17);
			MapOfSets<ReaderField, ReaderField> badKids = new MapOfSets<ReaderField, ReaderField>(badChildren); // wrapper

            IDictionary<int, Dictionary<CacheEntry, CacheEntry>> viToItemSets = valIdToItems.GetMap();
            IDictionary<ReaderField, Dictionary<int, int>> rfToValIdSets = readerFieldToValIds.GetMap();

            Dictionary<ReaderField, ReaderField> seen = new Dictionary<ReaderField, ReaderField>(17);

            foreach (ReaderField rf in rfToValIdSets.Keys)
            {
                if (seen.ContainsKey(rf))
                    continue;

                System.Collections.IList kids = GetAllDecendentReaderKeys(rf.readerKey);
				for (int i = 0; i < kids.Count; i++)
				{
					ReaderField kid = new ReaderField(kids[i], rf.fieldName);

					if (badChildren.ContainsKey(kid))
					{
						// we've already processed this kid as RF and found other problems
						// track those problems as our own
						badKids.Put(rf, kid);
						badKids.PutAll(rf, badChildren[kid]);
						badChildren.Remove(kid);
					}
					else if (rfToValIdSets.ContainsKey(kid))
					{
						// we have cache entries for the kid
						badKids.Put(rf, kid);
					}
                    if (!seen.ContainsKey(kid))
                    {
                        seen.Add(kid, kid);
                    }
				}
                if (!seen.ContainsKey(rf))
                {
                    seen.Add(rf, rf);
                }
			}
			
			// every mapping in badKids represents an Insanity
			foreach (ReaderField parent in badChildren.Keys)
			{
				Dictionary<ReaderField,ReaderField> kids = badChildren[parent];
				
				List<CacheEntry> badEntries = new List<CacheEntry>(kids.Count * 2);
				
				// put parent entr(ies) in first
				{
					foreach (int val in rfToValIdSets[parent].Keys)
					{
						badEntries.AddRange(viToItemSets[val].Keys);
					}
				}
				
				// now the entries for the descendants
				foreach (ReaderField kid in kids.Keys)
				{
					foreach (int val in rfToValIdSets[kid].Keys)
					{
						badEntries.AddRange(viToItemSets[val].Keys);
					}
				}
				
				insanity.Add(new Insanity(InsanityType.SUBREADER, "Found caches for descendants of " + parent.ToString(), badEntries.ToArray()));
			}
			
			return insanity;
		}
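
These first examples only exercise three members of the MapOfSets helper: Put, PutAll and GetMap. A minimal sketch of such a multimap, backed by the same Dictionary-of-Dictionaries shape these older examples pass into the constructor, might look like the following; the class layout is an assumption inferred from the calls above, not the actual Lucene.Net source:

    using System.Collections.Generic;

    // Hypothetical minimal MapOfSets<TKey, TValue>: a multimap that stores each
    // value set as a Dictionary<TValue, TValue> inside the dictionary supplied
    // by the caller, mirroring the Put/PutAll/GetMap calls used above.
    public class MapOfSets<TKey, TValue>
    {
        private readonly IDictionary<TKey, Dictionary<TValue, TValue>> map;

        public MapOfSets(IDictionary<TKey, Dictionary<TValue, TValue>> map)
        {
            this.map = map;
        }

        // Adds value to the set stored under key and returns that set's new size,
        // which is how Check detects a second distinct valId for the same key.
        public int Put(TKey key, TValue value)
        {
            Dictionary<TValue, TValue> set;
            if (!map.TryGetValue(key, out set))
            {
                set = new Dictionary<TValue, TValue>();
                map[key] = set;
            }
            set[value] = value;
            return set.Count;
        }

        // Adds every value from the supplied set under key.
        public void PutAll(TKey key, Dictionary<TValue, TValue> values)
        {
            foreach (TValue v in values.Keys)
            {
                Put(key, v);
            }
        }

        // Exposes the backing dictionary so callers can iterate it directly.
        public IDictionary<TKey, Dictionary<TValue, TValue>> GetMap()
        {
            return map;
        }
    }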
Example No. 2
		/// <summary> Internal helper method used by check that iterates over 
		/// valMismatchKeys and generates a Collection of Insanity 
		/// instances accordingly.  The MapOfSets are used to populate 
		/// the Insanity objects.
		/// </summary>
		/// <seealso cref="InsanityType.VALUEMISMATCH">
		/// </seealso>
		private List<Insanity> CheckValueMismatch(MapOfSets<int,CacheEntry> valIdToItems, MapOfSets<ReaderField,int> readerFieldToValIds, Dictionary<ReaderField,ReaderField> valMismatchKeys)
		{
			
			List<Insanity> insanity = new List<Insanity>(valMismatchKeys.Count * 3);
			
			if (valMismatchKeys.Count != 0)
			{
				// we have multiple values for some ReaderFields
				
                IDictionary<ReaderField,Dictionary<int,int>> rfMap = readerFieldToValIds.GetMap();
                IDictionary<int,Dictionary<CacheEntry,CacheEntry>> valMap = valIdToItems.GetMap();
                foreach (ReaderField rf in valMismatchKeys.Keys)
                {
                    List<CacheEntry> badEntries = new List<CacheEntry>(valMismatchKeys.Count * 2);
                    foreach (int val in rfMap[rf].Keys)
                    {
                        foreach (CacheEntry entry in valMap[val].Keys)
                        {
                            badEntries.Add(entry);
                        }
                    }

                    insanity.Add(new Insanity(InsanityType.VALUEMISMATCH, "Multiple distinct value objects for " + rf.ToString(), badEntries.ToArray()));
                }
            }
			return insanity;
		}
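
Both helpers only ever construct Insanity through the three-argument constructor seen above. A minimal sketch of such a holder, reusing the CacheEntry and InsanityType types from these examples, could look like the following; the property names are assumptions, not the Lucene.Net definitions:

    // Hypothetical minimal Insanity: an immutable record of what went wrong,
    // which kind of problem it is, and which cache entries are involved.
    public class Insanity
    {
        private readonly InsanityType type;
        private readonly string msg;
        private readonly CacheEntry[] entries;

        public Insanity(InsanityType type, string msg, params CacheEntry[] entries)
        {
            this.type = type;
            this.msg = msg;
            this.entries = entries;
        }

        public InsanityType Type { get { return type; } }
        public string Msg { get { return msg; } }
        public CacheEntry[] CacheEntries { get { return entries; } }

        public override string ToString()
        {
            return type + ": " + msg + " (" + entries.Length + " cache entries)";
        }
    }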
Example No. 3
		/// <summary> Tests a CacheEntry[] for indication of "insane" cache usage.
		/// <p/>
		/// NOTE: FieldCache CreationPlaceholder objects are ignored.
		/// (:TODO: is this a bad idea? are we masking a real problem?)
		/// <p/>
		/// </summary>
		public Insanity[] Check(CacheEntry[] cacheEntries)
		{
			if (null == cacheEntries || 0 == cacheEntries.Length)
				return new Insanity[0];
			
			if (null != ramCalc)
			{
				for (int i = 0; i < cacheEntries.Length; i++)
				{
					cacheEntries[i].EstimateSize(ramCalc);
				}
			}
			
			// the indirect mapping lets MapOfSet dedup identical valIds for us
			//
			// maps the (valId) identityhashCode of cache values to 
			// sets of CacheEntry instances
			MapOfSets<int,CacheEntry> valIdToItems = new MapOfSets<int,CacheEntry>(new Dictionary<int,Dictionary<CacheEntry,CacheEntry>>(17));
			// maps ReaderField keys to Sets of ValueIds
			MapOfSets<ReaderField,int> readerFieldToValIds = new MapOfSets<ReaderField,int>(new Dictionary<ReaderField,Dictionary<int,int>>(17));
			//
			
			// any keys that we know result in more than one valId
            // TODO: This will be a HashSet<T> when we start using .NET Framework 3.5
            Dictionary<ReaderField, ReaderField> valMismatchKeys = new Dictionary<ReaderField, ReaderField>();
			
			// iterate over all the cacheEntries to get the mappings we'll need
			for (int i = 0; i < cacheEntries.Length; i++)
			{
				CacheEntry item = cacheEntries[i];
				System.Object val = item.GetValue();
				
				if (val is Mono.Lucene.Net.Search.CreationPlaceholder)
					continue;
				
				ReaderField rf = new ReaderField(item.GetReaderKey(), item.GetFieldName());
				
				System.Int32 valId = val.GetHashCode();
				
				// indirect mapping, so the MapOfSet will dedup identical valIds for us
				valIdToItems.Put(valId, item);
				if (1 < readerFieldToValIds.Put(rf, valId))
				{
                    if (!valMismatchKeys.ContainsKey(rf))
                    {
                        valMismatchKeys.Add(rf, rf);
                    }
				}
			}
			
			List<Insanity> insanity = new List<Insanity>(valMismatchKeys.Count * 3);
			
			insanity.AddRange(CheckValueMismatch(valIdToItems, readerFieldToValIds, valMismatchKeys));
			insanity.AddRange(CheckSubreaders(valIdToItems, readerFieldToValIds));
			
			return insanity.ToArray();
		}
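
In practice the entries handed to Check usually come straight from the field cache. A hedged usage sketch for this older port follows; FieldCache_Fields.DEFAULT, GetCacheEntries() and the parameterless FieldCacheSanityChecker constructor are assumptions about this port's API surface rather than something shown above:

    using Mono.Lucene.Net.Search;
    using Mono.Lucene.Net.Util;

    public static class SanityCheckUsage
    {
        public static void Report()
        {
            // Pull whatever the default field cache currently holds...
            CacheEntry[] entries = FieldCache_Fields.DEFAULT.GetCacheEntries();

            // ...and run the checker over it.
            Insanity[] problems = new FieldCacheSanityChecker().Check(entries);

            foreach (Insanity problem in problems)
            {
                // ToString() is enough for a quick diagnostic dump.
                System.Console.Error.WriteLine(problem.ToString());
            }
        }
    }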
Example No. 4
        /// <summary>
        /// Tests a CacheEntry[] for indication of "insane" cache usage.
        /// <p>
        /// <b>NOTE:</b> FieldCache CreationPlaceholder objects are ignored.
        /// (:TODO: is this a bad idea? are we masking a real problem?)
        /// </p>
        /// </summary>
        public Insanity[] Check(params FieldCache.CacheEntry[] cacheEntries)
        {
            if (null == cacheEntries || 0 == cacheEntries.Length)
            {
                return new Insanity[0];
            }

            if (EstimateRam)
            {
                for (int i = 0; i < cacheEntries.Length; i++)
                {
                    cacheEntries[i].EstimateSize();
                }
            }

            // the indirect mapping lets MapOfSet dedup identical valIds for us
            // maps the (valId) identityhashCode of cache values to
            // sets of CacheEntry instances
            MapOfSets<int, FieldCache.CacheEntry> valIdToItems = new MapOfSets<int, FieldCache.CacheEntry>(new Dictionary<int, HashSet<FieldCache.CacheEntry>>(17));
            // maps ReaderField keys to Sets of ValueIds
            MapOfSets<ReaderField, int> readerFieldToValIds = new MapOfSets<ReaderField, int>(new Dictionary<ReaderField, HashSet<int>>(17));

            // any keys that we know result in more than one valId
            ISet<ReaderField> valMismatchKeys = new HashSet<ReaderField>();

            // iterate over all the cacheEntries to get the mappings we'll need
            for (int i = 0; i < cacheEntries.Length; i++)
            {
                FieldCache.CacheEntry item = cacheEntries[i];
                object val = item.Value;

                // It's OK to have dup entries, where one is eg
                // float[] and the other is the Bits (from
                // getDocWithField())
                if (val is Bits)
                {
                    continue;
                }

                if (val is Lucene.Net.Search.FieldCache.CreationPlaceholder)
                {
                    continue;
                }

                ReaderField rf = new ReaderField(item.ReaderKey, item.FieldName);

                int valId = val.GetHashCode();

                // indirect mapping, so the MapOfSet will dedup identical valIds for us
                valIdToItems.Put(valId, item);
                if (1 < readerFieldToValIds.Put(rf, valId))
                {
                    valMismatchKeys.Add(rf);
                }
            }

            List<Insanity> insanity = new List<Insanity>(valMismatchKeys.Count * 3);

            insanity.AddRange(CheckValueMismatch(valIdToItems, readerFieldToValIds, valMismatchKeys));
            insanity.AddRange(CheckSubreaders(valIdToItems, readerFieldToValIds));

            return insanity.ToArray();
        }
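
The comments above speak of the "identityhashCode" of each cache value, but the port keys valIds on val.GetHashCode(), which is only an identity hash for types that do not override it. A small, self-contained sketch of the difference; RuntimeHelpers.GetHashCode is the .NET counterpart of Java's System.identityHashCode:

    using System;
    using System.Runtime.CompilerServices;

    public static class IdentityHashDemo
    {
        public static void Main()
        {
            // Two distinct string objects with the same contents.
            string a = new string('x', 3);
            string b = new string('x', 3);

            // String overrides GetHashCode, so equal contents give equal hashes.
            Console.WriteLine(a.GetHashCode() == b.GetHashCode());   // True

            // RuntimeHelpers.GetHashCode hashes by reference identity, so two
            // distinct objects (almost always) get different codes.
            Console.WriteLine(RuntimeHelpers.GetHashCode(a) == RuntimeHelpers.GetHashCode(b));
        }
    }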
Example No. 5
        /// <summary>
        /// Internal helper method used by check that iterates over
        /// the keys of readerFieldToValIds and generates a Collection
        /// of Insanity instances whenever two (or more) ReaderField instances are
        /// found that have an ancestry relationship.
        /// </summary>
        /// <seealso cref="InsanityType.SUBREADER"/>
        private ICollection<Insanity> CheckSubreaders(MapOfSets<int, FieldCache.CacheEntry> valIdToItems, MapOfSets<ReaderField, int> readerFieldToValIds)
        {
            List<Insanity> insanity = new List<Insanity>(23);

            Dictionary<ReaderField, HashSet<ReaderField>> badChildren = new Dictionary<ReaderField, HashSet<ReaderField>>(17);
            MapOfSets<ReaderField, ReaderField> badKids = new MapOfSets<ReaderField, ReaderField>(badChildren); // wrapper

            IDictionary<int, HashSet<FieldCache.CacheEntry>> viToItemSets = valIdToItems.Map;
            IDictionary<ReaderField, HashSet<int>> rfToValIdSets = readerFieldToValIds.Map;

            HashSet<ReaderField> seen = new HashSet<ReaderField>();

            //IDictionary<ReaderField, ISet<int>>.KeyCollection readerFields = rfToValIdSets.Keys;
            foreach (ReaderField rf in rfToValIdSets.Keys)
            {
                if (seen.Contains(rf))
                {
                    continue;
                }

                IList<object> kids = GetAllDescendantReaderKeys(rf.ReaderKey);
                foreach (object kidKey in kids)
                {
                    ReaderField kid = new ReaderField(kidKey, rf.FieldName);

                    if (badChildren.ContainsKey(kid))
                    {
                        // we've already processed this kid as RF and found other problems
                        // track those problems as our own
                        badKids.Put(rf, kid);
                        badKids.PutAll(rf, badChildren[kid]);
                        badChildren.Remove(kid);
                    }
                    else if (rfToValIdSets.ContainsKey(kid))
                    {
                        // we have cache entries for the kid
                        badKids.Put(rf, kid);
                    }
                    seen.Add(kid);
                }
                seen.Add(rf);
            }

            // every mapping in badKids represents an Insanity
            foreach (ReaderField parent in badChildren.Keys)
            {
                HashSet<ReaderField> kids = badChildren[parent];

                List<FieldCache.CacheEntry> badEntries = new List<FieldCache.CacheEntry>(kids.Count * 2);

                // put parent entr(ies) in first
                {
                    foreach (int value in rfToValIdSets[parent])
                    {
                        badEntries.AddRange(viToItemSets[value]);
                    }
                }

                // now the entries for the descendants
                foreach (ReaderField kid in kids)
                {
                    foreach (int value in rfToValIdSets[kid])
                    {
                        badEntries.AddRange(viToItemSets[value]);
                    }
                }

                FieldCache.CacheEntry[] badness = badEntries.ToArray(); // LUCENE TO-DO: the original passed a pre-sized badness array to toArray

                insanity.Add(new Insanity(InsanityType.SUBREADER, "Found caches for descendants of " + parent.ToString(), badness));
            }

            return insanity;
        }
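
The ContainsKey and HashSet lookups above only work because ReaderField compares by value: same reader key, same field name. A minimal sketch of such a key type follows; the real Lucene.Net class may differ in detail, so treat the member names and hash formula as assumptions:

    using System.Runtime.CompilerServices;

    // Hypothetical minimal ReaderField: two instances are equal when they wrap
    // the same reader key instance and the same field name, so a "kid" built
    // from a descendant reader key can be found in ReaderField-keyed maps.
    public sealed class ReaderField
    {
        public object ReaderKey { get; private set; }
        public string FieldName { get; private set; }

        public ReaderField(object readerKey, string fieldName)
        {
            ReaderKey = readerKey;
            FieldName = fieldName;
        }

        public override int GetHashCode()
        {
            // Reader keys are compared by identity, so hash them by identity too.
            return RuntimeHelpers.GetHashCode(ReaderKey) * 31 + FieldName.GetHashCode();
        }

        public override bool Equals(object obj)
        {
            ReaderField other = obj as ReaderField;
            return other != null
                && object.ReferenceEquals(ReaderKey, other.ReaderKey)
                && FieldName.Equals(other.FieldName);
        }

        public override string ToString()
        {
            return ReaderKey + "+" + FieldName;
        }
    }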
Example No. 6
        /// <summary>
        /// Internal helper method used by check that iterates over
        /// valMismatchKeys and generates a Collection of Insanity
        /// instances accordingly.  The MapOfSets are used to populate
        /// the Insanity objects. </summary>
        /// <seealso cref="InsanityType.VALUEMISMATCH"/>
        private ICollection<Insanity> CheckValueMismatch(MapOfSets<int, FieldCache.CacheEntry> valIdToItems, MapOfSets<ReaderField, int> readerFieldToValIds, ISet<ReaderField> valMismatchKeys)
        {
            List<Insanity> insanity = new List<Insanity>(valMismatchKeys.Count * 3);

            if (valMismatchKeys.Count != 0)
            {
                // we have multiple values for some ReaderFields

                IDictionary<ReaderField, HashSet<int>> rfMap = readerFieldToValIds.Map;
                IDictionary<int, HashSet<FieldCache.CacheEntry>> valMap = valIdToItems.Map;
                foreach (ReaderField rf in valMismatchKeys)
                {
                    IList<FieldCache.CacheEntry> badEntries = new List<FieldCache.CacheEntry>(valMismatchKeys.Count * 2);
                    foreach (int value in rfMap[rf])
                    {
                        foreach (FieldCache.CacheEntry cacheEntry in valMap[value])
                        {
                            badEntries.Add(cacheEntry);
                        }
                    }

                    FieldCache.CacheEntry[] badness = badEntries.ToArray(); // LUCENE TO-DO: the original passed a pre-sized badness array to toArray

                    insanity.Add(new Insanity(InsanityType.VALUEMISMATCH, "Multiple distinct value objects for " + rf.ToString(), badness));
                }
            }
            return insanity;
        }
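
Finally, the InsanityType values referenced throughout (SUBREADER, VALUEMISMATCH) behave like a type-safe enum: each is a named singleton used to tag the kind of problem found. A minimal sketch under that assumption, showing only the two constants used in these examples:

    // Hypothetical minimal InsanityType: named singletons used to tag Insanity
    // instances; the real class may carry additional constants.
    public sealed class InsanityType
    {
        private readonly string label;

        private InsanityType(string label)
        {
            this.label = label;
        }

        public override string ToString()
        {
            return label;
        }

        // Caches exist for both a reader and one of its descendant readers.
        public static readonly InsanityType SUBREADER = new InsanityType("SUBREADER");

        // A single reader/field pair maps to more than one distinct value object.
        public static readonly InsanityType VALUEMISMATCH = new InsanityType("VALUEMISMATCH");
    }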