private void PrintNewInsanity(System.IO.StreamWriter infoStream, System.Object value_Renamed)
{
    FieldCacheSanityChecker.Insanity[] insanities = FieldCacheSanityChecker.CheckSanity(wrapper);
    for (int i = 0; i < insanities.Length; i++)
    {
        FieldCacheSanityChecker.Insanity insanity = insanities[i];
        CacheEntry[] entries = insanity.GetCacheEntries();
        for (int j = 0; j < entries.Length; j++)
        {
            if (entries[j].GetValue() == value_Renamed)
            {
                // OK this insanity involves our entry
                infoStream.WriteLine("WARNING: new FieldCache insanity created\nDetails: " + insanity.ToString());
                infoStream.WriteLine("\nStack:\n");
                // Environment.StackTrace captures the current call stack; printing an
                // unthrown Exception instance would emit no stack information.
                infoStream.WriteLine(System.Environment.StackTrace);
                break;
            }
        }
    }
}
public virtual void TestSort()
{
    this.r = NewRandom();

    // reverse & not
    // all types
    // restrictive & non restrictive searches (on contents)
    Create();

    Sort[] sorts = new Sort[50];
    int sortCount = 0;

    for (int r = 0; r < 2; r++)
    {
        Sort sort;
        bool reverse = 1 == r;

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField("byte", SortField.BYTE, reverse) });

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField("short", SortField.SHORT, reverse) });

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField("int", SortField.INT, reverse) });

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField("long", SortField.LONG, reverse) });

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField("float", SortField.FLOAT, reverse) });

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField("double", SortField.DOUBLE, reverse) });

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField("string", SortField.STRING_VAL, reverse) });

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField("stringIdx", SortField.STRING, reverse) });

        //sorts[sortCount++] = sort = new Sort();
        //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD, reverse)});

        //sorts[sortCount++] = sort = new Sort();
        //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL, reverse)});

        //sorts[sortCount++] = sort = new Sort();
        //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL_DEM, reverse)});

        //sorts[sortCount++] = sort = new Sort();
        //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL_DEM2, reverse)});

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField(null, SortField.SCORE, reverse) });

        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[] { new SortField(null, SortField.DOC, reverse) });
    }

    Query[] queries = new Query[4];
    queries[0] = new MatchAllDocsQuery();
    queries[1] = new TermQuery(new Term("contents", "x")); // matches every 10th doc
    queries[2] = new TermQuery(new Term("contents", "y")); // matches every 100th doc
    queries[3] = new TermQuery(new Term("contents", "z")); // matches every 1000th doc

    for (int sx = 0; sx < 3; sx++)
    {
        IndexSearcher searcher;
        if (sx == 0)
        {
            searcher = searcherSingleSegment;
        }
        else if (sx == 1)
        {
            searcher = searcherFewSegment;
        }
        else
        {
            searcher = searcherMultiSegment;
        }

        for (int qx = 0; qx < queries.Length; qx++)
        {
            Query query = queries[qx];

            for (int q = 0; q < 3; q++)
            {
                int queueSize;
                if (q == 0)
                {
                    queueSize = 10;
                }
                else if (q == 1)
                {
                    queueSize = 100;
                }
                else
                {
                    queueSize = 1000;
                }

                for (int s = 0; s < sortCount; s++)
                {
                    Sort sort1 = sorts[s];

                    for (int s2 = -1; s2 < sortCount; s2++)
                    {
                        Sort sort;
                        if (s2 == -1)
                        {
                            // Single field sort
                            sort = sort1;
                        }
                        else
                        {
                            sort = new Sort(new SortField[] { sort1.GetSort()[0], sorts[s2].GetSort()[0] });
                        }

                        // Old
                        Sort oldSort = GetOldSort(sort);

                        if (VERBOSE)
                        {
                            System.Console.Out.WriteLine("query=" + query);
                            if (sx == 0)
                            {
                                System.Console.Out.WriteLine(" single-segment index");
                            }
                            else if (sx == 1)
                            {
                                System.Console.Out.WriteLine(" few-segment index");
                            }
                            else
                            {
                                System.Console.Out.WriteLine(" many-segment index");
                            }
                            System.Console.Out.WriteLine(" numHit=" + queueSize);
                            System.Console.Out.WriteLine(" old=" + oldSort);
                            System.Console.Out.WriteLine(" new=" + sort);
                        }

                        TopDocs newHits = searcher.Search(query, null, queueSize, sort);
                        TopDocs oldHits = searcher.Search(query, null, queueSize, oldSort);

                        Compare(oldHits, newHits);
                    }
                }
            }
        }
    }

    // we explicitly test the old sort method and
    // compare with the new, so we expect to see SUBREADER
    // sanity checks fail.
    Insanity[] insanity = FieldCacheSanityChecker.CheckSanity(Lucene.Net.Search.FieldCache_Fields.DEFAULT);
    try
    {
        int ignored = 0;
        for (int i = 0; i < insanity.Length; i++)
        {
            if (insanity[i].GetType() == InsanityType.SUBREADER)
            {
                insanity[i] = new Insanity(InsanityType.EXPECTED, insanity[i].GetMsg(), insanity[i].GetCacheEntries());
                ignored++;
            }
        }
        Assert.AreEqual(ignored, insanity.Length, "Not all insane field cache usage was expected");
        insanity = null;
    }
    finally
    {
        // report this in the event of any exception/failure
        // if no failure, then insanity will be null
        if (null != insanity)
        {
            System.IO.StreamWriter temp_writer;
            temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardError(), System.Console.Error.Encoding);
            temp_writer.AutoFlush = true;
            DumpArray(GetTestLabel() + ": Insane FieldCache usage(s)", insanity, temp_writer);
        }
    }

    // we've already checked FieldCache, purge so tearDown doesn't complain
    PurgeFieldCache(Lucene.Net.Search.FieldCache_Fields.DEFAULT);

    Close();
}