public virtual void TestGetScores()
{
    Directory directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
    writer.Commit();
    IndexReader ir = writer.Reader;
    writer.Dispose();
    IndexSearcher searcher = NewSearcher(ir);
    Weight fake = (new TermQuery(new Term("fake", "weight"))).CreateWeight(searcher);
    Scorer s = new SimpleScorer(fake);
    ScoreCachingCollector scc = new ScoreCachingCollector(Scores.Length);
    scc.Scorer = s;

    // We need to iterate on the scorer so that its doc() advances.
    int doc;
    while ((doc = s.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
    {
        scc.Collect(doc);
    }

    for (int i = 0; i < Scores.Length; i++)
    {
        Assert.AreEqual(Scores[i], scc.Mscores[i], 0f);
    }

    ir.Dispose();
    directory.Dispose();
}
public virtual void TestGetScores()
{
    Directory directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        Random, directory);
    writer.Commit();
    IndexReader ir = writer.GetReader();
    writer.Dispose();
    IndexSearcher searcher = NewSearcher(ir);
    Weight fake = (new TermQuery(new Term("fake", "weight"))).CreateWeight(searcher);
    Scorer s = new SimpleScorer(fake);
    ScoreCachingCollector scc = new ScoreCachingCollector(Scores.Length);
    scc.SetScorer(s);

    // We need to iterate on the scorer so that its doc() advances.
    int doc;
    while ((doc = s.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
    {
        scc.Collect(doc);
    }

    for (int i = 0; i < Scores.Length; i++)
    {
        Assert.AreEqual(Scores[i], scc.Mscores[i], 0f);
    }

    ir.Dispose();
    directory.Dispose();
}
public virtual void TestGetScores()
{
    Directory directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), directory);
    writer.Commit();
    IndexReader ir = writer.Reader;
    writer.Dispose();
    IndexSearcher searcher = NewSearcher(ir);
    Weight fake = (new TermQuery(new Term("fake", "weight"))).CreateWeight(searcher);
    Scorer s = new SimpleScorer(fake);
    ScoreCachingCollector scc = new ScoreCachingCollector(Scores.Length);
    scc.Scorer = s;

    // We need to iterate on the scorer so that its doc() advances.
    int doc;
    while ((doc = s.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
    {
        scc.Collect(doc);
    }

    for (int i = 0; i < Scores.Length; i++)
    {
        Assert.AreEqual(Scores[i], scc.Mscores[i], 0f);
    }

    ir.Dispose();
    directory.Dispose();
}
public virtual void TestNegativeScores()
{
    // The Top*Collectors previously filtered out documents with <= 0 scores. This
    // behavior has changed. This test checks that if PositiveOnlyScoresFilter
    // wraps one of these collectors, documents with <= 0 scores are indeed
    // filtered.
    int numPositiveScores = 0;
    for (int i = 0; i < Scores.Length; i++)
    {
        if (Scores[i] > 0)
        {
            ++numPositiveScores;
        }
    }

    Directory directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        Random, directory);
    writer.Commit();
    IndexReader ir = writer.GetReader();
    writer.Dispose();
    IndexSearcher searcher = NewSearcher(ir);
    Weight fake = (new TermQuery(new Term("fake", "weight"))).CreateWeight(searcher);
    Scorer s = new SimpleScorer(fake);
    TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.Create(Scores.Length, true);
    ICollector c = new PositiveScoresOnlyCollector(tdc);
    c.SetScorer(s);
    while (s.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
    {
        c.Collect(0);
    }

    TopDocs td = tdc.GetTopDocs();
    ScoreDoc[] sd = td.ScoreDocs;
    Assert.AreEqual(numPositiveScores, td.TotalHits);
    for (int i = 0; i < sd.Length; i++)
    {
        Assert.IsTrue(sd[i].Score > 0, "only positive scores should return: " + sd[i].Score);
    }

    ir.Dispose();
    directory.Dispose();
}
public override void SetUp()
{
    base.SetUp();
    Dir = NewDirectory();
    Writer = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
    int numDocs = AtLeast(100);
    for (int i = 0; i < numDocs; i++)
    {
        Writer.AddDocument(new Document());
        if (Rarely())
        {
            Writer.Commit();
        }
    }
}
public virtual void TestBasic()
{
    AssumeTrue("Test requires SortedSetDV support", DefaultCodecSupportsSortedSet());
    Directory dir = NewDirectory();

    FacetsConfig config = new FacetsConfig();
    config.SetMultiValued("a", true);
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);

    Document doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo"));
    doc.Add(new SortedSetDocValuesFacetField("a", "bar"));
    doc.Add(new SortedSetDocValuesFacetField("a", "zoo"));
    doc.Add(new SortedSetDocValuesFacetField("b", "baz"));
    writer.AddDocument(config.Build(doc));
    if (Random().NextBoolean())
    {
        writer.Commit();
    }

    doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo"));
    writer.AddDocument(config.Build(doc));

    // NRT open
    IndexSearcher searcher = NewSearcher(writer.Reader);

    // Per-top-reader state:
    SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.IndexReader);

    FacetsCollector c = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), c);

    SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);

    Assert.AreEqual("dim=a path=[] value=4 childCount=3\n  foo (2)\n  bar (1)\n  zoo (1)\n", facets.GetTopChildren(10, "a").ToString());
    Assert.AreEqual("dim=b path=[] value=1 childCount=1\n  baz (1)\n", facets.GetTopChildren(10, "b").ToString());

    // DrillDown:
    DrillDownQuery q = new DrillDownQuery(config);
    q.Add("a", "foo");
    q.Add("b", "baz");
    TopDocs hits = searcher.Search(q, 1);
    Assert.AreEqual(1, hits.TotalHits);

    IOUtils.Close(writer, searcher.IndexReader, dir);
}
public override void SetUp()
{
    base.SetUp();
    Dir = NewDirectory();
    Writer = new RandomIndexWriter(Random, Dir, Similarity, TimeZone);
    int numDocs = AtLeast(100);
    for (int i = 0; i < numDocs; i++)
    {
        Writer.AddDocument(new Document());
        if (Rarely())
        {
            Writer.Commit();
        }
    }
}
public virtual void TestDeletePartiallyWrittenFilesIfAbort()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    iwConf.SetCodec(CompressingCodec.RandomInstance(Random()));
    // disable CFS because this test checks file names
    iwConf.SetMergePolicy(NewLogMergePolicy(false));
    iwConf.SetUseCompoundFile(false);
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

    Document validDoc = new Document();
    validDoc.Add(new IntField("id", 0, Field.Store.YES));
    iw.AddDocument(validDoc);
    iw.Commit();

    // make sure that #writeField will fail to trigger an abort
    Document invalidDoc = new Document();
    FieldType fieldType = new FieldType();
    fieldType.Stored = true;
    invalidDoc.Add(new FieldAnonymousInnerClassHelper(this, fieldType));

    try
    {
        iw.AddDocument(invalidDoc);
        iw.Commit();
    }
    finally
    {
        int counter = 0;
        foreach (string fileName in dir.ListAll())
        {
            if (fileName.EndsWith(".fdt") || fileName.EndsWith(".fdx"))
            {
                counter++;
            }
        }
        // Only one .fdt and one .fdx file must have been found
        Assert.AreEqual(2, counter);
        iw.Dispose();
        dir.Dispose();
    }
}
public override void SetUp()
{
    base.SetUp();
    Dir = NewDirectory();
    Writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        Random, Dir);
    int numDocs = AtLeast(100);
    for (int i = 0; i < numDocs; i++)
    {
        Writer.AddDocument(new Document());
        if (Rarely())
        {
            Writer.Commit();
        }
    }
}
public virtual void TestNegativeScores()
{
    // The Top*Collectors previously filtered out documents with <= 0 scores. This
    // behavior has changed. This test checks that if PositiveOnlyScoresFilter
    // wraps one of these collectors, documents with <= 0 scores are indeed
    // filtered.
    int numPositiveScores = 0;
    for (int i = 0; i < Scores.Length; i++)
    {
        if (Scores[i] > 0)
        {
            ++numPositiveScores;
        }
    }

    Directory directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), directory);
    writer.Commit();
    IndexReader ir = writer.Reader;
    writer.Dispose();
    IndexSearcher searcher = NewSearcher(ir);
    Weight fake = (new TermQuery(new Term("fake", "weight"))).CreateWeight(searcher);
    Scorer s = new SimpleScorer(fake);
    TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.Create(Scores.Length, true);
    Collector c = new PositiveScoresOnlyCollector(tdc);
    c.Scorer = s;
    while (s.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
    {
        c.Collect(0);
    }

    TopDocs td = tdc.TopDocs();
    ScoreDoc[] sd = td.ScoreDocs;
    Assert.AreEqual(numPositiveScores, td.TotalHits);
    for (int i = 0; i < sd.Length; i++)
    {
        Assert.IsTrue(sd[i].Score > 0, "only positive scores should return: " + sd[i].Score);
    }

    ir.Dispose();
    directory.Dispose();
}
private int[] BuildIndex(RandomIndexWriter writer, int docs)
{
    int[] docStates = new int[docs];
    for (int i = 0; i < docs; i++)
    {
        Document doc = new Document();
        if (Random().NextBoolean())
        {
            docStates[i] = 1;
            doc.Add(NewTextField("some", "value", Field.Store.YES));
        }
        doc.Add(NewTextField("all", "test", Field.Store.NO));
        doc.Add(NewTextField("id", "" + i, Field.Store.YES));
        writer.AddDocument(doc);
    }
    writer.Commit();
    int numDeletes = Random().Next(docs);
    for (int i = 0; i < numDeletes; i++)
    {
        int docID = Random().Next(docs);
        writer.DeleteDocuments(new Term("id", "" + docID));
        docStates[docID] = 2;
    }
    writer.Dispose();
    return docStates;
}
public virtual void TestSparseFacets()
{
    Store.Directory dir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();

    // Writes facet ords to a separate directory from the
    // main index:
    var taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode_e.CREATE);

    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
    FacetsConfig config = new FacetsConfig();

    Document doc = new Document();
    doc.Add(new FacetField("a", "foo1"));
    writer.AddDocument(config.Build(taxoWriter, doc));

    if (Random().NextBoolean())
    {
        writer.Commit();
    }

    doc = new Document();
    doc.Add(new FacetField("a", "foo2"));
    doc.Add(new FacetField("b", "bar1"));
    writer.AddDocument(config.Build(taxoWriter, doc));

    if (Random().NextBoolean())
    {
        writer.Commit();
    }

    doc = new Document();
    doc.Add(new FacetField("a", "foo3"));
    doc.Add(new FacetField("b", "bar2"));
    doc.Add(new FacetField("c", "baz1"));
    writer.AddDocument(config.Build(taxoWriter, doc));

    // NRT open
    IndexSearcher searcher = NewSearcher(writer.Reader);

    // NRT open
    var taxoReader = new DirectoryTaxonomyReader(taxoWriter);

    FacetsCollector c = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), c);

    Facets facets = GetTaxonomyFacetCounts(taxoReader, new FacetsConfig(), c);

    // Ask for top 10 labels for any dims that have counts:
    IList<FacetResult> results = facets.GetAllDims(10);

    Assert.AreEqual(3, results.Count);
    Assert.AreEqual("dim=a path=[] value=3 childCount=3\n  foo1 (1)\n  foo2 (1)\n  foo3 (1)\n", results[0].ToString());
    Assert.AreEqual("dim=b path=[] value=2 childCount=2\n  bar1 (1)\n  bar2 (1)\n", results[1].ToString());
    Assert.AreEqual("dim=c path=[] value=1 childCount=1\n  baz1 (1)\n", results[2].ToString());

    IOUtils.Close(writer, taxoWriter, searcher.IndexReader, taxoReader, taxoDir, dir);
}
public virtual void TestSparseFacets()
{
    AssumeTrue("Test requires SortedSetDV support", DefaultCodecSupportsSortedSet());
    Directory dir = NewDirectory();

    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
    FacetsConfig config = new FacetsConfig();

    Document doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo1"));
    writer.AddDocument(config.Build(doc));

    if (Random().NextBoolean())
    {
        writer.Commit();
    }

    doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo2"));
    doc.Add(new SortedSetDocValuesFacetField("b", "bar1"));
    writer.AddDocument(config.Build(doc));

    if (Random().NextBoolean())
    {
        writer.Commit();
    }

    doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo3"));
    doc.Add(new SortedSetDocValuesFacetField("b", "bar2"));
    doc.Add(new SortedSetDocValuesFacetField("c", "baz1"));
    writer.AddDocument(config.Build(doc));

    // NRT open
    IndexSearcher searcher = NewSearcher(writer.Reader);
    writer.Dispose();

    // Per-top-reader state:
    SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.IndexReader);

    FacetsCollector c = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), c);
    SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);

    // Ask for top 10 labels for any dims that have counts:
    IList<FacetResult> results = facets.GetAllDims(10);

    Assert.AreEqual(3, results.Count);
    Assert.AreEqual("dim=a path=[] value=3 childCount=3\n  foo1 (1)\n  foo2 (1)\n  foo3 (1)\n", results[0].ToString());
    Assert.AreEqual("dim=b path=[] value=2 childCount=2\n  bar1 (1)\n  bar2 (1)\n", results[1].ToString());
    Assert.AreEqual("dim=c path=[] value=1 childCount=1\n  baz1 (1)\n", results[2].ToString());

    searcher.IndexReader.Dispose();
    dir.Dispose();
}
public virtual void TestReuseDocsEnumSameBitsOrNull()
{
    Directory dir = NewDirectory();
    Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
    int numdocs = AtLeast(20);
    CreateRandomIndex(numdocs, writer, Random());
    writer.Commit();

    DirectoryReader open = DirectoryReader.Open(dir);
    foreach (AtomicReaderContext ctx in open.Leaves)
    {
        Terms terms = ((AtomicReader)ctx.Reader).Terms("body");
        TermsEnum iterator = terms.Iterator(null);
        IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
        MatchNoBits bits = new MatchNoBits(open.MaxDoc);
        DocsEnum docs = null;
        while ((iterator.Next()) != null)
        {
            docs = iterator.Docs(bits, docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
            enums[docs] = true;
        }
        // Same Bits instance passed every time: the enum should be reused
        Assert.AreEqual(1, enums.Count);

        enums.Clear();
        iterator = terms.Iterator(null);
        docs = null;
        while ((iterator.Next()) != null)
        {
            docs = iterator.Docs(new MatchNoBits(open.MaxDoc), docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
            enums[docs] = true;
        }
        // A fresh Bits instance every time: no reuse is expected
        Assert.AreEqual(terms.Size(), enums.Count);

        enums.Clear();
        iterator = terms.Iterator(null);
        docs = null;
        while ((iterator.Next()) != null)
        {
            docs = iterator.Docs(null, docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
            enums[docs] = true;
        }
        // Null liveDocs every time: the enum should be reused
        Assert.AreEqual(1, enums.Count);
    }
    IOUtils.Close(writer, open, dir);
}
public virtual void TestReuseDocsEnumDifferentReader()
{
    Directory dir = NewDirectory();
    Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetCodec(cp));
    int numdocs = AtLeast(20);
    CreateRandomIndex(numdocs, writer, Random());
    writer.Commit();

    DirectoryReader firstReader = DirectoryReader.Open(dir);
    DirectoryReader secondReader = DirectoryReader.Open(dir);
    IList<AtomicReaderContext> leaves = firstReader.Leaves;
    IList<AtomicReaderContext> leaves2 = secondReader.Leaves;

    foreach (AtomicReaderContext ctx in leaves)
    {
        Terms terms = ((AtomicReader)ctx.Reader).Terms("body");
        TermsEnum iterator = terms.Iterator(null);
        IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
        MatchNoBits bits = new MatchNoBits(firstReader.MaxDoc);
        iterator = terms.Iterator(null);
        DocsEnum docs = null;
        BytesRef term = null;
        while ((term = iterator.Next()) != null)
        {
            docs = iterator.Docs(null, RandomDocsEnum("body", term, leaves2, bits), Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
            enums[docs] = true;
        }
        Assert.AreEqual(terms.Size(), enums.Count);

        iterator = terms.Iterator(null);
        enums.Clear();
        docs = null;
        while ((term = iterator.Next()) != null)
        {
            docs = iterator.Docs(bits, RandomDocsEnum("body", term, leaves2, bits), Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
            enums[docs] = true;
        }
        Assert.AreEqual(terms.Size(), enums.Count);
    }
    IOUtils.Close(writer, firstReader, secondReader, dir);
}
public override void SetUp()
{
    base.SetUp();
    AllSortFields = new List<SortField>(Arrays.AsList(new SortField[]
    {
        new SortField("byte", SortField.Type_e.BYTE, false),
        new SortField("short", SortField.Type_e.SHORT, false),
        new SortField("int", SortField.Type_e.INT, false),
        new SortField("long", SortField.Type_e.LONG, false),
        new SortField("float", SortField.Type_e.FLOAT, false),
        new SortField("double", SortField.Type_e.DOUBLE, false),
        new SortField("bytes", SortField.Type_e.STRING, false),
        new SortField("bytesval", SortField.Type_e.STRING_VAL, false),
        new SortField("byte", SortField.Type_e.BYTE, true),
        new SortField("short", SortField.Type_e.SHORT, true),
        new SortField("int", SortField.Type_e.INT, true),
        new SortField("long", SortField.Type_e.LONG, true),
        new SortField("float", SortField.Type_e.FLOAT, true),
        new SortField("double", SortField.Type_e.DOUBLE, true),
        new SortField("bytes", SortField.Type_e.STRING, true),
        new SortField("bytesval", SortField.Type_e.STRING_VAL, true),
        SortField.FIELD_SCORE,
        SortField.FIELD_DOC
    }));

    if (SupportsDocValues)
    {
        AllSortFields.AddRange(Arrays.AsList(new SortField[]
        {
            new SortField("intdocvalues", SortField.Type_e.INT, false),
            new SortField("floatdocvalues", SortField.Type_e.FLOAT, false),
            new SortField("sortedbytesdocvalues", SortField.Type_e.STRING, false),
            new SortField("sortedbytesdocvaluesval", SortField.Type_e.STRING_VAL, false),
            new SortField("straightbytesdocvalues", SortField.Type_e.STRING_VAL, false),
            new SortField("intdocvalues", SortField.Type_e.INT, true),
            new SortField("floatdocvalues", SortField.Type_e.FLOAT, true),
            new SortField("sortedbytesdocvalues", SortField.Type_e.STRING, true),
            new SortField("sortedbytesdocvaluesval", SortField.Type_e.STRING_VAL, true),
            new SortField("straightbytesdocvalues", SortField.Type_e.STRING_VAL, true)
        }));
    }

    // Also test missing first / last for the "string" sorts:
    foreach (string field in new string[] { "bytes", "sortedbytesdocvalues" })
    {
        for (int rev = 0; rev < 2; rev++)
        {
            bool reversed = rev == 0;
            SortField sf = new SortField(field, SortField.Type_e.STRING, reversed);
            sf.MissingValue = SortField.STRING_FIRST;
            AllSortFields.Add(sf);

            sf = new SortField(field, SortField.Type_e.STRING, reversed);
            sf.MissingValue = SortField.STRING_LAST;
            AllSortFields.Add(sf);
        }
    }

    int limit = AllSortFields.Count;
    for (int i = 0; i < limit; i++)
    {
        SortField sf = AllSortFields[i];
        if (sf.Type == SortField.Type_e.INT)
        {
            SortField sf2 = new SortField(sf.Field, SortField.Type_e.INT, sf.Reverse);
            sf2.MissingValue = Random().Next();
            AllSortFields.Add(sf2);
        }
        else if (sf.Type == SortField.Type_e.LONG)
        {
            SortField sf2 = new SortField(sf.Field, SortField.Type_e.LONG, sf.Reverse);
            sf2.MissingValue = Random().NextLong();
            AllSortFields.Add(sf2);
        }
        else if (sf.Type == SortField.Type_e.FLOAT)
        {
            SortField sf2 = new SortField(sf.Field, SortField.Type_e.FLOAT, sf.Reverse);
            sf2.MissingValue = (float)Random().NextDouble();
            AllSortFields.Add(sf2);
        }
        else if (sf.Type == SortField.Type_e.DOUBLE)
        {
            SortField sf2 = new SortField(sf.Field, SortField.Type_e.DOUBLE, sf.Reverse);
            sf2.MissingValue = Random().NextDouble();
            AllSortFields.Add(sf2);
        }
    }

    Dir = NewDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(Random(), Dir);
    int numDocs = AtLeast(200);
    for (int i = 0; i < numDocs; i++)
    {
        IList<Field> fields = new List<Field>();
        fields.Add(NewTextField("english", English.IntToEnglish(i), Field.Store.NO));
        fields.Add(NewTextField("oddeven", (i % 2 == 0) ? "even" : "odd", Field.Store.NO));
        fields.Add(NewStringField("byte", "" + ((sbyte)Random().Next()), Field.Store.NO));
        fields.Add(NewStringField("short", "" + ((short)Random().Next()), Field.Store.NO));
        fields.Add(new IntField("int", Random().Next(), Field.Store.NO));
        fields.Add(new LongField("long", Random().NextLong(), Field.Store.NO));
        fields.Add(new FloatField("float", (float)Random().NextDouble(), Field.Store.NO));
        fields.Add(new DoubleField("double", Random().NextDouble(), Field.Store.NO));
        fields.Add(NewStringField("bytes", TestUtil.RandomRealisticUnicodeString(Random()), Field.Store.NO));
        fields.Add(NewStringField("bytesval", TestUtil.RandomRealisticUnicodeString(Random()), Field.Store.NO));
        fields.Add(new DoubleField("double", Random().NextDouble(), Field.Store.NO));

        if (SupportsDocValues)
        {
            fields.Add(new NumericDocValuesField("intdocvalues", Random().Next()));
            fields.Add(new FloatDocValuesField("floatdocvalues", (float)Random().NextDouble()));
            fields.Add(new SortedDocValuesField("sortedbytesdocvalues", new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()))));
            fields.Add(new SortedDocValuesField("sortedbytesdocvaluesval", new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()))));
            fields.Add(new BinaryDocValuesField("straightbytesdocvalues", new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()))));
        }

        Document document = new Document();
        document.Add(new StoredField("id", "" + i));
        if (VERBOSE)
        {
            Console.WriteLine(" add doc id=" + i);
        }
        foreach (Field field in fields)
        {
            // So we are sometimes missing that field:
            if (Random().Next(5) != 4)
            {
                document.Add(field);
                if (VERBOSE)
                {
                    Console.WriteLine(" " + field);
                }
            }
        }

        iw.AddDocument(document);

        if (Random().Next(50) == 17)
        {
            iw.Commit();
        }
    }
    Reader = iw.Reader;
    iw.Dispose();
    Searcher = NewSearcher(Reader);
    if (VERBOSE)
    {
        Console.WriteLine(" searcher=" + Searcher);
    }
}
public virtual void TestEmptyBucketWithMoreDocs()
{
    // This test checks the logic of NextDoc() when all sub scorers have docs
    // beyond the first bucket (for example). Currently, the code relies on the
    // 'more' variable to work properly, and this test ensures that if the logic
    // changes, we have a test to back it up.
    Directory directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), directory);
    writer.Commit();
    IndexReader ir = writer.Reader;
    writer.Dispose();
    IndexSearcher searcher = NewSearcher(ir);
    BooleanWeight weight = (BooleanWeight)(new BooleanQuery()).CreateWeight(searcher);

    BulkScorer[] scorers = new BulkScorer[] { new BulkScorerAnonymousInnerClassHelper() };

    BooleanScorer bs = new BooleanScorer(weight, false, 1, Arrays.AsList(scorers), new List<BulkScorer>(), scorers.Length);

    IList<int> hits = new List<int>();
    bs.Score(new CollectorAnonymousInnerClassHelper(this, hits));

    Assert.AreEqual(1, hits.Count, "should have only 1 hit");
    Assert.AreEqual(3000, (int)hits[0], "hit should have been docID=3000");

    ir.Dispose();
    directory.Dispose();
}
public virtual void TestEvilSearcherFactory()
{
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir);
    w.Commit();

    IndexReader other = DirectoryReader.Open(dir);

    SearcherFactory theEvilOne = new SearcherFactoryAnonymousInnerClassHelper2(this, other);

    try
    {
        new SearcherManager(w.w, false, theEvilOne);
        Assert.Fail("didn't hit expected exception");
    }
    catch (InvalidOperationException)
    {
        // expected
    }
    w.Dispose();
    other.Dispose();
    dir.Dispose();
}
public virtual void TestEvilSearcherFactory()
{
    Random random = Random();
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random, dir);
    w.Commit();

    IndexReader other = DirectoryReader.Open(dir);

    SearcherFactory theEvilOne = new SearcherFactoryAnonymousInnerClassHelper3(this, other);

    try
    {
        new SearcherManager(dir, theEvilOne);
        Assert.Fail("didn't hit expected exception");
    }
    catch (InvalidOperationException)
    {
        // expected
    }
    try
    {
        new SearcherManager(w.w, random.NextBoolean(), theEvilOne);
        Assert.Fail("didn't hit expected exception");
    }
    catch (InvalidOperationException)
    {
        // expected
    }
    w.Dispose();
    other.Dispose();
    dir.Dispose();
}
public virtual void TestSomeSegmentsMissing()
{
    AssumeTrue("Test requires SortedSetDV support", DefaultCodecSupportsSortedSet());
    Directory dir = NewDirectory();

    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
    FacetsConfig config = new FacetsConfig();

    Document doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo1"));
    writer.AddDocument(config.Build(doc));
    writer.Commit();

    doc = new Document();
    writer.AddDocument(config.Build(doc));
    writer.Commit();

    doc = new Document();
    doc.Add(new SortedSetDocValuesFacetField("a", "foo2"));
    writer.AddDocument(config.Build(doc));
    writer.Commit();

    // NRT open
    IndexSearcher searcher = NewSearcher(writer.Reader);
    writer.Dispose();

    // Per-top-reader state:
    SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.IndexReader);

    FacetsCollector c = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), c);
    SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);

    // Ask for top 10 labels for any dims that have counts:
    Assert.AreEqual("dim=a path=[] value=2 childCount=2\n  foo1 (1)\n  foo2 (1)\n", facets.GetTopChildren(10, "a").ToString());

    searcher.IndexReader.Dispose();
    dir.Dispose();
}