private static void Search(Query query)
{
    // try to search the published index
    Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[] { Remote };
    Searcher searcher = new MultiSearcher(searchables);
    Hits result = searcher.Search(query);
    Assert.AreEqual(1, result.Length());
    Assert.AreEqual("test text", result.Doc(0).Get("test"));
}
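Every snippet in this section is written against the Hits class, which Lucene deprecated in 2.3 and removed in 3.0 in favor of TopDocs/ScoreDoc. As a point of reference, here is a minimal sketch of the same remote search on the replacement API; exact member casing (TotalHits, ScoreDocs, Doc) varies across Lucene.Net releases, so treat the names as assumptions to check against your version.

private static void SearchTopDocs(Query query)
{
    // same setup as above, but collecting an explicit number of top hits
    Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[] { Remote };
    Searcher searcher = new MultiSearcher(searchables);
    TopDocs topDocs = searcher.Search(query, null, 10); // no filter, top 10 hits
    Assert.AreEqual(1, topDocs.TotalHits);
    Document doc = searcher.Doc(topDocs.ScoreDocs[0].Doc);
    Assert.AreEqual("test text", doc.Get("test"));
}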
private void DoTestSearchHitsDeleteEvery(int k, bool deleteInFront)
{
    bool intermittent = k < 0;
    Log("Test search hits with " + (intermittent ? "intermittent deletions." : "deletions of every " + k + " hit."));
    IndexSearcher searcher = new IndexSearcher(directory);
    IndexReader reader = searcher.GetIndexReader();
    Query q = new TermQuery(new Term(TEXT_FIELD, "text")); // matching all docs
    Hits hits = searcher.Search(q);
    Log("Got " + hits.Length() + " results");
    Assert.AreEqual(N, hits.Length(), "must match all " + N + " docs, not only " + hits.Length() + " docs!");
    if (deleteInFront)
    {
        Log("deleting hits that were not yet retrieved!");
        reader.DeleteDocument(reader.MaxDoc() - 1);
        reader.DeleteDocument(reader.MaxDoc() - 2);
        reader.DeleteDocument(reader.MaxDoc() - 3);
    }
    try
    {
        for (int i = 0; i < hits.Length(); i++)
        {
            int id = hits.Id(i);
            Assert.AreEqual(i, hits.Id(i), "Hit " + i + " has doc id " + hits.Id(i) + " instead of " + i);
            if ((intermittent && (i == 50 || i == 250 || i == 950)) ||
                (!intermittent && (k < 2 || (i > 0 && i % k == 0))))
            {
                Document doc = hits.Doc(id);
                Log("Deleting hit " + i + " - doc " + doc + " with id " + id);
                reader.DeleteDocument(id);
            }
            if (intermittent)
            {
                // Check the internal behavior of Hits. We look 50 hits past the getMoreDocs()
                // points because the deletions cause more of the available hits to be consumed.
                if (i == 150 || i == 450 || i == 1650)
                {
                    Assert.IsTrue(hits.debugCheckedForDeletions, "Hit " + i + ": hits should have checked for deletions in last call to getMoreDocs()");
                }
                else if (i == 50 || i == 250 || i == 850)
                {
                    Assert.IsFalse(hits.debugCheckedForDeletions, "Hit " + i + ": hits should have NOT checked for deletions in last call to getMoreDocs()");
                }
            }
        }
    }
    catch (System.Exception e)
    {
        // This is the only valid exception, and only when deleting in front.
        Assert.IsTrue(deleteInFront, e.Message + " not expected unless deleting hits that were not yet seen!");
    }
    searcher.Close();
}
public void TestDefaultFilter()
{
    DuplicateFilter df = new DuplicateFilter(KEY_FIELD);
    Hashtable results = new Hashtable();
    Hits h = searcher.Search(tq, df);
    for (int i = 0; i < h.Length(); i++)
    {
        Document d = h.Doc(i);
        String url = d.Get(KEY_FIELD);
        Assert.IsFalse(results.Contains(url), "No duplicate urls should be returned");
        results.Add(url, url);
    }
}
private System.Collections.Hashtable GetScores(Hits hits)
{
    System.Collections.Hashtable scoreMap = new System.Collections.Hashtable();
    int n = hits.Length();
    for (int i = 0; i < n; ++i)
    {
        Document doc = hits.Doc(i);
        System.String[] v = doc.GetValues("tracer");
        Assert.AreEqual(1, v.Length); // expected value comes first in NUnit
        scoreMap[v[0]] = (float)hits.Score(i);
    }
    return scoreMap;
}
public EntityInfo Extract(Hits hits, int index)
{
    Document doc = hits.Doc(index);
    //TODO if we are only looking for the score (unlikely), avoid accessing doc (lazy load)
    EntityInfo entityInfo = Extract(doc);
    object[] eip = entityInfo.Projection;
    if (eip != null && eip.Length > 0)
    {
        for (int x = 0; x < projection.Length; x++)
        {
            switch (projection[x])
            {
                case ProjectionConstants.SCORE:
                    eip[x] = hits.Score(index);
                    break;
                case ProjectionConstants.ID:
                    eip[x] = entityInfo.Id;
                    break;
                case ProjectionConstants.DOCUMENT:
                    eip[x] = doc;
                    break;
                case ProjectionConstants.DOCUMENT_ID:
                    eip[x] = hits.Id(index);
                    break;
                case ProjectionConstants.BOOST:
                    eip[x] = doc.GetBoost();
                    break;
                case ProjectionConstants.THIS:
                    // THIS could be projected more than once.
                    // Loading of THIS is delayed to the Loader phase.
                    if (entityInfo.IndexesOfThis == null)
                    {
                        entityInfo.IndexesOfThis = new List<int>(1);
                    }
                    entityInfo.IndexesOfThis.Add(x);
                    break;
            }
        }
    }
    return entityInfo;
}
public void TestFastFilter()
{
    DuplicateFilter df = new DuplicateFilter(KEY_FIELD);
    df.SetProcessingMode(DuplicateFilter.PM_FAST_INVALIDATION);
    Hashtable results = new Hashtable();
    Hits h = searcher.Search(tq, df);
    Assert.IsTrue(h.Length() > 0, "Filtered searching should have found some matches");
    for (int i = 0; i < h.Length(); i++)
    {
        Document d = h.Doc(i);
        String url = d.Get(KEY_FIELD);
        Assert.IsFalse(results.Contains(url), "No duplicate urls should be returned");
        results.Add(url, url);
    }
    Assert.AreEqual(2, results.Count, "Two urls found");
}
public void TestNoFilter()
{
    Hashtable results = new Hashtable();
    Hits h = searcher.Search(tq);
    Assert.IsTrue(h.Length() > 0, "Default searching should have found some matches");
    bool dupsFound = false;
    for (int i = 0; i < h.Length(); i++)
    {
        Document d = h.Doc(i);
        String url = d.Get(KEY_FIELD);
        if (!dupsFound)
            dupsFound = results.Contains(url);
        results[url] = url;
    }
    Assert.IsTrue(dupsFound, "Default searching should have found duplicate urls");
}
public void TestKeepsFirstFilter()
{
    DuplicateFilter df = new DuplicateFilter(KEY_FIELD);
    df.SetKeepMode(DuplicateFilter.KM_USE_FIRST_OCCURRENCE);
    Hits h = searcher.Search(tq, df);
    Assert.IsTrue(h.Length() > 0, "Filtered searching should have found some matches");
    for (int i = 0; i < h.Length(); i++)
    {
        Document d = h.Doc(i);
        String url = d.Get(KEY_FIELD);
        // the first posting for this url's term is the first document that contains it
        TermDocs td = reader.TermDocs(new Term(KEY_FIELD, url));
        td.Next();
        int firstDoc = td.Doc();
        Assert.AreEqual(firstDoc, h.Id(i), "Duplicate urls should return first doc");
    }
}
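The three DuplicateFilter tests above each exercise one knob in isolation. A hedged sketch combining both settings, reusing KEY_FIELD, tq, and searcher from the tests; the constant names are those of the contrib DuplicateFilter, so verify them against your version:

DuplicateFilter df = new DuplicateFilter(KEY_FIELD);
df.SetKeepMode(DuplicateFilter.KM_USE_FIRST_OCCURRENCE);    // keep the earliest document per key
df.SetProcessingMode(DuplicateFilter.PM_FAST_INVALIDATION); // the faster of the two processing modes
Hits h = searcher.Search(tq, df);                           // at most one hit per distinct KEY_FIELD value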
// make sure the documents returned by the search match the expected list
private void AssertMatches(Searcher searcher, Query query, Sort sort, System.String expectedResult)
{
    Hits result = searcher.Search(query, sort);
    System.Text.StringBuilder buff = new System.Text.StringBuilder(10);
    int n = result.Length();
    for (int i = 0; i < n; ++i)
    {
        Document doc = result.Doc(i);
        System.String[] v = doc.GetValues("tracer");
        for (int j = 0; j < v.Length; ++j)
        {
            buff.Append(v[j]);
        }
    }
    Assert.AreEqual(expectedResult, buff.ToString());
}
// make sure the documents returned by the search match the expected list pattern
private void AssertMatchesPattern(Searcher searcher, Query query, Sort sort, System.String pattern)
{
    Hits result = searcher.Search(query, sort);
    System.Text.StringBuilder buff = new System.Text.StringBuilder(10);
    int n = result.Length();
    for (int i = 0; i < n; ++i)
    {
        Document doc = result.Doc(i);
        System.String[] v = doc.GetValues("tracer");
        for (int j = 0; j < v.Length; ++j)
        {
            buff.Append(v[j]);
        }
    }
    // System.out.println("matching \"" + buff + "\" against pattern \"" + pattern + "\"");
    System.Text.RegularExpressions.Regex regex = new System.Text.RegularExpressions.Regex(pattern);
    Assert.IsTrue(regex.IsMatch(buff.ToString()));
}
internal static Hit FromRaw(Hits raw, int index)
{
    var doc = raw.Doc(index);
    var fields = new FieldCollection();
    foreach (var f in doc.GetFields())
    {
        var rf = (Lucene.Net.Documents.Field)f;
        fields.Add(new Field { Name = rf.Name(), Values = { rf.StringValue() } });
    }
    return new Hit
    {
        Score = raw.Score(index),
        SchemaName = doc.GetValues(IndexHelper.SchemaNameField).Join(""),
        SchemaVersion = doc.GetValues(IndexHelper.SchemaVersionField).Join(""),
        Fields = fields
    };
}
public Hashtable GetStoredUriStrings(string server, string file)
{
    Hashtable uris = new Hashtable();
    Term term = new Term(PropertyToFieldName(PropertyType.Keyword, "fixme:file"), file);
    LNS.QueryFilter filter = new LNS.QueryFilter(new LNS.TermQuery(term));
    term = new Term(PropertyToFieldName(PropertyType.Keyword, "fixme:account"), server);
    LNS.TermQuery query = new LNS.TermQuery(term);
    LNS.IndexSearcher searcher = LuceneCommon.GetSearcher(PrimaryStore);
    LNS.Hits hits = searcher.Search(query, filter);
    for (int i = 0; i < hits.Length(); i++)
    {
        StoredInfo info = DocumentToStoredInfo(hits.Doc(i));
        uris.Add(info.Uri.ToString(), info.FullyIndexed);
    }
    LuceneCommon.ReleaseSearcher(searcher);
    return uris;
}
protected internal virtual void PrintHits(System.String test, Hits h)
{
    System.Console.Error.WriteLine("------- " + test + " -------");
    for (int i = 0; i < h.Length(); i++)
    {
        Lucene.Net.Documents.Document d = h.Doc(i);
        float score = h.Score(i);
        // format the score to nine decimal places; the original concatenation printed
        // the literal "{0.000000000}" placeholder instead of applying it
        System.Console.Error.WriteLine("#" + i + ": " + score.ToString("0.000000000") + " - " + d.Get("id"));
    }
}
private SearchResult[] _prepareSearchResultsFromHits(string query, Hits hits)
{
    List<SearchResult> results = new List<SearchResult>();
    for (int i = 0; i < hits.Length(); i++)
    {
        SearchResult sr = new SearchResult(hits.Doc(i), _analyzer, query, _fragmentSize);
        sr.QueryParser = _parser;
        results.Add(sr);
    }
    return results.ToArray();
}
public static void Main(String[] a)
{
    String indexName = "localhost_index";
    String fn = "c:/Program Files/Apache Group/Apache/htdocs/manual/vhosts/index.html.en";
    Uri url = null;
    for (int i = 0; i < a.Length; i++)
    {
        if (a[i].Equals("-i"))
        {
            indexName = a[++i];
        }
        else if (a[i].Equals("-f"))
        {
            fn = a[++i];
        }
        else if (a[i].Equals("-url"))
        {
            url = new Uri(a[++i]);
        }
    }

    StreamWriter temp_writer;
    temp_writer = new StreamWriter(Console.OpenStandardOutput(), Console.Out.Encoding);
    temp_writer.AutoFlush = true;
    StreamWriter o = temp_writer;
    IndexReader r = IndexReader.Open(indexName);
    o.WriteLine("Open index " + indexName + " which has " + r.NumDocs() + " docs");

    LuceneMoreLikeThis mlt = new LuceneMoreLikeThis(r);
    o.WriteLine("Query generation parameters:");
    o.WriteLine(mlt.DescribeParams());
    o.WriteLine();

    Query query = null;
    if (url != null)
    {
        o.WriteLine("Parsing URL: " + url);
        query = mlt.Like(url);
    }
    else if (fn != null)
    {
        o.WriteLine("Parsing file: " + fn);
        query = mlt.Like(new FileInfo(fn));
    }

    o.WriteLine("q: " + query);
    o.WriteLine();
    Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(indexName);
    Lucene.Net.Search.Hits hits = searcher.Search(query);
    int len = hits.Length();
    o.WriteLine("found: " + len + " documents matching");
    o.WriteLine();
    for (int i = 0; i < Math.Min(25, len); i++)
    {
        Lucene.Net.Documents.Document d = hits.Doc(i);
        String summary = d.Get("summary");
        o.WriteLine("score  : " + hits.Score(i));
        o.WriteLine("url    : " + d.Get("url"));
        o.WriteLine("\ttitle  : " + d.Get("title"));
        if (summary != null)
        {
            o.WriteLine("\tsummary: " + summary);
        }
        o.WriteLine();
    }
}
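For reference, the demo above recognizes three flags; a hypothetical invocation (the executable name is assumed, since the snippet does not show the class declaration):

// MoreLikeThisDemo -i localhost_index -f c:/docs/page.html.en
// MoreLikeThisDemo -i localhost_index -url http://localhost/manual/index.html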
private void btnSearch_Click(object sender, EventArgs e)
{
    lstResults.Items.Clear();
    searcher = new IndexSearcher(new RAMDirectory(_indexTarget));
    PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new StandardAnalyzer());
    analyzer.AddAnalyzer("ayat_arabic", new DiacriticAnalyzer(FilterData.stopWords));
    //MyQueryParser parser = new MyQueryParser(new string[] { "ayat_desc", "ayat_urdu", "ayat_arabic" }, analyzer);
    //parser.SetDefaultOperator(QueryParser.Operator.AND);
    //Query q = parser.Parse(txtSearch.Text);
    //Query q = new TermQuery(new Term("ayatno", NumberTools.LongToString(long.Parse(txtSearch.Text))));
    BooleanQuery q = new BooleanQuery();
    long l1 = 1;
    long l2 = 500;
    long l3 = 1;
    long l4 = 1;
    //RangeQuery rq = new RangeQuery(new Term("ayatno", l1.ToString("00000")), new Term("ayatno", l2.ToString("00000")), true);
    //q.Add(rq, true, false);
    q.Add(new TermQuery(new Term("sid", l3.ToString("00000"))), true, false);
    q.Add(new TermQuery(new Term("ayatno", l4.ToString("00000"))), true, false);
    MessageBox.Show(q.ToString());
    Sort sort = new Sort(new string[] { "pid", "sid", "ayatno" });
    hits = searcher.Search(q, sort);
    lblHits.Text = hits.Length() + " hit(s).";
    Application.DoEvents();
    for (int i = 0; i < hits.Length(); i++)
    {
        StringBuilder sb = new StringBuilder();
        sb.Append("Para: ").Append(hits.Doc(i).Get("pid"));
        sb.Append(", Surat: ").Append(hits.Doc(i).Get("sid"));
        sb.Append(", Verse: ").Append(hits.Doc(i).Get("ayatno"));
        lstResults.Items.Add(sb.ToString());
    }
}
private static void PrintHits(Hits hits)
{
    System.Console.Out.WriteLine(hits.Length() + " total results\n");
    for (int i = 0; i < hits.Length(); i++)
    {
        if (i < 10 || (i > 94 && i < 105))
        {
            Lucene.Net.Documents.Document d = hits.Doc(i);
            System.Console.Out.WriteLine(i + " " + d.Get(ID_FIELD));
        }
    }
}
private void button2_Click(object sender, EventArgs e)
{
    if (_indexTarget == string.Empty)
        return;
    if (textBox4.Text == string.Empty)
        return;
    listBox4.Items.Clear();
    if (IndexReader.IndexExists(_indexTarget))
    {
        searcher = new IndexSearcher(_indexTarget);
        Query q = QueryParser.Parse(textBox4.Text, "contents", analyzer);
        hits = searcher.Search(q);
        if (hits.Length() == 0)
        {
            label8.Text = "No Hits :P";
            return;
        }
        label8.Text = hits.Length() + " hit(s).";
        for (int i = 0; i < hits.Length(); i++)
        {
            Document d = hits.Doc(i);
            listBox4.Items.Add(d.Get("filename"));
        }
    }
}
public void AddResult(SearchInfo info, Hits hits, int maxMatches)
{
    if (docs == null)
        docs = new Dictionary<SearchInfo, List<ExDocument>>();
    if (hits == null)
        return;
    if (info == null)
        return;
    List<ExDocument> exdl = new List<ExDocument>();
    for (int i = 0; i < maxMatches && i < hits.Length(); i++)
    {
        exdl.Add(new ExDocument(hits.Doc(i), hits.Score(i)));
    }
    if (exdl.Count > 0)
        docs.Add(info, exdl);
}
private void button2_Click(object sender, EventArgs e)
{
    listBox1.Items.Clear();
    searcher = new IndexSearcher(new RAMDirectory(indexpath));
    Query q = MultiFieldQueryParser.Parse(textBox1.Text, new string[] { "ayat_desc", "ayat_urdu", "ayat_arabic" }, new DiacriticAnalyzer(FilterData.stopWords));
    //Query q = QueryParser.Parse(textBox1.Text, "contents", new DiacriticAnalyzer(FilterData.stopWords));
    //Query q = QueryParser.Parse(textBox1.Text, "ayat_desc", new DiacriticAnalyzer(FilterData.stopWords));
    hits = searcher.Search(q);
    label2.Text = string.Format("{0} hit(s).", hits.Length());
    Application.DoEvents();
    //for (int i = 0; i < hits.Length(); i++)
    //{
    //    foreach (Field f in hits.Doc(i).Fields())
    //    {
    //        listBox1.Items.Add(f.Name());
    //    }
    //}
    for (int i = 0; i < hits.Length(); i++)
    {
        StringBuilder sb = new StringBuilder();
        sb.Append("Para: ").Append(hits.Doc(i).Get("pid"));
        sb.Append(", Surat: ").Append(hits.Doc(i).Get("sid"));
        sb.Append(", Verse: ").Append(hits.Doc(i).Get("ayatno"));
        listBox1.Items.Add(sb.ToString());
    }
}
public void AddResult(Hits hits, int maxMatches)
{
    if (docList == null)
        docList = new List<ExDocument>();
    if (hits == null)
        return;
    for (int i = 0; i < maxMatches && i < hits.Length(); i++)
    {
        docList.Add(new ExDocument(hits.Doc(i), hits.Score(i)));
    }
}
private void PrintHits(System.IO.StringWriter out_Renamed, Hits hits)
{
    out_Renamed.WriteLine(hits.Length() + " total results\n");
    for (int i = 0; i < hits.Length(); i++)
    {
        if (i < 10 || (i > 94 && i < 105))
        {
            Document d = hits.Doc(i);
            out_Renamed.WriteLine(i + " " + d.Get(ID_FIELD));
        }
    }
}
private void FetchTheHit()
{
    doc = hits.Doc(hitNumber);
    resolved = true;
}
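A plausible caller for the lazy resolver above, shown as a sketch; apart from doc, hits, hitNumber, and resolved, every name here is an assumption about the surrounding class:

public Document GetDocument()
{
    // resolve the hit into a Document only on first access, then reuse the cached one
    if (!resolved)
        FetchTheHit();
    return doc;
}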
private void CheckHits(Hits hits, int expectedCount)
{
    Assert.AreEqual(expectedCount, hits.Length(), "total results");
    for (int i = 0; i < hits.Length(); i++)
    {
        if (i < 10 || (i > 94 && i < 105))
        {
            Document d = hits.Doc(i);
            Assert.AreEqual(System.Convert.ToString(i), d.Get(ID_FIELD), "check " + i);
        }
    }
}
public virtual void TestKnownSetOfDocuments()
{
    System.String[] termArray = new System.String[] { "eating", "chocolate", "in", "a", "computer", "lab", "grows", "old", "colored", "with", "an" };
    System.String test1 = "eating chocolate in a computer lab"; // 6 terms
    System.String test2 = "computer in a computer lab"; // 5 terms
    System.String test3 = "a chocolate lab grows old"; // 5 terms
    System.String test4 = "eating chocolate with a chocolate lab in an old chocolate colored computer lab"; // 13 terms
    System.Collections.IDictionary test4Map = new System.Collections.Hashtable();
    test4Map["chocolate"] = 3;
    test4Map["lab"] = 2;
    test4Map["eating"] = 1;
    test4Map["computer"] = 1;
    test4Map["with"] = 1;
    test4Map["a"] = 1;
    test4Map["colored"] = 1;
    test4Map["in"] = 1;
    test4Map["an"] = 1;
    test4Map["old"] = 1;
    Document testDoc1 = new Document();
    SetupDoc(testDoc1, test1);
    Document testDoc2 = new Document();
    SetupDoc(testDoc2, test2);
    Document testDoc3 = new Document();
    SetupDoc(testDoc3, test3);
    Document testDoc4 = new Document();
    SetupDoc(testDoc4, test4);
    Directory dir = new RAMDirectory();
    try
    {
        IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true);
        Assert.IsTrue(writer != null);
        writer.AddDocument(testDoc1);
        writer.AddDocument(testDoc2);
        writer.AddDocument(testDoc3);
        writer.AddDocument(testDoc4);
        writer.Close();
        IndexSearcher knownSearcher = new IndexSearcher(dir);
        TermEnum termEnum = knownSearcher.reader.Terms();
        TermDocs termDocs = knownSearcher.reader.TermDocs();
        //System.out.println("Terms: " + termEnum.size() + " Orig Len: " + termArray.length);
        Similarity sim = knownSearcher.GetSimilarity();
        while (termEnum.Next() == true)
        {
            Term term = termEnum.Term();
            //System.out.println("Term: " + term);
            termDocs.Seek(term);
            while (termDocs.Next())
            {
                int docId = termDocs.Doc();
                int freq = termDocs.Freq();
                //System.out.println("Doc Id: " + docId + " freq " + freq);
                TermFreqVector vector = knownSearcher.reader.GetTermFreqVector(docId, "Field");
                float tf = sim.Tf(freq);
                float idf = sim.Idf(term, knownSearcher);
                //float qNorm = sim.queryNorm() -- this is fine since we don't have stop words
                float lNorm = sim.LengthNorm("Field", vector.GetTerms().Length);
                //float coord = sim.coord()
                //System.out.println("TF: " + tf + " IDF: " + idf + " LenNorm: " + lNorm);
                Assert.IsTrue(vector != null);
                System.String[] vTerms = vector.GetTerms();
                int[] freqs = vector.GetTermFrequencies();
                for (int i = 0; i < vTerms.Length; i++)
                {
                    if (term.Text().Equals(vTerms[i]) == true)
                    {
                        Assert.IsTrue(freqs[i] == freq);
                    }
                }
            }
            //System.out.println("--------");
        }
        Query query = new TermQuery(new Term("Field", "chocolate"));
        Hits hits = knownSearcher.Search(query);
        // doc 3 should be the first hit because it is the shortest match
        Assert.IsTrue(hits.Length() == 3);
        float score = hits.Score(0);
        /*System.out.println("Hit 0: " + hits.id(0) + " Score: " + hits.score(0) + " String: " + hits.doc(0).toString());
          System.out.println("Explain: " + knownSearcher.explain(query, hits.id(0)));
          System.out.println("Hit 1: " + hits.id(1) + " Score: " + hits.score(1) + " String: " + hits.doc(1).toString());
          System.out.println("Explain: " + knownSearcher.explain(query, hits.id(1)));
          System.out.println("Hit 2: " + hits.id(2) + " Score: " + hits.score(2) + " String: " + hits.doc(2).toString());
          System.out.println("Explain: " + knownSearcher.explain(query, hits.id(2)));*/
        Assert.IsTrue(testDoc3.ToString().Equals(hits.Doc(0).ToString()));
        Assert.IsTrue(testDoc4.ToString().Equals(hits.Doc(1).ToString()));
        Assert.IsTrue(testDoc1.ToString().Equals(hits.Doc(2).ToString()));
        TermFreqVector vector2 = knownSearcher.reader.GetTermFreqVector(hits.Id(1), "Field");
        Assert.IsTrue(vector2 != null);
        //System.out.println("Vector: " + vector);
        System.String[] terms = vector2.GetTerms();
        int[] freqs2 = vector2.GetTermFrequencies();
        Assert.IsTrue(terms != null && terms.Length == 10);
        for (int i = 0; i < terms.Length; i++)
        {
            System.String term = terms[i];
            //System.out.println("Term: " + term);
            int freq = freqs2[i];
            Assert.IsTrue(test4.IndexOf(term) != -1);
            // check for presence before unboxing, so a missing term fails the assert
            // instead of throwing a NullReferenceException on the cast
            System.Object tmpFreqInt = test4Map[term];
            Assert.IsTrue(tmpFreqInt != null);
            System.Int32 freqInt = (System.Int32)tmpFreqInt;
            Assert.IsTrue(freqInt == freq);
        }
        knownSearcher.Close();
    }
    catch (System.IO.IOException e)
    {
        System.Console.Error.WriteLine(e.StackTrace);
        Assert.IsTrue(false);
    }
}
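For reference, assuming the test runs with Lucene's DefaultSimilarity, the three factors computed in the term loop above reduce to the following (hedged; check the Similarity docs for your version):

// Tf(freq)              = (float)Math.Sqrt(freq)
// Idf(docFreq, numDocs) = (float)(Math.Log(numDocs / (double)(docFreq + 1)) + 1.0)
// LengthNorm(field, n)  = (float)(1.0 / Math.Sqrt(n))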
public virtual void TestEmptyIndex()
{
    // creating two directories for indices
    Directory indexStoreA = new RAMDirectory();
    Directory indexStoreB = new RAMDirectory();
    // creating a document to store
    Document lDoc = new Document();
    lDoc.Add(Field.Text("fulltext", "Once upon a time....."));
    lDoc.Add(Field.Keyword("id", "doc1"));
    lDoc.Add(Field.Keyword("handle", "1"));
    // creating a second document to store
    Document lDoc2 = new Document();
    lDoc2.Add(Field.Text("fulltext", "in a galaxy far far away....."));
    lDoc2.Add(Field.Keyword("id", "doc2"));
    lDoc2.Add(Field.Keyword("handle", "1"));
    // creating a third document to store
    Document lDoc3 = new Document();
    lDoc3.Add(Field.Text("fulltext", "a bizarre bug manifested itself...."));
    lDoc3.Add(Field.Keyword("id", "doc3"));
    lDoc3.Add(Field.Keyword("handle", "1"));
    // creating an index writer for the first index
    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true);
    // creating an index writer for the second index, but writing nothing
    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true);

    //--------------------------------------------------------------------
    // scenario 1
    //--------------------------------------------------------------------
    // writing the documents to the first index
    writerA.AddDocument(lDoc);
    writerA.AddDocument(lDoc2);
    writerA.AddDocument(lDoc3);
    writerA.Optimize();
    writerA.Close();
    // closing the second index
    writerB.Close();
    // creating the query
    Query query = Lucene.Net.QueryParsers.QueryParser.Parse("handle:1", "fulltext", new StandardAnalyzer());
    // building the searchables
    Searcher[] searchers = new Searcher[2];
    // VITAL STEP: adding the searcher for the empty index first, before the searcher for the populated index
    searchers[0] = new IndexSearcher(indexStoreB);
    searchers[1] = new IndexSearcher(indexStoreA);
    // creating the multiSearcher
    Searcher mSearcher = GetMultiSearcherInstance(searchers);
    // performing the search
    Hits hits = mSearcher.Search(query);
    Assert.AreEqual(3, hits.Length());
    try
    {
        // iterating over the hit documents
        for (int i = 0; i < hits.Length(); i++)
        {
            Document d = hits.Doc(i);
        }
    }
    catch (System.IndexOutOfRangeException e)
    {
        Assert.Fail("IndexOutOfRangeException thrown: " + e.Message);
        System.Console.Error.WriteLine(e.Source);
    }
    finally
    {
        mSearcher.Close();
    }

    //--------------------------------------------------------------------
    // scenario 2
    //--------------------------------------------------------------------
    // adding one document to the empty index
    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false);
    writerB.AddDocument(lDoc);
    writerB.Optimize();
    writerB.Close();
    // building the searchables
    Searcher[] searchers2 = new Searcher[2];
    // VITAL STEP: adding the searcher for the empty index first, before the searcher for the populated index
    searchers2[0] = new IndexSearcher(indexStoreB);
    searchers2[1] = new IndexSearcher(indexStoreA);
    // creating the multiSearcher
    Searcher mSearcher2 = GetMultiSearcherInstance(searchers2);
    // performing the same search
    Hits hits2 = mSearcher2.Search(query);
    Assert.AreEqual(4, hits2.Length());
    try
    {
        // iterating over the hit documents
        for (int i = 0; i < hits2.Length(); i++)
        {
            // no exception should happen at this point
            Document d = hits2.Doc(i);
        }
    }
    catch (System.Exception e)
    {
        Assert.Fail("Exception thrown: " + e.Message);
        System.Console.Error.WriteLine(e.Source);
    }
    finally
    {
        mSearcher2.Close();
    }

    //--------------------------------------------------------------------
    // scenario 3
    //--------------------------------------------------------------------
    // deleting the document just added; this will cause a different exception to take place
    Term term = new Term("id", "doc1");
    IndexReader readerB = IndexReader.Open(indexStoreB);
    readerB.Delete(term);
    readerB.Close();
    // optimizing the index with the writer
    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false);
    writerB.Optimize();
    writerB.Close();
    // building the searchables
    Searcher[] searchers3 = new Searcher[2];
    searchers3[0] = new IndexSearcher(indexStoreB);
    searchers3[1] = new IndexSearcher(indexStoreA);
    // creating the multiSearcher
    Searcher mSearcher3 = GetMultiSearcherInstance(searchers3);
    // performing the same search
    Hits hits3 = mSearcher3.Search(query);
    Assert.AreEqual(3, hits3.Length());
    try
    {
        // iterating over the hit documents
        for (int i = 0; i < hits3.Length(); i++)
        {
            Document d = hits3.Doc(i);
        }
    }
    catch (System.IO.IOException e)
    {
        Assert.Fail("IOException thrown: " + e.Message);
        System.Console.Error.WriteLine(e.Source);
    }
    finally
    {
        mSearcher3.Close();
    }
}
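The three scenarios above hinge on MultiSearcher remapping each sub-index's document ids into one global id space. A hedged sketch of inspecting that mapping directly; SubSearcher and SubDoc mirror the Java MultiSearcher methods, and the Lucene.Net casing is assumed:

MultiSearcher ms = (MultiSearcher)mSearcher3;
for (int i = 0; i < hits3.Length(); i++)
{
    int globalId = hits3.Id(i);
    int searcherIndex = ms.SubSearcher(globalId); // which entry of searchers3 produced the hit
    int localDoc = ms.SubDoc(globalId);           // the document id within that sub-index
}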