public void Demo_Optimize_Test()
{
    // Runs three full index/optimize cycles against the SQL Server backed directory
    // to verify that Optimize() works repeatedly over the same store.
    for (int i = 0; i < 3; i++)
    {
        // FIX: the directory was never disposed; using guarantees cleanup each cycle.
        using (var directory = new SqlServerDirectory(Connection, new Options()))
        {
            var indexWriter = new IndexWriter(
                directory,
                new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
                !IndexReader.IndexExists(directory), // create only when no index exists yet
                new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
            try
            {
                indexWriter.SetMergeScheduler(new ConcurrentMergeScheduler());
                indexWriter.SetMaxBufferedDocs(1000);
                for (int iDoc = 0; iDoc < 1000 * 10; iDoc++)
                {
                    Document doc = new Document();
                    doc.Add(new Field("id", DateTime.Now.ToFileTimeUtc().ToString(), Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
                    doc.Add(new Field("Title", "dog " + " microsoft rules", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                    doc.Add(new Field("Body", "dog " + " microsoft rules", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                    indexWriter.AddDocument(doc);
                }
                indexWriter.Flush(true, true, true);
                indexWriter.Optimize(true);
            }
            finally
            {
                // FIX: dispose in a finally so the write lock is released even if indexing throws.
                indexWriter.Dispose(true);
            }
            using (var searcher = new IndexSearcher(directory))
            {
                Console.WriteLine("Number of docs: {0}", searcher.IndexReader.NumDocs());
                SearchForPhrase(searcher, "microsoft", 999);
            }
        }
    }
}
public void Test_Lock_Is_Released()
{
    // Directory configured with a short lock timeout so an abandoned lock expires quickly.
    var directory = new SqlServerDirectory(Connection, new Options() { LockTimeoutInSeconds = 3 });
    // Deliberately leak this writer: it holds the write lock, which must be reclaimed
    // via the timeout rather than an explicit release.
    new IndexWriter(directory,
        new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
        !IndexReader.IndexExists(directory),
        new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
    // Keep retrying until the stale lock times out and a second writer can be opened.
    IndexWriter secondWriter = null;
    do
    {
        try
        {
            secondWriter = new IndexWriter(directory,
                new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
                !IndexReader.IndexExists(directory),
                new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
        }
        catch (LockObtainFailedException)
        {
            Console.WriteLine("Lock is taken, waiting for timeout...{0}", DateTime.Now);
            Thread.Sleep(1000);
        }
    } while (secondWriter == null);
}
public virtual void TestDemo_Renamed()
{
    var analyzer = new StandardAnalyzer(Version.LUCENE_30);
    // Store the index in the SQL Server backed directory.
    using (var directory = new SqlServerDirectory(Connection, new Options()))
    {
        // Build a one-document index.
        using (var writer = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000)))
        {
            var document = new Document();
            document.Add(new Field("fieldname", "This is the text to be indexed.", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(document);
        }
        // Query it back with a read-only searcher.
        using (var searcher = new IndexSearcher(directory, true))
        {
            // Parse a simple query that searches for "text".
            var parser = new QueryParser(Version.LUCENE_30, "fieldname", analyzer);
            var query = parser.Parse("text");
            var hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);
            // Every hit must carry the stored field verbatim.
            foreach (var hit in hits)
            {
                var stored = searcher.Doc(hit.Doc);
                Assert.AreEqual(stored.Get("fieldname"), "This is the text to be indexed.");
            }
        }
    }
}
// Adds a single document with the given body text to the directory.
// 'create' is forwarded to the IndexWriter: true recreates the index, false appends.
private void AddDoc(SqlServerDirectory ramDir1, System.String s, bool create)
{
    // FIX: the original called iw.Close() without try/finally, so an exception from
    // AddDocument leaked the writer and its write lock; using guarantees disposal.
    using (var iw = new IndexWriter(ramDir1, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED))
    {
        var doc = new Document();
        doc.Add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
        iw.AddDocument(doc);
    }
}
public override void TestInitialize()
{
    base.TestInitialize();
    // Fresh directory and a single prepared document for each test.
    _dir = new SqlServerDirectory(Connection, new Options());
    _testDoc = new Document();
    DocHelper.SetupDoc(_testDoc);
    // Write the doc and open a read-only SegmentReader over the resulting segment.
    var segmentInfo = DocHelper.WriteDoc(_dir, _testDoc);
    _reader = SegmentReader.Get(true, segmentInfo, 1);
}
public void Demo_Usage_Test()
{
    // FIX: directory was never disposed; wrap the whole scenario in using.
    using (var directory = new SqlServerDirectory(Connection, new Options()))
    {
        for (int outer = 0; outer < 10; outer++)
        {
            IndexWriter indexWriter = null;
            while (indexWriter == null)
            {
                try
                {
                    indexWriter = new IndexWriter(directory,
                        new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
                        !IndexReader.IndexExists(directory),
                        new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
                }
                catch (LockObtainFailedException)
                {
                    // Another process holds the write lock; retry until it is released.
                    Console.WriteLine("Lock is taken, waiting for timeout...");
                    Thread.Sleep(1000);
                }
            } // FIX: removed stray empty statement ';' that followed this loop.
            Console.WriteLine("IndexWriter lock obtained, this process has exclusive write access to index");
            indexWriter.SetRAMBufferSizeMB(100.0);
            indexWriter.SetMergeScheduler(new SerialMergeScheduler());
            indexWriter.SetMaxBufferedDocs(500);
            for (int iDoc = 0; iDoc < 1000; iDoc++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", DateTime.Now.ToFileTimeUtc().ToString(), Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
                doc.Add(new Field("Title", "dog " + GeneratePhrase(50), Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                doc.Add(new Field("Body", "dog " + GeneratePhrase(50), Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                indexWriter.AddDocument(doc);
            }
            Console.WriteLine("Total docs is {0}", indexWriter.NumDocs());
            Console.Write("Flushing and disposing writer...");
            indexWriter.Flush(true, true, true);
            indexWriter.Commit();
            indexWriter.Dispose();
        }
        // FIX: searcher was never disposed.
        using (var searcher = new IndexSearcher(directory))
        {
            Console.WriteLine("Number of docs: {0}", searcher.IndexReader.NumDocs());
            SearchForPhrase(searcher, "microsoft", 2);
        }
    }
}
public override void TestInitialize()
{
    base.TestInitialize();
    Dir = new SqlServerDirectory(Connection, new Options());
    // Prepare and persist two documents, then load the segment catalog they produced.
    _doc1 = new Document();
    DocHelper.SetupDoc(_doc1);
    _doc2 = new Document();
    DocHelper.SetupDoc(_doc2);
    DocHelper.WriteDoc(Dir, _doc1);
    DocHelper.WriteDoc(Dir, _doc2);
    Sis = new SegmentInfos();
    Sis.Read(Dir);
}
public virtual void TestMultiTermDocs()
{
    // Provision three isolated schemas, each holding a one-document index.
    SqlServerDirectory.ProvisionDatabase(Connection, "test1", true);
    SqlServerDirectory.ProvisionDatabase(Connection, "test2", true);
    SqlServerDirectory.ProvisionDatabase(Connection, "test3", true);
    var dir1 = new SqlServerDirectory(Connection, new Options() { SchemaName = "test1" });
    AddDoc(dir1, "test foo", true);
    var dir2 = new SqlServerDirectory(Connection, new Options() { SchemaName = "test2" });
    AddDoc(dir2, "test blah", true);
    var dir3 = new SqlServerDirectory(Connection, new Options() { SchemaName = "test3" });
    AddDoc(dir3, "test wow", true);
    // Two composite readers over overlapping (but separately opened) sub-readers.
    var pairReaders = new[] { IndexReader.Open(dir1, false), IndexReader.Open(dir3, false) };
    var tripleReaders = new[] { IndexReader.Open(dir1, false), IndexReader.Open(dir2, false), IndexReader.Open(dir3, false) };
    var twoWayReader = new MultiReader(pairReaders);
    var threeWayReader = new MultiReader(tripleReaders);
    // Mix a TermDocs from one composite reader with a TermEnum from the other.
    TermDocs termDocs = twoWayReader.TermDocs();
    TermEnum termEnum = threeWayReader.Terms(new Term("body", "wow"));
    termDocs.Seek(termEnum);
    // This should blow up if the implementation forgets to check that the TermEnum
    // comes from the same reader as the TermDocs.
    int docSum = 0;
    while (termDocs.Next())
    {
        docSum += termDocs.Doc;
    }
    termDocs.Close();
    termEnum.Close();
    // Dummy assert: confirms some docs matched and keeps the loop from being optimized out.
    Assert.IsTrue(docSum > 0);
}
// Builds and returns a new optimized index containing one document per entry in
// 'contents', each stored/analyzed under 'field'. Caller owns the returned directory.
private SqlServerDirectory GetIndexStore(System.String field, System.String[] contents)
{
    var indexStore = new SqlServerDirectory(Connection, new Options());
    // FIX: the original called writer.Close() without try/finally, so an exception
    // from AddDocument/Optimize leaked the writer and its write lock.
    using (var writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED))
    {
        foreach (var content in contents)
        {
            var doc = new Document();
            doc.Add(new Field(field, content, Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
        }
        writer.Optimize();
    }
    return indexStore;
}
public override void TestInitialize()
{
    base.TestInitialize();
    Directory = new SqlServerDirectory(Connection, new Options());
    // FIX: writer.Close() was not exception-safe — a throw from AddDocument would
    // leave the index write-locked for subsequent tests; using guarantees disposal.
    using (var writer = new IndexWriter(Directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED))
    {
        // One analyzed, stored document per test value.
        for (int i = 0; i < Values.Length; i++)
        {
            var doc = new Document();
            doc.Add(new Field(FIELD, Values[i], Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
        }
    }
    IndexSearcher = new IndexSearcher(Directory, false);
    indexReader = IndexSearcher.IndexReader;
}
// Full-text search over the [search] Lucene index; returns the matching projects view.
public IActionResult Search(string querry)
{
    List<int> Final;
    // SECURITY: credentials are hard-coded in this connection string — move them to
    // configuration / secret storage and rotate the exposed password.
    using (var connection = new SqlConnection(@"Data Source=SQL6003.SmarterASP.NET;Initial Catalog=DB_A296DE_shchepko;User Id=DB_A296DE_shchepko_admin;Password=15879569pg;MultipleActiveResultSets=True"))
    {
        connection.Open();
        // FIX: directory and searcher were never disposed; also removed the unused
        // 'exists' local (!IndexReader.IndexExists result was never read).
        using (var directory = new SqlServerDirectory(connection, new Options() { SchemaName = "[search]" }))
        {
            var parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, "Title", new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30));
            var KeywordsQuery = parser.Parse(querry);
            using (var searcher = new IndexSearcher(directory))
            {
                var query = new BooleanQuery { { KeywordsQuery, Occur.MUST } };
                var hits = searcher.Search(query, 20);
                List<int> projectsId = new List<int>();
                foreach (var project in hits.ScoreDocs)
                {
                    var doc = searcher.Doc(project.Doc);
                    projectsId.Add(int.Parse(doc.Get("Id")));
                }
                Final = projectsId.Distinct().ToList();
            }
        }
    }
    // Hydrate the matched projects (with user and tags) from the database.
    List<Project> FinalProjects = new List<Project>();
    foreach (var id in Final)
    {
        FinalProjects.AddRange(db.Projects.Where(f => f.Id == id).Include(p => p.User).Include("ProjectTags.Tag").ToList());
    }
    return View("~/Views/Project/Index.cshtml", FinalProjects);
}
// Appends the given items to the [search] full-text index, provisioning the schema
// on first use.
public static void AddSearchIndex(List<FullTextSearchModelcs> Adding)
{
    // SECURITY: connection string is hard-coded; move it to configuration.
    using (var connection = new SqlConnection(@"Server=DESKTOP-S0G3F8R\SQLEXPRESS;Database=KraudFan2_0;Trusted_Connection=True;MultipleActiveResultSets=true"))
    {
        connection.Open();
        var directory = new SqlServerDirectory(connection, new Options() { SchemaName = "[search]" });
        var exists = IndexReader.IndexExists(directory);
        if (!exists)
        {
            SqlServerDirectory.ProvisionDatabase(connection, schemaName: "[search]", dropExisting: true);
        }
        // BUG FIX: the 'create' flag must be true only when NO index exists yet.
        // The original passed 'exists' directly, which wiped an existing index on
        // every call (create=true) and opened a missing one in append mode.
        // Compare IndexAllData, which passes !IndexReader.IndexExists(...).
        using (var indexWriter = new IndexWriter(directory,
            new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
            !exists,
            new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH)))
        {
            foreach (var item in Adding)
            {
                var doc = new Document();
                doc.Add(new NumericField("Id", Field.Store.YES, true).SetIntValue(item.ProjectId));
                // Placeholder title keeps content-less items indexable.
                doc.Add(new Field("Title", item.Content ?? "Govnina", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                indexWriter.AddDocument(doc);
            }
            indexWriter.Flush(true, true, true);
        }
    }
}
// Rebuilds the [search] full-text index from scratch over all transformed data.
public IActionResult IndexAllData()
{
    // SECURITY: credentials are hard-coded in this connection string — move them to
    // configuration / secret storage and rotate the exposed password.
    using (var connection = new SqlConnection(@"Data Source=SQL6003.SmarterASP.NET;Initial Catalog=DB_A296DE_shchepko;User Id=DB_A296DE_shchepko_admin;Password=15879569pg;MultipleActiveResultSets=True"))
    {
        connection.Open();
        // Drop and re-provision the schema, so the index is always rebuilt from empty.
        SqlServerDirectory.ProvisionDatabase(connection, schemaName: "[search]", dropExisting: true);
        var directory = new SqlServerDirectory(connection, new Options() { SchemaName = "[search]" });
        // After dropExisting the index never exists, so this opens in create mode.
        var create = !IndexReader.IndexExists(directory);
        // FIX: Dispose was not exception-safe; using releases the write lock even if
        // AddDocument throws mid-rebuild.
        using (var indexWriter = new IndexWriter(directory,
            new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
            create,
            new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH)))
        {
            foreach (var item in TransformAll())
            {
                var doc = new Document();
                doc.Add(new NumericField("Id", Field.Store.YES, true).SetIntValue(item.ProjectId));
                // Placeholder title keeps content-less items indexable.
                doc.Add(new Field("Title", item.Content ?? "Govnina", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                indexWriter.AddDocument(doc);
            }
            indexWriter.Flush(true, true, true);
            indexWriter.Commit();
        }
    }
    return RedirectToAction("Index", "Home");
}
// Verifies CachingSpanFilter behavior across deletions under both DeletesMode.IGNORE
// and DeletesMode.RECACHE, using near-real-time readers from the writer.
// NOTE(review): assertions depend on the exact add/delete/refresh ordering — code unchanged.
public void TestEnforceDeletions() {
    var dir = new SqlServerDirectory(Connection, new Options());
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    // NRT reader obtained directly from the writer; refreshed after each mutation below.
    IndexReader reader = writer.GetReader();
    IndexSearcher searcher = new IndexSearcher(reader);
    // add a doc, refresh the reader, and check that its there
    Document doc = new Document();
    doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);
    TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
    Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");
    SpanFilter startFilter = new SpanQueryFilter(new SpanTermQuery(new Term("id", "1")));
    // ignore deletions
    CachingSpanFilter filter = new CachingSpanFilter(startFilter, CachingWrapperFilter.DeletesMode.IGNORE);
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    ConstantScoreQuery constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
    // Under IGNORE the cached filter still reports the deleted doc when used alone.
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
    // force cache to regenerate:
    filter = new CachingSpanFilter(startFilter, CachingWrapperFilter.DeletesMode.RECACHE);
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
    // make sure we get a cache hit when we reopen readers
    // that had no new deletions
    IndexReader newReader = RefreshReader(reader);
    Assert.IsTrue(reader != newReader);
    reader = newReader;
    searcher = new IndexSearcher(reader);
    int missCount = filter.missCount;
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
    // Unchanged missCount proves the filter was served from cache for the reopened reader.
    Assert.AreEqual(missCount, filter.missCount);
    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
    // RECACHE mode respects the deletion even for the bare filter.
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
}
public virtual void TestInitialize()
{
    // Open a fresh connection from config and rebuild the default schema before each test.
    var connectionString = System.Configuration.ConfigurationManager.ConnectionStrings["databaseForTests"].ConnectionString;
    Connection = new SqlConnection(connectionString);
    Connection.Open();
    SqlServerDirectory.ProvisionDatabase(Connection, schemaName: new Options().SchemaName, dropExisting: true);
}
// Long-running demo: repeatedly bulk-indexes batches of random documents into the
// [search] schema, then searches in an endless loop.
static void Do()
{
    using (var connection = new SqlConnection(@"MultipleActiveResultSets=True;Data Source=(localdb)\v11.0;Initial Catalog=TestLucene;Integrated Security=True;Connect Timeout=30;Encrypt=False;TrustServerCertificate=False"))
    {
        connection.Open();
        var directory = new SqlServerDirectory(connection, new Options() { SchemaName = "[search]" });
        for (int outer = 0; outer < 1000; outer++)
        {
            IndexWriter indexWriter = null;
            while (indexWriter == null)
            {
                try
                {
                    indexWriter = new IndexWriter(directory,
                        new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
                        !IndexReader.IndexExists(directory),
                        new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
                }
                catch (LockObtainFailedException)
                {
                    // Another process holds the write lock; retry until it is released.
                    Console.WriteLine("Lock is taken, waiting for timeout...");
                    Thread.Sleep(1000);
                }
            } // FIX: removed stray empty statement ';' that followed this loop.
            Console.WriteLine("IndexWriter lock obtained, this process has exclusive write access to index");
            indexWriter.SetRAMBufferSizeMB(100.0);
            indexWriter.SetInfoStream(new StreamWriter(Console.OpenStandardOutput()));
            indexWriter.UseCompoundFile = false;
            for (int iDoc = 0; iDoc < 1000; iDoc++)
            {
                if (iDoc % 10 == 0)
                    Console.WriteLine(iDoc);
                Document doc = new Document();
                doc.Add(new Field("id", DateTime.Now.ToFileTimeUtc().ToString(), Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
                doc.Add(new Field("Title", "dog " + GeneratePhrase(50), Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                doc.Add(new Field("Body", "dog " + GeneratePhrase(50), Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                indexWriter.AddDocument(doc);
            }
            Console.WriteLine("Total docs is {0}", indexWriter.NumDocs());
            Console.Write("Flushing and disposing writer...");
            indexWriter.Flush(true, true, true);
            indexWriter.Dispose();
        }
        IndexSearcher searcher;
        using (new AutoStopWatch("Creating searcher"))
        {
            searcher = new IndexSearcher(directory);
        }
        using (new AutoStopWatch("Count"))
            Console.WriteLine("Number of docs: {0}", searcher.IndexReader.NumDocs());
        // Intentional endless loop: the demo searches once per second until killed.
        while (true)
        {
            SearchForPhrase(searcher, "microsoft");
            Thread.Sleep(1000);
        }
    }
}
// Demonstrates that an abandoned write lock is reclaimed once its timeout elapses.
static void LockCanBeReleased()
{
    using (var connection = new SqlConnection(@"MultipleActiveResultSets=True;Data Source=(localdb)\v11.0;Initial Catalog=TestLucene;Integrated Security=True;Connect Timeout=30;Encrypt=False;TrustServerCertificate=False"))
    {
        connection.Open();
        var directory = new SqlServerDirectory(connection, new Options() { SchemaName = "[search]", LockTimeoutInMinutes = 1 });
        // Deliberately leak this writer so its lock can only expire via the timeout.
        new IndexWriter(directory,
            new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
            !IndexReader.IndexExists(directory),
            new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
        // Keep retrying until the stale lock times out and a second writer succeeds.
        IndexWriter secondWriter = null;
        do
        {
            try
            {
                secondWriter = new IndexWriter(directory,
                    new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30),
                    !IndexReader.IndexExists(directory),
                    new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
            }
            catch (LockObtainFailedException)
            {
                Console.WriteLine("Lock is taken, waiting for timeout...{0}", DateTime.Now);
                Thread.Sleep(1000);
            }
        } while (secondWriter == null);
    }
}
// Exercises leading-wildcard parsing: each query group must parse to the expected
// query type and match the expected document count.
// NOTE(review): assertion matrix is tightly coupled to the fixture strings — code unchanged.
public virtual void TestParsingAndSearching() {
    System.String field = "content";
    bool dbg = false; // flip to true for per-query diagnostics on stdout
    QueryParser qp = new QueryParser(Lucene.Net.Util.Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer());
    qp.AllowLeadingWildcard = true;
    // Fixture docs: one per row of the matchOneDoc* tables below.
    System.String[] docs = new System.String[] { "\\ abcdefg1", "\\79 hijklmn1", "\\\\ opqrstu1" };
    // queries that should find all docs
    System.String[] matchAll = new System.String[] { "*", "*1", "**1", "*?", "*?1", "?*1", "**", "***", "\\\\*" };
    // queries that should find no docs
    System.String[] matchNone = new System.String[] { "a*h", "a?h", "*a*h", "?a", "a?" };
    // queries that should be parsed to prefix queries
    System.String[][] matchOneDocPrefix = new System.String[][] { new System.String[] { "a*", "ab*", "abc*" }, new System.String[] { "h*", "hi*", "hij*", "\\\\7*" }, new System.String[] { "o*", "op*", "opq*", "\\\\\\\\*" } };
    // queries that should be parsed to wildcard queries
    System.String[][] matchOneDocWild = new System.String[][] { new System.String[] { "*a*", "*ab*", "*abc**", "ab*e*", "*g?", "*f?1", "abc**" }, new System.String[] { "*h*", "*hi*", "*hij**", "hi*k*", "*n?", "*m?1", "hij**" }, new System.String[] { "*o*", "*op*", "*opq**", "op*q*", "*u?", "*t?1", "opq**" } };
    // prepare the index
    var dir = new SqlServerDirectory(Connection, new Options());
    IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < docs.Length; i++)
    {
        Document doc = new Document();
        doc.Add(new Field(field, docs[i], Field.Store.NO, Field.Index.ANALYZED));
        iw.AddDocument(doc);
    }
    iw.Close();
    IndexSearcher searcher = new IndexSearcher(dir, true);
    // test queries that must find all
    for (int i = 0; i < matchAll.Length; i++)
    {
        System.String qtxt = matchAll[i];
        Query q = qp.Parse(qtxt);
        if (dbg)
        {
            System.Console.Out.WriteLine("matchAll: qtxt=" + qtxt + " q=" + q + " " + q.GetType().FullName);
        }
        ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
        Assert.AreEqual(docs.Length, hits.Length);
    }
    // test queries that must find none
    for (int i = 0; i < matchNone.Length; i++)
    {
        System.String qtxt = matchNone[i];
        Query q = qp.Parse(qtxt);
        if (dbg)
        {
            System.Console.Out.WriteLine("matchNone: qtxt=" + qtxt + " q=" + q + " " + q.GetType().FullName);
        }
        ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
        Assert.AreEqual(0, hits.Length);
    }
    // test queries that must be prefix queries and must find only one doc
    for (int i = 0; i < matchOneDocPrefix.Length; i++)
    {
        for (int j = 0; j < matchOneDocPrefix[i].Length; j++)
        {
            System.String qtxt = matchOneDocPrefix[i][j];
            Query q = qp.Parse(qtxt);
            if (dbg)
            {
                System.Console.Out.WriteLine("match 1 prefix: doc=" + docs[i] + " qtxt=" + qtxt + " q=" + q + " " + q.GetType().FullName);
            }
            // Prefix-shaped patterns must be optimized into PrefixQuery, and the hit
            // must be the doc at the same row index as the query group.
            Assert.AreEqual(typeof(PrefixQuery), q.GetType());
            ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);
            Assert.AreEqual(i, hits[0].Doc);
        }
    }
    // test queries that must be wildcard queries and must find only one doc
    // NOTE(review): outer bound reuses matchOneDocPrefix.Length while indexing
    // matchOneDocWild — works because both tables have the same row count, but
    // presumably matchOneDocWild.Length was intended; confirm before changing.
    for (int i = 0; i < matchOneDocPrefix.Length; i++)
    {
        for (int j = 0; j < matchOneDocWild[i].Length; j++)
        {
            System.String qtxt = matchOneDocWild[i][j];
            Query q = qp.Parse(qtxt);
            if (dbg)
            {
                System.Console.Out.WriteLine("match 1 wild: doc=" + docs[i] + " qtxt=" + qtxt + " q=" + q + " " + q.GetType().FullName);
            }
            Assert.AreEqual(typeof(WildcardQuery), q.GetType());
            ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);
            Assert.AreEqual(i, hits[0].Doc);
        }
    }
    searcher.Close();
}