/// <summary>
/// Iterates over search hits while deleting documents from the underlying reader,
/// verifying that <c>Hits</c> stays consistent.
/// </summary>
/// <param name="k">Delete every k-th hit; a negative value switches to a fixed set of
/// "intermittent" deletion points (hits 50, 250, 950).</param>
/// <param name="deleteInFront">When true, documents ahead of the iteration point are
/// deleted up front, which is expected to make hit retrieval fail.</param>
private void DoTestSearchHitsDeleteEvery(int k, bool deleteInFront)
{
    bool intermittent = k < 0;
    Log("Test search hits with " + (intermittent ? "intermittent deletions." : "deletions of every " + k + " hit."));
    IndexSearcher searcher = new IndexSearcher(directory);
    IndexReader reader = searcher.GetIndexReader();
    Query q = new TermQuery(new Term(TEXT_FIELD, "text")); // matching all docs
    Hits hits = searcher.Search(q);
    Log("Got " + hits.Length() + " results");
    Assert.AreEqual(N, hits.Length(), "must match all " + N + " docs, not only " + hits.Length() + " docs!");
    if (deleteInFront)
    {
        Log("deleting hits that was not yet retrieved!");
        reader.DeleteDocument(reader.MaxDoc() - 1);
        reader.DeleteDocument(reader.MaxDoc() - 2);
        reader.DeleteDocument(reader.MaxDoc() - 3);
    }
    try
    {
        for (int i = 0; i < hits.Length(); i++)
        {
            int id = hits.Id(i);
            Assert.AreEqual(i, id, "Hit " + i + " has doc id " + id + " instead of " + i);
            if ((intermittent && (i == 50 || i == 250 || i == 950)) ||
                (!intermittent && (k < 2 || (i > 0 && i % k == 0))))
            {
                Document doc = hits.Doc(id);
                Log("Deleting hit " + i + " - doc " + doc + " with id " + id);
                reader.DeleteDocument(id);
            }
            if (intermittent)
            {
                // check internal behavior of Hits (go 50 ahead of getMoreDocs points
                // because the deletions cause to use more of the available hits)
                if (i == 150 || i == 450 || i == 1650)
                {
                    Assert.IsTrue(hits.debugCheckedForDeletions, "Hit " + i + ": hits should have checked for deletions in last call to getMoreDocs()");
                }
                else if (i == 50 || i == 250 || i == 850)
                {
                    Assert.IsFalse(hits.debugCheckedForDeletions, "Hit " + i + ": hits should have NOT checked for deletions in last call to getMoreDocs()");
                }
            }
        }
    }
    catch (AssertionException)
    {
        // Assertion failures from the checks above must surface as-is; the catch-all
        // below would otherwise swallow them and report a misleading message.
        throw;
    }
    catch (System.Exception e)
    {
        // this is the only valid exception, and only when deleting in front.
        Assert.IsTrue(deleteInFront, e.Message + " not expected unless deleting hits that were not yet seen!");
    }
    searcher.Close();
}
/// <summary>
/// Builds a small index on a <c>MockFSDirectory</c>, then interleaves deletions and
/// searches with calls to <c>tweakBufferSizes()</c> to verify that changing input
/// buffer sizes mid-stream does not corrupt reads.
/// </summary>
public virtual void TestSetBufferSize()
{
    System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testSetBufferSize"));
    MockFSDirectory dir = new MockFSDirectory(indexDir, NewRandom());
    try
    {
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
        writer.SetUseCompoundFile(false);
        for (int i = 0; i < 37; i++)
        {
            Document doc = new Document();
            doc.Add(new Field("content", "aaa bbb ccc ddd" + i, Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("id", "" + i, Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
        }
        writer.Close();

        dir.allIndexInputs.Clear();

        IndexReader reader = IndexReader.Open(dir);
        Term aaa = new Term("content", "aaa");
        Term bbb = new Term("content", "bbb");
        Term ccc = new Term("content", "ccc");
        Assert.AreEqual(37, reader.DocFreq(ccc));
        reader.DeleteDocument(0);
        Assert.AreEqual(37, reader.DocFreq(aaa));
        dir.tweakBufferSizes();
        reader.DeleteDocument(4);
        // NOTE: expected value first — was previously AreEqual(reader.DocFreq(bbb), 37),
        // which inverted expected/actual and produced a misleading failure message.
        Assert.AreEqual(37, reader.DocFreq(bbb));
        dir.tweakBufferSizes();

        IndexSearcher searcher = new IndexSearcher(reader);
        ScoreDoc[] hits = searcher.Search(new TermQuery(bbb), null, 1000).scoreDocs;
        dir.tweakBufferSizes();
        Assert.AreEqual(35, hits.Length);
        dir.tweakBufferSizes();
        hits = searcher.Search(new TermQuery(new Term("id", "33")), null, 1000).scoreDocs;
        dir.tweakBufferSizes();
        Assert.AreEqual(1, hits.Length);
        hits = searcher.Search(new TermQuery(aaa), null, 1000).scoreDocs;
        dir.tweakBufferSizes();
        Assert.AreEqual(35, hits.Length);
        searcher.Close();
        reader.Close();
    }
    finally
    {
        _TestUtil.RmDir(indexDir);
    }
}
/// <summary>
/// Verifies that a binary stored field and a plain stored string field round-trip
/// through a RAM index unchanged, and that a binary field with Store.NO is rejected.
/// </summary>
public virtual void TestBinaryFieldInIndex()
{
    IFieldable binaryFldStored = new Field("binaryStored", System.Text.Encoding.UTF8.GetBytes(binaryValStored), Field.Store.YES);
    IFieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);

    // binary fields with store off are not allowed
    Assert.Throws<ArgumentException>(
        () => new Field("fail", System.Text.Encoding.UTF8.GetBytes(binaryValStored), Field.Store.NO));

    Document doc = new Document();
    doc.Add(binaryFldStored);
    doc.Add(stringFldStored);

    // test for field count
    Assert.AreEqual(2, doc.fields_ForNUnit.Count);

    // add the doc to a ram index
    MockRAMDirectory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.AddDocument(doc);
    writer.Close();

    // open a reader and fetch the document
    IndexReader reader = IndexReader.Open(dir, false);
    Document docFromReader = reader.Document(0);
    Assert.IsNotNull(docFromReader);

    // fetch the binary stored field and compare its content with the original one
    System.String binaryFldStoredTest = System.Text.Encoding.UTF8.GetString(docFromReader.GetBinaryValue("binaryStored"));
    Assert.AreEqual(binaryValStored, binaryFldStoredTest);

    // fetch the string field and compare its content with the original one
    System.String stringFldStoredTest = docFromReader.Get("stringStored");
    Assert.AreEqual(binaryValStored, stringFldStoredTest);

    // delete the document from index
    reader.DeleteDocument(0);
    Assert.AreEqual(0, reader.NumDocs());

    reader.Close();
    dir.Close();
}
/// <summary>
/// Verifies that binary stored fields (plain and compressed) and their string
/// counterparts round-trip through a RAM index unchanged, and that a binary field
/// with Store.NO is rejected.
/// </summary>
public virtual void TestBinaryFieldInIndex()
{
    Fieldable binaryFldStored = new Field("binaryStored", System.Text.Encoding.UTF8.GetBytes(binaryValStored), Field.Store.YES);
    Fieldable binaryFldCompressed = new Field("binaryCompressed", System.Text.Encoding.UTF8.GetBytes(binaryValCompressed), Field.Store.COMPRESS);
    Fieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);
    Fieldable stringFldCompressed = new Field("stringCompressed", binaryValCompressed, Field.Store.COMPRESS, Field.Index.NO, Field.TermVector.NO);

    try
    {
        // binary fields with store off are not allowed
        new Field("fail", System.Text.Encoding.UTF8.GetBytes(binaryValCompressed), Field.Store.NO);
        Assert.Fail("expected ArgumentException for a binary field with Store.NO");
    }
    catch (System.ArgumentException)
    {
        // expected — nothing to do (variable removed; it was unused)
    }

    Document doc = new Document();
    doc.Add(binaryFldStored);
    doc.Add(binaryFldCompressed);
    doc.Add(stringFldStored);
    doc.Add(stringFldCompressed);

    // test for field count
    Assert.AreEqual(4, doc.fields_ForNUnit.Count);

    // add the doc to a ram index
    MockRAMDirectory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.AddDocument(doc);
    writer.Close();

    // open a reader and fetch the document
    IndexReader reader = IndexReader.Open(dir);
    Document docFromReader = reader.Document(0);
    Assert.IsTrue(docFromReader != null);

    // fetch the binary stored field and compare its content with the original one
    System.String binaryFldStoredTest = System.Text.Encoding.UTF8.GetString(docFromReader.GetBinaryValue("binaryStored"));
    Assert.IsTrue(binaryFldStoredTest.Equals(binaryValStored));

    // fetch the binary compressed field and compare its content with the original one
    System.String binaryFldCompressedTest = System.Text.Encoding.UTF8.GetString(docFromReader.GetBinaryValue("binaryCompressed"));
    Assert.IsTrue(binaryFldCompressedTest.Equals(binaryValCompressed));

    // fetch the string field and compare its content with the original one
    System.String stringFldStoredTest = docFromReader.Get("stringStored");
    Assert.IsTrue(stringFldStoredTest.Equals(binaryValStored));

    // fetch the compressed string field and compare its content with the original one
    System.String stringFldCompressedTest = docFromReader.Get("stringCompressed");
    Assert.IsTrue(stringFldCompressedTest.Equals(binaryValCompressed));

    // delete the document from index
    reader.DeleteDocument(0);
    Assert.AreEqual(0, reader.NumDocs());

    reader.Close();
    dir.Close();
}