private void VerifyDocFreq() {
    IndexReader reader = IndexReader.Open(dir);
    // create enumeration of all terms
    TermEnum termEnum = reader.Terms();
    // go to the first term (aaa)
    termEnum.Next();
    // assert that term is 'aaa'
    Assert.AreEqual("aaa", termEnum.Term().Text());
    Assert.AreEqual(200, termEnum.DocFreq());
    // go to the second term (bbb)
    termEnum.Next();
    // assert that term is 'bbb'
    Assert.AreEqual("bbb", termEnum.Term().Text());
    Assert.AreEqual(100, termEnum.DocFreq());
    termEnum.Close();

    // create enumeration of terms after term 'aaa', including 'aaa'
    termEnum = reader.Terms(new Term("content", "aaa"));
    // assert that term is 'aaa'
    Assert.AreEqual("aaa", termEnum.Term().Text());
    Assert.AreEqual(200, termEnum.DocFreq());
    // go to term 'bbb'
    termEnum.Next();
    // assert that term is 'bbb'
    Assert.AreEqual("bbb", termEnum.Term().Text());
    Assert.AreEqual(100, termEnum.DocFreq());
    termEnum.Close();
    // also release the reader opened above, so the test does not leak a file handle
    reader.Close();
}
public virtual void TestTerms() {
    try {
        TermEnum terms = reader.Terms();
        Assert.IsTrue(terms != null);
        while (terms.Next()) {
            Term term = terms.Term();
            Assert.IsTrue(term != null);
            //System.out.println("Term: " + term);
            System.String fieldValue = (System.String) DocHelper.nameValues[term.Field()];
            Assert.IsTrue(fieldValue.IndexOf(term.Text()) != -1);
        }

        TermDocs termDocs = reader.TermDocs();
        Assert.IsTrue(termDocs != null);
        termDocs.Seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "Field"));
        Assert.IsTrue(termDocs.Next());

        TermPositions positions = reader.TermPositions();
        Assert.IsTrue(positions != null); // check before seeking, not after
        positions.Seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "Field"));
        Assert.IsTrue(positions.Doc() == 0);
        Assert.IsTrue(positions.NextPosition() >= 0);
    } catch (System.IO.IOException e) {
        System.Console.Error.WriteLine(e.StackTrace);
        Assert.Fail("unexpected IOException: " + e.Message);
    }
}
public override bool Next() {
    if (field == null) {
        return(false);
    }

    bool next = termEnum.Next();
    // still within field?
    if (next && (System.Object) termEnum.Term().Field() == (System.Object) field) {
        return(true); // yes, keep going
    }
    termEnum.Close(); // close old termEnum

    // find the next field, if any. TailMap includes 'field' itself (the
    // iterator-based ports of this method skip the first key explicitly),
    // so take the key *after* it rather than GetKey(0), which would
    // re-enumerate the current field forever.
    var tail = SupportClass.TailMap(Enclosing_Instance.fieldToReader, field);
    field = tail.Count > 1 ? ((System.String) tail.GetKey(1)) : null;
    if (field != null) {
        termEnum = ((IndexReader) Enclosing_Instance.fieldToReader[field]).Terms();
        return(true);
    }
    return(false); // no more fields
}
public virtual void TestTerms() {
    TermEnum terms = reader.Terms();
    Assert.IsTrue(terms != null);
    while (terms.Next()) {
        Term term = terms.Term();
        Assert.IsTrue(term != null);
        //System.out.println("Term: " + term);
        System.String fieldValue = (System.String) DocHelper.nameValues[term.Field()];
        Assert.IsTrue(fieldValue.IndexOf(term.Text()) != -1);
    }

    TermDocs termDocs = reader.TermDocs();
    Assert.IsTrue(termDocs != null);
    termDocs.Seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "field"));
    Assert.IsTrue(termDocs.Next());
    termDocs.Seek(new Term(DocHelper.NO_NORMS_KEY, DocHelper.NO_NORMS_TEXT));
    Assert.IsTrue(termDocs.Next());

    TermPositions positions = reader.TermPositions();
    Assert.IsTrue(positions != null); // check before seeking, not after
    positions.Seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "field"));
    Assert.IsTrue(positions.Doc() == 0);
    Assert.IsTrue(positions.NextPosition() >= 0);
}
public virtual void TestPhrasePrefix() {
    RAMDirectory indexStore = new RAMDirectory();
    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
    Document doc1 = new Document();
    Document doc2 = new Document();
    Document doc3 = new Document();
    Document doc4 = new Document();
    Document doc5 = new Document();
    doc1.Add(new Field("body", "blueberry pie", Field.Store.YES, Field.Index.ANALYZED));
    doc2.Add(new Field("body", "blueberry strudel", Field.Store.YES, Field.Index.ANALYZED));
    doc3.Add(new Field("body", "blueberry pizza", Field.Store.YES, Field.Index.ANALYZED));
    doc4.Add(new Field("body", "blueberry chewing gum", Field.Store.YES, Field.Index.ANALYZED));
    doc5.Add(new Field("body", "piccadilly circus", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc1, null);
    writer.AddDocument(doc2, null);
    writer.AddDocument(doc3, null);
    writer.AddDocument(doc4, null);
    writer.AddDocument(doc5, null);
    writer.Optimize(null);
    writer.Close();

    IndexSearcher searcher = new IndexSearcher(indexStore, true, null);

    //PhrasePrefixQuery query1 = new PhrasePrefixQuery();
    MultiPhraseQuery query1 = new MultiPhraseQuery();
    //PhrasePrefixQuery query2 = new PhrasePrefixQuery();
    MultiPhraseQuery query2 = new MultiPhraseQuery();
    query1.Add(new Term("body", "blueberry"));
    query2.Add(new Term("body", "strawberry"));

    System.Collections.ArrayList termsWithPrefix = new System.Collections.ArrayList();
    IndexReader ir = IndexReader.Open((Directory) indexStore, true, null);

    // this TermEnum gives "piccadilly", "pie" and "pizza".
    System.String prefix = "pi";
    // seek to the prefix itself: Terms(t) positions the enumeration at the
    // first term >= t, so "pi" is the right seek target (not "pi*")
    TermEnum te = ir.Terms(new Term("body", prefix), null);
    do {
        if (te.Term.Text.StartsWith(prefix)) {
            termsWithPrefix.Add(te.Term);
        }
    } while (te.Next(null));

    query1.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
    query2.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));

    ScoreDoc[] result;
    result = searcher.Search(query1, null, 1000, null).ScoreDocs;
    Assert.AreEqual(2, result.Length);

    result = searcher.Search(query2, null, 1000, null).ScoreDocs;
    Assert.AreEqual(0, result.Length);
}
public override bool Next() {
    if (termEnum == null) {
        return(false);
    }

    // another term in this field?
    if (termEnum.Next() && (System.Object) termEnum.Term().Field() == (System.Object) field) {
        return(true); // yes, keep going
    }
    termEnum.Close(); // close old termEnum

    // find the next field with terms, if any
    if (fieldIterator == null) {
        System.Collections.Comparer comparer = System.Collections.Comparer.Default;
        System.Collections.SortedList newList = new System.Collections.SortedList();
        if (Enclosing_Instance.fieldToReader != null) {
            if (Enclosing_Instance.fieldToReader.Count > 0) {
                int index = 0;
                while (comparer.Compare(Enclosing_Instance.fieldToReader.GetKey(index), field) < 0) {
                    index++;
                }
                for (; index < Enclosing_Instance.fieldToReader.Count; index++) {
                    newList.Add(Enclosing_Instance.fieldToReader.GetKey(index), Enclosing_Instance.fieldToReader[Enclosing_Instance.fieldToReader.GetKey(index)]);
                }
            }
        }
        fieldIterator = newList.Keys.GetEnumerator();
        fieldIterator.MoveNext();
        System.Object generatedAux = fieldIterator.Current; // Skip field to get next one
    }

    while (fieldIterator.MoveNext()) {
        field = ((System.String) fieldIterator.Current);
        termEnum = ((IndexReader) Enclosing_Instance.fieldToReader[field]).Terms(new Term(field));
        Term term = termEnum.Term();
        if (term != null && (System.Object) term.Field() == (System.Object) field) {
            return(true);
        } else {
            termEnum.Close();
        }
    }
    return(false); // no more fields
}
public virtual void TestFilterIndexReader_Renamed() { RAMDirectory directory = new MockRAMDirectory(); IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED); Document d1 = new Document(); d1.Add(new Field("default", "one two", Field.Store.YES, Field.Index.ANALYZED)); writer.AddDocument(d1); Document d2 = new Document(); d2.Add(new Field("default", "one three", Field.Store.YES, Field.Index.ANALYZED)); writer.AddDocument(d2); Document d3 = new Document(); d3.Add(new Field("default", "two four", Field.Store.YES, Field.Index.ANALYZED)); writer.AddDocument(d3); writer.Close(); IndexReader reader = new TestReader(IndexReader.Open(directory)); Assert.IsTrue(reader.IsOptimized()); TermEnum terms = reader.Terms(); while (terms.Next()) { Assert.IsTrue(terms.Term().Text().IndexOf('e') != -1); } terms.Close(); TermPositions positions = reader.TermPositions(new Term("default", "one")); while (positions.Next()) { Assert.IsTrue((positions.Doc() % 2) == 1); } int NUM_DOCS = 3; TermDocs td = reader.TermDocs(null); for (int i = 0; i < NUM_DOCS; i++) { Assert.IsTrue(td.Next()); Assert.AreEqual(i, td.Doc()); Assert.AreEqual(1, td.Freq()); } td.Close(); reader.Close(); directory.Close(); }
internal bool Next() { if (termEnum.Next()) { term = termEnum.Term; return(true); } else { term = null; return(false); } }
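// A minimal driver sketch for wrappers like the one above: the canonical
// consume-then-advance loop over a TermEnum. The "dir" parameter and the
// method-style Term()/DocFreq() accessors are assumptions (some of the ports
// in this collection expose Term as a property instead).
private static void DumpAllTerms(Directory dir) {
    IndexReader reader = IndexReader.Open(dir);
    TermEnum termEnum = reader.Terms(); // positioned *before* the first term
    try {
        while (termEnum.Next()) { // advance first, then read
            System.Console.Out.WriteLine(termEnum.Term() + " DF=" + termEnum.DocFreq());
        }
    } finally {
        termEnum.Close(); // term enums hold file handles; always close them
        reader.Close();
    }
}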
public override bool Next() {
    if (termEnum == null) {
        return(false);
    }

    // another term in this field?
    if (termEnum.Next() && (System.Object) termEnum.Term().Field() == (System.Object) field) {
        return(true); // yes, keep going
    }
    termEnum.Close(); // close old termEnum

    // find the next field with terms, if any
    if (fieldIterator == null) {
        List<string> tmpList = new List<string>();
        bool m = false;
        //JAVA: fieldIterator = fieldToReader.tailMap(field).keySet().iterator();
        //JAVA: fieldIterator.next();             // Skip field to get next one
        foreach (string key in Enclosing_Instance.fieldToReader.Keys) {
            if (key == field && m == false) {
                m = true;
            }
            if (m) {
                tmpList.Add(key);
            }
        }
        fieldIterator = tmpList.GetEnumerator();
        fieldIterator.MoveNext(); // skip 'field' itself (tmpList starts with it), matching the JAVA original
    }

    while (fieldIterator.MoveNext()) {
        field = fieldIterator.Current;
        termEnum = Enclosing_Instance.fieldToReader[field].Terms(new Term(field));
        Term term = termEnum.Term();
        if (term != null && (System.Object) term.Field() == (System.Object) field) {
            return(true);
        } else {
            termEnum.Close();
        }
    }
    return(false); // no more fields
}
public override bool Next(IState state) {
    if (termEnum == null) {
        return(false);
    }

    // another term in this field?
    if (termEnum.Next(state) && (System.Object) termEnum.Term.Field == (System.Object) field) {
        return(true); // yes, keep going
    }
    termEnum.Close(); // close old termEnum

    // find the next field with terms, if any
    if (fieldIterator == null) {
        var newList = new List<string>();
        if (Enclosing_Instance.fieldToReader != null && Enclosing_Instance.fieldToReader.Count > 0) {
            var comparer = Enclosing_Instance.fieldToReader.Comparer;
            foreach (var entry in Enclosing_Instance.fieldToReader.Keys.Where(x => comparer.Compare(x, field) >= 0)) {
                newList.Add(entry);
            }
        }
        fieldIterator = newList.Skip(1).GetEnumerator(); // Skip field to get next one
    }

    while (fieldIterator.MoveNext()) {
        field = fieldIterator.Current;
        termEnum = Enclosing_Instance.fieldToReader[field].Terms(new Term(field), state);
        Term term = termEnum.Term;
        if (term != null && (System.Object) term.Field == (System.Object) field) {
            return(true);
        } else {
            termEnum.Close();
        }
    }
    return(false); // no more fields
}
public virtual void TestThreadSafety() { rnd = NewRandom(); int numThreads = 5; int numDocs = 50; ByteArrayPool pool = new ByteArrayPool(numThreads, 5); Directory dir = new RAMDirectory(); IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED, null); System.String field = "test"; ThreadClass[] ingesters = new ThreadClass[numThreads]; for (int i = 0; i < numThreads; i++) { ingesters[i] = new AnonymousClassThread(numDocs, field, pool, writer, this); ingesters[i].Start(); } for (int i = 0; i < numThreads; i++) { ingesters[i].Join(); } writer.Close(); IndexReader reader = IndexReader.Open(dir, true, null); TermEnum terms = reader.Terms(null); while (terms.Next(null)) { TermPositions tp = reader.TermPositions(terms.Term, null); while (tp.Next(null)) { int freq = tp.Freq; for (int i = 0; i < freq; i++) { tp.NextPosition(null); Assert.AreEqual(pool.BytesToString(tp.GetPayload(new byte[5], 0, null)), terms.Term.Text); } } tp.Close(); } terms.Close(); reader.Close(); Assert.AreEqual(pool.Size(), numThreads); }
private void PrintSegment(System.IO.StringWriter out_Renamed, System.String segment) { Directory directory = FSDirectory.GetDirectory(indexDir, false); SegmentReader reader = new SegmentReader(new SegmentInfo(segment, 1, directory)); for (int i = 0; i < reader.NumDocs(); i++) { out_Renamed.WriteLine(reader.Document(i)); } TermEnum tis = reader.Terms(); while (tis.Next()) { out_Renamed.Write(tis.Term()); out_Renamed.WriteLine(" DF=" + tis.DocFreq()); TermPositions positions = reader.TermPositions(tis.Term()); try { while (positions.Next()) { out_Renamed.Write(" doc=" + positions.Doc()); out_Renamed.Write(" TF=" + positions.Freq()); out_Renamed.Write(" pos="); out_Renamed.Write(positions.NextPosition()); for (int j = 1; j < positions.Freq(); j++) { out_Renamed.Write("," + positions.NextPosition()); } out_Renamed.WriteLine(""); } } finally { positions.Close(); } } tis.Close(); reader.Close(); directory.Close(); }
internal static void PrintSegment(System.String segment) { Directory directory = FSDirectory.GetDirectory("test", false); SegmentReader reader = new SegmentReader(new SegmentInfo(segment, 1, directory)); for (int i = 0; i < reader.NumDocs(); i++) { System.Console.Out.WriteLine(reader.Document(i)); } TermEnum tis = reader.Terms(); while (tis.Next()) { System.Console.Out.Write(tis.Term()); System.Console.Out.WriteLine(" DF=" + tis.DocFreq()); TermPositions positions = reader.TermPositions(tis.Term()); try { while (positions.Next()) { System.Console.Out.Write(" doc=" + positions.Doc()); System.Console.Out.Write(" TF=" + positions.Freq()); System.Console.Out.Write(" pos="); System.Console.Out.Write(positions.NextPosition()); for (int j = 1; j < positions.Freq(); j++) { System.Console.Out.Write("," + positions.NextPosition()); } System.Console.Out.WriteLine(""); } } finally { positions.Close(); } } tis.Close(); reader.Close(); directory.Close(); }
protected internal override short[] CreateValue(IndexReader reader, Entry entryKey, IState state) { Entry entry = entryKey; System.String field = entry.field; ShortParser parser = (ShortParser)entry.custom; if (parser == null) { return(wrapper.GetShorts(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_SHORT_PARSER, state)); } short[] retArray = new short[reader.MaxDoc]; TermDocs termDocs = reader.TermDocs(state); TermEnum termEnum = reader.Terms(new Term(field), state); try { do { Term term = termEnum.Term; if (term == null || (System.Object)term.Field != (System.Object)field) { break; } short termval = parser.ParseShort(term.Text); termDocs.Seek(termEnum, state); while (termDocs.Next(state)) { retArray[termDocs.Doc] = termval; } }while (termEnum.Next(state)); } catch (StopFillCacheException) { } finally { termDocs.Close(); termEnum.Close(); } return(retArray); }
public virtual void TestFilterIndexReader_() { RAMDirectory directory = new RAMDirectory(); IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true); Document d1 = new Document(); d1.Add(Field.Text("default", "one two")); writer.AddDocument(d1); Document d2 = new Document(); d2.Add(Field.Text("default", "one three")); writer.AddDocument(d2); Document d3 = new Document(); d3.Add(Field.Text("default", "two four")); writer.AddDocument(d3); writer.Close(); IndexReader reader = new TestReader(IndexReader.Open(directory)); TermEnum terms = reader.Terms(); while (terms.Next()) { Assert.IsTrue(terms.Term().Text().IndexOf((System.Char) 'e') != -1); } terms.Close(); TermPositions positions = reader.TermPositions(new Term("default", "one")); while (positions.Next()) { Assert.IsTrue((positions.Doc() % 2) == 1); } reader.Close(); }
private void PrintSegment(System.IO.StreamWriter out_Renamed, SegmentInfo si) { SegmentReader reader = SegmentReader.Get(true, si, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null); for (int i = 0; i < reader.NumDocs(); i++) { out_Renamed.WriteLine(reader.Document(i, null)); } TermEnum tis = reader.Terms(null); while (tis.Next(null)) { out_Renamed.Write(tis.Term); out_Renamed.WriteLine(" DF=" + tis.DocFreq()); TermPositions positions = reader.TermPositions(tis.Term, null); try { while (positions.Next(null)) { out_Renamed.Write(" doc=" + positions.Doc); out_Renamed.Write(" TF=" + positions.Freq); out_Renamed.Write(" pos="); out_Renamed.Write(positions.NextPosition(null)); for (int j = 1; j < positions.Freq; j++) { out_Renamed.Write("," + positions.NextPosition(null)); } out_Renamed.WriteLine(""); } } finally { positions.Close(); } } tis.Close(); reader.Close(); }
private void PrintSegment(System.IO.StreamWriter out_Renamed, SegmentInfo si) { SegmentReader reader = SegmentReader.Get(si); for (int i = 0; i < reader.NumDocs(); i++) { out_Renamed.WriteLine(reader.Document(i)); } TermEnum tis = reader.Terms(); while (tis.Next()) { out_Renamed.Write(tis.Term()); out_Renamed.WriteLine(" DF=" + tis.DocFreq()); TermPositions positions = reader.TermPositions(tis.Term()); try { while (positions.Next()) { out_Renamed.Write(" doc=" + positions.Doc()); out_Renamed.Write(" TF=" + positions.Freq()); out_Renamed.Write(" pos="); out_Renamed.Write(positions.NextPosition()); for (int j = 1; j < positions.Freq(); j++) { out_Renamed.Write("," + positions.NextPosition()); } out_Renamed.WriteLine(""); } } finally { positions.Close(); } } tis.Close(); reader.Close(); }
public override bool Next() {
    if (termEnum == null) {
        return(false);
    }

    // another term in this field?
    if (termEnum.Next() && (System.Object) termEnum.Term().Field() == (System.Object) field) {
        return(true); // yes, keep going
    }
    termEnum.Close(); // close old termEnum

    // find the next field with terms, if any
    if (fieldIterator == null) {
        fieldIterator = SupportClass.TailMap(Enclosing_Instance.fieldToReader, field).Keys.GetEnumerator();
        fieldIterator.MoveNext(); // Skip field to get next one
    }

    while (fieldIterator.MoveNext()) {
        field = ((System.String) fieldIterator.Current);
        termEnum = ((IndexReader) Enclosing_Instance.fieldToReader[field]).Terms(new Term(field, ""));
        Term term = termEnum.Term();
        if (term != null && (System.Object) term.Field() == (System.Object) field) {
            return(true);
        } else {
            termEnum.Close();
        }
    }
    return(false); // no more fields
}
public override bool Next() {
    if (termEnum == null) {
        return(false);
    }

    // another term in this field?
    if (termEnum.Next() && (object) termEnum.Term().Field() == (object) field) {
        return(true); // yes, keep going
    }
    termEnum.Close(); // close old termEnum

    // find the next field with terms, if any
    if (fieldIterator == null) {
        fieldIterator = SupportClass.CollectionsSupport.TailMap(Enclosing_Instance.fieldToReader, field).Keys.GetEnumerator();
    }

    while (fieldIterator.MoveNext()) {
        field = fieldIterator.Current;
        termEnum = Enclosing_Instance.fieldToReader[field].Terms(new Term(field));
        Term term = termEnum.Term();
        if (term != null && (object) term.Field() == (object) field) {
            return(true);
        } else {
            termEnum.Close();
        }
    }
    return(false); // no more fields
}
/// <summary>Returns true if index is clean, else false.</summary>
public static bool Check(Directory dir, bool doFix) {
    System.Globalization.NumberFormatInfo nf = System.Globalization.CultureInfo.CurrentCulture.NumberFormat;
    SegmentInfos sis = new SegmentInfos();
    try {
        sis.Read(dir);
    } catch (System.Exception t) {
        out_Renamed.WriteLine("ERROR: could not read any segments file in directory");
        out_Renamed.Write(t.StackTrace);
        out_Renamed.Flush();
        return(false);
    }

    int numSegments = sis.Count;
    System.String segmentsFileName = sis.GetCurrentSegmentFileName();
    IndexInput input = null;
    try {
        input = dir.OpenInput(segmentsFileName);
    } catch (System.Exception t) {
        out_Renamed.WriteLine("ERROR: could not open segments file in directory");
        out_Renamed.Write(t.StackTrace);
        out_Renamed.Flush();
        return(false);
    }

    int format = 0;
    try {
        format = input.ReadInt();
    } catch (System.Exception t) {
        out_Renamed.WriteLine("ERROR: could not read segment file version in directory");
        out_Renamed.Write(t.StackTrace);
        out_Renamed.Flush();
        return(false);
    } finally {
        if (input != null) {
            input.Close();
        }
    }

    System.String sFormat = "";
    bool skip = false;
    // single if/else-if chain: without the first 'else' the trailing branch
    // would overwrite the pre-2.1 match
    if (format == SegmentInfos.FORMAT) {
        sFormat = "FORMAT [Lucene Pre-2.1]";
    } else if (format == SegmentInfos.FORMAT_LOCKLESS) {
        sFormat = "FORMAT_LOCKLESS [Lucene 2.1]";
    } else if (format == SegmentInfos.FORMAT_SINGLE_NORM_FILE) {
        sFormat = "FORMAT_SINGLE_NORM_FILE [Lucene 2.2]";
    } else if (format == SegmentInfos.FORMAT_SHARED_DOC_STORE) {
        sFormat = "FORMAT_SHARED_DOC_STORE [Lucene 2.3]";
    } else if (format < SegmentInfos.FORMAT_SHARED_DOC_STORE) {
        sFormat = "int=" + format + " [newer version of Lucene than this tool]";
        skip = true;
    } else {
        sFormat = format + " [Lucene 1.3 or prior]";
    }
    out_Renamed.WriteLine("Segments file=" + segmentsFileName + " numSegments=" + numSegments + " version=" + sFormat);

    if (skip) {
        out_Renamed.WriteLine("\nERROR: this index appears to be created by a newer version of Lucene than this tool was compiled on; please re-compile this tool on the matching version of Lucene; exiting");
        return(false);
    }

    SegmentInfos newSIS = (SegmentInfos) sis.Clone();
    newSIS.Clear();
    bool changed = false;
    int totLoseDocCount = 0;
    int numBadSegments = 0;
    for (int i = 0; i < numSegments; i++) {
        SegmentInfo info = sis.Info(i);
        out_Renamed.WriteLine(" " + (1 + i) + " of " + numSegments + ": name=" + info.name + " docCount=" + info.docCount);
        int toLoseDocCount = info.docCount;
        SegmentReader reader = null;
        try {
            out_Renamed.WriteLine(" compound=" + info.GetUseCompoundFile());
            out_Renamed.WriteLine(" numFiles=" + info.Files().Count);
            out_Renamed.WriteLine(String.Format(nf, " size (MB)={0:f}", new Object[] { (info.SizeInBytes() / (1024.0 * 1024.0)) }));
            int docStoreOffset = info.GetDocStoreOffset();
            if (docStoreOffset != -1) {
                out_Renamed.WriteLine(" docStoreOffset=" + docStoreOffset);
                out_Renamed.WriteLine(" docStoreSegment=" + info.GetDocStoreSegment());
                out_Renamed.WriteLine(" docStoreIsCompoundFile=" + info.GetDocStoreIsCompoundFile());
            }
            System.String delFileName = info.GetDelFileName();
            if (delFileName == null) {
                out_Renamed.WriteLine(" no deletions");
            } else {
                out_Renamed.WriteLine(" has deletions [delFileName=" + delFileName + "]");
            }

            out_Renamed.Write(" test: open reader.........");
            reader = SegmentReader.Get(info);
            int numDocs = reader.NumDocs();
            toLoseDocCount = numDocs;
            if (reader.HasDeletions()) {
                out_Renamed.WriteLine("OK [" + (info.docCount - numDocs) + " deleted docs]");
            } else {
                out_Renamed.WriteLine("OK");
            }

            out_Renamed.Write(" test: fields, norms.......");
            System.Collections.IDictionary fieldNames = (System.Collections.IDictionary) reader.GetFieldNames(IndexReader.FieldOption.ALL);
            System.Collections.IEnumerator it = fieldNames.Keys.GetEnumerator();
            while (it.MoveNext()) {
                System.String fieldName = (System.String) it.Current;
                byte[] b = reader.Norms(fieldName);
                if (b.Length != info.docCount) {
                    throw new System.SystemException("norms for field \"" + fieldName + "\" is length " + b.Length + " != maxDoc " + info.docCount);
                }
            }
            out_Renamed.WriteLine("OK [" + fieldNames.Count + " fields]");

            out_Renamed.Write(" test: terms, freq, prox...");
            TermEnum termEnum = reader.Terms();
            TermPositions termPositions = reader.TermPositions();
            // Used only to count up # deleted docs for this term
            MySegmentTermDocs myTermDocs = new MySegmentTermDocs(reader);
            long termCount = 0;
            long totFreq = 0;
            long totPos = 0;
            while (termEnum.Next()) {
                termCount++;
                Term term = termEnum.Term();
                int docFreq = termEnum.DocFreq();
                termPositions.Seek(term);
                int lastDoc = -1;
                int freq0 = 0;
                totFreq += docFreq;
                while (termPositions.Next()) {
                    freq0++;
                    int doc = termPositions.Doc();
                    int freq = termPositions.Freq();
                    if (doc <= lastDoc) {
                        throw new System.SystemException("term " + term + ": doc " + doc + " <= lastDoc " + lastDoc);
                    }
                    lastDoc = doc;
                    if (freq <= 0) {
                        throw new System.SystemException("term " + term + ": doc " + doc + ": freq " + freq + " is out of bounds");
                    }
                    int lastPos = -1;
                    totPos += freq;
                    for (int j = 0; j < freq; j++) {
                        int pos = termPositions.NextPosition();
                        if (pos < 0) {
                            throw new System.SystemException("term " + term + ": doc " + doc + ": pos " + pos + " is out of bounds");
                        }
                        if (pos < lastPos) {
                            throw new System.SystemException("term " + term + ": doc " + doc + ": pos " + pos + " < lastPos " + lastPos);
                        }
                        lastPos = pos; // remember the last position so the ordering check actually advances
                    }
                }

                // Now count how many deleted docs occurred in this term:
                int delCount;
                if (reader.HasDeletions()) {
                    myTermDocs.Seek(term);
                    while (myTermDocs.Next()) {
                    }
                    delCount = myTermDocs.delCount;
                } else {
                    delCount = 0;
                }
                if (freq0 + delCount != docFreq) {
                    throw new System.SystemException("term " + term + " docFreq=" + docFreq + " != num docs seen " + freq0 + " + num docs deleted " + delCount);
                }
            }
            out_Renamed.WriteLine("OK [" + termCount + " terms; " + totFreq + " terms/docs pairs; " + totPos + " tokens]");

            out_Renamed.Write(" test: stored fields.......");
            int docCount = 0;
            long totFields = 0;
            for (int j = 0; j < info.docCount; j++) {
                if (!reader.IsDeleted(j)) {
                    docCount++;
                    Document doc = reader.Document(j);
                    totFields += doc.GetFields().Count;
                }
            }
            if (docCount != reader.NumDocs()) {
                throw new System.SystemException("docCount=" + reader.NumDocs() + " but saw " + docCount + " undeleted docs");
            }
            out_Renamed.WriteLine(String.Format(nf, "OK [{0:d} total field count; avg {1:f} fields per doc]", new Object[] { totFields, (((float) totFields) / docCount) }));

            out_Renamed.Write(" test: term vectors........");
            int totVectors = 0;
            for (int j = 0; j < info.docCount; j++) {
                if (!reader.IsDeleted(j)) {
                    TermFreqVector[] tfv = reader.GetTermFreqVectors(j);
                    if (tfv != null) {
                        totVectors += tfv.Length;
                    }
                }
            }
            out_Renamed.WriteLine(String.Format(nf, "OK [{0:d} total vector count; avg {1:f} term/freq vector fields per doc]", new Object[] { totVectors, (((float) totVectors) / docCount) }));
            out_Renamed.WriteLine("");
        } catch (System.Exception t) {
            out_Renamed.WriteLine("FAILED");
            System.String comment;
            if (doFix) {
                comment = "will remove reference to this segment (-fix is specified)";
            } else {
                comment = "would remove reference to this segment (-fix was not specified)";
            }
            out_Renamed.WriteLine(" WARNING: " + comment + "; full exception:");
            out_Renamed.Write(t.StackTrace);
            out_Renamed.Flush();
            out_Renamed.WriteLine("");
            totLoseDocCount += toLoseDocCount;
            numBadSegments++;
            changed = true;
            continue;
        } finally {
            if (reader != null) {
                reader.Close();
            }
        }

        // Keeper
        newSIS.Add(info.Clone());
    }

    if (!changed) {
        out_Renamed.WriteLine("No problems were detected with this index.\n");
        return(true);
    } else {
        out_Renamed.WriteLine("WARNING: " + numBadSegments + " broken segments detected");
        if (doFix) {
            out_Renamed.WriteLine("WARNING: " + totLoseDocCount + " documents will be lost");
        } else {
            out_Renamed.WriteLine("WARNING: " + totLoseDocCount + " documents would be lost if -fix were specified");
        }
        out_Renamed.WriteLine();
    }

    if (doFix) {
        out_Renamed.WriteLine("NOTE: will write new segments file in 5 seconds; this will remove " + totLoseDocCount + " docs from the index. THIS IS YOUR LAST CHANCE TO CTRL+C!");
        for (int i = 0; i < 5; i++) {
            try {
                System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000)); // one second, in 100ns ticks
            } catch (System.Threading.ThreadInterruptedException) {
                SupportClass.ThreadClass.Current().Interrupt();
                i--;
                continue;
            }
            out_Renamed.WriteLine(" " + (5 - i) + "...");
        }
        out_Renamed.Write("Writing...");
        try {
            newSIS.Write(dir);
        } catch (System.Exception t) {
            out_Renamed.WriteLine("FAILED; exiting");
            out_Renamed.Write(t.StackTrace);
            out_Renamed.Flush();
            return(false);
        }
        out_Renamed.WriteLine("OK");
        out_Renamed.WriteLine("Wrote new segments file \"" + newSIS.GetCurrentSegmentFileName() + "\"");
    } else {
        out_Renamed.WriteLine("NOTE: would write new segments file [-fix was not specified]");
    }
    out_Renamed.WriteLine("");
    return(false);
}
public override bool Next() { return(in_Renamed.Next()); }
private static void IndexFile(IndexWriter writer, TermEnum uidIter, FileInfo file, Operation operation) {
    if (file.FullName.EndsWith(".html") || file.FullName.EndsWith(".htm") || file.FullName.EndsWith(".txt")) {
        // We've found a file we should index.
        if (operation == Operation.IncrementalReindex || operation == Operation.RemoveStale) {
            // We should only get here with an open uidIter.
            Debug.Assert(uidIter != null, "Expected uidIter != null for operation " + operation);
            var uid = HTMLDocument.Uid(file); // construct uid for doc

            while (uidIter.Term != null && uidIter.Term.Field == "uid" && String.CompareOrdinal(uidIter.Term.Text, uid) < 0) {
                if (operation == Operation.RemoveStale) {
                    Console.Out.WriteLine("deleting " + HTMLDocument.Uid2url(uidIter.Term.Text));
                    writer.DeleteDocuments(uidIter.Term);
                }
                uidIter.Next();
            }

            // The uidIter TermEnum should now be pointing at either
            //  1) a null term, meaning there are no more uids to check.
            //  2) a term matching the current file.
            //  3) a term not matching us.
            if (uidIter.Term != null && uidIter.Term.Field == "uid" && String.CompareOrdinal(uidIter.Term.Text, uid) == 0) {
                // uidIter points to the current document, we should move one
                // step ahead to keep state consistent, and carry on.
                uidIter.Next();
            } else if (operation == Operation.IncrementalReindex) {
                // uidIter does not point to the current document, and we're
                // currently indexing documents.
                var doc = HTMLDocument.Document(file);
                Console.Out.WriteLine("adding " + doc.Get("path"));
                writer.AddDocument(doc);
            }
        } else {
            // We're doing a complete reindexing. We aren't using uidIter,
            // but for completeness we assert that it's null (as expected).
            Debug.Assert(uidIter == null, "Expected uidIter == null for operation == " + operation);
            var doc = HTMLDocument.Document(file);
            Console.Out.WriteLine("adding " + doc.Get("path"));
            writer.AddDocument(doc);
        }
    }
}
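// A sketch of how a caller might prepare 'uidIter' before walking files;
// hypothetical setup, with the method name, "indexDir", and "docRoot" as
// illustrative assumptions. It seeks the enum to the first "uid" term, the
// field IndexFile's checks expect.
private static void IndexDocs(IndexWriter writer, Directory indexDir, System.IO.DirectoryInfo docRoot) {
    IndexReader reader = IndexReader.Open(indexDir);
    TermEnum uidIter = reader.Terms(new Term("uid", "")); // position at the first "uid" term
    foreach (System.IO.FileInfo file in docRoot.GetFiles()) {
        IndexFile(writer, uidIter, file, Operation.IncrementalReindex);
    }
    uidIter.Close();
    reader.Close();
}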
public virtual void Test1() {
    ParallelReader pr = new ParallelReader();
    pr.Add(ir1);
    pr.Add(ir2);
    TermDocs td = pr.TermDocs();
    TermEnum te = pr.Terms();
    // every term occurs exactly once, in document 0; walk them in index order
    System.String[] expectedTerms = new System.String[] {
        "field1:brown", "field1:fox", "field1:jumps", "field1:quick", "field1:the",
        "field2:brown", "field2:fox", "field2:jumps", "field2:quick", "field2:the",
        "field3:dog", "field3:fox", "field3:jumps", "field3:lazy", "field3:over", "field3:the"
    };
    foreach (System.String expected in expectedTerms) {
        Assert.IsTrue(te.Next());
        Assert.AreEqual(expected, te.Term().ToString());
        td.Seek(te.Term());
        Assert.IsTrue(td.Next());
        Assert.AreEqual(0, td.Doc());
        Assert.IsFalse(td.Next());
    }
    Assert.IsFalse(te.Next());
}
/// <summary> Test the term index.</summary>
private Status.TermIndexStatus TestTermIndex(SegmentInfo info, SegmentReader reader) {
    Status.TermIndexStatus status = new Status.TermIndexStatus();
    try {
        if (infoStream != null) {
            infoStream.Write(" test: terms, freq, prox...");
        }

        TermEnum termEnum = reader.Terms();
        TermPositions termPositions = reader.TermPositions();
        // Used only to count up # deleted docs for this term
        MySegmentTermDocs myTermDocs = new MySegmentTermDocs(reader);
        int maxDoc = reader.MaxDoc();
        while (termEnum.Next()) {
            status.termCount++;
            Term term = termEnum.Term();
            int docFreq = termEnum.DocFreq();
            termPositions.Seek(term);
            int lastDoc = -1;
            int freq0 = 0;
            status.totFreq += docFreq;
            while (termPositions.Next()) {
                freq0++;
                int doc = termPositions.Doc();
                int freq = termPositions.Freq();
                if (doc <= lastDoc) {
                    throw new System.SystemException("term " + term + ": doc " + doc + " <= lastDoc " + lastDoc);
                }
                if (doc >= maxDoc) {
                    throw new System.SystemException("term " + term + ": doc " + doc + " >= maxDoc " + maxDoc);
                }
                lastDoc = doc;
                if (freq <= 0) {
                    throw new System.SystemException("term " + term + ": doc " + doc + ": freq " + freq + " is out of bounds");
                }
                int lastPos = -1;
                status.totPos += freq;
                for (int j = 0; j < freq; j++) {
                    int pos = termPositions.NextPosition();
                    if (pos < -1) {
                        throw new System.SystemException("term " + term + ": doc " + doc + ": pos " + pos + " is out of bounds");
                    }
                    if (pos < lastPos) {
                        throw new System.SystemException("term " + term + ": doc " + doc + ": pos " + pos + " < lastPos " + lastPos);
                    }
                    lastPos = pos; // remember the last position so the ordering check actually advances
                }
            }

            // Now count how many deleted docs occurred in this term:
            int delCount;
            if (reader.HasDeletions()) {
                myTermDocs.Seek(term);
                while (myTermDocs.Next()) {
                }
                delCount = myTermDocs.delCount;
            } else {
                delCount = 0;
            }
            if (freq0 + delCount != docFreq) {
                throw new System.SystemException("term " + term + " docFreq=" + docFreq + " != num docs seen " + freq0 + " + num docs deleted " + delCount);
            }
        }
        Msg("OK [" + status.termCount + " terms; " + status.totFreq + " terms/docs pairs; " + status.totPos + " tokens]");
    } catch (System.Exception e) {
        Msg("ERROR [" + System.Convert.ToString(e.Message) + "]");
        status.error = e;
        if (infoStream != null) {
            infoStream.WriteLine(e.StackTrace);
        }
    }
    return(status);
}
public override bool Next(IState state) { return(in_Renamed.Next(state)); }
public virtual void TestPhrasePrefix() {
    RAMDirectory indexStore = new RAMDirectory();
    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
    Add("blueberry pie", writer);
    Add("blueberry strudel", writer);
    Add("blueberry pizza", writer);
    Add("blueberry chewing gum", writer);
    Add("bluebird pizza", writer);
    Add("bluebird foobar pizza", writer);
    Add("piccadilly circus", writer);
    writer.Optimize(null);
    writer.Close();

    IndexSearcher searcher = new IndexSearcher(indexStore, true, null);

    // search for "blueberry pi*":
    MultiPhraseQuery query1 = new MultiPhraseQuery();
    // search for "strawberry pi*":
    MultiPhraseQuery query2 = new MultiPhraseQuery();
    query1.Add(new Term("body", "blueberry"));
    query2.Add(new Term("body", "strawberry"));

    System.Collections.ArrayList termsWithPrefix = new System.Collections.ArrayList();
    IndexReader ir = IndexReader.Open((Directory) indexStore, true, null);

    // this TermEnum gives "piccadilly", "pie" and "pizza".
    System.String prefix = "pi";
    TermEnum te = ir.Terms(new Term("body", prefix), null);
    do {
        if (te.Term.Text.StartsWith(prefix)) {
            termsWithPrefix.Add(te.Term);
        }
    } while (te.Next(null));

    query1.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
    Assert.AreEqual("body:\"blueberry (piccadilly pie pizza)\"", query1.ToString());
    query2.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
    Assert.AreEqual("body:\"strawberry (piccadilly pie pizza)\"", query2.ToString());

    ScoreDoc[] result;
    result = searcher.Search(query1, null, 1000, null).ScoreDocs;
    Assert.AreEqual(2, result.Length);
    result = searcher.Search(query2, null, 1000, null).ScoreDocs;
    Assert.AreEqual(0, result.Length);

    // search for "blue* pizza":
    MultiPhraseQuery query3 = new MultiPhraseQuery();
    termsWithPrefix.Clear();
    prefix = "blue";
    te = ir.Terms(new Term("body", prefix), null);
    do {
        if (te.Term.Text.StartsWith(prefix)) {
            termsWithPrefix.Add(te.Term);
        }
    } while (te.Next(null));
    query3.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
    query3.Add(new Term("body", "pizza"));

    result = searcher.Search(query3, null, 1000, null).ScoreDocs;
    Assert.AreEqual(2, result.Length); // blueberry pizza, bluebird pizza
    Assert.AreEqual("body:\"(blueberry bluebird) pizza\"", query3.ToString());

    // test slop:
    query3.Slop = 1;
    result = searcher.Search(query3, null, 1000, null).ScoreDocs;
    Assert.AreEqual(3, result.Length); // blueberry pizza, bluebird pizza, bluebird foobar pizza

    MultiPhraseQuery query4 = new MultiPhraseQuery();
    // okay, all terms must belong to the same field
    Assert.Throws<ArgumentException>(() => {
        query4.Add(new Term("field1", "foo"));
        query4.Add(new Term("field2", "foobar"));
    });

    searcher.Close();
    indexStore.Close();
}
public static void VerifyEquals(IndexReader r1, IndexReader r2, System.String idField) {
    Assert.AreEqual(r1.NumDocs(), r2.NumDocs());
    bool hasDeletes = !(r1.MaxDoc() == r2.MaxDoc() && r1.NumDocs() == r1.MaxDoc());

    int[] r2r1 = new int[r2.MaxDoc()]; // r2 id to r1 id mapping

    TermDocs termDocs1 = r1.TermDocs();
    TermDocs termDocs2 = r2.TermDocs();

    // create mapping from id2 space to id1 space based on idField
    idField = StringHelper.Intern(idField);
    TermEnum termEnum = r1.Terms(new Term(idField, ""));
    do {
        Term term = termEnum.Term();
        if (term == null || (System.Object) term.Field() != (System.Object) idField) {
            break;
        }

        termDocs1.Seek(termEnum);
        if (!termDocs1.Next()) {
            // This doc is deleted and wasn't replaced
            termDocs2.Seek(termEnum);
            Assert.IsFalse(termDocs2.Next());
            continue;
        }

        int id1 = termDocs1.Doc();
        Assert.IsFalse(termDocs1.Next());

        termDocs2.Seek(termEnum);
        Assert.IsTrue(termDocs2.Next());
        int id2 = termDocs2.Doc();
        Assert.IsFalse(termDocs2.Next());

        r2r1[id2] = id1;

        // verify stored fields are equivalent
        try {
            VerifyEquals(r1.Document(id1), r2.Document(id2));
        } catch (System.Exception t) {
            System.Console.Out.WriteLine("FAILED id=" + term + " id1=" + id1 + " id2=" + id2 + " term=" + term);
            System.Console.Out.WriteLine(" d1=" + r1.Document(id1));
            System.Console.Out.WriteLine(" d2=" + r2.Document(id2));
            throw t;
        }

        try {
            // verify term vectors are equivalent
            VerifyEquals(r1.GetTermFreqVectors(id1), r2.GetTermFreqVectors(id2));
        } catch (System.Exception e) {
            System.Console.Out.WriteLine("FAILED id=" + term + " id1=" + id1 + " id2=" + id2);
            TermFreqVector[] tv1 = r1.GetTermFreqVectors(id1);
            System.Console.Out.WriteLine(" d1=" + tv1);
            if (tv1 != null) {
                for (int i = 0; i < tv1.Length; i++) {
                    System.Console.Out.WriteLine(" " + i + ": " + tv1[i]);
                }
            }
            TermFreqVector[] tv2 = r2.GetTermFreqVectors(id2);
            System.Console.Out.WriteLine(" d2=" + tv2);
            if (tv2 != null) {
                for (int i = 0; i < tv2.Length; i++) {
                    System.Console.Out.WriteLine(" " + i + ": " + tv2[i]);
                }
            }
            throw e;
        }
    } while (termEnum.Next());
    termEnum.Close();

    // Verify postings
    TermEnum termEnum1 = r1.Terms(new Term("", ""));
    TermEnum termEnum2 = r2.Terms(new Term("", ""));

    // pack both doc and freq into single element for easy sorting
    long[] info1 = new long[r1.NumDocs()];
    long[] info2 = new long[r2.NumDocs()];

    for (; ;) {
        Term term1, term2;

        // iterate until we get some docs
        int len1;
        for (; ;) {
            len1 = 0;
            term1 = termEnum1.Term();
            if (term1 == null) {
                break;
            }
            termDocs1.Seek(termEnum1);
            while (termDocs1.Next()) {
                int d1 = termDocs1.Doc();
                int f1 = termDocs1.Freq();
                info1[len1] = (((long) d1) << 32) | f1;
                len1++;
            }
            if (len1 > 0) {
                break;
            }
            if (!termEnum1.Next()) {
                break;
            }
        }

        // iterate until we get some docs
        int len2;
        for (; ;) {
            len2 = 0;
            term2 = termEnum2.Term();
            if (term2 == null) {
                break;
            }
            termDocs2.Seek(termEnum2);
            while (termDocs2.Next()) {
                int d2 = termDocs2.Doc();
                int f2 = termDocs2.Freq();
                info2[len2] = (((long) r2r1[d2]) << 32) | f2;
                len2++;
            }
            if (len2 > 0) {
                break;
            }
            if (!termEnum2.Next()) {
                break;
            }
        }

        if (!hasDeletes) {
            Assert.AreEqual(termEnum1.DocFreq(), termEnum2.DocFreq());
        }
        Assert.AreEqual(len1, len2);
        if (len1 == 0) {
            break; // no more terms
        }

        Assert.AreEqual(term1, term2);

        // sort info2 to get it into ascending docid order
        System.Array.Sort(info2, 0, len2);

        // now compare
        for (int i = 0; i < len1; i++) {
            Assert.AreEqual(info1[i], info2[i]);
        }

        termEnum1.Next();
        termEnum2.Next();
    }
}
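// The packing trick used above, shown in isolation: the doc id goes in the
// high 32 bits and the freq in the low 32, so sorting the plain longs orders
// (doc, freq) pairs by doc first. A small self-contained sketch; the helper
// names are illustrative.
private static long PackDocFreq(int doc, int freq) {
    return (((long) doc) << 32) | (uint) freq; // uint cast avoids sign-extending freq into the doc half
}

private static void UnpackDocFreq(long packed, out int doc, out int freq) {
    doc = (int) (packed >> 32);           // high half
    freq = (int) (packed & 0xFFFFFFFFL);  // low half
}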