/// <summary>
/// Indexes a single Farsi term and verifies that a collating
/// <see cref="TermRangeQuery"/> excludes it for the first range and
/// includes it for the second range.
/// </summary>
/// <param name="analyzer">Collating analyzer under test.</param>
/// <param name="firstBeg">Lower bound of the range that must NOT match.</param>
/// <param name="firstEnd">Upper bound of the range that must NOT match.</param>
/// <param name="secondBeg">Lower bound of the range that must match.</param>
/// <param name="secondEnd">Upper bound of the range that must match.</param>
public virtual void TestFarsiRangeQueryCollating(Analyzer analyzer, BytesRef firstBeg, BytesRef firstEnd, BytesRef secondBeg, BytesRef secondEnd)
{
    using Directory directory = NewDirectory();

    // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
    // orders the U+0698 character before the U+0633 character, so the single
    // index Term below should NOT be returned by a TermRangeQuery with a Farsi
    // Collator (or an Arabic one for the case when Farsi is not supported).
    using (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer)))
    {
        Document document = new Document();
        document.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES));
        indexWriter.AddDocument(document);
    } // the using block disposes the writer, committing the index

    using IndexReader indexReader = DirectoryReader.Open(directory);
    IndexSearcher indexSearcher = new IndexSearcher(indexReader);

    // First range: the collator must order the indexed term outside it.
    Search.Query rangeQuery = new TermRangeQuery("content", firstBeg, firstEnd, true, true);
    ScoreDoc[] results = indexSearcher.Search(rangeQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length, "The index Term should not be included.");

    // Second range: the collator must order the indexed term inside it.
    rangeQuery = new TermRangeQuery("content", secondBeg, secondEnd, true, true);
    results = indexSearcher.Search(rangeQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(1, results.Length, "The index Term should be included.");
}
/// <summary>
/// Verifies that <paramref name="analyzer"/> is thread-safe: sort keys for a set
/// of random terms are first computed serially, then recomputed concurrently by
/// several threads and compared against the serial results.
/// </summary>
/// <param name="analyzer">The analyzer whose token stream output is checked for thread safety.</param>
public virtual void AssertThreadSafe(Analyzer analyzer)
{
    const int numTestPoints = 100;
    int numThreads = TestUtil.NextInt32(Random, 3, 5);
    Dictionary<string, BytesRef> expectedBytes = new Dictionary<string, BytesRef>();

    // create a map<String,SortKey> up front.
    // then with multiple threads, generate sort keys for all the keys in the map
    // and ensure they are the same as the ones we produced in serial fashion.
    for (int point = 0; point < numTestPoints; point++)
    {
        string term = TestUtil.RandomSimpleString(Random);
        Exception priorException = null; // LUCENENET: No need to cast to IOExcpetion
        TokenStream ts = analyzer.GetTokenStream("fake", new StringReader(term));
        try
        {
            ITermToBytesRefAttribute termAtt = ts.AddAttribute<ITermToBytesRefAttribute>();
            BytesRef bytes = termAtt.BytesRef;
            ts.Reset();
            Assert.IsTrue(ts.IncrementToken());
            termAtt.FillBytesRef();
            // ensure we make a copy of the actual bytes too
            expectedBytes[term] = BytesRef.DeepCopyOf(bytes);
            Assert.IsFalse(ts.IncrementToken());
            ts.End();
        }
        catch (Exception e) when (e.IsIOException())
        {
            priorException = e;
        }
        finally
        {
            IOUtils.DisposeWhileHandlingException(priorException, ts);
        }
    }

    // Spin up the workers, then wait for every one of them to finish.
    ThreadJob[] workers = new ThreadJob[numThreads];
    for (int t = 0; t < numThreads; t++)
    {
        workers[t] = new ThreadAnonymousClass(analyzer, expectedBytes);
    }
    foreach (ThreadJob worker in workers)
    {
        worker.Start();
    }
    foreach (ThreadJob worker in workers)
    {
        worker.Join();
    }
}
/// <summary>
/// Make sure the documents returned by the search match the expected list.
/// Concatenates the "tracer" field values of every hit, in result order,
/// and compares the concatenation to <paramref name="expectedResult"/>.
/// </summary>
// Copied from TestSort.java
private void AssertMatches(IndexSearcher searcher, Search.Query query, Sort sort, string expectedResult)
{
    ScoreDoc[] hits = searcher.Search(query, null, 1000, sort).ScoreDocs;
    StringBuilder actual = new StringBuilder(10);
    foreach (ScoreDoc hit in hits)
    {
        Document document = searcher.Doc(hit.Doc);
        foreach (IIndexableField tracer in document.GetFields("tracer"))
        {
            actual.Append(tracer.GetStringValue());
        }
    }
    Assert.AreEqual(expectedResult, actual.ToString());
}
/// <summary>
/// Worker body for the thread-safety check: re-tokenizes every term in the
/// shared map and asserts the produced bytes equal the sort keys that were
/// computed serially up front.
/// </summary>
public override void Run()
{
    try
    {
        foreach (var mapping in this.map)
        {
            string term = mapping.Key;
            BytesRef expected = mapping.Value;
            // LUCENENET: catch via IsIOException() rather than typed IOException,
            // for consistency with AssertThreadSafe (IO exception types vary across
            // .NET abstractions).
            Exception priorException = null;
            TokenStream ts = this.analyzer.GetTokenStream("fake", new StringReader(term));
            try
            {
                ITermToBytesRefAttribute termAtt = ts.AddAttribute<ITermToBytesRefAttribute>();
                BytesRef bytes = termAtt.BytesRef;
                ts.Reset();
                Assert.IsTrue(ts.IncrementToken());
                termAtt.FillBytesRef();
                Assert.AreEqual(expected, bytes);
                Assert.IsFalse(ts.IncrementToken());
                ts.End();
            }
            catch (Exception e) when (e.IsIOException())
            {
                priorException = e;
            }
            finally
            {
                // Dispose the stream without masking any exception captured above.
                IOUtils.DisposeWhileHandlingException(priorException, ts);
            }
        }
    }
    catch (Exception e) when (e.IsIOException())
    {
        // Surface I/O failures (e.g. thrown during disposal) as test failures.
        throw new Exception(e.ToString(), e);
    }
}
/// <summary>
/// Assert that the content of the <see cref="DocIdSet"/> is the same as the content of the <see cref="BitArray"/>.
/// </summary>
#pragma warning disable xUnit1013
public virtual void AssertEquals(int numBits, BitArray ds1, T ds2)
#pragma warning restore xUnit1013
{
    // Pass 1 — nextDoc: a fresh iterator walked purely with NextDoc() must
    // visit exactly the set bits of ds1, in increasing order.
    DocIdSetIterator it2 = ds2.GetIterator();
    if (it2 == null)
    {
        // A null iterator denotes an empty set; the BitArray must have no set bits.
        Assert.AreEqual(-1, ds1.NextSetBit(0));
    }
    else
    {
        // Before the first NextDoc() the iterator is unpositioned (DocID == -1).
        Assert.AreEqual(-1, it2.DocID);
        for (int doc = ds1.NextSetBit(0); doc != -1; doc = ds1.NextSetBit(doc + 1))
        {
            Assert.AreEqual(doc, it2.NextDoc());
            Assert.AreEqual(doc, it2.DocID);
        }
        // Once exhausted, both NextDoc() and DocID must report NO_MORE_DOCS.
        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, it2.NextDoc());
        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, it2.DocID);
    }

    // Pass 2 — nextDoc / advance: a fresh iterator driven by a random mix of
    // NextDoc() and Advance(target) must agree with the BitArray at every step.
    it2 = ds2.GetIterator();
    if (it2 == null)
    {
        Assert.AreEqual(-1, ds1.NextSetBit(0));
    }
    else
    {
        for (int doc = -1; doc != DocIdSetIterator.NO_MORE_DOCS;)
        {
            if (Random.NextBoolean())
            {
                // Step to the next set bit with NextDoc().
                doc = ds1.NextSetBit(doc + 1);
                if (doc == -1)
                {
                    doc = DocIdSetIterator.NO_MORE_DOCS;
                }
                Assert.AreEqual(doc, it2.NextDoc());
                Assert.AreEqual(doc, it2.DocID);
            }
            else
            {
                // Jump ahead by a random gap — sometimes small (<= 64), sometimes
                // proportional to the set size — and compare Advance(target).
                int target = doc + 1 + Random.Next(Random.NextBoolean() ? 64 : Math.Max(numBits / 8, 1));
                doc = ds1.NextSetBit(target);
                if (doc == -1)
                {
                    doc = DocIdSetIterator.NO_MORE_DOCS;
                }
                Assert.AreEqual(doc, it2.Advance(target));
                Assert.AreEqual(doc, it2.DocID);
            }
        }
    }

    // Pass 3 — bits(): when the set exposes random access via IBits, every
    // position must agree with the iterator (true exactly on iterated docs).
    IBits bits = ds2.Bits;
    if (bits != null)
    {
        // test consistency between bits and iterator
        it2 = ds2.GetIterator();
        for (int previousDoc = -1, doc = it2.NextDoc(); ; previousDoc = doc, doc = it2.NextDoc())
        {
            // All positions strictly between consecutive iterated docs are clear.
            int max = doc == DocIdSetIterator.NO_MORE_DOCS ? bits.Length : doc;
            for (int i = previousDoc + 1; i < max; ++i)
            {
                Assert.AreEqual(false, bits.Get(i));
            }
            if (doc == DocIdSetIterator.NO_MORE_DOCS)
            {
                break;
            }
            Assert.AreEqual(true, bits.Get(doc));
        }
    }
}
/// <summary>
/// Asserts that the number of documents matched so far equals the number
/// of document numbers the parent test expects for its query.
/// </summary>
public void CheckNrHits()
{
    int expectedCount = parent.ExpectedDocNrs.Length;
    string message = parent.QueryText + ": nr of hits";
    Assert.AreEqual(expectedCount, totalMatched, message);
}