public virtual void TestUpgradeOldIndex()
{
    List<string> names = new List<string>(OldNames.Length + OldSingleSegmentNames.Length);
    names.AddRange(OldNames);
    names.AddRange(OldSingleSegmentNames);
    foreach (string name in names)
    {
        if (VERBOSE)
        {
            Console.WriteLine("testUpgradeOldIndex: index=" + name);
        }
        Directory dir = NewDirectory(OldIndexDirs[name]);
        new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false).Upgrade();
        CheckAllSegmentsUpgraded(dir);
        dir.Dispose();
    }
}
public virtual void TestWriterAfterCrash()
{
    // This test relies on being able to open a reader before any commit
    // happened, so we must create an initial commit just to allow that, but
    // before any documents were added.
    Console.WriteLine("TEST: initIndex");
    IndexWriter writer = InitIndex(Random, true);
    Console.WriteLine("TEST: done initIndex");
    MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;

    // We create leftover files because merging could be
    // running / store files could be open when we crash:
    dir.AssertNoUnreferencedFilesOnClose = false;
    dir.PreventDoubleWrite = false;

    Console.WriteLine("TEST: now crash");
    Crash(writer);
    writer = InitIndex(Random, dir, false);
    writer.Dispose();

    IndexReader reader = DirectoryReader.Open(dir);
    Assert.IsTrue(reader.NumDocs < 314);
    reader.Dispose();

    // Make a new dir, copying from the crashed dir, and
    // open IW on it, to confirm IW "recovers" after a
    // crash:
    Directory dir2 = NewDirectory(dir);
    dir.Dispose();

    new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        Random, dir2).Dispose();
    dir2.Dispose();
}
public virtual void TestUTF8toUTF32()
{
    BytesRef utf8 = new BytesRef(20);
    Int32sRef utf32 = new Int32sRef(20);
    int[] codePoints = new int[20];
    int num = AtLeast(50000);
    for (int i = 0; i < num; i++)
    {
        string s = TestUtil.RandomUnicodeString(Random);
        UnicodeUtil.UTF16toUTF8(s.ToCharArray(), 0, s.Length, utf8);
        UnicodeUtil.UTF8toUTF32(utf8, utf32);

        int charUpto = 0;
        int intUpto = 0;
        while (charUpto < s.Length)
        {
            int cp = Character.CodePointAt(s, charUpto);
            codePoints[intUpto++] = cp;
            charUpto += Character.CharCount(cp);
        }
        if (!ArrayUtil.Equals(codePoints, 0, utf32.Int32s, utf32.Offset, intUpto))
        {
            Console.WriteLine("FAILED");
            for (int j = 0; j < s.Length; j++)
            {
                Console.WriteLine("  char[" + j + "]=" + ((int)s[j]).ToString("x"));
            }
            Console.WriteLine();
            Assert.AreEqual(intUpto, utf32.Length);
            for (int j = 0; j < intUpto; j++)
            {
                Console.WriteLine("  " + utf32.Int32s[j].ToString("x") + " vs " + codePoints[j].ToString("x"));
            }
            Assert.Fail("mismatch");
        }
    }
}
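// Side note on the charUpto/Character.CharCount(cp) stepping above (an illustrative
// sketch, not part of the test suite): a .NET string indexes UTF-16 code units, not
// code points, so a supplementary character occupies two chars (a surrogate pair)
// and the scan must advance by 2 there. BCL-only demonstration:
private static void SurrogatePairSketch()
{
    string s = char.ConvertFromUtf32(0x1F600) + "a"; // emoji outside the BMP
    Console.WriteLine(s.Length);                     // 3: two code units for the emoji, plus 'a'
    Console.WriteLine(char.ConvertToUtf32(s, 0));    // 128512 (0x1F600): a single code point
    Console.WriteLine(char.IsSurrogatePair(s, 0));   // True: advance the char index by 2 here
}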
public virtual void TestIsCurrentWithThreads()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
    IndexWriter writer = new IndexWriter(dir, conf);
    ReaderHolder holder = new ReaderHolder();
    ReaderThread[] threads = new ReaderThread[AtLeast(3)];
    CountdownEvent latch = new CountdownEvent(1);
    WriterThread writerThread = new WriterThread(holder, writer, AtLeast(500), Random, latch);
    for (int i = 0; i < threads.Length; i++)
    {
        threads[i] = new ReaderThread(holder, latch);
        threads[i].Start();
    }
    writerThread.Start();
    writerThread.Join();

    bool failed = writerThread.failed != null;
    if (failed)
    {
        Console.WriteLine(writerThread.failed.ToString());
        Console.Write(writerThread.failed.StackTrace);
    }
    for (int i = 0; i < threads.Length; i++)
    {
        threads[i].Join();
        if (threads[i].failed != null)
        {
            Console.WriteLine(threads[i].failed.ToString());
            Console.Write(threads[i].failed.StackTrace);
            failed = true;
        }
    }
    Assert.IsFalse(failed);
    writer.Dispose();
    dir.Dispose();
}
public virtual void Test()
{
    PayloadNearQuery query;
    TopDocs hits;

    query = NewPhraseQuery("field", "twenty two", true, new AveragePayloadFunction());
    QueryUtils.Check(query);

    // all 10 hits should have score = 3 because adjacent terms have payloads of 2,4
    // (AveragePayloadFunction: (2 + 4) / 2 = 3) and all the similarity factors are set to 1
    hits = Searcher.Search(query, null, 100);
    Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
    // 10 documents were added with the tokens "twenty two", each has 3 instances
    Assert.AreEqual(10, hits.TotalHits, "should be 10 hits");
    for (int j = 0; j < hits.ScoreDocs.Length; j++)
    {
        ScoreDoc doc = hits.ScoreDocs[j];
        Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);
    }
    for (int i = 1; i < 10; i++)
    {
        query = NewPhraseQuery("field", English.Int32ToEnglish(i) + " hundred", true, new AveragePayloadFunction());
        if (VERBOSE)
        {
            Console.WriteLine("TEST: run query=" + query);
        }
        // all should have score = 3 because adjacent terms have payloads of 2,4
        // and all the similarity factors are set to 1
        hits = Searcher.Search(query, null, 100);
        Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
        Assert.AreEqual(100, hits.TotalHits, "should be 100 hits");
        for (int j = 0; j < hits.ScoreDocs.Length; j++)
        {
            ScoreDoc doc = hits.ScoreDocs[j];
            //System.out.println("Doc: " + doc.toString());
            //System.out.println("Explain: " + searcher.Explain(query, doc.Doc));
            Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);
        }
    }
}
public virtual void TestTimSortStability()
{
    Random rnd = Random;
    Item[] items = new Item[100];
    for (int i = 0; i < items.Length; i++)
    {
        // Half of the items have a unique value but the same order (0). Those values
        // are generated in increasing order, so they should still be in order after
        // sorting. The other half has a defined order but no value (-1); when sorted
        // they should appear after all of the above.
        bool equal = rnd.NextBoolean();
        items[i] = new Item(equal ? (i + 1) : -1, equal ? 0 : (rnd.Next(1000) + 1));
    }

    if (VERBOSE)
    {
        Console.WriteLine("Before: " + Arrays.ToString(items));
    }
    // if you replace this with ArrayUtil.quickSort(), the test should fail:
    ArrayUtil.TimSort(items);
    if (VERBOSE)
    {
        Console.WriteLine("Sorted: " + Arrays.ToString(items));
    }

    Item last = items[0];
    for (int i = 1; i < items.Length; i++)
    {
        Item act = items[i];
        if (act.Order == 0)
        {
            // order of "equal" items should not be mixed up
            Assert.IsTrue(act.Val > last.Val);
        }
        Assert.IsTrue(act.Order >= last.Order);
        last = act;
    }
}
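// The Item class is not shown in this excerpt; for the assertions above to hold it
// must compare on Order alone, so that items with equal Order look identical to the
// sort, and only a stable sort preserves their insertion order (tracked via Val).
// A plausible sketch under that assumption (the real class may differ in detail):
internal class Item : IComparable<Item>
{
    internal int Val { get; }   // payload; never consulted by the comparison
    internal int Order { get; } // the only sort key

    internal Item(int val, int order)
    {
        Val = val;
        Order = order;
    }

    public int CompareTo(Item other) => Order.CompareTo(other.Order);

    public override string ToString() => Val.ToString();
}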
public virtual int DoNestedConjunctions(int iter, int maxOuterClauses, int maxClauses)
{
    int ret = 0;
    long nMatches = 0;
    for (int i = 0; i < iter; i++)
    {
        int oClauses = Random.Next(maxOuterClauses - 1) + 2;
        BooleanQuery oq = new BooleanQuery();
        BitSet result = null;

        for (int o = 0; o < oClauses; o++)
        {
            int nClauses = Random.Next(maxClauses - 1) + 2; // min 2 clauses
            BooleanQuery bq = new BooleanQuery();
            for (int j = 0; j < nClauses; j++)
            {
                result = AddClause(bq, result);
            }
            oq.Add(bq, Occur.MUST);
        } // outer

        CountingHitCollector hc = validate ? new MatchingHitCollector(result) : new CountingHitCollector();
        s.Search(oq, hc);
        nMatches += hc.Count;
        ret += hc.Sum;
        if (validate)
        {
            Assert.AreEqual(result.Cardinality, hc.Count);
        }
        //System.out.println(hc.getCount());
    }
    if (Verbose)
    {
        Console.WriteLine("Average number of matches=" + (nMatches / iter));
    }
    return ret;
}
public override void Run()
{
    long stopTime = (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) + (long)RUN_TIME_MSEC;
    try
    {
        do
        {
            if (AnyErrors())
            {
                break;
            }
            DoWork();
        } while (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond < stopTime);
    }
    catch (Exception e) when (e.IsThrowable())
    {
        Console.WriteLine(Thread.CurrentThread + ": exc");
        Console.Error.WriteLine(e.StackTrace);
        failed = true;
    }
}
public override void Run()
{
    Document doc = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.StoreTermVectors = true;
    customType.StoreTermVectorPositions = true;
    customType.StoreTermVectorOffsets = true;
    doc.Add(newFieldFunc("content", "aaa", customType));
    do
    {
        for (int i = 0; i < 27; i++)
        {
            try
            {
                writer.AddDocument(doc);
            }
            catch (Exception t) when (t.IsThrowable())
            {
                Console.WriteLine(t.StackTrace);
                Assert.Fail("addDocument failed");
            }
            if (i % 2 == 0)
            {
                try
                {
                    writer.Commit();
                }
                catch (Exception e) when (e.IsException())
                {
                    throw RuntimeException.Create(e);
                }
            }
        }
        Thread.Sleep(1); // LUCENENET NOTE: No need to catch and rethrow same exception type ThreadInterruptedException
    } while (Environment.TickCount < stopTime);
}
public override void Run()
{
    Document doc = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.StoreTermVectors = true;
    customType.StoreTermVectorPositions = true;
    customType.StoreTermVectorOffsets = true;
    doc.Add(newFieldFunc("content", "aaa", customType));
    do
    {
        for (int i = 0; i < 27; i++)
        {
            try
            {
                writer.AddDocument(doc);
            }
            catch (Exception t) when (t.IsThrowable())
            {
                Console.WriteLine(t.StackTrace);
                Assert.Fail("addDocument failed");
            }
            if (i % 2 == 0)
            {
                try
                {
                    writer.Commit();
                }
                catch (Exception e) when (e.IsException())
                {
                    throw RuntimeException.Create(e);
                }
            }
        }
        Thread.Sleep(1); // LUCENENET NOTE: No need to catch and rethrow same exception type ThreadInterruptedException
    } while (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond < stopTime); // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
}
public override void Run()
{
    long stopTime = Environment.TickCount + (long)RUN_TIME_MSEC;
    try
    {
        do
        {
            if (AnyErrors())
            {
                break;
            }
            DoWork();
        } while (Environment.TickCount < stopTime);
    }
    catch (Exception e)
    {
        Console.WriteLine(Thread.CurrentThread + ": exc");
        Console.Error.WriteLine(e.StackTrace);
        failed = true;
    }
}
/// <summary>
/// Checks to see if the hits are what we expected.
/// <para/>
/// LUCENENET specific: is non-static because it depends on the non-static
/// variable <see cref="LuceneTestCase.Similarity"/>.
/// </summary>
/// <param name="query"> the query to execute </param>
/// <param name="description"> the description of the search </param>
/// <param name="expectedIds"> the expected document ids of the hits </param>
/// <param name="expectedScores"> the expected scores of the hits </param>
protected internal void AssertHits(IndexSearcher s, Query query, string description, string[] expectedIds, float[] expectedScores)
{
    QueryUtils.Check(Random, query, s);

    const float tolerance = 1e-5f;

    // Hits hits = searcher.Search(query);
    // hits normalizes and throws things off if one score is greater than 1.0
    TopDocs topdocs = s.Search(query, null, 10000);

    /*
    // display the hits
    System.out.println(hits.Length() + " hits for search: \"" + description + '\"');
    for (int i = 0; i < hits.Length(); i++)
    {
        System.out.println("  " + FIELD_ID + ':' + hits.Doc(i).Get(FIELD_ID) + " (score:" + hits.Score(i) + ')');
    }
    */

    // did we get the hits we expected?
    Assert.AreEqual(expectedIds.Length, topdocs.TotalHits);
    for (int i = 0; i < topdocs.TotalHits; i++)
    {
        //System.out.println(i + " exp: " + expectedIds[i]);
        //System.out.println(i + " field: " + hits.Doc(i).Get(FIELD_ID));

        int id = topdocs.ScoreDocs[i].Doc;
        float score = topdocs.ScoreDocs[i].Score;
        Document doc = s.Doc(id);
        Assert.AreEqual(expectedIds[i], doc.Get(FIELD_ID));
        bool scoreEq = Math.Abs(expectedScores[i] - score) < tolerance;
        if (!scoreEq)
        {
            Console.WriteLine(i + " warning, expected score: " + expectedScores[i] + ", actual " + score);
            Console.WriteLine(s.Explain(query, id));
        }
        Assert.AreEqual(expectedScores[i], score, tolerance);
        Assert.AreEqual(s.Explain(query, id).Value, score, tolerance);
    }
}
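// Hypothetical call site for AssertHits (the query, ids, and scores below are made
// up for illustration, not taken from the test): expectedIds[i] pairs with
// expectedScores[i], and both are checked in rank order against the top docs.
//
//     AssertHits(searcher,
//                new TermQuery(new Term(FIELD_ID, "0")),
//                "single-term query",
//                new string[] { "0", "1" },   // FIELD_ID values, best hit first
//                new float[] { 1.2f, 0.8f }); // must match within the 1e-5 tolerance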
public virtual void TestMockGraphTokenFilterRandom()
{
    for (int iter = 0; iter < 10 * RandomMultiplier; iter++)
    {
        if (Verbose)
        {
            Console.WriteLine("\nTEST: iter=" + iter);
        }

        // Make a new analyzer each time, because MGTF has a fixed seed:
        Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
        {
            Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
            TokenStream t2 = new MockGraphTokenFilter(Random, t);
            return new TokenStreamComponents(t, t2);
        });

        Random random = Random;
        CheckRandomData(random, a, 5, AtLeast(100));
    }
}
private void CheckBoosts(MultiTermQuery.RewriteMethod method)
{
    MultiTermQuery mtq = new MultiTermQueryAnonymousInnerClassHelper(this);
    mtq.MultiTermRewriteMethod = method;
    Query q1 = searcher.Rewrite(mtq);
    Query q2 = multiSearcher.Rewrite(mtq);
    Query q3 = multiSearcherDupls.Rewrite(mtq);
    if (Verbose)
    {
        Console.WriteLine();
        Console.WriteLine("single segment: " + q1);
        Console.WriteLine("multi segment: " + q2);
        Console.WriteLine("multi segment with duplicates: " + q3);
    }
    Assert.IsTrue(q1.Equals(q2), "The multi-segment case must produce same rewritten query");
    Assert.IsTrue(q1.Equals(q3), "The multi-segment case with duplicates must produce same rewritten query");
    CheckBooleanQueryBoosts((BooleanQuery)q1);
    CheckBooleanQueryBoosts((BooleanQuery)q2);
    CheckBooleanQueryBoosts((BooleanQuery)q3);
}
public override IndexOutput CreateOutput(string name, IOContext context)
{
    if (VERBOSE)
    {
        Console.WriteLine("nrtdir.createOutput name=" + name);
    }
    if (DoCacheWrite(name, context))
    {
        if (VERBOSE)
        {
            Console.WriteLine("  to cache");
        }
        try
        {
            @delegate.DeleteFile(name);
        }
#pragma warning disable 168
        catch (IOException ioe)
#pragma warning restore 168
        {
            // this is fine: file may not exist
        }
        return cache.CreateOutput(name, context);
    }
    else
    {
        try
        {
            cache.DeleteFile(name);
        }
#pragma warning disable 168
        catch (IOException ioe)
#pragma warning restore 168
        {
            // this is fine: file may not exist
        }
        return @delegate.CreateOutput(name, context);
    }
}
private void UnCache(string fileName)
{
    // Only let one thread uncache at a time; this only
    // happens during commit() or close():
    lock (uncacheLock)
    {
        if (VERBOSE)
        {
            Console.WriteLine("nrtdir.unCache name=" + fileName);
        }
#pragma warning disable 612, 618
        if (!cache.FileExists(fileName))
#pragma warning restore 612, 618
        {
            // Another thread beat us...
            return;
        }
        IOContext context = IOContext.DEFAULT;
        IndexOutput @out = @delegate.CreateOutput(fileName, context);
        IndexInput @in = null;
        try
        {
            @in = cache.OpenInput(fileName, context);
            @out.CopyBytes(@in, @in.Length);
        }
        finally
        {
            IOUtils.Dispose(@in, @out);
        }

        // Lock order: uncacheLock -> this
        lock (this)
        {
            // Must sync here because other sync methods have
            // if (cache.fileExists(name)) { ... } else { ... }:
            cache.DeleteFile(fileName);
        }
    }
}
private void CheckDuplicateTerms(MultiTermQuery.RewriteMethod method)
{
    MultiTermQuery mtq = TermRangeQuery.NewStringRange("data", "2", "7", true, true);
    mtq.MultiTermRewriteMethod = method;
    Query q1 = searcher.Rewrite(mtq);
    Query q2 = multiSearcher.Rewrite(mtq);
    Query q3 = multiSearcherDupls.Rewrite(mtq);
    if (Verbose)
    {
        Console.WriteLine();
        Console.WriteLine("single segment: " + q1);
        Console.WriteLine("multi segment: " + q2);
        Console.WriteLine("multi segment with duplicates: " + q3);
    }
    Assert.IsTrue(q1.Equals(q2), "The multi-segment case must produce same rewritten query");
    Assert.IsTrue(q1.Equals(q3), "The multi-segment case with duplicates must produce same rewritten query");
    CheckBooleanQueryOrder(q1);
    CheckBooleanQueryOrder(q2);
    CheckBooleanQueryOrder(q3);
}
internal virtual void OpenNextFile()
{
    Dispose();
    //currPathType = null;
    while (true)
    {
        if (nextFile >= inputFiles.Count)
        {
            // exhausted all files; start a new round, unless forever is set to false.
            if (!m_forever)
            {
                throw new NoMoreDataException();
            }
            nextFile = 0;
            iteration++;
        }
        FileInfo f = inputFiles[nextFile++];
        if (m_verbose)
        {
            Console.WriteLine("opening: " + f + " length: " + f.Length);
        }
        try
        {
            // support either gzip, bzip2, or regular text file, by extension
            Stream inputStream = StreamUtils.GetInputStream(f);
            reader = new StreamReader(inputStream, m_encoding);
            currPathType = TrecDocParser.PathType(f);
            return;
        }
        catch (Exception e)
        {
            if (m_verbose)
            {
                Console.WriteLine("Skipping 'bad' file " + f.FullName + " due to " + e.Message);
                continue;
            }
            throw new NoMoreDataException();
        }
    }
}
private static void AssertAutomaton(Automaton automaton)
{
    var cra = new CharacterRunAutomaton(automaton);
    var bra = new ByteRunAutomaton(automaton);
    var ras = new RandomAcceptedStrings(automaton);

    int num = AtLeast(1000);
    for (int i = 0; i < num; i++)
    {
        string s;
        if (Random.NextBoolean())
        {
            // likely not accepted
            s = TestUtil.RandomUnicodeString(Random);
        }
        else
        {
            // will be accepted
            int[] codepoints = ras.GetRandomAcceptedString(Random);
            try
            {
                s = UnicodeUtil.NewString(codepoints, 0, codepoints.Length);
            }
            catch (Exception /*e*/)
            {
                Console.WriteLine(codepoints.Length + " codepoints:");
                for (int j = 0; j < codepoints.Length; j++)
                {
                    Console.WriteLine("  " + codepoints[j].ToString("x"));
                }
                throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details)
            }
        }
        var bytes = s.GetBytes(Encoding.UTF8);
        Assert.AreEqual(cra.Run(s), bra.Run(bytes, 0, bytes.Length));
    }
}
public virtual void TestFilteredDocIdSet()
{
    const int maxdoc = 10;
    DocIdSet innerSet = new DocIdSetAnonymousClass(this, maxdoc);
    DocIdSet filteredSet = new FilteredDocIdSetAnonymousClass(this, innerSet);
    DocIdSetIterator iter = filteredSet.GetIterator();
    IList<int> list = new JCG.List<int>();
    int doc = iter.Advance(3);
    if (doc != DocIdSetIterator.NO_MORE_DOCS)
    {
        list.Add(Convert.ToInt32(doc));
        while ((doc = iter.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
        {
            list.Add(Convert.ToInt32(doc));
        }
    }

    int[] docs = new int[list.Count];
    int c = 0;
    using IEnumerator<int> intIter = list.GetEnumerator();
    while (intIter.MoveNext())
    {
        docs[c++] = intIter.Current;
    }
    int[] answer = new int[] { 4, 6, 8 };
    bool same = Arrays.Equals(answer, docs);
    if (!same)
    {
        Console.WriteLine("answer: " + Arrays.ToString(answer));
        Console.WriteLine("gotten: " + Arrays.ToString(docs));
        Assert.Fail();
    }
}
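// The anonymous DocIdSet classes are defined elsewhere in the test; for the expected
// answer { 4, 6, 8 } to fall out of Advance(3) over 10 docs, the inner set must
// enumerate every doc id (0..maxdoc - 1) and the filter must keep only the even ones.
// A sketch consistent with that behavior (an assumption about the real anonymous
// classes, not a copy of them):
internal sealed class EvenDocsFilteredSet : FilteredDocIdSet
{
    public EvenDocsFilteredSet(DocIdSet innerSet)
        : base(innerSet) // innerSet conceptually yields 0, 1, 2, ..., maxdoc - 1
    {
    }

    // Keep even doc ids only: Advance(3) lands on 4; NextDoc() then yields 6 and 8.
    protected override bool Match(int docid) => docid % 2 == 0;
}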
public virtual void TestBooleanSpanQuery()
{
    bool failed = false;
    int hits = 0;
    Directory directory = NewDirectory();
    Analyzer indexerAnalyzer = new MockAnalyzer(Random);

    IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, indexerAnalyzer);
    IndexWriter writer = new IndexWriter(directory, config);
    string FIELD = "content";
    Document d = new Document();
    d.Add(new TextField(FIELD, "clockwork orange", Field.Store.YES));
    writer.AddDocument(d);
    writer.Dispose();

    IndexReader indexReader = DirectoryReader.Open(directory);
    IndexSearcher searcher = NewSearcher(indexReader);

    BooleanQuery query = new BooleanQuery();
    SpanQuery sq1 = new SpanTermQuery(new Term(FIELD, "clockwork"));
    SpanQuery sq2 = new SpanTermQuery(new Term(FIELD, "clckwork"));
    query.Add(sq1, Occur.SHOULD);
    query.Add(sq2, Occur.SHOULD);
    TopScoreDocCollector collector = TopScoreDocCollector.Create(1000, true);
    searcher.Search(query, collector);
    hits = collector.GetTopDocs().ScoreDocs.Length;
    foreach (ScoreDoc scoreDoc in collector.GetTopDocs().ScoreDocs)
    {
        Console.WriteLine(scoreDoc.Doc);
    }
    indexReader.Dispose();
    Assert.AreEqual(failed, false, "Bug in boolean query composed of span queries");
    Assert.AreEqual(hits, 1, "Bug in boolean query composed of span queries");
    directory.Dispose();
}
private string[] BestTerms(string field, int numTerms)
{
    Util.PriorityQueue<TermDf> pq = new TermsDfQueue(numTerms);
    IndexReader ir = DirectoryReader.Open(dir);
    try
    {
        int threshold = ir.MaxDoc / 10; // ignore words too common.
        Terms terms = MultiFields.GetTerms(ir, field);
        if (terms != null)
        {
            TermsEnum termsEnum = terms.GetIterator(null);
            while (termsEnum.Next() != null)
            {
                int df = termsEnum.DocFreq;
                if (df < threshold)
                {
                    string ttxt = termsEnum.Term.Utf8ToString();
                    pq.InsertWithOverflow(new TermDf(ttxt, df));
                }
            }
        }
    }
    finally
    {
        ir.Dispose();
    }
    string[] res = new string[pq.Count];
    int i = 0;
    while (pq.Count > 0)
    {
        TermDf tdf = pq.Pop();
        res[i++] = tdf.word;
        Console.WriteLine(i + ".   word:  " + tdf.df + "   " + tdf.word);
    }
    return res;
}
public virtual void Extract()
{
    Document doc; // LUCENENET: IDE0059: Remove unnecessary value assignment
    Console.WriteLine("Starting Extraction");
    long start = J2N.Time.CurrentTimeMilliseconds();
    try
    {
        while ((doc = m_docMaker.MakeDocument()) != null)
        {
            Create(doc.Get(DocMaker.ID_FIELD), doc.Get(DocMaker.TITLE_FIELD),
                doc.Get(DocMaker.DATE_FIELD), doc.Get(DocMaker.BODY_FIELD));
        }
    }
    catch (NoMoreDataException /*e*/)
    {
        // continue
    }
    long finish = J2N.Time.CurrentTimeMilliseconds();
    Console.WriteLine("Extraction took " + (finish - start) + " ms");
}
public override void Run()
{
    bool done = false;
    while (!done)
    {
        for (int i = 0; i < 100; i++)
        {
            try
            {
                FinalWriter.AddDocument(Doc);
            }
#pragma warning disable 168
            catch (ObjectDisposedException e)
#pragma warning restore 168
            {
                done = true;
                break;
            }
#pragma warning disable 168
            catch (System.NullReferenceException e)
#pragma warning restore 168
            {
                done = true;
                break;
            }
            catch (Exception e)
            {
                Console.WriteLine(e.StackTrace);
                Failure.Add(e);
                done = true;
                break;
            }
        }
        Thread.Sleep(0);
    }
}
public override void Run()
{
    try
    {
        if (Verbose)
        {
            Console.WriteLine("[" + Thread.CurrentThread.Name + "]: launch reopen thread");
        }

        while (Environment.TickCount < stopTime)
        {
            Thread.Sleep(TestUtil.NextInt32(Random, 1, 100));
            outerInstance.m_writer.Commit();
            Thread.Sleep(TestUtil.NextInt32(Random, 1, 5));
            bool block = Random.NextBoolean();
            if (block)
            {
                outerInstance.mgr.MaybeRefreshBlocking();
                outerInstance.lifetimeMGR.Prune(outerInstance.pruner);
            }
            else if (outerInstance.mgr.MaybeRefresh())
            {
                outerInstance.lifetimeMGR.Prune(outerInstance.pruner);
            }
        }
    }
    catch (Exception t)
    {
        if (Verbose)
        {
            Console.WriteLine("TEST: reopen thread hit exc");
            Console.Out.Write(t.StackTrace);
        }
        outerInstance.m_failed.Value = true;
        throw new Exception(t.ToString(), t);
    }
}
public override void Run()
{
    try
    {
        if (Verbose)
        {
            Console.WriteLine("[" + Thread.CurrentThread.Name + "]: launch reopen thread");
        }

        // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
        while (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond < stopTime)
        {
            Thread.Sleep(TestUtil.NextInt32(Random, 1, 100));
            outerInstance.m_writer.Commit();
            Thread.Sleep(TestUtil.NextInt32(Random, 1, 5));
            bool block = Random.NextBoolean();
            if (block)
            {
                outerInstance.mgr.MaybeRefreshBlocking();
                outerInstance.lifetimeMGR.Prune(outerInstance.pruner);
            }
            else if (outerInstance.mgr.MaybeRefresh())
            {
                outerInstance.lifetimeMGR.Prune(outerInstance.pruner);
            }
        }
    }
    catch (Exception t) when (t.IsThrowable())
    {
        if (Verbose)
        {
            Console.WriteLine("TEST: reopen thread hit exc");
            Console.Out.Write(t.StackTrace);
        }
        outerInstance.m_failed.Value = true;
        throw RuntimeException.Create(t);
    }
}
public override void SetUp()
{
    base.SetUp();
    dir = NewDirectory();
    fieldName = Random.NextBoolean() ? "field" : ""; // sometimes use an empty string as field name
    RandomIndexWriter writer = new RandomIndexWriter(Random, dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false))
            .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
    Document doc = new Document();
    Field field = NewStringField(fieldName, "", Field.Store.NO);
    doc.Add(field);
    JCG.List<string> terms = new JCG.List<string>();
    int num = AtLeast(200);
    for (int i = 0; i < num; i++)
    {
        string s = TestUtil.RandomUnicodeString(Random);
        field.SetStringValue(s);
        terms.Add(s);
        writer.AddDocument(doc);
    }

    if (Verbose)
    {
        // utf16 order
        terms.Sort();
        Console.WriteLine("UTF16 order:");
        foreach (string s in terms)
        {
            Console.WriteLine("  " + UnicodeUtil.ToHexString(s));
        }
    }

    reader = writer.GetReader();
    searcher1 = NewSearcher(reader);
    searcher2 = NewSearcher(reader);
    writer.Dispose();
}
public override IndexInput OpenInput(string name, IOContext context)
{
    lock (this)
    {
        if (VERBOSE)
        {
            Console.WriteLine("nrtdir.openInput name=" + name);
        }
#pragma warning disable 612, 618
        if (cache.FileExists(name))
#pragma warning restore 612, 618
        {
            if (VERBOSE)
            {
                Console.WriteLine("  from cache");
            }
            return cache.OpenInput(name, context);
        }
        else
        {
            return @delegate.OpenInput(name, context);
        }
    }
}
public virtual void TestEmptyIndexWithVectors()
{
    Directory rd1 = NewDirectory();
    {
        if (Verbose)
        {
            Console.WriteLine("\nTEST: make 1st writer");
        }
        IndexWriter iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
        Document doc = new Document();
        Field idField = NewTextField("id", "", Field.Store.NO);
        doc.Add(idField);
        FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
        customType.StoreTermVectors = true;
        doc.Add(NewField("test", "", customType));
        idField.SetStringValue("1");
        iw.AddDocument(doc);
        doc.Add(NewTextField("test", "", Field.Store.NO));
        idField.SetStringValue("2");
        iw.AddDocument(doc);
        iw.Dispose();

        IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
            .SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
        if (Verbose)
        {
            Console.WriteLine("\nTEST: make 2nd writer");
        }
        IndexWriter writer = new IndexWriter(rd1, dontMergeConfig);

        writer.DeleteDocuments(new Term("id", "1"));
        writer.Dispose();
        IndexReader ir = DirectoryReader.Open(rd1);
        Assert.AreEqual(2, ir.MaxDoc);
        Assert.AreEqual(1, ir.NumDocs);
        ir.Dispose();

        iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND));
        iw.ForceMerge(1);
        iw.Dispose();
    }

    Directory rd2 = NewDirectory();
    {
        IndexWriter iw = new IndexWriter(rd2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
        Document doc = new Document();
        iw.AddDocument(doc);
        iw.Dispose();
    }

    Directory rdOut = NewDirectory();

    IndexWriter iwOut = new IndexWriter(rdOut, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));

    DirectoryReader reader1, reader2;
    ParallelAtomicReader pr = new ParallelAtomicReader(
        SlowCompositeReaderWrapper.Wrap(reader1 = DirectoryReader.Open(rd1)),
        SlowCompositeReaderWrapper.Wrap(reader2 = DirectoryReader.Open(rd2)));

    // When unpatched, Lucene crashes here with an ArrayIndexOutOfBoundsException (caused by TermVectorsWriter)
    iwOut.AddIndexes(pr);

    // ParallelReader closes any IndexReader you added to it:
    pr.Dispose();

    // assert subreaders were closed
    Assert.AreEqual(0, reader1.RefCount);
    Assert.AreEqual(0, reader2.RefCount);

    rd1.Dispose();
    rd2.Dispose();

    iwOut.ForceMerge(1);
    iwOut.Dispose();
    rdOut.Dispose();
}
protected override void DoSearching(TaskScheduler es, long stopTime)
{
    bool anyOpenDelFiles = false;

    DirectoryReader r = DirectoryReader.Open(m_writer, true);

    while (Environment.TickCount < stopTime && !m_failed)
    {
        if (Random.NextBoolean())
        {
            if (Verbose)
            {
                Console.WriteLine("TEST: now reopen r=" + r);
            }
            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
            if (r2 != null)
            {
                r.Dispose();
                r = r2;
            }
        }
        else
        {
            if (Verbose)
            {
                Console.WriteLine("TEST: now close reader=" + r);
            }
            r.Dispose();
            m_writer.Commit();
            ICollection<string> openDeletedFiles = ((MockDirectoryWrapper)m_dir).GetOpenDeletedFiles();
            if (openDeletedFiles.Count > 0)
            {
                Console.WriteLine("OBD files: " + openDeletedFiles);
            }
            anyOpenDelFiles |= openDeletedFiles.Count > 0;
            //Assert.AreEqual("open but deleted: " + openDeletedFiles, 0, openDeletedFiles.Size());

            if (Verbose)
            {
                Console.WriteLine("TEST: now open");
            }
            r = DirectoryReader.Open(m_writer, true);
        }
        if (Verbose)
        {
            Console.WriteLine("TEST: got new reader=" + r);
        }
        //System.out.println("numDocs=" + r.NumDocs + " openDelFileCount=" + dir.openDeleteFileCount());

        if (r.NumDocs > 0)
        {
            fixedSearcher = new IndexSearcher(r, es);
            SmokeTestSearcher(fixedSearcher);
            RunSearchThreads(Environment.TickCount + 500);
        }
    }
    r.Dispose();

    //System.out.println("numDocs=" + r.NumDocs + " openDelFileCount=" + dir.openDeleteFileCount());
    ICollection<string> openDeletedFiles_ = ((MockDirectoryWrapper)m_dir).GetOpenDeletedFiles();
    if (openDeletedFiles_.Count > 0)
    {
        Console.WriteLine("OBD files: " + openDeletedFiles_);
    }
    anyOpenDelFiles |= openDeletedFiles_.Count > 0;

    Assert.IsFalse(anyOpenDelFiles, "saw non-zero open-but-deleted count");
}