// TODO: create a testNormsNotPresent ourselves by adding/deleting/merging docs
public virtual void BuildIndex(Directory dir)
{
    Random random = Random();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    Similarity provider = new MySimProvider(this);
    config.SetSimilarity(provider);
    RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
    LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());
    int num = AtLeast(100);
    for (int i = 0; i < num; i++)
    {
        Document doc = docs.NextDoc();
        int boost = Random().Next(255);
        Field f = new TextField(ByteTestField, "" + boost, Field.Store.YES);
        f.Boost = boost;
        doc.Add(f);
        writer.AddDocument(doc);
        doc.RemoveField(ByteTestField);
        if (Rarely())
        {
            writer.Commit();
        }
    }
    writer.Commit();
    writer.Dispose();
    docs.Dispose();
}
/// <summary>
/// Populates a writer with random stuff. This must be fully reproducible with the seed!
/// </summary>
public static void CreateRandomIndex(int numdocs, RandomIndexWriter writer, long seed)
{
    Random random = new Random((int)seed);
    // primary source for our data is from linefiledocs, it's realistic.
    LineFileDocs lineFileDocs = new LineFileDocs(random);
    // TODO: we should add other fields that use things like docs&freqs but omit positions,
    // because linefiledocs doesn't cover all the possibilities.
    for (int i = 0; i < numdocs; i++)
    {
        Document document = lineFileDocs.NextDoc();
        // grab the title and add some SortedSet instances for fun
        string title = document.Get("titleTokenized");
        // split on whitespace via regex, matching the Java title.split("\\s+");
        // the previous "\\s+".ToCharArray() split on the literal characters '\', 's', '+'
        string[] split = System.Text.RegularExpressions.Regex.Split(title, "\\s+");
        foreach (string trash in split)
        {
            document.Add(new SortedSetDocValuesField("sortedset", new BytesRef(trash)));
        }
        // add a numeric dv field sometimes
        document.RemoveFields("sparsenumeric");
        if (random.Next(4) == 2)
        {
            document.Add(new NumericDocValuesField("sparsenumeric", random.Next()));
        }
        writer.AddDocument(document);
    }
    lineFileDocs.Dispose();
}
/// <summary>
/// Populates a writer with random stuff. This must be fully reproducible with
/// the seed!
/// </summary>
public static void CreateRandomIndex(int numdocs, RandomIndexWriter writer, Random random)
{
    LineFileDocs lineFileDocs = new LineFileDocs(random);
    for (int i = 0; i < numdocs; i++)
    {
        writer.AddDocument(lineFileDocs.NextDoc());
    }
    lineFileDocs.Dispose();
}
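// A usage sketch (not part of the original suite, added for illustration): both
// CreateRandomIndex overloads draw every random decision from the seed/Random
// handed in, so two writers populated with the same seed receive identical
// document streams. Assumes this lives in a LuceneTestCase subclass (for
// Random()/NewDirectory()); the method name and locals are hypothetical.
public virtual void ExampleReproducibleIndexes()
{
    using (Directory dir1 = NewDirectory())
    using (Directory dir2 = NewDirectory())
    {
        RandomIndexWriter w1 = new RandomIndexWriter(Random(), dir1);
        RandomIndexWriter w2 = new RandomIndexWriter(Random(), dir2);
        CreateRandomIndex(50, w1, 42L); // same numdocs, same seed ...
        CreateRandomIndex(50, w2, 42L); // ... so both writers get the same docs
        w1.Dispose();
        w2.Dispose();
    }
}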
public virtual void Test()
{
    Directory d = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    MyIndexWriter w = new MyIndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));

    // Try to make an index that requires merging:
    w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 2, 11));
    int numStartDocs = AtLeast(20);
    LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
    for (int docIDX = 0; docIDX < numStartDocs; docIDX++)
    {
        w.AddDocument(docs.NextDoc());
    }
    MergePolicy mp = w.Config.MergePolicy;
    int mergeAtOnce = 1 + w.GetSegmentInfosSize_Nunit();
    if (mp is TieredMergePolicy)
    {
        ((TieredMergePolicy)mp).MaxMergeAtOnce = mergeAtOnce;
    }
    else if (mp is LogMergePolicy)
    {
        ((LogMergePolicy)mp).MergeFactor = mergeAtOnce;
    }
    else
    {
        // skip test
        w.Dispose();
        d.Dispose();
        return;
    }

    AtomicBoolean doStop = new AtomicBoolean();
    w.Config.SetMaxBufferedDocs(2);
    ThreadClass t = new ThreadAnonymousInnerClassHelper(this, w, numStartDocs, docs, doStop);
    t.Start();
    w.ForceMerge(1);
    doStop.Set(true);
    t.Join();
    Assert.IsTrue(w.MergeCount.Get() <= 1, "merge count is " + w.MergeCount.Get());
    w.Dispose();
    d.Dispose();
    docs.Dispose();
}
public virtual void TestFloatNorms()
{
    Directory dir = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    Similarity provider = new MySimProvider(this);
    config.SetSimilarity(provider);
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, config);
    LineFileDocs docs = new LineFileDocs(Random());
    int num = AtLeast(100);
    for (int i = 0; i < num; i++)
    {
        Document doc = docs.NextDoc();
        float nextFloat = (float)Random().NextDouble();
        // Cast to a double to get more precision output to the string.
        Field f = new TextField(FloatTestField, "" + (double)nextFloat, Field.Store.YES);
        f.Boost = nextFloat;
        doc.Add(f);
        writer.AddDocument(doc);
        doc.RemoveField(FloatTestField);
        if (Rarely())
        {
            writer.Commit();
        }
    }
    writer.Commit();
    writer.Dispose();

    AtomicReader open = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
    NumericDocValues norms = open.GetNormValues(FloatTestField);
    Assert.IsNotNull(norms);
    for (int i = 0; i < open.MaxDoc; i++)
    {
        Document document = open.Document(i);
        float expected = Convert.ToSingle(document.Get(FloatTestField));
        Assert.AreEqual(expected, Number.IntBitsToFloat((int)norms.Get(i)), 0.0f);
    }
    open.Dispose();
    dir.Dispose();
    docs.Dispose();
}
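// Why the assertion above can demand exact equality (a sketch, not from the
// original source; it assumes MySimProvider stores the raw IEEE-754 bits of the
// boost in the norm, and that Number.FloatToIntBits exists as the mirror of
// Java's Float.floatToIntBits):
//
//     float boost = 0.625f;
//     long norm = Number.FloatToIntBits(boost);         // what the similarity is assumed to store
//     float decoded = Number.IntBitsToFloat((int)norm); // what the test reads back
//     // decoded == boost, bit-for-bit, so a delta of 0.0f is safe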
public virtual void TestRollbackAndCommitWithThreads()
{
    BaseDirectoryWrapper d = NewDirectory();
    if (d is MockDirectoryWrapper)
    {
        ((MockDirectoryWrapper)d).PreventDoubleWrite = false;
    }
    int threadCount = TestUtil.NextInt(Random(), 2, 6);
    AtomicReference<IndexWriter> writerRef = new AtomicReference<IndexWriter>();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    writerRef.Value = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
    LineFileDocs docs = new LineFileDocs(Random());
    ThreadClass[] threads = new ThreadClass[threadCount];
    int iters = AtLeast(100);
    AtomicBoolean failed = new AtomicBoolean();
    ReentrantLock rollbackLock = new ReentrantLock();
    ReentrantLock commitLock = new ReentrantLock();
    for (int threadID = 0; threadID < threadCount; threadID++)
    {
        threads[threadID] = new ThreadAnonymousInnerClassHelper(this, d, writerRef, docs, iters, failed, rollbackLock, commitLock);
        threads[threadID].Start();
    }
    for (int threadID = 0; threadID < threadCount; threadID++)
    {
        threads[threadID].Join();
    }
    Assert.IsTrue(!failed.Get());
    writerRef.Value.Dispose();
    d.Dispose();
}
public virtual void Test()
{
    Random random = new Random(Random().Next());
    LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());
    Directory d = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    RandomIndexWriter w = new RandomIndexWriter(Random(), d, analyzer);
    int numDocs = AtLeast(10);
    for (int docCount = 0; docCount < numDocs; docCount++)
    {
        w.AddDocument(docs.NextDoc());
    }
    IndexReader r = w.Reader;
    w.Dispose();

    List<BytesRef> terms = new List<BytesRef>();
    TermsEnum termsEnum = MultiFields.GetTerms(r, "body").Iterator(null);
    BytesRef term;
    while ((term = termsEnum.Next()) != null)
    {
        terms.Add(BytesRef.DeepCopyOf(term));
    }
    if (VERBOSE)
    {
        Console.WriteLine("TEST: " + terms.Count + " terms");
    }

    int upto = -1;
    int iters = AtLeast(200);
    for (int iter = 0; iter < iters; iter++)
    {
        bool isEnd;
        if (upto != -1 && Random().NextBoolean())
        {
            // next
            if (VERBOSE)
            {
                Console.WriteLine("TEST: iter next");
            }
            isEnd = termsEnum.Next() == null;
            upto++;
            if (isEnd)
            {
                if (VERBOSE)
                {
                    Console.WriteLine(" end");
                }
                Assert.AreEqual(upto, terms.Count);
                upto = -1;
            }
            else
            {
                if (VERBOSE)
                {
                    Console.WriteLine(" got term=" + termsEnum.Term().Utf8ToString() + " expected=" + terms[upto].Utf8ToString());
                }
                Assert.IsTrue(upto < terms.Count);
                Assert.AreEqual(terms[upto], termsEnum.Term());
            }
        }
        else
        {
            BytesRef target;
            string exists;
            if (Random().NextBoolean())
            {
                // likely fake term
                if (Random().NextBoolean())
                {
                    target = new BytesRef(TestUtil.RandomSimpleString(Random()));
                }
                else
                {
                    target = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
                }
                exists = "likely not";
            }
            else
            {
                // real term
                target = terms[Random().Next(terms.Count)];
                exists = "yes";
            }

            upto = terms.BinarySearch(target);

            if (Random().NextBoolean())
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: iter seekCeil target=" + target.Utf8ToString() + " exists=" + exists);
                }
                // seekCeil
                TermsEnum.SeekStatus status = termsEnum.SeekCeil(target);
                if (VERBOSE)
                {
                    Console.WriteLine(" got " + status);
                }
                if (upto < 0)
                {
                    upto = -(upto + 1);
                    if (upto >= terms.Count)
                    {
                        Assert.AreEqual(TermsEnum.SeekStatus.END, status);
                        upto = -1;
                    }
                    else
                    {
                        Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, status);
                        Assert.AreEqual(terms[upto], termsEnum.Term());
                    }
                }
                else
                {
                    Assert.AreEqual(TermsEnum.SeekStatus.FOUND, status);
                    Assert.AreEqual(terms[upto], termsEnum.Term());
                }
            }
            else
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: iter seekExact target=" + target.Utf8ToString() + " exists=" + exists);
                }
                // seekExact
                bool result = termsEnum.SeekExact(target);
                if (VERBOSE)
                {
                    Console.WriteLine(" got " + result);
                }
                if (upto < 0)
                {
                    Assert.IsFalse(result);
                    upto = -1;
                }
                else
                {
                    Assert.IsTrue(result);
                    Assert.AreEqual(target, termsEnum.Term());
                }
            }
        }
    }

    r.Dispose();
    d.Dispose();
    docs.Dispose();
}
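// Note on the seek bookkeeping above (added for illustration): List<T>.BinarySearch
// returns the bitwise complement of the insertion point when the target is absent,
// so a negative result is mapped back with upto = -(upto + 1), which equals ~upto.
// For example, a target that would sort between terms[2] and terms[3] yields
// ~3 == -4, and -(-4 + 1) == 3 — exactly the index SeekCeil should land on.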
public virtual void Test()
{
    MockDirectoryWrapper dir = NewMockFSDirectory(CreateTempDir("TestIndexWriterOutOfFileDescriptors"));
    dir.PreventDoubleWrite = false;
    double rate = Random().NextDouble() * 0.01;
    //System.out.println("rate=" + rate);
    dir.RandomIOExceptionRateOnOpen = rate;
    int iters = AtLeast(20);
    LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
    IndexReader r = null;
    DirectoryReader r2 = null;
    bool any = false;
    MockDirectoryWrapper dirCopy = null;
    int lastNumDocs = 0;
    for (int iter = 0; iter < iters; iter++)
    {
        IndexWriter w = null;
        if (VERBOSE)
        {
            Console.WriteLine("TEST: iter=" + iter);
        }
        try
        {
            MockAnalyzer analyzer = new MockAnalyzer(Random());
            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
            if (VERBOSE)
            {
                // Do this ourselves instead of relying on LTC so
                // we see incrementing messageID:
                iwc.InfoStream = new PrintStreamInfoStream(Console.Out);
            }
            var ms = iwc.MergeScheduler;
            if (ms is IConcurrentMergeScheduler)
            {
                ((IConcurrentMergeScheduler)ms).SetSuppressExceptions();
            }
            w = new IndexWriter(dir, iwc);
            if (r != null && Random().Next(5) == 3)
            {
                if (Random().NextBoolean())
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: addIndexes IR[]");
                    }
                    w.AddIndexes(new IndexReader[] { r });
                }
                else
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: addIndexes Directory[]");
                    }
                    w.AddIndexes(new Directory[] { dirCopy });
                }
            }
            else
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: addDocument");
                }
                w.AddDocument(docs.NextDoc());
            }
            dir.RandomIOExceptionRateOnOpen = 0.0;
            w.Dispose();
            w = null;

            // NOTE: this is O(N^2)! Only enable for temporary debugging:
            //dir.setRandomIOExceptionRateOnOpen(0.0);
            //TestUtil.CheckIndex(dir);
            //dir.setRandomIOExceptionRateOnOpen(rate);

            // Verify numDocs only increases, to catch IndexWriter
            // accidentally deleting the index:
            dir.RandomIOExceptionRateOnOpen = 0.0;
            Assert.IsTrue(DirectoryReader.IndexExists(dir));
            if (r2 == null)
            {
                r2 = DirectoryReader.Open(dir);
            }
            else
            {
                DirectoryReader r3 = DirectoryReader.OpenIfChanged(r2);
                if (r3 != null)
                {
                    r2.Dispose();
                    r2 = r3;
                }
            }
            Assert.IsTrue(r2.NumDocs >= lastNumDocs, "before=" + lastNumDocs + " after=" + r2.NumDocs);
            lastNumDocs = r2.NumDocs;
            //System.out.println("numDocs=" + lastNumDocs);
            dir.RandomIOExceptionRateOnOpen = rate;

            any = true;
            if (VERBOSE)
            {
                Console.WriteLine("TEST: iter=" + iter + ": success");
            }
        }
        catch (IOException ioe)
        {
            if (VERBOSE)
            {
                Console.WriteLine("TEST: iter=" + iter + ": exception");
                Console.WriteLine(ioe.ToString());
                Console.Write(ioe.StackTrace);
            }
            if (w != null)
            {
                // NOTE: leave random IO exceptions enabled here,
                // to verify that rollback does not try to write
                // anything:
                w.Rollback();
            }
        }

        if (any && r == null && Random().NextBoolean())
        {
            // Make a copy of a non-empty index so we can use
            // it to addIndexes later:
            dir.RandomIOExceptionRateOnOpen = 0.0;
            r = DirectoryReader.Open(dir);
            dirCopy = NewMockFSDirectory(CreateTempDir("TestIndexWriterOutOfFileDescriptors.copy"));
            HashSet<string> files = new HashSet<string>();
            foreach (string file in dir.ListAll())
            {
                dir.Copy(dirCopy, file, file, IOContext.DEFAULT);
                files.Add(file);
            }
            dirCopy.Sync(files);
            // Have IW kiss the dir so we remove any leftover
            // files ... we can easily have leftover files at
            // the time we take a copy because we are holding
            // open a reader:
            (new IndexWriter(dirCopy, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())))).Dispose();
            dirCopy.RandomIOExceptionRate = rate;
            dir.RandomIOExceptionRateOnOpen = rate;
        }
    }
    if (r2 != null)
    {
        r2.Dispose();
    }
    if (r != null)
    {
        r.Dispose();
        dirCopy.Dispose();
    }
    dir.Dispose();
}
private ThreadClass[] LaunchIndexingThreads(LineFileDocs docs, int numThreads, DateTime stopTime, ISet<string> delIDs, ISet<string> delPackIDs, IList<SubDocs> allSubDocs)
{
    ThreadClass[] threads = new ThreadClass[numThreads];
    for (int thread = 0; thread < numThreads; thread++)
    {
        threads[thread] = new ThreadAnonymousInnerClassHelper(this, docs, stopTime, delIDs, delPackIDs, allSubDocs);
        threads[thread].SetDaemon(true);
        threads[thread].Start();
    }
    return threads;
}
// Collections.synchronizedMap(new WeakHashMap<SegmentCoreReaders, bool?>());

public virtual void RunTest(string testName)
{
    Failed.Set(false);
    AddCount.Set(0);
    DelCount.Set(0);
    PackCount.Set(0);

    DateTime t0 = DateTime.UtcNow;

    Random random = new Random(Random().Next());
    LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());
    DirectoryInfo tempDir = CreateTempDir(testName);
    Dir = GetDirectory(NewMockFSDirectory(tempDir)); // some subclasses rely on this being MDW
    if (Dir is BaseDirectoryWrapper)
    {
        ((BaseDirectoryWrapper)Dir).CheckIndexOnClose = false; // don't double-checkIndex, we do it ourselves.
    }
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetInfoStream(new FailOnNonBulkMergesInfoStream());
    if (LuceneTestCase.TEST_NIGHTLY)
    {
        // newIWConfig makes smallish max seg size, which
        // results in tons and tons of segments for this test
        // when run nightly:
        MergePolicy mp = conf.MergePolicy;
        if (mp is TieredMergePolicy)
        {
            ((TieredMergePolicy)mp).MaxMergedSegmentMB = 5000.0;
        }
        else if (mp is LogByteSizeMergePolicy)
        {
            ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1000.0;
        }
        else if (mp is LogMergePolicy)
        {
            ((LogMergePolicy)mp).MaxMergeDocs = 100000;
        }
    }

    conf.SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousInnerClassHelper(this));

    if (VERBOSE)
    {
        conf.InfoStream = new PrintStreamInfoStreamAnonymousInnerClassHelper(this, Console.Out);
    }

    Writer = new IndexWriter(Dir, conf);
    TestUtil.ReduceOpenFiles(Writer);

    //TaskScheduler es = Random().NextBoolean() ? null : Executors.newCachedThreadPool(new NamedThreadFactory(testName));
    TaskScheduler es = null;

    DoAfterWriter(es);

    int NUM_INDEX_THREADS = TestUtil.NextInt(Random(), 2, 4);
    int RUN_TIME_SEC = LuceneTestCase.TEST_NIGHTLY ? 300 : RANDOM_MULTIPLIER;

    ISet<string> delIDs = new ConcurrentHashSet<string>(new HashSet<string>());
    ISet<string> delPackIDs = new ConcurrentHashSet<string>(new HashSet<string>());
    IList<SubDocs> allSubDocs = new SynchronizedCollection<SubDocs>();

    DateTime stopTime = DateTime.UtcNow.AddSeconds(RUN_TIME_SEC);

    ThreadClass[] indexThreads = LaunchIndexingThreads(docs, NUM_INDEX_THREADS, stopTime, delIDs, delPackIDs, allSubDocs);

    if (VERBOSE)
    {
        Console.WriteLine("TEST: DONE start " + NUM_INDEX_THREADS + " indexing threads [" + (DateTime.UtcNow - t0).TotalMilliseconds + " ms]");
    }

    // Let index build up a bit
    Thread.Sleep(100);

    DoSearching(es, stopTime);

    if (VERBOSE)
    {
        Console.WriteLine("TEST: all searching done [" + (DateTime.UtcNow - t0).TotalMilliseconds + " ms]");
    }

    for (int thread = 0; thread < indexThreads.Length; thread++)
    {
        indexThreads[thread].Join();
    }

    if (VERBOSE)
    {
        Console.WriteLine("TEST: done join indexing threads [" + (DateTime.UtcNow - t0).TotalMilliseconds + " ms]; addCount=" + AddCount + " delCount=" + DelCount);
    }

    IndexSearcher s = FinalSearcher;
    if (VERBOSE)
    {
        Console.WriteLine("TEST: finalSearcher=" + s);
    }

    Assert.IsFalse(Failed.Get());

    bool doFail = false;

    // Verify: make sure delIDs are in fact deleted:
    foreach (string id in delIDs)
    {
        TopDocs hits = s.Search(new TermQuery(new Term("docid", id)), 1);
        if (hits.TotalHits != 0)
        {
            Console.WriteLine("doc id=" + id + " is supposed to be deleted, but got " + hits.TotalHits + " hits; first docID=" + hits.ScoreDocs[0].Doc);
            doFail = true;
        }
    }

    // Verify: make sure delPackIDs are in fact deleted:
    foreach (string id in delPackIDs)
    {
        TopDocs hits = s.Search(new TermQuery(new Term("packID", id)), 1);
        if (hits.TotalHits != 0)
        {
            Console.WriteLine("packID=" + id + " is supposed to be deleted, but got " + hits.TotalHits + " matches");
            doFail = true;
        }
    }

    // Verify: make sure each group of sub-docs are still in docID order:
    foreach (SubDocs subDocs in allSubDocs.ToList())
    {
        TopDocs hits = s.Search(new TermQuery(new Term("packID", subDocs.PackID)), 20);
        if (!subDocs.Deleted)
        {
            // We sort by relevance but the scores should be identical so sort falls back to by docID:
            if (hits.TotalHits != subDocs.SubIDs.Count)
            {
                Console.WriteLine("packID=" + subDocs.PackID + ": expected " + subDocs.SubIDs.Count + " hits but got " + hits.TotalHits);
                doFail = true;
            }
            else
            {
                int lastDocID = -1;
                int startDocID = -1;
                foreach (ScoreDoc scoreDoc in hits.ScoreDocs)
                {
                    int docID = scoreDoc.Doc;
                    if (lastDocID != -1)
                    {
                        Assert.AreEqual(1 + lastDocID, docID);
                    }
                    else
                    {
                        startDocID = docID;
                    }
                    lastDocID = docID;
                    Document doc = s.Doc(docID);
                    Assert.AreEqual(subDocs.PackID, doc.Get("packID"));
                }

                lastDocID = startDocID - 1;
                foreach (string subID in subDocs.SubIDs)
                {
                    hits = s.Search(new TermQuery(new Term("docid", subID)), 1);
                    Assert.AreEqual(1, hits.TotalHits);
                    int docID = hits.ScoreDocs[0].Doc;
                    if (lastDocID != -1)
                    {
                        Assert.AreEqual(1 + lastDocID, docID);
                    }
                    lastDocID = docID;
                }
            }
        }
        else
        {
            // Pack was deleted -- make sure its docs are
            // deleted. We can't verify packID is deleted
            // because we can re-use packID for update:
            foreach (string subID in subDocs.SubIDs)
            {
                Assert.AreEqual(0, s.Search(new TermQuery(new Term("docid", subID)), 1).TotalHits);
            }
        }
    }

    // Verify: make sure all not-deleted docs are in fact
    // not deleted:
    int endID = Convert.ToInt32(docs.NextDoc().Get("docid"));
    docs.Dispose();

    for (int id = 0; id < endID; id++)
    {
        string stringID = "" + id;
        if (!delIDs.Contains(stringID))
        {
            TopDocs hits = s.Search(new TermQuery(new Term("docid", stringID)), 1);
            if (hits.TotalHits != 1)
            {
                Console.WriteLine("doc id=" + stringID + " is not supposed to be deleted, but got hitCount=" + hits.TotalHits + "; delIDs=" + string.Join(",", delIDs.ToArray()));
                doFail = true;
            }
        }
    }
    Assert.IsFalse(doFail);

    Assert.AreEqual(AddCount.Get() - DelCount.Get(), s.IndexReader.NumDocs, "index=" + Writer.SegString() + " addCount=" + AddCount + " delCount=" + DelCount);
    ReleaseSearcher(s);

    Writer.Commit();

    Assert.AreEqual(AddCount.Get() - DelCount.Get(), Writer.NumDocs(), "index=" + Writer.SegString() + " addCount=" + AddCount + " delCount=" + DelCount);

    DoClose();
    Writer.Dispose(false);

    // Cannot shutdown until after writer is closed because
    // writer has merged segment warmer that uses IS to run
    // searches, and that IS may be using this es!
    /*if (es != null)
    {
        es.shutdown();
        es.awaitTermination(1, TimeUnit.SECONDS);
    }*/

    TestUtil.CheckIndex(Dir);
    Dir.Dispose();
    System.IO.Directory.Delete(tempDir.FullName, true);

    if (VERBOSE)
    {
        Console.WriteLine("TEST: done [" + (DateTime.UtcNow - t0).TotalMilliseconds + " ms]");
    }
}
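// Background for the docID-order check above (a note, not from the original
// source): documents added together as a block through IndexWriter.AddDocuments
// are written with consecutive docIDs, and merges preserve that adjacency, so
// every undeleted pack's sub-docs must still occupy a contiguous docID run —
// which is exactly what the Assert.AreEqual(1 + lastDocID, docID) loop verifies.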
private static void CheckRandomData(Random random, Analyzer a, int iterations, int maxWordLength, bool useCharFilter, bool simple, bool offsetsAreCorrect, RandomIndexWriter iw)
{
    LineFileDocs docs = new LineFileDocs(random);
    Document doc = null;
    Field field = null, currentField = null;
    StringReader bogus = new StringReader("");
    if (iw != null)
    {
        doc = new Document();
        FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
        if (random.NextBoolean())
        {
            ft.StoreTermVectors = true;
            ft.StoreTermVectorOffsets = random.NextBoolean();
            ft.StoreTermVectorPositions = random.NextBoolean();
            if (ft.StoreTermVectorPositions && !OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
            {
                ft.StoreTermVectorPayloads = random.NextBoolean();
            }
        }
        if (random.NextBoolean())
        {
            ft.OmitNorms = true;
        }
        string pf = TestUtil.GetPostingsFormat("dummy");
        bool supportsOffsets = !DoesntSupportOffsets.Contains(pf);
        switch (random.Next(4))
        {
            case 0:
                ft.IndexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
                break;

            case 1:
                ft.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS;
                break;

            case 2:
                ft.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                break;

            default:
                if (supportsOffsets && offsetsAreCorrect)
                {
                    ft.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
                }
                else
                {
                    ft.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                }
                break;
        }
        currentField = field = new Field("dummy", bogus, ft);
        doc.Add(currentField);
    }

    try
    {
        for (int i = 0; i < iterations; i++)
        {
            string text;

            if (random.Next(10) == 7)
            {
                // real data from linedocs
                text = docs.NextDoc().Get("body");
                if (text.Length > maxWordLength)
                {
                    // Take a random slice from the text...:
                    int startPos = random.Next(text.Length - maxWordLength);
                    if (startPos > 0 && char.IsLowSurrogate(text[startPos]))
                    {
                        // Take care not to split up a surrogate pair:
                        startPos--;
                        Assert.True(char.IsHighSurrogate(text[startPos]));
                    }
                    int endPos = startPos + maxWordLength - 1;
                    if (char.IsHighSurrogate(text[endPos]))
                    {
                        // Take care not to split up a surrogate pair:
                        endPos--;
                    }
                    text = text.Substring(startPos, 1 + endPos - startPos);
                }
            }
            else
            {
                // synthetic
                text = TestUtil.RandomAnalysisString(random, maxWordLength, simple);
            }

            try
            {
                CheckAnalysisConsistency(random, a, useCharFilter, text, offsetsAreCorrect, currentField);
                if (iw != null)
                {
                    if (random.Next(7) == 0)
                    {
                        // pile up a multivalued field
                        var ft = (FieldType)field.FieldType();
                        currentField = new Field("dummy", bogus, ft);
                        doc.Add(currentField);
                    }
                    else
                    {
                        iw.AddDocument(doc);
                        if (doc.Fields.Count > 1)
                        {
                            // back to 1 field
                            currentField = field;
                            doc.RemoveFields("dummy");
                            doc.Add(currentField);
                        }
                    }
                }
            }
            catch (Exception t)
            {
                // TODO: really we should pass a random seed to
                // checkAnalysisConsistency then print it here too:
                Console.Error.WriteLine("TEST FAIL: useCharFilter=" + useCharFilter + " text='" + Escape(text) + "'");
                throw;
            }
        }
    }
    finally
    {
        IOUtils.CloseWhileHandlingException(docs);
    }
}
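// Illustration of the surrogate-pair guard above (added, not from the original
// source): in UTF-16 a character outside the BMP is stored as a high surrogate
// followed by a low surrogate. If a slice would start on a low surrogate, the
// matching high surrogate sits directly before it, so moving startPos back by
// one keeps the pair intact; symmetrically, ending on a high surrogate would
// orphan it, so endPos moves back by one. For example, "a𝄞b" is encoded as
// ['a', 0xD834, 0xDD1E, 'b'], and a slice starting at index 2 must back up to 1.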
public static void BeforeClass()
{
    LineDocFile = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
}
public static void AfterClass()
{
    LineDocFile.Dispose();
    LineDocFile = null;
}
public IndexThread(TestFlushByRamOrCountsPolicy outerInstance, AtomicInteger pendingDocs, int numThreads, IndexWriter writer, LineFileDocs docs, bool doRandomCommit)
{
    this.OuterInstance = outerInstance;
    this.PendingDocs = pendingDocs;
    this.Writer = writer;
    Iwc = writer.Config;
    this.Docs = docs;
    this.DoRandomCommit = doRandomCommit;
}
public ThreadAnonymousInnerClassHelper(TestIndexWriterWithThreads outerInstance, BaseDirectoryWrapper d, AtomicReference<IndexWriter> writerRef, LineFileDocs docs, int iters, AtomicBoolean failed, ReentrantLock rollbackLock, ReentrantLock commitLock)
{
    this.OuterInstance = outerInstance;
    this.d = d;
    this.WriterRef = writerRef;
    this.Docs = docs;
    this.Iters = iters;
    this.Failed = failed;
    this.RollbackLock = rollbackLock;
    this.CommitLock = commitLock;
}
public ThreadAnonymousInnerClassHelper(ThreadedIndexingAndSearchingTestCase outerInstance, LineFileDocs docs, DateTime stopTime, ISet<string> delIDs, ISet<string> delPackIDs, IList<SubDocs> allSubDocs)
{
    this.OuterInstance = outerInstance;
    this.Docs = docs;
    this.StopTime = stopTime;
    this.DelIDs = delIDs;
    this.DelPackIDs = delPackIDs;
    this.AllSubDocs = allSubDocs;
}
public virtual void Test()
{
    Directory dir = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir, analyzer, Similarity, TimeZone);
    LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
    int charsToIndex = AtLeast(100000);
    int charsIndexed = 0;
    //System.out.println("bytesToIndex=" + charsToIndex);
    while (charsIndexed < charsToIndex)
    {
        Document doc = docs.NextDoc();
        charsIndexed += doc.Get("body").Length;
        w.AddDocument(doc);
        //System.out.println("  bytes=" + charsIndexed + " add: " + doc);
    }
    IndexReader r = w.Reader;
    //System.out.println("numDocs=" + r.NumDocs);
    w.Dispose();

    IndexSearcher s = NewSearcher(r);
    Terms terms = MultiFields.GetFields(r).Terms("body");
    int termCount = 0;
    TermsEnum termsEnum = terms.Iterator(null);
    while (termsEnum.Next() != null)
    {
        termCount++;
    }
    Assert.IsTrue(termCount > 0);

    // Target ~10 terms to search:
    double chance = 10.0 / termCount;
    termsEnum = terms.Iterator(termsEnum);
    IDictionary<BytesRef, TopDocs> answers = new Dictionary<BytesRef, TopDocs>();
    while (termsEnum.Next() != null)
    {
        if (Random().NextDouble() <= chance)
        {
            BytesRef term = BytesRef.DeepCopyOf(termsEnum.Term());
            answers[term] = s.Search(new TermQuery(new Term("body", term)), 100);
        }
    }

    if (answers.Count > 0)
    {
        CountdownEvent startingGun = new CountdownEvent(1);
        int numThreads = TestUtil.NextInt(Random(), 2, 5);
        ThreadClass[] threads = new ThreadClass[numThreads];
        for (int threadID = 0; threadID < numThreads; threadID++)
        {
            ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, s, answers, startingGun);
            threads[threadID] = thread;
            thread.Start();
        }
        startingGun.Signal();
        foreach (ThreadClass thread in threads)
        {
            thread.Join();
        }
    }
    r.Dispose();
    dir.Dispose();
}
public ThreadAnonymousInnerClassHelper(TestForceMergeForever outerInstance, Lucene.Net.Index.TestForceMergeForever.MyIndexWriter w, int numStartDocs, LineFileDocs docs, AtomicBoolean doStop)
{
    this.OuterInstance = outerInstance;
    this.w = w;
    this.NumStartDocs = numStartDocs;
    this.Docs = docs;
    this.DoStop = doStop;
}
public override void Run()
{
    try
    {
        LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
        int numDocs = 0;
        while (DateTime.UtcNow < OuterInstance.EndTime)
        {
            int what = Random().Next(3);
            NodeState node = OuterInstance.Nodes[Random().Next(OuterInstance.Nodes.Length)];
            if (numDocs == 0 || what == 0)
            {
                node.Writer.AddDocument(docs.NextDoc());
                numDocs++;
            }
            else if (what == 1)
            {
                node.Writer.UpdateDocument(new Term("docid", "" + Random().Next(numDocs)), docs.NextDoc());
                numDocs++;
            }
            else
            {
                node.Writer.DeleteDocuments(new Term("docid", "" + Random().Next(numDocs)));
            }
            // TODO: doc blocks too
            if (Random().Next(17) == 12)
            {
                node.Writer.Commit();
            }
            if (Random().Next(17) == 12)
            {
                OuterInstance.Nodes[Random().Next(OuterInstance.Nodes.Length)].Reopen();
            }
        }
    }
    catch (Exception t)
    {
        Console.WriteLine("FAILED:");
        Console.Out.WriteLine(t.StackTrace);
        throw new Exception(t.Message, t);
    }
}
public virtual void TestNRTAndCommit()
{
    Directory dir = NewDirectory();
    NRTCachingDirectory cachedDir = new NRTCachingDirectory(dir, 2.0, 25.0);
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    RandomIndexWriter w = new RandomIndexWriter(Random(), cachedDir, conf);
    LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
    int numDocs = TestUtil.NextInt(Random(), 100, 400);
    if (VERBOSE)
    {
        Console.WriteLine("TEST: numDocs=" + numDocs);
    }
    IList<BytesRef> ids = new List<BytesRef>();
    DirectoryReader r = null;
    for (int docCount = 0; docCount < numDocs; docCount++)
    {
        Document doc = docs.NextDoc();
        ids.Add(new BytesRef(doc.Get("docid")));
        w.AddDocument(doc);
        if (Random().Next(20) == 17)
        {
            if (r == null)
            {
                r = DirectoryReader.Open(w.w, false);
            }
            else
            {
                DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
                if (r2 != null)
                {
                    r.Dispose();
                    r = r2;
                }
            }
            Assert.AreEqual(1 + docCount, r.NumDocs);
            IndexSearcher s = NewSearcher(r);
            // Just make sure search can run; we can't assert
            // totHits since it could be 0
            TopDocs hits = s.Search(new TermQuery(new Term("body", "the")), 10);
            // System.out.println("tot hits " + hits.totalHits);
        }
    }
    if (r != null)
    {
        r.Dispose();
    }

    // Close should force cache to clear since all files are sync'd
    w.Dispose();

    string[] cachedFiles = cachedDir.ListCachedFiles();
    foreach (string file in cachedFiles)
    {
        Console.WriteLine("FAIL: cached file " + file + " remains after sync");
    }
    Assert.AreEqual(0, cachedFiles.Length);

    r = DirectoryReader.Open(dir);
    foreach (BytesRef id in ids)
    {
        Assert.AreEqual(1, r.DocFreq(new Term("docid", id)));
    }
    r.Dispose();
    cachedDir.Dispose();
    docs.Dispose();
}
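// For reference (a note added here, not from the original source): the two
// doubles passed to NRTCachingDirectory above are maxMergeSizeMB (2.0) and
// maxCachedMB (25.0) — newly flushed or merged segments no larger than 2 MB are
// held in RAM, up to a 25 MB total cache, and are written through to the
// delegate Directory on commit/sync. That is why ListCachedFiles() is expected
// to be empty once w.Dispose() has committed everything.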