// Builds two taxonomies concurrently (4 adder threads each), then merges
// dirs[1] into dirs[0] via AddTaxonomy with a random ordinal map and
// validates the merged result.
private void Dotest(int ncats, int range)
{
    AtomicInteger numCats = new AtomicInteger(ncats);
    Directory[] dirs = new Directory[2];
    for (int i = 0; i < dirs.Length; i++)
    {
        dirs[i] = NewDirectory();
        var tw = new DirectoryTaxonomyWriter(dirs[i]);
        ThreadClass[] addThreads = new ThreadClass[4];
        for (int j = 0; j < addThreads.Length; j++)
        {
            addThreads[j] = new ThreadAnonymousInnerClassHelper(this, range, numCats, tw);
        }
        foreach (ThreadClass t in addThreads)
        {
            t.Start();
        }
        foreach (ThreadClass t in addThreads)
        {
            t.Join();
        }
        tw.Dispose(); // finish this taxonomy before building the next one
    }
    // Merge the second taxonomy into the first, then validate via the map.
    var tw1 = new DirectoryTaxonomyWriter(dirs[0]);
    IOrdinalMap map = randomOrdinalMap();
    tw1.AddTaxonomy(dirs[1], map);
    tw1.Dispose();
    validate(dirs[0], dirs[1], map);
    IOUtils.Close(dirs);
}
// Randomly flips the stall flag while stall-threads run, until either all
// threads terminate or a 100-second safety timeout elapses.
public virtual void TestRandom()
{
    DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
    ctrl.UpdateStalled(false);
    ThreadClass[] stallThreads = new ThreadClass[AtLeast(3)];
    for (int i = 0; i < stallThreads.Length; i++)
    {
        int stallProbability = 1 + Random().Next(10);
        stallThreads[i] = new ThreadAnonymousInnerClassHelper(this, ctrl, stallProbability);
    }
    Start(stallThreads);
    // BUGFIX: DateTime.Now.Millisecond is only the millisecond COMPONENT
    // (0-999) of the wall clock, not an elapsed-time base, so the original
    // 100-second timeout below did not work. Environment.TickCount is the
    // milliseconds-since-boot counter already used elsewhere in these tests.
    long time = Environment.TickCount;
    /*
     * use a 100 sec timeout to make sure we not hang forever. join will fail in
     * that case
     */
    while ((Environment.TickCount - time) < 100 * 1000 && !Terminated(stallThreads))
    {
        ctrl.UpdateStalled(false);
        if (Random().NextBoolean())
        {
            Thread.@Yield();
        }
        else
        {
            Thread.Sleep(1);
        }
    }
    Join(stallThreads);
}
/// <summary>
/// Blocks the calling thread until every thread in <paramref name="toJoin"/>
/// has finished executing.
/// </summary>
public static void Join(ThreadClass[] toJoin)
{
    for (int i = 0; i < toJoin.Length; i++)
    {
        toJoin[i].Join();
    }
}
// Indexes from several background threads under NoDeletionPolicy and
// continuously verifies that no file referenced by any commit ever
// disappears from the directory.
public virtual void TestIndexing()
{
    DirectoryInfo tmpDir = CreateTempDir("TestNeverDelete");
    BaseDirectoryWrapper d = NewFSDirectory(tmpDir);

    // We want to "see" files removed if Lucene removed
    // them. this is still worth running on Windows since
    // some files the IR opens and closes.
    if (d is MockDirectoryWrapper)
    {
        ((MockDirectoryWrapper)d).NoDeleteOpenFile = false;
    }
    RandomIndexWriter w = new RandomIndexWriter(Random(), d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
    w.w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 5, 30));
    w.Commit();
    // 0-3 indexing threads; zero still exercises the commit/reader loop below.
    ThreadClass[] indexThreads = new ThreadClass[Random().Next(4)];
    long stopTime = Environment.TickCount + AtLeast(1000);
    for (int x = 0; x < indexThreads.Length; x++)
    {
        indexThreads[x] = new ThreadAnonymousInnerClassHelper(w, stopTime);
        indexThreads[x].Name = "Thread " + x;
        indexThreads[x].Start();
    }

    // Accumulates every file name seen in any commit so far.
    HashSet<string> allFiles = new HashSet<string>();

    DirectoryReader r = DirectoryReader.Open(d);
    while (Environment.TickCount < stopTime)
    {
        IndexCommit ic = r.IndexCommit;
        if (VERBOSE)
        {
            Console.WriteLine("TEST: check files: " + ic.FileNames);
        }
        allFiles.AddAll(ic.FileNames);
        // Make sure no old files were removed
        foreach (string fileName in allFiles)
        {
            Assert.IsTrue(SlowFileExists(d, fileName), "file " + fileName + " does not exist");
        }
        DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
        if (r2 != null)
        {
            r.Dispose();
            r = r2;
        }
        Thread.Sleep(1);
    }
    r.Dispose();

    foreach (ThreadClass t in indexThreads)
    {
        t.Join();
    }
    w.Dispose();
    d.Dispose();

    System.IO.Directory.Delete(tmpDir.FullName, true);
}
// Repeatedly builds an index with NUM_THREADS concurrent adder threads; after
// each iteration the exact expected doc count is checked against NumDocs and
// MaxDoc, and a reopened reader is asserted to contain a single leaf.
public virtual void RunTest(Random random, Directory directory)
{
    IndexWriter writer = new IndexWriter(directory, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).SetOpenMode(OpenMode_e.CREATE).SetMaxBufferedDocs(2)).SetMergePolicy(NewLogMergePolicy()));

    for (int iter = 0; iter < NUM_ITER; iter++)
    {
        int iterFinal = iter;
        // Very high merge factor while the 200 seed docs are added.
        ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 1000;

        FieldType customType = new FieldType(StringField.TYPE_STORED);
        customType.OmitNorms = true;

        for (int i = 0; i < 200; i++)
        {
            Document d = new Document();
            d.Add(NewField("id", Convert.ToString(i), customType));
            d.Add(NewField("contents", English.IntToEnglish(i), customType));
            writer.AddDocument(d);
        }

        // Low merge factor so merges run concurrently with the adder threads.
        ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 4;

        ThreadClass[] threads = new ThreadClass[NUM_THREADS];

        for (int i = 0; i < NUM_THREADS; i++)
        {
            int iFinal = i;
            IndexWriter writerFinal = writer;
            threads[i] = new ThreadAnonymousInnerClassHelper(this, iterFinal, customType, iFinal, writerFinal);
        }

        for (int i = 0; i < NUM_THREADS; i++)
        {
            threads[i].Start();
        }

        for (int i = 0; i < NUM_THREADS; i++)
        {
            threads[i].Join();
        }

        Assert.IsTrue(!Failed);

        // 200 seed docs plus the adder threads' contribution, per iteration.
        int expectedDocCount = (int)((1 + iter) * (200 + 8 * NUM_ITER2 * (NUM_THREADS / 2.0) * (1 + NUM_THREADS)));

        Assert.AreEqual(expectedDocCount, writer.NumDocs(), "index=" + writer.SegString() + " numDocs=" + writer.NumDocs() + " maxDoc=" + writer.MaxDoc + " config=" + writer.Config);
        Assert.AreEqual(expectedDocCount, writer.MaxDoc, "index=" + writer.SegString() + " numDocs=" + writer.NumDocs() + " maxDoc=" + writer.MaxDoc + " config=" + writer.Config);

        writer.Dispose();
        // Reopen in APPEND mode so the next iteration adds to the same index.
        writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).SetOpenMode(OpenMode_e.APPEND).SetMaxBufferedDocs(2));

        DirectoryReader reader = DirectoryReader.Open(directory);
        Assert.AreEqual(1, reader.Leaves.Count, "reader=" + reader);
        Assert.AreEqual(expectedDocCount, reader.NumDocs);
        reader.Dispose();
    }
    writer.Dispose();
}
/// <summary>
/// Starts every thread in <paramref name="tostart"/>, then naps briefly so
/// they get a chance to spin up before the caller proceeds.
/// </summary>
public static void Start(ThreadClass[] tostart)
{
    for (int i = 0; i < tostart.Length; i++)
    {
        tostart[i].Start();
    }
    Thread.Sleep(1); // give the freshly started threads a moment
}
// Wraps a delegate RNG together with owner metadata (weak reference + name)
// and the creation stack trace — presumably used for diagnostics when the
// RNG is touched from the wrong thread; confirm against the class body.
public SingleThreadedRandom(ThreadClass owner, Random @delegate)
    : base(0) // base seed is unused; calls are forwarded to @delegate
{
    this.@delegate = @delegate;
    this.ownerRef = new WeakReference(owner);
    this.ownerName = owner.Name;
    // Skip 1 frame so the captured trace starts at this constructor's caller.
    this.trace = new System.Diagnostics.StackTrace(1);
}
// Standalone lock-verification server: binds to the given host on an
// ephemeral port, publishes the chosen port via the "lockverifyserver.port"
// environment variable, accepts <clients> connections (one handler thread
// each), releases them all at once via the CountdownEvent, then waits for
// every handler to finish.
public static void Main(string[] args)
{
    if (args.Length != 2)
    {
        Console.WriteLine("Usage: java Lucene.Net.Store.LockVerifyServer bindToIp clients\n");
        Environment.Exit(1);
    }

    int arg = 0;
    IPHostEntry ipHostInfo = Dns.GetHostEntry(args[arg++]);
    IPAddress ipAddress = ipHostInfo.AddressList[0];
    // Port 0 => the OS assigns an ephemeral port; it is printed/published below.
    IPEndPoint localEndPoint = new IPEndPoint(ipAddress, 0);
    int maxClients = Convert.ToInt32(args[arg++]);

    using (Socket s = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
    {
        s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, 1);
        s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReceiveTimeout, 30000);// SoTimeout = 30000; // initially 30 secs to give clients enough time to startup
        s.Bind(localEndPoint);
        Console.WriteLine("Listening on " + ((IPEndPoint)s.LocalEndPoint).Port.ToString() + "...");

        // we set the port as a sysprop, so the ANT task can read it. For that to work, this server must run in-process:
        System.Environment.SetEnvironmentVariable("lockverifyserver.port", ((IPEndPoint)s.LocalEndPoint).Port.ToString());

        // Shared state handed to each handler thread. localLock presumably
        // guards lockedID[0] (handler code not visible here — verify).
        object localLock = new object();
        int[] lockedID = new int[1];
        lockedID[0] = -1;
        CountdownEvent startingGun = new CountdownEvent(1);
        ThreadClass[] threads = new ThreadClass[maxClients];

        for (int count = 0; count < maxClients; count++)
        {
            Socket cs = s.Accept();
            threads[count] = new ThreadAnonymousInnerClassHelper(localLock, lockedID, startingGun, cs);
            threads[count].Start();
        }

        // start
        Console.WriteLine("All clients started, fire gun...");
        startingGun.Signal();

        // wait for all threads to finish
        foreach (ThreadClass t in threads)
        {
            t.Join();
        }

        //LUCENE TO-DO Not sure if equivalent?
        // cleanup sysprop
        //System.clearProperty("lockverifyserver.port");
        Console.WriteLine("Server terminated.");
    }
}
// Stores the raw seed, runs it through all decorators to obtain the effective
// seed, and wraps the resulting RNG in a SingleThreadedRandom tied to 'owner'.
protected Randomness(ThreadClass owner, int seed, List<ISeedDecorator> decorators)
{
    this.Seed = seed;
    this.decorators = decorators.ToList(); // defensive copy of the caller's list
    var decoratedSeed = Decorate(seed, this.decorators);
    this.SingleThreadedRandom = new SingleThreadedRandom(owner, new Random(decoratedSeed));
}
// Sanity checks for the ThreadClass wrapper: Current() must reflect the CLR
// current thread, a running thread must see itself as Current(), and the
// overloaded ==/!= operators must handle null operands.
public void Test()
{
    ThreadClass unused = new ThreadClass();

    //Compare Current Thread Ids
    int wrappedId = ThreadClass.Current().Instance.ManagedThreadId;
    int clrId = System.Threading.Thread.CurrentThread.ManagedThreadId;
    Assert.IsTrue(wrappedId == clrId);

    //Compare instances of ThreadClass
    MyThread worker = new MyThread();
    worker.Start();
    while (worker.Result == null)
    {
        System.Threading.Thread.Sleep(1);
    }
    Assert.IsTrue((bool)worker.Result);

    ThreadClass none = null;
    Assert.IsTrue(none == null); //test overloaded operator == with null values
    Assert.IsFalse(none != null); //test overloaded operator != with null values
}
// LUCENE-5303: OrdinalsCache used the ThreadLocal BinaryDV instead of reader.getCoreCacheKey().
// The cache's RAM usage must stay constant after the first thread populated it.
public virtual void TestWithThreads()
{
    Store.Directory indexDir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(indexDir, conf);
    var taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
    FacetsConfig config = new FacetsConfig();

    Document doc = new Document();
    doc.Add(new FacetField("A", "1"));
    writer.AddDocument(config.Build(taxoWriter, doc));
    doc = new Document();
    doc.Add(new FacetField("A", "2"));
    writer.AddDocument(config.Build(taxoWriter, doc));

    var reader = DirectoryReader.Open(writer, true);
    CachedOrdinalsReader ordsReader = new CachedOrdinalsReader(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME));
    ThreadClass[] threads = new ThreadClass[3];
    for (int i = 0; i < threads.Length; i++)
    {
        threads[i] = new ThreadAnonymousInnerClassHelper(this, "CachedOrdsThread-" + i, reader, ordsReader);
    }

    // Threads run one at a time (Start immediately followed by Join); after
    // the first populates the cache, RamBytesUsed must not change.
    long ramBytesUsed = 0;
    foreach (ThreadClass t in threads)
    {
        t.Start();
        t.Join();
        if (ramBytesUsed == 0)
        {
            ramBytesUsed = ordsReader.RamBytesUsed();
        }
        else
        {
            Assert.AreEqual(ramBytesUsed, ordsReader.RamBytesUsed());
        }
    }

    IOUtils.Close(writer, taxoWriter, reader, indexDir, taxoDir);
}
// Exercises the ThreadClass wrapper: current-thread identity, self-identity
// observed from inside a running thread, and the null-safe ==/!= overloads.
public void Test()
{
    ThreadClass placeholder = new ThreadClass();

    //Compare Current Thread Ids
    Assert.IsTrue(System.Threading.Thread.CurrentThread.ManagedThreadId == ThreadClass.Current().Instance.ManagedThreadId);

    //Compare instances of ThreadClass
    MyThread probe = new MyThread();
    probe.Start();
    // Poll until the thread has published its result.
    while (probe.Result == null)
    {
        System.Threading.Thread.Sleep(1);
    }
    Assert.IsTrue((bool)probe.Result);

    ThreadClass absent = null;
    Assert.IsTrue(absent == null); //test overloaded operator == with null values
    Assert.IsFalse(absent != null); //test overloaded operator != with null values
}
// Hammers NumericDocValues updates from several threads; each field f<i> has
// a control field cf<i> that is always updated to f<i>*2, so after all
// updates every surviving doc must satisfy cf == f*2.
public virtual void TestStressMultiThreading()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir, conf);

    // create index
    int numThreads = TestUtil.NextInt(Random(), 3, 6);
    int numDocs = AtLeast(2000);
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        doc.Add(new StringField("id", "doc" + i, Store.NO));
        // Assign the doc to one of four update groups with skewed probabilities.
        double group = Random().NextDouble();
        string g;
        if (group < 0.1)
        {
            g = "g0";
        }
        else if (group < 0.5)
        {
            g = "g1";
        }
        else if (group < 0.8)
        {
            g = "g2";
        }
        else
        {
            g = "g3";
        }
        doc.Add(new StringField("updKey", g, Store.NO));
        for (int j = 0; j < numThreads; j++)
        {
            long value = Random().Next();
            doc.Add(new NumericDocValuesField("f" + j, value));
            doc.Add(new NumericDocValuesField("cf" + j, value * 2)); // control, always updated to f * 2
        }
        writer.AddDocument(doc);
    }

    CountDownLatch done = new CountDownLatch(numThreads);
    AtomicInteger numUpdates = new AtomicInteger(AtLeast(100));

    // same thread updates a field as well as reopens
    ThreadClass[] threads = new ThreadClass[numThreads];
    for (int i = 0; i < threads.Length; i++)
    {
        string f = "f" + i;
        string cf = "cf" + i;
        threads[i] = new ThreadAnonymousInnerClassHelper(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
    }

    foreach (ThreadClass t in threads)
    {
        t.Start();
    }
    done.@await();
    writer.Dispose();

    // Verify the f/cf invariant on every live doc in every segment.
    DirectoryReader reader = DirectoryReader.Open(dir);
    foreach (AtomicReaderContext context in reader.Leaves)
    {
        AtomicReader r = context.AtomicReader;
        for (int i = 0; i < numThreads; i++)
        {
            NumericDocValues ndv = r.GetNumericDocValues("f" + i);
            NumericDocValues control = r.GetNumericDocValues("cf" + i);
            Bits docsWithNdv = r.GetDocsWithField("f" + i);
            Bits docsWithControl = r.GetDocsWithField("cf" + i);
            Bits liveDocs = r.LiveDocs;
            for (int j = 0; j < r.MaxDoc; j++)
            {
                if (liveDocs == null || liveDocs.Get(j))
                {
                    Assert.AreEqual(docsWithNdv.Get(j), docsWithControl.Get(j));
                    if (docsWithNdv.Get(j))
                    {
                        Assert.AreEqual(control.Get(j), ndv.Get(j) * 2);
                    }
                }
            }
        }
    }

    reader.Dispose();
    dir.Dispose();
}
// Takes snapshots from 10 concurrent threads, then releases them all and
// verifies that only the single final commit remains in the directory.
public virtual void TestMultiThreadedSnapshotting()
{
    Directory dir = NewDirectory();
    IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), DeletionPolicy));
    SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.DelPolicy;

    ThreadClass[] threads = new ThreadClass[10];
    // snapshots[i] is filled in by thread i (via the anonymous helper).
    IndexCommit[] snapshots = new IndexCommit[threads.Length];
    for (int i = 0; i < threads.Length; i++)
    {
        int finalI = i;
        threads[i] = new ThreadAnonymousInnerClassHelper2(this, writer, sdp, snapshots, finalI);
        threads[i].Name = "t" + i;
    }

    foreach (ThreadClass t in threads)
    {
        t.Start();
    }

    foreach (ThreadClass t in threads)
    {
        t.Join();
    }

    // Do one last commit, so that after we release all snapshots, we stay w/ one commit
    writer.AddDocument(new Document());
    writer.Commit();

    // Release every snapshot and let the writer prune the now-unreferenced files.
    for (int i = 0; i < threads.Length; i++)
    {
        sdp.Release(snapshots[i]);
        writer.DeleteUnusedFiles();
    }
    Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count);
    writer.Dispose();
    dir.Dispose();
}
// DeleteAll called repeatedly while indexing threads run must not deadlock;
// afterwards the index must be completely empty.
public virtual void TestDeleteAllNoDeadLock()
{
    Directory dir = NewDirectory();
    RandomIndexWriter modifier = new RandomIndexWriter(Random(), dir);
    int numThreads = AtLeast(2);
    ThreadClass[] threads = new ThreadClass[numThreads];
    CountDownLatch latch = new CountDownLatch(1);
    CountDownLatch doneLatch = new CountDownLatch(numThreads);
    for (int i = 0; i < numThreads; i++)
    {
        int offset = i;
        threads[i] = new ThreadAnonymousInnerClassHelper(this, modifier, latch, doneLatch, offset);
        threads[i].Start();
    }
    // Release all indexing threads at once.
    latch.countDown();
    //Wait for 1 millisecond
    while (!doneLatch.@await(new TimeSpan(0, 0, 0, 0, 1)))
    {
        // Keep wiping the index while the indexing threads are still running.
        modifier.DeleteAll();
        if (VERBOSE)
        {
            Console.WriteLine("del all");
        }
    }
    modifier.DeleteAll(); // final wipe after every indexing thread signalled done
    foreach (ThreadClass thread in threads)
    {
        thread.Join();
    }
    modifier.Dispose();
    DirectoryReader reader = DirectoryReader.Open(dir);
    Assert.AreEqual(reader.MaxDoc, 0);
    Assert.AreEqual(reader.NumDocs, 0);
    Assert.AreEqual(reader.NumDeletedDocs, 0);
    reader.Dispose();
    dir.Dispose();
}
// Builds postings where term "i" occurs exactly i times overall, indexed from
// several threads pulling from a shared queue; afterwards each term's
// TotalTermFreq must equal its numeric value.
public virtual void Test()
{
    IList<string> postingsList = new List<string>();
    int numTerms = AtLeast(300);
    int maxTermsPerDoc = TestUtil.NextInt(Random(), 10, 20);
    bool isSimpleText = "SimpleText".Equals(TestUtil.GetPostingsFormat("field"));

    IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

    if ((isSimpleText || iwc.MergePolicy is MockRandomMergePolicy) && (TEST_NIGHTLY || RANDOM_MULTIPLIER > 1))
    {
        // Otherwise test can take way too long (> 2 hours)
        numTerms /= 2;
    }
    if (VERBOSE)
    {
        Console.WriteLine("maxTermsPerDoc=" + maxTermsPerDoc);
        Console.WriteLine("numTerms=" + numTerms);
    }
    // Term "i" is added i times, so its expected totalTermFreq is i.
    for (int i = 0; i < numTerms; i++)
    {
        string term = Convert.ToString(i);
        for (int j = 0; j < i; j++)
        {
            postingsList.Add(term);
        }
    }

    postingsList = CollectionsHelper.Shuffle(postingsList);

    ConcurrentQueue<string> postings = new ConcurrentQueue<string>(postingsList);

    Directory dir = NewFSDirectory(CreateTempDir(GetFullMethodName()));

    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

    int threadCount = TestUtil.NextInt(Random(), 1, 5);
    if (VERBOSE)
    {
        Console.WriteLine("config: " + iw.w.Config);
        Console.WriteLine("threadCount=" + threadCount);
    }

    Field prototype = NewTextField("field", "", Field.Store.NO);
    FieldType fieldType = new FieldType((FieldType)prototype.FieldType);
    if (Random().NextBoolean())
    {
        fieldType.OmitNorms = true;
    }
    // Randomly vary index options / term vectors to broaden codec coverage.
    int options = Random().Next(3);
    if (options == 0)
    {
        fieldType.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS; // we dont actually need positions
        fieldType.StoreTermVectors = true; // but enforce term vectors when we do this so we check SOMETHING
    }
    else if (options == 1 && !DoesntSupportOffsets.Contains(TestUtil.GetPostingsFormat("field")))
    {
        fieldType.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
    }
    // else just positions

    ThreadClass[] threads = new ThreadClass[threadCount];
    CountdownEvent startingGun = new CountdownEvent(1);

    for (int threadID = 0; threadID < threadCount; threadID++)
    {
        Random threadRandom = new Random(Random().Next());
        Document document = new Document();
        Field field = new Field("field", "", fieldType);
        document.Add(field);
        threads[threadID] = new ThreadAnonymousInnerClassHelper(this, numTerms, maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
        threads[threadID].Start();
    }
    startingGun.Signal();
    foreach (ThreadClass t in threads)
    {
        t.Join();
    }

    iw.ForceMerge(1);
    DirectoryReader ir = iw.Reader;
    Assert.AreEqual(1, ir.Leaves.Count);
    AtomicReader air = (AtomicReader)ir.Leaves[0].Reader;
    Terms terms = air.Terms("field");
    // numTerms-1 because there cannot be a term 0 with 0 postings:
    Assert.AreEqual(numTerms - 1, terms.Size());
    TermsEnum termsEnum = terms.Iterator(null);
    BytesRef termBR;
    while ((termBR = termsEnum.Next()) != null)
    {
        int value = Convert.ToInt32(termBR.Utf8ToString());
        Assert.AreEqual(value, termsEnum.TotalTermFreq());
        // don't really need to check more than this, as CheckIndex
        // will verify that totalTermFreq == total number of positions seen
        // from a docsAndPositionsEnum.
    }
    ir.Dispose();
    iw.Dispose();
    dir.Dispose();
}
// Resolves the RandomizedContext for a thread by walking up its thread-group
// hierarchy, lazily creating a context and per-thread resources when absent.
private static RandomizedContext Context(ThreadClass thread)
{
    var group = thread.Instance.GetThreadGroup();
    RandomizedContext context;
    lock (globalLock)
    {
        // Walk toward the root group until a context is found.
        // NOTE(review): if 'contexts' is a standard Dictionary, its indexer
        // throws KeyNotFoundException on a missing key instead of returning
        // null as this (Java-ported) null check assumes — verify the map
        // type returns null on a miss.
        while (true)
        {
            context = contexts[group];
            if (context == null && group.Parent != null)
                group = group.Parent;
            else
                break;
        }
        // TODO: revisit
        if (context == null)
        {
            context = contexts[group] = new RandomizedContext(group, null, null);
        }
    }
    if (context == null)
    {
        // TODO: revisit
        // NOTE(review): unreachable as written — the branch above assigns a
        // non-null context whenever it was null.
        var message = "No context information for thread," + thread.Name + ". " + "Is this thread running under a " + typeof(RandomizedRunner).Name + " context? ";
        throw new IllegalStateException(message);
    }
    lock (context.contextLock)
    {
        // Lazily register per-thread resources on first access by this thread.
        if (!context.threadResources.ContainsKey(thread))
        {
            var resources = new ThreadResources();
            //resources.Queue.Enqueue(context.runner.Randomness.Clone(thread));
            context.threadResources.Add(thread, resources);
        }
    }
    return context;
}
// Stress test for DocumentsWriterStallControl: updater threads (stallers and
// releasers) flip the stall flag while waiter threads interact with the
// control; at random checkpoints all updaters rendezvous and the control's
// invariants are asserted before everyone is released again.
public virtual void TestAccquireReleaseRace()
{
    DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
    ctrl.UpdateStalled(false);
    AtomicBoolean stop = new AtomicBoolean(false);
    AtomicBoolean checkPoint = new AtomicBoolean(true);

    int numStallers = AtLeast(1);
    int numReleasers = AtLeast(1);
    int numWaiters = AtLeast(1);
    var sync = new Synchronizer(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
    // Thread layout: [0..numReleasers) releasers, then stallers, then waiters.
    var threads = new ThreadClass[numReleasers + numStallers + numWaiters];
    IList<Exception> exceptions = new SynchronizedCollection<Exception>();
    for (int i = 0; i < numReleasers; i++)
    {
        threads[i] = new Updater(stop, checkPoint, ctrl, sync, true, exceptions);
    }
    for (int i = numReleasers; i < numReleasers + numStallers; i++)
    {
        threads[i] = new Updater(stop, checkPoint, ctrl, sync, false, exceptions);
    }
    for (int i = numReleasers + numStallers; i < numReleasers + numStallers + numWaiters; i++)
    {
        threads[i] = new Waiter(stop, checkPoint, ctrl, sync, exceptions);
    }

    Start(threads);
    int iters = AtLeast(10000);
    float checkPointProbability = TEST_NIGHTLY ? 0.5f : 0.1f;
    for (int i = 0; i < iters; i++)
    {
        if (checkPoint.Get())
        {
            // All updater threads must reach the checkpoint within 10 seconds.
            Assert.IsTrue(sync.UpdateJoin.@await(new TimeSpan(0, 0, 0, 10)), "timed out waiting for update threads - deadlock?");
            if (exceptions.Count > 0)
            {
                foreach (Exception throwable in exceptions)
                {
                    Console.WriteLine(throwable.ToString());
                    Console.Write(throwable.StackTrace);
                }
                Assert.Fail("got exceptions in threads");
            }

            if (ctrl.HasBlocked() && ctrl.Healthy)
            {
                AssertState(numReleasers, numStallers, numWaiters, threads, ctrl);
            }

            // Release the parked threads and wait for them to leave the checkpoint.
            checkPoint.Set(false);
            sync.Waiter.countDown();
            sync.LeftCheckpoint.@await();
        }
        Assert.IsFalse(checkPoint.Get());
        Assert.AreEqual(0, sync.Waiter.Remaining);
        // Randomly arm the next checkpoint.
        if (checkPointProbability >= (float)Random().NextDouble())
        {
            sync.Reset(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
            checkPoint.Set(true);
        }
    }
    // Force one final checkpoint so every thread is parked before shutdown.
    if (!checkPoint.Get())
    {
        sync.Reset(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
        checkPoint.Set(true);
    }

    Assert.IsTrue(sync.UpdateJoin.@await(new TimeSpan(0, 0, 0, 10)));
    AssertState(numReleasers, numStallers, numWaiters, threads, ctrl);
    checkPoint.Set(false);
    stop.Set(true);
    sync.Waiter.countDown();
    sync.LeftCheckpoint.@await();

    // Shut down: keep un-stalling while joining, and flag any waiter that is
    // still blocked after the 2-second join timeout.
    for (int i = 0; i < threads.Length; i++)
    {
        ctrl.UpdateStalled(false);
        threads[i].Join(2000);
        if (threads[i].IsAlive && threads[i] is Waiter)
        {
            if (threads[i].State == ThreadState.WaitSleepJoin)
            {
                Assert.Fail("waiter is not released - anyThreadsStalled: " + ctrl.AnyStalledThreads());
            }
        }
    }
}
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
// run random tests from different threads to make sure the per-thread clones
// don't share mutable data
public virtual void TestClone()
{
    RandomDocumentFactory docFactory = new RandomDocumentFactory(this, 5, 20);
    int numDocs = AtLeast(100);
    foreach (Options options in ValidOptions())
    {
        RandomDocument[] docs = new RandomDocument[numDocs];
        for (int i = 0; i < numDocs; ++i)
        {
            docs[i] = docFactory.NewDocument(TestUtil.NextInt(Random(), 1, 3), AtLeast(10), options);
        }
        Directory dir = NewDirectory();
        RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, ClassEnvRule.Similarity, ClassEnvRule.TimeZone);
        for (int i = 0; i < numDocs; ++i)
        {
            writer.AddDocument(AddId(docs[i].ToDocument(), "" + i));
        }
        IndexReader reader = writer.Reader;
        // Sanity-check the term vectors single-threaded before the concurrent reads.
        for (int i = 0; i < numDocs; ++i)
        {
            int docID = DocID(reader, "" + i);
            AssertEquals(docs[i], reader.GetTermVectors(docID));
        }
        // Any exception raised inside a reader thread is captured here.
        AtomicObject<Exception> exception = new AtomicObject<Exception>();
        ThreadClass[] threads = new ThreadClass[2];
        for (int i = 0; i < threads.Length; ++i)
        {
            threads[i] = new ThreadAnonymousInnerClassHelper(this, numDocs, docs, reader, exception, i);
        }
        foreach (ThreadClass thread in threads)
        {
            thread.Start();
        }
        foreach (ThreadClass thread in threads)
        {
            thread.Join();
        }
        reader.Dispose();
        writer.Dispose();
        dir.Dispose();
        Assert.IsNull(exception.Value, "One thread threw an exception");
    }
}
// Boots a simulated cluster of numNodes search nodes: records each node's
// initial searcher version, broadcasts the initial reopen to every node, and
// starts the background thread that mutates the indices until EndTime.
protected internal virtual void Start(int numNodes, double runTimeSec, int maxSearcherAgeSeconds)
{
    EndTime = DateTime.UtcNow.AddSeconds(runTimeSec);
    this.MaxSearcherAgeSeconds = maxSearcherAgeSeconds;
    Nodes = new NodeState[numNodes];
    for (int nodeID = 0; nodeID < numNodes; nodeID++)
    {
        Nodes[nodeID] = new NodeState(this, Random(), nodeID, numNodes);
    }
    // Record the initial searcher version of each node (acquire/release pairs
    // keep the searcher reference-counted correctly).
    long[] nodeVersions = new long[Nodes.Length];
    for (int nodeID = 0; nodeID < numNodes; nodeID++)
    {
        IndexSearcher s = Nodes[nodeID].Mgr.Acquire();
        try
        {
            nodeVersions[nodeID] = Nodes[nodeID].Searchers.Record(s);
        }
        finally
        {
            Nodes[nodeID].Mgr.Release(s);
        }
    }
    for (int nodeID = 0; nodeID < numNodes; nodeID++)
    {
        IndexSearcher s = Nodes[nodeID].Mgr.Acquire();
        // Re-recording the same searcher must yield the same version.
        Debug.Assert(nodeVersions[nodeID] == Nodes[nodeID].Searchers.Record(s));
        Debug.Assert(s != null);
        try
        {
            BroadcastNodeReopen(nodeID, nodeVersions[nodeID], s);
        }
        finally
        {
            Nodes[nodeID].Mgr.Release(s);
        }
    }
    ChangeIndicesThread = new ChangeIndices(this);
    ChangeIndicesThread.Start();
}
// Records whether, as observed from inside the running thread, the wrapper
// returned by ThreadClass.Current() is this very instance.
public override void Run()
{
    ThreadClass current = ThreadClass.Current();
    Result = current == this;
}
/// <summary>
/// Asserts that when the stall control reports blocked and healthy, no waiter
/// thread remains queued. A queued waiter may just need a moment to leave the
/// queue, so we sleep with exponential backoff (100ms, 200ms, ...) and
/// re-check; once the per-check sleep would reach 60 seconds, we fail.
/// </summary>
private void AssertState(int numReleasers, int numStallers, int numWaiters, ThreadClass[] threads, DocumentsWriterStallControl ctrl)
{
    int millisToSleep = 100;
    while (true)
    {
        if (ctrl.HasBlocked() && ctrl.Healthy)
        {
            // BUGFIX: the original fell through to an unconditional 'break'
            // right after the inner loop, so the sleep-and-double retry
            // (millisToSleep *= 2) never actually re-checked. Track whether
            // we slept on a queued waiter and loop again in that case.
            bool sleptOnQueuedWaiter = false;
            for (int n = numReleasers + numStallers; n < numReleasers + numStallers + numWaiters; n++)
            {
                if (ctrl.IsThreadQueued(threads[n]))
                {
                    if (millisToSleep < 60000)
                    {
                        Thread.Sleep(millisToSleep);
                        millisToSleep *= 2;
                        sleptOnQueuedWaiter = true;
                        break; // re-check all waiters from the top
                    }
                    else
                    {
                        Assert.Fail("control claims no stalled threads but waiter seems to be blocked ");
                    }
                }
            }
            if (!sleptOnQueuedWaiter)
            {
                break; // no waiter queued => state is consistent
            }
        }
        else
        {
            break; // control never blocked or is unhealthy: nothing to verify
        }
    }
}
// Concurrency test for DirectoryTaxonomyWriter under a randomly chosen cache:
// many threads add random categories, then the resulting taxonomy is checked
// for completeness and for consistent parent ordinals at every path level.
public virtual void TestConcurrency()
{
    int ncats = AtLeast(100000); // add many categories
    int range = ncats * 3; // affects the categories selection
    AtomicInteger numCats = new AtomicInteger(ncats);
    Directory dir = NewDirectory();
    var values = new ConcurrentDictionary<string, string>();
    double d = Random().NextDouble();
    ITaxonomyWriterCache cache;
    if (d < 0.7)
    {
        // this is the fastest, yet most memory consuming
        cache = new Cl2oTaxonomyWriterCache(1024, 0.15f, 3);
    }
    else if (TEST_NIGHTLY && d > 0.98)
    {
        // this is the slowest, but tests the writer concurrency when no caching is done.
        // only pick it during NIGHTLY tests, and even then, with very low chances.
        cache = NO_OP_CACHE;
    }
    else
    {
        // this is slower than CL2O, but less memory consuming, and exercises finding categories on disk too.
        cache = new LruTaxonomyWriterCache(ncats / 10);
    }
    if (VERBOSE)
    {
        Console.WriteLine("TEST: use cache=" + cache);
    }
    var tw = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE, cache);
    ThreadClass[] addThreads = new ThreadClass[AtLeast(4)];
    for (int z = 0; z < addThreads.Length; z++)
    {
        addThreads[z] = new ThreadAnonymousInnerClassHelper(this, range, numCats, values, tw);
    }
    foreach (var t in addThreads)
    {
        t.Start();
    }
    foreach (var t in addThreads)
    {
        t.Join();
    }
    tw.Dispose();
    DirectoryTaxonomyReader dtr = new DirectoryTaxonomyReader(dir);
    // +1 for root category
    if (values.Count + 1 != dtr.Count)
    {
        // Dump any categories the reader doesn't recognize before failing.
        foreach (string value in values.Keys)
        {
            FacetLabel label = new FacetLabel(FacetsConfig.StringToPath(value));
            if (dtr.GetOrdinal(label) == -1)
            {
                Console.WriteLine("FAIL: path=" + label + " not recognized");
            }
        }
        Fail("mismatch number of categories");
    }
    int[] parents = dtr.ParallelTaxonomyArrays.Parents;
    foreach (string cat in values.Keys)
    {
        FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(cat));
        Assert.True(dtr.GetOrdinal(cp) > 0, "category not found " + cp);
        int level = cp.Length;
        int parentOrd = 0; // for root, parent is always virtual ROOT (ord=0)
        FacetLabel path = new FacetLabel();
        // Walk the path one component at a time, verifying each ordinal's parent.
        for (int i = 0; i < level; i++)
        {
            path = cp.Subpath(i + 1);
            int ord = dtr.GetOrdinal(path);
            Assert.AreEqual(parentOrd, parents[ord], "invalid parent for cp=" + path);
            parentOrd = ord; // next level should have this parent
        }
    }
    IOUtils.Close(dtr, dir);
}
// Runs the terms verification either single-threaded or, when the THREADS
// option is set, concurrently from 2-5 TestThread workers over the same
// Fields source.
private void TestTerms(Fields fieldsSource, ISet<Option> options, FieldInfo.IndexOptions maxTestOptions, FieldInfo.IndexOptions maxIndexOptions, bool alwaysTestMax)
{
    if (!options.Contains(Option.THREADS))
    {
        // No threading requested: do the whole check on the calling thread.
        TestTermsOneThread(fieldsSource, options, maxTestOptions, maxIndexOptions, alwaysTestMax);
        return;
    }

    int threadCount = TestUtil.NextInt(Random(), 2, 5);
    ThreadClass[] workers = new ThreadClass[threadCount];
    // Start all workers first, then join them all.
    for (int i = 0; i < threadCount; i++)
    {
        workers[i] = new TestThread(this, fieldsSource, options, maxTestOptions, maxIndexOptions, alwaysTestMax);
        workers[i].Start();
    }
    foreach (ThreadClass worker in workers)
    {
        worker.Join();
    }
}
// Produces sort keys for 100 random terms serially, then spawns several
// threads that (via the anonymous helper) regenerate keys through the same
// analyzer for comparison against the serial results.
public virtual void AssertThreadSafe(Analyzer analyzer)
{
    int numTestPoints = 100;
    int numThreads = TestUtil.NextInt(Random(), 3, 5);
    Dictionary<string, BytesRef> map = new Dictionary<string, BytesRef>();

    // create a map<String,SortKey> up front.
    // then with multiple threads, generate sort keys for all the keys in the map
    // and ensure they are the same as the ones we produced in serial fashion.
    for (int i = 0; i < numTestPoints; i++)
    {
        string term = TestUtil.RandomSimpleString(Random());
        IOException priorException = null;
        // NOTE(review): StreamReader(string) interprets its argument as a file
        // PATH, not as content — the Java original presumably used a
        // StringReader over the term's characters; verify this port.
        TokenStream ts = analyzer.TokenStream("fake", new StreamReader(term));
        try
        {
            ITermToBytesRefAttribute termAtt = ts.AddAttribute<ITermToBytesRefAttribute>();
            BytesRef bytes = termAtt.BytesRef;
            ts.Reset();
            Assert.IsTrue(ts.IncrementToken());
            termAtt.FillBytesRef();
            // ensure we make a copy of the actual bytes too
            map[term] = BytesRef.DeepCopyOf(bytes);
            Assert.IsFalse(ts.IncrementToken());
            ts.End();
        }
        catch (IOException e)
        {
            priorException = e;
        }
        finally
        {
            IOUtils.CloseWhileHandlingException(priorException, ts);
        }
    }

    ThreadClass[] threads = new ThreadClass[numThreads];
    for (int i = 0; i < numThreads; i++)
    {
        threads[i] = new ThreadAnonymousInnerClassHelper(this, analyzer, map);
    }
    for (int i = 0; i < numThreads; i++)
    {
        threads[i].Start();
    }
    for (int i = 0; i < numThreads; i++)
    {
        threads[i].Join();
    }
}
// Stores the reader, resets the elapsed-time counter, and launches Run() on
// a fresh wrapped thread.
public virtual void Init(IndexReader reader)
{
    this.reader = reader;
    timeElapsed = 0;
    System.Threading.ThreadStart entryPoint = new System.Threading.ThreadStart(this.Run);
    t = new ThreadClass(entryPoint);
    t.Start();
}
/// <summary>
/// Returns true only when every thread in <paramref name="threads"/> has
/// reached the Stopped state.
/// </summary>
public static bool Terminated(ThreadClass[] threads)
{
    foreach (ThreadClass t in threads)
    {
        if (t.State != ThreadState.Stopped)
        {
            return false; // at least one thread is still running
        }
    }
    return true;
}
/// <summary>
/// Builds (without starting) <paramref name="num"/> waiter threads bound to
/// the given stall control.
/// </summary>
public static ThreadClass[] WaitThreads(int num, DocumentsWriterStallControl ctrl)
{
    var waiters = new ThreadClass[num];
    for (int i = 0; i < num; i++)
    {
        waiters[i] = new ThreadAnonymousInnerClassHelper2(ctrl);
    }
    return waiters;
}
// Builds 1000 random automaton queries up front, then releases 2-5 threads at
// once (via the latch) against the shared array through the anonymous helper.
public virtual void TestHashCodeWithThreads()
{
    AutomatonQuery[] queries = new AutomatonQuery[1000];
    for (int q = 0; q < queries.Length; q++)
    {
        queries[q] = new AutomatonQuery(new Term("bogus", "bogus"), AutomatonTestUtil.RandomAutomaton(Random()));
    }
    CountDownLatch startingGun = new CountDownLatch(1);
    int numThreads = TestUtil.NextInt(Random(), 2, 5);
    ThreadClass[] threads = new ThreadClass[numThreads];
    for (int i = 0; i < numThreads; i++)
    {
        threads[i] = new ThreadAnonymousInnerClassHelper(this, queries, startingGun);
        threads[i].Start();
    }
    startingGun.countDown(); // release all threads simultaneously
    foreach (ThreadClass t in threads)
    {
        t.Join();
    }
}
// Runs NUM_THREADS committing/indexing threads (via the anonymous helper) for
// about RUN_SEC seconds; none may flip the shared 'failed' flag.
public virtual void TestCommitThreadSafety()
{
    const int NUM_THREADS = 5;
    const double RUN_SEC = 0.5;
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
    TestUtil.ReduceOpenFiles(w.w);
    w.Commit();
    AtomicBoolean failed = new AtomicBoolean();
    ThreadClass[] threads = new ThreadClass[NUM_THREADS];
    // NOTE(review): DateTime.Now.Millisecond is just the 0-999 millisecond
    // component of the wall clock, not an elapsed-time base, so this deadline
    // is unreliable. Any fix (e.g. Environment.TickCount) must be coordinated
    // with the helper thread that consumes endTime — verify its comparison.
    long endTime = DateTime.Now.Millisecond + ((long)(RUN_SEC * 1000));
    for (int i = 0; i < NUM_THREADS; i++)
    {
        int finalI = i;
        threads[i] = new ThreadAnonymousInnerClassHelper(this, dir, w, failed, endTime, finalI);
        threads[i].Start();
    }
    for (int i = 0; i < NUM_THREADS; i++)
    {
        threads[i].Join();
    }
    Assert.IsFalse(failed.Get());
    w.Dispose();
    dir.Dispose();
}
// Three threads each add a DIFFERENT DocValues type under the same field name
// "foo"; the shared hitExc flag (presumably set by the anonymous helper when
// the writer rejects the conflicting type — helper not visible here) must end
// up true.
public virtual void TestMixedTypesDifferentThreads()
{
    Directory dir = NewDirectory();
    IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    CountDownLatch startingGun = new CountDownLatch(1);
    AtomicBoolean hitExc = new AtomicBoolean();
    ThreadClass[] threads = new ThreadClass[3];
    for (int i = 0; i < 3; i++)
    {
        // One conflicting DV flavor per thread: Sorted, Numeric, Binary.
        Field field;
        if (i == 0)
        {
            field = new SortedDocValuesField("foo", new BytesRef("hello"));
        }
        else if (i == 1)
        {
            field = new NumericDocValuesField("foo", 0);
        }
        else
        {
            field = new BinaryDocValuesField("foo", new BytesRef("bazz"));
        }
        Document doc = new Document();
        doc.Add(field);
        threads[i] = new ThreadAnonymousInnerClassHelper(this, w, startingGun, hitExc, doc);
        threads[i].Start();
    }
    startingGun.countDown(); // release all three threads simultaneously
    foreach (ThreadClass t in threads)
    {
        t.Join();
    }
    Assert.IsTrue(hitExc.Get());
    w.Dispose();
    dir.Dispose();
}
// Indexes ~100k chars of line-file docs, records single-threaded TermQuery
// results for ~10 sampled terms, then re-runs those searches from several
// threads (via the anonymous helper) against the same shared searcher.
public virtual void Test()
{
    Directory dir = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir, analyzer, Similarity, TimeZone);
    LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
    int charsToIndex = AtLeast(100000);
    int charsIndexed = 0;
    //System.out.println("bytesToIndex=" + charsToIndex);
    while (charsIndexed < charsToIndex)
    {
        Document doc = docs.NextDoc();
        charsIndexed += doc.Get("body").Length;
        w.AddDocument(doc);
        //System.out.println(" bytes=" + charsIndexed + " add: " + doc);
    }
    IndexReader r = w.Reader;
    //System.out.println("numDocs=" + r.NumDocs);
    w.Dispose();

    IndexSearcher s = NewSearcher(r);
    Terms terms = MultiFields.GetFields(r).Terms("body");
    // Count the terms so we can sample roughly 10 of them below.
    int termCount = 0;
    TermsEnum termsEnum = terms.Iterator(null);
    while (termsEnum.Next() != null)
    {
        termCount++;
    }
    Assert.IsTrue(termCount > 0);

    // Target ~10 terms to search:
    double chance = 10.0 / termCount;
    termsEnum = terms.Iterator(termsEnum);
    // Baseline: single-threaded results the search threads will be checked against.
    IDictionary<BytesRef, TopDocs> answers = new Dictionary<BytesRef, TopDocs>();
    while (termsEnum.Next() != null)
    {
        if (Random().NextDouble() <= chance)
        {
            BytesRef term = BytesRef.DeepCopyOf(termsEnum.Term());
            answers[term] = s.Search(new TermQuery(new Term("body", term)), 100);
        }
    }

    if (answers.Count > 0)
    {
        CountdownEvent startingGun = new CountdownEvent(1);
        int numThreads = TestUtil.NextInt(Random(), 2, 5);
        ThreadClass[] threads = new ThreadClass[numThreads];
        for (int threadID = 0; threadID < numThreads; threadID++)
        {
            ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, s, answers, startingGun);
            threads[threadID] = thread;
            thread.Start();
        }
        startingGun.Signal(); // release all search threads simultaneously
        foreach (ThreadClass thread in threads)
        {
            thread.Join();
        }
    }
    r.Dispose();
    dir.Dispose();
}