public override void Run()
{
    // Randomly mix deletes, delete-by-query and indexing operations.
    try
    {
        r = new Random(@base + Range + Seed);
        for (int iter = 0; iter < Iterations; iter++)
        {
            int op = NextInt(100);
            if (op < 5)
            {
                DeleteDoc();
            }
            else if (op < 10)
            {
                DeleteByQuery();
            }
            else
            {
                IndexDoc();
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
        Assert.Fail(e.ToString());
    }

    // NOTE(review): presumably this lock/read pair acts as a memory barrier so
    // other threads observe the final state of Docs — confirm before changing.
    lock (this)
    {
        int dummy = Docs.Count;
    }
}
public virtual void Test()
{
    // Open a shared reader and run the term-position-vector check with an
    // increasing number of threads (1..NumThreads).
    IndexReader reader = null;
    try
    {
        reader = DirectoryReader.Open(Directory);
        for (int threadCount = 1; threadCount <= NumThreads; threadCount++)
        {
            TestTermPositionVectors(reader, threadCount);
        }
    }
    catch (IOException ioe)
    {
        Assert.Fail(ioe.Message);
    }
    finally
    {
        if (reader != null)
        {
            try
            {
                // Close the reader we opened above.
                reader.Dispose();
            }
            catch (IOException ioe)
            {
                Console.WriteLine(ioe.ToString());
                Console.Write(ioe.StackTrace);
            }
        }
    }
}
public override void Run()
{
    // Waiter thread: loops until Stop is flagged, blocking on the stall
    // control and, whenever a checkpoint is active, parking on the
    // synchronizer barrier so the main thread can inspect state.
    try
    {
        while (!Stop.Get())
        {
            Ctrl.WaitIfStalled();
            if (CheckPoint.Get())
            {
#if !NETSTANDARD1_6
                try
                {
#endif
                    Assert.IsTrue(Sync.await());
#if !NETSTANDARD1_6
                }
                catch (ThreadInterruptedException /*e*/)
                {
                    Console.WriteLine("[Waiter] got interrupted - wait count: " + Sync.Waiter.CurrentCount);
                    //throw new ThreadInterruptedException("Thread Interrupted Exception", e);
                    throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details)
                }
#endif
            }
        }
    }
    catch (Exception e)
    {
        // Record any failure so the main test thread can report it later.
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
        Exceptions.Add(e);
    }
}
public override void Run()
{
    // Waiter thread: loops until Stop is flagged, blocking on the stall
    // control and, whenever a checkpoint is active, parking on the
    // synchronizer barrier so the main thread can inspect state.
    try
    {
        while (!Stop.Get())
        {
            Ctrl.WaitIfStalled();
            if (CheckPoint.Get())
            {
#if !NETSTANDARD1_6
                try
                {
#endif
                    Assert.IsTrue(Sync.await());
#if !NETSTANDARD1_6
                }
                catch (ThreadInterruptedException)
                {
                    Console.WriteLine("[Waiter] got interrupted - wait count: " + Sync.Waiter.CurrentCount);
                    // CA2200: rethrow to preserve stack details instead of wrapping in a
                    // new ThreadInterruptedException (matches the fixed sibling Waiter).
                    throw;
                }
#endif
            }
        }
    }
    catch (Exception e)
    {
        // Record any failure so the main test thread can report it later.
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
        Exceptions.Add(e);
    }
}
public override void Run()
{
    // Updater thread: repeatedly flips the stall state and, when a
    // checkpoint is active, signals the join latch and parks on the
    // synchronizer barrier so the main thread can assert invariants.
    try
    {
        while (!Stop.Get())
        {
            // Releaser threads (Release == true) sometimes hammer the
            // control several times per pass.
            int internalIters = Release && Random().NextBoolean() ? AtLeast(5) : 1;
            for (int i = 0; i < internalIters; i++)
            {
                Ctrl.UpdateStalled(Random().NextBoolean());
            }
            if (CheckPoint.Get())
            {
                Sync.UpdateJoin.Signal();
                try
                {
                    Assert.IsTrue(Sync.await());
                }
#if !NETSTANDARD1_6
                catch (ThreadInterruptedException /*e*/)
                {
                    Console.WriteLine("[Updater] got interrupted - wait count: " + Sync.Waiter.CurrentCount);
                    //throw new ThreadInterruptedException("Thread Interrupted Exception", e);
                    throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details)
                }
#endif
                catch (Exception e)
                {
                    Console.Write("signal failed with : " + e);
                    throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details)
                }
                Sync.LeftCheckpoint.Signal();
            }
            if (Random().NextBoolean())
            {
                Thread.Sleep(0);
            }
        }
    }
    catch (Exception e)
    {
        // Record any failure so the main test thread can report it later.
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
        Exceptions.Add(e);
    }
    // Ensure the main thread's UpdateJoin.Wait never deadlocks, even when
    // we exited through the catch block above.
    if (!Sync.UpdateJoin.IsSet)
    {
        Sync.UpdateJoin.Signal();
    }
}
public override void Run()
{
    // Updater thread: repeatedly flips the stall state and, when a
    // checkpoint is active, signals the join latch and parks on the
    // synchronizer barrier so the main thread can assert invariants.
    try
    {
        while (!Stop.Get())
        {
            // Releaser threads (Release == true) sometimes hammer the
            // control several times per pass.
            int internalIters = Release && Random().NextBoolean() ? AtLeast(5) : 1;
            for (int i = 0; i < internalIters; i++)
            {
                Ctrl.UpdateStalled(Random().NextBoolean());
            }
            if (CheckPoint.Get())
            {
                Sync.UpdateJoin.Signal();
                try
                {
                    Assert.IsTrue(Sync.await());
                }
#if !NETSTANDARD1_6
                catch (ThreadInterruptedException)
                {
                    Console.WriteLine("[Updater] got interrupted - wait count: " + Sync.Waiter.CurrentCount);
                    // CA2200: rethrow to preserve stack details instead of wrapping in a
                    // new ThreadInterruptedException (matches the fixed sibling Updater).
                    throw;
                }
#endif
                catch (Exception e)
                {
                    Console.Write("signal failed with : " + e);
                    throw; // CA2200: was "throw e;", which resets the stack trace
                }
                Sync.LeftCheckpoint.Signal();
            }
            if (Random().NextBoolean())
            {
                Thread.Sleep(0);
            }
        }
    }
    catch (Exception e)
    {
        // Record any failure so the main test thread can report it later.
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
        Exceptions.Add(e);
    }
    // Ensure the main thread's UpdateJoin.Wait never deadlocks, even when
    // we exited through the catch block above.
    if (!Sync.UpdateJoin.IsSet)
    {
        Sync.UpdateJoin.Signal();
    }
}
public void Run()
{
    // Exercise term vectors repeatedly (RunsToDo iterations).
    try
    {
        for (int run = 0; run < RunsToDo; run++)
        {
            TestTermVectors();
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }
}
public override void Run()
{
    // Index NumDocs documents whose token streams draw payloads from a shared pool.
    try
    {
        for (int docNum = 0; docNum < NumDocs; docNum++)
        {
            Document document = new Document();
            document.Add(new TextField(Field, new PoolingPayloadTokenStream(OuterInstance, Pool)));
            Writer.AddDocument(document);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
        Assert.Fail(e.ToString());
    }
}
public override void Run()
{
    // Wait for the go signal, then refresh, delete, and refresh again.
    try
    {
        signal.Wait();
        manager.MaybeRefresh();
        writer.DeleteDocuments(new TermQuery(new Term("foo", "barista")));
        manager.MaybeRefresh(); // kick off another reopen so we inc. the internal gen
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }
    finally
    {
        // Count down by one (never below zero) to let the add below finish.
        int remaining = latch.CurrentCount;
        latch.Reset(remaining == 0 ? 0 : remaining - 1);
    }
}
public virtual void TestIsCurrentWithThreads()
{
    // Spin up one writer thread plus several reader threads sharing a
    // ReaderHolder, then verify none of them recorded a failure.
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    ReaderHolder holder = new ReaderHolder();
    ReaderThread[] threads = new ReaderThread[AtLeast(3)];
    CountdownEvent latch = new CountdownEvent(1);
    WriterThread writerThread = new WriterThread(holder, writer, AtLeast(500), Random(), latch);

    for (int i = 0; i < threads.Length; i++)
    {
        threads[i] = new ReaderThread(holder, latch);
        threads[i].Start();
    }
    writerThread.Start();
    writerThread.Join();

    bool anyFailed = writerThread.Failed != null;
    if (anyFailed)
    {
        Console.WriteLine(writerThread.Failed.ToString());
        Console.Write(writerThread.Failed.StackTrace);
    }
    foreach (ReaderThread readerThread in threads)
    {
        readerThread.Join();
        if (readerThread.Failed != null)
        {
            Console.WriteLine(readerThread.Failed.ToString());
            Console.Write(readerThread.Failed.StackTrace);
            anyFailed = true;
        }
    }
    Assert.IsFalse(anyFailed);
    writer.Dispose();
    dir.Dispose();
}
public virtual void Test()
{
    // Stress test: randomly inject IOExceptions on file open (simulating
    // running out of file descriptors) and verify IndexWriter never loses
    // documents or corrupts the index.
    MockDirectoryWrapper dir = NewMockFSDirectory(CreateTempDir("TestIndexWriterOutOfFileDescriptors"));
    dir.PreventDoubleWrite = false;
    double rate = Random.NextDouble() * 0.01;
    //System.out.println("rate=" + rate);
    dir.RandomIOExceptionRateOnOpen = rate;
    int iters = AtLeast(20);
    LineFileDocs docs = new LineFileDocs(Random, DefaultCodecSupportsDocValues);
    IndexReader r = null;
    DirectoryReader r2 = null;
    bool any = false; // becomes true once at least one iteration succeeded
    MockDirectoryWrapper dirCopy = null;
    int lastNumDocs = 0;
    for (int iter = 0; iter < iters; iter++)
    {
        IndexWriter w = null;
        if (VERBOSE)
        {
            Console.WriteLine("TEST: iter=" + iter);
        }
        try
        {
            MockAnalyzer analyzer = new MockAnalyzer(Random);
            analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);

            if (VERBOSE)
            {
                // Do this ourselves instead of relying on LTC so
                // we see incrementing messageID:
                iwc.SetInfoStream(new TextWriterInfoStream(Console.Out));
            }
            var ms = iwc.MergeScheduler;
            if (ms is IConcurrentMergeScheduler)
            {
                ((IConcurrentMergeScheduler)ms).SetSuppressExceptions();
            }
            w = new IndexWriter(dir, iwc);
            if (r != null && Random.Next(5) == 3)
            {
                // Occasionally fold the saved copy back in via addIndexes:
                if (Random.NextBoolean())
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: addIndexes IR[]");
                    }
                    w.AddIndexes(new IndexReader[] { r });
                }
                else
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: addIndexes Directory[]");
                    }
                    w.AddIndexes(new Directory[] { dirCopy });
                }
            }
            else
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: addDocument");
                }
                w.AddDocument(docs.NextDoc());
            }
            dir.RandomIOExceptionRateOnOpen = 0.0;
            w.Dispose();
            w = null;

            // NOTE: this is O(N^2)! Only enable for temporary debugging:
            //dir.setRandomIOExceptionRateOnOpen(0.0);
            //TestUtil.CheckIndex(dir);
            //dir.setRandomIOExceptionRateOnOpen(rate);

            // Verify numDocs only increases, to catch IndexWriter
            // accidentally deleting the index:
            dir.RandomIOExceptionRateOnOpen = 0.0;
            Assert.IsTrue(DirectoryReader.IndexExists(dir));
            if (r2 == null)
            {
                r2 = DirectoryReader.Open(dir);
            }
            else
            {
                DirectoryReader r3 = DirectoryReader.OpenIfChanged(r2);
                if (r3 != null)
                {
                    r2.Dispose();
                    r2 = r3;
                }
            }
            Assert.IsTrue(r2.NumDocs >= lastNumDocs, "before=" + lastNumDocs + " after=" + r2.NumDocs);
            lastNumDocs = r2.NumDocs;
            //System.out.println("numDocs=" + lastNumDocs);
            dir.RandomIOExceptionRateOnOpen = rate;

            any = true;
            if (VERBOSE)
            {
                Console.WriteLine("TEST: iter=" + iter + ": success");
            }
        }
        catch (IOException ioe)
        {
            if (VERBOSE)
            {
                Console.WriteLine("TEST: iter=" + iter + ": exception");
                Console.WriteLine(ioe.ToString());
                Console.Write(ioe.StackTrace);
            }
            if (w != null)
            {
                // NOTE: leave random IO exceptions enabled here,
                // to verify that rollback does not try to write
                // anything:
                w.Rollback();
            }
        }

        if (any && r == null && Random.NextBoolean())
        {
            // Make a copy of a non-empty index so we can use
            // it to addIndexes later:
            dir.RandomIOExceptionRateOnOpen = 0.0;
            r = DirectoryReader.Open(dir);
            dirCopy = NewMockFSDirectory(CreateTempDir("TestIndexWriterOutOfFileDescriptors.copy"));
            HashSet<string> files = new HashSet<string>();
            foreach (string file in dir.ListAll())
            {
                dir.Copy(dirCopy, file, file, IOContext.DEFAULT);
                files.Add(file);
            }
            dirCopy.Sync(files);
            // Have IW kiss the dir so we remove any leftover
            // files ... we can easily have leftover files at
            // the time we take a copy because we are holding
            // open a reader:
            (new IndexWriter(dirCopy, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)))).Dispose();
            dirCopy.RandomIOExceptionRate = rate;
            dir.RandomIOExceptionRateOnOpen = rate;
        }
    }

    if (r2 != null)
    {
        r2.Dispose();
    }
    if (r != null)
    {
        r.Dispose();
        dirCopy.Dispose();
    }
    dir.Dispose();
}
public override void Run()
{
    // Indexing stress thread: until stopTime (or a failure elsewhere),
    // randomly adds/updates single docs and doc blocks, buffers ids for
    // later deletion, and occasionally applies those deletes.  All the
    // bookkeeping (addCount/delCount/delIDs/...) feeds the outer test's
    // final verification.
    // TODO: would be better if this were cross thread, so that we make sure one thread deleting anothers added docs works:
    IList<string> toDeleteIDs = new List<string>();
    IList<SubDocs> toDeleteSubDocs = new List<SubDocs>();
    while (Environment.TickCount < stopTime && !outerInstance.m_failed.Get())
    {
        try
        {
            // Occasional longish pause if running
            // nightly
            if (LuceneTestCase.TEST_NIGHTLY && Random.Next(6) == 3)
            {
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": now long sleep");
                }
                Thread.Sleep(TestUtil.NextInt32(Random, 50, 500));
            }

            // Rate limit ingest rate:
            if (Random.Next(7) == 5)
            {
                Thread.Sleep(TestUtil.NextInt32(Random, 1, 10));
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": done sleep");
                }
            }

            Document doc = docs.NextDoc();
            if (doc == null)
            {
                break;
            }

            // Maybe add randomly named field
            string addedField;
            if (Random.NextBoolean())
            {
                addedField = "extra" + Random.Next(40);
                doc.Add(NewTextField(addedField, "a random field", Field.Store.YES));
            }
            else
            {
                addedField = null;
            }

            if (Random.NextBoolean())
            {
                if (Random.NextBoolean())
                {
                    // Add/update doc block:
                    string packID;
                    SubDocs delSubDocs;
                    if (toDeleteSubDocs.Count > 0 && Random.NextBoolean())
                    {
                        delSubDocs = toDeleteSubDocs[Random.Next(toDeleteSubDocs.Count)];
                        Debug.Assert(!delSubDocs.Deleted);
                        toDeleteSubDocs.Remove(delSubDocs);
                        // Update doc block, replacing prior packID
                        packID = delSubDocs.PackID;
                    }
                    else
                    {
                        delSubDocs = null;
                        // Add doc block, using new packID
                        packID = outerInstance.m_packCount.GetAndIncrement().ToString(CultureInfo.InvariantCulture);
                    }

                    Field packIDField = NewStringField("packID", packID, Field.Store.YES);
                    IList<string> docIDs = new List<string>();
                    SubDocs subDocs = new SubDocs(packID, docIDs);
                    IList<Document> docsList = new List<Document>();

                    allSubDocs.Add(subDocs);
                    doc.Add(packIDField);
                    docsList.Add(TestUtil.CloneDocument(doc));
                    docIDs.Add(doc.Get("docid"));

                    // Gather 1..10 docs into the block:
                    int maxDocCount = TestUtil.NextInt32(Random, 1, 10);
                    while (docsList.Count < maxDocCount)
                    {
                        doc = docs.NextDoc();
                        if (doc == null)
                        {
                            break;
                        }
                        docsList.Add(TestUtil.CloneDocument(doc));
                        docIDs.Add(doc.Get("docid"));
                    }
                    outerInstance.m_addCount.AddAndGet(docsList.Count);

                    Term packIDTerm = new Term("packID", packID);

                    if (delSubDocs != null)
                    {
                        // Replacing an earlier block: its sub-ids are now deleted.
                        delSubDocs.Deleted = true;
                        delIDs.UnionWith(delSubDocs.SubIDs);
                        outerInstance.m_delCount.AddAndGet(delSubDocs.SubIDs.Count);
                        if (VERBOSE)
                        {
                            Console.WriteLine(Thread.CurrentThread.Name + ": update pack packID=" + delSubDocs.PackID + " count=" + docsList.Count + " docs=" + Arrays.ToString(docIDs));
                        }
                        outerInstance.UpdateDocuments(packIDTerm, docsList);
                    }
                    else
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine(Thread.CurrentThread.Name + ": add pack packID=" + packID + " count=" + docsList.Count + " docs=" + Arrays.ToString(docIDs));
                        }
                        outerInstance.AddDocuments(packIDTerm, docsList);
                    }
                    doc.RemoveField("packID");

                    if (Random.Next(5) == 2)
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine(Thread.CurrentThread.Name + ": buffer del id:" + packID);
                        }
                        toDeleteSubDocs.Add(subDocs);
                    }
                }
                else
                {
                    // Add single doc
                    string docid = doc.Get("docid");
                    if (VERBOSE)
                    {
                        Console.WriteLine(Thread.CurrentThread.Name + ": add doc docid:" + docid);
                    }
                    outerInstance.AddDocument(new Term("docid", docid), doc);
                    outerInstance.m_addCount.GetAndIncrement();

                    if (Random.Next(5) == 3)
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine(Thread.CurrentThread.Name + ": buffer del id:" + doc.Get("docid"));
                        }
                        toDeleteIDs.Add(docid);
                    }
                }
            }
            else
            {
                // Update single doc, but we never re-use
                // and ID so the delete will never
                // actually happen:
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": update doc id:" + doc.Get("docid"));
                }
                string docid = doc.Get("docid");
                outerInstance.UpdateDocument(new Term("docid", docid), doc);
                outerInstance.m_addCount.GetAndIncrement();

                if (Random.Next(5) == 3)
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine(Thread.CurrentThread.Name + ": buffer del id:" + doc.Get("docid"));
                    }
                    toDeleteIDs.Add(docid);
                }
            }

            if (Random.Next(30) == 17)
            {
                // Occasionally apply all buffered deletes:
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": apply " + toDeleteIDs.Count + " deletes");
                }
                foreach (string id in toDeleteIDs)
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine(Thread.CurrentThread.Name + ": del term=id:" + id);
                    }
                    outerInstance.DeleteDocuments(new Term("docid", id));
                }
                int count = outerInstance.m_delCount.AddAndGet(toDeleteIDs.Count);
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": tot " + count + " deletes");
                }
                delIDs.UnionWith(toDeleteIDs);
                toDeleteIDs.Clear();

                foreach (SubDocs subDocs in toDeleteSubDocs)
                {
                    Debug.Assert(!subDocs.Deleted);
                    delPackIDs.Add(subDocs.PackID);
                    outerInstance.DeleteDocuments(new Term("packID", subDocs.PackID));
                    subDocs.Deleted = true;
                    if (VERBOSE)
                    {
                        Console.WriteLine(Thread.CurrentThread.Name + ": del subs: " + subDocs.SubIDs + " packID=" + subDocs.PackID);
                    }
                    delIDs.UnionWith(subDocs.SubIDs);
                    outerInstance.m_delCount.AddAndGet(subDocs.SubIDs.Count);
                }
                toDeleteSubDocs.Clear();
            }
            if (addedField != null)
            {
                doc.RemoveField(addedField);
            }
        }
        catch (Exception t)
        {
            // Any exception fails the whole test: flag it and rethrow.
            Console.WriteLine(Thread.CurrentThread.Name + ": hit exc");
            Console.WriteLine(t.ToString());
            Console.Write(t.StackTrace);
            outerInstance.m_failed.Set(true);
            throw new Exception(t.ToString(), t);
        }
    }
    if (VERBOSE)
    {
        Console.WriteLine(Thread.CurrentThread.Name + ": indexing done");
    }

    outerInstance.DoAfterIndexingThreadDone();
}
public void TestRandom()
{
    // Randomized end-to-end test: build a FreeTextSuggester over random
    // Zipf-distributed docs, then compare its lookups against a brute-force
    // backoff ngram model computed here.

    // Pick a small vocabulary of unique random tokens:
    string[] terms = new string[TestUtil.NextInt32(Random, 2, 10)];
    ISet<string> seen = new HashSet<string>();
    while (seen.size() < terms.Length)
    {
        string token = TestUtil.RandomSimpleString(Random, 1, 5);
        if (!seen.contains(token))
        {
            terms[seen.size()] = token;
            seen.add(token);
        }
    }

    Analyzer a = new MockAnalyzer(Random);

    int numDocs = AtLeast(10);
    long totTokens = 0;
    string[][] docs = new string[numDocs][];
    for (int i = 0; i < numDocs; i++)
    {
        docs[i] = new string[AtLeast(100)];
        if (VERBOSE)
        {
            Console.Write(" doc " + i + ":");
        }
        for (int j = 0; j < docs[i].Length; j++)
        {
            docs[i][j] = GetZipfToken(terms);
            if (VERBOSE)
            {
                Console.Write(" " + docs[i][j]);
            }
        }
        if (VERBOSE)
        {
            Console.WriteLine();
        }
        totTokens += docs[i].Length;
    }

    int grams = TestUtil.NextInt32(Random, 1, 4);

    if (VERBOSE)
    {
        Console.WriteLine("TEST: " + terms.Length + " terms; " + numDocs + " docs; " + grams + " grams");
    }

    // Build suggester model:
    FreeTextSuggester sug = new FreeTextSuggester(a, a, grams, (byte)0x20);
    sug.Build(new TestRandomInputIterator(this, docs));

    // Build inefficient but hopefully correct model:
    List<IDictionary<string, int?>> gramCounts = new List<IDictionary<string, int?>>(grams);
    for (int gram = 0; gram < grams; gram++)
    {
        if (VERBOSE)
        {
            Console.WriteLine("TEST: build model for gram=" + gram);
        }
        // model maps each (gram+1)-token ngram -> its occurrence count.
        IDictionary<string, int?> model = new HashMap<string, int?>();
        gramCounts.Add(model);
        foreach (string[] doc in docs)
        {
            for (int i = 0; i < doc.Length - gram; i++)
            {
                StringBuilder b = new StringBuilder();
                for (int j = i; j <= i + gram; j++)
                {
                    if (j > i)
                    {
                        b.append(' ');
                    }
                    b.append(doc[j]);
                }
                string token = b.toString();
                int? curCount = model.ContainsKey(token) ? model[token] : null;
                if (curCount == null)
                {
                    model.Put(token, 1);
                }
                else
                {
                    model.Put(token, 1 + curCount);
                }
                if (VERBOSE)
                {
                    Console.WriteLine(" add '" + token + "' -> count=" + (model.ContainsKey(token) ? model[token].ToString() : ""));
                }
            }
        }
    }

    int lookups = AtLeast(100);
    for (int iter = 0; iter < lookups; iter++)
    {
        string[] tokens = new string[TestUtil.NextInt32(Random, 1, 5)];
        for (int i = 0; i < tokens.Length; i++)
        {
            tokens[i] = GetZipfToken(terms);
        }

        // Maybe trim last token; be sure not to create the
        // empty string:
        int trimStart;
        if (tokens.Length == 1)
        {
            trimStart = 1;
        }
        else
        {
            trimStart = 0;
        }
        int trimAt = TestUtil.NextInt32(Random, trimStart, tokens[tokens.Length - 1].Length);
        tokens[tokens.Length - 1] = tokens[tokens.Length - 1].Substring(0, trimAt - 0);

        int num = TestUtil.NextInt32(Random, 1, 100);
        StringBuilder b = new StringBuilder();
        foreach (string token in tokens)
        {
            b.append(' ');
            b.append(token);
        }
        string query = b.toString();
        query = query.Substring(1);

        if (VERBOSE)
        {
            Console.WriteLine("\nTEST: iter=" + iter + " query='" + query + "' num=" + num);
        }

        // Expected: compute suggestions by hand, highest-order model first,
        // multiplying in the backoff penalty each time we fall through.
        List<Lookup.LookupResult> expected = new List<Lookup.LookupResult>();
        double backoff = 1.0;
        seen = new HashSet<string>();

        if (VERBOSE)
        {
            Console.WriteLine(" compute expected");
        }
        for (int i = grams - 1; i >= 0; i--)
        {
            if (VERBOSE)
            {
                Console.WriteLine(" grams=" + i);
            }

            if (tokens.Length < i + 1)
            {
                // Don't have enough tokens to use this model
                if (VERBOSE)
                {
                    Console.WriteLine(" skip");
                }
                continue;
            }

            if (i == 0 && tokens[tokens.Length - 1].Length == 0)
            {
                // Never suggest unigrams from empty string:
                if (VERBOSE)
                {
                    Console.WriteLine(" skip unigram priors only");
                }
                continue;
            }

            // Build up "context" ngram:
            b = new StringBuilder();
            for (int j = tokens.Length - i - 1; j < tokens.Length - 1; j++)
            {
                b.append(' ');
                b.append(tokens[j]);
            }
            string context = b.toString();
            if (context.Length > 0)
            {
                context = context.Substring(1);
            }
            if (VERBOSE)
            {
                Console.WriteLine(" context='" + context + "'");
            }
            long contextCount;
            if (context.Length == 0)
            {
                contextCount = totTokens;
            }
            else
            {
                //int? count = gramCounts.get(i - 1).get(context);
                var gramCount = gramCounts[i - 1];
                int? count = gramCount.ContainsKey(context) ? gramCount[context] : null;
                if (count == null)
                {
                    // We never saw this context:
                    backoff *= FreeTextSuggester.ALPHA;
                    if (VERBOSE)
                    {
                        Console.WriteLine(" skip: never saw context");
                    }
                    continue;
                }
                contextCount = count.GetValueOrDefault();
            }
            if (VERBOSE)
            {
                Console.WriteLine(" contextCount=" + contextCount);
            }
            IDictionary<string, int?> model = gramCounts[i];

            // First pass, gather all predictions for this model:
            if (VERBOSE)
            {
                Console.WriteLine(" find terms w/ prefix=" + tokens[tokens.Length - 1]);
            }
            List<Lookup.LookupResult> tmp = new List<Lookup.LookupResult>();
            foreach (string term in terms)
            {
                if (term.StartsWith(tokens[tokens.Length - 1], StringComparison.Ordinal))
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine(" term=" + term);
                    }
                    if (seen.contains(term))
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine(" skip seen");
                        }
                        continue;
                    }
                    string ngram = (context + " " + term).Trim();
                    //Integer count = model.get(ngram);
                    int? count = model.ContainsKey(ngram) ? model[ngram] : null;
                    if (count != null)
                    {
                        // LUCENENET NOTE: We need to calculate this as decimal because when using double it can sometimes
                        // return numbers that are greater than long.MaxValue, which results in a negative long number.
                        // This is also the way it is being done in the FreeTextSuggester to work around the issue.
                        Lookup.LookupResult lr = new Lookup.LookupResult(ngram, (long)(long.MaxValue * ((decimal)backoff * (decimal)count / contextCount)));
                        tmp.Add(lr);
                        if (VERBOSE)
                        {
                            Console.WriteLine(" add tmp key='" + lr.Key + "' score=" + lr.Value);
                        }
                    }
                }
            }

            // Second pass, trim to only top N, and fold those
            // into overall suggestions:
            tmp.Sort(byScoreThenKey);
            if (tmp.size() > num)
            {
                //tmp.subList(num, tmp.size()).clear();
                tmp.RemoveRange(num, tmp.size() - num);
            }
            foreach (Lookup.LookupResult result in tmp)
            {
                string key = result.Key.toString();
                int idx = key.LastIndexOf(' ');
                string lastToken;
                if (idx != -1)
                {
                    lastToken = key.Substring(idx + 1);
                }
                else
                {
                    lastToken = key;
                }
                if (!seen.contains(lastToken))
                {
                    seen.add(lastToken);
                    expected.Add(result);
                    if (VERBOSE)
                    {
                        Console.WriteLine(" keep key='" + result.Key + "' score=" + result.Value);
                    }
                }
            }

            backoff *= FreeTextSuggester.ALPHA;
        }

        expected.Sort(byScoreThenKey);

        if (expected.size() > num)
        {
            expected.RemoveRange(num, expected.size() - num);
        }

        // Actual:
        IList<Lookup.LookupResult> actual = sug.DoLookup(query, num);

        if (VERBOSE)
        {
            Console.WriteLine(" expected: " + expected);
            Console.WriteLine(" actual: " + actual);
        }

        assertEquals(expected.ToString(), actual.ToString());
    }
}
public virtual void TestParseInt()
{
    // Invalid inputs must throw FormatException; valid inputs must parse
    // exactly (including a windowed parse of a sub-range of chars).
    int test;
    try
    {
        test = ArrayUtil.ParseInt32("".ToCharArray());
        Assert.Fail("Expected FormatException for empty input");
    }
    catch (FormatException) // exception-less catch: no unused-variable warning
    {
        //expected
    }
    try
    {
        test = ArrayUtil.ParseInt32("foo".ToCharArray());
        Assert.Fail("Expected FormatException for non-numeric input");
    }
    catch (FormatException)
    {
        //expected
    }
    try
    {
        // long.MaxValue overflows Int32:
        test = ArrayUtil.ParseInt32(Convert.ToString(long.MaxValue).ToCharArray());
        Assert.Fail("Expected FormatException for value larger than Int32.MaxValue");
    }
    catch (FormatException)
    {
        //expected
    }
    try
    {
        test = ArrayUtil.ParseInt32("0.34".ToCharArray());
        Assert.Fail("Expected FormatException for non-integer input");
    }
    catch (FormatException)
    {
        //expected
    }

    try
    {
        test = ArrayUtil.ParseInt32("1".ToCharArray());
        Assert.IsTrue(test == 1, test + " does not equal: " + 1);
        test = ArrayUtil.ParseInt32("-10000".ToCharArray());
        Assert.IsTrue(test == -10000, test + " does not equal: " + -10000);
        test = ArrayUtil.ParseInt32("1923".ToCharArray());
        Assert.IsTrue(test == 1923, test + " does not equal: " + 1923);
        test = ArrayUtil.ParseInt32("-1".ToCharArray());
        Assert.IsTrue(test == -1, test + " does not equal: " + -1);
        // Parse only chars [4, 4+4) of the buffer:
        test = ArrayUtil.ParseInt32("foo 1923 bar".ToCharArray(), 4, 4);
        Assert.IsTrue(test == 1923, test + " does not equal: " + 1923);
    }
    catch (FormatException e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
        Assert.Fail("Valid input should not have thrown: " + e);
    }
}
public virtual void TestAccquireReleaseRace()
{
    // Stress DocumentsWriterStallControl with concurrent staller, releaser
    // and waiter threads, periodically pausing everything at a checkpoint
    // barrier to assert the control's invariants.
    DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
    ctrl.UpdateStalled(false);
    AtomicBoolean stop = new AtomicBoolean(false);
    AtomicBoolean checkPoint = new AtomicBoolean(true);

    int numStallers = AtLeast(1);
    int numReleasers = AtLeast(1);
    int numWaiters = AtLeast(1);

    var sync = new Synchronizer(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
    var threads = new ThreadClass[numReleasers + numStallers + numWaiters];
    IList<Exception> exceptions = new SynchronizedList<Exception>();
    // Thread layout: [0, numReleasers) releasers, then stallers, then waiters.
    for (int i = 0; i < numReleasers; i++)
    {
        threads[i] = new Updater(stop, checkPoint, ctrl, sync, true, exceptions);
    }
    for (int i = numReleasers; i < numReleasers + numStallers; i++)
    {
        threads[i] = new Updater(stop, checkPoint, ctrl, sync, false, exceptions);
    }
    for (int i = numReleasers + numStallers; i < numReleasers + numStallers + numWaiters; i++)
    {
        threads[i] = new Waiter(stop, checkPoint, ctrl, sync, exceptions);
    }

    Start(threads);
    int iters = AtLeast(10000);
    float checkPointProbability = TEST_NIGHTLY ? 0.5f : 0.1f;
    for (int i = 0; i < iters; i++)
    {
        if (checkPoint.Get())
        {
            // All updater threads must reach the join latch within 10s,
            // otherwise we likely deadlocked.
            Assert.IsTrue(sync.UpdateJoin.Wait(new TimeSpan(0, 0, 0, 10)), "timed out waiting for update threads - deadlock?");
            if (exceptions.Count > 0)
            {
                foreach (Exception throwable in exceptions)
                {
                    Console.WriteLine(throwable.ToString());
                    Console.Write(throwable.StackTrace);
                }
                Assert.Fail("got exceptions in threads");
            }

            if (ctrl.HasBlocked && ctrl.IsHealthy)
            {
                AssertState(numReleasers, numStallers, numWaiters, threads, ctrl);
            }

            // Release the checkpoint and wait for everyone to leave it.
            checkPoint.Set(false);
            sync.Waiter.Signal();
            sync.LeftCheckpoint.Wait();
        }
        Assert.IsFalse(checkPoint.Get());
        Assert.AreEqual(0, sync.Waiter.CurrentCount);
        if (checkPointProbability >= (float)Random().NextDouble())
        {
            // Re-arm the barrier and schedule another checkpoint.
            sync.Reset(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
            checkPoint.Set(true);
        }
    }
    if (!checkPoint.Get())
    {
        // Force one final checkpoint so we can assert the terminal state.
        sync.Reset(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
        checkPoint.Set(true);
    }

    Assert.IsTrue(sync.UpdateJoin.Wait(new TimeSpan(0, 0, 0, 10)));
    AssertState(numReleasers, numStallers, numWaiters, threads, ctrl);
    checkPoint.Set(false);
    stop.Set(true);
    sync.Waiter.Signal();
    sync.LeftCheckpoint.Wait();

    for (int i = 0; i < threads.Length; i++)
    {
        // Unstall so every thread can observe stop and exit.
        ctrl.UpdateStalled(false);
        threads[i].Join(2000);
        if (threads[i].IsAlive && threads[i] is Waiter)
        {
            if (threads[i].State == ThreadState.WaitSleepJoin)
            {
                Assert.Fail("waiter is not released - anyThreadsStalled: " + ctrl.AnyStalledThreads());
            }
        }
    }
}