        public virtual void TestRandomExceptions()
        {
            MockRAMDirectory dir = new MockRAMDirectory();

            random = new Random((int)(DateTime.Now.Ticks & 0x7fffffff));
            MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

            // Suppress exceptions on the merge threads so they are not rethrown into the
            // test; the failures are injected deliberately and checked via the index below.
            ((ConcurrentMergeScheduler)writer.GetMergeScheduler()).SetSuppressExceptions();
            //writer.setMaxBufferedDocs(10);
            // A tiny RAM buffer forces frequent flushes, so flushing also hits the injected exceptions.
            writer.SetRAMBufferSizeMB(0.1);

            // When debugging, echo IndexWriter's info stream to stdout.
            if (DEBUG)
            {
                System.IO.StreamWriter temp_writer;
                temp_writer           = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Console.Out.Encoding);
                temp_writer.AutoFlush = true;
                writer.SetInfoStream(temp_writer);
            }

            IndexerThread thread = new IndexerThread(this, 0, writer);

            // Run directly on the current thread (no Start()), so any failure has been
            // recorded by the time it is checked below.
            thread.Run();
            if (thread.failure != null)
            {
                System.Console.Out.WriteLine(thread.failure.StackTrace);
                Assert.Fail("thread " + thread.Name + ": hit unexpected failure");
            }

            writer.Commit();

            try
            {
                writer.Close();
            }
            catch (System.Exception t)
            {
                System.Console.Out.WriteLine("exception during close:");
                System.Console.Out.WriteLine(t.StackTrace);
                writer.Rollback();
            }

            // Confirm that when doc hits exception partway through tokenization, it's deleted:
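            // If a document that failed partway through tokenization had been kept,
            // the "aaa" and "ddd" counts in "content4" could differ.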
            IndexReader r2     = IndexReader.Open(dir);
            int         count  = r2.DocFreq(new Term("content4", "aaa"));
            int         count2 = r2.DocFreq(new Term("content4", "ddd"));

            Assert.AreEqual(count, count2);
            r2.Close();

            // Finally verify the index is structurally consistent despite the injected exceptions.
            _TestUtil.CheckIndex(dir);
        }

		public virtual void TestRandomExceptionsThreads()
		{
			
			MockRAMDirectory dir = new MockRAMDirectory();
			MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			// As in the single-threaded test: suppress merge-thread exceptions and keep the
			// RAM buffer small so flushes happen often and also hit the injected failures.
			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
			//writer.setMaxBufferedDocs(10);
			writer.SetRAMBufferSizeMB(0.2);
			
			if (DEBUG)
			{
				System.IO.StreamWriter temp_writer;
				temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Console.Out.Encoding);
				temp_writer.AutoFlush = true;
				writer.SetInfoStream(temp_writer);
			}
			
			int NUM_THREADS = 4;
			
			// Launch several indexer threads that all share the same writer.
			IndexerThread[] threads = new IndexerThread[NUM_THREADS];
			for (int i = 0; i < NUM_THREADS; i++)
			{
				threads[i] = new IndexerThread(this, i, writer);
				threads[i].Start();
			}
			
			// Wait for every thread to finish before inspecting the results.
			for (int i = 0; i < NUM_THREADS; i++)
				threads[i].Join();
			
			for (int i = 0; i < NUM_THREADS; i++)
				if (threads[i].failure != null)
					Assert.Fail("thread " + threads[i].Name + ": hit unexpected failure");
			
			writer.Commit();
			
			try
			{
				writer.Close();
			}
			catch (System.Exception t)
			{
				System.Console.Out.WriteLine("exception during close:");
				System.Console.Out.WriteLine(t.StackTrace);
				writer.Rollback();
			}
			
			// Confirm that when doc hits exception partway through tokenization, it's deleted:
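			// Same check as in the single-threaded test: equal docFreqs for "aaa" and "ddd"
			// mean no partially tokenized document survived.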
			IndexReader r2 = IndexReader.Open(dir);
			int count = r2.DocFreq(new Term("content4", "aaa"));
			int count2 = r2.DocFreq(new Term("content4", "ddd"));
			Assert.AreEqual(count, count2);
			r2.Close();
			
			_TestUtil.CheckIndex(dir);
		}