        public virtual void TestSubclassConcurrentMergeScheduler()
        {
            MockRAMDirectory dir = new MockRAMDirectory();

            dir.FailOn(new FailOnlyOnMerge());

            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);

            IndexWriter      writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            MyMergeScheduler ms     = new MyMergeScheduler(this);

            writer.SetMergeScheduler(ms);
            writer.SetMaxBufferedDocs(2);
            writer.SetRAMBufferSizeMB(Lucene.Net.Index.IndexWriter.DISABLE_AUTO_FLUSH);
            for (int i = 0; i < 20; i++)
            {
                writer.AddDocument(doc);
            }

            ms.Sync();
            writer.Close();

            Assert.IsTrue(mergeThreadCreated);
            Assert.IsTrue(mergeCalled);
            Assert.IsTrue(excCalled);
            dir.Close();
            Assert.IsTrue(ConcurrentMergeScheduler.AnyUnhandledExceptions());
        }
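        // The MyMergeScheduler used above is a subclass defined alongside the test.
        // A minimal sketch of it, assuming the override points mirror the
        // corresponding Java Lucene test (GetMergeThread, DoMerge,
        // HandleMergeException) and that the enclosing test class exposes the
        // mergeThreadCreated/mergeCalled/excCalled flags; exact signatures may
        // differ between Lucene.NET versions.
        private class MyMergeScheduler : ConcurrentMergeScheduler
        {
            private readonly TestMergeSchedulerExternal enclosing; // hypothetical enclosing test class

            public MyMergeScheduler(TestMergeSchedulerExternal enclosing)
            {
                this.enclosing = enclosing;
            }

            protected internal override MergeThread GetMergeThread(IndexWriter writer, MergePolicy.OneMerge merge)
            {
                enclosing.mergeThreadCreated = true;      // asserted by the test
                return base.GetMergeThread(writer, merge);
            }

            protected internal override void DoMerge(MergePolicy.OneMerge merge)
            {
                enclosing.mergeCalled = true;             // asserted by the test
                base.DoMerge(merge);                      // FailOnlyOnMerge throws here...
            }

            protected internal override void HandleMergeException(System.Exception exc)
            {
                enclosing.excCalled = true;               // ...and is recorded here
            }
        }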
Example #2
        public virtual void TestFlushExceptions()
        {
            MockRAMDirectory directory = new MockRAMDirectory();
            FailOnlyOnFlush  failure   = new FailOnlyOnFlush();

            directory.FailOn(failure);

            IndexWriter writer           = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
            ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();

            writer.SetMergeScheduler(cms);
            writer.SetMaxBufferedDocs(2);
            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);
            int extraCount = 0;

            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 20; j++)
                {
                    idField.SetValue(System.Convert.ToString(i * 20 + j));
                    writer.AddDocument(doc);
                }

                while (true)
                {
                    // must cycle here because sometimes the merge flushes
                    // the doc we just added and so there's nothing to
                    // flush, and we don't hit the exception
                    writer.AddDocument(doc);
                    failure.SetDoFail();
                    try
                    {
                        writer.Flush(true, false, true);
                        if (failure.hitExc)
                        {
                            Assert.Fail("failed to hit IOException");
                        }
                        extraCount++;
                    }
                    catch (System.IO.IOException ioe)
                    {
                        failure.ClearDoFail();
                        break;
                    }
                }
            }

            writer.Close();
            IndexReader reader = IndexReader.Open(directory, true);

            Assert.AreEqual(200 + extraCount, reader.NumDocs());
            reader.Close();
            directory.Close();
        }
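        // A sketch of the FailOnlyOnFlush failure armed above, assuming the
        // stack-inspection approach of the original Lucene test and that
        // MockRAMDirectory.Failure exposes a protected doFail flag with virtual
        // SetDoFail/Eval hooks. The "DoFlush" method name it looks for is an
        // assumption and varies between Lucene.NET versions.
        private class FailOnlyOnFlush : MockRAMDirectory.Failure
        {
            internal bool hitExc;

            public override void SetDoFail()
            {
                base.SetDoFail();
                hitExc = false;
            }

            public override void Eval(MockRAMDirectory dir)
            {
                if (!doFail)
                    return;
                // Only fail when the write was issued from inside the flush path.
                foreach (System.Diagnostics.StackFrame frame in new System.Diagnostics.StackTrace().GetFrames())
                {
                    System.Reflection.MethodBase m = frame.GetMethod();
                    if (m != null && m.Name == "DoFlush")
                    {
                        hitExc = true;
                        throw new System.IO.IOException("now failing during flush");
                    }
                }
            }
        }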
        public virtual void TestErrorInDocsWriterAdd()
        {
            MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);

            // create a couple of files

            System.String[] keywords  = new System.String[] { "1", "2" };
            System.String[] unindexed = new System.String[] { "Netherlands", "Italy" };
            System.String[] unstored  = new System.String[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
            System.String[] text      = new System.String[] { "Amsterdam", "Venice" };

            for (int pass = 0; pass < 2; pass++)
            {
                bool             autoCommit = (0 == pass);
                MockRAMDirectory dir        = new MockRAMDirectory();
                IndexWriter      modifier   = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);

                dir.FailOn(failure.Reset());

                for (int i = 0; i < keywords.Length; i++)
                {
                    Document doc = new Document();
                    doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
                    doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
                    doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
                    doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
                    try
                    {
                        modifier.AddDocument(doc);
                    }
                    catch (System.IO.IOException io)
                    {
                        break;
                    }
                }

                System.String[] startFiles = dir.ListAll();
                SegmentInfos    infos      = new SegmentInfos();
                infos.Read(dir);
                new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
                System.String[] endFiles = dir.ListAll();

                if (!SupportClass.CollectionsHelper.CompareStringArrays(startFiles, endFiles))
                {
                    Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
                }

                modifier.Close();
            }
        }
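        // AnonymousClassFailure1 is the converter's name for an anonymous Failure
        // subclass. A sketch of its behavior, assuming it matches the Java
        // original: after Reset() it throws exactly once, on the first directory
        // operation, simulating an error inside DocumentsWriter's add path.
        private class AnonymousClassFailure1 : MockRAMDirectory.Failure
        {
            private bool failed;

            public AnonymousClassFailure1(object enclosingInstance)
            {
                // the generated class captures the enclosing test; unused here
            }

            public override MockRAMDirectory.Failure Reset()
            {
                failed = false;
                return this;
            }

            public override void Eval(MockRAMDirectory dir)
            {
                if (!failed)
                {
                    failed = true;
                    throw new System.IO.IOException("fail in add doc");
                }
            }
        }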
Example #5
        public virtual void TestFlushExceptions()
        {
            MockRAMDirectory directory = new MockRAMDirectory();
            FailOnlyOnFlush  failure   = new FailOnlyOnFlush();

            directory.FailOn(failure);

            IndexWriter writer           = new IndexWriter(directory, true, ANALYZER, true);
            ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();

            writer.SetMergeScheduler(cms);
            writer.SetMaxBufferedDocs(2);
            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 20; j++)
                {
                    idField.SetValue(System.Convert.ToString(i * 20 + j));
                    writer.AddDocument(doc);
                }

                writer.AddDocument(doc);

                failure.SetDoFail();
                try
                {
                    writer.Flush();
                    Assert.Fail("failed to hit IOException");
                }
                catch (System.IO.IOException ioe)
                {
                    failure.ClearDoFail();
                }
            }

            writer.Close();
            IndexReader reader = IndexReader.Open(directory);

            Assert.AreEqual(200, reader.NumDocs());
            reader.Close();
            directory.Close();
        }
Example #7
        public virtual void TestErrorAfterApplyDeletes()
        {
            MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);

            // create a couple of files

            System.String[] keywords  = new System.String[] { "1", "2" };
            System.String[] unindexed = new System.String[] { "Netherlands", "Italy" };
            System.String[] unstored  = new System.String[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
            System.String[] text = new System.String[] { "Amsterdam", "Venice" };

            MockRAMDirectory dir      = new MockRAMDirectory();
            IndexWriter      modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED, null);

            modifier.UseCompoundFile = true;
            modifier.SetMaxBufferedDeleteTerms(2);

            dir.FailOn(failure.Reset());

            for (int i = 0; i < keywords.Length; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
                doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
                doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
                modifier.AddDocument(doc, null);
            }
            // flush (and commit if ac)

            modifier.Optimize(null);
            modifier.Commit(null);

            // one of the two files hits

            Term term     = new Term("city", "Amsterdam");
            int  hitCount = GetHitCount(dir, term);

            Assert.AreEqual(1, hitCount);

            // open the writer again (closed above)

            // delete the doc
            // max buf del terms is two, so this is buffered

            modifier.DeleteDocuments(null, term);

            // add a doc (needed for the !ac case; see below)
            // doc remains buffered

            Document doc2 = new Document();

            modifier.AddDocument(doc2, null);

            // commit the changes, the buffered deletes, and the new doc

            // The failure object will fail on the first write after the del
            // file gets created when processing the buffered delete

            // in the ac case, this will be when writing the new segments
            // files so we really don't need the new doc, but it's harmless

            // in the !ac case, a new segments file won't be created but in
            // this case, creation of the cfs file happens next so we need
            // the doc (to test that it's okay that we don't lose deletes if
            // failing while creating the cfs file)

            bool failed = false;

            try
            {
                modifier.Commit(null);
            }
            catch (System.IO.IOException ioe)
            {
                failed = true;
            }

            Assert.IsTrue(failed);

            // The commit above failed, so we need to retry it (which will
            // succeed, because the failure is a one-shot)

            modifier.Commit(null);

            hitCount = GetHitCount(dir, term);

            // Make sure the delete was successfully flushed:
            Assert.AreEqual(0, hitCount);

            modifier.Close();
            dir.Close();
        }
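        // A sketch of the AnonymousClassFailure used above, assuming the two-phase
        // trigger of the Java original: it first waits until it sees a write made
        // while the deletes are being applied (the .del file being written), then
        // fails the next write that happens outside that code path. The
        // "ApplyDeletes" method name is an assumption about this port.
        private class AnonymousClassFailure : MockRAMDirectory.Failure
        {
            private bool sawMaybe;
            private bool failed;

            public AnonymousClassFailure(object enclosingInstance)
            {
                // the generated class captures the enclosing test; unused here
            }

            public override MockRAMDirectory.Failure Reset()
            {
                sawMaybe = false;
                failed = false;
                return this;
            }

            public override void Eval(MockRAMDirectory dir)
            {
                if (failed)
                    return;
                bool inApplyDeletes = StackContains("ApplyDeletes");
                if (sawMaybe && !inApplyDeletes)
                {
                    failed = true; // one-shot, so the retried Commit() succeeds
                    throw new System.IO.IOException("fail after applyDeletes");
                }
                if (inApplyDeletes)
                    sawMaybe = true;
            }

            private static bool StackContains(string methodName)
            {
                foreach (System.Diagnostics.StackFrame frame in new System.Diagnostics.StackTrace().GetFrames())
                {
                    System.Reflection.MethodBase m = frame.GetMethod();
                    if (m != null && m.Name == methodName)
                        return true;
                }
                return false;
            }
        }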
		public virtual void TestErrorInDocsWriterAdd()
		{
			
			MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
			
			// create a couple of files
			
			System.String[] keywords = new System.String[]{"1", "2"};
			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
			
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				MockRAMDirectory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				
				dir.FailOn(failure.Reset());
				
				for (int i = 0; i < keywords.Length; i++)
				{
					Document doc = new Document();
					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
					try
					{
						modifier.AddDocument(doc);
					}
					catch (System.IO.IOException io)
					{
						break;
					}
				}
				
				System.String[] startFiles = dir.ListAll();
				SegmentInfos infos = new SegmentInfos();
				infos.Read(dir);
				new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null, null);
				System.String[] endFiles = dir.ListAll();
				
				if (!SupportClass.CollectionsHelper.CompareStringArrays(startFiles, endFiles))
				{
					Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
				}
				
				modifier.Close();
			}
		}
		public virtual void TestErrorAfterApplyDeletes()
		{
			
			MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);
			
			// create a couple of files
			
			System.String[] keywords = new System.String[]{"1", "2"};
			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
			
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				MockRAMDirectory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				modifier.SetUseCompoundFile(true);
				modifier.SetMaxBufferedDeleteTerms(2);
				
				dir.FailOn(failure.Reset());
				
				for (int i = 0; i < keywords.Length; i++)
				{
					Document doc = new Document();
					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
					modifier.AddDocument(doc);
				}
				// flush (and commit if ac)
				
				modifier.Optimize();
				modifier.Commit();
				
				// one of the two files hits
				
				Term term = new Term("city", "Amsterdam");
				int hitCount = GetHitCount(dir, term);
				Assert.AreEqual(1, hitCount);
				
				// open the writer again (closed above)
				
				// delete the doc
				// max buf del terms is two, so this is buffered
				
				modifier.DeleteDocuments(term);
				
				// add a doc (needed for the !ac case; see below)
				// doc remains buffered
				
				Document doc2 = new Document();
				modifier.AddDocument(doc2);
				
				// commit the changes, the buffered deletes, and the new doc
				
				// The failure object will fail on the first write after the del
				// file gets created when processing the buffered delete
				
				// in the ac case, this will be when writing the new segments
				// files so we really don't need the new doc, but it's harmless
				
				// in the !ac case, a new segments file won't be created but in
				// this case, creation of the cfs file happens next so we need
				// the doc (to test that it's okay that we don't lose deletes if
				// failing while creating the cfs file)
				
				bool failed = false;
				try
				{
					modifier.Commit();
				}
				catch (System.IO.IOException ioe)
				{
					failed = true;
				}
				
				Assert.IsTrue(failed);
				
				// The commit above failed, so we need to retry it (which will
				// succeed, because the failure is a one-shot)
				
				modifier.Commit();
				
				hitCount = GetHitCount(dir, term);
				
				// Make sure the delete was successfully flushed:
				Assert.AreEqual(0, hitCount);
				
				modifier.Close();
				dir.Close();
			}
		}
		// Runs the test with one thread, using the specified failure
		// to trigger an IOException
		public virtual void _testSingleThreadFailure(MockRAMDirectory.Failure failure)
		{
			MockRAMDirectory dir = new MockRAMDirectory();
			
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
			writer.SetMaxBufferedDocs(2);
			Document doc = new Document();
			doc.Add(new Field("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
			
			for (int i = 0; i < 6; i++)
				writer.AddDocument(doc);
			
			dir.FailOn(failure);
			failure.SetDoFail();
			try
			{
				writer.AddDocument(doc);
				writer.AddDocument(doc);
				Assert.Fail("did not hit exception");
			}
			catch (System.IO.IOException)
			{
			}
			failure.ClearDoFail();
			writer.AddDocument(doc);
			writer.Close(false);
		}
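		// A hedged usage sketch for _testSingleThreadFailure: the hypothetical
		// FailWhenArmed failure below throws on every directory write while its
		// doFail flag is set, which is enough to trip the IOException path above
		// (the second AddDocument forces a flush of the two buffered docs).
		private class FailWhenArmed : MockRAMDirectory.Failure
		{
			public override void Eval(MockRAMDirectory dir)
			{
				if (doFail)
					throw new System.IO.IOException("simulated write failure");
			}
		}

		public virtual void TestSingleThreadFailureExample() // hypothetical caller
		{
			_testSingleThreadFailure(new FailWhenArmed());
		}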
		// Runs the test with multiple threads, using the specified
		// failure to trigger an IOException
		public virtual void _testMultipleThreadsFailure(MockRAMDirectory.Failure failure)
		{
			
			int NUM_THREADS = 3;
			
			for (int iter = 0; iter < 5; iter++)
			{
				MockRAMDirectory dir = new MockRAMDirectory();
				IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
				ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
				// We expect disk full exceptions in the merge threads
				cms.SetSuppressExceptions_ForNUnitTest();
				writer.SetMergeScheduler(cms);
				writer.SetMaxBufferedDocs(2);
				writer.SetMergeFactor(4);
				
				IndexerThread[] threads = new IndexerThread[NUM_THREADS];
				//bool diskFull = false;
				
				for (int i = 0; i < NUM_THREADS; i++)
					threads[i] = new IndexerThread(this, writer, true);
				
				for (int i = 0; i < NUM_THREADS; i++)
					threads[i].Start();
				
				try
				{
					System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 10));
				}
				catch (System.Threading.ThreadInterruptedException)
				{
					SupportClass.ThreadClass.Current().Interrupt();
				}
				
				dir.FailOn(failure);
				failure.SetDoFail();
				
				for (int i = 0; i < NUM_THREADS; i++)
				{
					while (true)
					{
						try
						{
							threads[i].Join();
							break;
						}
						catch (System.Threading.ThreadInterruptedException)
						{
							SupportClass.ThreadClass.Current().Interrupt();
						}
					}
					if (threads[i].IsAlive)
						Assert.Fail("thread seems to be hung");
					else
						Assert.IsTrue(threads[i].error == null, "hit unexpected Throwable");
				}
				
				bool success = false;
				try
				{
					writer.Close(false);
					success = true;
				}
				catch (System.IO.IOException)
				{
				}
				
				if (success)
				{
					IndexReader reader = IndexReader.Open(dir);
					for (int j = 0; j < reader.MaxDoc(); j++)
					{
						if (!reader.IsDeleted(j))
						{
							reader.Document(j);
							reader.GetTermFreqVectors(j);
						}
					}
					reader.Close();
				}
				
				dir.Close();
			}
		}
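		// The IndexerThread used above is defined elsewhere in the test class.
		// A simplified sketch, assuming it wraps SupportClass.ThreadClass as the
		// Lucene.NET tests do: it adds documents in a loop, treats IOExceptions
		// as expected once the failure is armed, and records anything else in
		// 'error' for the assertions above.
		private class IndexerThread : SupportClass.ThreadClass
		{
			internal System.Exception error;
			private readonly IndexWriter writer;
			private readonly bool noErrors;

			public IndexerThread(object enclosingInstance, IndexWriter writer, bool noErrors)
			{
				this.writer = writer;
				this.noErrors = noErrors;
			}

			public override void Run()
			{
				Document doc = new Document();
				doc.Add(new Field("field", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.TOKENIZED));
				for (int i = 0; i < 200; i++)
				{
					try
					{
						writer.AddDocument(doc);
					}
					catch (System.IO.IOException)
					{
						// expected once the MockRAMDirectory failure is armed
					}
					catch (System.Exception e)
					{
						if (noErrors)
							error = e; // anything else fails the test
						break;
					}
				}
			}
		}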
		public virtual void TestDocumentsWriterAbort()
		{
			MockRAMDirectory dir = new MockRAMDirectory();
			FailOnlyOnFlush failure = new FailOnlyOnFlush();
			failure.SetDoFail();
			dir.FailOn(failure);
			
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
			writer.SetMaxBufferedDocs(2);
			Document doc = new Document();
			System.String contents = "aa bb cc dd ee ff gg hh ii jj kk";
			doc.Add(new Field("content", contents, Field.Store.NO, Field.Index.TOKENIZED));
			bool hitError = false;
			for (int i = 0; i < 200; i++)
			{
				try
				{
					writer.AddDocument(doc);
				}
				catch (System.IO.IOException)
				{
					// only one flush should fail:
					Assert.IsFalse(hitError);
					hitError = true;
				}
			}
			Assert.IsTrue(hitError);
			writer.Close();
			IndexReader reader = IndexReader.Open(dir);
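			// 200 adds minus the two docs buffered in the one aborted flush = 198: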
			Assert.AreEqual(198, reader.DocFreq(new Term("content", "aa")));
			reader.Close();
		}
        public void TestCorruptionAfterDiskFullDuringMerge()
        {
            MockRAMDirectory dir = new MockRAMDirectory();
            dir.SetPreventDoubleWrite(false);
            IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            w.SetMergeScheduler(new SerialMergeScheduler());

            ((LogMergePolicy)w.GetMergePolicy()).SetMergeFactor(2);

            Document doc = new Document();
            doc.Add(new Field("f", "doctor who", Field.Store.YES, Field.Index.ANALYZED));
            w.AddDocument(doc);

            w.Commit();

            w.DeleteDocuments(new Term("f", "who"));
            w.AddDocument(doc);

            // disk fills up!
            FailTwiceDuringMerge ftdm = new FailTwiceDuringMerge();
            ftdm.SetDoFail();
            dir.FailOn(ftdm);

            try
            {
                w.Commit();
                Assert.Fail("fake disk full IOExceptions not hit");
            }
            catch (System.IO.IOException ioe)
            {
                // expected
                Assert.IsTrue(ftdm.didFail1 || ftdm.didFail2);
            }
            _TestUtil.CheckIndex(dir);
            ftdm.ClearDoFail();
            w.AddDocument(doc);
            w.Close();

            _TestUtil.CheckIndex(dir);
            dir.Close();
        }
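        // A sketch of FailTwiceDuringMerge, assuming it mirrors the Java test: while
        // armed it simulates disk-full twice, once inside SegmentMerger's term merge
        // and once while the deletes BitVector is written, recording which path was
        // hit in didFail1/didFail2 for the catch block above. The class and method
        // names checked are assumptions about this port.
        private class FailTwiceDuringMerge : MockRAMDirectory.Failure
        {
            internal bool didFail1;
            internal bool didFail2;

            public override void Eval(MockRAMDirectory dir)
            {
                if (!doFail)
                    return;
                foreach (System.Diagnostics.StackFrame frame in new System.Diagnostics.StackTrace().GetFrames())
                {
                    System.Reflection.MethodBase m = frame.GetMethod();
                    if (m == null)
                        continue;
                    string type = m.DeclaringType == null ? "" : m.DeclaringType.Name;
                    if (!didFail1 && type == "SegmentMerger" && m.Name == "MergeTerms")
                    {
                        didFail1 = true;
                        throw new System.IO.IOException("fake disk full during mergeTerms");
                    }
                    if (!didFail2 && type == "BitVector" && m.Name == "Write")
                    {
                        didFail2 = true;
                        throw new System.IO.IOException("fake disk full while writing BitVector");
                    }
                }
            }
        }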
Example #15
		// Runs the test with multiple threads, using the specified
		// failure to trigger an IOException
		public virtual void _testMultipleThreadsFailure(MockRAMDirectory.Failure failure)
		{
			
			int NUM_THREADS = 3;
			
			for (int iter = 0; iter < 5; iter++)
			{
				MockRAMDirectory dir = new MockRAMDirectory();
				IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
				ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
				// We expect disk full exceptions in the merge threads
				cms.SetSuppressExceptions();
				writer.SetMergeScheduler(cms);
				writer.SetMaxBufferedDocs(2);
				writer.SetMergeFactor(4);
				
				IndexerThread[] threads = new IndexerThread[NUM_THREADS];
				
				for (int i = 0; i < NUM_THREADS; i++)
					threads[i] = new IndexerThread(this, writer, true);
				
				for (int i = 0; i < NUM_THREADS; i++)
					threads[i].Start();
				
				System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 10));
				
				dir.FailOn(failure);
				failure.SetDoFail();
				
				for (int i = 0; i < NUM_THREADS; i++)
				{
					threads[i].Join();
					Assert.IsTrue(threads[i].error == null, "hit unexpected Throwable");
				}
				
				bool success = false;
				try
				{
					writer.Close(false);
					success = true;
				}
				catch (System.IO.IOException ioe)
				{
					failure.ClearDoFail();
					writer.Close(false);
				}
				
				if (success)
				{
					IndexReader reader = IndexReader.Open(dir);
					for (int j = 0; j < reader.MaxDoc(); j++)
					{
						if (!reader.IsDeleted(j))
						{
							reader.Document(j);
							reader.GetTermFreqVectors(j);
						}
					}
					reader.Close();
				}
				
				dir.Close();
			}
		}
Example #16
		public virtual void TestExceptionsDuringCommit()
		{
			MockRAMDirectory dir = new MockRAMDirectory();
			FailOnlyInCommit failure = new FailOnlyInCommit();
			IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
			Document doc = new Document();
			doc.Add(new Field("field", "a field", Field.Store.YES, Field.Index.ANALYZED));
			w.AddDocument(doc);
			dir.FailOn(failure);
			try
			{
				w.Close();
				Assert.Fail();
			}
			catch (System.IO.IOException ioe)
			{
				Assert.Fail("expected only RuntimeException");
			}
			catch (System.SystemException re)
			{
				// Expected
			}
			Assert.IsTrue(failure.fail1 && failure.fail2);
			w.Abort();
			dir.Close();
		}
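		// A sketch of FailOnlyInCommit, assuming it follows the Java original: the
		// first write on the commit path fails with a SystemException (this port's
		// stand-in for Java's RuntimeException, matching the catch above), and the
		// file delete that follows fails with an IOException; fail1/fail2 record
		// both. The method names checked are assumptions about this port.
		private class FailOnlyInCommit : MockRAMDirectory.Failure
		{
			internal bool fail1;
			internal bool fail2;

			public override void Eval(MockRAMDirectory dir)
			{
				bool isCommit = false;
				bool isDelete = false;
				foreach (System.Diagnostics.StackFrame frame in new System.Diagnostics.StackTrace().GetFrames())
				{
					System.Reflection.MethodBase m = frame.GetMethod();
					if (m == null)
						continue;
					if (m.Name == "PrepareCommit")
						isCommit = true;
					if (m.Name == "DeleteFile")
						isDelete = true;
				}
				if (isCommit)
				{
					if (!isDelete)
					{
						fail1 = true;
						throw new System.SystemException("now fail first");
					}
					fail2 = true;
					throw new System.IO.IOException("now fail during delete");
				}
			}
		}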
Example #17
		public virtual void TestExceptionDuringSync()
		{
			MockRAMDirectory dir = new MockRAMDirectory();
			FailOnlyInSync failure = new FailOnlyInSync();
			dir.FailOn(failure);
			
			IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
			failure.SetDoFail();
			
			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
			// We expect sync exceptions in the merge threads
			cms.SetSuppressExceptions();
			writer.SetMergeScheduler(cms);
			writer.SetMaxBufferedDocs(2);
			writer.SetMergeFactor(5);
			
			for (int i = 0; i < 23; i++)
				AddDoc(writer);
			
			cms.Sync();
			Assert.IsTrue(failure.didFail);
			failure.ClearDoFail();
			writer.Close();
			
			IndexReader reader = IndexReader.Open(dir);
			Assert.AreEqual(23, reader.NumDocs());
			reader.Close();
			dir.Close();
		}
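		// A sketch of FailOnlyInSync and the AddDoc helper, assuming they mirror
		// the Java test: while armed, any write issued from inside the directory's
		// Sync call throws, and didFail records that the path was exercised. The
		// "Sync" method-name check is an assumption about this port.
		private class FailOnlyInSync : MockRAMDirectory.Failure
		{
			internal bool didFail;

			public override void Eval(MockRAMDirectory dir)
			{
				if (!doFail)
					return;
				foreach (System.Diagnostics.StackFrame frame in new System.Diagnostics.StackTrace().GetFrames())
				{
					System.Reflection.MethodBase m = frame.GetMethod();
					if (m != null && m.Name == "Sync")
					{
						didFail = true;
						throw new System.IO.IOException("now failing on purpose during sync");
					}
				}
			}
		}

		private void AddDoc(IndexWriter writer)
		{
			Document doc = new Document();
			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
			writer.AddDocument(doc);
		}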