SetMaxBufferedDeleteTerms() public method

Determines the minimal number of delete terms required before the buffered in-memory delete terms are applied and flushed. If there are documents buffered in memory at the time, they are merged and a new segment is created.

Disabled by default (writer flushes by RAM usage).

Throws an ArgumentException if maxBufferedDeleteTerms is enabled but smaller than 1.

public SetMaxBufferedDeleteTerms ( int maxBufferedDeleteTerms ) : void
maxBufferedDeleteTerms int
return void
Ejemplo n.º 1
0
        // Exercises deletes that stay buffered in RAM: deletes are issued by
        // Term when t == 0 and by TermQuery when t == 1; pass toggles the
        // writer's autoCommit mode.  AddDoc/GetHitCount are helpers defined
        // elsewhere in this test class.
        public virtual void  TestRAMDeletes()
        {
            for (int pass = 0; pass < 2; pass++)
            {
                for (int t = 0; t < 2; t++)
                {
                    bool        autoCommit = (0 == pass);
                    Directory   dir        = new MockRAMDirectory();
                    IndexWriter modifier   = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
                    // Buffers sized so neither docs nor deletes auto-flush during the test.
                    modifier.SetMaxBufferedDocs(4);
                    modifier.SetMaxBufferedDeleteTerms(4);

                    int id            = 0;
                    int value_Renamed = 100;

                    AddDoc(modifier, ++id, value_Renamed);
                    if (0 == t)
                    {
                        modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
                    }
                    else
                    {
                        modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
                    }
                    AddDoc(modifier, ++id, value_Renamed);
                    if (0 == t)
                    {
                        modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
                        // Two delete operations are buffered, but both target the
                        // same unique term, so the distinct-term count stays at 1.
                        Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
                        Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
                    }
                    else
                    {
                        modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
                    }

                    AddDoc(modifier, ++id, value_Renamed);
                    // Everything is still buffered in RAM: no segment written yet.
                    Assert.AreEqual(0, modifier.GetSegmentCount());
                    modifier.Flush();

                    modifier.Commit();

                    // Only the last-added doc (id == 3, never deleted) remains.
                    IndexReader reader = IndexReader.Open(dir);
                    Assert.AreEqual(1, reader.NumDocs());

                    int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
                    Assert.AreEqual(1, hitCount);
                    reader.Close();
                    modifier.Close();
                    dir.Close();
                }
            }
        }
Ejemplo n.º 2
0
        /// <summary>
        /// With maxBufferedDeleteTerms = 1, every buffered delete -- even of the
        /// same term -- must trigger its own delete flush; verify three flushes.
        /// </summary>
        public virtual void TestMaxBufferedDeletes()
        {
            Directory dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                                 IndexWriter.MaxFieldLength.UNLIMITED, null);
            writer.SetMaxBufferedDeleteTerms(1);

            // Delete the same term three times; each call should flush deletes.
            for (int i = 0; i < 3; i++)
            {
                writer.DeleteDocuments(null, new Term("foobar", "1"));
            }
            Assert.AreEqual(3, writer.GetFlushDeletesCount());

            writer.Close();
            dir.Close();
        }
Ejemplo n.º 3
0
        // Verifies DeleteAll() semantics: it removes both committed docs and
        // docs still buffered in RAM, but nothing becomes visible on disk until
        // the next Commit(); docs added after DeleteAll() survive.  pass
        // toggles autoCommit; AddDoc/UpdateDoc are helpers defined elsewhere.
        public virtual void  TestDeleteAll()
        {
            for (int pass = 0; pass < 2; pass++)
            {
                bool        autoCommit = (0 == pass);
                Directory   dir        = new MockRAMDirectory();
                IndexWriter modifier   = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
                modifier.SetMaxBufferedDocs(2);
                modifier.SetMaxBufferedDeleteTerms(2);

                int id            = 0;
                int value_Renamed = 100;

                for (int i = 0; i < 7; i++)
                {
                    AddDoc(modifier, ++id, value_Renamed);
                }
                modifier.Commit();

                IndexReader reader = IndexReader.Open(dir);
                Assert.AreEqual(7, reader.NumDocs());
                reader.Close();

                // Add 1 doc (so we will have something buffered)
                AddDoc(modifier, 99, value_Renamed);

                // Delete all
                modifier.DeleteAll();

                // Delete all shouldn't be on disk yet
                reader = IndexReader.Open(dir);
                Assert.AreEqual(7, reader.NumDocs());
                reader.Close();

                // Add a doc and update a doc (after the deleteAll, before the commit)
                AddDoc(modifier, 101, value_Renamed);
                UpdateDoc(modifier, 102, value_Renamed);

                // commit the delete all
                modifier.Commit();

                // Only the two docs added/updated after DeleteAll remain.
                reader = IndexReader.Open(dir);
                Assert.AreEqual(2, reader.NumDocs());
                reader.Close();

                modifier.Close();
                dir.Close();
            }
        }
Ejemplo n.º 4
0
        // Deletes committed documents one Term at a time and then via the
        // Term[] batch overload, verifying the post-commit doc count after each
        // round (7 -> 5 -> 2).  pass toggles autoCommit; AddDoc is a helper
        // defined elsewhere in this test class.
        public virtual void  TestBatchDeletes()
        {
            for (int pass = 0; pass < 2; pass++)
            {
                bool        autoCommit = (0 == pass);
                Directory   dir        = new MockRAMDirectory();
                IndexWriter modifier   = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
                modifier.SetMaxBufferedDocs(2);
                modifier.SetMaxBufferedDeleteTerms(2);

                int id            = 0;
                int value_Renamed = 100;

                for (int i = 0; i < 7; i++)
                {
                    AddDoc(modifier, ++id, value_Renamed);
                }
                modifier.Commit();

                IndexReader reader = IndexReader.Open(dir);
                Assert.AreEqual(7, reader.NumDocs());
                reader.Close();

                // Delete docs with id 1 and 2, one call per term.
                id = 0;
                modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
                modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));

                modifier.Commit();

                reader = IndexReader.Open(dir);
                Assert.AreEqual(5, reader.NumDocs());
                reader.Close();

                // Delete docs with id 3, 4 and 5 in a single batch call.
                Term[] terms = new Term[3];
                for (int i = 0; i < terms.Length; i++)
                {
                    terms[i] = new Term("id", System.Convert.ToString(++id));
                }
                modifier.DeleteDocuments(terms);
                modifier.Commit();
                reader = IndexReader.Open(dir);
                Assert.AreEqual(2, reader.NumDocs());
                reader.Close();

                modifier.Close();
                dir.Close();
            }
        }
Ejemplo n.º 5
0
 /// <summary>
 /// Repeats the max-buffered-deletes check for both autoCommit modes: with
 /// maxBufferedDeleteTerms = 1 every DeleteDocuments call flushes deletes.
 /// </summary>
 public virtual void  TestMaxBufferedDeletes()
 {
     for (int pass = 0; pass < 2; pass++)
     {
         bool autoCommit = (0 == pass);
         Directory dir = new MockRAMDirectory();
         IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
         writer.SetMaxBufferedDeleteTerms(1);

         // Three identical deletes -> three delete flushes.
         for (int i = 0; i < 3; i++)
         {
             writer.DeleteDocuments(new Term("foobar", "1"));
         }
         Assert.AreEqual(3, writer.GetFlushDeletesCount());

         writer.Close();
         dir.Close();
     }
 }
Ejemplo n.º 6
0
        // Batch-delete variant using the overloads that take an extra first/last
        // argument (passed as null here) -- presumably a state/context parameter
        // of this Lucene.NET port; confirm against IndexWriter's signature.
        // Verifies post-commit doc counts of 7 -> 5 -> 2 as deletes are applied
        // singly and then via the Term[] batch overload.
        public virtual void TestBatchDeletes()
        {
            Directory   dir      = new MockRAMDirectory();
            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                                   IndexWriter.MaxFieldLength.UNLIMITED, null);

            modifier.SetMaxBufferedDocs(2);
            modifier.SetMaxBufferedDeleteTerms(2);

            int id            = 0;
            int value_Renamed = 100;

            for (int i = 0; i < 7; i++)
            {
                AddDoc(modifier, ++id, value_Renamed);
            }
            modifier.Commit(null);

            IndexReader reader = IndexReader.Open(dir, true, null);

            Assert.AreEqual(7, reader.NumDocs());
            reader.Close();

            // Delete docs with id 1 and 2, one call per term.
            id = 0;
            modifier.DeleteDocuments(null, new Term("id", System.Convert.ToString(++id)));
            modifier.DeleteDocuments(null, new Term("id", System.Convert.ToString(++id)));

            modifier.Commit(null);

            reader = IndexReader.Open(dir, true, null);
            Assert.AreEqual(5, reader.NumDocs());
            reader.Close();

            // Delete docs with id 3, 4 and 5 in a single batch call.
            Term[] terms = new Term[3];
            for (int i = 0; i < terms.Length; i++)
            {
                terms[i] = new Term("id", System.Convert.ToString(++id));
            }
            modifier.DeleteDocuments(null, terms);
            modifier.Commit(null);
            reader = IndexReader.Open(dir, true, null);
            Assert.AreEqual(2, reader.NumDocs());
            reader.Close();

            modifier.Close();
            dir.Close();
        }
Ejemplo n.º 7
0
        // DeleteAll() as seen through a near-real-time reader obtained from the
        // writer: the NRT reader sees 0 docs immediately after DeleteAll(), but
        // a Rollback() restores the last committed state (7 docs).  The null
        // arguments are presumably this port's state/context parameter --
        // confirm against IndexWriter's signature.
        public virtual void  TestDeleteAllNRT()
        {
            Directory   dir      = new MockRAMDirectory();
            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED, null);

            modifier.SetMaxBufferedDocs(2);
            modifier.SetMaxBufferedDeleteTerms(2);

            int id            = 0;
            int value_Renamed = 100;

            for (int i = 0; i < 7; i++)
            {
                AddDoc(modifier, ++id, value_Renamed);
            }
            modifier.Commit(null);

            IndexReader reader = modifier.GetReader(null);

            Assert.AreEqual(7, reader.NumDocs());
            reader.Close();

            AddDoc(modifier, ++id, value_Renamed);
            AddDoc(modifier, ++id, value_Renamed);

            // Delete all
            modifier.DeleteAll(null);

            // The NRT reader reflects the uncommitted DeleteAll immediately.
            reader = modifier.GetReader(null);
            Assert.AreEqual(0, reader.NumDocs());
            reader.Close();


            // Roll it back
            modifier.Rollback(null);
            modifier.Close();

            // Validate that the docs are still there
            reader = IndexReader.Open(dir, true, null);
            Assert.AreEqual(7, reader.NumDocs());
            reader.Close();

            dir.Close();
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Indexes two small documents, then deletes one by a "city" term and
        /// verifies the hit count drops from 1 to 0 after commit.  Runs once
        /// with autoCommit enabled and once without.
        /// </summary>
        public virtual void  TestSimpleCase()
        {
            System.String[] keywords  = new System.String[] { "1", "2" };
            System.String[] unindexed = new System.String[] { "Netherlands", "Italy" };
            System.String[] unstored  = new System.String[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
            System.String[] text      = new System.String[] { "Amsterdam", "Venice" };

            for (int pass = 0; pass < 2; pass++)
            {
                bool useAutoCommit = (0 == pass);

                Directory   directory = new MockRAMDirectory();
                IndexWriter modifier  = new IndexWriter(directory, useAutoCommit, new WhitespaceAnalyzer(), true);
                modifier.SetUseCompoundFile(true);
                modifier.SetMaxBufferedDeleteTerms(1);

                for (int docIndex = 0; docIndex < keywords.Length; docIndex++)
                {
                    Document doc = new Document();
                    doc.Add(new Field("id", keywords[docIndex], Field.Store.YES, Field.Index.NOT_ANALYZED));
                    doc.Add(new Field("country", unindexed[docIndex], Field.Store.YES, Field.Index.NO));
                    doc.Add(new Field("contents", unstored[docIndex], Field.Store.NO, Field.Index.ANALYZED));
                    doc.Add(new Field("city", text[docIndex], Field.Store.YES, Field.Index.ANALYZED));
                    modifier.AddDocument(doc);
                }
                modifier.Optimize();
                modifier.Commit();

                // One hit before the delete, none after.
                Term amsterdam = new Term("city", "Amsterdam");
                Assert.AreEqual(1, GetHitCount(directory, amsterdam));
                modifier.DeleteDocuments(amsterdam);
                modifier.Commit();
                Assert.AreEqual(0, GetHitCount(directory, amsterdam));

                modifier.Close();
                directory.Close();
            }
        }
Ejemplo n.º 9
0
        // Deletes documents that have already been flushed to segments (not
        // merely RAM-buffered): 7 docs are committed, then all are deleted by
        // their shared "value" term and the committed index is empty.  pass
        // toggles autoCommit; AddDoc is a helper defined elsewhere.
        public virtual void  TestNonRAMDelete()
        {
            for (int pass = 0; pass < 2; pass++)
            {
                bool autoCommit = (0 == pass);

                Directory   dir      = new MockRAMDirectory();
                IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
                // Tiny buffers force docs onto disk as segments quickly.
                modifier.SetMaxBufferedDocs(2);
                modifier.SetMaxBufferedDeleteTerms(2);

                int id            = 0;
                int value_Renamed = 100;

                for (int i = 0; i < 7; i++)
                {
                    AddDoc(modifier, ++id, value_Renamed);
                }
                modifier.Commit();

                // All buffered docs were flushed into at least one segment.
                Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
                Assert.IsTrue(0 < modifier.GetSegmentCount());

                modifier.Commit();

                IndexReader reader = IndexReader.Open(dir);
                Assert.AreEqual(7, reader.NumDocs());
                reader.Close();

                // Every doc shares the same "value" field, so one term deletes all.
                modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));

                modifier.Commit();

                reader = IndexReader.Open(dir);
                Assert.AreEqual(0, reader.NumDocs());
                reader.Close();
                modifier.Close();
                dir.Close();
            }
        }
Ejemplo n.º 10
0
        // Non-RAM delete variant using the overloads that take an extra null
        // argument -- presumably this port's state/context parameter; confirm
        // against IndexWriter's signature.  7 committed docs are deleted by
        // their shared "value" term, leaving an empty committed index.
        public virtual void TestNonRAMDelete()
        {
            Directory   dir      = new MockRAMDirectory();
            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                                   IndexWriter.MaxFieldLength.UNLIMITED, null);

            // Tiny buffers force docs onto disk as segments quickly.
            modifier.SetMaxBufferedDocs(2);
            modifier.SetMaxBufferedDeleteTerms(2);

            int id            = 0;
            int value_Renamed = 100;

            for (int i = 0; i < 7; i++)
            {
                AddDoc(modifier, ++id, value_Renamed);
            }
            modifier.Commit(null);

            // All buffered docs were flushed into at least one segment.
            Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
            Assert.IsTrue(0 < modifier.GetSegmentCount());

            modifier.Commit(null);

            IndexReader reader = IndexReader.Open(dir, true, null);

            Assert.AreEqual(7, reader.NumDocs());
            reader.Close();

            // Every doc shares the same "value" field, so one term deletes all.
            modifier.DeleteDocuments(null, new Term("value", System.Convert.ToString(value_Renamed)));

            modifier.Commit(null);

            reader = IndexReader.Open(dir, true, null);
            Assert.AreEqual(0, reader.NumDocs());
            reader.Close();
            modifier.Close();
            dir.Close();
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Mixes buffered adds with a buffered delete across a commit boundary
        /// and verifies that only the five value=100 docs survive.  Also fixes
        /// a resource leak: the original never closed the reader or the
        /// directory, unlike every sibling test in this class.
        /// </summary>
        public virtual void TestBothDeletes()
        {
            Directory   dir      = new MockRAMDirectory();
            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);

            // Buffers large enough that nothing auto-flushes mid-test.
            modifier.SetMaxBufferedDocs(100);
            modifier.SetMaxBufferedDeleteTerms(100);

            int id            = 0;
            int value_Renamed = 100;

            // Five docs with value=100 ...
            for (int i = 0; i < 5; i++)
            {
                AddDoc(modifier, ++id, value_Renamed);
            }

            // ... and five docs with value=200, all committed.
            value_Renamed = 200;
            for (int i = 0; i < 5; i++)
            {
                AddDoc(modifier, ++id, value_Renamed);
            }
            modifier.Commit();

            // Buffer five more value=200 docs, then delete every value=200 doc
            // -- both the committed ones and the still-buffered ones.
            for (int i = 0; i < 5; i++)
            {
                AddDoc(modifier, ++id, value_Renamed);
            }
            modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));

            modifier.Commit();

            IndexReader reader = IndexReader.Open(dir, true);

            Assert.AreEqual(5, reader.NumDocs());
            // Fix: release the reader and directory (previously leaked).
            reader.Close();
            modifier.Close();
            dir.Close();
        }
Ejemplo n.º 12
0
		// Verifies DeleteAll() semantics: it removes both committed docs and
		// docs still buffered in RAM, but nothing becomes visible on disk until
		// the next Commit(); docs added after DeleteAll() survive.  pass
		// toggles autoCommit; AddDoc/UpdateDoc are helpers defined elsewhere.
		public virtual void  TestDeleteAll()
		{
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				Directory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				modifier.SetMaxBufferedDocs(2);
				modifier.SetMaxBufferedDeleteTerms(2);
				
				int id = 0;
				int value_Renamed = 100;
				
				for (int i = 0; i < 7; i++)
				{
					AddDoc(modifier, ++id, value_Renamed);
				}
				modifier.Commit();
				
				IndexReader reader = IndexReader.Open(dir);
				Assert.AreEqual(7, reader.NumDocs());
				reader.Close();
				
				// Add 1 doc (so we will have something buffered)
				AddDoc(modifier, 99, value_Renamed);
				
				// Delete all
				modifier.DeleteAll();
				
				// Delete all shouldn't be on disk yet
				reader = IndexReader.Open(dir);
				Assert.AreEqual(7, reader.NumDocs());
				reader.Close();
				
				// Add a doc and update a doc (after the deleteAll, before the commit)
				AddDoc(modifier, 101, value_Renamed);
				UpdateDoc(modifier, 102, value_Renamed);
				
				// commit the delete all
				modifier.Commit();
				
				// Only the two docs added/updated after DeleteAll remain.
				reader = IndexReader.Open(dir);
				Assert.AreEqual(2, reader.NumDocs());
				reader.Close();
				
				modifier.Close();
				dir.Close();
			}
		}
Ejemplo n.º 13
0
		// DeleteAll() as seen through a near-real-time reader obtained from
		// the writer: the NRT reader sees 0 docs immediately after DeleteAll(),
		// but Rollback() restores the last committed state (7 docs).
		public virtual void  TestDeleteAllNRT()
		{
			Directory dir = new MockRAMDirectory();
			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
			modifier.SetMaxBufferedDocs(2);
			modifier.SetMaxBufferedDeleteTerms(2);
			
			int id = 0;
			int value_Renamed = 100;
			
			for (int i = 0; i < 7; i++)
			{
				AddDoc(modifier, ++id, value_Renamed);
			}
			modifier.Commit();
			
			IndexReader reader = modifier.GetReader();
			Assert.AreEqual(7, reader.NumDocs());
			reader.Close();
			
			AddDoc(modifier, ++id, value_Renamed);
			AddDoc(modifier, ++id, value_Renamed);
			
			// Delete all
			modifier.DeleteAll();
			
			// The NRT reader reflects the uncommitted DeleteAll immediately.
			reader = modifier.GetReader();
			Assert.AreEqual(0, reader.NumDocs());
			reader.Close();
			
			
			// Roll it back
			modifier.Rollback();
			modifier.Close();
			
			// Validate that the docs are still there
			reader = IndexReader.Open(dir);
			Assert.AreEqual(7, reader.NumDocs());
			reader.Close();
			
			dir.Close();
		}
Ejemplo n.º 14
0
		/// <summary>
		/// Mixes buffered adds with a buffered delete across a commit boundary,
		/// for both autoCommit modes, and verifies that only the five value=100
		/// docs survive.  Also fixes a resource leak: the original never closed
		/// the reader or the directory inside the loop.
		/// </summary>
		public virtual void  TestBothDeletes()
		{
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				
				Directory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				// Buffers large enough that nothing auto-flushes mid-test.
				modifier.SetMaxBufferedDocs(100);
				modifier.SetMaxBufferedDeleteTerms(100);
				
				int id = 0;
				int value_Renamed = 100;
				
				// Five docs with value=100 ...
				for (int i = 0; i < 5; i++)
				{
					AddDoc(modifier, ++id, value_Renamed);
				}
				
				// ... and five docs with value=200, all committed.
				value_Renamed = 200;
				for (int i = 0; i < 5; i++)
				{
					AddDoc(modifier, ++id, value_Renamed);
				}
				modifier.Commit();
				
				// Buffer five more value=200 docs, then delete every value=200
				// doc -- both the committed ones and the still-buffered ones.
				for (int i = 0; i < 5; i++)
				{
					AddDoc(modifier, ++id, value_Renamed);
				}
				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
				
				modifier.Commit();
				
				IndexReader reader = IndexReader.Open(dir);
				Assert.AreEqual(5, reader.NumDocs());
				// Fix: release the reader and directory (previously leaked).
				reader.Close();
				modifier.Close();
				dir.Close();
			}
		}
Ejemplo n.º 15
0
		// Deletes committed documents one Term at a time and then via the
		// Term[] batch overload, verifying the post-commit doc count after each
		// round (7 -> 5 -> 2).  pass toggles autoCommit; AddDoc is a helper
		// defined elsewhere in this test class.
		public virtual void  TestBatchDeletes()
		{
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				Directory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				modifier.SetMaxBufferedDocs(2);
				modifier.SetMaxBufferedDeleteTerms(2);
				
				int id = 0;
				int value_Renamed = 100;
				
				for (int i = 0; i < 7; i++)
				{
					AddDoc(modifier, ++id, value_Renamed);
				}
				modifier.Commit();
				
				IndexReader reader = IndexReader.Open(dir);
				Assert.AreEqual(7, reader.NumDocs());
				reader.Close();
				
				// Delete docs with id 1 and 2, one call per term.
				id = 0;
				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
				
				modifier.Commit();
				
				reader = IndexReader.Open(dir);
				Assert.AreEqual(5, reader.NumDocs());
				reader.Close();
				
				// Delete docs with id 3, 4 and 5 in a single batch call.
				Term[] terms = new Term[3];
				for (int i = 0; i < terms.Length; i++)
				{
					terms[i] = new Term("id", System.Convert.ToString(++id));
				}
				modifier.DeleteDocuments(terms);
				modifier.Commit();
				reader = IndexReader.Open(dir);
				Assert.AreEqual(2, reader.NumDocs());
				reader.Close();
				
				modifier.Close();
				dir.Close();
			}
		}
Ejemplo n.º 16
0
		/// <summary>
		/// For both autoCommit modes, maxBufferedDeleteTerms = 1 forces a
		/// delete flush on every DeleteDocuments call; expect three flushes.
		/// </summary>
		public virtual void  TestMaxBufferedDeletes()
		{
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				Directory dir = new MockRAMDirectory();
				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				writer.SetMaxBufferedDeleteTerms(1);
				
				// Same term deleted three times; each call should flush.
				Term term = new Term("foobar", "1");
				writer.DeleteDocuments(term);
				writer.DeleteDocuments(term);
				writer.DeleteDocuments(term);
				Assert.AreEqual(3, writer.GetFlushDeletesCount());
				
				writer.Close();
				dir.Close();
			}
		}
Ejemplo n.º 17
0
		// Exercises deletes that stay buffered in RAM: deletes are issued by
		// Term when t == 0 and by TermQuery when t == 1; pass toggles the
		// writer's autoCommit mode.  AddDoc/GetHitCount are helpers defined
		// elsewhere in this test class.
		public virtual void  TestRAMDeletes()
		{
			for (int pass = 0; pass < 2; pass++)
			{
				for (int t = 0; t < 2; t++)
				{
					bool autoCommit = (0 == pass);
					Directory dir = new MockRAMDirectory();
					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
					// Buffers sized so neither docs nor deletes auto-flush during the test.
					modifier.SetMaxBufferedDocs(4);
					modifier.SetMaxBufferedDeleteTerms(4);
					
					int id = 0;
					int value_Renamed = 100;
					
					AddDoc(modifier, ++id, value_Renamed);
					if (0 == t)
						modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
					else
						modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
					AddDoc(modifier, ++id, value_Renamed);
					if (0 == t)
					{
						modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
						// Two delete operations are buffered, but both target the
						// same unique term, so the distinct-term count stays at 1.
						Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
						Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
					}
					else
						modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
					
					AddDoc(modifier, ++id, value_Renamed);
					// Everything is still buffered in RAM: no segment written yet.
					Assert.AreEqual(0, modifier.GetSegmentCount());
					modifier.Flush();
					
					modifier.Commit();
					
					// Only the last-added doc (id == 3, never deleted) remains.
					IndexReader reader = IndexReader.Open(dir);
					Assert.AreEqual(1, reader.NumDocs());
					
					int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
					Assert.AreEqual(1, hitCount);
					reader.Close();
					modifier.Close();
					dir.Close();
				}
			}
		}
Ejemplo n.º 18
0
		// Verifies that SetMaxBufferedDeleteTerms / SetRAMBufferSizeMB can be
		// toggled on a live writer and that delete flushing follows whichever
		// trigger is currently enabled.  The segments-file generation number is
		// sampled after each delete: an unchanged generation means no flush
		// occurred, a larger one means a flush wrote new files.
		public virtual void  TestChangingRAMBuffer2()
		{
			RAMDirectory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			// Start with count-based triggers only (RAM-based flushing disabled).
			writer.SetMaxBufferedDocs(10);
			writer.SetMaxBufferedDeleteTerms(10);
			writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
			
			for (int j = 1; j < 52; j++)
			{
				Document doc = new Document();
				doc.Add(new Field("field", "aaa" + j, Field.Store.YES, Field.Index.TOKENIZED));
				writer.AddDocument(doc);
			}
			
			long lastGen = - 1;
			for (int j = 1; j < 52; j++)
			{
				writer.DeleteDocuments(new Term("field", "aaa" + j));
				_TestUtil.SyncConcurrentMerges(writer);
				long gen = SegmentInfos.GenerationFromSegmentsFileName(SegmentInfos.GetCurrentSegmentFileName(dir.List()));
				if (j == 1)
					lastGen = gen;
				else if (j < 10)
				{
					// No new files should be created
					Assert.AreEqual(gen, lastGen);
				}
				else if (10 == j)
				{
					// Tenth delete hits maxBufferedDeleteTerms == 10: flush expected.
					Assert.IsTrue(gen > lastGen);
					lastGen = gen;
					// Switch trigger: tiny RAM limit, delete-term count disabled.
					writer.SetRAMBufferSizeMB(0.000001);
					writer.SetMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
				}
				else if (j < 20)
				{
					// The tiny RAM budget forces a flush on every delete.
					Assert.IsTrue(gen > lastGen);
					lastGen = gen;
				}
				else if (20 == j)
				{
					// Large RAM limit + no delete-term limit: no auto flushes.
					writer.SetRAMBufferSizeMB(16);
					writer.SetMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
					lastGen = gen;
				}
				else if (j < 30)
				{
					Assert.AreEqual(gen, lastGen);
				}
				else if (30 == j)
				{
					// Back to the tiny RAM limit: flushing resumes.
					writer.SetRAMBufferSizeMB(0.000001);
					writer.SetMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
				}
				else if (j < 40)
				{
					Assert.IsTrue(gen > lastGen);
					lastGen = gen;
				}
				else if (40 == j)
				{
					// Count-based trigger again: flush only after 10 buffered terms.
					writer.SetMaxBufferedDeleteTerms(10);
					writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
					lastGen = gen;
				}
				else if (j < 50)
				{
					Assert.AreEqual(gen, lastGen);
					writer.SetMaxBufferedDeleteTerms(10);
					writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
				}
				else if (50 == j)
				{
					// Tenth buffered delete term since j == 40: flush expected.
					Assert.IsTrue(gen > lastGen);
				}
			}
			writer.Close();
			dir.Close();
		}
Ejemplo n.º 19
0
        /// <summary>
        /// Adds 7 documents with tiny buffers (forcing them onto disk as
        /// segments), deletes them all by their shared "value" term, and
        /// verifies the committed index ends up empty.
        /// </summary>
        public virtual void TestNonRAMDelete()
        {
            Directory dir = new MockRAMDirectory();
            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                                   IndexWriter.MaxFieldLength.UNLIMITED);
            modifier.SetMaxBufferedDocs(2);
            modifier.SetMaxBufferedDeleteTerms(2);

            int docId = 0;
            int docValue = 100;

            for (int i = 0; i < 7; i++)
            {
                AddDoc(modifier, ++docId, docValue);
            }
            modifier.Commit();

            // Everything was flushed into at least one on-disk segment.
            Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
            Assert.IsTrue(0 < modifier.GetSegmentCount());

            modifier.Commit();

            IndexReader reader = IndexReader.Open(dir, true);
            Assert.AreEqual(7, reader.NumDocs());
            reader.Close();

            // All docs share the same "value" field, so one term deletes all.
            modifier.DeleteDocuments(new Term("value", System.Convert.ToString(docValue)));

            modifier.Commit();

            reader = IndexReader.Open(dir, true);
            Assert.AreEqual(0, reader.NumDocs());
            reader.Close();
            modifier.Close();
            dir.Close();
        }
Ejemplo n.º 20
0
        /// <summary> Make sure if modifier tries to commit but hits disk full that modifier
        /// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
        /// </summary>
        private void TestOperationsOnDiskFull(bool updates)
        {
            bool debug       = false;
            Term searchTerm  = new Term("content", "aaa");
            int  START_COUNT = 157;
            int  END_COUNT   = 144;

            // First build up a starting index:
            MockRAMDirectory startDir = new MockRAMDirectory();
            IndexWriter      writer   = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED, null);

            for (int i = 0; i < 157; i++)
            {
                Document d = new Document();
                d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
                d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
                writer.AddDocument(d, null);
            }
            writer.Close();

            long diskUsage = startDir.SizeInBytes();
            long diskFree  = diskUsage + 10;

            System.IO.IOException err = null;

            bool done = false;

            // Iterate w/ ever increasing free disk space:
            while (!done)
            {
                MockRAMDirectory dir = new MockRAMDirectory(startDir);
                dir.SetPreventDoubleWrite(false);
                IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);

                modifier.SetMaxBufferedDocs(1000);         // use flush or close
                modifier.SetMaxBufferedDeleteTerms(1000);  // use flush or close

                // For each disk size, first try to commit against
                // dir that will hit random IOExceptions & disk
                // full; after, give it infinite disk space & turn
                // off random IOExceptions & retry w/ same reader:
                bool success = false;

                for (int x = 0; x < 2; x++)
                {
                    double        rate      = 0.1;
                    double        diskRatio = ((double)diskFree) / diskUsage;
                    long          thisDiskFree;
                    System.String testName;

                    if (0 == x)
                    {
                        thisDiskFree = diskFree;
                        if (diskRatio >= 2.0)
                        {
                            rate /= 2;
                        }
                        if (diskRatio >= 4.0)
                        {
                            rate /= 2;
                        }
                        if (diskRatio >= 6.0)
                        {
                            rate = 0.0;
                        }
                        if (debug)
                        {
                            System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
                        }
                        testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
                    }
                    else
                    {
                        thisDiskFree = 0;
                        rate         = 0.0;
                        if (debug)
                        {
                            System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
                        }
                        testName = "reader re-use after disk full";
                    }

                    dir.SetMaxSizeInBytes(thisDiskFree);
                    dir.SetRandomIOExceptionRate(rate, diskFree);

                    try
                    {
                        if (0 == x)
                        {
                            int docId = 12;
                            for (int i = 0; i < 13; i++)
                            {
                                if (updates)
                                {
                                    Document d = new Document();
                                    d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES,
                                                    Field.Index.NOT_ANALYZED));
                                    d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
                                    modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d, null);
                                }
                                else
                                {
                                    // deletes
                                    modifier.DeleteDocuments(null, new Term("id", System.Convert.ToString(docId)));
                                    // modifier.setNorm(docId, "contents", (float)2.0);
                                }
                                docId += 12;
                            }
                        }
                        modifier.Close();
                        success = true;
                        if (0 == x)
                        {
                            done = true;
                        }
                    }
                    catch (System.IO.IOException e)
                    {
                        if (debug)
                        {
                            System.Console.Out.WriteLine("  hit IOException: " + e);
                            System.Console.Out.WriteLine(e.StackTrace);
                        }
                        err = e;
                        if (1 == x)
                        {
                            System.Console.Error.WriteLine(e.StackTrace);
                            Assert.Fail(testName + " hit IOException after disk space was freed up");
                        }
                    }

                    // If the close() succeeded, make sure there are
                    // no unreferenced files.
                    if (success)
                    {
                        Lucene.Net.Util._TestUtil.CheckIndex(dir);
                        TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.close");
                    }

                    // Finally, verify index is not corrupt, and, if
                    // we succeeded, we see all docs changed, and if
                    // we failed, we see either all docs or no docs
                    // changed (transactional semantics):
                    IndexReader newReader = null;
                    try
                    {
                        newReader = IndexReader.Open((Directory)dir, true, null);
                    }
                    catch (System.IO.IOException e)
                    {
                        System.Console.Error.WriteLine(e.StackTrace);
                        Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
                    }

                    IndexSearcher searcher = new IndexSearcher(newReader);
                    ScoreDoc[]    hits     = null;
                    try
                    {
                        hits = searcher.Search(new TermQuery(searchTerm), null, 1000, null).ScoreDocs;
                    }
                    catch (System.IO.IOException e)
                    {
                        System.Console.Error.WriteLine(e.StackTrace);
                        Assert.Fail(testName + ": exception when searching: " + e);
                    }
                    int result2 = hits.Length;
                    if (success)
                    {
                        if (x == 0 && result2 != END_COUNT)
                        {
                            Assert.Fail(testName +
                                        ": method did not throw exception but hits.length for search on term 'aaa' is " +
                                        result2 + " instead of expected " + END_COUNT);
                        }
                        else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
                        {
                            // It's possible that the first exception was
                            // "recoverable" wrt pending deletes, in which
                            // case the pending deletes are retained and
                            // then re-flushing (with plenty of disk
                            // space) will succeed in flushing the
                            // deletes:
                            Assert.Fail(testName +
                                        ": method did not throw exception but hits.length for search on term 'aaa' is " +
                                        result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
                        }
                    }
                    else
                    {
                        // On hitting exception we still may have added
                        // all docs:
                        if (result2 != START_COUNT && result2 != END_COUNT)
                        {
                            System.Console.Error.WriteLine(err.StackTrace);
                            Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " +
                                        result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
                        }
                    }

                    searcher.Close();
                    newReader.Close();

                    if (result2 == END_COUNT)
                    {
                        break;
                    }
                }

                dir.Close();

                // Try again with 10 more bytes of free space:
                diskFree += 10;
            }
        }
Ejemplo n.º 21
0
        /// <summary>
        /// Installs a one-shot MockRAMDirectory failure that trips on the first
        /// write after the deletes file is created while buffered deletes are
        /// being applied, then verifies the commit fails exactly once, succeeds
        /// on retry, and that the buffered delete is not lost.
        /// </summary>
        public virtual void TestErrorAfterApplyDeletes()
        {
            MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);

            // create a couple of files

            System.String[] keywords  = new System.String[] { "1", "2" };
            System.String[] unindexed = new System.String[] { "Netherlands", "Italy" };
            System.String[] unstored  = new System.String[]
            { "Amsterdam has lots of bridges", "Venice has lots of canals" };
            System.String[] text = new System.String[] { "Amsterdam", "Venice" };

            MockRAMDirectory dir      = new MockRAMDirectory();
            IndexWriter      modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED, null);

            modifier.UseCompoundFile = true;
            modifier.SetMaxBufferedDeleteTerms(2);

            dir.FailOn(failure.Reset());

            for (int i = 0; i < keywords.Length; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
                doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
                doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
                modifier.AddDocument(doc, null);
            }
            // flush (and commit if ac)

            modifier.Optimize(null);
            modifier.Commit(null);

            // one of the two files hits

            Term term     = new Term("city", "Amsterdam");
            int  hitCount = GetHitCount(dir, term);

            Assert.AreEqual(1, hitCount);

            // delete the doc
            // max buf del terms is two, so this is buffered

            modifier.DeleteDocuments(null, term);

            // add a doc (needed for the !ac case; see below)
            // doc remains buffered

            Document doc2 = new Document();

            modifier.AddDocument(doc2, null);

            // commit the changes, the buffered deletes, and the new doc

            // The failure object will fail on the first write after the del
            // file gets created when processing the buffered delete

            // in the ac case, this will be when writing the new segments
            // files so we really don't need the new doc, but it's harmless

            // in the !ac case, a new segments file won't be created but in
            // this case, creation of the cfs file happens next so we need
            // the doc (to test that it's okay that we don't lose deletes if
            // failing while creating the cfs file)

            bool failed = false;

            try
            {
                modifier.Commit(null);
            }
            catch (System.IO.IOException)
            {
                // Expected: the injected one-shot failure fires during this
                // commit. The exception itself is not inspected, so it is not
                // bound to a variable (avoids unused-variable warning CS0168).
                failed = true;
            }

            Assert.IsTrue(failed);

            // The commit above failed, so we need to retry it (which will
            // succeed, because the failure is a one-shot)

            modifier.Commit(null);

            hitCount = GetHitCount(dir, term);

            // Make sure the delete was successfully flushed:
            Assert.AreEqual(0, hitCount);

            modifier.Close();
            dir.Close();
        }
		/// <summary>
		/// Deletes applied against already-flushed (non-RAM) segments: adds 7
		/// docs sharing one "value" term, flushes them to segments, deletes by
		/// that term, and verifies a reader then sees zero docs. Runs once with
		/// autoCommit and once without.
		/// </summary>
		public virtual void  TestNonRAMDelete()
		{
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (pass == 0);
				
				Directory dir = new RAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				modifier.SetMaxBufferedDocs(2);
				modifier.SetMaxBufferedDeleteTerms(2);
				
				int docId = 0;
				int fieldValue = 100;
				
				for (int n = 0; n < 7; n++)
				{
					AddDoc(modifier, ++docId, fieldValue);
				}
				modifier.Flush();
				
				// Everything buffered must now be on disk as segments.
				Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
				Assert.IsTrue(0 < modifier.GetSegmentCount());
				
				if (!autoCommit)
				{
					modifier.Close();
				}
				
				IndexReader reader = IndexReader.Open(dir);
				Assert.AreEqual(7, reader.NumDocs());
				reader.Close();
				
				if (!autoCommit)
				{
					// Re-open the writer (closed above) with the same limits.
					modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
					modifier.SetMaxBufferedDocs(2);
					modifier.SetMaxBufferedDeleteTerms(2);
				}
				
				System.String valueText = System.Convert.ToString(fieldValue);
				modifier.DeleteDocuments(new Term("value", valueText));
				modifier.DeleteDocuments(new Term("value", valueText));
				
				if (!autoCommit)
				{
					modifier.Close();
				}
				
				reader = IndexReader.Open(dir);
				Assert.AreEqual(0, reader.NumDocs());
				reader.Close();
				if (autoCommit)
				{
					modifier.Close();
				}
				dir.Close();
			}
		}
		/// <summary> Make sure if modifier tries to commit but hits disk full that modifier
		/// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
		/// </summary>
		/// <param name="updates">true: exercise UpdateDocument; false: exercise
		/// DeleteDocuments. Either way the index must behave transactionally.
		/// </param>
		private void  TestOperationsOnDiskFull(bool updates)
		{
			
			bool debug = false;
			Term searchTerm = new Term("content", "aaa");
			int START_COUNT = 157; // docs in the starting index
			int END_COUNT = 144;   // docs remaining once the 13 changes all apply
			
			// pass 0 runs with autoCommit=true, pass 1 with autoCommit=false.
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				
				// First build up a starting index:
				RAMDirectory startDir = new RAMDirectory();
				IndexWriter writer = new IndexWriter(startDir, autoCommit, new WhitespaceAnalyzer(), true);
				for (int i = 0; i < 157; i++)
				{
					Document d = new Document();
					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
					d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.TOKENIZED));
					writer.AddDocument(d);
				}
				writer.Close();
				
				long diskUsage = startDir.SizeInBytes();
				long diskFree = diskUsage + 10;
				
				System.IO.IOException err = null;
				
				bool done = false;
				
				// Iterate w/ ever increasing free disk space:
				while (!done)
				{
					MockRAMDirectory dir = new MockRAMDirectory(startDir);
					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
					
					modifier.SetMaxBufferedDocs(1000); // use flush or close
					modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
					
					// For each disk size, first try to commit against
					// dir that will hit random IOExceptions & disk
					// full; after, give it infinite disk space & turn
					// off random IOExceptions & retry w/ same reader:
					bool success = false;
					
					for (int x = 0; x < 2; x++)
					{
						
						double rate = 0.1;
						//UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1042'"
						double diskRatio = ((double) diskFree) / diskUsage;
						long thisDiskFree;
						System.String testName;
						
						if (0 == x)
						{
							// x == 0: constrained disk plus random IOExceptions.
							thisDiskFree = diskFree;
							// The closer we are to having enough space, the
							// lower the injected IOException rate:
							if (diskRatio >= 2.0)
							{
								rate /= 2;
							}
							if (diskRatio >= 4.0)
							{
								rate /= 2;
							}
							if (diskRatio >= 6.0)
							{
								rate = 0.0;
							}
							if (debug)
							{
								System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
							}
							testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
						}
						else
						{
							// x == 1: retry the same writer with unlimited disk
							// and no injected exceptions.
							thisDiskFree = 0;
							rate = 0.0;
							if (debug)
							{
								System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
							}
							testName = "reader re-use after disk full";
						}
						
						dir.SetMaxSizeInBytes(thisDiskFree);
						dir.SetRandomIOExceptionRate(rate, diskFree);
						
						try
						{
							if (0 == x)
							{
								int docId = 12;
								for (int i = 0; i < 13; i++)
								{
									if (updates)
									{
										Document d = new Document();
										d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
										d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.TOKENIZED));
										modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
									}
									else
									{
										// deletes
										modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
										// modifier.setNorm(docId, "contents", (float)2.0);
									}
									docId += 12;
								}
							}
							// Close() flushes the buffered changes; this is
							// where a disk-full IOException may strike.
							modifier.Close();
							success = true;
							if (0 == x)
							{
								done = true;
							}
						}
						catch (System.IO.IOException e)
						{
							if (debug)
							{
								//UPGRADE_TODO: The equivalent in .NET for method 'java.lang.Throwable.toString' may return a different value. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1043'"
								System.Console.Out.WriteLine("  hit IOException: " + e);
								System.Console.Out.WriteLine(e.StackTrace);
							}
							err = e;
							if (1 == x)
							{
								// With unlimited disk the retry must not fail:
								System.Console.Error.WriteLine(e.StackTrace);
								Assert.Fail(testName + " hit IOException after disk space was freed up");
							}
						}
						
						// Whether we succeeded or failed, check that all
						// un-referenced files were in fact deleted (ie,
						// we did not create garbage). Just create a
						// new IndexFileDeleter, have it delete
						// unreferenced files, then verify that in fact
						// no files were deleted:
						System.String[] startFiles = dir.List();
						SegmentInfos infos = new SegmentInfos();
						infos.Read(dir);
						new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
						System.String[] endFiles = dir.List();
						
						//UPGRADE_TODO: Method 'java.util.Arrays.sort' was converted to 'System.Array.Sort' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javautilArrayssort_javalangObject[]'"
						System.Array.Sort(startFiles);
						//UPGRADE_TODO: Method 'java.util.Arrays.sort' was converted to 'System.Array.Sort' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javautilArrayssort_javalangObject[]'"
						System.Array.Sort(endFiles);
						
						// for(int i=0;i<startFiles.length;i++) {
						// System.out.println(" startFiles: " + i + ": " + startFiles[i]);
						// }
						
						if (!SupportClass.Compare.CompareStringArrays(startFiles, endFiles))
						{
							System.String successStr;
							if (success)
							{
								successStr = "success";
							}
							else
							{
								successStr = "IOException";
								System.Console.Error.WriteLine(err.StackTrace);
							}
							Assert.Fail("reader.close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
						}
						
						// Finally, verify index is not corrupt, and, if
						// we succeeded, we see all docs changed, and if
						// we failed, we see either all docs or no docs
						// changed (transactional semantics):
						IndexReader newReader = null;
						try
						{
							newReader = IndexReader.Open(dir);
						}
						catch (System.IO.IOException e)
						{
							System.Console.Error.WriteLine(e.StackTrace);
							//UPGRADE_TODO: The equivalent in .NET for method 'java.lang.Throwable.toString' may return a different value. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1043'"
							Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
						}
						
						IndexSearcher searcher = new IndexSearcher(newReader);
						Hits hits = null;
						try
						{
							hits = searcher.Search(new TermQuery(searchTerm));
						}
						catch (System.IO.IOException e)
						{
							System.Console.Error.WriteLine(e.StackTrace);
							//UPGRADE_TODO: The equivalent in .NET for method 'java.lang.Throwable.toString' may return a different value. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1043'"
							Assert.Fail(testName + ": exception when searching: " + e);
						}
						int result2 = hits.Length();
						if (success)
						{
							if (x == 0 && result2 != END_COUNT)
							{
								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
							}
							else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
							{
								// It's possible that the first exception was
								// "recoverable" wrt pending deletes, in which
								// case the pending deletes are retained and
								// then re-flushing (with plenty of disk
								// space) will succeed in flushing the
								// deletes:
								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
							}
						}
						else
						{
							// On hitting exception we still may have added
							// all docs:
							if (result2 != START_COUNT && result2 != END_COUNT)
							{
								System.Console.Error.WriteLine(err.StackTrace);
								Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
							}
						}
						
						searcher.Close();
						newReader.Close();
						
						if (result2 == END_COUNT)
						{
							break;
						}
					}
					
					dir.Close();
					
					// Try again with 10 more bytes of free space:
					diskFree += 10;
				}
			}
		}
Ejemplo n.º 24
0
		/// <summary>
		/// Verifies that flush triggering tracks live changes to
		/// SetMaxBufferedDeleteTerms / SetRAMBufferSizeMB while deletes are
		/// being buffered: flushes happen only while some trigger is enabled.
		/// </summary>
		public virtual void  TestChangingRAMBuffer2()
		{
			RAMDirectory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			writer.SetMaxBufferedDocs(10);
			writer.SetMaxBufferedDeleteTerms(10);
			writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
			
			// Seed 51 docs so the deletes below actually match something.
			for (int j = 1; j < 52; j++)
			{
				Document doc = new Document();
				doc.Add(new Field("field", "aaa" + j, Field.Store.YES, Field.Index.ANALYZED));
				writer.AddDocument(doc);
			}
			
			int lastFlushCount = - 1;
			for (int j = 1; j < 52; j++)
			{
				writer.DeleteDocuments(new Term("field", "aaa" + j));
				_TestUtil.SyncConcurrentMerges(writer);
				int flushCount = writer.GetFlushCount();
				if (j == 1)
					lastFlushCount = flushCount;
				else if (j < 10)
				{
					// No new files should be created
					Assert.AreEqual(flushCount, lastFlushCount);
				}
				else if (10 == j)
				{
					// 10 buffered delete terms reached: a flush must have
					// happened; now make every delete flush (tiny RAM buffer
					// plus a 1-term delete threshold).
					Assert.IsTrue(flushCount > lastFlushCount);
					lastFlushCount = flushCount;
					writer.SetRAMBufferSizeMB(0.000001);
					writer.SetMaxBufferedDeleteTerms(1);
				}
				else if (j < 20)
				{
					Assert.IsTrue(flushCount > lastFlushCount);
					lastFlushCount = flushCount;
				}
				else if (20 == j)
				{
					// Disable both triggers: no flushes expected until j == 30.
					writer.SetRAMBufferSizeMB(16);
					writer.SetMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
					lastFlushCount = flushCount;
				}
				else if (j < 30)
				{
					Assert.AreEqual(flushCount, lastFlushCount);
				}
				else if (30 == j)
				{
					// Re-enable flushing on every delete. NOTE(review): the
					// DISABLE_AUTO_FLUSH call is immediately overridden by the
					// 1-term threshold; presumably intentional (mirrors the
					// upstream Java test) — confirm before "simplifying".
					writer.SetRAMBufferSizeMB(0.000001);
					writer.SetMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
					writer.SetMaxBufferedDeleteTerms(1);
				}
				else if (j < 40)
				{
					Assert.IsTrue(flushCount > lastFlushCount);
					lastFlushCount = flushCount;
				}
				else if (40 == j)
				{
					// Back to a 10-term delete threshold; RAM trigger disabled.
					writer.SetMaxBufferedDeleteTerms(10);
					writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
					lastFlushCount = flushCount;
				}
				else if (j < 50)
				{
					Assert.AreEqual(flushCount, lastFlushCount);
					writer.SetMaxBufferedDeleteTerms(10);
					writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
				}
				else if (50 == j)
				{
					// 10 more delete terms buffered since j == 40: flush expected.
					Assert.IsTrue(flushCount > lastFlushCount);
				}
			}
			writer.Close();
			dir.Close();
		}
Ejemplo n.º 25
0
		/// <summary> Make sure if modifier tries to commit but hits disk full that modifier
		/// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
		/// </summary>
		/// <param name="updates">true: exercise UpdateDocument; false: exercise
		/// DeleteDocuments. Either way the index must behave transactionally.
		/// </param>
		private void  TestOperationsOnDiskFull(bool updates)
		{
			
			bool debug = false;
			Term searchTerm = new Term("content", "aaa");
			int START_COUNT = 157; // docs in the starting index
			int END_COUNT = 144;   // docs remaining once the 13 changes all apply
			
			// pass 0 runs with autoCommit=true, pass 1 with autoCommit=false.
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				
				// First build up a starting index:
				MockRAMDirectory startDir = new MockRAMDirectory();
				IndexWriter writer = new IndexWriter(startDir, autoCommit, new WhitespaceAnalyzer(), true);
				for (int i = 0; i < 157; i++)
				{
					Document d = new Document();
					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
					d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
					writer.AddDocument(d);
				}
				writer.Close();
				
				long diskUsage = startDir.SizeInBytes();
				long diskFree = diskUsage + 10;
				
				System.IO.IOException err = null;
				
				bool done = false;
				
				// Iterate w/ ever increasing free disk space:
				while (!done)
				{
					MockRAMDirectory dir = new MockRAMDirectory(startDir);
					// Retrying after a failure rewrites the same file names:
					dir.SetPreventDoubleWrite(false);
					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
					
					modifier.SetMaxBufferedDocs(1000); // use flush or close
					modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
					
					// For each disk size, first try to commit against
					// dir that will hit random IOExceptions & disk
					// full; after, give it infinite disk space & turn
					// off random IOExceptions & retry w/ same reader:
					bool success = false;
					
					for (int x = 0; x < 2; x++)
					{
						
						double rate = 0.1;
						double diskRatio = ((double) diskFree) / diskUsage;
						long thisDiskFree;
						System.String testName;
						
						if (0 == x)
						{
							// x == 0: constrained disk plus random IOExceptions.
							thisDiskFree = diskFree;
							// The closer we are to having enough space, the
							// lower the injected IOException rate:
							if (diskRatio >= 2.0)
							{
								rate /= 2;
							}
							if (diskRatio >= 4.0)
							{
								rate /= 2;
							}
							if (diskRatio >= 6.0)
							{
								rate = 0.0;
							}
							if (debug)
							{
								System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
							}
							testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
						}
						else
						{
							// x == 1: retry the same writer with unlimited disk
							// and no injected exceptions.
							thisDiskFree = 0;
							rate = 0.0;
							if (debug)
							{
								System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
							}
							testName = "reader re-use after disk full";
						}
						
						dir.SetMaxSizeInBytes(thisDiskFree);
						dir.SetRandomIOExceptionRate(rate, diskFree);
						
						try
						{
							if (0 == x)
							{
								int docId = 12;
								for (int i = 0; i < 13; i++)
								{
									if (updates)
									{
										Document d = new Document();
										d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
										d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
										modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
									}
									else
									{
										// deletes
										modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
										// modifier.setNorm(docId, "contents", (float)2.0);
									}
									docId += 12;
								}
							}
							// Close() flushes the buffered changes; this is
							// where a disk-full IOException may strike.
							modifier.Close();
							success = true;
							if (0 == x)
							{
								done = true;
							}
						}
						catch (System.IO.IOException e)
						{
							if (debug)
							{
								System.Console.Out.WriteLine("  hit IOException: " + e);
								System.Console.Out.WriteLine(e.StackTrace);
							}
							err = e;
							if (1 == x)
							{
								// With unlimited disk the retry must not fail:
								System.Console.Error.WriteLine(e.StackTrace);
								Assert.Fail(testName + " hit IOException after disk space was freed up");
							}
						}
						
						// If the close() succeeded, make sure there are
						// no unreferenced files.
                        if (success)
                        {
                            Lucene.Net.Util._TestUtil.CheckIndex(dir);
                            TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.close");
                        }
						
						// Finally, verify index is not corrupt, and, if
						// we succeeded, we see all docs changed, and if
						// we failed, we see either all docs or no docs
						// changed (transactional semantics):
						IndexReader newReader = null;
						try
						{
							newReader = IndexReader.Open(dir);
						}
						catch (System.IO.IOException e)
						{
							System.Console.Error.WriteLine(e.StackTrace);
							Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
						}
						
						IndexSearcher searcher = new IndexSearcher(newReader);
						ScoreDoc[] hits = null;
						try
						{
							hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
						}
						catch (System.IO.IOException e)
						{
							System.Console.Error.WriteLine(e.StackTrace);
							Assert.Fail(testName + ": exception when searching: " + e);
						}
						int result2 = hits.Length;
						if (success)
						{
							if (x == 0 && result2 != END_COUNT)
							{
								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
							}
							else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
							{
								// It's possible that the first exception was
								// "recoverable" wrt pending deletes, in which
								// case the pending deletes are retained and
								// then re-flushing (with plenty of disk
								// space) will succeed in flushing the
								// deletes:
								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
							}
						}
						else
						{
							// On hitting exception we still may have added
							// all docs:
							if (result2 != START_COUNT && result2 != END_COUNT)
							{
								System.Console.Error.WriteLine(err.StackTrace);
								Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
							}
						}
						
						searcher.Close();
						newReader.Close();
						
						if (result2 == END_COUNT)
						{
							break;
						}
					}
					
					dir.Close();
					
					// Try again with 10 more bytes of free space:
					diskFree += 10;
				}
			}
		}
Ejemplo n.º 26
0
		/// <summary>
		/// Basic add-then-delete round trip: index two small documents, delete
		/// one by its "city" term, and confirm its hit disappears. Runs once
		/// with autoCommit and once without.
		/// </summary>
		public virtual void  TestSimpleCase()
		{
			System.String[] ids = new System.String[]{"1", "2"};
			System.String[] countries = new System.String[]{"Netherlands", "Italy"};
			System.String[] contents = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
			System.String[] cities = new System.String[]{"Amsterdam", "Venice"};
			
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (pass == 0);
				
				Directory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				modifier.SetUseCompoundFile(true);
				// Threshold of 1: every buffered delete term triggers a flush.
				modifier.SetMaxBufferedDeleteTerms(1);
				
				for (int i = 0; i < ids.Length; i++)
				{
					Document doc = new Document();
					doc.Add(new Field("id", ids[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
					doc.Add(new Field("country", countries[i], Field.Store.YES, Field.Index.NO));
					doc.Add(new Field("contents", contents[i], Field.Store.NO, Field.Index.ANALYZED));
					doc.Add(new Field("city", cities[i], Field.Store.YES, Field.Index.ANALYZED));
					modifier.AddDocument(doc);
				}
				modifier.Optimize();
				modifier.Commit();
				
				Term term = new Term("city", "Amsterdam");
				Assert.AreEqual(1, GetHitCount(dir, term));
				
				modifier.DeleteDocuments(term);
				modifier.Commit();
				Assert.AreEqual(0, GetHitCount(dir, term));
				
				modifier.Close();
				dir.Close();
			}
		}
Ejemplo n.º 27
0
		/// <summary>
		/// Runs the error-after-apply-deletes scenario for both autoCommit
		/// modes: an injected one-shot failure makes the first Commit() fail
		/// while buffered deletes are applied; the retry must succeed without
		/// losing the delete.
		/// </summary>
		public virtual void  TestErrorAfterApplyDeletes()
		{
			
			MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);
			
			// create a couple of files
			
			System.String[] keywords = new System.String[]{"1", "2"};
			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
			
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				MockRAMDirectory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				modifier.SetUseCompoundFile(true);
				modifier.SetMaxBufferedDeleteTerms(2);
				
				dir.FailOn(failure.Reset());
				
				for (int i = 0; i < keywords.Length; i++)
				{
					Document doc = new Document();
					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
					modifier.AddDocument(doc);
				}
				// flush (and commit if ac)
				
				modifier.Optimize();
				modifier.Commit();
				
				// one of the two files hits
				
				Term term = new Term("city", "Amsterdam");
				int hitCount = GetHitCount(dir, term);
				Assert.AreEqual(1, hitCount);
				
				// delete the doc
				// max buf del terms is two, so this is buffered
				
				modifier.DeleteDocuments(term);
				
				// add a doc (needed for the !ac case; see below)
				// doc remains buffered
				
				Document doc2 = new Document();
				modifier.AddDocument(doc2);
				
				// commit the changes, the buffered deletes, and the new doc
				
				// The failure object will fail on the first write after the del
				// file gets created when processing the buffered delete
				
				// in the ac case, this will be when writing the new segments
				// files so we really don't need the new doc, but it's harmless
				
				// in the !ac case, a new segments file won't be created but in
				// this case, creation of the cfs file happens next so we need
				// the doc (to test that it's okay that we don't lose deletes if
				// failing while creating the cfs file)
				
				bool failed = false;
				try
				{
					modifier.Commit();
				}
				catch (System.IO.IOException)
				{
					// Expected: the injected one-shot failure fires here. The
					// exception is not inspected, so no variable is bound
					// (avoids unused-variable warning CS0168).
					failed = true;
				}
				
				Assert.IsTrue(failed);
				
				// The commit above failed, so we need to retry it (which will
				// succeed, because the failure is a one-shot)
				
				modifier.Commit();
				
				hitCount = GetHitCount(dir, term);
				
				// Make sure the delete was successfully flushed:
				Assert.AreEqual(0, hitCount);
				
				modifier.Close();
				dir.Close();
			}
		}
Ejemplo n.º 28
0
 /// <summary>
 /// With a buffered-delete-term threshold of 1, each DeleteDocuments call
 /// must trigger its own delete flush: three deletes, three flushes.
 /// </summary>
 public virtual void TestMaxBufferedDeletes()
 {
     Directory dir = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                          IndexWriter.MaxFieldLength.UNLIMITED);
     writer.SetMaxBufferedDeleteTerms(1);
     for (int i = 0; i < 3; i++)
     {
         writer.DeleteDocuments(new Term("foobar", "1"));
     }
     Assert.AreEqual(3, writer.GetFlushDeletesCount());
     writer.Close();
     dir.Close();
 }