Example #1
        public virtual void  TestExpungeDeletes()
        {
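            // Two docs are added through one shared Field whose value changes
            // between adds; after deleting id:0, ExpungeDeletes must leave a
            // single live doc and clear HasDeletions. An NRT reader is held
            // open across the expunge to verify that case works too.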
            Directory   dir = new MockRAMDirectory();
            IndexWriter w   = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED, null);
            Document    doc = new Document();

            doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
            Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(id);
            id.SetValue("0");
            w.AddDocument(doc, null);
            id.SetValue("1");
            w.AddDocument(doc, null);
            w.DeleteDocuments(null, new Term("id", "0"));

            IndexReader r = w.GetReader(null);

            w.ExpungeDeletes(null);
            w.Close();
            r.Close();
            r = IndexReader.Open(dir, true, null);
            Assert.AreEqual(1, r.NumDocs());
            Assert.IsFalse(r.HasDeletions);
            r.Close();
            dir.Close();
        }
Example #2
        public void TestDeletesNumDocs()
        {
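            // NumDocs on the near-real-time reader from GetReader() must track
            // adds and deletes without an explicit commit.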
            Directory   dir = new MockRAMDirectory();
            IndexWriter w   = new IndexWriter(dir, new WhitespaceAnalyzer(),
                                              IndexWriter.MaxFieldLength.LIMITED);
            Document doc = new Document();

            doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
            Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(id);
            id.SetValue("0");
            w.AddDocument(doc);
            id.SetValue("1");
            w.AddDocument(doc);
            IndexReader r = w.GetReader();

            Assert.AreEqual(2, r.NumDocs());
            r.Close();

            w.DeleteDocuments(new Term("id", "0"));
            r = w.GetReader();
            Assert.AreEqual(1, r.NumDocs());
            r.Close();

            w.DeleteDocuments(new Term("id", "1"));
            r = w.GetReader();
            Assert.AreEqual(0, r.NumDocs());
            r.Close();

            w.Close();
            dir.Close();
        }
Example #3
        public virtual void TestFlushExceptions()
        {
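            // The MockRAMDirectory is armed to throw an IOException during flush;
            // the inner loop re-adds a doc until the failure actually trips, and
            // extraCount records the adds from attempts where nothing flushed.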
            MockRAMDirectory directory = new MockRAMDirectory();
            FailOnlyOnFlush  failure   = new FailOnlyOnFlush();

            directory.FailOn(failure);

            IndexWriter writer           = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
            ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();

            writer.SetMergeScheduler(cms);
            writer.SetMaxBufferedDocs(2);
            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);
            int extraCount = 0;

            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 20; j++)
                {
                    idField.SetValue(System.Convert.ToString(i * 20 + j));
                    writer.AddDocument(doc);
                }

                while (true)
                {
                    // must cycle here because sometimes the merge flushes
                    // the doc we just added and so there's nothing to
                    // flush, and we don't hit the exception
                    writer.AddDocument(doc);
                    failure.SetDoFail();
                    try
                    {
                        writer.Flush(true, false, true);
                        if (failure.hitExc)
                        {
                            Assert.Fail("failed to hit IOException");
                        }
                        extraCount++;
                    }
                    catch (System.IO.IOException ioe)
                    {
                        failure.ClearDoFail();
                        break;
                    }
                }
            }

            writer.Close();
            IndexReader reader = IndexReader.Open(directory, true);

            Assert.AreEqual(200 + extraCount, reader.NumDocs());
            reader.Close();
            directory.Close();
        }
Example #4
        public virtual void  TestNoWaitClose()
        {
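            // Close(false) must abort in-flight merges instead of waiting for
            // them; mergeFactor drops to 3 right before closing to spawn extra
            // merge threads to abort.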
            RAMDirectory directory = new MockRAMDirectory();

            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);

            for (int pass = 0; pass < 2; pass++)
            {
                bool        autoCommit = pass == 0;
                IndexWriter writer     = new IndexWriter(directory, autoCommit, ANALYZER, true);

                for (int iter = 0; iter < 10; iter++)
                {
                    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
                    writer.SetMergeScheduler(cms);
                    writer.SetMaxBufferedDocs(2);
                    writer.SetMergeFactor(100);

                    for (int j = 0; j < 201; j++)
                    {
                        idField.SetValue(System.Convert.ToString(iter * 201 + j));
                        writer.AddDocument(doc);
                    }

                    int delID = iter * 201;
                    for (int j = 0; j < 20; j++)
                    {
                        writer.DeleteDocuments(new Term("id", System.Convert.ToString(delID)));
                        delID += 5;
                    }

                    // Force a bunch of merge threads to kick off so we
                    // stress out aborting them on close:
                    writer.SetMergeFactor(3);
                    writer.AddDocument(doc);
                    writer.Flush();

                    writer.Close(false);

                    IndexReader reader = IndexReader.Open(directory);
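                    // 202 docs added and 20 deleted per iteration -> 182 live docs each.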
                    Assert.AreEqual((1 + iter) * 182, reader.NumDocs());
                    reader.Close();

                    // Reopen
                    writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
                }
                writer.Close();
            }

            directory.Close();
        }
Example #5
        public virtual void TestNoWaitClose()
        {
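            // Same scenario as Example #4, but written against the overloads that
            // take an explicit state argument (passed as null here) and using
            // Commit() before the non-waiting Close(false).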
            RAMDirectory directory = new MockRAMDirectory();

            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);

            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED, null);

            for (int iter = 0; iter < 10; iter++)
            {
                ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
                writer.SetMergeScheduler(cms, null);
                writer.SetMaxBufferedDocs(2);
                writer.MergeFactor = 100;

                for (int j = 0; j < 201; j++)
                {
                    idField.SetValue(System.Convert.ToString(iter * 201 + j));
                    writer.AddDocument(doc, null);
                }

                int delID = iter * 201;
                for (int j = 0; j < 20; j++)
                {
                    writer.DeleteDocuments(null, new Term("id", delID.ToString()));
                    delID += 5;
                }

                // Force a bunch of merge threads to kick off so we
                // stress out aborting them on close:
                writer.MergeFactor = 3;
                writer.AddDocument(doc, null);
                writer.Commit(null);

                writer.Close(false);

                IndexReader reader = IndexReader.Open((Directory)directory, true, null);
                Assert.AreEqual((1 + iter) * 182, reader.NumDocs());
                reader.Close();

                // Reopen
                writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED, null);
            }
            writer.Close();

            directory.Close();
        }
Example #6
        public virtual void  TestDeleteMerging()
        {
            RAMDirectory directory = new MockRAMDirectory();

            IndexWriter writer           = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
            ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();

            writer.SetMergeScheduler(cms);

            LogDocMergePolicy mp = new LogDocMergePolicy(writer);

            writer.SetMergePolicy(mp);

            // Force degenerate merging so we can get a mix of
            // merging of segments with and without deletes at the
            // start:
            mp.MinMergeDocs = 1000;

            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 100; j++)
                {
                    idField.SetValue(System.Convert.ToString(i * 100 + j));
                    writer.AddDocument(doc);
                }

                int delID = i;
                while (delID < 100 * (1 + i))
                {
                    writer.DeleteDocuments(new Term("id", "" + delID));
                    delID += 10;
                }

                writer.Commit();
            }

            writer.Close();
            IndexReader reader = IndexReader.Open(directory, true);

            // Verify that we did not lose any deletes...
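            // 1000 docs added; pass i deletes 10*(i+1) ids, 550 in total.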
            Assert.AreEqual(450, reader.NumDocs());
            reader.Close();
            directory.Close();
        }
Example #7
        public virtual void  TestFlushExceptions()
        {
            MockRAMDirectory directory = new MockRAMDirectory();
            FailOnlyOnFlush  failure   = new FailOnlyOnFlush();

            directory.FailOn(failure);

            IndexWriter writer           = new IndexWriter(directory, true, ANALYZER, true);
            ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();

            writer.SetMergeScheduler(cms);
            writer.SetMaxBufferedDocs(2);
            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 20; j++)
                {
                    idField.SetValue(System.Convert.ToString(i * 20 + j));
                    writer.AddDocument(doc);
                }

                writer.AddDocument(doc);

                failure.SetDoFail();
                try
                {
                    writer.Flush();
                    Assert.Fail("failed to hit IOException");
                }
                catch (System.IO.IOException ioe)
                {
                    failure.ClearDoFail();
                }
            }

            writer.Close();
            IndexReader reader = IndexReader.Open(directory);

            Assert.AreEqual(200, reader.NumDocs());
            reader.Close();
            directory.Close();
        }
Example #8
		public virtual void TestDeleteMerging()
		{
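			// Older-API variant of Example #6: setter-style SetMinMergeDocs and
			// Flush() where the newer code uses the MinMergeDocs property and Commit().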
			
			RAMDirectory directory = new MockRAMDirectory();
			
			IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
			writer.SetMergeScheduler(cms);
			
			LogDocMergePolicy mp = new LogDocMergePolicy(writer);
			writer.SetMergePolicy(mp);
			
			// Force degenerate merging so we can get a mix of
			// merging of segments with and without deletes at the
			// start:
			mp.SetMinMergeDocs(1000);
			
			Document doc = new Document();
			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
			doc.Add(idField);
			for (int i = 0; i < 10; i++)
			{
				for (int j = 0; j < 100; j++)
				{
					idField.SetValue(System.Convert.ToString(i * 100 + j));
					writer.AddDocument(doc);
				}
				
				int delID = i;
				while (delID < 100 * (1 + i))
				{
					writer.DeleteDocuments(new Term("id", "" + delID));
					delID += 10;
				}
				
				writer.Flush();
			}
			
			writer.Close();
			IndexReader reader = IndexReader.Open(directory);
			// Verify that we did not lose any deletes...
			Assert.AreEqual(450, reader.NumDocs());
			reader.Close();
			directory.Close();
		}
Example #9
        private void  Create()
        {
            // NOTE: put seed in here to make failures
            // deterministic, but do not commit with a seed (to
            // better test):
            dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);

            writer.SetMaxBufferedDocs(17);

            Document doc  = new Document();
            Document doc2 = new Document();

            Field id = new Field("id", "", Field.Store.YES, Field.Index.NO);

            doc.Add(id);
            doc2.Add(id);

            Field contents = new Field("contents", "", Field.Store.NO, Field.Index.ANALYZED);

            doc.Add(contents);
            doc2.Add(contents);

            Field byteField = new Field("byte", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(byteField);
            doc2.Add(byteField);

            Field shortField = new Field("short", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(shortField);
            doc2.Add(shortField);

            Field intField = new Field("int", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(intField);
            doc2.Add(intField);

            Field longField = new Field("long", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(longField);
            doc2.Add(longField);

            Field floatField = new Field("float", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(floatField);
            doc2.Add(floatField);

            Field doubleField = new Field("double", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(doubleField);
            doc2.Add(doubleField);

            // we use two diff string fields so our FieldCache usage
            // is less suspicious to cache inspection
            Field stringField = new Field("string", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(stringField);
            Field stringFieldIdx = new Field("stringIdx", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(stringFieldIdx);
            // doc2 doesn't have stringField or stringFieldIdx, so we get nulls

            for (int i = 0; i < NUM_DOCS; i++)
            {
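                // Deterministic contents patterns plus random numeric values, with
                // forced min/max sentinels, exercise the full FieldCache range;
                // every 197th doc uses doc2, which lacks the string fields.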
                id.SetValue("" + i);
                if (i % 1000 == 0)
                {
                    contents.SetValue("a b c z");
                }
                else if (i % 100 == 0)
                {
                    contents.SetValue("a b c y");
                }
                else if (i % 10 == 0)
                {
                    contents.SetValue("a b c x");
                }
                else
                {
                    contents.SetValue("a b c");
                }
                byteField.SetValue("" + NextInt((sbyte)System.SByte.MinValue, (sbyte)System.SByte.MaxValue));
                if (NextInt(10) == 3)
                {
                    shortField.SetValue("" + System.Int16.MinValue);
                }
                else if (NextInt(10) == 7)
                {
                    shortField.SetValue("" + System.Int16.MaxValue);
                }
                else
                {
                    shortField.SetValue("" + NextInt(System.Int16.MinValue, System.Int16.MaxValue));
                }

                if (NextInt(10) == 3)
                {
                    intField.SetValue("" + System.Int32.MinValue);
                }
                else if (NextInt(10) == 7)
                {
                    intField.SetValue("" + System.Int32.MaxValue);
                }
                else
                {
                    intField.SetValue("" + this.r.Next());
                }

                if (NextInt(10) == 3)
                {
                    longField.SetValue("" + System.Int64.MinValue);
                }
                else if (NextInt(10) == 7)
                {
                    longField.SetValue("" + System.Int64.MaxValue);
                }
                else
                {
                    longField.SetValue("" + this.r.Next(System.Int32.MaxValue));
                }
                floatField.SetValue("" + (float)this.r.NextDouble());
                doubleField.SetValue("" + this.r.NextDouble());
                if (i % 197 == 0)
                {
                    writer.AddDocument(doc2);
                }
                else
                {
                    System.String r = RandomString(NextInt(20));
                    stringField.SetValue(r);
                    stringFieldIdx.SetValue(r);
                    writer.AddDocument(doc);
                }
            }
            writer.Close();
            searcherMultiSegment = new IndexSearcher(dir);
            searcherMultiSegment.SetDefaultFieldSortScoring(true, true);

            dir2   = new MockRAMDirectory(dir);
            writer = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
            writer.Optimize();
            writer.Close();
            searcherSingleSegment = new IndexSearcher(dir2);
            searcherSingleSegment.SetDefaultFieldSortScoring(true, true);
            dir3   = new MockRAMDirectory(dir);
            writer = new IndexWriter(dir3, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
            writer.Optimize(3);
            writer.Close();
            searcherFewSegment = new IndexSearcher(dir3);
            searcherFewSegment.SetDefaultFieldSortScoring(true, true);
        }
Example #10
            override public void  Run()
            {
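                // Stress worker: UpdateDocument runs with a failure armed through
                // the doFail thread-local; after an injected exception, CheckIndex
                // must pass and a follow-up update must still succeed.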
                Document doc = new Document();

                doc.Add(new Field("content1", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.ANALYZED));
                doc.Add(new Field("content6", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
                doc.Add(new Field("content2", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("content3", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.NO));

                doc.Add(new Field("content4", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.ANALYZED));
                doc.Add(new Field("content5", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.NOT_ANALYZED));

                doc.Add(new Field("content7", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.NOT_ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));

                Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

                doc.Add(idField);

                long stopTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 3000;

                while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < stopTime)
                {
                    System.Threading.Thread.SetData(Enclosing_Instance.doFail, this);
                    System.String id = "" + r.Next(50);
                    idField.SetValue(id);
                    Term idTerm = new Term("id", id);
                    try
                    {
                        writer.UpdateDocument(idTerm, doc);
                    }
                    catch (System.SystemException re)
                    {
                        if (Lucene.Net.Index.TestIndexWriterExceptions.DEBUG)
                        {
                            System.Console.Out.WriteLine("EXC: ");
                            System.Console.Out.WriteLine(re.StackTrace);
                        }
                        try
                        {
                            _TestUtil.CheckIndex(writer.GetDirectory());
                        }
                        catch (System.IO.IOException ioe)
                        {
                            System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": unexpected exception1");
                            System.Console.Out.WriteLine(ioe.StackTrace);
                            failure = ioe;
                            break;
                        }
                    }
                    catch (System.Exception t)
                    {
                        System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": unexpected exception2");
                        System.Console.Out.WriteLine(t.StackTrace);
                        failure = t;
                        break;
                    }

                    System.Threading.Thread.SetData(Enclosing_Instance.doFail, null);

                    // After a possible exception (above) I should be able
                    // to add a new document without hitting an
                    // exception:
                    try
                    {
                        writer.UpdateDocument(idTerm, doc);
                    }
                    catch (System.Exception t)
                    {
                        System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": unexpected exception3");
                        System.Console.Out.WriteLine(t.StackTrace);
                        failure = t;
                        break;
                    }
                }
            }
Example #11
 override public void Run()
 {
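     // Indexing worker: add a doc, commit, reopen the reader, and assert the
     // new term is immediately visible (DocFreq == 1) until endTime.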
     try
     {
         Document doc = new Document();
         IndexReader r = IndexReader.Open(dir);
         Field f = new Field("f", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
         doc.Add(f);
         int count = 0;
         while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime && failed.Count == 0)
         {
             for (int j = 0; j < 10; j++)
             {
                 String s = finalI + "_" + (count++).ToString();
                 f.SetValue(s);
                 w.AddDocument(doc);
                 w.Commit();
                 IndexReader r2 = r.Reopen();
                 Assert.IsTrue(r2 != r);
                 r.Close();
                 r = r2;
                 Assert.AreEqual(1, r.DocFreq(new Term("f", s)), "term=f:" + s);
             }
         }
         r.Close();
     }
     catch (Exception t)
     {
         lock (failed)
         {
             failed.Add(this);
         }
         throw; // rethrow without resetting the stack trace
     }
 }
Example #12
		public virtual void  TestNoWaitClose()
		{
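			// Alternates autoCommit and serial vs. concurrent merge scheduling; a
			// helper thread keeps working while Close(false) aborts merges, and
			// the index must still open cleanly afterwards.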
			RAMDirectory directory = new MockRAMDirectory();
			
			Document doc = new Document();
			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
			doc.Add(idField);
			
			for (int pass = 0; pass < 3; pass++)
			{
				bool autoCommit = pass % 2 == 0;
				IndexWriter writer = new IndexWriter(directory, autoCommit, new WhitespaceAnalyzer(), true);
				
				//System.out.println("TEST: pass="******" ac=" + autoCommit + " cms=" + (pass >= 2));
				for (int iter = 0; iter < 10; iter++)
				{
					//System.out.println("TEST: iter=" + iter);
					MergeScheduler ms;
					if (pass >= 2)
						ms = new ConcurrentMergeScheduler();
					else
						ms = new SerialMergeScheduler();
					
					writer.SetMergeScheduler(ms);
					writer.SetMaxBufferedDocs(2);
					writer.SetMergeFactor(100);
					
					for (int j = 0; j < 199; j++)
					{
						idField.SetValue(System.Convert.ToString(iter * 201 + j));
						writer.AddDocument(doc);
					}
					
					int delID = iter * 199;
					for (int j = 0; j < 20; j++)
					{
						writer.DeleteDocuments(new Term("id", System.Convert.ToString(delID)));
						delID += 5;
					}
					
					// Force a bunch of merge threads to kick off so we
					// stress out aborting them on close:
					writer.SetMergeFactor(2);
					
					IndexWriter finalWriter = writer;
					System.Collections.ArrayList failure = new System.Collections.ArrayList();
					SupportClass.ThreadClass t1 = new AnonymousClassThread1(finalWriter, doc, failure, this);
					
					if (failure.Count > 0)
					{
						throw (System.Exception) failure[0];
					}
					
					t1.Start();
					
					writer.Close(false);
					t1.Join();
					
					// Make sure reader can read
					IndexReader reader = IndexReader.Open(directory);
					reader.Close();
					
					// Reopen
					writer = new IndexWriter(directory, autoCommit, new WhitespaceAnalyzer(), false);
				}
				writer.Close();
			}
			
			directory.Close();
		}