public virtual void  TestErrorInDocsWriterAdd()
        {
            // Verifies that when an injected failure makes AddDocument throw,
            // DocumentsWriter's abort() leaves no unreferenced files behind:
            // running IndexFileDeleter afterwards must delete nothing further.
            MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);

            // create a couple of files

            System.String[] keywords  = new System.String[] { "1", "2" };
            System.String[] unindexed = new System.String[] { "Netherlands", "Italy" };
            System.String[] unstored  = new System.String[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
            System.String[] text      = new System.String[] { "Amsterdam", "Venice" };

            // Pass 0 runs with autoCommit = true, pass 1 with autoCommit = false.
            for (int pass = 0; pass < 2; pass++)
            {
                bool             autoCommit = (0 == pass);
                MockRAMDirectory dir        = new MockRAMDirectory();
                IndexWriter      modifier   = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);

                // Arm the injected failure so a later AddDocument call throws.
                dir.FailOn(failure.Reset());

                for (int i = 0; i < keywords.Length; i++)
                {
                    Document doc = new Document();
                    doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
                    doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
                    doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
                    doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
                    try
                    {
                        modifier.AddDocument(doc);
                    }
                    catch (System.IO.IOException)
                    {
                        // Expected: the injected failure fired; stop adding documents.
                        // (Exception variable removed — it was unused; CS0168.)
                        break;
                    }
                }

                // Compare the directory listing before and after running a fresh
                // IndexFileDeleter: if abort() already cleaned up properly, the
                // deleter finds nothing unreferenced and the listings match.
                System.String[] startFiles = dir.ListAll();
                SegmentInfos    infos      = new SegmentInfos();
                infos.Read(dir);
                new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
                System.String[] endFiles = dir.ListAll();

                if (!SupportClass.CollectionsHelper.CompareStringArrays(startFiles, endFiles))
                {
                    Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
                }

                modifier.Close();
            }
        }
		public virtual void  TestErrorInDocsWriterAdd()
		{
			// Inject a failure that will trip during a later AddDocument call,
			// then check that the writer's abort left no unreferenced files.
			MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
			
			// Field values for the two sample documents.
			System.String[] ids = new System.String[]{"1", "2"};
			System.String[] countries = new System.String[]{"Netherlands", "Italy"};
			System.String[] bodies = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
			System.String[] cities = new System.String[]{"Amsterdam", "Venice"};
			
			// First pass uses autoCommit = true, second uses autoCommit = false.
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = pass == 0;
				MockRAMDirectory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				
				dir.FailOn(failure.Reset());
				
				for (int docNum = 0; docNum < ids.Length; docNum++)
				{
					Document doc = new Document();
					doc.Add(new Field("id", ids[docNum], Field.Store.YES, Field.Index.NOT_ANALYZED));
					doc.Add(new Field("country", countries[docNum], Field.Store.YES, Field.Index.NO));
					doc.Add(new Field("contents", bodies[docNum], Field.Store.NO, Field.Index.ANALYZED));
					doc.Add(new Field("city", cities[docNum], Field.Store.YES, Field.Index.ANALYZED));
					try
					{
						modifier.AddDocument(doc);
					}
					catch (System.IO.IOException)
					{
						// The injected failure fired; stop adding documents.
						break;
					}
				}
				
				// A fresh IndexFileDeleter must find nothing unreferenced:
				// the listing before and after running it has to be identical.
				System.String[] before = dir.ListAll();
				SegmentInfos infos = new SegmentInfos();
				infos.Read(dir);
				new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null, null);
				System.String[] after = dir.ListAll();
				
				bool listingsMatch = SupportClass.CollectionsHelper.CompareStringArrays(before, after);
				if (!listingsMatch)
				{
					Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(before) + "\n  after delete:\n    " + ArrayToString(after));
				}
				
				modifier.Close();
			}
		}
Example #3
0
		/// <summary>
		/// Simulates "disk full" while an IndexReader applies deletes and norms.
		/// For steadily increasing free space, pass 0 operates against a
		/// directory with a hard size cap plus random injected IOExceptions,
		/// then pass 1 retries with the same reader under unlimited space.
		/// Asserts transactional semantics: no garbage files are ever left
		/// behind, the index stays openable, and a search sees either all
		/// changes (END_COUNT hits) or none (START_COUNT hits).
		/// </summary>
		public virtual void  TestDiskFull()
		{
			
			bool debug = false;
			Term searchTerm = new Term("content", "aaa");
			int START_COUNT = 157;
			int END_COUNT = 144;
			
			// First build up a starting index:
			RAMDirectory startDir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			for (int i = 0; i < 157; i++)
			{
				Document d = new Document();
				d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
				d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
				writer.AddDocument(d);
			}
			writer.Close();
			
			long diskUsage = startDir.SizeInBytes();
			// Start just above the index's size; grown by 10 bytes per outer pass.
			long diskFree = diskUsage + 100;
			
			System.IO.IOException err = null;
			
			bool done = false;
			
			// Iterate w/ ever increasing free disk space:
			while (!done)
			{
				MockRAMDirectory dir = new MockRAMDirectory(startDir);
				
				// If IndexReader hits disk full, it can write to
				// the same files again.
				dir.SetPreventDoubleWrite(false);
				
				IndexReader reader = IndexReader.Open(dir, false);
				
				// For each disk size, first try to commit against
				// dir that will hit random IOExceptions & disk
				// full; after, give it infinite disk space & turn
				// off random IOExceptions & retry w/ same reader:
				bool success = false;
				
				for (int x = 0; x < 2; x++)
				{
					
					double rate = 0.05;
					double diskRatio = ((double) diskFree) / diskUsage;
					long thisDiskFree;
					System.String testName;
					
					if (0 == x)
					{
						// Pass 0: constrained disk with random IOExceptions,
						// injected less often as free space grows more generous.
						thisDiskFree = diskFree;
						if (diskRatio >= 2.0)
						{
							rate /= 2;
						}
						if (diskRatio >= 4.0)
						{
							rate /= 2;
						}
						if (diskRatio >= 6.0)
						{
							rate = 0.0;
						}
						if (debug)
						{
							System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
						}
						testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
					}
					else
					{
						// Pass 1: same reader, unlimited space, no injected errors.
						thisDiskFree = 0;
						rate = 0.0;
						if (debug)
						{
							System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
						}
						testName = "reader re-use after disk full";
					}
					
					dir.SetMaxSizeInBytes(thisDiskFree);
					dir.SetRandomIOExceptionRate(rate, diskFree);
					
					try
					{
						if (0 == x)
						{
							// Delete every 12th doc (13 total: 12, 24, ..., 156)
							// and set a norm on it; 157 - 13 = END_COUNT (144)
							// docs should remain if the changes commit.
							int docId = 12;
							for (int i = 0; i < 13; i++)
							{
								reader.DeleteDocument(docId);
								reader.SetNorm(docId, "contents", (float) 2.0);
								docId += 12;
							}
						}
						reader.Close();
						success = true;
						if (0 == x)
						{
							done = true;
						}
					}
					catch (System.IO.IOException e)
					{
						if (debug)
						{
							System.Console.Out.WriteLine("  hit IOException: " + e);
							System.Console.Out.WriteLine(e.StackTrace);
						}
						err = e;
						if (1 == x)
						{
							// Pass 1 had unlimited space and no injected errors;
							// any IOException here is a genuine failure.
							System.Console.Error.WriteLine(e.StackTrace);
							Assert.Fail(testName + " hit IOException after disk space was freed up");
						}
					}
					
					// Whether we succeeded or failed, check that all
					// un-referenced files were in fact deleted (ie,
					// we did not create garbage).  Just create a
					// new IndexFileDeleter, have it delete
					// unreferenced files, then verify that in fact
					// no files were deleted:
					System.String[] startFiles = dir.ListAll();
					SegmentInfos infos = new SegmentInfos();
					infos.Read(dir);
					new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null,null);
					System.String[] endFiles = dir.ListAll();
					
					System.Array.Sort(startFiles);
					System.Array.Sort(endFiles);
					
					//for(int i=0;i<startFiles.length;i++) {
					//  System.out.println("  startFiles: " + i + ": " + startFiles[i]);
					//}
					
					if (!CollectionsHelper.Equals(startFiles, endFiles))
					{
						System.String successStr;
						if (success)
						{
							successStr = "success";
						}
						else
						{
							successStr = "IOException";
							System.Console.Error.WriteLine(err.StackTrace);
						}
						Assert.Fail("reader.close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
					}
					
					// Finally, verify index is not corrupt, and, if
					// we succeeded, we see all docs changed, and if
					// we failed, we see either all docs or no docs
					// changed (transactional semantics):
					IndexReader newReader = null;
					try
					{
						newReader = IndexReader.Open(dir, false);
					}
					catch (System.IO.IOException e)
					{
						System.Console.Error.WriteLine(e.StackTrace);
						Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
					}
					/*
					int result = newReader.docFreq(searchTerm);
					if (success) {
					if (result != END_COUNT) {
					fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
					}
					} else {
					// On hitting exception we still may have added
					// all docs:
					if (result != START_COUNT && result != END_COUNT) {
					err.printStackTrace();
					fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT);
					}
					}
					*/
					
					IndexSearcher searcher = new IndexSearcher(newReader);
					ScoreDoc[] hits = null;
					try
					{
						hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
					}
					catch (System.IO.IOException e)
					{
						System.Console.Error.WriteLine(e.StackTrace);
						Assert.Fail(testName + ": exception when searching: " + e);
					}
					int result2 = hits.Length;
					if (success)
					{
						if (result2 != END_COUNT)
						{
							Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
						}
					}
					else
					{
						// On hitting exception we still may have added
						// all docs:
						if (result2 != START_COUNT && result2 != END_COUNT)
						{
							System.Console.Error.WriteLine(err.StackTrace);
							Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT);
						}
					}
					
					searcher.Close();
					newReader.Close();
					
					if (result2 == END_COUNT)
					{
						// All changes are visible; no need to retry pass 1.
						break;
					}
				}
				
				dir.Close();
				
				// Try again with 10 more bytes of free space:
				diskFree += 10;
			}
			
			startDir.Close();
		}
		public virtual void  TestOptimizeTempSpaceUsage()
		{
			// Build a 500-doc index, then verify that Optimize() never uses
			// more than 2X the starting index size in temporary disk space.
			MockRAMDirectory dir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			for (int docNum = 0; docNum < 500; docNum++)
			{
				AddDocWithIndex(writer, docNum);
			}
			writer.Close();
			
			// Total on-disk size of the freshly built index.
			long startDiskUsage = 0;
			foreach (System.String fileName in dir.ListAll())
			{
				startDiskUsage += dir.FileLength(fileName);
			}
			
			// Track peak usage while optimizing the existing index in place.
			dir.ResetMaxUsedSizeInBytes();
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
			writer.Optimize();
			writer.Close();
			long maxDiskUsage = dir.GetMaxUsedSizeInBytes();
			
			Assert.IsTrue(maxDiskUsage <= 2 * startDiskUsage, "optimized used too much temporary space: starting usage was " + startDiskUsage + " bytes; max temp usage was " + maxDiskUsage + " but should have been " + (2 * startDiskUsage) + " (= 2X starting usage)");
			dir.Close();
		}
        public void testNoUnwantedTVFiles()
        {
            // Indexing fields WITHOUT term vectors must not produce any term
            // vector files (.tvf/.tvx/.tvd), even when a tiny RAM buffer forces
            // frequent flushes into many small non-compound segments.
            Directory dir = new MockRAMDirectory();
            IndexWriter indexWriter = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            indexWriter.SetRAMBufferSizeMB(0.01);  // flush very often
            indexWriter.SetUseCompoundFile(false); // keep per-extension files visible

            String BIG = "alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
            BIG = BIG + BIG + BIG + BIG;

            for (int i = 0; i < 2; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", i.ToString() + BIG, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
                doc.Add(new Field("str", i.ToString() + BIG, Field.Store.YES, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("str2", i.ToString() + BIG, Field.Store.YES, Field.Index.ANALYZED));
                doc.Add(new Field("str3", i.ToString() + BIG, Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                indexWriter.AddDocument(doc);
            }

            indexWriter.Close();

            _TestUtil.CheckIndex(dir);

            AssertNoUnreferencedFiles(dir, "no tv files");
            foreach (String file in dir.ListAll())
            {
                // Ordinal comparison (CA1310): extensions are ASCII, and the
                // culture-sensitive EndsWith default can misbehave under some
                // locales.
                Assert.IsFalse(file.EndsWith(IndexFileNames.VECTORS_FIELDS_EXTENSION, StringComparison.Ordinal));
                Assert.IsFalse(file.EndsWith(IndexFileNames.VECTORS_INDEX_EXTENSION, StringComparison.Ordinal));
                Assert.IsFalse(file.EndsWith(IndexFileNames.VECTORS_DOCUMENTS_EXTENSION, StringComparison.Ordinal));
            }

            dir.Close();
        }