Inheritance: RAMDirectory
コード例 #1
1
ファイル: TestRollback.cs プロジェクト: Mpdreamz/lucene.net
        public void TestRollbackIntegrityWithBufferFlush()
        {
            // Build a baseline index of five documents keyed by "pk".
            Directory dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            for (int docNum = 0; docNum < 5; docNum++)
            {
                Document d = new Document();
                d.Add(new Field("pk", docNum.ToString(), Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                writer.AddDocument(d);
            }
            writer.Close();

            // If buffer size is small enough to cause a flush, errors ensue...
            writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            writer.SetMaxBufferedDocs(2);

            Term pkTerm = new Term("pk", "");
            for (int docNum = 0; docNum < 3; docNum++)
            {
                Document d = new Document();
                String key = docNum.ToString();
                d.Add(new Field("pk", key, Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                d.Add(new Field("text", "foo", Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                writer.UpdateDocument(pkTerm.CreateTerm(key), d);
            }
            // Abandon the updates; the original five docs must survive.
            writer.Rollback();

            IndexReader r = IndexReader.Open(dir, true);
            Assert.AreEqual(5, r.NumDocs(), "index should contain same number of docs post rollback");
            r.Close();
            dir.Close();
        }
コード例 #2
0
        public virtual void  TestCompressionTools()
        {
            // One binary-compressed and one string-compressed field holding the same value.
            IFieldable binaryField = new Field("binaryCompressed", CompressionTools.Compress(System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed)), Field.Store.YES);
            IFieldable stringField = new Field("stringCompressed", CompressionTools.CompressString(binaryValCompressed), Field.Store.YES);

            Document doc = new Document();
            doc.Add(binaryField);
            doc.Add(stringField);

            /* add the doc to a ram index */
            MockRAMDirectory dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
            writer.AddDocument(doc, null);
            writer.Close();

            /* open a reader and fetch the document */
            IndexReader reader = IndexReader.Open((Directory)dir, false, null);
            Document stored = reader.Document(0, null);
            Assert.IsTrue(stored != null);

            /* decompress both fields and compare their content with the original value */
            System.String roundTripped = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(CompressionTools.Decompress(stored.GetBinaryValue("binaryCompressed", null))));
            Assert.IsTrue(roundTripped.Equals(binaryValCompressed));
            Assert.IsTrue(CompressionTools.DecompressString(stored.GetBinaryValue("stringCompressed", null)).Equals(binaryValCompressed));

            reader.Close();
            dir.Close();
        }
コード例 #3
0
        public virtual void TestNoExtraFiles()
        {
            RAMDirectory directory = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED, null);

            for (int iter = 0; iter < 7; iter++)
            {
                // Each pass indexes with a concurrent merge scheduler and a tiny buffer.
                writer.SetMergeScheduler(new ConcurrentMergeScheduler(), null);
                writer.SetMaxBufferedDocs(2);

                for (int docNum = 0; docNum < 21; docNum++)
                {
                    Document doc = new Document();
                    doc.Add(new Field("content", "a b c", Field.Store.NO, Field.Index.ANALYZED));
                    writer.AddDocument(doc, null);
                }

                // After closing, no stray files may remain behind.
                writer.Close();
                TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles");
                // Reopen
                writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED, null);
            }
            writer.Close();
            directory.Close();
        }
コード例 #4
0
ファイル: TestRAMDirectory.cs プロジェクト: stgwilli/ravendb
        public virtual void  TestRAMDirectorySize()
        {
            MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
            IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);

            writer.Optimize();

            // The directory's tracked size must agree with a full recount.
            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());

            // Hammer the writer from several threads, then re-check the size.
            SupportClass.ThreadClass[] workers = new SupportClass.ThreadClass[numThreads];
            for (int t = 0; t < numThreads; t++)
            {
                workers[t] = new AnonymousClassThread(t, writer, ramDir, this);
            }
            foreach (SupportClass.ThreadClass worker in workers)
            {
                worker.Start();
            }
            foreach (SupportClass.ThreadClass worker in workers)
            {
                worker.Join();
            }

            writer.Optimize();
            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());

            writer.Close();
        }
コード例 #5
0
        public virtual void  TestNoPrxFile()
        {
            Directory ram = new MockRAMDirectory();
            Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
            IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED, null);

            writer.SetMaxBufferedDocs(3);
            writer.MergeFactor = 2;
            writer.UseCompoundFile = false;

            // Every doc carries a single field that omits term freqs/positions,
            // so no .prx file should ever be written.
            Document doc = new Document();
            Field field = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
            field.OmitTermFreqAndPositions = true;
            doc.Add(field);

            for (int i = 0; i < 30; i++)
            {
                writer.AddDocument(doc, null);
            }

            writer.Commit(null);
            AssertNoPrx(ram);

            // force merge
            writer.Optimize(null);
            // flush
            writer.Close();

            AssertNoPrx(ram);
            _TestUtil.CheckIndex(ram);
            ram.Close();
        }
コード例 #6
0
		public virtual void  TestSorting()
		{
			Directory directory = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			writer.SetMaxBufferedDocs(2);
			writer.SetMergeFactor(1000);

			// Three docs with increasing "ipod" term frequency, three with "boosted".
			writer.AddDocument(Adoc(new System.String[]{"id", "a", "title", "ipod", "str_s", "a"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "b", "title", "ipod ipod", "str_s", "b"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "c", "title", "ipod ipod ipod", "str_s", "c"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "x", "title", "boosted", "str_s", "x"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "y", "title", "boosted boosted", "str_s", "y"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "z", "title", "boosted boosted boosted", "str_s", "z"}));

			IndexReader reader = writer.GetReader();
			writer.Close();

			IndexSearcher searcher = new IndexSearcher(reader);
			// Exercise both modes of RunTest.
			RunTest(searcher, true);
			RunTest(searcher, false);

			searcher.Close();
			reader.Close();
			directory.Close();
		}
コード例 #7
0
ファイル: TestRAMDirectory.cs プロジェクト: stgwilli/ravendb
        public virtual void  TestRAMDirectoryString()
        {
            MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);

            // Tracked size must match a full recount.
            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());

            // Open a reader to verify the document count.
            IndexReader reader = IndexReader.Open(ramDir);
            Assert.AreEqual(docsToAdd, reader.NumDocs());

            // Open a searcher to check that every document is retrievable.
            IndexSearcher searcher = new IndexSearcher(reader);
            for (int docId = 0; docId < docsToAdd; docId++)
            {
                Document doc = searcher.Doc(docId);
                Assert.IsTrue(doc.GetField("content") != null);
            }

            // cleanup
            reader.Close();
            searcher.Close();
        }
コード例 #8
0
		public virtual void  TestMultiValueSource()
		{
			Directory dir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
			Document doc = new Document();
			Field field = new Field("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(field);

			// Commit after every add so the reader ends up with many segments.
			for (int i = 0; i < 17; i++)
			{
				field.SetValue("" + i);
				writer.AddDocument(doc);
				writer.Commit();
			}

			IndexReader reader = writer.GetReader();
			writer.Close();

			Assert.IsTrue(reader.GetSequentialSubReaders().Length > 1);

			// The multi-segment wrapper must agree with the plain source.
			ValueSource source = new IntFieldSource("field");
			DocValues direct = source.GetValues(reader);
			DocValues multi = new MultiValueSource(source).GetValues(reader);

			for (int docId = 0; docId < reader.MaxDoc(); docId++)
			{
				Assert.AreEqual(direct.IntVal(docId), docId);
				Assert.AreEqual(multi.IntVal(docId), docId);
			}

			Lucene.Net.Search.FieldCache_Fields.DEFAULT.PurgeAllCaches();

			reader.Close();
			dir.Close();
		}
コード例 #9
0
		public virtual void  TestRandom()
		{
			// Index the same random documents two different ways and verify
			// that both resulting indexes are equivalent on the "id" field.
			Directory randomDir = new MockRAMDirectory();
			Directory serialDir = new MockRAMDirectory();
			System.Collections.IDictionary randomDocs = IndexRandom(10, 100, 100, randomDir);
			IndexSerial(randomDocs, serialDir);
			VerifyEquals(randomDir, serialDir, "id");
		}
コード例 #10
0
        public virtual void  TestMixedMerge()
        {
            Directory ram = new MockRAMDirectory();
            Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
            IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED, null);

            writer.SetMaxBufferedDocs(3);
            writer.MergeFactor = 2;

            // First batch: f1 keeps term freqs, f2 omits them.
            Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
            Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
            f2.OmitTermFreqAndPositions = true;

            Document doc = new Document();
            doc.Add(f1);
            doc.Add(f2);
            for (int i = 0; i < 30; i++)
            {
                writer.AddDocument(doc, null);
            }

            // Second batch reverses the omit flags; after the SegmentMerger runs,
            // the omit bit must end up set for BOTH fields (once omitted in any
            // segment, always omitted after merging).
            doc = new Document();
            f1.OmitTermFreqAndPositions = true;
            doc.Add(f1);
            f2.OmitTermFreqAndPositions = false;
            doc.Add(f2);
            for (int i = 0; i < 30; i++)
            {
                writer.AddDocument(doc, null);
            }

            // force merge
            writer.Optimize(null);
            // flush
            writer.Close();

            _TestUtil.CheckIndex(ram);

            SegmentReader reader = SegmentReader.GetOnlySegmentReader(ram, null);
            FieldInfos fieldInfos = reader.FieldInfos();

            Assert.IsTrue(fieldInfos.FieldInfo("f1").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should be set.");
            Assert.IsTrue(fieldInfos.FieldInfo("f2").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should be set.");

            reader.Close();
            ram.Close();
        }
コード例 #11
0
ファイル: TestCrash.cs プロジェクト: ravendb/lucenenet
        private void  Crash(IndexWriter writer)
        {
            // Simulate a machine crash: drop unsynced writes, let the merge
            // scheduler drain, then clear the crash flag so the directory is
            // usable again.
            MockRAMDirectory crashedDir = (MockRAMDirectory)writer.Directory;
            ConcurrentMergeScheduler scheduler = (ConcurrentMergeScheduler)writer.MergeScheduler;

            crashedDir.Crash();
            scheduler.Sync();
            crashedDir.ClearCrash();
        }
コード例 #12
0
        public virtual void TestFlushExceptions()
        {
            // Index with a failure hook that throws an IOException from inside
            // DoFlush, and verify the writer survives every induced failure and
            // loses no documents.
            MockRAMDirectory directory = new MockRAMDirectory();
            FailOnlyOnFlush failure = new FailOnlyOnFlush();
            directory.FailOn(failure);

            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED, null);
            ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
            writer.SetMergeScheduler(cms, null);
            writer.SetMaxBufferedDocs(2);

            Document doc = new Document();
            Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
            doc.Add(idField);

            // Counts the extra docs added while provoking flush failures; the
            // final doc count must account for them.
            int extraCount = 0;

            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 20; j++)
                {
                    idField.SetValue(System.Convert.ToString(i * 20 + j));
                    writer.AddDocument(doc, null);
                }

                while (true)
                {
                    // must cycle here because sometimes the merge flushes
                    // the doc we just added and so there's nothing to
                    // flush, and we don't hit the exception
                    writer.AddDocument(doc, null);
                    failure.SetDoFail();
                    try
                    {
                        writer.Flush(true, false, true, null);
                        if (failure.hitExc)
                        {
                            Assert.Fail("failed to hit IOException");
                        }
                        extraCount++;
                    }
                    // FIX: the exception variable was declared but never used
                    // (compiler warning CS0168); catch by type only.
                    catch (System.IO.IOException)
                    {
                        failure.ClearDoFail();
                        break;
                    }
                }
            }

            writer.Close();

            // All 200 regular docs plus every extra doc must have made it in.
            IndexReader reader = IndexReader.Open((Directory)directory, true, null);
            Assert.AreEqual(200 + extraCount, reader.NumDocs());
            reader.Close();
            directory.Close();
        }
コード例 #13
0
ファイル: TestCrash.cs プロジェクト: ravendb/lucenenet
        public virtual void  TestCrashWhileIndexing()
        {
            // Crash mid-indexing; whatever survives must still be a readable
            // index holding fewer docs than a completed run (157) would.
            IndexWriter writer = InitIndex();
            MockRAMDirectory dir = (MockRAMDirectory)writer.Directory;

            Crash(writer);

            IndexReader reader = IndexReader.Open((Directory)dir, true, null);
            Assert.IsTrue(reader.NumDocs() < 157);
        }
コード例 #14
0
        public virtual void  TestFilterIndexReader_Renamed()
        {
            RAMDirectory directory = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            // Index three small docs; TestReader filters terms, positions and docs.
            Document d1 = new Document();
            d1.Add(new Field("default", "one two", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(d1, null);

            Document d2 = new Document();
            d2.Add(new Field("default", "one three", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(d2, null);

            Document d3 = new Document();
            d3.Add(new Field("default", "two four", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(d3, null);

            writer.Close();

            IndexReader reader = new TestReader(IndexReader.Open((Directory)directory, true, null));

            Assert.IsTrue(reader.IsOptimized());

            // The filtering reader only exposes terms containing 'e'.
            TermEnum terms = reader.Terms(null);
            while (terms.Next(null))
            {
                Assert.IsTrue(terms.Term.Text.IndexOf('e') != -1);
            }
            terms.Close();

            // The filtering reader only exposes odd doc numbers.
            TermPositions positions = reader.TermPositions(new Term("default", "one"), null);
            while (positions.Next(null))
            {
                Assert.IsTrue((positions.Doc % 2) == 1);
            }
            // FIX: positions was never closed (its siblings terms/td both are) —
            // close it to avoid leaking the enumerator.
            positions.Close();

            int NUM_DOCS = 3;

            TermDocs td = reader.TermDocs(null);
            for (int i = 0; i < NUM_DOCS; i++)
            {
                Assert.IsTrue(td.Next(null));
                Assert.AreEqual(i, td.Doc);
                Assert.AreEqual(1, td.Freq);
            }
            td.Close();
            reader.Close();
            directory.Close();
        }
コード例 #15
0
        public virtual void  TestNormsRefCounting()
        {
            // Verifies that cloned readers share a single norms byte buffer via
            // refcounting, and that calling SetNorm forces a private copy
            // (copy-on-write) whose refcount restarts at 1.
            Directory dir1 = new MockRAMDirectory();

            TestIndexReaderReopen.CreateIndex(dir1, false);
            IndexReader reader1 = IndexReader.Open(dir1, false);

            IndexReader   reader2C        = (IndexReader)reader1.Clone();
            SegmentReader segmentReader2C = SegmentReader.GetOnlySegmentReader(reader2C);

            segmentReader2C.Norms("field1"); // load the norms for the field
            Norm reader2CNorm = segmentReader2C.norms_ForNUnit["field1"];

            // reader1 and its clone both reference the same norms bytes
            Assert.IsTrue(reader2CNorm.BytesRef().RefCount() == 2, "reader2CNorm.bytesRef()=" + reader2CNorm.BytesRef());



            IndexReader   reader3C        = (IndexReader)reader2C.Clone();
            SegmentReader segmentReader3C = SegmentReader.GetOnlySegmentReader(reader3C);
            Norm          reader3CCNorm   = segmentReader3C.norms_ForNUnit["field1"];

            // each additional clone bumps the shared refcount
            Assert.AreEqual(3, reader3CCNorm.BytesRef().RefCount());

            // edit a norm and the refcount should be 1
            IndexReader   reader4C        = (IndexReader)reader3C.Clone();
            SegmentReader segmentReader4C = SegmentReader.GetOnlySegmentReader(reader4C);

            Assert.AreEqual(4, reader3CCNorm.BytesRef().RefCount());
            reader4C.SetNorm(5, "field1", 0.33f);

            // generate a cannot update exception in reader1
            Assert.Throws <LockObtainFailedException>(() => reader3C.SetNorm(1, "field1", 0.99f), "did not hit expected exception");

            // norm values should be different
            Assert.IsTrue(Similarity.DecodeNorm(segmentReader3C.Norms("field1")[5]) != Similarity.DecodeNorm(segmentReader4C.Norms("field1")[5]));
            Norm reader4CCNorm = segmentReader4C.norms_ForNUnit["field1"];

            // reader4C's SetNorm detached it from the shared buffer: the shared
            // count drops back to 3 and reader4C owns a private copy (count 1)
            Assert.AreEqual(3, reader3CCNorm.BytesRef().RefCount());
            Assert.AreEqual(1, reader4CCNorm.BytesRef().RefCount());

            IndexReader   reader5C        = (IndexReader)reader4C.Clone();
            SegmentReader segmentReader5C = SegmentReader.GetOnlySegmentReader(reader5C);
            Norm          reader5CCNorm   = segmentReader5C.norms_ForNUnit["field1"];

            // editing the newest clone splits it off again with its own copy
            reader5C.SetNorm(5, "field1", 0.7f);
            Assert.AreEqual(1, reader5CCNorm.BytesRef().RefCount());

            reader5C.Close();
            reader4C.Close();
            reader3C.Close();
            reader2C.Close();
            reader1.Close();
            dir1.Close();
        }
コード例 #16
0
        public virtual void TestNoWaitClose()
        {
            RAMDirectory directory = new MockRAMDirectory();

            Document doc = new Document();
            Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
            doc.Add(idField);

            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED, null);

            for (int iter = 0; iter < 10; iter++)
            {
                writer.SetMergeScheduler(new ConcurrentMergeScheduler(), null);
                writer.SetMaxBufferedDocs(2);
                writer.MergeFactor = 100;

                // Add 201 docs, then delete 20 of them (every fifth id).
                for (int j = 0; j < 201; j++)
                {
                    idField.SetValue(System.Convert.ToString(iter * 201 + j));
                    writer.AddDocument(doc, null);
                }

                int delID = iter * 201;
                for (int j = 0; j < 20; j++)
                {
                    writer.DeleteDocuments(null, new Term("id", delID.ToString()));
                    delID += 5;
                }

                // Force a bunch of merge threads to kick off so we
                // stress out aborting them on close:
                writer.MergeFactor = 3;
                writer.AddDocument(doc, null);
                writer.Commit(null);

                // Close without waiting for the merges to finish.
                writer.Close(false);

                // 201 adds + 1 extra - 20 deletes = 182 live docs per iteration.
                IndexReader reader = IndexReader.Open((Directory)directory, true, null);
                Assert.AreEqual((1 + iter) * 182, reader.NumDocs());
                reader.Close();

                // Reopen
                writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED, null);
            }
            writer.Close();

            directory.Close();
        }
コード例 #17
0
ファイル: TestCrash.cs プロジェクト: ravendb/lucenenet
        public virtual void  TestWriterAfterCrash()
        {
            // After a crash, a new writer must be able to reopen the index; the
            // result is readable but has lost unsynced docs (< 314 total).
            IndexWriter writer = InitIndex();
            MockRAMDirectory dir = (MockRAMDirectory)writer.Directory;

            // Allow the post-crash writer to reuse file names from the crashed
            // session.
            dir.SetPreventDoubleWrite(false);
            Crash(writer);
            writer = InitIndex(dir);
            writer.Close();

            IndexReader reader = IndexReader.Open((Directory)dir, false, null);
            Assert.IsTrue(reader.NumDocs() < 314);
        }
コード例 #18
0
            public override /*virtual*/ void  Eval(MockRAMDirectory dir)
            {
                // Throw an IOException whenever this hook is reached from inside
                // a DoMerge call, to simulate a failure during merging.
                System.Diagnostics.StackTrace trace = StackTraceHelper.Create();
                foreach (System.Diagnostics.StackFrame frame in trace.GetFrames())
                {
                    if ("DoMerge".Equals(frame.GetMethod().Name))
                    {
                        throw new System.IO.IOException("now failing during merge");
                    }
                }
            }
コード例 #19
0
        public virtual void  TestDeleteMerging()
        {
            RAMDirectory directory = new MockRAMDirectory();

            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED, null);
            writer.SetMergeScheduler(new ConcurrentMergeScheduler(), null);

            LogDocMergePolicy mergePolicy = new LogDocMergePolicy(writer);
            writer.SetMergePolicy(mergePolicy);

            // Force degenerate merging so we can get a mix of
            // merging of segments with and without deletes at the
            // start:
            mergePolicy.MinMergeDocs = 1000;

            Document doc = new Document();
            Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
            doc.Add(idField);

            for (int i = 0; i < 10; i++)
            {
                // Add 100 docs, then delete every tenth id starting at i.
                for (int j = 0; j < 100; j++)
                {
                    idField.SetValue(System.Convert.ToString(i * 100 + j));
                    writer.AddDocument(doc, null);
                }

                for (int delID = i; delID < 100 * (1 + i); delID += 10)
                {
                    writer.DeleteDocuments(null, new Term("id", "" + delID));
                }

                writer.Commit(null);
            }

            writer.Close();

            // Verify that we did not lose any deletes...
            IndexReader reader = IndexReader.Open((Directory)directory, true, null);
            Assert.AreEqual(450, reader.NumDocs());
            reader.Close();
            directory.Close();
        }
コード例 #20
0
        public virtual void  TestBinaryFieldInIndex()
        {
            IFieldable binaryFldStored = new Field("binaryStored", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValStored), Field.Store.YES);
            IFieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);

            // binary fields with store off are not allowed
            Assert.Throws <ArgumentException>(
                () => new Field("fail", System.Text.Encoding.UTF8.GetBytes(binaryValStored), Field.Store.NO));

            Document doc = new Document();
            doc.Add(binaryFldStored);
            doc.Add(stringFldStored);

            /* test for field count */
            Assert.AreEqual(2, doc.fields_ForNUnit.Count);

            /* add the doc to a ram index */
            MockRAMDirectory dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
            writer.AddDocument(doc, null);
            writer.Close();

            /* open a reader and fetch the document */
            IndexReader reader = IndexReader.Open((Directory)dir, false, null);
            Document stored = reader.Document(0, null);
            Assert.IsTrue(stored != null);

            /* the binary stored field must round-trip to the original value */
            System.String binaryRoundTrip = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(stored.GetBinaryValue("binaryStored", null)));
            Assert.IsTrue(binaryRoundTrip.Equals(binaryValStored));

            /* the string field must round-trip as well */
            System.String stringRoundTrip = stored.Get("stringStored", null);
            Assert.IsTrue(stringRoundTrip.Equals(binaryValStored));

            /* delete the document from index */
            reader.DeleteDocument(0, null);
            Assert.AreEqual(0, reader.NumDocs());

            reader.Close();
            dir.Close();
        }
コード例 #21
0
        public virtual void  TestBasic()
        {
            // Route .fdt/.fdx files to the primary dir, everything else to the
            // secondary dir.
            HashSet <string> fileExtensions = new HashSet <string>();
            fileExtensions.Add("fdt");
            fileExtensions.Add("fdx");

            Directory primaryDir = new MockRAMDirectory();
            RAMDirectory secondaryDir = new MockRAMDirectory();

            FileSwitchDirectory fsd = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);
            IndexWriter writer = new IndexWriter(fsd, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED, null);
            writer.UseCompoundFile = false;
            TestIndexWriterReader.CreateIndexNoClose(true, "ram", writer);
            IndexReader reader = writer.GetReader(null);
            Assert.AreEqual(100, reader.MaxDoc);
            writer.Commit(null);

            // we should see only fdx,fdt files here
            System.String[] files = primaryDir.ListAll(null);
            Assert.IsTrue(files.Length > 0);
            foreach (System.String file in files)
            {
                Assert.IsTrue(fileExtensions.Contains(FileSwitchDirectory.GetExtension(file)));
            }

            // we should not see fdx,fdt files here
            files = secondaryDir.ListAll(null);
            Assert.IsTrue(files.Length > 0);
            foreach (System.String file in files)
            {
                Assert.IsFalse(fileExtensions.Contains(FileSwitchDirectory.GetExtension(file)));
            }

            reader.Close();
            writer.Close();

            // The union listing must contain no null entries.
            files = fsd.ListAll(null);
            foreach (System.String file in files)
            {
                Assert.IsNotNull(file);
            }
            fsd.Close();
        }
コード例 #22
0
 public override void  Eval(MockRAMDirectory dir)
 {
     // Only fail on non-merge threads, and only when reached from inside
     // a DoFlush call.
     if (doFail && !(Thread.CurrentThread.Name ?? "").Contains("Merge Thread"))
     {
         System.Diagnostics.StackTrace trace = StackTraceHelper.Create();
         foreach (System.Diagnostics.StackFrame frame in trace.GetFrames())
         {
             if ("DoFlush".Equals(frame.GetMethod().Name))
             {
                 hitExc = true; // record that the failure actually fired
                 //Console.WriteLine(trace);
                 throw new System.IO.IOException("now failing during flush");
             }
         }
     }
 }
コード例 #23
0
ファイル: TestCrash.cs プロジェクト: ravendb/lucenenet
        public virtual void  TestCrashAfterCloseNoWait()
        {
            // Close(false) aborts running merges; a crash right after the close
            // must still leave all 157 committed docs readable.
            IndexWriter writer = InitIndex();
            MockRAMDirectory dir = (MockRAMDirectory)writer.Directory;

            writer.Close(false);
            dir.Crash();

            /*
             * String[] l = dir.list();
             * Arrays.sort(l);
             * for(int i=0;i<l.length;i++)
             * System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
             */
            IndexReader reader = IndexReader.Open((Directory)dir, false, null);
            Assert.AreEqual(157, reader.NumDocs());
        }
コード例 #24
0
 public virtual void  TestBasic()
 {
     // Send .fdt/.fdx to the primary directory and every other file to the
     // secondary directory.
     HashSet<string> fileExtensions = new HashSet<string>();
     fileExtensions.Add("fdt");
     fileExtensions.Add("fdx");

     Directory primaryDir = new MockRAMDirectory();
     RAMDirectory secondaryDir = new MockRAMDirectory();

     FileSwitchDirectory fsd = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);
     IndexWriter writer = new IndexWriter(fsd, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer.UseCompoundFile = false;
     TestIndexWriterReader.CreateIndexNoClose(true, "ram", writer);
     IndexReader reader = writer.GetReader();
     Assert.AreEqual(100, reader.MaxDoc);
     writer.Commit();

     // we should see only fdx,fdt files here
     System.String[] files = primaryDir.ListAll();
     Assert.IsTrue(files.Length > 0);
     foreach (System.String file in files)
     {
         Assert.IsTrue(fileExtensions.Contains(FileSwitchDirectory.GetExtension(file)));
     }

     // we should not see fdx,fdt files here
     files = secondaryDir.ListAll();
     Assert.IsTrue(files.Length > 0);
     foreach (System.String file in files)
     {
         Assert.IsFalse(fileExtensions.Contains(FileSwitchDirectory.GetExtension(file)));
     }

     reader.Close();
     writer.Close();

     // The union listing must contain no null entries.
     files = fsd.ListAll();
     foreach (System.String file in files)
     {
         Assert.IsNotNull(file);
     }
     fsd.Close();
 }
コード例 #25
0
        public virtual void  TestNormsClose()
        {
            Directory dir1 = new MockRAMDirectory();
            TestIndexReaderReopen.CreateIndex(dir1, false);
            SegmentReader reader1 = SegmentReader.GetOnlySegmentReader(dir1);

            // Load norms for field1 so the shared byte buffer exists.
            reader1.Norms("field1");
            Norm norm = reader1.norms_ForNUnit["field1"];
            SegmentReader.Ref normBytesRef = norm.BytesRef();

            // Cloning bumps the shared refcount to 2; closing the original
            // drops it back to 1.
            SegmentReader reader2 = (SegmentReader)reader1.Clone();
            Assert.AreEqual(2, norm.BytesRef().RefCount());
            reader1.Close();
            Assert.AreEqual(1, normBytesRef.RefCount());

            reader2.Norms("field1");
            reader2.Close();
            dir1.Close();
        }
コード例 #26
0
        public virtual void  TestMissingTerms()
        {
            System.String fieldName = "field1";
            MockRAMDirectory rd = new MockRAMDirectory();
            IndexWriter w = new IndexWriter(rd, new KeywordAnalyzer(), MaxFieldLength.UNLIMITED, null);

            // Index 100 docs whose terms are multiples of 10: "0", "10", ..., "990".
            for (int i = 0; i < 100; i++)
            {
                Document doc = new Document();
                int term = i * 10;            //terms are units of 10;
                doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.NOT_ANALYZED));
                w.AddDocument(doc, null);
            }
            w.Close();

            IndexReader reader = IndexReader.Open((Directory)rd, true, null);
            IndexSearcher searcher = new IndexSearcher(reader);
            int numDocs = reader.NumDocs();

            ScoreDoc[] results;
            MatchAllDocsQuery q = new MatchAllDocsQuery();

            // MODERNIZED: use a generic List<string> instead of the legacy
            // non-generic ArrayList + reflection-based ToArray(typeof(...)).

            // "5" was never indexed, so the filter must match nothing.
            System.Collections.Generic.List<string> terms = new System.Collections.Generic.List<string> { "5" };
            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs, null).ScoreDocs;
            Assert.AreEqual(0, results.Length, "Must match nothing");

            // A single indexed term matches exactly one doc.
            terms = new System.Collections.Generic.List<string> { "10" };
            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs, null).ScoreDocs;
            Assert.AreEqual(1, results.Length, "Must match 1");

            // Two indexed terms match two docs.
            terms = new System.Collections.Generic.List<string> { "10", "20" };
            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs, null).ScoreDocs;
            Assert.AreEqual(2, results.Length, "Must match 2");

            reader.Close();
            rd.Close();
        }
コード例 #27
0
ファイル: TestCrash.cs プロジェクト: ravendb/lucenenet
        // Prepares a writer on the given directory and fills it with 157 copies
        // of a fixed two-field document; the writer is returned still open.
        private IndexWriter InitIndex(MockRAMDirectory dir)
        {
            // Writes happen without locking so a simulated crash cannot deadlock.
            dir.SetLockFactory(NoLockFactory.Instance);

            IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
            w.SetMaxBufferedDocs(10);
            // Merges will be interrupted on purpose; keep their exceptions quiet.
            ((ConcurrentMergeScheduler)w.MergeScheduler).SetSuppressExceptions();

            Document d = new Document();
            d.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
            d.Add(new Field("id", "0", Field.Store.YES, Field.Index.ANALYZED));

            int remaining = 157;
            while (remaining-- > 0)
            {
                w.AddDocument(d, null);
            }

            return w;
        }
コード例 #28
0
        // Installs a subclassed ConcurrentMergeScheduler plus a directory that
        // fails merges on purpose, then checks the subclass hooks all fired.
        public virtual void  TestSubclassConcurrentMergeScheduler()
        {
            MockRAMDirectory dir = new MockRAMDirectory();

            // Inject a failure that triggers only during merges.
            dir.FailOn(new FailOnlyOnMerge());

            Document doc     = new Document();
            Field    idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);

            doc.Add(idField);

            IndexWriter      writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
            MyMergeScheduler ms     = new MyMergeScheduler(this);

            writer.SetMergeScheduler(ms, null);
            // Tiny buffer + disabled RAM flushing forces doc-count-based segment
            // creation, which in turn forces merges to happen.
            writer.SetMaxBufferedDocs(2);
            writer.SetRAMBufferSizeMB(Lucene.Net.Index.IndexWriter.DISABLE_AUTO_FLUSH);
            for (int i = 0; i < 20; i++)
            {
                writer.AddDocument(doc, null);
            }

            // Wait for all background merge threads before closing.
            ms.Sync();
            writer.Close();

            Console.WriteLine("merge thread");
            Assert.IsTrue(mergeThreadCreated);

            Console.WriteLine("merge called");
            Assert.IsTrue(mergeCalled);

            Console.WriteLine("exec called");
            Assert.IsTrue(excCalled);
            Console.WriteLine("exec true");
            dir.Close();

            Console.WriteLine("Last");
            Assert.IsTrue(ConcurrentMergeScheduler.AnyUnhandledExceptions());
        }
コード例 #29
0
		// public TestIndexReader(System.String name)
		// {
		// }
		
		// A reader is "current" only until the index it opened is modified.
		public virtual void  TestIsCurrent()
		{
			RAMDirectory dir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
			AddDocumentWithFields(writer);
			writer.Close();

			// A freshly opened reader sees the latest commit.
			IndexReader reader = IndexReader.Open(dir);
			Assert.IsTrue(reader.IsCurrent());

			// Appending another document makes the reader stale.
			writer = new IndexWriter(dir, new StandardAnalyzer(), false);
			AddDocumentWithFields(writer);
			writer.Close();
			Assert.IsFalse(reader.IsCurrent());

			// Re-creating the index from scratch also leaves the reader stale.
			writer = new IndexWriter(dir, new StandardAnalyzer(), true);
			AddDocumentWithFields(writer);
			writer.Close();
			Assert.IsFalse(reader.IsCurrent());

			reader.Close();
			dir.Close();
		}
コード例 #30
0
ファイル: TestCrash.cs プロジェクト: ravendb/lucenenet
        // Builds an index, reopens a second writer on the same directory, then
        // simulates a crash and verifies the index is still readable afterwards.
        public virtual void  TestCrashAfterReopen()
        {
            IndexWriter      writer = InitIndex();
            MockRAMDirectory dir    = (MockRAMDirectory)writer.Directory;

            writer.Close();
            writer = InitIndex(dir);
            // 157 docs from the first writer plus 157 from the second.
            Assert.AreEqual(314, writer.MaxDoc());
            Crash(writer);

            /*
             * System.out.println("\n\nTEST: open reader");
             * String[] l = dir.list();
             * Arrays.sort(l);
             * for(int i=0;i<l.length;i++)
             * System.out.println("file " + i + " = " + l[i] + " " +
             * dir.fileLength(l[i]) + " bytes");
             */

            IndexReader reader = IndexReader.Open((Directory)dir, false, null);

            // At least the first writer's committed docs must survive the crash.
            Assert.IsTrue(reader.NumDocs() >= 157);

            // Fix: close the reader and directory — the original leaked both.
            reader.Close();
            dir.Close();
        }
コード例 #31
0
		// Runs many small randomized rounds, each with a fresh mix of indexing
		// parameters, comparing concurrent vs. serial indexing of the same docs.
		public virtual void  TestMultiConfig()
		{
			// increase iterations for better testing
			for (int round = 0; round < 100; round++)
			{
				// Randomize the knobs; the draw order must stay stable so runs
				// remain reproducible for a given random seed.
				sameFieldOrder = r.NextDouble() > 0.5;
				autoCommit = r.NextDouble() > 0.5;
				mergeFactor = r.Next(3) + 2;
				maxBufferedDocs = r.Next(3) + 2;
				seed++;

				int threadCount = r.Next(5) + 1;
				int iterations = r.Next(10) + 1;
				int idRange = r.Next(20) + 1;

				Directory concurrentDir = new MockRAMDirectory();
				Directory serialDir = new MockRAMDirectory();
				System.Collections.IDictionary docs = IndexRandom(threadCount, iterations, idRange, concurrentDir);
				IndexSerial(docs, serialDir);
				VerifyEquals(concurrentDir, serialDir, "id");
			}
		}
コード例 #32
0
		// Indexes random docs with 10 threads into one directory and the same
		// docs serially into another, then verifies both indexes agree by "id".
		public virtual void  TestRandom()
		{
			r = NewRandom();
			Directory threadedDir = new MockRAMDirectory();
			Directory serialDir = new MockRAMDirectory();
			System.Collections.IDictionary docs = IndexRandom(10, 100, 100, threadedDir);
			IndexSerial(docs, serialDir);
			VerifyEquals(threadedDir, serialDir, "id");
		}
コード例 #33
0
		// Exercises the near-real-time reader: index randomly, grab the writer's
		// reader, commit, and verify the reader matches the directory contents.
		public virtual void  TestRandomIWReader()
		{
			this.r = NewRandom();
			Directory dir = new MockRAMDirectory();

			// TODO: verify equals using IW.getReader
			DocsAndWriter dw = IndexRandomIWReader(10, 100, 100, dir);
			IndexReader nrtReader = dw.writer.GetReader();
			dw.writer.Commit();
			VerifyEquals(nrtReader, dir, "id");

			nrtReader.Close();
			dw.writer.Close();
			dir.Close();
		}
コード例 #34
0
		// Verifies MockRAMDirectory's incrementally tracked byte size stays in
		// sync with a full recomputation while many threads add documents.
		public virtual void  TestRAMDirectorySize()
		{
			
			MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
			IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
			writer.Optimize();
			
			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
			
			SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[numThreads];
			for (int i = 0; i < numThreads; i++)
			{
				int num = i;
				threads[i] = new AnonymousClassThread(num, writer, ramDir, this);
			}
			for (int i = 0; i < numThreads; i++)
				threads[i].Start();
			for (int i = 0; i < numThreads; i++)
				threads[i].Join();
			
			writer.Optimize();
			// Tracked and recomputed sizes must still agree after concurrent writes.
			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
			
			writer.Close();
			// Fix: release the directory — the original leaked it.
			ramDir.Close();
		}
コード例 #35
0
		// Loads an index from a filesystem path into a MockRAMDirectory and
		// verifies size accounting, the doc count, and per-doc field retrieval.
		public virtual void  TestRAMDirectoryString()
		{
			
			MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
			
			// Check size
			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
			
			// open reader to test document count
			IndexReader reader = IndexReader.Open(ramDir);
			Assert.AreEqual(docsToAdd, reader.NumDocs());
			
			// open search to check if all doc's are there
			IndexSearcher searcher = new IndexSearcher(reader);
			
			// every document must have a stored "content" field
			for (int i = 0; i < docsToAdd; i++)
			{
				Document doc = searcher.Doc(i);
				Assert.IsTrue(doc.GetField("content") != null);
			}
			
			// cleanup
			reader.Close();
			searcher.Close();
			// Fix: release the directory — the original leaked it.
			ramDir.Close();
		}
コード例 #36
0
		// A reader may not delete documents while a writer holds the write lock.
		public virtual void  TestLock()
		{
			Directory dir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			AddDocumentWithFields(writer);
			writer.Close();

			// Re-open the writer so it owns the lock, then open a reader beside it.
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
			IndexReader reader = IndexReader.Open(dir);
			try
			{
				reader.DeleteDocument(0);
				Assert.Fail("expected lock");
			}
			catch (System.IO.IOException)
			{
				// expected: the write lock is held by the writer
			}

			// Force-release the lock (never do this outside of tests!) and retry.
			IndexReader.Unlock(dir);
			reader.DeleteDocument(0);

			reader.Close();
			writer.Close();
			dir.Close();
		}
コード例 #37
0
		// Deletes all docs matching a term through a reader and checks the
		// point-in-time view semantics: docFreq still counts deleted docs,
		// while TermDocs enumeration does not return them.
		public virtual void  TestBasicDelete()
		{
			Directory dir = new MockRAMDirectory();
			
			IndexWriter writer = null;
			IndexReader reader = null;
			Term searchTerm = new Term("content", "aaa");
			
			//  add 100 documents with term : aaa
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			for (int i = 0; i < 100; i++)
			{
				AddDoc(writer, searchTerm.Text());
			}
			writer.Close();
			
			// OPEN READER AT THIS POINT - this should fix the view of the
			// index at the point of having 100 "aaa" documents and 0 "bbb"
			reader = IndexReader.Open(dir);
			Assert.AreEqual(100, reader.DocFreq(searchTerm), "first docFreq");
			AssertTermDocsCount("first reader", reader, searchTerm, 100);
			reader.Close();
			
			// DELETE DOCUMENTS CONTAINING TERM: aaa
			int deleted = 0;
			reader = IndexReader.Open(dir);
			deleted = reader.DeleteDocuments(searchTerm);
			Assert.AreEqual(100, deleted, "deleted count");
			// docFreq ignores deletions, so it still reports 100 here.
			Assert.AreEqual(100, reader.DocFreq(searchTerm), "deleted docFreq");
			AssertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
			
			// open a 2nd reader to make sure first reader can
			// commit its changes (.del) while second reader
			// is open:
			IndexReader reader2 = IndexReader.Open(dir);
			reader.Close();
			
			// CREATE A NEW READER and re-test
			reader = IndexReader.Open(dir);
			Assert.AreEqual(100, reader.DocFreq(searchTerm), "deleted docFreq");
			AssertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
			reader.Close();
			reader2.Close();
			dir.Close();
		}
コード例 #38
0
		// Checks that IndexReader.LastModified increases (or at least does not
		// decrease) after the index is modified.  NOTE(review): the loop only
		// runs for i == 0, so the i == 1 filesystem branches below are currently
		// dead code — kept intentionally per the comment about the process lock.
		public virtual void  TestLastModified()
		{
			Assert.IsFalse(IndexReader.IndexExists("there_is_no_such_index"));
			System.IO.FileInfo fileDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testIndex"));
			// can't do the filesystem version of this test, as a system level process lock prevents deletion of the index file
			//for (int i = 0; i < 2; i++)
			for (int i = 0; i < 1; i++)
			{
				try
				{
					Directory dir;
					if (0 == i)
						dir = new MockRAMDirectory();
					else
						dir = GetDirectory();
					Assert.IsFalse(IndexReader.IndexExists(dir));
					IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
					AddDocumentWithFields(writer);
					Assert.IsTrue(IndexReader.IsLocked(dir)); // writer open, so dir is locked
					writer.Close();
					Assert.IsTrue(IndexReader.IndexExists(dir));
					IndexReader reader = IndexReader.Open(dir);
					Assert.IsFalse(IndexReader.IsLocked(dir)); // reader only, no lock
					long version = IndexReader.LastModified(dir);
					if (i == 1)
					{
						long version2 = IndexReader.LastModified(fileDir);
						Assert.AreEqual(version, version2);
					}
					reader.Close();
					// modify index and check version has been
					// incremented:
					// Sleep ~1s (10,000,000 ticks) so the modification timestamp can
					// advance on coarse-resolution clocks; retry if interrupted.
					while (true)
					{
						try
						{
							System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000));
							break;
						}
						catch (System.Threading.ThreadInterruptedException)
						{
							SupportClass.ThreadClass.Current().Interrupt();
						}
					}
					
					writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
					AddDocumentWithFields(writer);
					writer.Close();
					reader = IndexReader.Open(dir);
					Assert.IsTrue(
						version <= IndexReader.LastModified(dir),
						"old lastModified is " + version + "; new lastModified is " + IndexReader.LastModified(dir)
					);
					reader.Close();
					dir.Close();
				}
				finally
				{
					if (i == 1)
						_TestUtil.RmDir(fileDir);
				}
			}
		}
コード例 #39
0
			// Failure hook: once doFail is set, throw an IOException on roughly
			// 40% of calls (Next() % 10 yields 0..9; values 0..3 fail) to simulate
			// intermittent I/O errors on purpose.
			public override void  Eval(MockRAMDirectory dir)
			{
				if (TestTransactions.doFail && Enclosing_Instance.RANDOM.Next() % 10 <= 3)
					throw new System.IO.IOException("now failing randomly but on purpose");
			}
コード例 #40
0
		/// <summary> Tests the IndexReader.getFieldNames implementation,
		/// covering every FieldOption filter (ALL, INDEXED, UNINDEXED and the
		/// four term-vector variants) across multiple segments.</summary>
		/// <throws>  Exception on error </throws>
		public virtual void  TestGetFieldNames()
		{
			RAMDirectory d = new MockRAMDirectory();
			// set up writer
			IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true);
			AddDocumentWithFields(writer);
			writer.Close();
			// set up reader
			IndexReader reader = IndexReader.Open(d);
			System.Collections.ICollection fieldNames = reader.GetFieldNames(IndexReader.FieldOption.ALL);
			Assert.IsTrue(CollectionContains(fieldNames, "keyword"));
			Assert.IsTrue(CollectionContains(fieldNames, "text"));
			Assert.IsTrue(CollectionContains(fieldNames, "unindexed"));
			Assert.IsTrue(CollectionContains(fieldNames, "unstored"));
			reader.Close();
			// add more documents
			writer = new IndexWriter(d, new StandardAnalyzer(), false);
			// want to get some more segments here
			for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
			{
				AddDocumentWithFields(writer);
			}
			// new fields are in some different segments (we hope)
			for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
			{
				AddDocumentWithDifferentFields(writer);
			}
			// new termvector fields
			for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
			{
				AddDocumentWithTermVectorFields(writer);
			}
			
			writer.Close();
			// verify fields again: field names must be merged across all segments
			reader = IndexReader.Open(d);
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.ALL);
			Assert.AreEqual(13, fieldNames.Count); // the following fields
			Assert.IsTrue(CollectionContains(fieldNames, "keyword"));
			Assert.IsTrue(CollectionContains(fieldNames, "text"));
			Assert.IsTrue(CollectionContains(fieldNames, "unindexed"));
			Assert.IsTrue(CollectionContains(fieldNames, "unstored"));
			Assert.IsTrue(CollectionContains(fieldNames, "keyword2"));
			Assert.IsTrue(CollectionContains(fieldNames, "text2"));
			Assert.IsTrue(CollectionContains(fieldNames, "unindexed2"));
			Assert.IsTrue(CollectionContains(fieldNames, "unstored2"));
			Assert.IsTrue(CollectionContains(fieldNames, "tvnot"));
			Assert.IsTrue(CollectionContains(fieldNames, "termvector"));
			Assert.IsTrue(CollectionContains(fieldNames, "tvposition"));
			Assert.IsTrue(CollectionContains(fieldNames, "tvoffset"));
			Assert.IsTrue(CollectionContains(fieldNames, "tvpositionoffset"));
			
			// verify that only indexed fields were returned
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.INDEXED);
			Assert.AreEqual(11, fieldNames.Count); // 6 original + the 5 termvector fields 
			Assert.IsTrue(CollectionContains(fieldNames, "keyword"));
			Assert.IsTrue(CollectionContains(fieldNames, "text"));
			Assert.IsTrue(CollectionContains(fieldNames, "unstored"));
			Assert.IsTrue(CollectionContains(fieldNames, "keyword2"));
			Assert.IsTrue(CollectionContains(fieldNames, "text2"));
			Assert.IsTrue(CollectionContains(fieldNames, "unstored2"));
			Assert.IsTrue(CollectionContains(fieldNames, "tvnot"));
			Assert.IsTrue(CollectionContains(fieldNames, "termvector"));
			Assert.IsTrue(CollectionContains(fieldNames, "tvposition"));
			Assert.IsTrue(CollectionContains(fieldNames, "tvoffset"));
			Assert.IsTrue(CollectionContains(fieldNames, "tvpositionoffset"));
			
			// verify that only unindexed fields were returned
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.UNINDEXED);
			Assert.AreEqual(2, fieldNames.Count); // the following fields
			Assert.IsTrue(CollectionContains(fieldNames, "unindexed"));
			Assert.IsTrue(CollectionContains(fieldNames, "unindexed2"));
			
			// verify index term vector fields  
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR);
			Assert.AreEqual(1, fieldNames.Count); // 1 field has term vector only
			Assert.IsTrue(CollectionContains(fieldNames, "termvector"));
			
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION);
			Assert.AreEqual(1, fieldNames.Count); // 4 fields are indexed with term vectors
			Assert.IsTrue(CollectionContains(fieldNames, "tvposition"));
			
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET);
			Assert.AreEqual(1, fieldNames.Count); // 4 fields are indexed with term vectors
			Assert.IsTrue(CollectionContains(fieldNames, "tvoffset"));
			
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET);
			Assert.AreEqual(1, fieldNames.Count); // 4 fields are indexed with term vectors
			Assert.IsTrue(CollectionContains(fieldNames, "tvpositionoffset"));
			reader.Close();
			d.Close();
		}
コード例 #41
0
		// Duplicate scrape of the TestNormsClose example: norms byte buffers are
		// reference-counted across cloned SegmentReaders — cloning bumps the
		// refcount, closing a reader drops it.
		public virtual void  TestNormsClose()
		{
			Directory dir1 = new MockRAMDirectory();
			TestIndexReaderReopen.CreateIndex(dir1, false);
			SegmentReader reader1 = SegmentReader.GetOnlySegmentReader(dir1);
			// Load the norms for "field1" before grabbing the Norm object.
			reader1.Norms("field1");
			Norm r1norm = reader1.norms_ForNUnit["field1"];
			SegmentReader.Ref r1BytesRef = r1norm.BytesRef();
			SegmentReader reader2 = (SegmentReader) reader1.Clone();
			// The clone shares the same norms bytes, so the refcount is now 2.
			Assert.AreEqual(2, r1norm.BytesRef().RefCount());
			reader1.Close();
			// Closing the original releases its reference; only the clone holds one.
			Assert.AreEqual(1, r1BytesRef.RefCount());
			reader2.Norms("field1");
			reader2.Close();
			dir1.Close();
		}
コード例 #42
0
		/// <summary>Construct an empty output buffer. </summary>
		/// <param name="dir">owning mock directory; stored for later use by the stream</param>
		/// <param name="f">backing RAM file handed to the base output stream</param>
		/// <param name="name">name of the file within the directory</param>
		public MockRAMOutputStream(MockRAMDirectory dir, RAMFile f, System.String name):base(f)
		{
			this.dir = dir;
			this.name = name;
		}
コード例 #43
0
		// Simulates "disk full" and random IOExceptions while a reader commits
		// deletions/norm changes, with ever-increasing free space per cycle.
		// Verifies transactional semantics (all-or-nothing), that no garbage
		// files are left behind, and that the same reader can retry successfully
		// once space is freed.  Statement order here is load-bearing — do not
		// reorder the failure-injection / retry sequence.
		public virtual void  TestDiskFull()
		{
			
			bool debug = false;
			Term searchTerm = new Term("content", "aaa");
			int START_COUNT = 157;
			int END_COUNT = 144;
			
			// First build up a starting index:
			RAMDirectory startDir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true);
			for (int i = 0; i < 157; i++)
			{
				Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
				d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
				d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.TOKENIZED));
				writer.AddDocument(d);
			}
			writer.Close();
			
			long diskUsage = startDir.SizeInBytes();
			long diskFree = diskUsage + 100;
			
			System.IO.IOException err = null;
			
			bool done = false;
			
			// Iterate w/ ever increasing free disk space:
			while (!done)
			{
				// Fresh copy of the starting index for each disk-size cycle.
				MockRAMDirectory dir = new MockRAMDirectory(startDir);
				IndexReader reader = IndexReader.Open(dir);
				
				// For each disk size, first try to commit against
				// dir that will hit random IOExceptions & disk
				// full; after, give it infinite disk space & turn
				// off random IOExceptions & retry w/ same reader:
				bool success = false;
				
				for (int x = 0; x < 2; x++)
				{
					
					double rate = 0.05;
					double diskRatio = ((double) diskFree) / diskUsage;
					long thisDiskFree;
					System.String testName;
					
					if (0 == x)
					{
						// Pass 0: constrained disk + random IOExceptions.  Lower the
						// random-failure rate as free space grows so later cycles
						// can actually succeed.
						thisDiskFree = diskFree;
						if (diskRatio >= 2.0)
						{
							rate /= 2;
						}
						if (diskRatio >= 4.0)
						{
							rate /= 2;
						}
						if (diskRatio >= 6.0)
						{
							rate = 0.0;
						}
						if (debug)
						{
							System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
						}
						testName = "disk full during reader.Close() @ " + thisDiskFree + " bytes";
					}
					else
					{
						// Pass 1: unlimited space, no injected failures — the retry
						// with the same reader must now succeed.
						thisDiskFree = 0;
						rate = 0.0;
						if (debug)
						{
							System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
						}
						testName = "reader re-use after disk full";
					}
					
					dir.SetMaxSizeInBytes(thisDiskFree);
					dir.SetRandomIOExceptionRate(rate, diskFree);
					
					try
					{
						if (0 == x)
						{
							// Delete + set norms on every 12th doc; reader.Close()
							// is what actually commits these changes.
							int docId = 12;
							for (int i = 0; i < 13; i++)
							{
								reader.DeleteDocument(docId);
								reader.SetNorm(docId, "contents", (float) 2.0);
								docId += 12;
							}
						}
						reader.Close();
						success = true;
						if (0 == x)
						{
							done = true;
						}
					}
					catch (System.IO.IOException e)
					{
						if (debug)
						{
							System.Console.Out.WriteLine("  hit IOException: " + e);
						}
						err = e;
						if (1 == x)
						{
							// Pass 1 has unlimited space, so any failure here is a bug.
							System.Console.Error.WriteLine(e.StackTrace);
							Assert.Fail(testName + " hit IOException after disk space was freed up");
						}
					}
					
					// Whether we succeeded or failed, check that all
					// un-referenced files were in fact deleted (ie,
					// we did not create garbage).  Just create a
					// new IndexFileDeleter, have it delete
					// unreferenced files, then verify that in fact
					// no files were deleted:
					System.String[] startFiles = dir.List();
					SegmentInfos infos = new SegmentInfos();
					infos.Read(dir);
					IndexFileDeleter d = new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
					System.String[] endFiles = dir.List();
					
					System.Array.Sort(startFiles);
					System.Array.Sort(endFiles);
					
					//for(int i=0;i<startFiles.length;i++) {
					//  System.out.println("  startFiles: " + i + ": " + startFiles[i]);
					//}
					
					if (SupportClass.Compare.CompareStringArrays(startFiles, endFiles) == false)
					{
						System.String successStr;
						if (success)
						{
							successStr = "success";
						}
						else
						{
							successStr = "IOException";
							System.Console.Error.WriteLine(err.StackTrace);
						}
						Assert.Fail("reader.Close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
					}
					
					// Finally, verify index is not corrupt, and, if
					// we succeeded, we see all docs changed, and if
					// we failed, we see either all docs or no docs
					// changed (transactional semantics):
					IndexReader newReader = null;
					try
					{
						newReader = IndexReader.Open(dir);
					}
					catch (System.IO.IOException e)
					{
						System.Console.Error.WriteLine(e.StackTrace);
						Assert.Fail(testName + ":exception when creating IndexReader after disk full during Close: " + e);
					}
					/*
					int result = newReader.docFreq(searchTerm);
					if (success) {
					if (result != END_COUNT) {
					fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
					}
					} else {
					// On hitting exception we still may have added
					// all docs:
					if (result != START_COUNT && result != END_COUNT) {
					err.printStackTrace();
					fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT);
					}
					}
					*/
					
					IndexSearcher searcher = new IndexSearcher(newReader);
					Hits hits = null;
					try
					{
						hits = searcher.Search(new TermQuery(searchTerm));
					}
					catch (System.IO.IOException e)
					{
						System.Console.Error.WriteLine(e.StackTrace);
						Assert.Fail(testName + ": exception when searching: " + e);
					}
					int result2 = hits.Length();
					if (success)
					{
						if (result2 != END_COUNT)
						{
							Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
						}
					}
					else
					{
						// On hitting exception we still may have added
						// all docs:
						if (result2 != START_COUNT && result2 != END_COUNT)
						{
							System.Console.Error.WriteLine(err.StackTrace);
							Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT);
						}
					}
					
					searcher.Close();
					newReader.Close();
					
					if (result2 == END_COUNT)
					{
						break;
					}
				}
				
				dir.Close();
				
				// Try again with 10 more bytes of free space:
				diskFree += 10;
			}
			
			startDir.Close();
		}
コード例 #44
0
        // Verifies CachingSpanFilter's DeletesMode semantics against a
        // near-real-time reader: IGNORE keeps serving cached (possibly deleted)
        // hits through the filter-only path, while RECACHE regenerates and
        // respects deletions; also checks cache reuse when no deletes occurred.
        public void TestEnforceDeletions()
        {
            Directory dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            IndexReader reader = writer.GetReader();
            IndexSearcher searcher = new IndexSearcher(reader);

            // add a doc, refresh the reader, and check that its there
            Document doc = new Document();
            doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.AddDocument(doc);

            reader = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
            Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

            SpanFilter startFilter = new SpanQueryFilter(new SpanTermQuery(new Term("id", "1")));

            // ignore deletions
            CachingSpanFilter filter = new CachingSpanFilter(startFilter, CachingWrapperFilter.DeletesMode.IGNORE);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
            ConstantScoreQuery constantScore = new ConstantScoreQuery(filter);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            // The query+filter path applies deletions...
            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            // ...but the filter-only path still serves the stale cached hit (IGNORE mode).
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");


            // force cache to regenerate:
            filter = new CachingSpanFilter(startFilter, CachingWrapperFilter.DeletesMode.RECACHE);

            writer.AddDocument(doc);
            reader = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");

            constantScore = new ConstantScoreQuery(filter);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // make sure we get a cache hit when we reopen readers
            // that had no new deletions
            IndexReader newReader = RefreshReader(reader);
            Assert.IsTrue(reader != newReader);
            reader = newReader;
            searcher = new IndexSearcher(reader);
            // An unchanged missCount proves the cached filter result was reused.
            int missCount = filter.missCount;
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
            Assert.AreEqual(missCount, filter.missCount);

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            // In RECACHE mode the filter-only path also honors the deletion.
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
        }
コード例 #45
0
		// The index version number must strictly increase after a modification.
		public virtual void  TestVersion()
		{
			Assert.IsFalse(IndexReader.IndexExists("there_is_no_such_index"));
			Directory dir = new MockRAMDirectory();
			Assert.IsFalse(IndexReader.IndexExists(dir));

			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			AddDocumentWithFields(writer);
			Assert.IsTrue(IndexReader.IsLocked(dir)); // writer open, so dir is locked
			writer.Close();
			Assert.IsTrue(IndexReader.IndexExists(dir));

			IndexReader reader = IndexReader.Open(dir);
			Assert.IsFalse(IndexReader.IsLocked(dir)); // reader only, no lock
			long initialVersion = IndexReader.GetCurrentVersion(dir);
			reader.Close();

			// Modify the index and confirm the version was bumped.
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			AddDocumentWithFields(writer);
			writer.Close();

			reader = IndexReader.Open(dir);
			Assert.IsTrue(initialVersion < IndexReader.GetCurrentVersion(dir), "old version is " + initialVersion + "; new version is " + IndexReader.GetCurrentVersion(dir));
			reader.Close();
			dir.Close();
		}
コード例 #46
0
		// UndeleteAll from a later reader must restore documents whose deletions
		// were committed when an earlier reader was closed.
		public virtual void  TestUndeleteAllAfterCloseThenReopen()
		{
			Directory dir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			for (int n = 0; n < 2; n++)
			{
				AddDocumentWithFields(writer);
			}
			writer.Close();

			// Delete both docs; closing the reader commits the deletions.
			IndexReader reader = IndexReader.Open(dir);
			reader.DeleteDocument(0);
			reader.DeleteDocument(1);
			reader.Close();

			// A fresh reader undeletes everything, then closes to commit that.
			reader = IndexReader.Open(dir);
			reader.UndeleteAll();
			reader.Close();

			reader = IndexReader.Open(dir);
			Assert.AreEqual(2, reader.NumDocs()); // nothing has really been deleted thanks to undeleteAll()
			reader.Close();
			dir.Close();
		}
コード例 #47
0
		// Verifies norms byte-buffer refcounting across a chain of cloned
		// readers: each clone shares (and increments the refcount of) the norms
		// bytes until a clone writes a norm, which gives it a private copy.
		public virtual void  TestNormsRefCounting()
		{
			Directory dir1 = new MockRAMDirectory();
			TestIndexReaderReopen.CreateIndex(dir1, false);
            IndexReader reader1 = IndexReader.Open(dir1, false);
			
			IndexReader reader2C = (IndexReader) reader1.Clone();
			SegmentReader segmentReader2C = SegmentReader.GetOnlySegmentReader(reader2C);
			segmentReader2C.Norms("field1"); // load the norms for the field
			Norm reader2CNorm = segmentReader2C.norms_ForNUnit["field1"];
			Assert.IsTrue(reader2CNorm.BytesRef().RefCount() == 2, "reader2CNorm.bytesRef()=" + reader2CNorm.BytesRef());
			
			
			
			IndexReader reader3C = (IndexReader) reader2C.Clone();
			SegmentReader segmentReader3C = SegmentReader.GetOnlySegmentReader(reader3C);
			Norm reader3CCNorm = segmentReader3C.norms_ForNUnit["field1"];
			Assert.AreEqual(3, reader3CCNorm.BytesRef().RefCount());
			
			// edit a norm and the refcount should be 1
			IndexReader reader4C = (IndexReader) reader3C.Clone();
			SegmentReader segmentReader4C = SegmentReader.GetOnlySegmentReader(reader4C);
			Assert.AreEqual(4, reader3CCNorm.BytesRef().RefCount());
			// Writing a norm gives reader4C its own private copy of the bytes.
			reader4C.SetNorm(5, "field1", 0.33f);
			
			// generate a cannot update exception in reader1
            Assert.Throws<LockObtainFailedException>(() => reader3C.SetNorm(1, "field1", 0.99f), "did not hit expected exception");
			
			// norm values should be different 
			Assert.IsTrue(Similarity.DecodeNorm(segmentReader3C.Norms("field1")[5]) != Similarity.DecodeNorm(segmentReader4C.Norms("field1")[5]));
			Norm reader4CCNorm = segmentReader4C.norms_ForNUnit["field1"];
			// reader4C dropped out of the shared buffer (4 -> 3) and owns its own (1).
			Assert.AreEqual(3, reader3CCNorm.BytesRef().RefCount());
			Assert.AreEqual(1, reader4CCNorm.BytesRef().RefCount());
			
			IndexReader reader5C = (IndexReader) reader4C.Clone();
			SegmentReader segmentReader5C = SegmentReader.GetOnlySegmentReader(reader5C);
			Norm reader5CCNorm = segmentReader5C.norms_ForNUnit["field1"];
			reader5C.SetNorm(5, "field1", 0.7f);
			Assert.AreEqual(1, reader5CCNorm.BytesRef().RefCount());
			
			reader5C.Close();
			reader4C.Close();
			reader3C.Close();
			reader2C.Close();
			reader1.Close();
			dir1.Close();
		}
コード例 #48
0
 /// <summary>
 /// Called on the first write of every new file; this base implementation
 /// does nothing.
 /// </summary>
 public virtual void Eval(MockRAMDirectory dir)
 {
 }
コード例 #49
0
        /// <summary>
        /// Indexes 30 documents whose content string grows by one "term" per
        /// document, with two parallel fields: "noTf" (OmitTermFreqAndPositions
        /// = true, plus "notf" on odd docs) and "tf" (normal, plus "tf" on even
        /// docs).  Then runs four term queries plus a boolean conjunction
        /// through the anonymous counting collectors and checks the final
        /// hit count.  NOTE(review): the per-query assertions live inside the
        /// AnonymousClassCountingHitCollector* classes, which are defined
        /// elsewhere in this file.
        /// </summary>
        public virtual void  TestBasic()
        {
            Directory   dir      = new MockRAMDirectory();
            Analyzer    analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
            IndexWriter writer   = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED, null);

            // Small merge factor / buffer so multiple segments get created and
            // merged during the 30-document loop.
            writer.MergeFactor = 2;
            writer.SetMaxBufferedDocs(2);
            writer.SetSimilarity(new SimpleSimilarity());


            System.Text.StringBuilder sb   = new System.Text.StringBuilder(265);
            System.String             term = "term";
            for (int i = 0; i < 30; i++)
            {
                Document d = new Document();
                // Content grows each iteration: doc i contains "term" (i+1) times.
                sb.Append(term).Append(" ");
                System.String content = sb.ToString();
                // "noTf" omits term frequencies/positions; odd docs also get "notf".
                Field         noTf    = new Field("noTf", content + (i % 2 == 0?"":" notf"), Field.Store.NO, Field.Index.ANALYZED);
                noTf.OmitTermFreqAndPositions = true;
                d.Add(noTf);

                // "tf" keeps full postings; even docs also get the extra "tf" token.
                Field tf = new Field("tf", content + (i % 2 == 0?" tf":""), Field.Store.NO, Field.Index.ANALYZED);
                d.Add(tf);

                writer.AddDocument(d, null);
                //System.out.println(d);
            }

            writer.Optimize(null);
            // flush
            writer.Close();
            _TestUtil.CheckIndex(dir);

            /*
             * Verify the index
             */
            Searcher searcher = new IndexSearcher(dir, true, null);

            searcher.Similarity = new SimpleSimilarity();

            Term      a  = new Term("noTf", term);
            Term      b  = new Term("tf", term);
            Term      c  = new Term("noTf", "notf");
            Term      d2 = new Term("tf", "tf");
            TermQuery q1 = new TermQuery(a);
            TermQuery q2 = new TermQuery(b);
            TermQuery q3 = new TermQuery(c);
            TermQuery q4 = new TermQuery(d2);


            searcher.Search(q1, new AnonymousClassCountingHitCollector(this), null);
            //System.out.println(CountingHitCollector.getCount());


            searcher.Search(q2, new AnonymousClassCountingHitCollector1(this), null);
            //System.out.println(CountingHitCollector.getCount());



            searcher.Search(q3, new AnonymousClassCountingHitCollector2(this), null);
            //System.out.println(CountingHitCollector.getCount());


            searcher.Search(q4, new AnonymousClassCountingHitCollector3(this), null);
            //System.out.println(CountingHitCollector.getCount());



            // Conjunction of "noTf:term" and "tf:tf" — must match exactly the
            // 15 even-numbered documents.
            BooleanQuery bq = new BooleanQuery();

            bq.Add(q1, Occur.MUST);
            bq.Add(q4, Occur.MUST);

            searcher.Search(bq, new AnonymousClassCountingHitCollector4(this), null);
            Assert.IsTrue(15 == CountingHitCollector.GetCount());

            searcher.Close();
            dir.Close();
        }
コード例 #50
0
		/// <summary>
		/// Indexes several segments of documents carrying one field per
		/// term-vector option, then maps document 0's term vectors with a
		/// FieldSortedTermVectorMapper and checks that exactly the four
		/// vector-storing fields appear (the Field.TermVector.NO field must
		/// be absent from the map).
		/// </summary>
		public virtual void  TestTermVectors()
		{
			RAMDirectory d = new MockRAMDirectory();
			// set up writer
			IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true);
			// want to get some more segments here
			// new termvector fields
			for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
			{
				Document doc = new Document();
				doc.Add(new Field("tvnot", "one two two three three three", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
				doc.Add(new Field("termvector", "one two two three three three", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.YES));
				doc.Add(new Field("tvoffset", "one two two three three three", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_OFFSETS));
				doc.Add(new Field("tvposition", "one two two three three three", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS));
				doc.Add(new Field("tvpositionoffset", "one two two three three three", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
				
				writer.AddDocument(doc);
			}
			writer.Close();
			IndexReader reader = IndexReader.Open(d);
			FieldSortedTermVectorMapper mapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
			reader.GetTermFreqVector(0, mapper);
			System.Collections.IDictionary map = mapper.GetFieldToTerms();
			Assert.IsTrue(map != null, "map is null and it shouldn't be");
			Assert.IsTrue(map.Count == 4, "map Size: " + map.Count + " is not: " + 4);
			System.Collections.IDictionary set_Renamed = (System.Collections.IDictionary) map["termvector"];
			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
			{
				TermVectorEntry entry = (TermVectorEntry) iterator.Current;
				Assert.IsTrue(entry != null, "entry is null and it shouldn't be");
				System.Console.Out.WriteLine("Entry: " + entry);
			}
			// BUGFIX: the reader and directory were previously never closed,
			// leaking them after the test; release both, as the sibling
			// tests in this file do.
			reader.Close();
			d.Close();
		}
コード例 #51
0
ファイル: TestAtomicUpdate.cs プロジェクト: Nangal/lucene.net
 /// <summary>
 /// Runs the atomic-update scenario (RunTest) twice: first against an
 /// in-memory mock directory, then against a temporary filesystem
 /// directory that is removed afterwards.
 /// </summary>
 public virtual void  TestAtomicUpdates()
 {
     RANDOM = NewRandom();
     
     // Pass 1: RAM-backed directory.
     Directory ramDir = new MockRAMDirectory();
     RunTest(ramDir);
     ramDir.Close();
     
     // Pass 2: filesystem-backed directory in a scratch folder,
     // cleaned up once the run completes.
     System.IO.DirectoryInfo tempPath = _TestUtil.GetTempDir("lucene.test.atomic");
     Directory fsDir = FSDirectory.Open(tempPath);
     RunTest(fsDir);
     fsDir.Close();
     _TestUtil.RmDir(tempPath);
 }
コード例 #52
0
		/// <summary>Construct a mock input stream over the given RAMFile,
		/// recording the owning directory and file name.
		/// NOTE(review): the original comment said "output buffer" — it was
		/// copied from MockRAMOutputStream; this is the input-stream side.
		/// </summary>
		/// <throws>  IOException  </throws>
		public MockRAMInputStream(MockRAMDirectory dir, System.String name, RAMFile f):base(f)
		{
			this.name = name;
			this.dir = dir;
		}
コード例 #53
0
		/// <summary>
		/// Starts one indexer thread and two searcher threads over a pair of
		/// failure-injecting RAM directories, waits for all of them, and
		/// verifies that no thread recorded a failure.
		/// </summary>
		public virtual void  TestTransactions_Rename()
		{
			RANDOM = NewRandom();
			
			// Two directories with random failure injection enabled;
			// double-write prevention must be off for this scenario.
			MockRAMDirectory dir1 = new MockRAMDirectory();
			MockRAMDirectory dir2 = new MockRAMDirectory();
			dir1.SetPreventDoubleWrite(false);
			dir2.SetPreventDoubleWrite(false);
			dir1.FailOn(new RandomFailure(this));
			dir2.FailOn(new RandomFailure(this));
			
			InitIndex(dir1);
			InitIndex(dir2);
			
			TimedThread[] workers = new TimedThread[3];
			int count = 0;
			
			// One writer...
			IndexerThread indexer = new IndexerThread(this, this, dir1, dir2, workers);
			workers[count++] = indexer;
			indexer.Start();
			
			// ...and two concurrent searchers.
			SearcherThread searcherA = new SearcherThread(this, dir1, dir2, workers);
			workers[count++] = searcherA;
			searcherA.Start();
			
			SearcherThread searcherB = new SearcherThread(this, dir1, dir2, workers);
			workers[count++] = searcherB;
			searcherB.Start();
			
			// Wait for every worker to finish, then check none failed.
			for (int i = 0; i < count; i++)
				workers[i].Join();
			
			for (int i = 0; i < count; i++)
				Assert.IsTrue(!((TimedThread) workers[i]).failed);
		}
コード例 #54
0
			/// <summary> eval is called on the first write of every new file.
			/// This base implementation does nothing.
			/// </summary>
			public virtual void  Eval(MockRAMDirectory dir)
			{
			}
コード例 #55
0
 /// <summary>Construct an empty output buffer over the given RAMFile,
 /// recording the owning mock directory and the file's name.
 /// </summary>
 public MockRAMOutputStream(MockRAMDirectory dir, RAMFile f, System.String name) : base(f)
 {
     // Assignments are independent; order does not matter.
     this.name = name;
     this.dir = dir;
 }
コード例 #56
0
		/// <summary>
		/// Writes a one-document index with compound files disabled, sets the
		/// norm for doc 0 in two separate reader sessions, and checks that the
		/// first-generation norms file (_0_1.s0) is removed once the second
		/// generation (_0_2.s0) is written.
		/// </summary>
		public virtual void  TestWritingNormsNoReader()
		{
			Directory dir = new MockRAMDirectory();
			Term searchTerm = new Term("content", "aaa");
			
			// Index a single document containing the term "aaa",
			// keeping separate (non-compound) files.
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			writer.SetUseCompoundFile(false);
			AddDoc(writer, searchTerm.Text());
			writer.Close();
			
			// First norm update: writes generation-1 norms (_0_1.s0).
			IndexReader reader = IndexReader.Open(dir);
			reader.SetNorm(0, "content", (float) 2.0);
			reader.Close();
			
			// Second norm update: writes generation-2 norms (_0_2.s0).
			reader = IndexReader.Open(dir);
			reader.SetNorm(0, "content", (float) 2.0);
			reader.Close();
			
			Assert.IsFalse(dir.FileExists("_0_1.s0"), "failed to remove first generation norms file on writing second generation");
			
			dir.Close();
		}
コード例 #57
0
 /// <summary>Construct a mock input stream over the given RAMFile,
 /// recording the owning directory and file name.
 /// NOTE(review): the original comment said "output buffer" — it was
 /// copied from MockRAMOutputStream; this is the input-stream side.
 /// </summary>
 /// <throws>  IOException  </throws>
 public MockRAMInputStream(MockRAMDirectory dir, System.String name, RAMFile f) : base(f)
 {
     this.name = name;
     this.dir  = dir;
 }
コード例 #58
0
		/// <summary>Construct an empty output buffer over the given RAMFile,
		/// recording the owning mock directory.  Unlike the three-argument
		/// overload, no file name is recorded here.
		/// </summary>
		public MockRAMOutputStream(MockRAMDirectory dir, RAMFile f) : base(f)
		{
			this.dir = dir;
		}