Example #1
        public void TestRollbackIntegrityWithBufferFlush()
        {
            Directory   dir = new MockRAMDirectory();
            IndexWriter w   = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);

            for (int i = 0; i < 5; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("pk", i.ToString(), Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                w.AddDocument(doc);
            }
            w.Close();

            // If buffer size is small enough to cause a flush, errors ensue...
            w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            w.SetMaxBufferedDocs(2);

            Term pkTerm = new Term("pk", "");

            for (int i = 0; i < 3; i++)
            {
                Document doc   = new Document();
                String   value = i.ToString();
                doc.Add(new Field("pk", value, Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                doc.Add(new Field("text", "foo", Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
                w.UpdateDocument(pkTerm.CreateTerm(value), doc);
            }
            w.Rollback(); // discards everything buffered since this writer was opened; the index keeps the 5 committed docs

            IndexReader r = IndexReader.Open(dir, true);

            Assert.AreEqual(5, r.NumDocs(), "index should contain same number of docs post rollback");
            r.Close();
            dir.Close();
        }
Example #2
 public override void Eval(MockRAMDirectory dir)
 {
     if (TestTransactions.doFail && Enclosing_Instance.RANDOM.Next() % 10 <= 3)
     {
         throw new System.IO.IOException("now failing randomly but on purpose");
     }
 }
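
A note on context: this Eval override is only a fragment; it is the body of a MockRAMDirectory.Failure subclass that TestTransactions registers on its directories so that in-flight operations throw at random. A minimal sketch of the enclosing class, assuming it matches the RandomFailure registered via dir.FailOn(...) in Example #9 below:

        // Sketch (assumption): the likely shape of the Failure subclass around Eval.
        private class RandomFailure : MockRAMDirectory.Failure
        {
            private readonly TestTransactions enclosingInstance;

            public RandomFailure(TestTransactions enclosingInstance)
            {
                this.enclosingInstance = enclosingInstance;
            }

            public override void Eval(MockRAMDirectory dir)
            {
                // Next() is non-negative, so Next() % 10 <= 3 fires roughly 40% of the time.
                if (TestTransactions.doFail && enclosingInstance.RANDOM.Next() % 10 <= 3)
                {
                    throw new System.IO.IOException("now failing randomly but on purpose");
                }
            }
        }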
Example #3
        public virtual void TestSorting()
        {
            Directory   directory = new MockRAMDirectory();
            IndexWriter writer    = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            writer.SetMaxBufferedDocs(2);
            writer.MergeFactor = 1000;
            writer.AddDocument(Adoc(new System.String[] { "id", "a", "title", "ipod", "str_s", "a" }), null);
            writer.AddDocument(Adoc(new System.String[] { "id", "b", "title", "ipod ipod", "str_s", "b" }), null);
            writer.AddDocument(Adoc(new System.String[] { "id", "c", "title", "ipod ipod ipod", "str_s", "c" }), null);
            writer.AddDocument(Adoc(new System.String[] { "id", "x", "title", "boosted", "str_s", "x" }), null);
            writer.AddDocument(Adoc(new System.String[] { "id", "y", "title", "boosted boosted", "str_s", "y" }), null);
            writer.AddDocument(Adoc(new System.String[] { "id", "z", "title", "boosted boosted boosted", "str_s", "z" }), null);

            IndexReader r = writer.GetReader(null);

            writer.Close();

            IndexSearcher searcher = new IndexSearcher(r);

            RunTest(searcher, true);
            RunTest(searcher, false);

            searcher.Close();
            r.Close();
            directory.Close();
        }
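
Adoc is a helper defined elsewhere in the fixture. A plausible sketch, assuming it treats the string array as alternating field-name/value pairs (the extra null state argument this particular port threads through is omitted here):

        // Hypothetical sketch of Adoc: pack alternating name/value pairs into one Document.
        private Document Adoc(System.String[] vals)
        {
            Document doc = new Document();
            for (int i = 0; i < vals.Length - 1; i += 2)
            {
                doc.Add(new Field(vals[i], vals[i + 1], Field.Store.YES, Field.Index.ANALYZED));
            }
            return doc;
        }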
Example #4
        public virtual void TestRandomIWReader()
        {
            this.r = NewRandom();
            Directory dir = new MockRAMDirectory();

            // TODO: verify equals using IW.getReader
            DocsAndWriter dw = IndexRandomIWReader(10, 100, 100, dir);
            IndexReader   r  = dw.writer.GetReader(); // near-real-time reader: already sees the buffered docs before Commit()

            dw.writer.Commit();
            VerifyEquals(r, dir, "id");
            r.Close();
            dw.writer.Close();
            dir.Close();
        }
Example #5
        public virtual void TestRandom()
        {
            r = NewRandom();
            Directory dir1 = new MockRAMDirectory();
            // dir1 = FSDirectory.open("foofoofoo");
            Directory dir2 = new MockRAMDirectory();

            // mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
            System.Collections.IDictionary docs = IndexRandom(10, 100, 100, dir1);
            IndexSerial(docs, dir2);

            // sanity check: an index should verify as equal to itself
            // verifyEquals(dir1, dir1, "id");
            // verifyEquals(dir2, dir2, "id");

            VerifyEquals(dir1, dir2, "id");

            dir1.Close();
            dir2.Close();
        }
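
IndexRandom, IndexSerial, and VerifyEquals are fixture helpers that are not shown. A simplified sketch of IndexSerial, assuming it replays the captured documents on a single thread to build the deterministic baseline that dir1 is compared against:

        // Simplified sketch (assumption): single-threaded re-indexing of the
        // documents captured by IndexRandom, used as the comparison baseline.
        public static void IndexSerial(System.Collections.IDictionary docs, Directory dir)
        {
            IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            foreach (Document d in docs.Values)
            {
                w.AddDocument(d);
            }
            w.Close();
        }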
Example #6
        public virtual void TestAtomicUpdates()
        {
            RANDOM = NewRandom();
            Directory directory;

            // First in a RAM directory:
            directory = new MockRAMDirectory();
            RunTest(directory);
            directory.Close();

            // Second in an FSDirectory:
            System.IO.DirectoryInfo dirPath = _TestUtil.GetTempDir("lucene.test.atomic");
            directory = FSDirectory.Open(dirPath);
            RunTest(directory);
            directory.Close();
            _TestUtil.RmDir(dirPath);
        }
Example #7
        public virtual void TestAtomicUpdates()
        {
            RANDOM = NewRandom();
            Directory directory;

            // First in a RAM directory:
            directory = new MockRAMDirectory();
            RunTest(directory);
            directory.Close();

            // Second in an FSDirectory:
            System.String      tempDir = System.IO.Path.GetTempPath();
            System.IO.FileInfo dirPath = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucene.test.atomic"));
            directory = FSDirectory.Open(dirPath);
            RunTest(directory);
            directory.Close();
            _TestUtil.RmDir(dirPath);
        }
Example #8
        public virtual void TestStressIndexAndSearching()
        {
            RANDOM = NewRandom();

            // With ConcurrentMergeScheduler, in RAMDir
            Directory directory = new MockRAMDirectory();

            RunStressTest(directory, new ConcurrentMergeScheduler());
            directory.Close();

            // With ConcurrentMergeScheduler, in FSDir
            var dirPath = _TestUtil.GetTempDir("lucene.test.stress");

            directory = FSDirectory.Open(dirPath);
            RunStressTest(directory, new ConcurrentMergeScheduler());
            directory.Close();

            _TestUtil.RmDir(dirPath);
        }
Example #9
        public virtual void TestTransactions_Rename()
        {
            RANDOM = NewRandom();
            MockRAMDirectory dir1 = new MockRAMDirectory();
            MockRAMDirectory dir2 = new MockRAMDirectory();

            dir1.SetPreventDoubleWrite(false);
            dir2.SetPreventDoubleWrite(false);
            dir1.FailOn(new RandomFailure(this));
            dir2.FailOn(new RandomFailure(this));

            InitIndex(dir1);
            InitIndex(dir2);

            TimedThread[] threads   = new TimedThread[3];
            int           numThread = 0;

            IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, threads);

            threads[numThread++] = indexerThread;
            indexerThread.Start();

            SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);

            threads[numThread++] = searcherThread1;
            searcherThread1.Start();

            SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);

            threads[numThread++] = searcherThread2;
            searcherThread2.Start();

            for (int i = 0; i < numThread; i++)
            {
                threads[i].Join();
            }

            for (int i = 0; i < numThread; i++)
            {
                Assert.IsFalse(threads[i].failed);
            }
        }
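
TimedThread (with its IndexerThread and SearcherThread subclasses) is test scaffolding defined elsewhere. A minimal self-contained sketch, assuming each worker loops for a fixed time and records any exception in the failed flag that the assertions above check:

        // Minimal sketch (assumption, not the shipped class).
        public abstract class TimedThread
        {
            public volatile bool failed;
            private readonly TimedThread[] allThreads;
            private System.Threading.Thread thread;
            private const int RUN_TIME_SEC = 6;

            protected TimedThread(TimedThread[] allThreads)
            {
                this.allThreads = allThreads;
            }

            // Subclasses do one unit of indexing or searching per call.
            public abstract void DoWork();

            public void Start()
            {
                thread = new System.Threading.Thread(Run);
                thread.Start();
            }

            public void Join()
            {
                thread.Join();
            }

            private void Run()
            {
                System.DateTime stopTime = System.DateTime.UtcNow.AddSeconds(RUN_TIME_SEC);
                try
                {
                    // Keep working until time runs out or a sibling thread has failed.
                    while (System.DateTime.UtcNow < stopTime && !AnyErrors())
                    {
                        DoWork();
                    }
                }
                catch (System.Exception e)
                {
                    System.Console.WriteLine(e);
                    failed = true;
                }
            }

            private bool AnyErrors()
            {
                foreach (TimedThread t in allThreads)
                {
                    if (t != null && t.failed)
                    {
                        return true;
                    }
                }
                return false;
            }
        }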
Example #10
        public virtual void TestMultiConfig()
        {
            // test lots of smaller different params together
            r = NewRandom();
            for (int i = 0; i < 100; i++)
            {
                // increase iterations for better testing
                sameFieldOrder  = r.NextDouble() > 0.5;
                mergeFactor     = r.Next(3) + 2;
                maxBufferedDocs = r.Next(3) + 2;
                seed++;

                int       nThreads = r.Next(5) + 1;
                int       iter     = r.Next(10) + 1;
                int       range    = r.Next(20) + 1;
                Directory dir1     = new MockRAMDirectory();
                Directory dir2     = new MockRAMDirectory();
                System.Collections.IDictionary docs = IndexRandom(nThreads, iter, range, dir1);
                IndexSerial(docs, dir2);
                VerifyEquals(dir1, dir2, "id");
            }
        }
Example #11
        public virtual void TestStressIndexAndSearching()
        {
            RANDOM = NewRandom();

            // RAMDir
            Directory directory = new MockRAMDirectory();

            RunStressTest(directory, true, null);
            directory.Close();

            // FSDir
            System.IO.FileInfo dirPath = _TestUtil.GetTempDir("lucene.test.stress");
            directory = FSDirectory.Open(dirPath);
            RunStressTest(directory, true, null);
            directory.Close();

            // With ConcurrentMergeScheduler, in RAMDir
            directory = new MockRAMDirectory();
            RunStressTest(directory, true, new ConcurrentMergeScheduler());
            directory.Close();

            // With ConcurrentMergeScheduler, in FSDir
            directory = FSDirectory.Open(dirPath);
            RunStressTest(directory, true, new ConcurrentMergeScheduler());
            directory.Close();

            // With ConcurrentMergeScheduler and autoCommit=false, in RAMDir
            directory = new MockRAMDirectory();
            RunStressTest(directory, false, new ConcurrentMergeScheduler());
            directory.Close();

            // With ConcurrentMergeScheduler and autoCommit=false, in FSDir
            directory = FSDirectory.Open(dirPath);
            RunStressTest(directory, false, new ConcurrentMergeScheduler());
            directory.Close();

            _TestUtil.RmDir(dirPath);
        }
Example #12
        public virtual void TestMultiValueSource()
        {
            Directory   dir = new MockRAMDirectory();
            IndexWriter w   = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            Document    doc = new Document();
            Field       f   = new Field("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

            doc.Add(f);

            for (int i = 0; i < 17; i++)
            {
                f.SetValue("" + i);
                w.AddDocument(doc);
                w.Commit(); // one commit per document => many small segments, hence multiple sub-readers below
            }

            IndexReader r = w.GetReader();

            w.Close();

            Assert.IsTrue(r.GetSequentialSubReaders().Length > 1);

            ValueSource s1 = new IntFieldSource("field");
            DocValues   v1 = s1.GetValues(r);
            DocValues   v2 = new MultiValueSource(s1).GetValues(r);

            for (int i = 0; i < r.MaxDoc(); i++)
            {
                Assert.AreEqual(v1.IntVal(i), i);
                Assert.AreEqual(v2.IntVal(i), i);
            }

            Lucene.Net.Search.FieldCache_Fields.DEFAULT.PurgeAllCaches();

            r.Close();
            dir.Close();
        }
Example #13
        public void TestEnforceDeletions()
        {
            Directory     dir      = new MockRAMDirectory();
            IndexWriter   writer   = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            IndexReader   reader   = writer.GetReader();
            IndexSearcher searcher = new IndexSearcher(reader);

            // add a doc, refresh the reader, and check that it's there
            Document doc = new Document();

            doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.AddDocument(doc);

            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);

            Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

            Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));

            // ignore deletions
            CachingWrapperFilter filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.IGNORE);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
            ConstantScoreQuery constantScore = new ConstantScoreQuery(filter);

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");


            // force cache to regenerate:
            filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.RECACHE);

            writer.AddDocument(doc);
            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");

            constantScore = new ConstantScoreQuery(filter);
            docs          = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // make sure we get a cache hit when we reopen reader
            // that had no change to deletions
            IndexReader newReader = RefreshReader(reader);

            Assert.IsTrue(reader != newReader);
            reader   = newReader;
            searcher = new IndexSearcher(reader);
            int missCount = filter.missCount;

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
            Assert.AreEqual(missCount, filter.missCount);

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            missCount = filter.missCount;
            docs      = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(missCount + 1, filter.missCount);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");


            // apply deletions dynamically
            filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.DYNAMIC);

            writer.AddDocument(doc);
            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
            constantScore = new ConstantScoreQuery(filter);
            docs          = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            missCount = filter.missCount;
            docs      = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");

            // doesn't count as a miss
            Assert.AreEqual(missCount, filter.missCount);
        }
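
RefreshReader is a small helper that is not shown in this example. A minimal sketch, assuming it relies on IndexReader.Reopen(), which returns the same instance when nothing has changed:

        // Hypothetical helper, inferred from how it is used above.
        private static IndexReader RefreshReader(IndexReader reader)
        {
            IndexReader newReader = reader.Reopen();
            if (newReader != reader)
            {
                // Only close the old reader when a genuinely new one came back.
                reader.Close();
            }
            return newReader;
        }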