An IndexReader which reads indexes with multiple segments.
Inheritance: IndexReader
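A minimal usage sketch (assuming the Lucene.Net 4.8 API; the index path below is illustrative): a multi-segment reader is opened over a Directory, searched through an IndexSearcher, and disposed to release the segment files.

using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;

// Open a DirectoryReader over every live segment of the index,
// search it, then dispose reader and directory when done.
using (Directory dir = FSDirectory.Open("/path/to/index"))
using (DirectoryReader reader = DirectoryReader.Open(dir))
{
    var searcher = new IndexSearcher(reader);
    // ... run queries against searcher ...
}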
 public override void SetUp()
 {
     base.SetUp();
     dir = NewDirectory();
     var iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
     var doc = new Document
     {
         NewStringField("id", "1", Field.Store.YES),
         NewTextField("body", "some contents and more contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 5)
     };
     iw.AddDocument(doc);
     doc = new Document
     {
         NewStringField("id", "2", Field.Store.YES),
         NewTextField("body", "another document with different contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 20)
     };
     iw.AddDocument(doc);
     doc = new Document
     {
         NewStringField("id", "3", Field.Store.YES),
         NewTextField("body", "crappy contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 2)
     };
     iw.AddDocument(doc);
     reader = iw.Reader;
     searcher = new IndexSearcher(reader);
     iw.Dispose();
 }
 public override void SetUp()
 {
     base.SetUp();
     dir = NewDirectory();
     IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
     iwc.SetMergePolicy(NewLogMergePolicy());
     var iw = new RandomIndexWriter(Random(), dir, iwc);
     var doc = new Document
     {
         NewStringField("id", "1", Field.Store.YES),
         NewTextField("body", "some contents and more contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 5)
     };
     iw.AddDocument(doc);
     doc = new Document
     {
         NewStringField("id", "2", Field.Store.YES),
         NewTextField("body", "another document with different contents", Field.Store
             .NO),
         new NumericDocValuesField("popularity", 20)
     };
     iw.AddDocument(doc);
     doc = new Document
     {
         NewStringField("id", "3", Field.Store.YES),
         NewTextField("body", "crappy contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 2)
     };
     iw.AddDocument(doc);
     iw.ForceMerge(1);
     reader = iw.Reader;
     iw.Dispose();
 }
Example #3
        protected SpatialContext ctx; // subclass must initialize

        public override void SetUp()
        {
            base.SetUp();

            directory = NewDirectory();
            Random random = Random();
            indexWriter = new RandomIndexWriter(random, directory, NewIndexWriterConfig(random));
            indexReader = indexWriter.Reader;
            indexSearcher = NewSearcher(indexReader);
        }
Example #4
		public override void TearDown()
		{
			if (indexWriter != null)
			{
				indexWriter.Dispose();
				indexWriter = null;
			}
			if (indexReader != null)
			{
				indexReader.Dispose();
				indexReader = null;
			}
			if (directory != null)
			{
				directory.Dispose();
				directory = null;
			}
			base.TearDown();
		}
Example #5
 public override void TearDown()
 {
     if (indexWriter != null)
     {
         indexWriter.Dispose();
         indexWriter = null;
     }
     if (indexReader != null)
     {
         indexReader.Dispose();
         indexReader = null;
     }
     if (directory != null)
     {
         directory.Dispose();
         directory = null;
     }
     CompatibilityExtensions.PurgeSpatialCaches(null);
     base.TearDown();
 }
Example #6
 public override void SetUp()
 {
     base.SetUp();
     Dir = NewDirectory();
     Iw = new RandomIndexWriter(Random(), Dir);
     Document doc = new Document();
     Field idField = new StringField("id", "", Field.Store.NO);
     doc.Add(idField);
     // add 500 docs with id 0..499
     for (int i = 0; i < 500; i++)
     {
         idField.StringValue = Convert.ToString(i);
         Iw.AddDocument(doc);
     }
     // delete 20 of them
     for (int i = 0; i < 20; i++)
     {
         Iw.DeleteDocuments(new Term("id", Convert.ToString(Random().Next(Iw.MaxDoc()))));
     }
     Ir = Iw.Reader;
     @is = NewSearcher(Ir);
 }
Example #7
 public override void SetUp()
 {
     base.SetUp();
     dir = NewDirectory();
     var iw = new RandomIndexWriter(Random(), dir);
     var doc = new Document
     {
         NewStringField("id", "1", Field.Store.YES),
         NewTextField("body", "some contents and more contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 5),
         new DoubleField("latitude", 40.759011, Field.Store.NO),
         new DoubleField("longitude", -73.9844722, Field.Store.NO)
     };
     iw.AddDocument(doc);
     doc = new Document
     {
         NewStringField("id", "2", Field.Store.YES),
         NewTextField("body", "another document with different contents", Field.Store
             .NO),
         new NumericDocValuesField("popularity", 20),
         new DoubleField("latitude", 40.718266, Field.Store.NO),
         new DoubleField("longitude", -74.007819, Field.Store.NO)
     };
     iw.AddDocument(doc);
     doc = new Document
     {
         NewStringField("id", "3", Field.Store.YES),
         NewTextField("body", "crappy contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 2),
         new DoubleField("latitude", 40.7051157, Field.Store.NO),
         new DoubleField("longitude", -74.0088305, Field.Store.NO)
     };
     iw.AddDocument(doc);
     reader = iw.Reader;
     searcher = new IndexSearcher(reader);
     iw.Dispose();
 }
Example #8
        public virtual void Test2BTerms_Mem([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")] Func<IConcurrentMergeScheduler> newScheduler)
        {
            if ("Lucene3x".Equals(Codec.Default.Name, StringComparison.Ordinal))
            {
                throw new Exception("this test cannot run with PreFlex codec");
            }
            Console.WriteLine("Starting Test2B");
            long TERM_COUNT = ((long)int.MaxValue) + 100000000;

            int TERMS_PER_DOC = TestUtil.NextInt32(Random, 100000, 1000000);

            IList<BytesRef> savedTerms = null;

            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BTerms"));

            //MockDirectoryWrapper dir = NewFSDirectory(new File("/p/lucene/indices/2bindex"));
            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
            }
            dir.CheckIndexOnDispose = false; // don't double-checkindex

            if (true)
            {
                IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                                                .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                                                .SetRAMBufferSizeMB(256.0)
                                                .SetMergeScheduler(newScheduler())
                                                .SetMergePolicy(NewLogMergePolicy(false, 10))
                                                .SetOpenMode(OpenMode.CREATE));

                MergePolicy mp = w.Config.MergePolicy;
                if (mp is LogByteSizeMergePolicy)
                {
                    // 1 petabyte:
                    ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
                }

                Documents.Document doc = new Documents.Document();
                MyTokenStream      ts  = new MyTokenStream(Random, TERMS_PER_DOC);

                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
                customType.IndexOptions = IndexOptions.DOCS_ONLY;
                customType.OmitNorms    = true;
                Field field = new Field("field", ts, customType);
                doc.Add(field);
                //w.setInfoStream(System.out);
                int numDocs = (int)(TERM_COUNT / TERMS_PER_DOC);

                Console.WriteLine("TERMS_PER_DOC=" + TERMS_PER_DOC);
                Console.WriteLine("numDocs=" + numDocs);

                for (int i = 0; i < numDocs; i++)
                {
                    long t0 = Environment.TickCount;
                    w.AddDocument(doc);
                    Console.WriteLine(i + " of " + numDocs + " " + (Environment.TickCount - t0) + " msec");
                }
                savedTerms = ts.SavedTerms;

                Console.WriteLine("TEST: full merge");
                w.ForceMerge(1);
                Console.WriteLine("TEST: close writer");
                w.Dispose();
            }

            Console.WriteLine("TEST: open reader");
            IndexReader r = DirectoryReader.Open(dir);

            if (savedTerms == null)
            {
                savedTerms = FindTerms(r);
            }
            int numSavedTerms            = savedTerms.Count;
            IList <BytesRef> bigOrdTerms = new List <BytesRef>(savedTerms.SubList(numSavedTerms - 10, numSavedTerms));

            Console.WriteLine("TEST: test big ord terms...");
            TestSavedTerms(r, bigOrdTerms);
            Console.WriteLine("TEST: test all saved terms...");
            TestSavedTerms(r, savedTerms);
            r.Dispose();

            Console.WriteLine("TEST: now CheckIndex...");
            CheckIndex.Status status = TestUtil.CheckIndex(dir);
            long tc = status.SegmentInfos[0].TermIndexStatus.TermCount;

            Assert.IsTrue(tc > int.MaxValue, "count " + tc + " is not > " + int.MaxValue);

            dir.Dispose();
            Console.WriteLine("TEST: done!");
        }
Example #9
        public virtual void TestRefCounts1()
        {
            Directory    dir1 = GetDir1(Random());
            Directory    dir2 = GetDir2(Random());
            AtomicReader ir1, ir2;
            // close subreaders, ParallelReader will not change refCounts, but close on its own close
            ParallelAtomicReader pr = new ParallelAtomicReader(ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)), ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2)));

            // check RefCounts
            Assert.AreEqual(1, ir1.RefCount);
            Assert.AreEqual(1, ir2.RefCount);
            pr.Dispose();
            Assert.AreEqual(0, ir1.RefCount);
            Assert.AreEqual(0, ir2.RefCount);
            dir1.Dispose();
            dir2.Dispose();
        }
Example #10
        // Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
        private IndexSearcher Parallel(Random random)
        {
            Dir1 = GetDir1(random);
            Dir2 = GetDir2(random);
            ParallelAtomicReader pr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(Dir1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(Dir2)));

            TestUtil.CheckReader(pr);
            return(NewSearcher(pr));
        }
Example #11
        public virtual void TestDocsWithField()
        {
            AssumeTrue("codec does not support docsWithField", DefaultCodecSupportsDocsWithField());
            Directory dir = NewDirectory();

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int numDocs = AtLeast(500);

            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();
                if (Random().Next(4) >= 0)
                {
                    doc.Add(new NumericDocValuesField("numbers", Random().NextLong()));
                }
                doc.Add(new NumericDocValuesField("numbersAlways", Random().NextLong()));
                iw.AddDocument(doc);
                if (Random().Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.Reader;

            iw.ForceMerge(1);
            DirectoryReader ir2    = iw.Reader;
            AtomicReader    merged = GetOnlySegmentReader(ir2);

            iw.Dispose();

            Bits multi  = MultiDocValues.GetDocsWithField(ir, "numbers");
            Bits single = merged.GetDocsWithField("numbers");

            if (multi == null)
            {
                Assert.IsNull(single);
            }
            else
            {
                Assert.AreEqual(single.Length(), multi.Length());
                for (int i = 0; i < numDocs; i++)
                {
                    Assert.AreEqual(single.Get(i), multi.Get(i));
                }
            }

            multi  = MultiDocValues.GetDocsWithField(ir, "numbersAlways");
            single = merged.GetDocsWithField("numbersAlways");
            Assert.AreEqual(single.Length(), multi.Length());
            for (int i = 0; i < numDocs; i++)
            {
                Assert.AreEqual(single.Get(i), multi.Get(i));
            }
            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
Example #12
 /// <summary>
 /// Create a new FilterDirectoryReader that filters a passed in DirectoryReader,
 /// using the supplied SubReaderWrapper to wrap its subreader. </summary>
 /// <param name="in"> the DirectoryReader to filter </param>
 /// <param name="wrapper"> the SubReaderWrapper to use to wrap subreaders </param>
 public FilterDirectoryReader(DirectoryReader @in, SubReaderWrapper wrapper)
     : base(@in.Directory(), wrapper.Wrap(@in.GetSequentialSubReaders().OfType<AtomicReader>().ToList()))
 {
     this.@in = @in;
 }
            public ThreadAnonymousInnerClassHelper(int numDocs, DirectoryReader rd, IndexSearcher searcher, int readsPerThread, AtomicReference<Exception> ex, int i)
            {
                this.NumDocs = numDocs;
                this.Rd = rd;
                this.Searcher = searcher;
                this.ReadsPerThread = readsPerThread;
                this.Ex = ex;
                this.i = i;

                queries = new int[ReadsPerThread];
                for (int j = 0; j < queries.Length; ++j)
                {
                    queries[j] = Random().Next(NumDocs);
                }
            }
            public ThreadAnonymousInnerClassHelper(BaseStoredFieldsFormatTestCase outerInstance, int numDocs, DirectoryReader rd, IndexSearcher searcher, int readsPerThread, AtomicReference<Exception> ex, int i)
            {
                this.OuterInstance = outerInstance;
                this.NumDocs = numDocs;
                this.Rd = rd;
                this.Searcher = searcher;
                this.ReadsPerThread = readsPerThread;
                this.Ex = ex;
                this.i = i;

                queries = new int[ReadsPerThread];
                for (int j = 0; j < queries.Length; ++j)
                {
                    queries[j] = Random().NextIntBetween(0, NumDocs);
                }
            }
Example #15
        [Timeout(1500000)] // 25 minutes
        public virtual void Test()
        {
            // LUCENENET specific - disable the test if not 64 bit
            AssumeTrue("This test consumes too much RAM be run on x86.", Constants.RUNTIME_IS_64BIT);

            MockDirectoryWrapper dir = new MockDirectoryWrapper(Random, new MMapDirectory(CreateTempDir("4GBStoredFields")));

            dir.Throttling = Throttling.NEVER;

            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                         .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                         .SetRAMBufferSizeMB(256.0)
                         .SetMergeScheduler(new ConcurrentMergeScheduler())
                         .SetMergePolicy(NewLogMergePolicy(false, 10))
                         .SetOpenMode(OpenMode.CREATE);
            IndexWriter w = new IndexWriter(dir, config);

            MergePolicy mp = w.Config.MergePolicy;

            if (mp is LogByteSizeMergePolicy)
            {
                // 1 petabyte:
                ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
            }

            Document  doc = new Document();
            FieldType ft  = new FieldType();

            ft.IsIndexed = false;
            ft.IsStored  = true;
            ft.Freeze();
            int valueLength = RandomInts.RandomInt32Between(Random, 1 << 13, 1 << 20);
            var value       = new byte[valueLength];

            for (int i = 0; i < valueLength; ++i)
            {
                // random so that even compressing codecs can't compress it
                value[i] = (byte)Random.Next(256);
            }
            Field f = new Field("fld", value, ft);

            doc.Add(f);

            int numDocs = (int)((1L << 32) / valueLength + 100);

            for (int i = 0; i < numDocs; ++i)
            {
                w.AddDocument(doc);
                if (Verbose && i % (numDocs / 10) == 0)
                {
                    Console.WriteLine(i + " of " + numDocs + "...");
                }
            }
            w.ForceMerge(1);
            w.Dispose();
            if (Verbose)
            {
                bool found = false;
                foreach (string file in dir.ListAll())
                {
                    if (file.EndsWith(".fdt", StringComparison.Ordinal))
                    {
                        long fileLength = dir.FileLength(file);
                        if (fileLength >= 1L << 32)
                        {
                            found = true;
                        }
                        Console.WriteLine("File length of " + file + " : " + fileLength);
                    }
                }
                if (!found)
                {
                    Console.WriteLine("No .fdt file larger than 4GB, test bug?");
                }
            }

            DirectoryReader rd = DirectoryReader.Open(dir);
            Document        sd = rd.Document(numDocs - 1);

            Assert.IsNotNull(sd);
            Assert.AreEqual(1, sd.Fields.Count);
            BytesRef valueRef = sd.GetBinaryValue("fld");

            Assert.IsNotNull(valueRef);
            Assert.AreEqual(new BytesRef(value), valueRef);
            rd.Dispose();

            dir.Dispose();
        }
Example #16
 /// <summary>
 /// Create a new <see cref="FilterDirectoryReader"/> that filters a passed in <see cref="DirectoryReader"/>,
 /// using the supplied <see cref="SubReaderWrapper"/> to wrap its subreader. </summary>
 /// <param name="input"> the <see cref="DirectoryReader"/> to filter </param>
 /// <param name="wrapper"> the <see cref="SubReaderWrapper"/> to use to wrap subreaders </param>
 public FilterDirectoryReader(DirectoryReader input, SubReaderWrapper wrapper)
     : base(input.Directory, wrapper.Wrap(input.GetSequentialSubReaders()))
 {
     this.m_input = input;
 }
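FilterDirectoryReader is normally used by subclassing: the constructor above wraps each subreader through the supplied SubReaderWrapper, and DoWrapDirectoryReader re-wraps readers produced by DirectoryReader.OpenIfChanged. A minimal pass-through sketch (the class names here are hypothetical, not taken from these examples):

// Hypothetical subclass: passes every subreader through unchanged.
// A real filter would return a decorated AtomicReader from Wrap().
public class PassThroughDirectoryReader : FilterDirectoryReader
{
    public PassThroughDirectoryReader(DirectoryReader input)
        : base(input, new PassThroughSubReaderWrapper())
    {
    }

    protected override DirectoryReader DoWrapDirectoryReader(DirectoryReader input)
    {
        // Keeps the filter in place when the reader is reopened.
        return new PassThroughDirectoryReader(input);
    }

    private sealed class PassThroughSubReaderWrapper : SubReaderWrapper
    {
        public override AtomicReader Wrap(AtomicReader reader)
        {
            return reader; // no-op
        }
    }
}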
Example #17
        public virtual void Test2()
        {
            Random            random   = Random;
            int               NUM_DOCS = AtLeast(100);
            Directory         dir      = NewDirectory();
            RandomIndexWriter writer   = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                random, dir);
            bool          allowDups = random.NextBoolean();
            ISet <string> seen      = new JCG.HashSet <string>();

            if (Verbose)
            {
                Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS + " allowDups=" + allowDups);
            }
            int numDocs = 0;
            IList <BytesRef> docValues = new List <BytesRef>();

            // TODO: deletions
            while (numDocs < NUM_DOCS)
            {
                string s;
                if (random.NextBoolean())
                {
                    s = TestUtil.RandomSimpleString(random);
                }
                else
                {
                    s = TestUtil.RandomUnicodeString(random);
                }
                BytesRef br = new BytesRef(s);

                if (!allowDups)
                {
                    if (seen.Contains(s))
                    {
                        continue;
                    }
                    seen.Add(s);
                }

                if (Verbose)
                {
                    Console.WriteLine("  " + numDocs + ": s=" + s);
                }

                Document doc = new Document();
                doc.Add(new SortedDocValuesField("stringdv", br));
                doc.Add(new NumericDocValuesField("id", numDocs));
                docValues.Add(br);
                writer.AddDocument(doc);
                numDocs++;

                if (random.Next(40) == 17)
                {
                    // force flush
                    writer.GetReader().Dispose();
                }
            }

            writer.ForceMerge(1);
            DirectoryReader r = writer.GetReader();

            writer.Dispose();

            AtomicReader sr = GetOnlySegmentReader(r);

            long END_TIME = Environment.TickCount + (TestNightly ? 30 : 1);

            int NUM_THREADS = TestUtil.NextInt32(LuceneTestCase.Random, 1, 10);

            ThreadJob[] threads = new ThreadJob[NUM_THREADS];
            for (int thread = 0; thread < NUM_THREADS; thread++)
            {
                threads[thread] = new ThreadAnonymousClass2(random, docValues, sr, END_TIME);
                threads[thread].Start();
            }

            foreach (ThreadJob thread in threads)
            {
                thread.Join();
            }

            r.Dispose();
            dir.Dispose();
        }
Example #18
 public ThreadAnonymousInnerClassHelper2(TestIndexWriterReader outerInstance, IndexWriter writer, DirectoryReader r, long endTime, IList<Exception> excs)
 {
     this.OuterInstance = outerInstance;
     this.Writer = writer;
     this.r = r;
     this.EndTime = endTime;
     this.Excs = excs;
     rand = new Random(Random().Next());
 }
Example #19
        public virtual void Test()
        {
            // update variables
            int commitPercent = Random().Next(20);
            int softCommitPercent = Random().Next(100); // what percent of the commits are soft
            int deletePercent = Random().Next(50);
            int deleteByQueryPercent = Random().Next(25);
            int ndocs = AtLeast(50);
            int nWriteThreads = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);
            int maxConcurrentCommits = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5); // number of committers at a time... needed if we want to avoid commit errors due to exceeding the max

            bool tombstones = Random().NextBoolean();

            // query variables
            AtomicLong operations = new AtomicLong(AtLeast(10000)); // number of query operations to perform in total

            int nReadThreads = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);
            InitModel(ndocs);

            FieldType storedOnlyType = new FieldType();
            storedOnlyType.Stored = true;

            if (VERBOSE)
            {
                Console.WriteLine("\n");
                Console.WriteLine("TEST: commitPercent=" + commitPercent);
                Console.WriteLine("TEST: softCommitPercent=" + softCommitPercent);
                Console.WriteLine("TEST: deletePercent=" + deletePercent);
                Console.WriteLine("TEST: deleteByQueryPercent=" + deleteByQueryPercent);
                Console.WriteLine("TEST: ndocs=" + ndocs);
                Console.WriteLine("TEST: nWriteThreads=" + nWriteThreads);
                Console.WriteLine("TEST: nReadThreads=" + nReadThreads);
                Console.WriteLine("TEST: maxConcurrentCommits=" + maxConcurrentCommits);
                Console.WriteLine("TEST: tombstones=" + tombstones);
                Console.WriteLine("TEST: operations=" + operations);
                Console.WriteLine("\n");
            }

            AtomicInteger numCommitting = new AtomicInteger();

            IList<ThreadClass> threads = new List<ThreadClass>();

            Directory dir = NewDirectory();

            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            writer.DoRandomForceMergeAssert = false;
            writer.Commit();
            Reader = DirectoryReader.Open(dir);

            for (int i = 0; i < nWriteThreads; i++)
            {
                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, "WRITER" + i, commitPercent, softCommitPercent, deletePercent, deleteByQueryPercent, ndocs, maxConcurrentCommits, tombstones, operations, storedOnlyType, numCommitting, writer);

                threads.Add(thread);
            }

            for (int i = 0; i < nReadThreads; i++)
            {
                ThreadClass thread = new ThreadAnonymousInnerClassHelper2(this, "READER" + i, ndocs, tombstones, operations);

                threads.Add(thread);
            }

            foreach (ThreadClass thread in threads)
            {
                thread.Start();
            }

            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }

            writer.Dispose();
            if (VERBOSE)
            {
                Console.WriteLine("TEST: close reader=" + Reader);
            }
            Reader.Dispose();
            dir.Dispose();
        }
 public ThreadAnonymousInnerClassHelper2(BaseDocValuesFormatTestCase outerInstance, DirectoryReader ir, CountdownEvent startingGun)
 {
     this.OuterInstance = outerInstance;
     this.Ir = ir;
     this.StartingGun = startingGun;
 }
Example #21
 public static void AfterClass()
 {
     atomicReader.Dispose();
     Dir.Dispose();
     Searcher = null;
     atomicReader = null;
     r = null;
     Dir = null;
 }
Example #22
        public virtual void TestCloseWithThreads([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")] Func<IConcurrentMergeScheduler> newScheduler)
        {
            int NUM_THREADS   = 3;
            int numIterations = TestNightly ? 7 : 3;

            for (int iter = 0; iter < numIterations; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: iter=" + iter);
                }
                Directory dir    = NewDirectory();
                var       config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                                   .SetMaxBufferedDocs(10)
                                   .SetMergeScheduler(newScheduler())
                                   .SetMergePolicy(NewLogMergePolicy(4));
                IndexWriter writer    = new IndexWriter(dir, config);
                var         scheduler = config.MergeScheduler as IConcurrentMergeScheduler;
                if (scheduler != null)
                {
                    scheduler.SetSuppressExceptions();
                }

                IndexerThread[] threads = new IndexerThread[NUM_THREADS];

                for (int i = 0; i < NUM_THREADS; i++)
                {
                    threads[i] = new IndexerThread(writer, false, NewField)

                                 // LUCENENET NOTE - ConcurrentMergeScheduler
                                 // used to take too long for this test to index a single document
                                 // so, increased the time from 200 to 300 ms.
                                 // But it has now been restored to 200 ms like Lucene.
                    {
                        timeToRunInMilliseconds = 200
                    };
                }

                for (int i = 0; i < NUM_THREADS; i++)
                {
                    threads[i].Start();
                }

                bool done = false;
                while (!done)
                {
                    Thread.Sleep(100);
                    for (int i = 0; i < NUM_THREADS; i++)
                    // only stop when at least one thread has added a doc
                    {
                        if (threads[i].addCount > 0)
                        {
                            done = true;
                            break;
                        }
                        else if (!threads[i].IsAlive)
                        {
                            Assert.Fail("thread failed before indexing a single document");
                        }
                    }
                }

                if (Verbose)
                {
                    Console.WriteLine("\nTEST: now close");
                }
                writer.Dispose(false);

                // Make sure threads that are adding docs are not hung:
                for (int i = 0; i < NUM_THREADS; i++)
                {
                    // Without fix for LUCENE-1130: one of the
                    // threads will hang
                    threads[i].Join();
                    if (threads[i].IsAlive)
                    {
                        Assert.Fail("thread seems to be hung");
                    }
                }

                // Quick test to make sure index is not corrupt:
                IndexReader reader = DirectoryReader.Open(dir);
                DocsEnum    tdocs  = TestUtil.Docs(Random, reader, "field", new BytesRef("aaa"), MultiFields.GetLiveDocs(reader), null, 0);
                int         count  = 0;
                while (tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
                {
                    count++;
                }
                Assert.IsTrue(count > 0);
                reader.Dispose();

                dir.Dispose();
            }
        }
Example #23
        public virtual void TestVariableBinary()
        {
            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BVariableBinary"));

            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
            }

            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                         .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                         .SetRAMBufferSizeMB(256.0)
                         .SetMergeScheduler(new ConcurrentMergeScheduler())
                         .SetMergePolicy(NewLogMergePolicy(false, 10))
                         .SetOpenMode(OpenMode.CREATE);
            IndexWriter w = new IndexWriter(dir, config);

            Document             doc     = new Document();
            var                  bytes   = new byte[4];
            ByteArrayDataOutput  encoder = new ByteArrayDataOutput(bytes);
            BytesRef             data    = new BytesRef(bytes);
            BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);

            doc.Add(dvField);

            for (int i = 0; i < int.MaxValue; i++)
            {
                encoder.Reset(bytes);
                encoder.WriteVInt32(i % 65535); // 1, 2, or 3 bytes
                data.Length = encoder.Position;
                w.AddDocument(doc);
                if (i % 100000 == 0)
                {
                    Console.WriteLine("indexed: " + i);
                    Console.Out.Flush();
                }
            }

            w.ForceMerge(1);
            w.Dispose();

            Console.WriteLine("verifying...");
            Console.Out.Flush();

            DirectoryReader    r             = DirectoryReader.Open(dir);
            int                expectedValue = 0;
            ByteArrayDataInput input         = new ByteArrayDataInput();

            foreach (AtomicReaderContext context in r.Leaves)
            {
                AtomicReader    reader  = context.AtomicReader;
                BytesRef        scratch = new BytesRef(bytes);
                BinaryDocValues dv      = reader.GetBinaryDocValues("dv");
                for (int i = 0; i < reader.MaxDoc; i++)
                {
                    dv.Get(i, scratch);
                    input.Reset(scratch.Bytes, scratch.Offset, scratch.Length);
                    Assert.AreEqual(expectedValue % 65535, input.ReadVInt32());
                    Assert.IsTrue(input.Eof);
                    expectedValue++;
                }
            }

            r.Dispose();
            dir.Dispose();
        }
Example #24
        // Runs test, with multiple threads, using the specific
        // failure to trigger an IOException
        public virtual void TestMultipleThreadsFailure(Func<IConcurrentMergeScheduler> newScheduler, Failure failure)
        {
            int NUM_THREADS = 3;

            for (int iter = 0; iter < 2; iter++)
            {
                if (Verbose)
                {
                    Console.WriteLine("TEST: iter=" + iter);
                }
                MockDirectoryWrapper dir = NewMockDirectory();
                var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(2)
                             .SetMergeScheduler(newScheduler())
                             .SetMergePolicy(NewLogMergePolicy(4));
                IndexWriter writer    = new IndexWriter(dir, config);
                var         scheduler = config.MergeScheduler as IConcurrentMergeScheduler;
                if (scheduler != null)
                {
                    scheduler.SetSuppressExceptions();
                }

                IndexerThread[] threads = new IndexerThread[NUM_THREADS];

                for (int i = 0; i < NUM_THREADS; i++)
                {
                    threads[i] = new IndexerThread(writer, true, NewField);
                }

                for (int i = 0; i < NUM_THREADS; i++)
                {
                    threads[i].Start();
                }

                Thread.Sleep(10);

                dir.FailOn(failure);
                failure.SetDoFail();

                for (int i = 0; i < NUM_THREADS; i++)
                {
                    threads[i].Join();
                    Assert.IsTrue(threads[i].error == null, "hit unexpected Throwable");
                }

                bool success = false;
                try
                {
                    writer.Dispose(false);
                    success = true;
                }
                catch (IOException)
                {
                    failure.ClearDoFail();
                    writer.Dispose(false);
                }
                if (Verbose)
                {
                    Console.WriteLine("TEST: success=" + success);
                }

                if (success)
                {
                    IndexReader reader  = DirectoryReader.Open(dir);
                    IBits       delDocs = MultiFields.GetLiveDocs(reader);
                    for (int j = 0; j < reader.MaxDoc; j++)
                    {
                        if (delDocs == null || !delDocs.Get(j))
                        {
                            reader.Document(j);
                            reader.GetTermVectors(j);
                        }
                    }
                    reader.Dispose();
                }

                dir.Dispose();
            }
        }
Example #25
 private void InitBlock(bool openReadOnly, DirectoryReader enclosingInstance)
 {
     this.openReadOnly = openReadOnly;
     this.enclosingInstance = enclosingInstance;
 }
Example #26
        public virtual void Test()
        {
            IList <string> postingsList   = new List <string>();
            int            numTerms       = AtLeast(300);
            int            maxTermsPerDoc = TestUtil.NextInt32(Random, 10, 20);
            bool           isSimpleText   = "SimpleText".Equals(TestUtil.GetPostingsFormat("field"), StringComparison.Ordinal);

            IndexWriterConfig iwc = NewIndexWriterConfig(Random, TEST_VERSION_CURRENT, new MockAnalyzer(Random));

            if ((isSimpleText || iwc.MergePolicy is MockRandomMergePolicy) && (TestNightly || RandomMultiplier > 1))
            {
                // Otherwise test can take way too long (> 2 hours)
                numTerms /= 2;
            }
            if (Verbose)
            {
                Console.WriteLine("maxTermsPerDoc=" + maxTermsPerDoc);
                Console.WriteLine("numTerms=" + numTerms);
            }
            for (int i = 0; i < numTerms; i++)
            {
                string term = Convert.ToString(i, CultureInfo.InvariantCulture);
                for (int j = 0; j < i; j++)
                {
                    postingsList.Add(term);
                }
            }

            postingsList.Shuffle(Random);

            ConcurrentQueue<string> postings = new ConcurrentQueue<string>(postingsList);

            Directory dir = NewFSDirectory(CreateTempDir(GetFullMethodName()));

            RandomIndexWriter iw = new RandomIndexWriter(Random, dir, iwc);

            int threadCount = TestUtil.NextInt32(Random, 1, 5);

            if (Verbose)
            {
                Console.WriteLine("config: " + iw.IndexWriter.Config);
                Console.WriteLine("threadCount=" + threadCount);
            }

            Field     prototype = NewTextField("field", "", Field.Store.NO);
            FieldType fieldType = new FieldType(prototype.FieldType);

            if (Random.NextBoolean())
            {
                fieldType.OmitNorms = true;
            }
            int options = Random.Next(3);

            if (options == 0)
            {
                fieldType.IndexOptions     = IndexOptions.DOCS_AND_FREQS; // we don't actually need positions
                fieldType.StoreTermVectors = true;                        // but enforce term vectors when we do this so we check SOMETHING
            }
            else if (options == 1 && !DoesntSupportOffsets.Contains(TestUtil.GetPostingsFormat("field")))
            {
                fieldType.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
            }
            // else just positions

            ThreadJob[]    threads     = new ThreadJob[threadCount];
            CountdownEvent startingGun = new CountdownEvent(1);

            for (int threadID = 0; threadID < threadCount; threadID++)
            {
                Random   threadRandom = new Random(Random.Next());
                Document document     = new Document();
                Field    field        = new Field("field", "", fieldType);
                document.Add(field);
                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, numTerms, maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
                threads[threadID].Start();
            }
            startingGun.Signal();
            foreach (ThreadJob t in threads)
            {
                t.Join();
            }

            iw.ForceMerge(1);
            DirectoryReader ir = iw.GetReader();

            Assert.AreEqual(1, ir.Leaves.Count);
            AtomicReader air   = (AtomicReader)ir.Leaves[0].Reader;
            Terms        terms = air.GetTerms("field");

            // numTerms-1 because there cannot be a term 0 with 0 postings:
            Assert.AreEqual(numTerms - 1, terms.Count);
            TermsEnum termsEnum = terms.GetIterator(null);
            BytesRef  termBR;

            while ((termBR = termsEnum.Next()) != null)
            {
                int value = Convert.ToInt32(termBR.Utf8ToString(), CultureInfo.InvariantCulture);
                Assert.AreEqual(value, termsEnum.TotalTermFreq);
                // don't really need to check more than this, as CheckIndex
                // will verify that totalTermFreq == total number of positions seen
                // from a docsAndPositionsEnum.
            }
            ir.Dispose();
            iw.Dispose();
            dir.Dispose();
        }
Example #27
 private DirectoryReader WrapDirectoryReader(DirectoryReader @in)
 {
     return @in == null ? null : DoWrapDirectoryReader(@in);
 }
Example #28
        public virtual void TestRefCounts1()
        {
            Directory       dir1 = GetDir1(Random());
            Directory       dir2 = GetDir2(Random());
            DirectoryReader ir1, ir2;
            // close subreaders, ParallelReader will not change refCounts, but close on its own close
            ParallelCompositeReader pr    = new ParallelCompositeReader(ir1 = DirectoryReader.Open(dir1), ir2 = DirectoryReader.Open(dir2));
            IndexReader             psub1 = pr.GetSequentialSubReaders()[0];

            // check RefCounts
            Assert.AreEqual(1, ir1.RefCount);
            Assert.AreEqual(1, ir2.RefCount);
            Assert.AreEqual(1, psub1.RefCount);
            pr.Dispose();
            Assert.AreEqual(0, ir1.RefCount);
            Assert.AreEqual(0, ir2.RefCount);
            Assert.AreEqual(0, psub1.RefCount);
            dir1.Dispose();
            dir2.Dispose();
        }
Example #29
 protected virtual void Commit()
 {
     indexWriter.Commit();
     IOUtils.Close(indexReader);
     indexReader = indexWriter.Reader;
     indexSearcher = NewSearcher(indexReader);
 }
Example #30
        public virtual void TestIncompatibleIndexes3()
        {
            Directory dir1 = GetDir1(Random());
            Directory dir2 = GetDir2(Random());

            CompositeReader ir1 = new MultiReader(DirectoryReader.Open(dir1), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)));
            CompositeReader ir2 = new MultiReader(DirectoryReader.Open(dir2), DirectoryReader.Open(dir2));

            CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
            try
            {
                new ParallelCompositeReader(readers);
                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
            }
            catch (System.ArgumentException)
            {
                // expected exception
            }
            try
            {
                new ParallelCompositeReader(Random().NextBoolean(), readers, readers);
                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
            }
            catch (System.ArgumentException)
            {
                // expected exception
            }
            Assert.AreEqual(1, ir1.RefCount);
            Assert.AreEqual(1, ir2.RefCount);
            ir1.Dispose();
            ir2.Dispose();
            Assert.AreEqual(0, ir1.RefCount);
            Assert.AreEqual(0, ir2.RefCount);
            dir1.Dispose();
            dir2.Dispose();
        }
Example #31
        public virtual void TestIgnoreStoredFields()
        {
            Directory    dir1 = GetDir1(Random());
            Directory    dir2 = GetDir2(Random());
            AtomicReader ir1  = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
            AtomicReader ir2  = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));

            // with overlapping
            ParallelAtomicReader pr = new ParallelAtomicReader(false, new AtomicReader[] { ir1, ir2 }, new AtomicReader[] { ir1 });

            Assert.AreEqual("v1", pr.Document(0).Get("f1"));
            Assert.AreEqual("v1", pr.Document(0).Get("f2"));
            Assert.IsNull(pr.Document(0).Get("f3"));
            Assert.IsNull(pr.Document(0).Get("f4"));
            // check that fields are there
            Assert.IsNotNull(pr.GetTerms("f1"));
            Assert.IsNotNull(pr.GetTerms("f2"));
            Assert.IsNotNull(pr.GetTerms("f3"));
            Assert.IsNotNull(pr.GetTerms("f4"));
            pr.Dispose();

            // no stored fields at all
            pr = new ParallelAtomicReader(false, new AtomicReader[] { ir2 }, new AtomicReader[0]);
            Assert.IsNull(pr.Document(0).Get("f1"));
            Assert.IsNull(pr.Document(0).Get("f2"));
            Assert.IsNull(pr.Document(0).Get("f3"));
            Assert.IsNull(pr.Document(0).Get("f4"));
            // check that fields are there
            Assert.IsNull(pr.GetTerms("f1"));
            Assert.IsNull(pr.GetTerms("f2"));
            Assert.IsNotNull(pr.GetTerms("f3"));
            Assert.IsNotNull(pr.GetTerms("f4"));
            pr.Dispose();

            // without overlapping
            pr = new ParallelAtomicReader(true, new AtomicReader[] { ir2 }, new AtomicReader[] { ir1 });
            Assert.AreEqual("v1", pr.Document(0).Get("f1"));
            Assert.AreEqual("v1", pr.Document(0).Get("f2"));
            Assert.IsNull(pr.Document(0).Get("f3"));
            Assert.IsNull(pr.Document(0).Get("f4"));
            // check that fields are there
            Assert.IsNull(pr.GetTerms("f1"));
            Assert.IsNull(pr.GetTerms("f2"));
            Assert.IsNotNull(pr.GetTerms("f3"));
            Assert.IsNotNull(pr.GetTerms("f4"));
            pr.Dispose();

            // no main readers
            try
            {
                new ParallelAtomicReader(true, new AtomicReader[0], new AtomicReader[] { ir1 });
                Assert.Fail("didn't get expected exception: need a non-empty main-reader array");
            }
#pragma warning disable 168
            catch (System.ArgumentException iae)
#pragma warning restore 168
            {
                // pass
            }

            dir1.Dispose();
            dir2.Dispose();
        }
Example #32
 private static DirectoryReader RefreshReader(DirectoryReader reader)
 {
     DirectoryReader oldReader = reader;
     reader = DirectoryReader.OpenIfChanged(reader);
     if (reader != null)
     {
         oldReader.Dispose();
         return reader;
     }
     else
     {
         return oldReader;
     }
 }
Example #33
        public virtual void TestFieldNames()
        {
            Directory            dir1       = GetDir1(Random());
            Directory            dir2       = GetDir2(Random());
            ParallelAtomicReader pr         = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2)));
            FieldInfos           fieldInfos = pr.FieldInfos;

            Assert.AreEqual(4, fieldInfos.Count);
            Assert.IsNotNull(fieldInfos.FieldInfo("f1"));
            Assert.IsNotNull(fieldInfos.FieldInfo("f2"));
            Assert.IsNotNull(fieldInfos.FieldInfo("f3"));
            Assert.IsNotNull(fieldInfos.FieldInfo("f4"));
            pr.Dispose();
            dir1.Dispose();
            dir2.Dispose();
        }
Example #34
        public virtual void Test()
        {
            IList <string> postingsList   = new List <string>();
            int            numTerms       = AtLeast(300);
            int            maxTermsPerDoc = TestUtil.NextInt(Random(), 10, 20);

            bool isSimpleText = "SimpleText".Equals(TestUtil.GetPostingsFormat("field"));

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            if ((isSimpleText || iwc.MergePolicy is MockRandomMergePolicy) && (TEST_NIGHTLY || RANDOM_MULTIPLIER > 1))
            {
                // Otherwise test can take way too long (> 2 hours)
                numTerms /= 2;
            }

            if (VERBOSE)
            {
                Console.WriteLine("maxTermsPerDoc=" + maxTermsPerDoc);
                Console.WriteLine("numTerms=" + numTerms);
            }

            for (int i = 0; i < numTerms; i++)
            {
                string term = Convert.ToString(i);
                for (int j = 0; j < i; j++)
                {
                    postingsList.Add(term);
                }
            }
            Collections.Shuffle(postingsList);

            ConcurrentQueue<string> postings = new ConcurrentQueue<string>(postingsList);

            Directory         dir = NewFSDirectory(CreateTempDir("bagofpostings"));
            RandomIndexWriter iw  = new RandomIndexWriter(Random(), dir, iwc);

            int threadCount = TestUtil.NextInt(Random(), 1, 5);

            if (VERBOSE)
            {
                Console.WriteLine("config: " + iw.w.Config);
                Console.WriteLine("threadCount=" + threadCount);
            }

            ThreadClass[]  threads     = new ThreadClass[threadCount];
            CountdownEvent startingGun = new CountdownEvent(1);

            for (int threadID = 0; threadID < threadCount; threadID++)
            {
                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, maxTermsPerDoc, postings, iw, startingGun);
                threads[threadID].Start();
            }
            startingGun.Signal();
            foreach (ThreadClass t in threads)
            {
                t.Join();
            }

            iw.ForceMerge(1);
            DirectoryReader ir = iw.Reader;

            Assert.AreEqual(1, ir.Leaves.Count);
            AtomicReader air   = (AtomicReader)ir.Leaves[0].Reader;
            Terms        terms = air.GetTerms("field");

            // numTerms-1 because there cannot be a term 0 with 0 postings:
#pragma warning disable 612, 618
            Assert.AreEqual(numTerms - 1, air.Fields.UniqueTermCount);
            if (iwc.Codec is Lucene3xCodec == false)
#pragma warning restore 612, 618
            {
                Assert.AreEqual(numTerms - 1, terms.Count);
            }
            TermsEnum termsEnum = terms.GetIterator(null);
            BytesRef  term_;
            while ((term_ = termsEnum.Next()) != null)
            {
                int value = Convert.ToInt32(term_.Utf8ToString());
                Assert.AreEqual(value, termsEnum.DocFreq);
                // don't really need to check more than this, as CheckIndex
                // will verify that docFreq == actual number of documents seen
                // from a docsAndPositionsEnum.
            }
            ir.Dispose();
            iw.Dispose();
            dir.Dispose();
        }
Example #35
        public virtual void TestBasic()
        {
            Directory   dir      = NewDirectory();
            Analyzer    analyzer = new MockAnalyzer(Random());
            IndexWriter writer   = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(2).SetSimilarity(new SimpleSimilarity()).SetMergePolicy(NewLogMergePolicy(2)));

            StringBuilder sb   = new StringBuilder(265);
            string        term = "term";

            for (int i = 0; i < 30; i++)
            {
                Document doc = new Document();
                sb.Append(term).Append(" ");
                string content = sb.ToString();
                Field  noTf    = NewField("noTf", content + (i % 2 == 0 ? "" : " notf"), OmitType);
                doc.Add(noTf);

                Field tf = NewField("tf", content + (i % 2 == 0 ? " tf" : ""), NormalType);
                doc.Add(tf);

                writer.AddDocument(doc);
                //System.out.println(d);
            }

            writer.ForceMerge(1);
            // flush
            writer.Dispose();

            /*
             * Verify the index
             */
            IndexReader   reader   = DirectoryReader.Open(dir);
            IndexSearcher searcher = NewSearcher(reader);

            searcher.Similarity = new SimpleSimilarity();

            Term      a  = new Term("noTf", term);
            Term      b  = new Term("tf", term);
            Term      c  = new Term("noTf", "notf");
            Term      d  = new Term("tf", "tf");
            TermQuery q1 = new TermQuery(a);
            TermQuery q2 = new TermQuery(b);
            TermQuery q3 = new TermQuery(c);
            TermQuery q4 = new TermQuery(d);

            PhraseQuery pq = new PhraseQuery();

            pq.Add(a);
            pq.Add(c);
            try
            {
                searcher.Search(pq, 10);
                Assert.Fail("did not hit expected exception");
            }
            catch (Exception e)
            {
                Exception cause = e;
                // If the searcher uses an executor service, the IAE is wrapped into other exceptions
                while (cause.InnerException != null)
                {
                    cause = cause.InnerException;
                }
                if (!(cause is InvalidOperationException))
                {
                    throw new InvalidOperationException("Expected an IAE", e);
                } // else OK because positions are not indexed
            }

            searcher.Search(q1, new CountingHitCollectorAnonymousInnerClassHelper(this));
            //System.out.println(CountingHitCollector.getCount());

            searcher.Search(q2, new CountingHitCollectorAnonymousInnerClassHelper2(this));
            //System.out.println(CountingHitCollector.getCount());

            searcher.Search(q3, new CountingHitCollectorAnonymousInnerClassHelper3(this));
            //System.out.println(CountingHitCollector.getCount());

            searcher.Search(q4, new CountingHitCollectorAnonymousInnerClassHelper4(this));
            //System.out.println(CountingHitCollector.getCount());

            BooleanQuery bq = new BooleanQuery();

            bq.Add(q1, Occur.MUST);
            bq.Add(q4, Occur.MUST);

            searcher.Search(bq, new CountingHitCollectorAnonymousInnerClassHelper5(this));
            Assert.AreEqual(15, CountingHitCollector.Count);

            reader.Dispose();
            dir.Dispose();
        }
Example #36
        public virtual void TestPositions()
        {
            Directory   ram      = NewDirectory();
            Analyzer    analyzer = new MockAnalyzer(Random);
            IndexWriter writer   = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
            Document    d        = new Document();

            // f1,f2,f3: docs only
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);

            ft.IndexOptions = IndexOptions.DOCS_ONLY;

            Field f1 = NewField("f1", "this field has docs only", ft);

            d.Add(f1);

            Field f2 = NewField("f2", "this field has docs only", ft);

            d.Add(f2);

            Field f3 = NewField("f3", "this field has docs only", ft);

            d.Add(f3);

            FieldType ft2 = new FieldType(TextField.TYPE_NOT_STORED);

            ft2.IndexOptions = IndexOptions.DOCS_AND_FREQS;

            // f4,f5,f6 docs and freqs
            Field f4 = NewField("f4", "this field has docs and freqs", ft2);

            d.Add(f4);

            Field f5 = NewField("f5", "this field has docs and freqs", ft2);

            d.Add(f5);

            Field f6 = NewField("f6", "this field has docs and freqs", ft2);

            d.Add(f6);

            FieldType ft3 = new FieldType(TextField.TYPE_NOT_STORED);

            ft3.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;

            // f7,f8,f9 docs/freqs/positions
            Field f7 = NewField("f7", "this field has docs and freqs and positions", ft3);

            d.Add(f7);

            Field f8 = NewField("f8", "this field has docs and freqs and positions", ft3);

            d.Add(f8);

            Field f9 = NewField("f9", "this field has docs and freqs and positions", ft3);

            d.Add(f9);

            writer.AddDocument(d);
            writer.ForceMerge(1);

            // now we add another document which has docs-only for f1, f4, f7, docs/freqs for f2, f5, f8,
            // and docs/freqs/positions for f3, f6, f9
            d = new Document();

            // f1,f4,f7: docs only
            f1 = NewField("f1", "this field has docs only", ft);
            d.Add(f1);

            f4 = NewField("f4", "this field has docs only", ft);
            d.Add(f4);

            f7 = NewField("f7", "this field has docs only", ft);
            d.Add(f7);

            // f2, f5, f8: docs and freqs
            f2 = NewField("f2", "this field has docs and freqs", ft2);
            d.Add(f2);

            f5 = NewField("f5", "this field has docs and freqs", ft2);
            d.Add(f5);

            f8 = NewField("f8", "this field has docs and freqs", ft2);
            d.Add(f8);

            // f3, f6, f9: docs and freqs and positions
            f3 = NewField("f3", "this field has docs and freqs and positions", ft3);
            d.Add(f3);

            f6 = NewField("f6", "this field has docs and freqs and positions", ft3);
            d.Add(f6);

            f9 = NewField("f9", "this field has docs and freqs and positions", ft3);
            d.Add(f9);

            writer.AddDocument(d);

            // force merge
            writer.ForceMerge(1);
            // flush
            writer.Dispose();

            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
            FieldInfos    fi     = reader.FieldInfos;

            // docs + docs = docs
            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f1").IndexOptions);
            // docs + docs/freqs = docs
            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f2").IndexOptions);
            // docs + docs/freqs/pos = docs
            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f3").IndexOptions);
            // docs/freqs + docs = docs
            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f4").IndexOptions);
            // docs/freqs + docs/freqs = docs/freqs
            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS, fi.FieldInfo("f5").IndexOptions);
            // docs/freqs + docs/freqs/pos = docs/freqs
            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS, fi.FieldInfo("f6").IndexOptions);
            // docs/freqs/pos + docs = docs
            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f7").IndexOptions);
            // docs/freqs/pos + docs/freqs = docs/freqs
            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS, fi.FieldInfo("f8").IndexOptions);
            // docs/freqs/pos + docs/freqs/pos = docs/freqs/pos
            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.FieldInfo("f9").IndexOptions);

            reader.Dispose();
            ram.Dispose();
        }
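The assertions above capture Lucene's pruning rule for mixed IndexOptions: when the same field is indexed with different options across documents, the merged FieldInfo keeps the most restrictive (lowest) option, and the richer postings data already written is discarded on merge. A minimal standalone sketch of that rule, assuming the usual LuceneTestCase helpers (NewDirectory, NewField, Random) and a hypothetical "body" field:

        using (Directory dir = NewDirectory())
        {
            using (var writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))))
            {
                var rich = new FieldType(TextField.TYPE_NOT_STORED) { IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS };
                var poor = new FieldType(TextField.TYPE_NOT_STORED) { IndexOptions = IndexOptions.DOCS_ONLY };

                var d1 = new Document();
                d1.Add(NewField("body", "first document", rich));
                writer.AddDocument(d1);

                var d2 = new Document();
                d2.Add(NewField("body", "second document", poor));
                writer.AddDocument(d2);

                writer.ForceMerge(1);
            }

            using (DirectoryReader r = DirectoryReader.Open(dir))
            {
                // DOCS_ONLY wins: the positions indexed for d1 are gone after the merge.
                Assert.AreEqual(IndexOptions.DOCS_ONLY, GetOnlySegmentReader(r).FieldInfos.FieldInfo("body").IndexOptions);
            }
        }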
Example #37
        public virtual void TestRandom()
        {
            // token -> docID -> tokens
            IDictionary<string, IDictionary<int?, IList<Token>>> actualTokens = new Dictionary<string, IDictionary<int?, IList<Token>>>();

            Directory         dir = NewDirectory();
            RandomIndexWriter w   = new RandomIndexWriter(Random, dir, iwc);

            int numDocs = AtLeast(20);
            //final int numDocs = AtLeast(5);

            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);

            // TODO: randomize what IndexOptions we use; also test
            // changing this up in one IW buffered segment...:
            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
            if (Random.NextBoolean())
            {
                ft.StoreTermVectors         = true;
                ft.StoreTermVectorOffsets   = Random.NextBoolean();
                ft.StoreTermVectorPositions = Random.NextBoolean();
            }

            for (int docCount = 0; docCount < numDocs; docCount++)
            {
                Document doc = new Document();
                doc.Add(new Int32Field("id", docCount, Field.Store.NO));
                IList<Token>  tokens    = new List<Token>();
                int           numTokens = AtLeast(100);
                //final int numTokens = AtLeast(20);
                int pos    = -1;
                int offset = 0;
                //System.out.println("doc id=" + docCount);
                for (int tokenCount = 0; tokenCount < numTokens; tokenCount++)
                {
                    string text;
                    if (Random.NextBoolean())
                    {
                        text = "a";
                    }
                    else if (Random.NextBoolean())
                    {
                        text = "b";
                    }
                    else if (Random.NextBoolean())
                    {
                        text = "c";
                    }
                    else
                    {
                        text = "d";
                    }

                    int posIncr = Random.NextBoolean() ? 1 : Random.Next(5);
                    if (tokenCount == 0 && posIncr == 0)
                    {
                        posIncr = 1;
                    }
                    int offIncr     = Random.NextBoolean() ? 0 : Random.Next(5);
                    int tokenOffset = Random.Next(5);

                    Token token = MakeToken(text, posIncr, offset + offIncr, offset + offIncr + tokenOffset);
                    if (!actualTokens.TryGetValue(text, out IDictionary<int?, IList<Token>> postingsByDoc))
                    {
                        actualTokens[text] = postingsByDoc = new Dictionary<int?, IList<Token>>();
                    }
                    if (!postingsByDoc.TryGetValue(docCount, out IList<Token> postings))
                    {
                        postingsByDoc[docCount] = postings = new List<Token>();
                    }
                    postings.Add(token);
                    tokens.Add(token);
                    pos += posIncr;
                    // stuff abs position into type:
                    token.Type = "" + pos;
                    offset    += offIncr + tokenOffset;
                    //System.out.println("  " + token + " posIncr=" + token.getPositionIncrement() + " pos=" + pos + " off=" + token.StartOffset + "/" + token.EndOffset + " (freq=" + postingsByDoc.Get(docCount).Size() + ")");
                }
                doc.Add(new Field("content", new CannedTokenStream(tokens.ToArray()), ft));
                w.AddDocument(doc);
            }
            DirectoryReader r = w.GetReader();

            w.Dispose();

            string[] terms = new string[] { "a", "b", "c", "d" };
            foreach (AtomicReaderContext ctx in r.Leaves)
            {
                // TODO: improve this
                AtomicReader sub = (AtomicReader)ctx.Reader;
                //System.out.println("\nsub=" + sub);
                TermsEnum            termsEnum                  = sub.Fields.GetTerms("content").GetIterator(null);
                DocsEnum             docs                       = null;
                DocsAndPositionsEnum docsAndPositions           = null;
                DocsAndPositionsEnum docsAndPositionsAndOffsets = null;
                FieldCache.Int32s    docIDToID                  = FieldCache.DEFAULT.GetInt32s(sub, "id", false);
                foreach (string term in terms)
                {
                    //System.out.println("  term=" + term);
                    if (termsEnum.SeekExact(new BytesRef(term)))
                    {
                        docs = termsEnum.Docs(null, docs);
                        Assert.IsNotNull(docs);
                        int doc;
                        //System.out.println("    doc/freq");
                        while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                        {
                            IList<Token> expected = actualTokens[term][docIDToID.Get(doc)];
                            //System.out.println("      doc=" + docIDToID.Get(doc) + " docID=" + doc + " " + expected.Size() + " freq");
                            Assert.IsNotNull(expected);
                            Assert.AreEqual(expected.Count, docs.Freq);
                        }

                        // explicitly exclude offsets here
                        docsAndPositions = termsEnum.DocsAndPositions(null, docsAndPositions, DocsAndPositionsFlags.PAYLOADS);
                        Assert.IsNotNull(docsAndPositions);
                        //System.out.println("    doc/freq/pos");
                        while ((doc = docsAndPositions.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                        {
                            IList<Token> expected = actualTokens[term][docIDToID.Get(doc)];
                            //System.out.println("      doc=" + docIDToID.Get(doc) + " " + expected.Size() + " freq");
                            Assert.IsNotNull(expected);
                            Assert.AreEqual(expected.Count, docsAndPositions.Freq);
                            foreach (Token token in expected)
                            {
                                int pos = Convert.ToInt32(token.Type);
                                //System.out.println("        pos=" + pos);
                                Assert.AreEqual(pos, docsAndPositions.NextPosition());
                            }
                        }

                        docsAndPositionsAndOffsets = termsEnum.DocsAndPositions(null, docsAndPositions);
                        Assert.IsNotNull(docsAndPositionsAndOffsets);
                        //System.out.println("    doc/freq/pos/offs");
                        while ((doc = docsAndPositionsAndOffsets.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                        {
                            IList<Token> expected = actualTokens[term][docIDToID.Get(doc)];
                            //System.out.println("      doc=" + docIDToID.Get(doc) + " " + expected.Size() + " freq");
                            Assert.IsNotNull(expected);
                            Assert.AreEqual(expected.Count, docsAndPositionsAndOffsets.Freq);
                            foreach (Token token in expected)
                            {
                                int pos = Convert.ToInt32(token.Type);
                                //System.out.println("        pos=" + pos);
                                Assert.AreEqual(pos, docsAndPositionsAndOffsets.NextPosition());
                                Assert.AreEqual(token.StartOffset, docsAndPositionsAndOffsets.StartOffset);
                                Assert.AreEqual(token.EndOffset, docsAndPositionsAndOffsets.EndOffset);
                            }
                        }
                    }
                }
                // TODO: test advance:
            }
            r.Dispose();
            dir.Dispose();
        }
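The three access levels exercised above go from cheapest to richest: TermsEnum.Docs yields doc IDs and frequencies, and TermsEnum.DocsAndPositions adds positions plus, when the field was indexed with offsets, start/end offsets; passing DocsAndPositionsFlags.PAYLOADS instead of the default flags is how the test opts out of offsets. A condensed consumption sketch for a single term, reusing sub and the "content" field from the loop above:

        TermsEnum termsEnum = sub.Fields.GetTerms("content").GetIterator(null);
        if (termsEnum.SeekExact(new BytesRef("a")))
        {
            DocsAndPositionsEnum dpe = termsEnum.DocsAndPositions(null, null);
            int docId;
            while ((docId = dpe.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
            {
                for (int i = 0; i < dpe.Freq; i++)
                {
                    int position = dpe.NextPosition(); // token position within docId
                    int start = dpe.StartOffset;       // offsets are available because the field
                    int end = dpe.EndOffset;           // was indexed with ..._AND_OFFSETS
                }
            }
        }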
Example #38
 public ReaderClosedListenerAnonymousInnerClassHelper(TestDirectoryReader outerInstance, DirectoryReader reader, int[] closeCount)
 {
     this.OuterInstance = outerInstance;
     this.Reader = reader;
     this.CloseCount = closeCount;
 }
Example #39
        public static void BeforeClass()
        {
            Dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), Dir);
            int numDocs = AtLeast(300);
            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();

                AddSome(doc, AlwaysTerms);

                if (Random().Next(100) < 90)
                {
                    AddSome(doc, CommonTerms);
                }
                if (Random().Next(100) < 50)
                {
                    AddSome(doc, MediumTerms);
                }
                if (Random().Next(100) < 10)
                {
                    AddSome(doc, RareTerms);
                }
                iw.AddDocument(doc);
            }
            iw.ForceMerge(1);
            iw.Dispose();
            r = DirectoryReader.Open(Dir);
            atomicReader = GetOnlySegmentReader(r);
            Searcher = new IndexSearcher(atomicReader);
            Searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper();
        }
Example #40
 public AnonymousFindSegmentsFile(Directory directory, bool openReadOnly, DirectoryReader dirReader) : base(directory)
 {
     this.dir = directory;
     this.openReadOnly = openReadOnly;
     enclosingInstance = dirReader;
 }
Example #41
        public virtual void TestFixedBinary()
        {
            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BFixedBinary"));

            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
            }
            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                         .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                         .SetRAMBufferSizeMB(256.0)
                         .SetMergeScheduler(new ConcurrentMergeScheduler())
                         .SetMergePolicy(NewLogMergePolicy(false, 10))
                         .SetOpenMode(OpenMode.CREATE);
            IndexWriter w = new IndexWriter(dir, config);

            Document             doc     = new Document();
            var                  bytes   = new byte[4];
            BytesRef             data    = new BytesRef(bytes);
            BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);

            doc.Add(dvField);

            for (int i = 0; i < int.MaxValue; i++)
            {
                bytes[0] = (byte)(i >> 24);
                bytes[1] = (byte)(i >> 16);
                bytes[2] = (byte)(i >> 8);
                bytes[3] = (byte)i;
                w.AddDocument(doc);
                if (i % 100000 == 0)
                {
                    Console.WriteLine("indexed: " + i);
                    Console.Out.Flush();
                }
            }

            w.ForceMerge(1);
            w.Dispose();

            Console.WriteLine("verifying...");
            Console.Out.Flush();

            DirectoryReader r             = DirectoryReader.Open(dir);
            int             expectedValue = 0;

            foreach (AtomicReaderContext context in r.Leaves)
            {
                AtomicReader    reader  = context.AtomicReader;
                BytesRef        scratch = new BytesRef();
                BinaryDocValues dv      = reader.GetBinaryDocValues("dv");
                for (int i = 0; i < reader.MaxDoc; i++)
                {
                    bytes[0] = (byte)(expectedValue >> 24);
                    bytes[1] = (byte)(expectedValue >> 16);
                    bytes[2] = (byte)(expectedValue >> 8);
                    bytes[3] = (byte)expectedValue;
                    dv.Get(i, scratch);
                    Assert.AreEqual(data, scratch);
                    expectedValue++;
                }
            }

            r.Dispose();
            dir.Dispose();
        }
Example #42
		protected void commit()
		{
			indexWriter.Commit();
			if (indexReader == null)
			{
				indexReader = (DirectoryReader)IndexReader.Open(directory, true);
			}
			else
			{
				indexReader = (DirectoryReader)indexReader.Reopen();
			}
			indexSearcher = newSearcher(indexReader);
		}
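This commit-then-reopen helper is the 3.x-era pattern: Reopen() returns a reader reflecting the commit, or the same instance if nothing changed (when a new reader is returned, the superseded one should normally be closed as well). The near-real-time variant of the same idea appears in Example #48 below, which pairs Writer.GetReader(true) with DirectoryReader.OpenIfChanged and DecRefs the old reader.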
Example #43
        public virtual void TestFlushExceptions()
        {
            MockDirectoryWrapper directory = NewMockDirectory();
            FailOnlyOnFlush      failure   = new FailOnlyOnFlush(this);

            directory.FailOn(failure);

            IndexWriter writer  = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
            Document    doc     = new Document();
            Field       idField = NewStringField("id", "", Field.Store.YES);

            doc.Add(idField);
            int extraCount = 0;

            for (int i = 0; i < 10; i++)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: iter=" + i);
                }

                for (int j = 0; j < 20; j++)
                {
                    idField.StringValue = Convert.ToString(i * 20 + j);
                    writer.AddDocument(doc);
                }

                // must cycle here because sometimes the merge flushes
                // the doc we just added and so there's nothing to
                // flush, and we don't hit the exception
                while (true)
                {
                    writer.AddDocument(doc);
                    failure.SetDoFail();
                    try
                    {
                        writer.Flush(true, true);
                        if (failure.HitExc)
                        {
                            Assert.Fail("failed to hit IOException");
                        }
                        extraCount++;
                    }
                    catch (IOException ioe)
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine(ioe.StackTrace);
                        }
                        failure.ClearDoFail();
                        break;
                    }
                }
                Assert.AreEqual(20 * (i + 1) + extraCount, writer.NumDocs());
            }

            writer.Dispose();
            IndexReader reader = DirectoryReader.Open(directory);

            Assert.AreEqual(200 + extraCount, reader.NumDocs);
            reader.Dispose();
            directory.Dispose();
        }
Example #44
        public virtual void TestRandom()
        {
            int                    numThreads          = 1 + Random.Next(8);
            int                    numDocumentsToIndex = 50 + AtLeast(70);
            AtomicInt32            numDocs             = new AtomicInt32(numDocumentsToIndex);
            Directory              dir         = NewDirectory();
            IndexWriterConfig      iwc         = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
            MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();

            iwc.SetFlushPolicy(flushPolicy);

            int numDWPT = 1 + Random.Next(8);
            DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(numDWPT);

            iwc.SetIndexerThreadPool(threadPool);

            IndexWriter writer = new IndexWriter(dir, iwc);

            flushPolicy = (MockDefaultFlushPolicy)writer.Config.FlushPolicy;
            DocumentsWriter docsWriter = writer.DocsWriter;

            Assert.IsNotNull(docsWriter);
            DocumentsWriterFlushControl flushControl = docsWriter.flushControl;

            Assert.AreEqual(0, flushControl.FlushBytes, " bytes must be 0 after init");

            IndexThread[] threads = new IndexThread[numThreads];
            for (int x = 0; x < threads.Length; x++)
            {
                threads[x] = new IndexThread(this, numDocs, numThreads, writer, lineDocFile, true);
                threads[x].Start();
            }

            for (int x = 0; x < threads.Length; x++)
            {
                threads[x].Join();
            }
            Assert.AreEqual(0, flushControl.FlushBytes, " all flushes must be due");
            Assert.AreEqual(numDocumentsToIndex, writer.NumDocs);
            Assert.AreEqual(numDocumentsToIndex, writer.MaxDoc);
            if (flushPolicy.FlushOnRAM && !flushPolicy.FlushOnDocCount && !flushPolicy.FlushOnDeleteTerms)
            {
                long maxRAMBytes = (long)(iwc.RAMBufferSizeMB * 1024.0 * 1024.0);
                Assert.IsTrue(flushPolicy.peakBytesWithoutFlush <= maxRAMBytes, "peak bytes without flush exceeded watermark");
                if (flushPolicy.hasMarkedPending)
                {
                    assertTrue("max: " + maxRAMBytes + " " + flushControl.peakActiveBytes, maxRAMBytes <= flushControl.peakActiveBytes);
                }
            }
            AssertActiveBytesAfter(flushControl);
            writer.Commit();
            Assert.AreEqual(0, flushControl.ActiveBytes);
            IndexReader r = DirectoryReader.Open(dir);

            Assert.AreEqual(numDocumentsToIndex, r.NumDocs);
            Assert.AreEqual(numDocumentsToIndex, r.MaxDoc);
            if (!flushPolicy.FlushOnRAM)
            {
                assertFalse("never stall if we don't flush on RAM", docsWriter.flushControl.stallControl.WasStalled);
                assertFalse("never block if we don't flush on RAM", docsWriter.flushControl.stallControl.HasBlocked);
            }
            r.Dispose();
            writer.Dispose();
            dir.Dispose();
        }
Example #45
        public virtual void TestCommitOnCloseAbort()
        {
            Directory   dir    = NewDirectory();
            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10));

            for (int i = 0; i < 14; i++)
            {
                TestIndexWriter.AddDoc(writer);
            }
            writer.Dispose();

            Term          searchTerm = new Term("content", "aaa");
            IndexReader   reader     = DirectoryReader.Open(dir);
            IndexSearcher searcher   = NewSearcher(reader);

            ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
            Assert.AreEqual(14, hits.Length, "first number of hits");
            reader.Dispose();

            writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMaxBufferedDocs(10));
            for (int j = 0; j < 17; j++)
            {
                TestIndexWriter.AddDoc(writer);
            }
            // Delete all docs:
            writer.DeleteDocuments(searchTerm);

            reader   = DirectoryReader.Open(dir);
            searcher = NewSearcher(reader);
            hits     = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
            Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
            reader.Dispose();

            // Now, close the writer:
            writer.Rollback();

            TestIndexWriter.AssertNoUnreferencedFiles(dir, "unreferenced files remain after rollback()");

            reader   = DirectoryReader.Open(dir);
            searcher = NewSearcher(reader);
            hits     = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
            Assert.AreEqual(14, hits.Length, "saw changes after writer.abort");
            reader.Dispose();

            // Now make sure we can re-open the index, add docs,
            // and all is good:
            writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMaxBufferedDocs(10));

            // On abort, writer in fact may write to the same
            // segments_N file:
            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).PreventDoubleWrite = false;
            }

            for (int i = 0; i < 12; i++)
            {
                for (int j = 0; j < 17; j++)
                {
                    TestIndexWriter.AddDoc(writer);
                }
                IndexReader r = DirectoryReader.Open(dir);
                searcher = NewSearcher(r);
                hits     = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
                r.Dispose();
            }

            writer.Dispose();
            IndexReader ir = DirectoryReader.Open(dir);

            searcher = NewSearcher(ir);
            hits     = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
            Assert.AreEqual(218, hits.Length, "didn't see changes after close");
            ir.Dispose();

            dir.Dispose();
        }
Example #46
        public static void Main(string[] args)
        {
            if (args.Length < 5)
            {
                // LUCENENET specific - our wrapper console shows the correct usage
                throw new ArgumentException();
                //Console.Error.WriteLine("Usage: MultiPassIndexSplitter -out <outputDir> -num <numParts> [-seq] <inputIndex1> [<inputIndex2 ...]");
                //Console.Error.WriteLine("\tinputIndex\tpath to input index, multiple values are ok");
                //Console.Error.WriteLine("\t-out ouputDir\tpath to output directory to contain partial indexes");
                //Console.Error.WriteLine("\t-num numParts\tnumber of parts to produce");
                //Console.Error.WriteLine("\t-seq\tsequential docid-range split (default is round-robin)");
                //Environment.Exit(-1);
            }
            List<IndexReader> indexes = new List<IndexReader>();

            try
            {
                string outDir   = null;
                int    numParts = -1;
                bool   seq      = false;
                for (int i = 0; i < args.Length; i++)
                {
                    if (args[i].Equals("-out", StringComparison.Ordinal))
                    {
                        outDir = args[++i];
                    }
                    else if (args[i].Equals("-num", StringComparison.Ordinal))
                    {
                        numParts = Convert.ToInt32(args[++i], CultureInfo.InvariantCulture);
                    }
                    else if (args[i].Equals("-seq", StringComparison.Ordinal))
                    {
                        seq = true;
                    }
                    else
                    {
                        DirectoryInfo file = new DirectoryInfo(args[i]);
                        if (!file.Exists)
                        {
                            Console.Error.WriteLine("Invalid input path - skipping: " + file);
                            continue;
                        }
                        using (Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[i])))
                        {
                            try
                            {
                                if (!DirectoryReader.IndexExists(dir))
                                {
                                    Console.Error.WriteLine("Invalid input index - skipping: " + file);
                                    continue;
                                }
                            }
                            catch (Exception)
                            {
                                Console.Error.WriteLine("Invalid input index - skipping: " + file);
                                continue;
                            }
                            indexes.Add(DirectoryReader.Open(dir));
                        }
                    }
                }
                if (outDir == null)
                {
                    throw new Exception("Required argument missing: -out outputDir");
                }
                if (numParts < 2)
                {
                    throw new Exception("Invalid value of required argument: -num numParts");
                }
                if (indexes.Count == 0)
                {
                    throw new Exception("No input indexes to process");
                }
                DirectoryInfo @out = new DirectoryInfo(outDir);
                @out.Create();
                if (!new DirectoryInfo(outDir).Exists)
                {
                    throw new Exception("Can't create output directory: " + @out);
                }
                Store.Directory[] dirs = new Store.Directory[numParts];
                try
                {
                    for (int i = 0; i < numParts; i++)
                    {
                        dirs[i] = FSDirectory.Open(new DirectoryInfo(Path.Combine(@out.FullName, "part-" + i)));
                    }
                    MultiPassIndexSplitter splitter = new MultiPassIndexSplitter();
                    IndexReader            input;
                    if (indexes.Count == 1)
                    {
                        input = indexes[0];
                    }
                    else
                    {
                        input = new MultiReader(indexes.ToArray());
                    }
#pragma warning disable 612, 618
                    splitter.Split(LuceneVersion.LUCENE_CURRENT, input, dirs, seq);
#pragma warning restore 612, 618
                }
                finally
                {
                    // LUCENENET specific - properly dispose directories to prevent resource leaks
                    IOUtils.Dispose(dirs);
                }
            }
            finally
            {
                // LUCENENET specific - properly dispose index readers to prevent resource leaks
                IOUtils.Dispose(indexes);
            }
        }
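For reference, the same split can be driven programmatically; a minimal sketch mirroring the calls Main makes above, with hypothetical input/output paths and two round-robin parts:

        using (Store.Directory source = FSDirectory.Open(new DirectoryInfo("index"))) // hypothetical input index
        using (IndexReader input = DirectoryReader.Open(source))
        {
            Store.Directory[] dirs = new Store.Directory[2];
            try
            {
                for (int i = 0; i < dirs.Length; i++)
                {
                    dirs[i] = FSDirectory.Open(new DirectoryInfo(Path.Combine("parts", "part-" + i))); // hypothetical output
                }
#pragma warning disable 612, 618
                new MultiPassIndexSplitter().Split(LuceneVersion.LUCENE_CURRENT, input, dirs, false); // false = round-robin split
#pragma warning restore 612, 618
            }
            finally
            {
                IOUtils.Dispose(dirs);
            }
        }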
Example #47
        // TODO: maybe this can reuse the logic of test dueling codecs?
        public static void AssertIndexEquals(DirectoryReader index1, DirectoryReader index2)
        {
            Assert.AreEqual(index1.NumDocs, index2.NumDocs, "IndexReaders have different values for numDocs.");
            Assert.AreEqual(index1.MaxDoc, index2.MaxDoc, "IndexReaders have different values for maxDoc.");
            Assert.AreEqual(index1.HasDeletions, index2.HasDeletions, "Only one IndexReader has deletions.");
            Assert.AreEqual(index1.Leaves.Count == 1, index2.Leaves.Count == 1, "Single segment test differs.");

            // check field names
            FieldInfos fieldInfos1 = MultiFields.GetMergedFieldInfos(index1);
            FieldInfos fieldInfos2 = MultiFields.GetMergedFieldInfos(index2);
            Assert.AreEqual(fieldInfos1.Size(), fieldInfos2.Size(), "IndexReaders have different numbers of fields.");
            int numFields = fieldInfos1.Size();
            for (int fieldID = 0; fieldID < numFields; fieldID++)
            {
                FieldInfo fieldInfo1 = fieldInfos1.FieldInfo(fieldID);
                FieldInfo fieldInfo2 = fieldInfos2.FieldInfo(fieldID);
                Assert.AreEqual(fieldInfo1.Name, fieldInfo2.Name, "Different field names.");
            }

            // check norms
            foreach (FieldInfo fieldInfo in fieldInfos1)
            {
                string curField = fieldInfo.Name;
                NumericDocValues norms1 = MultiDocValues.GetNormValues(index1, curField);
                NumericDocValues norms2 = MultiDocValues.GetNormValues(index2, curField);
                if (norms1 != null && norms2 != null)
                {
                    // todo: generalize this (like TestDuelingCodecs assert)
                    for (int i = 0; i < index1.MaxDoc; i++)
                    {
                        Assert.AreEqual(norms1.Get(i), norms2.Get(i), "Norm different for doc " + i + " and field '" + curField + "'.");
                    }
                }
                else
                {
                    Assert.IsNull(norms1);
                    Assert.IsNull(norms2);
                }
            }

            // check deletions
            Bits liveDocs1 = MultiFields.GetLiveDocs(index1);
            Bits liveDocs2 = MultiFields.GetLiveDocs(index2);
            for (int i = 0; i < index1.MaxDoc; i++)
            {
                Assert.AreEqual(liveDocs1 == null || !liveDocs1.Get(i), liveDocs2 == null || !liveDocs2.Get(i), "Doc " + i + " only deleted in one index.");
            }

            // check stored fields
            for (int i = 0; i < index1.MaxDoc; i++)
            {
                if (liveDocs1 == null || liveDocs1.Get(i))
                {
                    Document doc1 = index1.Document(i);
                    Document doc2 = index2.Document(i);
                    IList<IndexableField> field1 = doc1.Fields;
                    IList<IndexableField> field2 = doc2.Fields;
                    Assert.AreEqual(field1.Count, field2.Count, "Different numbers of fields for doc " + i + ".");
                    IEnumerator<IndexableField> itField1 = field1.GetEnumerator();
                    IEnumerator<IndexableField> itField2 = field2.GetEnumerator();
                    while (itField1.MoveNext())
                    {
                        Field curField1 = (Field)itField1.Current;
                        itField2.MoveNext();
                        Field curField2 = (Field)itField2.Current;
                        Assert.AreEqual(curField1.Name(), curField2.Name(), "Different field names for doc " + i + ".");
                        Assert.AreEqual(curField1.StringValue, curField2.StringValue, "Different field values for doc " + i + ".");
                    }
                }
            }

            // check dictionary and posting lists
            Fields fields1 = MultiFields.GetFields(index1);
            Fields fields2 = MultiFields.GetFields(index2);
            IEnumerator<string> fenum2 = fields2.GetEnumerator();
            Bits liveDocs = MultiFields.GetLiveDocs(index1);
            foreach (string field1 in fields1)
            {
                fenum2.MoveNext();
                Assert.AreEqual(field1, fenum2.Current, "Different fields");
                Terms terms1 = fields1.Terms(field1);
                if (terms1 == null)
                {
                    Assert.IsNull(fields2.Terms(field1));
                    continue;
                }
                TermsEnum enum1 = terms1.Iterator(null);

                Terms terms2 = fields2.Terms(field1);
                Assert.IsNotNull(terms2);
                TermsEnum enum2 = terms2.Iterator(null);

                while (enum1.Next() != null)
                {
                    Assert.AreEqual(enum1.Term(), enum2.Next(), "Different terms");
                    DocsAndPositionsEnum tp1 = enum1.DocsAndPositions(liveDocs, null);
                    DocsAndPositionsEnum tp2 = enum2.DocsAndPositions(liveDocs, null);

                    while (tp1.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
                    {
                        Assert.IsTrue(tp2.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
                        Assert.AreEqual(tp1.DocID(), tp2.DocID(), "Different doc id in postinglist of term " + enum1.Term() + ".");
                        Assert.AreEqual(tp1.Freq(), tp2.Freq(), "Different term frequency in postinglist of term " + enum1.Term() + ".");
                        for (int i = 0; i < tp1.Freq(); i++)
                        {
                            Assert.AreEqual(tp1.NextPosition(), tp2.NextPosition(), "Different positions in postinglist of term " + enum1.Term() + ".");
                        }
                    }
                }
            }
            Assert.IsFalse(fenum2.MoveNext());
        }
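Typical use, assuming dirA and dirB hold indexes built from the same documents (for example an original and a split-and-rejoined copy):

            using (DirectoryReader index1 = DirectoryReader.Open(dirA))
            using (DirectoryReader index2 = DirectoryReader.Open(dirB))
            {
                AssertIndexEquals(index1, index2);
            }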
Example #48
            public override void Run()
            {
                DirectoryReader currentReader = null;
                Random          random        = LuceneTestCase.Random;

                try
                {
                    Document doc = new Document();
                    doc.Add(new TextField("id", "1", Field.Store.NO));
                    Writer.AddDocument(doc);
                    Holder.Reader = currentReader = Writer.GetReader(true);
                    Term term = new Term("id");
                    for (int i = 0; i < NumOps && !Holder.Stop; i++)
                    {
                        float nextOp = (float)random.NextDouble();
                        if (nextOp < 0.3)
                        {
                            term.Set("id", new BytesRef("1"));
                            Writer.UpdateDocument(term, doc);
                        }
                        else if (nextOp < 0.5)
                        {
                            Writer.AddDocument(doc);
                        }
                        else
                        {
                            term.Set("id", new BytesRef("1"));
                            Writer.DeleteDocuments(term);
                        }
                        if (Holder.Reader != currentReader)
                        {
                            Holder.Reader = currentReader;
                            if (Countdown)
                            {
                                Countdown = false;
                                Latch.Signal();
                            }
                        }
                        if (random.NextBoolean())
                        {
                            Writer.Commit();
                            DirectoryReader newReader = DirectoryReader.OpenIfChanged(currentReader);
                            if (newReader != null)
                            {
                                currentReader.DecRef();
                                currentReader = newReader;
                            }
                            if (currentReader.NumDocs == 0)
                            {
                                Writer.AddDocument(doc);
                            }
                        }
                    }
                }
                catch (Exception e)
                {
                    Failed = e;
                }
                finally
                {
                    Holder.Reader = null;
                    if (Countdown)
                    {
                        Latch.Signal();
                    }
                    if (currentReader != null)
                    {
                        try
                        {
                            currentReader.DecRef();
                        }
#pragma warning disable 168
                        catch (IOException e)
#pragma warning restore 168
                        {
                        }
                    }
                }
                if (VERBOSE)
                {
                    Console.WriteLine("writer stopped - forced by reader: " + Holder.Stop);
                }
            }
Example #49
 private DirectoryReader DoReopen(SegmentInfos infos, bool doClone, bool openReadOnly)
 {
     lock (this)
     {
         DirectoryReader reader;
         if (openReadOnly)
         {
             reader = new ReadOnlyDirectoryReader(internalDirectory, infos, subReaders, starts, normsCache, doClone, termInfosIndexDivisor);
         }
         else
         {
             reader = new DirectoryReader(internalDirectory, infos, subReaders, starts, normsCache, false, doClone, termInfosIndexDivisor);
         }
         return reader;
     }
 }
Example #50
 /// <summary>
 /// Create a new <see cref="FilterDirectoryReader"/> that filters a passed in <see cref="DirectoryReader"/>,
 /// using the supplied <see cref="SubReaderWrapper"/> to wrap its subreader. </summary>
 /// <param name="input"> the <see cref="DirectoryReader"/> to filter </param>
 /// <param name="wrapper"> the <see cref="SubReaderWrapper"/> to use to wrap subreaders </param>
 public FilterDirectoryReader(DirectoryReader input, SubReaderWrapper wrapper)
     : base(input.Directory, wrapper.Wrap(input.GetSequentialSubReaders().OfType<AtomicReader>().ToList()))
 {
     this.m_input = input;
 }
Example #51
 internal AnonymousClassFindSegmentsFile1(bool openReadOnly, DirectoryReader enclosingInstance, Lucene.Net.Store.Directory Param1):base(Param1)
 {
     InitBlock(openReadOnly, enclosingInstance);
 }
Example #52
 /// <summary>
 /// Called by the <see cref="DoOpenIfChanged()"/> methods to return a new wrapped <see cref="DirectoryReader"/>.
 /// <para/>
 /// Implementations should just return an instance of themselves, wrapping the
 /// passed in <see cref="DirectoryReader"/>.
 /// </summary>
 /// <param name="input"> the <see cref="DirectoryReader"/> to wrap </param>
 /// <returns> the wrapped <see cref="DirectoryReader"/> </returns>
 protected abstract DirectoryReader DoWrapDirectoryReader(DirectoryReader input);
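A minimal sketch of a concrete filter built from the pieces above: the constructor delegates to the single-argument overload (Examples #55-#56 below), and DoWrapDirectoryReader returns a new instance of the subclass so readers produced by DoOpenIfChanged() stay wrapped. The class name is hypothetical, and depending on the port version the override may need to be protected internal (compare Example #53 below).

 public class PassThroughDirectoryReader : FilterDirectoryReader
 {
     public PassThroughDirectoryReader(DirectoryReader input)
         : base(input) // subreaders are wrapped by the default StandardReaderWrapper
     {
     }

     protected override DirectoryReader DoWrapDirectoryReader(DirectoryReader input)
     {
         // Re-wrap so readers obtained via DirectoryReader.OpenIfChanged remain filtered.
         return new PassThroughDirectoryReader(input);
     }
 }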
Example #53
 /// <summary>
 /// Called by the doOpenIfChanged() methods to return a new wrapped DirectoryReader.
 ///
 /// Implementations should just return an instantiation of themselves, wrapping the
 /// passed in DirectoryReader.
 /// </summary>
 /// <param name="in"> the DirectoryReader to wrap </param>
 /// <returns> the wrapped DirectoryReader </returns>
 protected internal abstract DirectoryReader DoWrapDirectoryReader(DirectoryReader @in);
Example #54
 private DirectoryReader WrapDirectoryReader(DirectoryReader input)
 {
     return input == null ? null : DoWrapDirectoryReader(input);
 }
Example #55
 /// <summary>
 /// Create a new FilterDirectoryReader that filters a passed in DirectoryReader. </summary>
 /// <param name="in"> the DirectoryReader to filter </param>
 public FilterDirectoryReader(DirectoryReader @in)
     : this(@in, new StandardReaderWrapper())
 {
 }
Example #56
 /// <summary>
 /// Create a new <see cref="FilterDirectoryReader"/> that filters a passed in <see cref="DirectoryReader"/>. </summary>
 /// <param name="input"> the <see cref="DirectoryReader"/> to filter </param>
 public FilterDirectoryReader(DirectoryReader input)
     : this(input, new StandardReaderWrapper())
 {
 }
Example #57
 /// <summary>
 /// Some tests expect the directory to contain a single segment, and want to
 /// do tests on that segment's reader. This is a utility method to help them.
 /// </summary>
 public static SegmentReader GetOnlySegmentReader(DirectoryReader reader)
 {
     IList<AtomicReaderContext> subReaders = reader.Leaves;
     if (subReaders.Count != 1)
     {
         throw new System.ArgumentException(reader + " has " + subReaders.Count + " segments instead of exactly one");
     }
     AtomicReader r = (AtomicReader)subReaders[0].Reader;
     Assert.IsTrue(r is SegmentReader);
     return (SegmentReader)r;
 }
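Examples #36 and #39 above show the typical call site: after writer.ForceMerge(1), the single segment's reader is obtained with

 SegmentReader sr = GetOnlySegmentReader(DirectoryReader.Open(dir));

where dir is the Directory the index was written to.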
Example #58
        public virtual void TestSortedSetWithDups()
        {
            AssumeTrue("codec does not support SORTED_SET", DefaultCodecSupportsSortedSet());
            Directory dir = NewDirectory();

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int numDocs = AtLeast(500);

            for (int i = 0; i < numDocs; i++)
            {
                Document doc       = new Document();
                int      numValues = Random().Next(5);
                for (int j = 0; j < numValues; j++)
                {
                    doc.Add(new SortedSetDocValuesField("bytes", new BytesRef(TestUtil.RandomSimpleString(Random(), 2))));
                }
                iw.AddDocument(doc);
                if (Random().Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.Reader;

            iw.ForceMerge(1);
            DirectoryReader ir2    = iw.Reader;
            AtomicReader    merged = GetOnlySegmentReader(ir2);

            iw.Dispose();

            SortedSetDocValues multi  = MultiDocValues.GetSortedSetValues(ir, "bytes");
            SortedSetDocValues single = merged.GetSortedSetDocValues("bytes");

            if (multi == null)
            {
                Assert.IsNull(single);
            }
            else
            {
                Assert.AreEqual(single.ValueCount, multi.ValueCount);
                BytesRef actual   = new BytesRef();
                BytesRef expected = new BytesRef();
                // check values
                for (long i = 0; i < single.ValueCount; i++)
                {
                    single.LookupOrd(i, expected);
                    multi.LookupOrd(i, actual);
                    Assert.AreEqual(expected, actual);
                }
                // check ord list
                for (int i = 0; i < numDocs; i++)
                {
                    single.Document = i;
                    List<long?> expectedList = new List<long?>();
                    long        ord;
                    while ((ord = single.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
                    {
                        expectedList.Add(ord);
                    }

                    multi.Document = i;
                    int upto = 0;
                    while ((ord = multi.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
                    {
                        Assert.AreEqual((long)expectedList[upto], ord);
                        upto++;
                    }
                    Assert.AreEqual(expectedList.Count, upto);
                }
            }

            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
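The ord-list loop above is the standard consumption pattern for SortedSetDocValues. A condensed sketch, assuming ir and the "bytes" field from the test and some document id docId (positioning via the Document setter follows this port's API; other builds expose it as SetDocument):

        SortedSetDocValues dv = MultiDocValues.GetSortedSetValues(ir, "bytes");
        BytesRef scratch = new BytesRef();
        dv.Document = docId;            // position the iterator on one document
        long ord;
        while ((ord = dv.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
        {
            dv.LookupOrd(ord, scratch); // resolve the ordinal to its term bytes
        }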