Example #1
        public void SortOrder_With_MultiIndexReader_Test()
        {
            var dir2 = new RAMDirectory();

            var indexWriter2 = new IndexWriter(dir2, _analyzer, IndexWriter.MaxFieldLength.UNLIMITED);

            indexWriter2.AddDocument(CreateDocument(100, "Anders", 0));
            indexWriter2.Commit();


            var mr = new MultiReader(new[]
            {
                IndexReader.Open(_dir, true),
                IndexReader.Open(dir2, true)
            });
            var searcher = new IndexSearcher(mr);

            var sortOrder = new[] { 20, 10, 100, 30, 40 };
            var result    = Search(searcher, _query, sortOrder);

            CollectionAssert.AreEqual(sortOrder, result);

            searcher.Dispose();
            indexWriter2.Dispose();
            dir2.Dispose();
        }
        public override void BeforeClass()
        {
            base.BeforeClass();

            Dir   = NewDirectory();
            Sdir1 = NewDirectory();
            Sdir2 = NewDirectory();
            RandomIndexWriter writer   = new RandomIndexWriter(Random, Dir, new MockAnalyzer(Random), Similarity, TimeZone);
            RandomIndexWriter swriter1 = new RandomIndexWriter(Random, Sdir1, new MockAnalyzer(Random), Similarity, TimeZone);
            RandomIndexWriter swriter2 = new RandomIndexWriter(Random, Sdir2, new MockAnalyzer(Random), Similarity, TimeZone);

            for (int i = 0; i < 10; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("data", Convert.ToString(i), Field.Store.NO));
                writer.AddDocument(doc);
                ((i % 2 == 0) ? swriter1 : swriter2).AddDocument(doc);
            }
            writer.ForceMerge(1);
            swriter1.ForceMerge(1);
            swriter2.ForceMerge(1);
            writer.Dispose();
            swriter1.Dispose();
            swriter2.Dispose();

            Reader   = DirectoryReader.Open(Dir);
            Searcher = NewSearcher(Reader);

            MultiReader   = new MultiReader(new IndexReader[] { DirectoryReader.Open(Sdir1), DirectoryReader.Open(Sdir2) }, true);
            MultiSearcher = NewSearcher(MultiReader);

            MultiReaderDupls   = new MultiReader(new IndexReader[] { DirectoryReader.Open(Sdir1), DirectoryReader.Open(Dir) }, true);
            MultiSearcherDupls = NewSearcher(MultiReaderDupls);
        }
Example #3
 /// <summary>
 /// Basic searcher over a Lucene index of CK.Monitoring logs.
 /// </summary>
 /// <param name="multiReader">The MultiReader that aggregates the underlying log indexes to search.</param>
 public LuceneSearcher(MultiReader multiReader)
 {
     _indexSearcher = new IndexSearcher(multiReader);
     _sort          = new Sort(
         SortField.FIELD_SCORE,
         new SortField(LogField.LOG_TIME, SortFieldType.STRING));
 }
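A minimal usage sketch for the constructor above: the index paths are illustrative assumptions, and the MultiReader is given ownership of the sub-readers by passing closeSubReaders: true.

 // Hypothetical log-index paths; only the MultiReader/LuceneSearcher wiring mirrors the constructor above.
 using Lucene.Net.Index;
 using Lucene.Net.Store;

 IndexReader[] subReaders =
 {
     DirectoryReader.Open(FSDirectory.Open("logs/monitor-1")),
     DirectoryReader.Open(FSDirectory.Open("logs/monitor-2"))
 };

 // closeSubReaders: true, so disposing the MultiReader also disposes each sub-reader.
 var multiReader = new MultiReader(subReaders, true);
 var searcher    = new LuceneSearcher(multiReader);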
        public void BeforeClass()
        {
            Dir = NewDirectory();
            Sdir1 = NewDirectory();
            Sdir2 = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, new MockAnalyzer(Random()), Similarity, TimeZone);
            RandomIndexWriter swriter1 = new RandomIndexWriter(Random(), Sdir1, new MockAnalyzer(Random()), Similarity, TimeZone);
            RandomIndexWriter swriter2 = new RandomIndexWriter(Random(), Sdir2, new MockAnalyzer(Random()), Similarity, TimeZone);

            for (int i = 0; i < 10; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("data", Convert.ToString(i), Field.Store.NO));
                writer.AddDocument(doc);
                ((i % 2 == 0) ? swriter1 : swriter2).AddDocument(doc);
            }
            writer.ForceMerge(1);
            swriter1.ForceMerge(1);
            swriter2.ForceMerge(1);
            writer.Dispose();
            swriter1.Dispose();
            swriter2.Dispose();

            Reader = DirectoryReader.Open(Dir);
            Searcher = NewSearcher(Reader);

            MultiReader = new MultiReader(new IndexReader[] { DirectoryReader.Open(Sdir1), DirectoryReader.Open(Sdir2) }, true);
            MultiSearcher = NewSearcher(MultiReader);

            MultiReaderDupls = new MultiReader(new IndexReader[] { DirectoryReader.Open(Sdir1), DirectoryReader.Open(Dir) }, true);
            MultiSearcherDupls = NewSearcher(MultiReaderDupls);
        }
Example #5
        public void TestMissingField()
        {
            string            fieldName = "field1";
            Directory         rd1       = NewDirectory();
            RandomIndexWriter w1        = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, rd1);
            Document doc = new Document();

            doc.Add(NewStringField(fieldName, "content1", Field.Store.YES));
            w1.AddDocument(doc);
            IndexReader reader1 = w1.GetReader();

            w1.Dispose();

            fieldName = "field2";
            Directory         rd2 = NewDirectory();
            RandomIndexWriter w2  = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, rd2);

            doc = new Document();
            doc.Add(NewStringField(fieldName, "content2", Field.Store.YES));
            w2.AddDocument(doc);
            IndexReader reader2 = w2.GetReader();

            w2.Dispose();

            TermsFilter tf    = new TermsFilter(new Term(fieldName, "content1"));
            MultiReader multi = new MultiReader(reader1, reader2);

            foreach (AtomicReaderContext context in multi.Leaves)
            {
                DocIdSet docIdSet = tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);
                if (context.Reader.DocFreq(new Term(fieldName, "content1")) == 0)
                {
                    assertNull(docIdSet);
                }
                else
                {
                    FixedBitSet bits = (FixedBitSet)docIdSet;
                    assertTrue("Must be >= 0", bits.Cardinality >= 0);
                }
            }
            multi.Dispose();
            reader1.Dispose();
            reader2.Dispose();
            rd1.Dispose();
            rd2.Dispose();
        }
Example #6
    public LuceneMultiConnection(params string[] indexNames)
    {
        readers = new IndexReader[indexNames.Length];

        for (int i = 0; i < indexNames.Length; i++)
        {
            readers[i] = LuceneTool.GetIndexReader(indexNames[i]);
        }
        multiReader   = new MultiReader(readers, false); // do not close the sub-readers
        indexSearcher = new IndexSearcher(multiReader);
    }
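Since the MultiReader above is created with closeSubReaders set to false, disposing it will not release the wrapped readers. A possible cleanup method for this class, sketched as an assumption rather than taken from the original source:

    // Assumed Dispose method; the field names come from the constructor above.
    public void Dispose()
    {
        indexSearcher.Dispose();   // release the searcher
        multiReader.Dispose();     // closeSubReaders was false, so this leaves the sub-readers open
        foreach (IndexReader reader in readers)
        {
            reader.Dispose();      // the connection owns each sub-reader, so dispose them explicitly
        }
    }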
        //// Stored Procedure Calls

        public static async Task <List <T> > ExecuteStoredProcedure <T>(this SqlConnection dbConnection, string sqlStatement, params SqlParameter[] sqlParameters)
        {
            List <T> _lstObjects;

            using (MultiReader _multiReader = await dbConnection.ExecuteMultiResultStoredProcedure(sqlStatement, sqlParameters))
            {
                _lstObjects = _multiReader.Read <T>();
            }

            return(_lstObjects);
        }
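A hedged usage sketch for the ExecuteStoredProcedure<T> extension above; the connection string handling, the Customer type, the dbo.GetCustomers procedure, and the @IsActive parameter are all illustrative assumptions.

// Everything named here (Customer, dbo.GetCustomers, @IsActive) is hypothetical.
using System.Collections.Generic;
using System.Data.SqlClient;
using System.Threading.Tasks;

public class Customer
{
    public int Id { get; set; }
    public string Name { get; set; }
}

public static class StoredProcedureUsage
{
    public static async Task<List<Customer>> LoadActiveCustomersAsync(string connectionString)
    {
        using (var connection = new SqlConnection(connectionString))
        {
            await connection.OpenAsync();
            // Relies on the ExecuteStoredProcedure<T> extension method shown above.
            return await connection.ExecuteStoredProcedure<Customer>(
                "dbo.GetCustomers",
                new SqlParameter("@IsActive", true));
        }
    }
}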
Example #8
        public virtual void TestDeMorgan()
        {
            Directory         dir1 = NewDirectory();
            RandomIndexWriter iw1  = new RandomIndexWriter(Random(), dir1, Similarity, TimeZone);
            Document          doc1 = new Document();

            doc1.Add(NewTextField("field", "foo bar", Field.Store.NO));
            iw1.AddDocument(doc1);
            IndexReader reader1 = iw1.Reader;

            iw1.Dispose();

            Directory         dir2 = NewDirectory();
            RandomIndexWriter iw2  = new RandomIndexWriter(Random(), dir2, Similarity, TimeZone);
            Document          doc2 = new Document();

            doc2.Add(NewTextField("field", "foo baz", Field.Store.NO));
            iw2.AddDocument(doc2);
            IndexReader reader2 = iw2.Reader;

            iw2.Dispose();

            BooleanQuery query = new BooleanQuery(); // Query: +foo -ba*

            query.Add(new TermQuery(new Term("field", "foo")), Occur.MUST);
            WildcardQuery wildcardQuery = new WildcardQuery(new Term("field", "ba*"));

            wildcardQuery.MultiTermRewriteMethod = (MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
            query.Add(wildcardQuery, Occur.MUST_NOT);

            MultiReader   multireader = new MultiReader(reader1, reader2);
            IndexSearcher searcher    = NewSearcher(multireader);

            Assert.AreEqual(0, searcher.Search(query, 10).TotalHits);


            Task foo = new Task(TestDeMorgan);

            TaskScheduler es = TaskScheduler.Default;

            searcher = new IndexSearcher(multireader, es);
            if (VERBOSE)
            {
                Console.WriteLine("rewritten form: " + searcher.Rewrite(query));
            }
            Assert.AreEqual(0, searcher.Search(query, 10).TotalHits);

            multireader.Dispose();
            reader1.Dispose();
            reader2.Dispose();
            dir1.Dispose();
            dir2.Dispose();
        }
 public static void AfterClass()
 {
     Reader.Dispose();
     MultiReader.Dispose();
     MultiReaderDupls.Dispose();
     Dir.Dispose();
     Sdir1.Dispose();
     Sdir2.Dispose();
     Reader   = MultiReader = MultiReaderDupls = null;
     Searcher = MultiSearcher = MultiSearcherDupls = null;
     Dir      = Sdir1 = Sdir2 = null;
 }
 public override void AfterClass()
 {
     Reader.Dispose();
     MultiReader.Dispose();
     MultiReaderDupls.Dispose();
     Dir.Dispose();
     Sdir1.Dispose();
     Sdir2.Dispose();
     Reader   = MultiReader = MultiReaderDupls = null;
     Searcher = MultiSearcher = MultiSearcherDupls = null;
     Dir      = Sdir1 = Sdir2 = null;
     base.AfterClass();
 }
Example #11
        public virtual void TestMultiTermDocs()
        {
            SqlServerDirectory.ProvisionDatabase(Connection, "test1", true);
            SqlServerDirectory.ProvisionDatabase(Connection, "test2", true);
            SqlServerDirectory.ProvisionDatabase(Connection, "test3", true);

            var ramDir1 = new SqlServerDirectory(Connection, new Options()
            {
                SchemaName = "test1"
            });

            AddDoc(ramDir1, "test foo", true);
            var ramDir2 = new SqlServerDirectory(Connection, new Options()
            {
                SchemaName = "test2"
            });

            AddDoc(ramDir2, "test blah", true);
            var ramDir3 = new SqlServerDirectory(Connection, new Options()
            {
                SchemaName = "test3"
            });

            AddDoc(ramDir3, "test wow", true);

            IndexReader[] readers1 = new[] { IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir3, false) };
            IndexReader[] readers2 = new[] { IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir2, false), IndexReader.Open(ramDir3, false) };
            MultiReader   mr2      = new MultiReader(readers1);
            MultiReader   mr3      = new MultiReader(readers2);

            // test mixing up TermDocs and TermEnums from different readers.
            TermDocs td2 = mr2.TermDocs();
            TermEnum te3 = mr3.Terms(new Term("body", "wow"));

            td2.Seek(te3);
            int ret = 0;

            // This should blow up if we forget to check that the TermEnum is from the same
            // reader as the TermDocs.
            while (td2.Next())
            {
                ret += td2.Doc;
            }
            td2.Close();
            te3.Close();

            // really a dummy assert to ensure that we got some docs and to ensure that
            // nothing is optimized out.
            Assert.IsTrue(ret > 0);
        }
Example #12
        internal static readonly IndexReader[] EmptyReaders = null;// = new IndexReader[8];

        static QueryUtils()
        {
            EmptyReaders = new IndexReader[8];
            try
            {
                EmptyReaders[0] = new MultiReader();
                EmptyReaders[4] = MakeEmptyIndex(new Random(0), 4);
                EmptyReaders[5] = MakeEmptyIndex(new Random(0), 5);
                EmptyReaders[7] = MakeEmptyIndex(new Random(0), 7);
            }
            catch (IOException ex)
            {
                throw new Exception(ex.Message, ex);
            }
        }
Example #13
        private static IndexReader[] LoadEmptyReaders() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
        {
            var emptyReaders = new IndexReader[8];

            try
            {
                emptyReaders[0] = new MultiReader();
                emptyReaders[4] = MakeEmptyIndex(new J2N.Randomizer(0), 4);
                emptyReaders[5] = MakeEmptyIndex(new J2N.Randomizer(0), 5);
                emptyReaders[7] = MakeEmptyIndex(new J2N.Randomizer(0), 7);
            }
            catch (Exception ex) when(ex.IsIOException())
            {
                throw RuntimeException.Create(ex);
            }
            return(emptyReaders);
        }
Example #14
        private static IndexReader[] LoadEmptyReaders() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
        {
            var emptyReaders = new IndexReader[8];

            try
            {
                emptyReaders[0] = new MultiReader();
                emptyReaders[4] = MakeEmptyIndex(new Random(0), 4);
                emptyReaders[5] = MakeEmptyIndex(new Random(0), 5);
                emptyReaders[7] = MakeEmptyIndex(new Random(0), 7);
            }
            catch (IOException ex)
            {
                throw new Exception(ex.ToString(), ex);
            }
            return(emptyReaders);
        }
Example #15
        public override void BeforeClass()
        {
            base.BeforeClass();

            dir   = NewDirectory();
            sdir1 = NewDirectory();
            sdir2 = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir, new MockAnalyzer(Random));
            RandomIndexWriter swriter1 = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, sdir1, new MockAnalyzer(Random));
            RandomIndexWriter swriter2 = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, sdir2, new MockAnalyzer(Random));

            for (int i = 0; i < 10; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("data", Convert.ToString(i), Field.Store.NO));
                writer.AddDocument(doc);
                ((i % 2 == 0) ? swriter1 : swriter2).AddDocument(doc);
            }
            writer.ForceMerge(1);
            swriter1.ForceMerge(1);
            swriter2.ForceMerge(1);
            writer.Dispose();
            swriter1.Dispose();
            swriter2.Dispose();

            reader   = DirectoryReader.Open(dir);
            searcher = NewSearcher(reader);

            multiReader   = new MultiReader(new IndexReader[] { DirectoryReader.Open(sdir1), DirectoryReader.Open(sdir2) }, true);
            multiSearcher = NewSearcher(multiReader);

            multiReaderDupls   = new MultiReader(new IndexReader[] { DirectoryReader.Open(sdir1), DirectoryReader.Open(dir) }, true);
            multiSearcherDupls = NewSearcher(multiReaderDupls);
        }
Example #16
        public void TestTieBreaker()
        {
            Directory         directory = NewDirectory();
            RandomIndexWriter writer    = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, directory);

            addDoc("a123456", writer);
            addDoc("c123456", writer);
            addDoc("d123456", writer);
            addDoc("e123456", writer);

            Directory         directory2 = NewDirectory();
            RandomIndexWriter writer2    = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, directory2);

            addDoc("a123456", writer2);
            addDoc("b123456", writer2);
            addDoc("b123456", writer2);
            addDoc("b123456", writer2);
            addDoc("c123456", writer2);
            addDoc("f123456", writer2);

            IndexReader ir1 = writer.GetReader();
            IndexReader ir2 = writer2.GetReader();

            MultiReader    mr       = new MultiReader(ir1, ir2);
            IndexSearcher  searcher = NewSearcher(mr);
            SlowFuzzyQuery fq       = new SlowFuzzyQuery(new Term("field", "z123456"), 1f, 0, 2);
            TopDocs        docs     = searcher.Search(fq, 2);

            assertEquals(5, docs.TotalHits); // 5 docs, from the a and b's
            mr.Dispose();
            ir1.Dispose();
            ir2.Dispose();
            writer.Dispose();
            writer2.Dispose();
            directory.Dispose();
            directory2.Dispose();
        }
Example #17
 public override IList <AtomicReader> GetMergeReaders()
 {
     if (unsortedReaders == null)
     {
         unsortedReaders = base.GetMergeReaders();
         AtomicReader atomicView;
         if (unsortedReaders.Count == 1)
         {
             atomicView = unsortedReaders[0];
         }
         else
         {
             IndexReader multiReader = new MultiReader(unsortedReaders.ToArray());
             atomicView = SlowCompositeReaderWrapper.Wrap(multiReader);
         }
         docMap     = outerInstance.sorter.Sort(atomicView);
         sortedView = SortingAtomicReader.Wrap(atomicView, docMap);
     }
     // a null doc map means that the readers are already sorted
     return(docMap == null ? unsortedReaders : new List <AtomicReader>(new AtomicReader[] { sortedView }));
 }
Example #18
        public virtual void TestTieBreaker()
        {
            Directory         directory = NewDirectory();
            RandomIndexWriter writer    = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);

            AddDoc("a123456", writer);
            AddDoc("c123456", writer);
            AddDoc("d123456", writer);
            AddDoc("e123456", writer);

            Directory         directory2 = NewDirectory();
            RandomIndexWriter writer2    = new RandomIndexWriter(Random(), directory2, Similarity, TimeZone);

            AddDoc("a123456", writer2);
            AddDoc("b123456", writer2);
            AddDoc("b123456", writer2);
            AddDoc("b123456", writer2);
            AddDoc("c123456", writer2);
            AddDoc("f123456", writer2);

            IndexReader ir1 = writer.Reader;
            IndexReader ir2 = writer2.Reader;

            MultiReader   mr       = new MultiReader(ir1, ir2);
            IndexSearcher searcher = NewSearcher(mr);
            FuzzyQuery    fq       = new FuzzyQuery(new Term("field", "z123456"), 1, 0, 2, false);
            TopDocs       docs     = searcher.Search(fq, 2);

            Assert.AreEqual(5, docs.TotalHits); // 5 docs, from the a and b's
            mr.Dispose();
            ir1.Dispose();
            ir2.Dispose();
            writer.Dispose();
            writer2.Dispose();
            directory.Dispose();
            directory2.Dispose();
        }
Example #19
        public void MultiReaderCanDeserializeDifferentClasses()
        {
            var mr = new MultiReader <MyClass>(
                reader =>
            {
                switch ((string)reader["Type"])
                {
                default:
                case "a":
                    return(OneToOne <MyClassA> .Records);

                case "b":
                    return(OneToOne <MyClassB> .Records);
                }
            });

            var results = Connection().QuerySql("SELECT [Type]='a', A=1, B=NULL UNION SELECT [Type]='b', A=NULL, B=2", Parameters.Empty, Query.Returns(mr));

            Assert.AreEqual(2, results.Count);
            Assert.IsTrue(results[0] is MyClassA);
            Assert.AreEqual(1, ((MyClassA)results[0]).A);
            Assert.IsTrue(results[1] is MyClassB);
            Assert.AreEqual(2, ((MyClassB)results[1]).B);
        }
Example #20
        public virtual void _testTermVectors()
        {
            MultiReader reader = new MultiReader(Readers);

            Assert.IsTrue(reader != null);
        }
Example #21
        internal static readonly IndexReader[] EmptyReaders = null;// = new IndexReader[8];

        static QueryUtils()
        {
            EmptyReaders = new IndexReader[8];
            try
            {
                EmptyReaders[0] = new MultiReader();
                EmptyReaders[4] = MakeEmptyIndex(new Random(0), 4);
                EmptyReaders[5] = MakeEmptyIndex(new Random(0), 5);
                EmptyReaders[7] = MakeEmptyIndex(new Random(0), 7);
            }
            catch (IOException ex)
            {
                throw new Exception(ex.Message, ex);
            }
        }
        public virtual void TestDeMorgan()
        {
            Directory dir1 = NewDirectory();
            RandomIndexWriter iw1 = new RandomIndexWriter(Random(), dir1, Similarity, TimeZone);
            Document doc1 = new Document();
            doc1.Add(NewTextField("field", "foo bar", Field.Store.NO));
            iw1.AddDocument(doc1);
            IndexReader reader1 = iw1.Reader;
            iw1.Dispose();

            Directory dir2 = NewDirectory();
            RandomIndexWriter iw2 = new RandomIndexWriter(Random(), dir2, Similarity, TimeZone);
            Document doc2 = new Document();
            doc2.Add(NewTextField("field", "foo baz", Field.Store.NO));
            iw2.AddDocument(doc2);
            IndexReader reader2 = iw2.Reader;
            iw2.Dispose();

            BooleanQuery query = new BooleanQuery(); // Query: +foo -ba*
            query.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST);
            WildcardQuery wildcardQuery = new WildcardQuery(new Term("field", "ba*"));
            wildcardQuery.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
            query.Add(wildcardQuery, BooleanClause.Occur.MUST_NOT);

            MultiReader multireader = new MultiReader(reader1, reader2);
            IndexSearcher searcher = NewSearcher(multireader);
            Assert.AreEqual(0, searcher.Search(query, 10).TotalHits);


            Task foo = new Task(TestDeMorgan);

            TaskScheduler es = TaskScheduler.Default;
            searcher = new IndexSearcher(multireader, es);
            if (VERBOSE)
            {
                Console.WriteLine("rewritten form: " + searcher.Rewrite(query));
            }
            Assert.AreEqual(0, searcher.Search(query, 10).TotalHits);

            multireader.Dispose();
            reader1.Dispose();
            reader2.Dispose();
            dir1.Dispose();
            dir2.Dispose();
        }
Example #23
        public IEnumerable <IHit> Query(int pageIndex, int pageSize, out int totalCount, out IEnumerable <FacetGroup> facetedResults)
        {
            totalCount     = 0;
            facetedResults = null;

            if (searchPaths == null || searchPaths.Count <= 0)
            {
                searchPaths.AddRange(indexPaths.Values.Select(o => o.Path));
            }

            List <LuceneHit> results = new List <LuceneHit>();

            List <IndexSearcher> subSearchs = new List <IndexSearcher>();

            searchPaths.ForEach(o => subSearchs.Add(new IndexSearcher(FSDirectory.Open(o))));

            if (facetFields != null && facetFields.Count > 0)
            {
                var         facetGroups     = new List <FacetGroup>();
                var         mainQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(query));
                MultiReader readers         = new MultiReader(subSearchs.Select(o => o.IndexReader).ToArray());

                foreach (var facetField in facetFields)
                {
                    FacetGroup fg = new FacetGroup();
                    fg.FieldName = facetFieldNameProvider.GetMapName(TypeName, facetField);
                    var items = new List <FacetItem>();

                    var allDistinctField = FieldCache_Fields.DEFAULT.GetStrings(readers, facetField).Distinct().ToArray();
                    int totalHits        = 0;

                    Parallel.ForEach(allDistinctField, fieldValue =>
                    {
                        //foreach (var fieldValue in allDistinctField)
                        //{
                        var facetQuery       = new TermQuery(new Term(facetField, fieldValue));
                        var facetQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(facetQuery));

                        var bs = new OpenBitSetDISI(facetQueryFilter.GetDocIdSet(readers).Iterator(), readers.MaxDoc);
                        bs.InPlaceAnd(mainQueryFilter.GetDocIdSet(readers).Iterator());
                        int count = (Int32)bs.Cardinality();

                        FacetItem item  = new FacetItem();
                        item.GroupValue = fieldValue;
                        item.Count      = count;

                        items.Add(item);
                        totalHits += count;
                    }
                                     );

                    fg.FacetItems = items.OrderByDescending(o => o.Count);
                    fg.TotalHits  = totalHits;

                    facetGroups.Add(fg);
                }

                facetedResults = facetGroups.OrderBy(o => o.FieldName);
            }
            ParallelMultiSearcher searcher = new ParallelMultiSearcher(subSearchs.ToArray());
            Sort sort = null;

            if (sortFields != null && sortFields.Count > 0)
            {
                sort = new Sort(sortFields.ToArray());
            }

            int maxDoc     = searcher.MaxDoc;
            int startIndex = 0;

            if (pageIndex >= 0 && pageSize > 0)
            {
                startIndex = pageIndex * pageSize;
                maxDoc     = pageSize * (pageIndex + 1);
            }
            var docs = sort == null ? searcher.Search(query, null, maxDoc) : searcher.Search(query, null, maxDoc, sort);

            totalCount = docs.TotalHits;
            int endIndex = docs.TotalHits - startIndex;

            for (int i = startIndex; i < endIndex; i++)
            {
                LuceneHit h = new LuceneHit(TypeName, DocumentBuilder, searcher.Doc(docs.ScoreDocs[i].Doc));
                results.Add(h);
            }
            return(results);
        }
		public void MultiReaderCanDeserializeDifferentClasses()
		{
			var mr = new MultiReader<MyClass>(
				reader =>
				{
					switch ((string)reader["Type"])
					{
						default:
						case "a":
							return OneToOne<MyClassA>.Records;
						case "b":
							return OneToOne<MyClassB>.Records;
					}
				});

			var results = Connection().QuerySql("SELECT [Type]='a', A=1, B=NULL UNION SELECT [Type]='b', A=NULL, B=2", Parameters.Empty, Query.Returns(mr));

			Assert.AreEqual(2, results.Count);
			Assert.IsTrue(results[0] is MyClassA);
			Assert.AreEqual(1, ((MyClassA)results[0]).A);
			Assert.IsTrue(results[1] is MyClassB);
			Assert.AreEqual(2, ((MyClassB)results[1]).B);
		}
Example #25
        public virtual void TestSimple()
        {
            int numNodes = TestUtil.NextInt(Random(), 1, 10);

            double runTimeSec = AtLeast(3);

            int minDocsToMakeTerms = TestUtil.NextInt(Random(), 5, 20);

            int maxSearcherAgeSeconds = TestUtil.NextInt(Random(), 1, 3);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: numNodes=" + numNodes + " runTimeSec=" + runTimeSec + " maxSearcherAgeSeconds=" + maxSearcherAgeSeconds);
            }

            Start(numNodes, runTimeSec, maxSearcherAgeSeconds);

            List <PreviousSearchState> priorSearches = new List <PreviousSearchState>();
            List <BytesRef>            terms         = null;

            while (Time.NanoTime() < endTimeNanos)
            {
                bool doFollowon = priorSearches.Count > 0 && Random().Next(7) == 1;

                // Pick a random node; we will run the query on this node:
                int myNodeID = Random().Next(numNodes);

                NodeState.ShardIndexSearcher localShardSearcher;

                PreviousSearchState prevSearchState;

                if (doFollowon)
                {
                    // Pretend user issued a followon query:
                    prevSearchState = priorSearches[Random().Next(priorSearches.Count)];

                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: follow-on query age=" + ((Time.NanoTime() - prevSearchState.SearchTimeNanos) / 1000000000.0));
                    }

                    try
                    {
                        localShardSearcher = Nodes[myNodeID].Acquire(prevSearchState.Versions);
                    }
                    catch (SearcherExpiredException see)
                    {
                        // Expected, sometimes; in a "real" app we would
                        // either forward this error to the user ("too
                        // much time has passed; please re-run your
                        // search") or sneakily just switch to newest
                        // searcher w/o telling them...
                        if (VERBOSE)
                        {
                            Console.WriteLine("  searcher expired during local shard searcher init: " + see);
                        }
                        priorSearches.Remove(prevSearchState);
                        continue;
                    }
                }
                else
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: fresh query");
                    }
                    // Do fresh query:
                    localShardSearcher = Nodes[myNodeID].Acquire();
                    prevSearchState    = null;
                }

                IndexReader[] subs = new IndexReader[numNodes];

                PreviousSearchState searchState = null;

                try
                {
                    // Mock: now make a single reader (MultiReader) from all node
                    // searchers.  In a real shard env you can't do this... we
                    // do it to confirm results from the shard searcher
                    // are correct:
                    int docCount = 0;
                    try
                    {
                        for (int nodeID = 0; nodeID < numNodes; nodeID++)
                        {
                            long          subVersion = localShardSearcher.NodeVersions[nodeID];
                            IndexSearcher sub        = Nodes[nodeID].Searchers.Acquire(subVersion);
                            if (sub == null)
                            {
                                nodeID--;
                                while (nodeID >= 0)
                                {
                                    subs[nodeID].DecRef();
                                    subs[nodeID] = null;
                                    nodeID--;
                                }
                                throw new SearcherExpiredException("nodeID=" + nodeID + " version=" + subVersion);
                            }
                            subs[nodeID] = sub.IndexReader;
                            docCount    += subs[nodeID].MaxDoc;
                        }
                    }
                    catch (SearcherExpiredException see)
                    {
                        // Expected
                        if (VERBOSE)
                        {
                            Console.WriteLine("  searcher expired during mock reader init: " + see);
                        }
                        continue;
                    }

                    IndexReader   mockReader   = new MultiReader(subs);
                    IndexSearcher mockSearcher = new IndexSearcher(mockReader);

                    Query query;
                    Sort  sort;

                    if (prevSearchState != null)
                    {
                        query = prevSearchState.Query;
                        sort  = prevSearchState.Sort;
                    }
                    else
                    {
                        if (terms == null && docCount > minDocsToMakeTerms)
                        {
                            // TODO: try to "focus" on high freq terms sometimes too
                            // TODO: maybe also periodically reset the terms...?
                            TermsEnum termsEnum = MultiFields.GetTerms(mockReader, "body").GetIterator(null);
                            terms = new List <BytesRef>();
                            while (termsEnum.Next() != null)
                            {
                                terms.Add(BytesRef.DeepCopyOf(termsEnum.Term));
                            }
                            if (VERBOSE)
                            {
                                Console.WriteLine("TEST: init terms: " + terms.Count + " terms");
                            }
                            if (terms.Count == 0)
                            {
                                terms = null;
                            }
                        }

                        if (VERBOSE)
                        {
                            Console.WriteLine("  maxDoc=" + mockReader.MaxDoc);
                        }

                        if (terms != null)
                        {
                            if (Random().NextBoolean())
                            {
                                query = new TermQuery(new Term("body", terms[Random().Next(terms.Count)]));
                            }
                            else
                            {
                                string t = terms[Random().Next(terms.Count)].Utf8ToString();
                                string prefix;
                                if (t.Length <= 1)
                                {
                                    prefix = t;
                                }
                                else
                                {
                                    prefix = t.Substring(0, TestUtil.NextInt(Random(), 1, 2));
                                }
                                query = new PrefixQuery(new Term("body", prefix));
                            }

                            if (Random().NextBoolean())
                            {
                                sort = null;
                            }
                            else
                            {
                                // TODO: sort by more than 1 field
                                int what = Random().Next(3);
                                if (what == 0)
                                {
                                    sort = new Sort(SortField.FIELD_SCORE);
                                }
                                else if (what == 1)
                                {
                                    // TODO: this sort doesn't merge
                                    // correctly... it's tricky because you
                                    // could have > 2.1B docs across all shards:
                                    //sort = new Sort(SortField.FIELD_DOC);
                                    sort = null;
                                }
                                else if (what == 2)
                                {
                                    sort = new Sort(new SortField[] { new SortField("docid", SortFieldType.INT32, Random().NextBoolean()) });
                                }
                                else
                                {
                                    sort = new Sort(new SortField[] { new SortField("title", SortFieldType.STRING, Random().NextBoolean()) });
                                }
                            }
                        }
                        else
                        {
                            query = null;
                            sort  = null;
                        }
                    }

                    if (query != null)
                    {
                        try
                        {
                            searchState = AssertSame(mockSearcher, localShardSearcher, query, sort, prevSearchState);
                        }
                        catch (SearcherExpiredException see)
                        {
                            // Expected; in a "real" app we would
                            // either forward this error to the user ("too
                            // much time has passed; please re-run your
                            // search") or sneakily just switch to newest
                            // searcher w/o telling them...
                            if (VERBOSE)
                            {
                                Console.WriteLine("  searcher expired during search: " + see);
                                Console.Out.Write(see.StackTrace);
                            }
                            // We can't do this in general: on a very slow
                            // computer it's possible the local searcher
                            // expires before we can finish our search:
                            // assert prevSearchState != null;
                            if (prevSearchState != null)
                            {
                                priorSearches.Remove(prevSearchState);
                            }
                        }
                    }
                }
                finally
                {
                    Nodes[myNodeID].Release(localShardSearcher);
                    foreach (IndexReader sub in subs)
                    {
                        if (sub != null)
                        {
                            sub.DecRef();
                        }
                    }
                }

                if (searchState != null && searchState.SearchAfterLocal != null && Random().Next(5) == 3)
                {
                    priorSearches.Add(searchState);
                    if (priorSearches.Count > 200)
                    {
                        Collections.Shuffle(priorSearches);
                        priorSearches.SubList(100, priorSearches.Count).Clear();
                    }
                }
            }

            Finish();
        }
Example #26
//JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
//ORIGINAL LINE: @SuppressWarnings("deprecation") public static void main(String[] args) throws Exception
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
        public static void Main(string[] args)
        {
            if (args.Length < 5)
            {
                Console.Error.WriteLine("Usage: MultiPassIndexSplitter -out <outputDir> -num <numParts> [-seq] <inputIndex1> [<inputIndex2 ...]");
                Console.Error.WriteLine("\tinputIndex\tpath to input index, multiple values are ok");
                Console.Error.WriteLine("\t-out ouputDir\tpath to output directory to contain partial indexes");
                Console.Error.WriteLine("\t-num numParts\tnumber of parts to produce");
                Console.Error.WriteLine("\t-seq\tsequential docid-range split (default is round-robin)");
                Environment.Exit(-1);
            }
            List <IndexReader> indexes = new List <IndexReader>();
            string             outDir  = null;
            int  numParts = -1;
            bool seq      = false;

            for (int i = 0; i < args.Length; i++)
            {
                if (args[i].Equals("-out"))
                {
                    outDir = args[++i];
                }
                else if (args[i].Equals("-num"))
                {
                    numParts = Convert.ToInt32(args[++i]);
                }
                else if (args[i].Equals("-seq"))
                {
                    seq = true;
                }
                else
                {
                    File file = new File(args[i]);
                    if (!file.exists() || !file.Directory)
                    {
                        Console.Error.WriteLine("Invalid input path - skipping: " + file);
                        continue;
                    }
                    Directory dir = FSDirectory.open(new File(args[i]));
                    try
                    {
                        if (!DirectoryReader.indexExists(dir))
                        {
                            Console.Error.WriteLine("Invalid input index - skipping: " + file);
                            continue;
                        }
                    }
                    catch (Exception)
                    {
                        Console.Error.WriteLine("Invalid input index - skipping: " + file);
                        continue;
                    }
                    indexes.Add(DirectoryReader.open(dir));
                }
            }
            if (outDir == null)
            {
                throw new Exception("Required argument missing: -out outputDir");
            }
            if (numParts < 2)
            {
                throw new Exception("Invalid value of required argument: -num numParts");
            }
            if (indexes.Count == 0)
            {
                throw new Exception("No input indexes to process");
            }
            File @out = new File(outDir);

            if (!@out.mkdirs())
            {
                throw new Exception("Can't create output directory: " + @out);
            }
            Directory[] dirs = new Directory[numParts];
            for (int i = 0; i < numParts; i++)
            {
                dirs[i] = FSDirectory.open(new File(@out, "part-" + i));
            }
            MultiPassIndexSplitter splitter = new MultiPassIndexSplitter();
            IndexReader            input;

            if (indexes.Count == 1)
            {
                input = indexes[0];
            }
            else
            {
                input = new MultiReader(indexes.ToArray());
            }
            splitter.Split(Version.LUCENE_CURRENT, input, dirs, seq);
        }
Example #27
        public virtual void TestSimple()
        {
            int numNodes = TestUtil.NextInt(Random(), 1, 10);

            double runTimeSec = AtLeast(3);

            int minDocsToMakeTerms = TestUtil.NextInt(Random(), 5, 20);

            int maxSearcherAgeSeconds = TestUtil.NextInt(Random(), 1, 3);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: numNodes=" + numNodes + " runTimeSec=" + runTimeSec + " maxSearcherAgeSeconds=" + maxSearcherAgeSeconds);
            }

            Start(numNodes, runTimeSec, maxSearcherAgeSeconds);

            List<PreviousSearchState> priorSearches = new List<PreviousSearchState>();
            List<BytesRef> terms = null;
            while (TimeHelper.NanoTime() < EndTimeNanos)
            {
                bool doFollowon = priorSearches.Count > 0 && Random().Next(7) == 1;

                // Pick a random node; we will run the query on this node:
                int myNodeID = Random().Next(numNodes);

                NodeState.ShardIndexSearcher localShardSearcher;

                PreviousSearchState prevSearchState;

                if (doFollowon)
                {
                    // Pretend user issued a followon query:
                    prevSearchState = priorSearches[Random().Next(priorSearches.Count)];

                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: follow-on query age=" + ((TimeHelper.NanoTime() - prevSearchState.SearchTimeNanos) / 1000000000.0));
                    }

                    try
                    {
                        localShardSearcher = Nodes[myNodeID].Acquire(prevSearchState.Versions);
                    }
                    catch (SearcherExpiredException see)
                    {
                        // Expected, sometimes; in a "real" app we would
                        // either forward this error to the user ("too
                        // much time has passed; please re-run your
                        // search") or sneakily just switch to newest
                        // searcher w/o telling them...
                        if (VERBOSE)
                        {
                            Console.WriteLine("  searcher expired during local shard searcher init: " + see);
                        }
                        priorSearches.Remove(prevSearchState);
                        continue;
                    }
                }
                else
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: fresh query");
                    }
                    // Do fresh query:
                    localShardSearcher = Nodes[myNodeID].Acquire();
                    prevSearchState = null;
                }

                IndexReader[] subs = new IndexReader[numNodes];

                PreviousSearchState searchState = null;

                try
                {
                    // Mock: now make a single reader (MultiReader) from all node
                    // searchers.  In a real shard env you can't do this... we
                    // do it to confirm results from the shard searcher
                    // are correct:
                    int docCount = 0;
                    try
                    {
                        for (int nodeID = 0; nodeID < numNodes; nodeID++)
                        {
                            long subVersion = localShardSearcher.NodeVersions[nodeID];
                            IndexSearcher sub = Nodes[nodeID].Searchers.Acquire(subVersion);
                            if (sub == null)
                            {
                                nodeID--;
                                while (nodeID >= 0)
                                {
                                    subs[nodeID].DecRef();
                                    subs[nodeID] = null;
                                    nodeID--;
                                }
                                throw new SearcherExpiredException("nodeID=" + nodeID + " version=" + subVersion);
                            }
                            subs[nodeID] = sub.IndexReader;
                            docCount += subs[nodeID].MaxDoc();
                        }
                    }
                    catch (SearcherExpiredException see)
                    {
                        // Expected
                        if (VERBOSE)
                        {
                            Console.WriteLine("  searcher expired during mock reader init: " + see);
                        }
                        continue;
                    }

                    IndexReader mockReader = new MultiReader(subs);
                    IndexSearcher mockSearcher = new IndexSearcher(mockReader);

                    Query query;
                    Sort sort;

                    if (prevSearchState != null)
                    {
                        query = prevSearchState.Query;
                        sort = prevSearchState.Sort;
                    }
                    else
                    {
                        if (terms == null && docCount > minDocsToMakeTerms)
                        {
                            // TODO: try to "focus" on high freq terms sometimes too
                            // TODO: maybe also periodically reset the terms...?
                            TermsEnum termsEnum = MultiFields.GetTerms(mockReader, "body").Iterator(null);
                            terms = new List<BytesRef>();
                            while (termsEnum.Next() != null)
                            {
                                terms.Add(BytesRef.DeepCopyOf(termsEnum.Term()));
                            }
                            if (VERBOSE)
                            {
                                Console.WriteLine("TEST: init terms: " + terms.Count + " terms");
                            }
                            if (terms.Count == 0)
                            {
                                terms = null;
                            }
                        }

                        if (VERBOSE)
                        {
                            Console.WriteLine("  maxDoc=" + mockReader.MaxDoc());
                        }

                        if (terms != null)
                        {
                            if (Random().NextBoolean())
                            {
                                query = new TermQuery(new Term("body", terms[Random().Next(terms.Count)]));
                            }
                            else
                            {
                                string t = terms[Random().Next(terms.Count)].Utf8ToString();
                                string prefix;
                                if (t.Length <= 1)
                                {
                                    prefix = t;
                                }
                                else
                                {
                                    prefix = t.Substring(0, TestUtil.NextInt(Random(), 1, 2));
                                }
                                query = new PrefixQuery(new Term("body", prefix));
                            }

                            if (Random().NextBoolean())
                            {
                                sort = null;
                            }
                            else
                            {
                                // TODO: sort by more than 1 field
                                int what = Random().Next(3);
                                if (what == 0)
                                {
                                    sort = new Sort(SortField.FIELD_SCORE);
                                }
                                else if (what == 1)
                                {
                                    // TODO: this sort doesn't merge
                                    // correctly... it's tricky because you
                                    // could have > 2.1B docs across all shards:
                                    //sort = new Sort(SortField.FIELD_DOC);
                                    sort = null;
                                }
                                else if (what == 2)
                                {
                                    sort = new Sort(new SortField[] { new SortField("docid", SortField.Type_e.INT, Random().NextBoolean()) });
                                }
                                else
                                {
                                    sort = new Sort(new SortField[] { new SortField("title", SortField.Type_e.STRING, Random().NextBoolean()) });
                                }
                            }
                        }
                        else
                        {
                            query = null;
                            sort = null;
                        }
                    }

                    if (query != null)
                    {
                        try
                        {
                            searchState = AssertSame(mockSearcher, localShardSearcher, query, sort, prevSearchState);
                        }
                        catch (SearcherExpiredException see)
                        {
                            // Expected; in a "real" app we would
                            // either forward this error to the user ("too
                            // much time has passed; please re-run your
                            // search") or sneakily just switch to newest
                            // searcher w/o telling them...
                            if (VERBOSE)
                            {
                                Console.WriteLine("  searcher expired during search: " + see);
                                Console.Out.Write(see.StackTrace);
                            }
                            // We can't do this in general: on a very slow
                            // computer it's possible the local searcher
                            // expires before we can finish our search:
                            // assert prevSearchState != null;
                            if (prevSearchState != null)
                            {
                                priorSearches.Remove(prevSearchState);
                            }
                        }
                    }
                }
                finally
                {
                    Nodes[myNodeID].Release(localShardSearcher);
                    foreach (IndexReader sub in subs)
                    {
                        if (sub != null)
                        {
                            sub.DecRef();
                        }
                    }
                }

                if (searchState != null && searchState.SearchAfterLocal != null && Random().Next(5) == 3)
                {
                    priorSearches.Add(searchState);
                    if (priorSearches.Count > 200)
                    {
                        priorSearches = (List<PreviousSearchState>)CollectionsHelper.Shuffle(priorSearches);
                        priorSearches.SubList(100, priorSearches.Count).Clear();
                    }
                }
            }

            Finish();
        }
Example #28
        public virtual void TestTieBreaker()
        {
            Directory directory = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory);
            AddDoc("a123456", writer);
            AddDoc("c123456", writer);
            AddDoc("d123456", writer);
            AddDoc("e123456", writer);

            Directory directory2 = NewDirectory();
            RandomIndexWriter writer2 = new RandomIndexWriter(Random(), directory2);
            AddDoc("a123456", writer2);
            AddDoc("b123456", writer2);
            AddDoc("b123456", writer2);
            AddDoc("b123456", writer2);
            AddDoc("c123456", writer2);
            AddDoc("f123456", writer2);

            IndexReader ir1 = writer.Reader;
            IndexReader ir2 = writer2.Reader;

            MultiReader mr = new MultiReader(ir1, ir2);
            IndexSearcher searcher = NewSearcher(mr);
            FuzzyQuery fq = new FuzzyQuery(new Term("field", "z123456"), 1, 0, 2, false);
            TopDocs docs = searcher.Search(fq, 2);
            Assert.AreEqual(5, docs.TotalHits); // 5 docs, from the a and b's
            mr.Dispose();
            ir1.Dispose();
            ir2.Dispose();
            writer.Dispose();
            writer2.Dispose();
            directory.Dispose();
            directory2.Dispose();
        }
 public static async Task ExecuteNonQueryStoredProcedure(this SqlConnection dbConnection, string sqlStatement, params SqlParameter[] sqlParameters)
 {
     using MultiReader _multiReader = await dbConnection.ExecuteMultiResultStoredProcedure(sqlStatement, sqlParameters);
 }