Inheritance: MultiTermQuery
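
PrefixQuery is one of the MultiTermQuery subclasses: it matches every term that starts with a given prefix, and it inherits the boost and rewrite-method machinery from its base class. A minimal sketch of that shared surface, using the Lucene.NET 3.x-style API that most of the examples below rely on (the field name and prefix value are illustrative only):

        // Matches every "category" term that starts with the given prefix.
        PrefixQuery query = new PrefixQuery(new Term("category", "/Computers"));

        // Inherited from MultiTermQuery: controls how the query is rewritten at
        // search time (e.g. expanded into a BooleanQuery of matching terms or
        // turned into a constant-score filter).
        query.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;

        // Boost is likewise inherited through MultiTermQuery.
        query.Boost = 2.0f;

The examples that follow show PrefixQuery used directly, rewritten explicitly via Query.Rewrite, combined inside BooleanQuery and DisjunctionMaxQuery, and compared against equivalent WildcardQuery rewrites.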
Example #1
		public Result Search (string term, int count, int start) {
			try {
				term = term.ToLower ();
				Term htTerm = new Term ("hottext", term);
				Query qq1 = new FuzzyQuery (htTerm);
				Query qq2 = new TermQuery (htTerm);
				qq2.Boost = 10f;
				Query qq3 = new PrefixQuery (htTerm);
				qq3.Boost = 10f;
				DisjunctionMaxQuery q1 = new DisjunctionMaxQuery (0f);
				q1.Add (qq1);
				q1.Add (qq2);
				q1.Add (qq3);
				Query q2 = new TermQuery (new Term ("text", term));
				q2.Boost = 3f;
				Query q3 = new TermQuery (new Term ("examples", term));
				q3.Boost = 3f;
				DisjunctionMaxQuery q = new DisjunctionMaxQuery (0f);

				q.Add (q1);
				q.Add (q2);
				q.Add (q3);
			
				TopDocs top = SearchInternal (q, count, start);
				Result r = new Result (term, searcher, top.ScoreDocs);
				Results.Add (r);
				return r;
			} catch (IOException) {
				Console.WriteLine ("No index in {0}", dir);
				return null;
			}
		}
        public virtual void TestPrefixQuery_Mem()
        {
            Directory directory = NewDirectory();

            string[] categories = new string[] { "/Computers", "/Computers/Mac", "/Computers/Windows" };
            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
            for (int i = 0; i < categories.Length; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("category", categories[i], Field.Store.YES));
                writer.AddDocument(doc);
            }
            IndexReader reader = writer.Reader;

            PrefixQuery query = new PrefixQuery(new Term("category", "/Computers"));
            IndexSearcher searcher = NewSearcher(reader);
            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length, "All documents in /Computers category and below");

            query = new PrefixQuery(new Term("category", "/Computers/Mac"));
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length, "One in /Computers/Mac");

            query = new PrefixQuery(new Term("category", ""));
            Terms terms = MultiFields.GetTerms(searcher.IndexReader, "category");
            Assert.IsFalse(query.GetTermsEnum(terms) is PrefixTermsEnum);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length, "everything");
            writer.Dispose();
            reader.Dispose();
            directory.Dispose();
        }
Example #3
 public override Query VisitPrefixQuery(PrefixQuery prefixq)
 {
     _dump.Append("PrefixQ(");
     var q = base.VisitPrefixQuery(prefixq);
     _dump.Append(BoostToString(q));
     _dump.Append(")");
     return q;
 }
Example #4
		public virtual void  TestMethod()
		{
			RAMDirectory directory = new RAMDirectory();
			
			System.String[] categories = new System.String[]{"food", "foodanddrink", "foodanddrinkandgoodtimes", "food and drink"};
			
			Query rw1 = null;
			Query rw2 = null;
			try
			{
				IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
				for (int i = 0; i < categories.Length; i++)
				{
					Document doc = new Document();
					doc.Add(Field.Keyword("category", categories[i]));
					writer.AddDocument(doc);
				}
				writer.Close();
				
				IndexReader reader = IndexReader.Open(directory);
				PrefixQuery query = new PrefixQuery(new Term("category", "foo"));
				
				rw1 = query.Rewrite(reader);
				
				BooleanQuery bq = new BooleanQuery();
				bq.Add(query, true, false);
				
				rw2 = bq.Rewrite(reader);
			}
			catch (System.IO.IOException e)
			{
				Assert.Fail(e.Message);
			}
			
			BooleanQuery bq1 = null;
			if (rw1 is BooleanQuery)
			{
				bq1 = (BooleanQuery) rw1;
			}
			
			BooleanQuery bq2 = null;
			if (rw2 is BooleanQuery)
			{
				bq2 = (BooleanQuery) rw2;
			}
			else
			{
				Assert.Fail("Rewrite");
			}
			
			Assert.AreEqual(bq1.GetClauses().Length, bq2.GetClauses().Length, "Number of Clauses Mismatch");
		}
	    public override Query Rewrite(IndexReader reader)
		{
            if (_termIsPrefix)
            {
                MultiTermQuery rewritten =
                    new PrefixQuery(internalTerm.CreateTerm(internalTerm.Text.Substring(0, internalTerm.Text.IndexOf('*'))));
                rewritten.Boost = Boost;
                rewritten.RewriteMethod = RewriteMethod;
                return rewritten;
            }
            else
            {
                return base.Rewrite(reader);
            }
		}
        public virtual Query Build(IQueryNode queryNode)
        {
            PrefixWildcardQueryNode wildcardNode = (PrefixWildcardQueryNode)queryNode;

            string text = wildcardNode.Text.SubSequence(0, wildcardNode.Text.Length - 1).ToString();
            PrefixQuery q = new PrefixQuery(new Term(wildcardNode.GetFieldAsString(), text));

            MultiTermQuery.RewriteMethod method = (MultiTermQuery.RewriteMethod)queryNode.GetTag(MultiTermRewriteMethodProcessor.TAG_ID);
            if (method != null)
            {
                q.SetRewriteMethod(method);
            }

            return q;
        }
 public Query Parse(string text, string field)
 {
     if (string.IsNullOrWhiteSpace(text))
     {
         return null;
     }
     var tokens = this.Tokenize(text).ToList();
     if (!tokens.Any())
     {
         return null;
     }
     var outerQuery = new BooleanQuery();
     foreach (string token in tokens)
     {
         var query = new PrefixQuery(new Term(field, token));
         outerQuery.Add(query, Occur.MUST);
     }
     return outerQuery;
 }
Example #8
        public virtual void TestPrefixQuery_Mem()
        {
            Directory directory = NewDirectory();

            string[]          categories = new string[] { "/Computers", "/Computers/Mac", "/Computers/Windows" };
            RandomIndexWriter writer     = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, directory);

            for (int i = 0; i < categories.Length; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("category", categories[i], Field.Store.YES));
                writer.AddDocument(doc);
            }
            IndexReader reader = writer.GetReader();

            PrefixQuery   query    = new PrefixQuery(new Term("category", "/Computers"));
            IndexSearcher searcher = NewSearcher(reader);

            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length, "All documents in /Computers category and below");

            query = new PrefixQuery(new Term("category", "/Computers/Mac"));
            hits  = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length, "One in /Computers/Mac");

            query = new PrefixQuery(new Term("category", ""));
            Terms terms = MultiFields.GetTerms(searcher.IndexReader, "category");

            Assert.IsFalse(query.GetTermsEnum(terms) is PrefixTermsEnum);
            hits = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(3, hits.Length, "everything");
            writer.Dispose();
            reader.Dispose();
            directory.Dispose();
        }
		public virtual void  TestPrefixQuery_Renamed_Method()
		{
			RAMDirectory directory = new RAMDirectory();
			
			System.String[] categories = new System.String[]{"/Computers", "/Computers/Mac", "/Computers/Windows"};
			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
			for (int i = 0; i < categories.Length; i++)
			{
				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
				doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
				writer.AddDocument(doc);
			}
			writer.Close();
			
			PrefixQuery query = new PrefixQuery(new Term("category", "/Computers"));
			IndexSearcher searcher = new IndexSearcher(directory);
			Hits hits = searcher.Search(query);
			Assert.AreEqual(3, hits.Length(), "All documents in /Computers category and below");
			
			query = new PrefixQuery(new Term("category", "/Computers/Mac"));
			hits = searcher.Search(query);
			Assert.AreEqual(1, hits.Length(), "One in /Computers/Mac");
		}
Example #10
		public virtual void  TestPrefixQuery_Renamed()
		{
			RAMDirectory directory = new RAMDirectory();
			
			System.String[] categories = new System.String[]{"/Computers", "/Computers/Mac", "/Computers/Windows"};
			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			for (int i = 0; i < categories.Length; i++)
			{
				Document doc = new Document();
				doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
				writer.AddDocument(doc);
			}
			writer.Close();
			
			PrefixQuery query = new PrefixQuery(new Term("category", "/Computers"));
			IndexSearcher searcher = new IndexSearcher(directory);
			ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(3, hits.Length, "All documents in /Computers category and below");
			
			query = new PrefixQuery(new Term("category", "/Computers/Mac"));
			hits = searcher.Search(query, null, 1000).ScoreDocs;
			Assert.AreEqual(1, hits.Length, "One in /Computers/Mac");
		}
Example #11
        public virtual void  TestMethod()
        {
            RAMDirectory directory = new RAMDirectory();

            System.String[] categories = new System.String[] { "food", "foodanddrink", "foodanddrinkandgoodtimes", "food and drink" };

            Query       rw1    = null;
            Query       rw2    = null;
            IndexReader reader = null;

            try
            {
                IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
                for (int i = 0; i < categories.Length; i++)
                {
                    Document doc = new Document();
                    doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
                    writer.AddDocument(doc);
                }
                writer.Close();

                reader = IndexReader.Open(directory);
                PrefixQuery query = new PrefixQuery(new Term("category", "foo"));
                rw1 = query.Rewrite(reader);

                BooleanQuery bq = new BooleanQuery();
                bq.Add(query, BooleanClause.Occur.MUST);

                rw2 = bq.Rewrite(reader);
            }
            catch (System.IO.IOException e)
            {
                Assert.Fail(e.Message);
            }

            Assert.AreEqual(GetCount(reader, rw1), GetCount(reader, rw2), "Number of Clauses Mismatch");
        }
Example #12
        public void TestPrefixTerm()
        {
            RAMDirectory indexStore = GetIndexStore("field", new String[] { "prefix", "prefixx" });
            IndexSearcher searcher = new IndexSearcher(indexStore, true);

            MultiTermQuery wq = new WildcardQuery(new Term("field", "prefix*"));
            AssertMatches(searcher, wq, 2);

            MultiTermQuery expected = new PrefixQuery(new Term("field", "prefix"));
            wq.RewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
            wq.Boost = 0.1F;
            expected.RewriteMethod = wq.RewriteMethod;
            expected.Boost = wq.Boost;
            Assert.AreEqual(searcher.Rewrite(expected), searcher.Rewrite(wq));

            wq.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
            wq.Boost = 0.2F;
            expected.RewriteMethod = wq.RewriteMethod;
            expected.Boost = wq.Boost;
            Assert.AreEqual(searcher.Rewrite(expected), searcher.Rewrite(wq));

            wq.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
            wq.Boost = 0.3F;
            expected.RewriteMethod = wq.RewriteMethod;
            expected.Boost = wq.Boost;
            Assert.AreEqual(searcher.Rewrite(expected), searcher.Rewrite(wq));

            wq.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE;
            wq.Boost = 0.4F;
            expected.RewriteMethod = wq.RewriteMethod;
            expected.Boost = wq.Boost;
            Assert.AreEqual(searcher.Rewrite(expected), searcher.Rewrite(wq));
        }
Example #13
 protected Query Preq(float boost, String field, String text)
 {
     Query query = new PrefixQuery(new Term(field, text));
     query.Boost = boost;
     return query;
 }
Example #14
		public Result FastSearch (string term, int number)
		{
			try {
				term = term.ToLower ();
				Query q1 = new TermQuery (new Term ("hottext", term));
				Query q2 = new PrefixQuery (new Term ("hottext", term));
				q2.Boost = 0.5f;
				DisjunctionMaxQuery q = new DisjunctionMaxQuery (0f);
				q.Add (q1);
				q.Add (q2);
				TopDocs top = searcher.Search (q, number);
				return new Result (term, searcher, top.ScoreDocs);
			} catch (IOException) {
				Console.WriteLine ("No index in {0}", dir);
				return null;
			}
		}
Example #15
        public virtual void TestParsingAndSearching()
        {
            string field = "content";
            string[] docs = new string[] { "\\ abcdefg1", "\\x00079 hijklmn1", "\\\\ opqrstu1" };

            // queries that should find all docs
            Query[] matchAll = new Query[] { new WildcardQuery(new Term(field, "*")), new WildcardQuery(new Term(field, "*1")), new WildcardQuery(new Term(field, "**1")), new WildcardQuery(new Term(field, "*?")), new WildcardQuery(new Term(field, "*?1")), new WildcardQuery(new Term(field, "?*1")), new WildcardQuery(new Term(field, "**")), new WildcardQuery(new Term(field, "***")), new WildcardQuery(new Term(field, "\\\\*")) };

            // queries that should find no docs
            Query[] matchNone = new Query[] { new WildcardQuery(new Term(field, "a*h")), new WildcardQuery(new Term(field, "a?h")), new WildcardQuery(new Term(field, "*a*h")), new WildcardQuery(new Term(field, "?a")), new WildcardQuery(new Term(field, "a?")) };

            PrefixQuery[][] matchOneDocPrefix = new PrefixQuery[][] { new PrefixQuery[] { new PrefixQuery(new Term(field, "a")), new PrefixQuery(new Term(field, "ab")), new PrefixQuery(new Term(field, "abc")) }, new PrefixQuery[] { new PrefixQuery(new Term(field, "h")), new PrefixQuery(new Term(field, "hi")), new PrefixQuery(new Term(field, "hij")), new PrefixQuery(new Term(field, "\\x0007")) }, new PrefixQuery[] { new PrefixQuery(new Term(field, "o")), new PrefixQuery(new Term(field, "op")), new PrefixQuery(new Term(field, "opq")), new PrefixQuery(new Term(field, "\\\\")) } };

            WildcardQuery[][] matchOneDocWild = new WildcardQuery[][] { new WildcardQuery[] { new WildcardQuery(new Term(field, "*a*")), new WildcardQuery(new Term(field, "*ab*")), new WildcardQuery(new Term(field, "*abc**")), new WildcardQuery(new Term(field, "ab*e*")), new WildcardQuery(new Term(field, "*g?")), new WildcardQuery(new Term(field, "*f?1")) }, new WildcardQuery[] { new WildcardQuery(new Term(field, "*h*")), new WildcardQuery(new Term(field, "*hi*")), new WildcardQuery(new Term(field, "*hij**")), new WildcardQuery(new Term(field, "hi*k*")), new WildcardQuery(new Term(field, "*n?")), new WildcardQuery(new Term(field, "*m?1")), new WildcardQuery(new Term(field, "hij**")) }, new WildcardQuery[] { new WildcardQuery(new Term(field, "*o*")), new WildcardQuery(new Term(field, "*op*")), new WildcardQuery(new Term(field, "*opq**")), new WildcardQuery(new Term(field, "op*q*")), new WildcardQuery(new Term(field, "*u?")), new WildcardQuery(new Term(field, "*t?1")), new WildcardQuery(new Term(field, "opq**")) } };

            // prepare the index
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
            for (int i = 0; i < docs.Length; i++)
            {
                Document doc = new Document();
                doc.Add(NewTextField(field, docs[i], Field.Store.NO));
                iw.AddDocument(doc);
            }
            iw.Dispose();

            IndexReader reader = DirectoryReader.Open(dir);
            IndexSearcher searcher = NewSearcher(reader);

            // test queries that must find all
            foreach (Query q in matchAll)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("matchAll: q=" + q + " " + q.GetType().Name);
                }
                ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
                Assert.AreEqual(docs.Length, hits.Length);
            }

            // test queries that must find none
            foreach (Query q in matchNone)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("matchNone: q=" + q + " " + q.GetType().Name);
                }
                ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
                Assert.AreEqual(0, hits.Length);
            }

            // test the prefix queries find only one doc
            for (int i = 0; i < matchOneDocPrefix.Length; i++)
            {
                for (int j = 0; j < matchOneDocPrefix[i].Length; j++)
                {
                    Query q = matchOneDocPrefix[i][j];
                    if (VERBOSE)
                    {
                        Console.WriteLine("match 1 prefix: doc=" + docs[i] + " q=" + q + " " + q.GetType().Name);
                    }
                    ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
                    Assert.AreEqual(1, hits.Length);
                    Assert.AreEqual(i, hits[0].Doc);
                }
            }

            // test the wildcard queries find only one doc
            for (int i = 0; i < matchOneDocWild.Length; i++)
            {
                for (int j = 0; j < matchOneDocWild[i].Length; j++)
                {
                    Query q = matchOneDocWild[i][j];
                    if (VERBOSE)
                    {
                        Console.WriteLine("match 1 wild: doc=" + docs[i] + " q=" + q + " " + q.GetType().Name);
                    }
                    ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
                    Assert.AreEqual(1, hits.Length);
                    Assert.AreEqual(i, hits[0].Doc);
                }
            }

            reader.Dispose();
            dir.Dispose();
        }
 /// <summary>
 /// macro for readability </summary>
 public static Query Cspq(Term prefix)
 {
     PrefixQuery query = new PrefixQuery(prefix);
     query.SetRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
     return query;
 }
Example #17
        public virtual void TestParsingAndSearching()
        {
            string field = "content";

            string[] docs = new string[] { "\\ abcdefg1", "\\x00079 hijklmn1", "\\\\ opqrstu1" };

            // queries that should find all docs
            Query[] matchAll = new Query[] { new WildcardQuery(new Term(field, "*")), new WildcardQuery(new Term(field, "*1")), new WildcardQuery(new Term(field, "**1")), new WildcardQuery(new Term(field, "*?")), new WildcardQuery(new Term(field, "*?1")), new WildcardQuery(new Term(field, "?*1")), new WildcardQuery(new Term(field, "**")), new WildcardQuery(new Term(field, "***")), new WildcardQuery(new Term(field, "\\\\*")) };

            // queries that should find no docs
            Query[] matchNone = new Query[] { new WildcardQuery(new Term(field, "a*h")), new WildcardQuery(new Term(field, "a?h")), new WildcardQuery(new Term(field, "*a*h")), new WildcardQuery(new Term(field, "?a")), new WildcardQuery(new Term(field, "a?")) };

            PrefixQuery[][] matchOneDocPrefix = new PrefixQuery[][] { new PrefixQuery[] { new PrefixQuery(new Term(field, "a")), new PrefixQuery(new Term(field, "ab")), new PrefixQuery(new Term(field, "abc")) }, new PrefixQuery[] { new PrefixQuery(new Term(field, "h")), new PrefixQuery(new Term(field, "hi")), new PrefixQuery(new Term(field, "hij")), new PrefixQuery(new Term(field, "\\x0007")) }, new PrefixQuery[] { new PrefixQuery(new Term(field, "o")), new PrefixQuery(new Term(field, "op")), new PrefixQuery(new Term(field, "opq")), new PrefixQuery(new Term(field, "\\\\")) } };

            WildcardQuery[][] matchOneDocWild = new WildcardQuery[][] { new WildcardQuery[] { new WildcardQuery(new Term(field, "*a*")), new WildcardQuery(new Term(field, "*ab*")), new WildcardQuery(new Term(field, "*abc**")), new WildcardQuery(new Term(field, "ab*e*")), new WildcardQuery(new Term(field, "*g?")), new WildcardQuery(new Term(field, "*f?1")) }, new WildcardQuery[] { new WildcardQuery(new Term(field, "*h*")), new WildcardQuery(new Term(field, "*hi*")), new WildcardQuery(new Term(field, "*hij**")), new WildcardQuery(new Term(field, "hi*k*")), new WildcardQuery(new Term(field, "*n?")), new WildcardQuery(new Term(field, "*m?1")), new WildcardQuery(new Term(field, "hij**")) }, new WildcardQuery[] { new WildcardQuery(new Term(field, "*o*")), new WildcardQuery(new Term(field, "*op*")), new WildcardQuery(new Term(field, "*opq**")), new WildcardQuery(new Term(field, "op*q*")), new WildcardQuery(new Term(field, "*u?")), new WildcardQuery(new Term(field, "*t?1")), new WildcardQuery(new Term(field, "opq**")) } };

            // prepare the index
            Directory         dir = NewDirectory();
            RandomIndexWriter iw  = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));

            for (int i = 0; i < docs.Length; i++)
            {
                Document doc = new Document();
                doc.Add(NewTextField(field, docs[i], Field.Store.NO));
                iw.AddDocument(doc);
            }
            iw.Dispose();

            IndexReader   reader   = DirectoryReader.Open(dir);
            IndexSearcher searcher = NewSearcher(reader);

            // test queries that must find all
            foreach (Query q in matchAll)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("matchAll: q=" + q + " " + q.GetType().Name);
                }
                ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
                Assert.AreEqual(docs.Length, hits.Length);
            }

            // test queries that must find none
            foreach (Query q in matchNone)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("matchNone: q=" + q + " " + q.GetType().Name);
                }
                ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
                Assert.AreEqual(0, hits.Length);
            }

            // test the prefix queries find only one doc
            for (int i = 0; i < matchOneDocPrefix.Length; i++)
            {
                for (int j = 0; j < matchOneDocPrefix[i].Length; j++)
                {
                    Query q = matchOneDocPrefix[i][j];
                    if (VERBOSE)
                    {
                        Console.WriteLine("match 1 prefix: doc=" + docs[i] + " q=" + q + " " + q.GetType().Name);
                    }
                    ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
                    Assert.AreEqual(1, hits.Length);
                    Assert.AreEqual(i, hits[0].Doc);
                }
            }

            // test the wildcard queries find only one doc
            for (int i = 0; i < matchOneDocWild.Length; i++)
            {
                for (int j = 0; j < matchOneDocWild[i].Length; j++)
                {
                    Query q = matchOneDocWild[i][j];
                    if (VERBOSE)
                    {
                        Console.WriteLine("match 1 wild: doc=" + docs[i] + " q=" + q + " " + q.GetType().Name);
                    }
                    ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
                    Assert.AreEqual(1, hits.Length);
                    Assert.AreEqual(i, hits[0].Doc);
                }
            }

            reader.Dispose();
            dir.Dispose();
        }
        /// <summary>
        /// check that the # of hits is the same as from a very
        /// simple prefixquery implementation.
        /// </summary>
        private void AssertSame(string prefix)
        {
            PrefixQuery smart = new PrefixQuery(new Term("field", prefix));
            DumbPrefixQuery dumb = new DumbPrefixQuery(this, new Term("field", prefix));

            TopDocs smartDocs = Searcher.Search(smart, 25);
            TopDocs dumbDocs = Searcher.Search(dumb, 25);
            CheckHits.CheckEqual(smart, smartDocs.ScoreDocs, dumbDocs.ScoreDocs);
        }
Example #19
        /// <summary>
        /// Adds TermQuery or PrefixQuery search clauses in batch
        /// </summary>
        /// <param name="fieldNameAndBoosts">Field names and their boost levels</param>
        /// <param name="values">Field values</param>
        /// <param name="exactMatch">Whether to match terms exactly</param>
        /// <param name="occur">How the resulting clauses are combined</param>
        /// <param name="asFilter">Whether to add the clause as a filter</param>
        /// <returns>LuceneSearchBuilder</returns>
        public LuceneSearchBuilder WithFields(Dictionary<string, BoostLevel> fieldNameAndBoosts, IEnumerable<string> values, bool exactMatch, BooleanClause.Occur occur, bool asFilter = false)
        {
            foreach (var fieldNameAndBoost in fieldNameAndBoosts)
            {
                BooleanQuery query = new BooleanQuery();

                foreach (string value in values)
                {
                    string filteredValue = ClauseScrubber.LuceneKeywordsScrub(value);
                    if (string.IsNullOrEmpty(filteredValue))
                        continue;

                    Term term = new Term(fieldNameAndBoost.Key, filteredValue);
                    Query q;
                    if (exactMatch)
                        q = new TermQuery(term);
                    else
                        q = new PrefixQuery(term);

                    SetBoost(q, fieldNameAndBoost.Value);

                    query.Add(q, BooleanClause.Occur.SHOULD);
                }

                if (asFilter)
                    filters.Add(new BooleanClause(query, occur));
                else
                    clauses.Add(new BooleanClause(query, occur));
            }

            return this;
        }
Example #20
        /// <summary>
        /// Adds PhraseQuery search clauses in batch
        /// </summary>
        /// <param name="phrases">Phrases to search for</param>
        /// <param name="fieldNameAndBoosts">Field names and their boost levels</param>
        /// <param name="occur">How the resulting clauses are combined</param>
        /// <param name="asFilter">Whether to add the clause as a filter</param>
        /// <returns></returns>
        public LuceneSearchBuilder WithPhrases(Dictionary<string, BoostLevel> fieldNameAndBoosts, IEnumerable<string> phrases, BooleanClause.Occur occur, bool asFilter = false)
        {
            foreach (var fieldNameAndBoost in fieldNameAndBoosts)
            {
                BooleanQuery query = new BooleanQuery();
                foreach (string phrase in phrases)
                {
                    string filteredPhrase = ClauseScrubber.LuceneKeywordsScrub(phrase);
                    if (string.IsNullOrEmpty(filteredPhrase))
                        continue;

                    if (filteredPhrase.Length == 1)
                    {
                        Term term = new Term(fieldNameAndBoost.Key, filteredPhrase);
                        Query q = new PrefixQuery(term);

                        SetBoost(q, fieldNameAndBoost.Value);

                        query.Add(q, BooleanClause.Occur.SHOULD);

                        continue;
                    }

                    string[] nameSegments = ClauseScrubber.SegmentForPhraseQuery(filteredPhrase);

                    PhraseQuery phraseQuery = new PhraseQuery();
                    foreach (var nameSegment in nameSegments)
                        phraseQuery.Add(new Term(fieldNameAndBoost.Key, nameSegment));

                    phraseQuery.SetSlop(PhraseQuerySlop);

                    SetBoost(phraseQuery, fieldNameAndBoost.Value);

                    query.Add(phraseQuery, BooleanClause.Occur.SHOULD);

                }

                if (asFilter)
                    filters.Add(new BooleanClause(query, occur));
                else
                    clauses.Add(new BooleanClause(query, occur));
            }

            return this;
        }
Example #21
        /// <summary>
        /// Adds TermQuery or PrefixQuery search clauses for multiple keywords
        /// </summary>
        /// <param name="fieldName">Field name</param>
        /// <param name="values">Field values</param>
        /// <param name="exactMatch">Whether to match terms exactly</param>
        /// <param name="boostLevel">Boost level</param>
        /// <param name="asFilter">Whether to add the clause as a filter</param>
        public LuceneSearchBuilder WithFields(string fieldName, IEnumerable<string> values, bool exactMatch = true, BoostLevel? boostLevel = null, bool asFilter = false)
        {
            BooleanQuery query = new BooleanQuery();

            foreach (string value in values)
            {
                string filteredValue = ClauseScrubber.LuceneKeywordsScrub(value);
                if (string.IsNullOrEmpty(filteredValue))
                    continue;

                Term term = new Term(fieldName, filteredValue);
                Query q;
                if (exactMatch)
                    q = new TermQuery(term);
                else
                    q = new PrefixQuery(term);

                if (boostLevel.HasValue)
                    SetBoost(q, boostLevel.Value);

                query.Add(q, BooleanClause.Occur.SHOULD);
            }

            if (asFilter)
                filters.Add(new BooleanClause(query, BooleanClause.Occur.MUST));
            else
                clauses.Add(new BooleanClause(query, BooleanClause.Occur.MUST));
            return this;
        }
Example #22
        /// <summary>
        /// Adds a TermQuery or PrefixQuery search clause
        /// </summary>
        /// <param name="fieldName">Field name</param>
        /// <param name="value">Field value</param>
        /// <param name="exactMatch">Whether to match the term exactly</param>
        /// <param name="boostLevel">Boost level</param>
        /// <param name="asFilter">Whether to add the clause as a filter</param>
        /// <returns>LuceneSearchBuilder</returns>
        public LuceneSearchBuilder WithField(string fieldName, string value, bool exactMatch = true, BoostLevel? boostLevel = null, bool asFilter = false)
        {
            string filteredValue = ClauseScrubber.LuceneKeywordsScrub(value);
            if (string.IsNullOrEmpty(filteredValue))
                return this;

            Term term = new Term(fieldName, filteredValue);
            Query query;
            if (exactMatch)
                query = new TermQuery(term);
            else
                query = new PrefixQuery(term);

            if (boostLevel.HasValue)
                SetBoost(query, boostLevel.Value);

            if (asFilter)
                filters.Add(new BooleanClause(query, BooleanClause.Occur.MUST));
            else
                clauses.Add(new BooleanClause(query, BooleanClause.Occur.MUST));

            return this;
        }
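
Taken together, WithField, WithFields and WithPhrases accumulate BooleanClause entries (either as query clauses or as filters) that the builder presumably combines later. A hypothetical call site, sketched only to show the chaining; the parameterless constructor, the BoostLevel value and the final build/search step are assumptions and are not part of these excerpts:

        var builder = new LuceneSearchBuilder()
            .WithField("title", "lucene", exactMatch: false, boostLevel: BoostLevel.High)
            .WithFields("tags", new[] { "search", "prefix" }, exactMatch: true);
        // The builder's final step that turns the accumulated clauses and
        // filters into an executable query is not shown in these examples.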
Example #23
			public override Query ProcessQuery(string indexName, Query query, IndexQuery originalQuery)
			{
				if (indexName != SpecificIndexName)
					return query;

				var customQuery = new PrefixQuery(new Term("CustomField", "CustomPrefix"));

				return new BooleanQuery
					{
						{ query, Occur.MUST },
						{ customQuery, Occur.MUST}
					};
			}
Example #24
        public virtual void TestSimple()
        {
            int numNodes = TestUtil.NextInt(Random(), 1, 10);

            double runTimeSec = AtLeast(3);

            int minDocsToMakeTerms = TestUtil.NextInt(Random(), 5, 20);

            int maxSearcherAgeSeconds = TestUtil.NextInt(Random(), 1, 3);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: numNodes=" + numNodes + " runTimeSec=" + runTimeSec + " maxSearcherAgeSeconds=" + maxSearcherAgeSeconds);
            }

            Start(numNodes, runTimeSec, maxSearcherAgeSeconds);

            List<PreviousSearchState> priorSearches = new List<PreviousSearchState>();
            List<BytesRef> terms = null;
            while (TimeHelper.NanoTime() < EndTimeNanos)
            {
                bool doFollowon = priorSearches.Count > 0 && Random().Next(7) == 1;

                // Pick a random node; we will run the query on this node:
                int myNodeID = Random().Next(numNodes);

                NodeState.ShardIndexSearcher localShardSearcher;

                PreviousSearchState prevSearchState;

                if (doFollowon)
                {
                    // Pretend user issued a followon query:
                    prevSearchState = priorSearches[Random().Next(priorSearches.Count)];

                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: follow-on query age=" + ((TimeHelper.NanoTime() - prevSearchState.SearchTimeNanos) / 1000000000.0));
                    }

                    try
                    {
                        localShardSearcher = Nodes[myNodeID].Acquire(prevSearchState.Versions);
                    }
                    catch (SearcherExpiredException see)
                    {
                        // Expected, sometimes; in a "real" app we would
                        // either forward this error to the user ("too
                        // much time has passed; please re-run your
                        // search") or sneakily just switch to newest
                        // searcher w/o telling them...
                        if (VERBOSE)
                        {
                            Console.WriteLine("  searcher expired during local shard searcher init: " + see);
                        }
                        priorSearches.Remove(prevSearchState);
                        continue;
                    }
                }
                else
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: fresh query");
                    }
                    // Do fresh query:
                    localShardSearcher = Nodes[myNodeID].Acquire();
                    prevSearchState = null;
                }

                IndexReader[] subs = new IndexReader[numNodes];

                PreviousSearchState searchState = null;

                try
                {
                    // Mock: now make a single reader (MultiReader) from all node
                    // searchers.  In a real shard env you can't do this... we
                    // do it to confirm results from the shard searcher
                    // are correct:
                    int docCount = 0;
                    try
                    {
                        for (int nodeID = 0; nodeID < numNodes; nodeID++)
                        {
                            long subVersion = localShardSearcher.NodeVersions[nodeID];
                            IndexSearcher sub = Nodes[nodeID].Searchers.Acquire(subVersion);
                            if (sub == null)
                            {
                                nodeID--;
                                while (nodeID >= 0)
                                {
                                    subs[nodeID].DecRef();
                                    subs[nodeID] = null;
                                    nodeID--;
                                }
                                throw new SearcherExpiredException("nodeID=" + nodeID + " version=" + subVersion);
                            }
                            subs[nodeID] = sub.IndexReader;
                            docCount += subs[nodeID].MaxDoc();
                        }
                    }
                    catch (SearcherExpiredException see)
                    {
                        // Expected
                        if (VERBOSE)
                        {
                            Console.WriteLine("  searcher expired during mock reader init: " + see);
                        }
                        continue;
                    }

                    IndexReader mockReader = new MultiReader(subs);
                    IndexSearcher mockSearcher = new IndexSearcher(mockReader);

                    Query query;
                    Sort sort;

                    if (prevSearchState != null)
                    {
                        query = prevSearchState.Query;
                        sort = prevSearchState.Sort;
                    }
                    else
                    {
                        if (terms == null && docCount > minDocsToMakeTerms)
                        {
                            // TODO: try to "focus" on high freq terms sometimes too
                            // TODO: maybe also periodically reset the terms...?
                            TermsEnum termsEnum = MultiFields.GetTerms(mockReader, "body").Iterator(null);
                            terms = new List<BytesRef>();
                            while (termsEnum.Next() != null)
                            {
                                terms.Add(BytesRef.DeepCopyOf(termsEnum.Term()));
                            }
                            if (VERBOSE)
                            {
                                Console.WriteLine("TEST: init terms: " + terms.Count + " terms");
                            }
                            if (terms.Count == 0)
                            {
                                terms = null;
                            }
                        }

                        if (VERBOSE)
                        {
                            Console.WriteLine("  maxDoc=" + mockReader.MaxDoc());
                        }

                        if (terms != null)
                        {
                            if (Random().NextBoolean())
                            {
                                query = new TermQuery(new Term("body", terms[Random().Next(terms.Count)]));
                            }
                            else
                            {
                                string t = terms[Random().Next(terms.Count)].Utf8ToString();
                                string prefix;
                                if (t.Length <= 1)
                                {
                                    prefix = t;
                                }
                                else
                                {
                                    prefix = t.Substring(0, TestUtil.NextInt(Random(), 1, 2));
                                }
                                query = new PrefixQuery(new Term("body", prefix));
                            }

                            if (Random().NextBoolean())
                            {
                                sort = null;
                            }
                            else
                            {
                                // TODO: sort by more than 1 field
                                int what = Random().Next(3);
                                if (what == 0)
                                {
                                    sort = new Sort(SortField.FIELD_SCORE);
                                }
                                else if (what == 1)
                                {
                                    // TODO: this sort doesn't merge
                                    // correctly... it's tricky because you
                                    // could have > 2.1B docs across all shards:
                                    //sort = new Sort(SortField.FIELD_DOC);
                                    sort = null;
                                }
                                else if (what == 2)
                                {
                                    sort = new Sort(new SortField[] { new SortField("docid", SortField.Type_e.INT, Random().NextBoolean()) });
                                }
                                else
                                {
                                    sort = new Sort(new SortField[] { new SortField("title", SortField.Type_e.STRING, Random().NextBoolean()) });
                                }
                            }
                        }
                        else
                        {
                            query = null;
                            sort = null;
                        }
                    }

                    if (query != null)
                    {
                        try
                        {
                            searchState = AssertSame(mockSearcher, localShardSearcher, query, sort, prevSearchState);
                        }
                        catch (SearcherExpiredException see)
                        {
                            // Expected; in a "real" app we would
                            // either forward this error to the user ("too
                            // much time has passed; please re-run your
                            // search") or sneakily just switch to newest
                            // searcher w/o telling them...
                            if (VERBOSE)
                            {
                                Console.WriteLine("  searcher expired during search: " + see);
                                Console.Out.Write(see.StackTrace);
                            }
                            // We can't do this in general: on a very slow
                            // computer it's possible the local searcher
                            // expires before we can finish our search:
                            // assert prevSearchState != null;
                            if (prevSearchState != null)
                            {
                                priorSearches.Remove(prevSearchState);
                            }
                        }
                    }
                }
                finally
                {
                    Nodes[myNodeID].Release(localShardSearcher);
                    foreach (IndexReader sub in subs)
                    {
                        if (sub != null)
                        {
                            sub.DecRef();
                        }
                    }
                }

                if (searchState != null && searchState.SearchAfterLocal != null && Random().Next(5) == 3)
                {
                    priorSearches.Add(searchState);
                    if (priorSearches.Count > 200)
                    {
                        priorSearches = (List<PreviousSearchState>)CollectionsHelper.Shuffle(priorSearches);
                        priorSearches.SubList(100, priorSearches.Count).Clear();
                    }
                }
            }

            Finish();
        }
Example #25
        private static Query BuildTermQuery(string termQuery)
        {
            Query mainQuery;
            if (termQuery.Length < 3)
                mainQuery = new PrefixQuery(new Term("name", termQuery));
            else
            {
                BooleanQuery boolQuery = new BooleanQuery();

                var fuzzQuery = new FuzzyQuery(new Term("name", termQuery));
                var prefQuery = new PrefixQuery(new Term("name", termQuery));
                boolQuery.Add(prefQuery, BooleanClause.Occur.SHOULD);
                boolQuery.Add(fuzzQuery, BooleanClause.Occur.SHOULD);

                mainQuery = boolQuery;

                // first implementation
                //mainQuery = fuzzQuery;

            }
            return mainQuery;
        }
Example #26
 /// <summary>
 /// Factory method to generate a prefix query.
 /// </summary>
 protected virtual Query NewPrefixQuery(string text)
 {
     BooleanQuery bq = new BooleanQuery(true);
     foreach (var entry in weights)
     {
         PrefixQuery prefix = new PrefixQuery(new Term(entry.Key, text));
         prefix.Boost = entry.Value;
         bq.Add(prefix, BooleanClause.Occur.SHOULD);
     }
     return Simplify(bq);
 }
Example #27
		public virtual void  TestMethod()
		{
			RAMDirectory directory = new RAMDirectory();
			
			System.String[] categories = new System.String[]{"food", "foodanddrink", "foodanddrinkandgoodtimes", "food and drink"};
			
			Query rw1 = null;
			Query rw2 = null;
			IndexReader reader = null;
			try
			{
				IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
				for (int i = 0; i < categories.Length; i++)
				{
					Document doc = new Document();
					doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
					writer.AddDocument(doc);
				}
				writer.Close();
				
				reader = IndexReader.Open(directory);
				PrefixQuery query = new PrefixQuery(new Term("category", "foo"));
				rw1 = query.Rewrite(reader);
				
				BooleanQuery bq = new BooleanQuery();
				bq.Add(query, BooleanClause.Occur.MUST);
				
				rw2 = bq.Rewrite(reader);
			}
			catch (System.IO.IOException e)
			{
				Assert.Fail(e.Message);
			}
			
			Assert.AreEqual(GetCount(reader, rw1), GetCount(reader, rw2), "Number of Clauses Mismatch");
		}
Example #28
        public virtual void TestSimple()
        {
            int numNodes = TestUtil.NextInt(Random(), 1, 10);

            double runTimeSec = AtLeast(3);

            int minDocsToMakeTerms = TestUtil.NextInt(Random(), 5, 20);

            int maxSearcherAgeSeconds = TestUtil.NextInt(Random(), 1, 3);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: numNodes=" + numNodes + " runTimeSec=" + runTimeSec + " maxSearcherAgeSeconds=" + maxSearcherAgeSeconds);
            }

            Start(numNodes, runTimeSec, maxSearcherAgeSeconds);

            List <PreviousSearchState> priorSearches = new List <PreviousSearchState>();
            List <BytesRef>            terms         = null;

            while (Time.NanoTime() < endTimeNanos)
            {
                bool doFollowon = priorSearches.Count > 0 && Random().Next(7) == 1;

                // Pick a random node; we will run the query on this node:
                int myNodeID = Random().Next(numNodes);

                NodeState.ShardIndexSearcher localShardSearcher;

                PreviousSearchState prevSearchState;

                if (doFollowon)
                {
                    // Pretend user issued a followon query:
                    prevSearchState = priorSearches[Random().Next(priorSearches.Count)];

                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: follow-on query age=" + ((Time.NanoTime() - prevSearchState.SearchTimeNanos) / 1000000000.0));
                    }

                    try
                    {
                        localShardSearcher = Nodes[myNodeID].Acquire(prevSearchState.Versions);
                    }
                    catch (SearcherExpiredException see)
                    {
                        // Expected, sometimes; in a "real" app we would
                        // either forward this error to the user ("too
                        // much time has passed; please re-run your
                        // search") or sneakily just switch to newest
                        // searcher w/o telling them...
                        if (VERBOSE)
                        {
                            Console.WriteLine("  searcher expired during local shard searcher init: " + see);
                        }
                        priorSearches.Remove(prevSearchState);
                        continue;
                    }
                }
                else
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: fresh query");
                    }
                    // Do fresh query:
                    localShardSearcher = Nodes[myNodeID].Acquire();
                    prevSearchState    = null;
                }

                IndexReader[] subs = new IndexReader[numNodes];

                PreviousSearchState searchState = null;

                try
                {
                    // Mock: now make a single reader (MultiReader) from all node
                    // searchers.  In a real shard env you can't do this... we
                    // do it to confirm results from the shard searcher
                    // are correct:
                    int docCount = 0;
                    try
                    {
                        for (int nodeID = 0; nodeID < numNodes; nodeID++)
                        {
                            long          subVersion = localShardSearcher.NodeVersions[nodeID];
                            IndexSearcher sub        = Nodes[nodeID].Searchers.Acquire(subVersion);
                            if (sub == null)
                            {
                                nodeID--;
                                while (nodeID >= 0)
                                {
                                    subs[nodeID].DecRef();
                                    subs[nodeID] = null;
                                    nodeID--;
                                }
                                throw new SearcherExpiredException("nodeID=" + nodeID + " version=" + subVersion);
                            }
                            subs[nodeID] = sub.IndexReader;
                            docCount    += subs[nodeID].MaxDoc;
                        }
                    }
                    catch (SearcherExpiredException see)
                    {
                        // Expected
                        if (VERBOSE)
                        {
                            Console.WriteLine("  searcher expired during mock reader init: " + see);
                        }
                        continue;
                    }

                    IndexReader   mockReader   = new MultiReader(subs);
                    IndexSearcher mockSearcher = new IndexSearcher(mockReader);

                    Query query;
                    Sort  sort;

                    if (prevSearchState != null)
                    {
                        query = prevSearchState.Query;
                        sort  = prevSearchState.Sort;
                    }
                    else
                    {
                        if (terms == null && docCount > minDocsToMakeTerms)
                        {
                            // TODO: try to "focus" on high freq terms sometimes too
                            // TODO: maybe also periodically reset the terms...?
                            TermsEnum termsEnum = MultiFields.GetTerms(mockReader, "body").GetIterator(null);
                            terms = new List <BytesRef>();
                            while (termsEnum.Next() != null)
                            {
                                terms.Add(BytesRef.DeepCopyOf(termsEnum.Term));
                            }
                            if (VERBOSE)
                            {
                                Console.WriteLine("TEST: init terms: " + terms.Count + " terms");
                            }
                            if (terms.Count == 0)
                            {
                                terms = null;
                            }
                        }

                        if (VERBOSE)
                        {
                            Console.WriteLine("  maxDoc=" + mockReader.MaxDoc);
                        }

                        if (terms != null)
                        {
                            if (Random().NextBoolean())
                            {
                                query = new TermQuery(new Term("body", terms[Random().Next(terms.Count)]));
                            }
                            else
                            {
                                string t = terms[Random().Next(terms.Count)].Utf8ToString();
                                string prefix;
                                if (t.Length <= 1)
                                {
                                    prefix = t;
                                }
                                else
                                {
                                    prefix = t.Substring(0, TestUtil.NextInt(Random(), 1, 2));
                                }
                                query = new PrefixQuery(new Term("body", prefix));
                            }

                            if (Random().NextBoolean())
                            {
                                sort = null;
                            }
                            else
                            {
                                // TODO: sort by more than 1 field
                                int what = Random().Next(3);
                                if (what == 0)
                                {
                                    sort = new Sort(SortField.FIELD_SCORE);
                                }
                                else if (what == 1)
                                {
                                    // TODO: this sort doesn't merge
                                    // correctly... it's tricky because you
                                    // could have > 2.1B docs across all shards:
                                    //sort = new Sort(SortField.FIELD_DOC);
                                    sort = null;
                                }
                                else if (what == 2)
                                {
                                    sort = new Sort(new SortField[] { new SortField("docid", SortFieldType.INT32, Random().NextBoolean()) });
                                }
                                else
                                {
                                    sort = new Sort(new SortField[] { new SortField("title", SortFieldType.STRING, Random().NextBoolean()) });
                                }
                            }
                        }
                        else
                        {
                            query = null;
                            sort  = null;
                        }
                    }

                    if (query != null)
                    {
                        try
                        {
                            searchState = AssertSame(mockSearcher, localShardSearcher, query, sort, prevSearchState);
                        }
                        catch (SearcherExpiredException see)
                        {
                            // Expected; in a "real" app we would
                            // either forward this error to the user ("too
                            // much time has passed; please re-run your
                            // search") or sneakily just switch to newest
                            // searcher w/o telling them...
                            if (VERBOSE)
                            {
                                Console.WriteLine("  searcher expired during search: " + see);
                                Console.Out.Write(see.StackTrace);
                            }
                            // We can't do this in general: on a very slow
                            // computer it's possible the local searcher
                            // expires before we can finish our search:
                            // assert prevSearchState != null;
                            if (prevSearchState != null)
                            {
                                priorSearches.Remove(prevSearchState);
                            }
                        }
                    }
                }
                finally
                {
                    Nodes[myNodeID].Release(localShardSearcher);
                    foreach (IndexReader sub in subs)
                    {
                        if (sub != null)
                        {
                            sub.DecRef();
                        }
                    }
                }

                if (searchState != null && searchState.SearchAfterLocal != null && Random().Next(5) == 3)
                {
                    priorSearches.Add(searchState);
                    if (priorSearches.Count > 200)
                    {
                        Collections.Shuffle(priorSearches);
                        priorSearches.SubList(100, priorSearches.Count).Clear();
                    }
                }
            }

            Finish();
        }
Example #29
        private ParameterizedSql BuildPrefix(PrefixQuery prefixQuery)
        {
            string field = prefixQuery.Prefix.Field;
            if (field != null)
            {
                return BuildQuery(new WildcardQuery(new Term(field, prefixQuery.Prefix.Text + "*")));
            }

            return null;
        }
 public virtual void TestPrefixQuery()
 {
     Query query = new PrefixQuery(new Term(FIELD, "tang"));
     Assert.AreEqual(2, Searcher.Search(query, null, 1000).TotalHits, "Number of matched documents");
 }
		public virtual void  TestPrefixQuery()
		{
			IndexSearcher indexSearcher = new IndexSearcher(directory, true);
			Query query = new PrefixQuery(new Term(FIELD, "tang"));
			Assert.AreEqual(2, indexSearcher.Search(query, null, 1000).TotalHits, "Number of matched documents");
		}
Example #32
		/// <summary>Deletes an item from the index and any descendants.</summary>
		/// <param name="itemID">The id of the item to delete.</param>
		public virtual void Delete(int itemID)
		{
			logger.Debug("Deleting item #" + itemID);

			lock (accessor)
			{
				var iw = accessor.GetWriter();
				var s = accessor.GetSearcher();
				string trail = GetTrail(s, new Term(Properties.ID, itemID.ToString()));
				if (trail == null)
					return; // not indexed

				var query = new PrefixQuery(new Term(Properties.Trail, trail));
				iw.DeleteDocuments(query);
				iw.Commit();
				accessor.RecreateSearcher();
			}
		}
        public virtual void TestPrefix()
        {
            PrefixQuery expected = new PrefixQuery(new Term("field", "foobar"));

            assertEquals(expected, Parse("foobar*"));
        }
Example #34
        public virtual void TestPrefixQuery()
        {
            Query query = new PrefixQuery(new Term(FIELD, "tang"));

            Assert.AreEqual(2, Searcher.Search(query, null, 1000).TotalHits, "Number of matched documents");
        }
 private static void MultiTermQuery(PrefixQuery query, AzureQueryLogger.IndentedTextWriter writer)
 {
     AzureQueryLogger.VisitTerm(query.Prefix, "Prefix Term", writer);
 }