Code example #1
File: IndexManager.cs Project: arturcp/WebToolkit
 public void RemoveFromIndex(string[] keys, string value)
 {
     var analyzer = new AccentedAnalyzer();
     var queryParser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, keys, analyzer);
     queryParser.SetDefaultOperator(QueryParser.Operator.AND);
     var directory = FSDirectory.Open(System.IO.Directory.GetParent(IndexPath));
     var searcher = new IndexSearcher(directory, true);
     var q = queryParser.Parse(value);

     // Collect every document that matches the query.
     var idxReader = IndexReader.Open(directory, true);
     int top = idxReader.MaxDoc();
     var results = TopScoreDocCollector.create(top, true);
     searcher.Search(q, results);
     ScoreDoc[] hits = results.TopDocs().scoreDocs;
     idxReader.Close();

     // Delete all matches through a single writable reader instead of reopening one per hit.
     var deleteReader = IndexReader.Open(directory, false);
     for (int i = 0; i < hits.Length; i++)
     {
         deleteReader.DeleteDocument(hits[i].doc);
     }
     deleteReader.Commit();
     deleteReader.Close();

     searcher.Close();
     directory.Close();
 }
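A simpler route, sketched below on the assumption that the same AccentedAnalyzer and IndexPath members are available: Lucene.Net 2.9's IndexWriter can delete every document matching a query in a single call, so there is no need to search first and delete hit by hit.

 // Hedged sketch, not the project's code: delete by query through an IndexWriter.
 public void RemoveFromIndexViaWriter(string[] keys, string value)
 {
     var analyzer = new AccentedAnalyzer();
     var parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, keys, analyzer);
     parser.SetDefaultOperator(QueryParser.Operator.AND);

     var directory = FSDirectory.Open(System.IO.Directory.GetParent(IndexPath));
     // Open the existing index for writing (create: false) and delete all matches at once.
     var writer = new IndexWriter(directory, analyzer, false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.DeleteDocuments(parser.Parse(value));
     writer.Commit();
     writer.Close();
     directory.Close();
 }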
Code example #2
        public virtual void  TestSimple()
        {
            System.String[]       fields = new System.String[] { "b", "t" };
            MultiFieldQueryParser mfqp   = new MultiFieldQueryParser(fields, new StandardAnalyzer());

            Query q = mfqp.Parse("one");

            Assert.AreEqual("b:one t:one", q.ToString());

            q = mfqp.Parse("one two");
            Assert.AreEqual("(b:one t:one) (b:two t:two)", q.ToString());

            q = mfqp.Parse("+one +two");
            Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());

            q = mfqp.Parse("+one -two -three");
            Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());

            q = mfqp.Parse("one^2 two");
            Assert.AreEqual("((b:one t:one)^2.0) (b:two t:two)", q.ToString());

            q = mfqp.Parse("one~ two");
            Assert.AreEqual("(b:one~0.5 t:one~0.5) (b:two t:two)", q.ToString());

            q = mfqp.Parse("one~0.8 two^2");
            Assert.AreEqual("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.ToString());

            q = mfqp.Parse("one* two*");
            Assert.AreEqual("(b:one* t:one*) (b:two* t:two*)", q.ToString());

            q = mfqp.Parse("[a TO c] two");
            Assert.AreEqual("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.ToString());

            q = mfqp.Parse("w?ldcard");
            Assert.AreEqual("b:w?ldcard t:w?ldcard", q.ToString());

            q = mfqp.Parse("\"foo bar\"");
            Assert.AreEqual("b:\"foo bar\" t:\"foo bar\"", q.ToString());

            q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
            Assert.AreEqual("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.ToString());

            q = mfqp.Parse("\"foo bar\"~4");
            Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());

            // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
            q = mfqp.Parse("b:\"foo bar\"~4");
            Assert.AreEqual("b:\"foo bar\"~4", q.ToString());

            // make sure that terms which have a field are not touched:
            q = mfqp.Parse("one f:two");
            Assert.AreEqual("(b:one t:one) f:two", q.ToString());

            // AND mode:
            mfqp.SetDefaultOperator(QueryParser.AND_OPERATOR);
            q = mfqp.Parse("one two");
            Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
            q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
            Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
        }
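When a single global default operator is too coarse, MultiFieldQueryParser also exposes static Parse overloads that take one BooleanClause.Occur flag per field. A minimal sketch using the same fields and analyzer as the test above (the exact overload signatures vary slightly between Lucene.Net releases):

            // Hedged sketch: per-field flags instead of one default operator for all fields.
            string[] flaggedFields = new string[] { "b", "t" };
            BooleanClause.Occur[] flags = new BooleanClause.Occur[] { BooleanClause.Occur.MUST, BooleanClause.Occur.SHOULD };
            Query flagged = MultiFieldQueryParser.Parse("one two", flaggedFields, flags, new StandardAnalyzer());
            // Roughly: +(b:one b:two) (t:one t:two), i.e. "b" must match while "t" only contributes to scoring.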
Code example #3
        public virtual void  TestStopWordSearching()
        {
            Analyzer    analyzer = new StandardAnalyzer();
            Directory   ramDir   = new RAMDirectory();
            IndexWriter iw       = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
            Document    doc      = new Document();

            doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED));
            iw.AddDocument(doc);
            iw.Close();

            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(new System.String[] { "body" }, analyzer);

            mfqp.SetDefaultOperator(QueryParser.Operator.AND);
            Query         q          = mfqp.Parse("the footest");
            IndexSearcher is_Renamed = new IndexSearcher(ramDir);

            ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);
            is_Renamed.Close();
        }
Code example #4
File: 新闻Controller.cs Project: 269378737/go81
 /// <summary>
 /// Search the index and return matching results
 /// </summary>
 private TopDocs SearchIndex(string indexdic, string keyword)
 {
     PanGu.Segment.Init(PanGuXmlPath);
     Dictionary<string, string> dic = new Dictionary<string, string>();
     BooleanQuery bQuery = new BooleanQuery();
     string title = string.Empty;
     if (!string.IsNullOrEmpty(keyword))
     {
         title = GetKeyWordsSplitBySpace(keyword);
         QueryParser parse = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, new String[] { "Title", "Content" }, PanGuAnalyzer);
         // Set the default operator before parsing; otherwise it has no effect on this query.
         parse.SetDefaultOperator(QueryParser.Operator.AND);
         Lucene.Net.Search.Query query = parse.Parse(title);
         bQuery.Add(query, BooleanClause.Occur.MUST);
         dic.Add("title", keyword);
     }
     if (bQuery != null && bQuery.GetClauses().Length > 0)
     {
         return GetSearchResult(bQuery, dic, indexdic);
     }
     else
     {
         return null;
     }
 }
Code example #5
        public void Search(string indexDir, string q, int pageSize, int pageIndex, out int recCount)
        {
            indexDir = HttpContext.Current.Server.MapPath("~/Search/");
            string keywords = q;

            var search = new IndexSearcher(indexDir);

            q = GetKeyWordsSplitBySpace(q, new PanGuTokenizer());

            string[] fields = { "title", "Category", "Desc" };

            QueryParser qp = new MultiFieldQueryParser(fields, new PanGuAnalyzer(true));
            qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR);

            //var queryParser = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "Desc", new PanGuAnalyzer(true));

            Query query = qp.Parse(q);

            // Legacy Hits-based search (deprecated API), kept for the record count below.
            Hits hits = search.Search(query);

            // Newer TopDocs-based search over the top 100 documents.
            TopDocs newHits = search.Search(query, 100);
            ScoreDoc[] scoreDocs = newHits.ScoreDocs;
            for (int i = 0; i < scoreDocs.Length; i++)
            {
                Document document = search.Doc(scoreDocs[i].doc);
                string id = document.GetField("id").StringValue();
            }

            recCount = hits.Length();
            int startIndex = (pageIndex - 1) * pageSize;   // offset of the first hit of the requested page

            for (int i = 0; i <= hits.Length() - 1; i++)
            {
                string desc = hits.Doc(i).GetField("Desc").StringValue();
            }

            search.Close();
        }
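As written, the method computes a page offset but never applies it. A hedged sketch of how pageIndex and pageSize could drive the result window through the TopDocs API, assuming the same field names and a 1-based pageIndex (property casing on TopDocs differs between Lucene.Net 2.9 builds):

            // Hedged sketch: fetch enough documents to cover the requested page, then slice out that page.
            TopDocs topDocs = search.Search(query, pageSize * pageIndex);
            recCount = topDocs.TotalHits;
            int start = (pageIndex - 1) * pageSize;
            int end = Math.Min(start + pageSize, topDocs.ScoreDocs.Length);
            for (int i = start; i < end; i++)
            {
                Document d = search.Doc(topDocs.ScoreDocs[i].doc);
                string title = d.Get("title");
                string desc = d.Get("Desc");
            }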
Code example #6
		public virtual void  TestStopWordSearching()
		{
			Analyzer analyzer = new StandardAnalyzer();
			Directory ramDir = new RAMDirectory();
			IndexWriter iw = new IndexWriter(ramDir, analyzer, true);
			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
			doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.TOKENIZED));
			iw.AddDocument(doc);
			iw.Close();
			
			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(new System.String[]{"body"}, analyzer);
			mfqp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.Operator.AND);
			Query q = mfqp.Parse("the footest");
			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
			Hits hits = is_Renamed.Search(q);
			Assert.AreEqual(1, hits.Length());
			is_Renamed.Close();
		}
Code example #7
		public virtual void  TestStopWordSearching()
		{
			Analyzer analyzer = new StandardAnalyzer();
			Directory ramDir = new RAMDirectory();
			IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
			Document doc = new Document();
			doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED));
			iw.AddDocument(doc);
			iw.Close();
			
			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(new System.String[]{"body"}, analyzer);
			mfqp.SetDefaultOperator(QueryParser.Operator.AND);
			Query q = mfqp.Parse("the footest");
			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).scoreDocs;
			Assert.AreEqual(1, hits.Length);
			is_Renamed.Close();
		}
Code example #8
File: Default.aspx.cs Project: kiichi7/Search-Engine
    public void Query()
    {
        // Build the objects needed for the query.
        // ====================> (1) Search across two indexes: File and File2 <====================
        //Lucene.Net.Index.MultiReader reader = new Lucene.Net.Index.MultiReader(new IndexReader[] { IndexReader.Open(@"\\172.16.32.189\File"), IndexReader.Open(Server.MapPath("./") + @"index\\File\\") });

        Lucene.Net.Search.ParallelMultiSearcher searcher;

        System.String index1 = Server.MapPath("./") + "index\\File\\";
        IndexReader reader1 = IndexReader.Open(index1);    // reader over the local "index\File" directory
        Searcher searcher1 = new IndexSearcher(reader1);   // searcher bound to that reader
        Analyzer analyzer = new StandardAnalyzer();        // standard analyzer

        System.String index2 = @"\\172.16.32.189\File";
        //System.String index = Server.MapPath("./") + "index\\File2\\";   // alternative local index directory

        if (System.IO.Directory.Exists(index2))
        {
            IndexReader reader2 = Lucene.Net.Index.IndexReader.Open(index2);   // reader over the remote index share
            IndexSearcher searcher2 = new IndexSearcher(reader2);
            searcher = new Lucene.Net.Search.ParallelMultiSearcher(new Lucene.Net.Search.Searchable[] { searcher1, searcher2 });
        }
        else
        {
            searcher = new Lucene.Net.Search.ParallelMultiSearcher(new Lucene.Net.Search.Searchable[] { searcher1 });
        }

        // ====================> (2) Searching an in-memory index <====================
        //Lucene.Net.Search.IndexSearcher searcher = new IndexSearcher(ramdir);

        // Clear the previous results before querying.
        this.result_TxtBox.Text = "";
        // Nothing to do if the query box is empty.
        if (this.query_txtbox.Text.Trim(new char[] { ' ' }) == String.Empty)
            return;

        System.String Query_condition = this.query_txtbox.Text.Trim();

        // ====================> (1) Single-field query parser <====================
        //System.String field = "title";
        //QueryParser parser = new QueryParser("field", analyzer);   // parser over one field with the standard analyzer

        // ====================> (2) Multi-field query parser <====================
        string[] strs = new string[] { "title", "contents" };
        Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.MultiFieldQueryParser(strs, new StandardAnalyzer());
        parser.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR);

        Query query = parser.Parse(Query_condition);   // parse the query text entered by the user
        // Run the search.
        Hits hits = searcher.Search(query);
        if (hits.Length() == 0)
        {
            this.result_TxtBox.Text += "========== Sorry, no results were found for your keywords. ^+^ ===========" + "\n";
        }
        else
        {
            for (int i = 0; i < hits.Length(); i++)
            {
                // Get the matched document.
                Document doc = hits.Doc(i);

                //string filename = doc.Get("path");
                string result_path = doc.Get("path");    // stored path of the hit

                //string folder = Path.GetDirectoryName(result_path);

                string filename = Path.GetFileName(result_path);   // file name of the hit

                //DirectoryInfo di = new DirectoryInfo(folder);

                this.result_TxtBox.Text += "======================================================" + "\n";
                this.result_TxtBox.Text += "File containing the result:   " + filename + "\n";
                //this.result_TxtBox.Text += "Directory containing the result:   " + di.FullName + "\n";
            }
        }
        //reader.Close();
        searcher.Close();
    }
Code example #9
File: 公告Controller.cs Project: 269378737/go81
        /// <summary>
        /// Search the index and return matching results
        /// </summary>
        private TopDocs SearchIndex(string indexdic, string pro, string city, string area, string hy, string adclass, string keyword)
        {
            PanGu.Segment.Init(PanGuXmlPath);
            Dictionary<string, string> dic = new Dictionary<string, string>();
            BooleanQuery bQuery = new BooleanQuery();


            if (!string.IsNullOrEmpty(pro))
            {
                //Lucene.Net.Search.Query query = new QueryParser("AdPro", PanGuAnalyzer).Parse(pro);
                Lucene.Net.Search.Query query = new TermQuery(new Term("AdPro", pro));
                bQuery.Add(query, BooleanClause.Occur.MUST); // MUST, SHOULD and MUST_NOT correspond to AND, OR and NOT

                if (!string.IsNullOrEmpty(city) && city != "不限")
                {
                    query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdCity", PanGuAnalyzer).Parse(city);
                    //query = new TermQuery(new Term("AdCity", city));
                    bQuery.Add(query, BooleanClause.Occur.MUST);

                    if (!string.IsNullOrEmpty(area) && area != "不限")
                    {
                        query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdArea", PanGuAnalyzer).Parse(area);
                        //query = new TermQuery(new Term("AdArea", area));
                        bQuery.Add(query, BooleanClause.Occur.MUST);
                    }
                }
            }
            if (!string.IsNullOrEmpty(hy))
            {
                Lucene.Net.Search.Query query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdHy", PanGuAnalyzer).Parse(hy);
                //Lucene.Net.Search.Query query = new TermQuery(new Term("AdHy", hy));
                bQuery.Add(query, BooleanClause.Occur.MUST);
            }
            if (!string.IsNullOrEmpty(adclass) && adclass != "中标商品公告查询")
            {
                if (adclass == "公开招标" || adclass == "其他")
                {
                    //Lucene.Net.Search.Query query = new QueryParser("AdClass", PanGuAnalyzer).Parse(adclass);
                    Lucene.Net.Search.Query query = new TermQuery(new Term("AdClass", adclass));
                    bQuery.Add(query, BooleanClause.Occur.MUST);
                }
                else
                {
                    BooleanQuery bQueryt = new BooleanQuery();
                    //Lucene.Net.Search.Query query = new TermQuery(new Term("AdClass", "公开招标"));
                    //bQuery.Add(query, BooleanClause.Occur.MUST_NOT);

                    ////Lucene.Net.Search.Query query = new QueryParser("AdClass", PanGuAnalyzer).Parse(adclass);
                    //query = new TermQuery(new Term("AdClass", "其他"));
                    //bQuery.Add(query, BooleanClause.Occur.MUST_NOT);
                    Lucene.Net.Search.Query query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("邀请");
                    bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                    //Lucene.Net.Search.Query query = new QueryParser("AdClass", PanGuAnalyzer).Parse(adclass);
                    query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("协议采购");
                    bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                    query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("单一来源");
                    bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                    query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("询价采购");
                    bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                    query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("竞争性谈判");
                    bQueryt.Add(query, BooleanClause.Occur.SHOULD);
                    bQuery.Add(bQueryt, BooleanClause.Occur.MUST);

                }
            }
            else
            {
                BooleanQuery bQueryt = new BooleanQuery();
                Lucene.Net.Search.Query query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("公开招标");
                bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                //Lucene.Net.Search.Query query = new QueryParser("AdClass", PanGuAnalyzer).Parse(adclass);
                query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("其他");
                bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("邀请");
                bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                //Lucene.Net.Search.Query query = new QueryParser("AdClass", PanGuAnalyzer).Parse(adclass);
                query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("协议采购");
                bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("单一来源");
                bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("询价采购");
                bQueryt.Add(query, BooleanClause.Occur.SHOULD);

                query = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "AdClass", PanGuAnalyzer).Parse("竞争性谈判");
                bQueryt.Add(query, BooleanClause.Occur.SHOULD);
                bQuery.Add(bQueryt, BooleanClause.Occur.MUST);
            }

            if (!string.IsNullOrEmpty(keyword))
            {
                // The outer check already guarantees keyword is non-empty.
                string title = GetKeyWordsSplitBySpace(keyword);
                //QueryParser parse = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, new String[] { "Title","Content"}, PanGuAnalyzer);
                QueryParser parse = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, new String[] { "Title" }, PanGuAnalyzer);
                // Set the default operator before parsing; otherwise it has no effect on this query.
                parse.SetDefaultOperator(QueryParser.Operator.AND);
                var query = parse.Parse(title);
                bQuery.Add(query, BooleanClause.Occur.MUST);
                dic.Add("title", keyword);
            }
            //if (bQuery != null && bQuery.GetClauses().Length > 0)
            //{
            return GetSearchResult(bQuery, dic, indexdic);
            //}
            //else
            //{
            //    return null;
            //}
        }
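Every branch above follows the same shape: a group of SHOULD clauses (logical OR) is nested inside the outer BooleanQuery as a MUST clause (logical AND). A compact, hedged sketch of that pattern; the field name and term values come from the example, the rest is illustrative:

            // Hedged sketch: the document must match at least one of the AdClass terms.
            var outerQuery = new BooleanQuery();
            var adClassGroup = new BooleanQuery();
            foreach (var term in new[] { "公开招标", "邀请", "协议采购" })
            {
                adClassGroup.Add(new TermQuery(new Term("AdClass", term)), BooleanClause.Occur.SHOULD);
            }
            outerQuery.Add(adClassGroup, BooleanClause.Occur.MUST);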
Code example #10
File: Searcher.cs Project: usmanghani/Misc
        private QueryParser _createQueryParser()
        {
            string[] searchfields = null;
            if (_searchFields.Count == 0)
                searchfields = new string[] { "contents" };
            else
                searchfields = _searchFields.ToArray();

            MultiFieldQueryParser parser = new MultiFieldQueryParser(searchfields, _analyzer);

            switch (_operator)
            {
                case DefaultOperator.OR:
                    parser.SetDefaultOperator(QueryParser.OR_OPERATOR);
                    break;

                case DefaultOperator.AND:
                    parser.SetDefaultOperator(QueryParser.AND_OPERATOR);
                    break;

            }

            return parser;
        }
Code example #11
File: Index.cs Project: alecrt/FormEditor
        private Result GetSearchResults(string searchQuery, string [] searchFields, string sortField, bool sortDescending, int count, int skip)
        {
            var reader = GetIndexReader();
            var searcher = GetIndexSearcher(reader);

            string sortFieldName;
            if(string.IsNullOrWhiteSpace(sortField))
            {
                sortField = sortFieldName = CreatedField;
                sortDescending = true;
            }
            else if(sortField == CreatedField)
            {
                sortFieldName = CreatedField;
            }
            else
            {
                sortFieldName = FieldNameForSorting(sortField);
            }

            Query query;
            if(string.IsNullOrWhiteSpace(searchQuery) == false && searchFields != null && searchFields.Any())
            {
                searchQuery = searchQuery.Replace("*", "").Replace(" ", "* ") + "*";
                var parser = new MultiFieldQueryParser(Version.LUCENE_29, searchFields, GetAnalyzer());
                parser.SetDefaultOperator(QueryParser.Operator.AND);
                try
                {
                    query = parser.Parse(searchQuery.Trim());
                }
                catch(ParseException)
                {
                    query = parser.Parse(QueryParser.Escape(searchQuery.Trim()));
                }
            }
            else
            {
                query = new MatchAllDocsQuery();
            }

            var docs = searcher.Search(
                query,
                null, reader.MaxDoc(),
                new Sort(new SortField(sortFieldName, SortField.STRING, sortDescending))
                );

            var scoreDocs = docs.ScoreDocs;

            var rows = new List<Row>();
            for(var i = skip; i < (skip + count) && i < scoreDocs.Length; i++)
            {
                if(reader.IsDeleted(scoreDocs[i].doc))
                {
                    continue;
                }
                var doc = searcher.Doc(scoreDocs[i].doc);
                var row = ParseRow(doc);
                rows.Add(row);
            }

            searcher.Close();
            reader.Close();

            return new Result(scoreDocs.Count(), rows, sortField, sortDescending);
        }
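The parse-then-escape fallback used above is a reusable pattern. A minimal sketch, assuming any Lucene.Net 2.9 QueryParser (including the MultiFieldQueryParser created in the method):

        // Hedged sketch: try the raw query first, then retry with reserved query syntax escaped.
        private static Query ParseSafely(QueryParser parser, string rawQuery)
        {
            try
            {
                return parser.Parse(rawQuery.Trim());
            }
            catch (ParseException)
            {
                // Escape characters such as unbalanced quotes or stray operators and parse again.
                return parser.Parse(QueryParser.Escape(rawQuery.Trim()));
            }
        }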
Code example #12
File: Search.aspx.cs Project: kiichi7/Search-Engine
    /// <summary>
    /// Runs the search against the existing index directories.
    /// </summary>
    private void search()
    {
        DateTime start = DateTime.Now;

        //try
        //{
        // }
        //catch (Exception e)
        //{

        //    Response.Write("<script type='text/javascript'>window.alert(' " + e.ToString() + " ');</script>");

        //}

        // Create the searchers.
        // The indexes live under the local "index" directory.
        string indexDirectory1 = Server.MapPath("./") + "index\\1.5\\";
        IndexSearcher searcher1 = new IndexSearcher(indexDirectory1);

        string indexDirectory2 = Server.MapPath("./") + "index\\1.4\\";
        IndexSearcher searcher2 = new IndexSearcher(indexDirectory2);

        //System.String index3 = @"\\192.168.1.130\index\1.5";
        //Lucene.Net.Index.IndexReader reader3;
        Lucene.Net.Search.ParallelMultiSearcher searcher;

        /* if (System.IO.Directory.Exists(index3))   // is the remote index directory available?
        {
            reader3 = Lucene.Net.Index.IndexReader.Open(index3);   // reader over the remote index directory
            IndexSearcher searcher3 = new IndexSearcher(reader3);

            searcher = new Lucene.Net.Search.ParallelMultiSearcher(new Lucene.Net.Search.Searchable[] { searcher3, searcher1, searcher2 });
        }
        else
        {
            searcher = new Lucene.Net.Search.ParallelMultiSearcher(new Lucene.Net.Search.Searchable[] { searcher1, searcher2 });
        }*/
        searcher = new Lucene.Net.Search.ParallelMultiSearcher(new Lucene.Net.Search.Searchable[] { searcher1, searcher2 });

        //====================> (1) Single-field query parser <====================
        //System.String field = "text";
        //QueryParser parser = new QueryParser(field, new StandardAnalyzer());   // parser over one field with the standard analyzer

        //====================> (2) Multi-field query parser <====================
        string[] strs = new string[] { "text", "path", "fullpath", "keywords", "description", "title" };
        Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.MultiFieldQueryParser(strs, new StandardAnalyzer());
        parser.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR);

        // Define the result table columns.
        this.Results.Columns.Add("link", typeof(string));
        this.Results.Columns.Add("title", typeof(string));
        this.Results.Columns.Add("sample", typeof(string));
        this.Results.Columns.Add("path", typeof(string));

        // Run the search.
        Query query = parser.Parse(this.Query);   // parse the query text entered by the user
        Hits hits = searcher.Search(query);

        this.total = hits.Length();

        // Highlight the matched keywords; the default markup is <b>..</b>,
        // here a red <B> tag is used instead.
        SimpleHTMLFormatter simpleHTMLFormatter = new SimpleHTMLFormatter("<B style='color:Red;'>", "</B>");
        Highlighter highlighter = new Highlighter(simpleHTMLFormatter, new QueryScorer(query));
        // Limit the length of each returned fragment: too short shows only the start
        // of the content, too long wastes space and time.
        highlighter.SetTextFragmenter(new SimpleFragmenter(100));

        // initialize startAt
        this.startAt = initStartAt();

        // Number of hits to display on this page.
        int resultsCount = smallerOf(total, this.maxResults + this.startAt);

        for (int i = startAt; i < resultsCount; i++)
        {
            // Get the matched document.
            Document doc = hits.Doc(i);

            // Append trailing spaces so a keyword at the very end can still be highlighted.
            string title = doc.Get("title") + "  ";
            // Get the document text.
            System.String text = Search.CutString(doc.Get("text"), 480);
            // Get the stored paths of the file.
            string path = doc.Get("path");
            string orpath = doc.Get("fullpath");

            Lucene.Net.Analysis.TokenStream titleStream = analyzer.TokenStream("title", new System.IO.StringReader(title));   // title
            Lucene.Net.Analysis.TokenStream tokenStream = analyzer.TokenStream("text", new System.IO.StringReader(text));     // summary
            Lucene.Net.Analysis.TokenStream pathStream = analyzer.TokenStream("path", new System.IO.StringReader(path));      // displayed path

            System.String result = highlighter.GetBestFragments(tokenStream, text, 2, "...");
            string tresult = highlighter.GetBestFragments(titleStream, title, 2, "..");
            string pathwords = highlighter.GetBestFragments(pathStream, path, 2, "..");   // highlighted path, when available

            // Create a row to display this result.
            DataRow row = this.Results.NewRow();
            if (tresult == "")
            {
                row["title"] = title;
            }
            else
            {
                row["title"] = tresult;
            }

            if (getpath(row, System.IO.Path.GetFileName(path.Replace("\\", "/"))))
            {
                row["link"] = getFullpath(System.IO.Path.GetFileName(doc.Get("path")));
            }
            else
            {
                row["link"] = orpath;
                if (pathwords == "")
                {
                    row["path"] = orpath;
                }
                else
                {
                    row["path"] = pathwords.Replace("\\", "/");
                }
            }

            if (result == "")
            {
                row["sample"] = text;
            }
            else
            {
                row["sample"] = result;
            }

            this.Results.Rows.Add(row);
        }
        searcher.Close();

        // Output the summary information (timing and paging).
        this.duration = DateTime.Now - start;
        this.fromItem = startAt + 1;
        this.toItem = smallerOf(startAt + maxResults, total);
    }