Example #1
        /// <summary>
        /// Searches the index for the given keywords.
        /// </summary>
        /// <param name="kewWords">Keywords to search for</param>
        public List<MetaSource> SearchFromIndexData(string kewWords)
        {
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(IndexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            // Search condition
            PhraseQuery query = new PhraseQuery();
            // Tokenize the user's input keywords
            foreach (string word in SplitContent.SplitWords(kewWords))
            {
                query.Add(new Term(Constants.CONTENT, word));
            }
            // query.Add(new Term("content", "C#")); // with several terms, all of them must match (AND)
            // Maximum allowed distance (slop) between the keywords
            query.Slop = 100;

            // TopScoreDocCollector is the container that gathers the query results
            TopScoreDocCollector collector = TopScoreDocCollector.Create(200, true);

            // Run the query and collect the hits into the collector
            searcher.Search(query, null, collector);
            // TopDocs(0, TotalHits) covers every matching document; TopDocs(20, 10) would return hits 20-30, which is how paging works
            ScoreDoc[] docs = collector.TopDocs(0, collector.TotalHits).ScoreDocs;

            // Result entities to be displayed
            List<MetaSource> searchResult = new List<MetaSource>();
            for (int i = 0; i < docs.Length; i++)
            {
                // Lucene's internal id for this hit
                int docId = docs[i].Doc;
                // Load the Document object by its id
                Document doc = searcher.Doc(docId);
                MetaSource result = new MetaSource();
                result.Title = doc.Get(Constants.TITILE);
                // Highlight the keywords using the highlighter plugin shipped with PanGu
                result.Content = SplitContent.HightLight(kewWords, doc.Get(Constants.CONTENT));
                result.Time = doc.Get(Constants.TIME);
                result.Imgs = doc.Get(Constants.IMGS);
                result.Url = doc.Get(Constants.URL);
                searchResult.Add(result);
            }
            return searchResult;
        }
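
This example and the paged variant below rely on a SplitContent helper that is not shown on this page. The following is a minimal sketch of what it might look like, assuming the PanGu analyzer for Lucene.Net (PanGuAnalyzer) and the PanGu.HighLight plugin are referenced; the highlight markup and fragment size are illustrative choices, not values taken from the original project.

using System.Collections.Generic;
using System.IO;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.PanGu;
using Lucene.Net.Analysis.Tokenattributes;
using PanGu;
using PanGu.HighLight;

public static class SplitContent
{
    // Tokenize the text with the PanGu analyzer and return the individual terms.
    public static string[] SplitWords(string content)
    {
        List<string> words = new List<string>();
        Analyzer analyzer = new PanGuAnalyzer();
        TokenStream tokenStream = analyzer.TokenStream("", new StringReader(content));
        ITermAttribute term = tokenStream.GetAttribute<ITermAttribute>();
        while (tokenStream.IncrementToken())
        {
            words.Add(term.Term);
        }
        return words.ToArray();
    }

    // Wrap matched keywords in the content with highlight markup.
    public static string HightLight(string keywords, string content)
    {
        SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
        Highlighter highlighter = new Highlighter(formatter, new Segment());
        highlighter.FragmentSize = 100;
        string fragment = highlighter.GetBestFragment(keywords, content);
        // Fall back to the raw content if the highlighter finds nothing to mark up.
        return string.IsNullOrEmpty(fragment) ? content : fragment;
    }
}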
Example #2
        /// <summary>
        /// Searches the index for the given keywords, one page at a time.
        /// </summary>
        /// <param name="kewWords">Keywords to search for</param>
        /// <param name="startRowIndex">Page number (1-based)</param>
        /// <param name="pagesize">Page size</param>
        /// <param name="totalHits">Total number of matches</param>
        /// <returns>The matching documents for the requested page</returns>
        public List<MetaSource> SearchFromIndexDataByPage(string kewWords, int startRowIndex, int pagesize, out int totalHits)
        {
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(IndexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            // Search conditions
            BooleanQuery query = new BooleanQuery();
            PhraseQuery titleQuery = new PhraseQuery();
            PhraseQuery contentQuery = new PhraseQuery();

            // Tokenize the user's input keywords
            foreach (string word in SplitContent.SplitWords(kewWords))
            {
                titleQuery.Add(new Term(Constants.TITILE, word));
                contentQuery.Add(new Term(Constants.CONTENT, word));
            }

            // query.Add(new Term("content", "C#")); // with several terms, all of them must match (AND)
            // Maximum allowed distance (slop) between the keywords
            titleQuery.Slop = 20;
            contentQuery.Slop = 100;

            // Combine the title and content queries with OR semantics
            query.Add(new BooleanClause(titleQuery, Occur.SHOULD));
            query.Add(new BooleanClause(contentQuery, Occur.SHOULD));

            // TopScoreDocCollector is the container that gathers the query results
            TopScoreDocCollector collector = TopScoreDocCollector.Create(500, true);

            // Run the query and collect the hits into the collector
            searcher.Search(query, null, collector);
            // TopDocs(start, howMany) returns the documents in [start, start + howMany);
            // e.g. TopDocs(20, 10) returns hits 20-30, which is how paging is implemented
            int startIndex = (startRowIndex - 1) * pagesize;
            // Never request more hits than the collector actually holds
            int howMany = Math.Min(pagesize, Math.Max(collector.TotalHits - startIndex, 0));
            ScoreDoc[] docs = collector.TopDocs(startIndex, howMany).ScoreDocs;

            // Result entities to be displayed
            List<MetaSource> searchResult = new List<MetaSource>();
            for (int i = 0; i < docs.Length; i++)
            {
                // Lucene's internal id for this hit
                int docId = docs[i].Doc;
                // Load the Document object by its id
                Document doc = searcher.Doc(docId);
                MetaSource result = new MetaSource();

                // Highlight the keywords using the highlighter plugin shipped with PanGu
                result.Title = SplitContent.HightLight(kewWords, doc.Get(Constants.TITILE));
                result.Content = SplitContent.HightLight(kewWords, doc.Get(Constants.CONTENT));
                result.Time = doc.Get(Constants.TIME);
                result.Imgs = doc.Get(Constants.IMGS);
                result.Url = doc.Get(Constants.URL);
                result.CreatedTime = Convert.ToDateTime(doc.Get(Constants.CREATETIME));
                result.ProvinceId = doc.Get(Constants.PROVINCEID);
                result.CityId = doc.Get(Constants.CITYID);
                result.AreaId = doc.Get(Constants.AREAID);
                result.ResourceId = Convert.ToInt32(doc.Get(Constants.RESOURCEID));
                result.ResultType = (SearchResultType)(Convert.ToInt32(doc.Get(Constants.RESULTTYPE)));

                searchResult.Add(result);
            }
            totalHits = collector.TotalHits;
            return searchResult;
        }
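
For reference, calling the paged search might look like the following. This is only a usage sketch: the IndexService wrapper name, the keyword, and the page values are assumptions made for illustration; only SearchFromIndexDataByPage itself comes from the example above.

// Usage sketch (assumes using System; using System.Collections.Generic; and that the
// search methods above live in a class referred to here as IndexService).
IndexService service = new IndexService();
int totalHits;
// Request page 2 with 10 hits per page; totalHits receives the overall match count.
List<MetaSource> page = service.SearchFromIndexDataByPage("wedding planning", 2, 10, out totalHits);

int pageSize = 10;
int pageCount = (totalHits + pageSize - 1) / pageSize;   // total pages, rounded up
foreach (MetaSource hit in page)
{
    Console.WriteLine("{0} - {1}", hit.Title, hit.Url);
}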
Example #3
        public ActionResult SiteSerach(FormCollection form)
        {
            PublicConfig serachConfig = null;
            try
            {
                DateTime LastUpdateTime;
                string result = string.Empty;
                const string http = "http://{0}/";
                string baseroot = string.Format(http, Request.Url.Authority);
                // Use a relative url instead of the full url, because otherwise search results viewed in the WeChat client always link to the PC site.
                //string baseroot = "/";

                serachConfig = GetConfig();

                LastUpdateTime = serachConfig.LastUpdatedTime;
                IndexManager indexManager =
                    new IndexManager(Server.MapPath(IndexPath), Server.MapPath(DictPath));

                List<MetaSource> sourceList = new List<MetaSource>();
                MetaSource source = null;

                int newAddIndex = 0;

                #region Activities

                ActivityService activityService = new ActivityService();
                List<Activity> activities = activityService.QueryActivities(
                    new QueryActivityCriteria
                    {
                        QueryType = 6,
                        LastUpdatedTime = LastUpdateTime,
                        PageSize = int.MaxValue,
                        StartRowIndex = 1
                    });
                if (activities != null && activities.Count > 0)
                {
                    foreach (Activity n in activities)
                    {
                        source = new MetaSource()
                        {
                            ResourceId = n.Id,
                            Title = n.Title,
                            Time = n.CreatedTime.Value.ToShortDateString(),
                            CreatedTime = n.CreatedTime.Value,
                            ResultType = SearchResultType.Activity,
                            ProvinceId = n.Province,
                            CityId = n.City,
                            AreaId = n.Area
                        };
                        source.Url = GenFullUrl(baseroot, n.Id, "activity");
                        source.Imgs = string.Empty;
                        if (n.ImgUrls != null && n.ImgUrls.Length > 0)
                        {
                            foreach (var img in n.ImgUrls)
                            {
                                source.Imgs = source.Imgs + split + img;
                            }
                            source.Imgs = source.Imgs.TrimStart(split.ToCharArray());
                        }
                        // Strip markup from the content
                        source.Content = ParseTags(n.ContentStyle);
                        source.CheckFields(baseroot);
                        sourceList.Add(source);
                        newAddIndex++;
                    }
                }

                #endregion

                #region Resources

                ResourceManager resourceManager = new ResourceManager();
                var resources = resourceManager.GetResourcesByTime(LastUpdateTime);
                if (resources != null && resources.Count > 0)
                {
                    foreach (Witbird.SHTS.DAL.New.Resource n in resources)
                    {
                        source = new MetaSource()
                        {
                            ResourceId = n.Id,
                            Title = n.Title,
                            Time = n.LastUpdatedTime.ToShortDateString(),
                            CreatedTime = n.LastUpdatedTime,
                            // Resource types are stored as: venue = 1, actor = 2, equipment = 3, other = 4
                            ResultType = (SearchResultType)(n.ResourceType - 1),
                            ProvinceId = n.ProvinceId,
                            CityId = n.CityId,
                            AreaId = n.AreaId
                        };
                        source.Url = GenFullUrl(baseroot, n.Id, "Resource");
                        source.Imgs = string.Empty;
                        if (n.ImgUrls != null && n.ImgUrls.Length > 0)
                        {
                            foreach (var img in n.ImgUrls)
                            {
                                source.Imgs = source.Imgs + split + img;
                            }
                            source.Imgs = source.Imgs.TrimStart(split.ToCharArray());
                        }
                        // Strip markup from the content
                        source.Content = ParseTags(n.Description);
                        source.CheckFields(baseroot);
                        sourceList.Add(source);
                    }
                }

                #endregion

                #region Demands

                DemandManager demandManager = new DemandManager();
                var demands = demandManager.QueryDemandsByTime(LastUpdateTime);
                if (demands != null && demands.Count > 0)
                {
                    foreach (Demand n in demands)
                    {
                        source = new MetaSource()
                        {
                            ResourceId = n.Id,
                            Title = n.Title,
                            Time = n.InsertTime.ToShortDateString(),
                            CreatedTime = n.InsertTime,
                            ResultType = SearchResultType.Demand,
                            ProvinceId = n.Province,
                            CityId = n.City,
                            AreaId = n.Area
                        };
                        source.Url = GenFullUrl(baseroot, n.Id, "Demand");
                        source.Imgs = string.Empty;
                        // Strip markup from the content
                        source.Content = ParseTags(n.Description);
                        source.CheckFields(baseroot);
                        sourceList.Add(source);
                    }
                }

                #endregion

                indexManager.AddIndexByData(sourceList);
                serachConfig.LastUpdatedTime = DateTime.Now;
                UpdateConfig(serachConfig);
            }
            catch (Exception e)
            {
                LogService.Log("Create search index", e.ToString());
            }
            return View(serachConfig);
        }
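
The controller strips markup from each record with a ParseTags helper before handing the text to the indexer. That helper is not shown on this page; the sketch below is a hypothetical regex-based reconstruction, assuming System.Text.RegularExpressions and System.Web are available.

// Hypothetical reconstruction of the ParseTags helper used above: it removes HTML tags and
// decodes entities so only plain text is written into the Lucene index.
// Requires: using System.Text.RegularExpressions; using System.Web;
private static string ParseTags(string html)
{
    if (string.IsNullOrEmpty(html))
    {
        return string.Empty;
    }
    string text = Regex.Replace(html, "<[^>]+>", " ");    // drop tags such as <p> or <img ...>
    text = HttpUtility.HtmlDecode(text);                  // turn entities like &nbsp; back into characters
    return Regex.Replace(text, @"\s+", " ").Trim();       // collapse whitespace runs
}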