Example #1
        public void DeletedItems_DoesntShowUp_InSearch()
        {
            var item = CreateOneItem<PersistableItem>(2, "Hello world", root);

            indexer.Update(item);
            indexer.Delete(item.ID);

            var hits = searcher.Search(Query.For("hello"));

            Assert.That(hits.Hits.Count(), Is.EqualTo(0));
        }
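For contrast, a minimal positive-control sketch, assuming the same fixture members (indexer, searcher, root) and the CreateOneItem<PersistableItem> helper used above, checks that an updated item is found as long as it has not been deleted:

        public void UpdatedItems_ShowUp_InSearch()
        {
            // Sketch only: relies on the same (assumed) test fixture as the example above.
            var item = CreateOneItem<PersistableItem>(3, "Hello again", root);

            indexer.Update(item);

            var hits = searcher.Search(Query.For("hello"));

            // One hit is expected because the item was indexed and never deleted.
            Assert.That(hits.Hits.Count(), Is.EqualTo(1));
        }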
Example #2
        public virtual void ReindexDescendants(int rootID, bool clearBeforeReindex)
        {
            Execute(new Work
            {
                Name   = "Reindex descendants of #" + rootID,
                Action = () =>
                {
                    if (clearBeforeReindex)
                    {
                        indexer.Delete(rootID);
                    }

                    Reindex(rootID, true);
                }
            });
        }
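A call-site sketch (worker and rootPage are placeholder names for an instance of the class above and a content item): passing clearBeforeReindex as true removes the subtree root's index entries before the descendants are reindexed, while false rebuilds without the prior delete.

        // Sketch only: "worker" and "rootPage" stand in for an instance of the class above and a content item.
        worker.ReindexDescendants(rootPage.ID, clearBeforeReindex: true);   // purge the index entries, then rebuild the subtree
        worker.ReindexDescendants(rootPage.ID, clearBeforeReindex: false);  // rebuild the subtree without deleting first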
Example #3
        public ActionResult Delete(ContentReference id)
        {
            var content = _contentLoader.Get<IContent>(id);

            _contentIndexer.Delete(content);

            return null;
        }
Example #4
        /// <summary>
        /// Method used to start the content crawl. This method retrieves all searchable content from the CMS
        /// and calls the assigned indexer, which posts the content to the search server. The crawler runs in
        /// two modes, selected by the crawlStartDate parameter.
        ///
        /// Full crawl: When crawlStartDate is null, all searchable content is retrieved from the CMS
        /// and posted to the search server. When performing a full crawl, the indexer compares what is
        /// posted with what is currently in the index and deletes everything in the index that is not in
        /// the current set of posted items.
        ///
        /// Partial crawl: When a crawlStartDate is defined, only content published on or after
        /// the crawl start date is retrieved and posted to the search server. In addition,
        /// deleted and archived items are removed from the search index.
        /// </summary>
        /// <returns>The results of the crawl, including counts and total duration.</returns>
        public IndexResults RunCrawler(Global<T>.StatusCallBack statusCallback, DateTime? crawlStartDate)
        {
            var dateStart = DateTime.Now;

            var fullCrawl = (crawlStartDate == null);

            var searchablePages = GetSearchablePages(ContentReference.RootPage, crawlStartDate);

            var results = (fullCrawl) ?
                          _contentIndexer.RunFullIndex(searchablePages, statusCallback, IndexerCallback) :
                          _contentIndexer.RunUpdate(searchablePages, statusCallback, IndexerCallback);

            if (!fullCrawl)
            {
                results.DeleteCnt  = _contentIndexer.Delete(GetTrashCanPages(ContentReference.RootPage, crawlStartDate));
                results.DeleteCnt += _contentIndexer.Delete(GetArchivedPages(ContentReference.RootPage, crawlStartDate));
            }

            results.Duration = (DateTime.Now - dateStart);

            return results;
        }
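A call-site sketch based on the summary above (crawler and OnStatus are placeholder names for an instance of this class and a Global<T>.StatusCallBack-compatible method): passing null runs a full crawl, while passing a start date runs a partial crawl that also prunes trashed and archived items.

        // Sketch only: "crawler" and "OnStatus" are placeholder names.
        var fullResults    = crawler.RunCrawler(OnStatus, null);                     // full crawl: index everything, prune stale entries
        var partialResults = crawler.RunCrawler(OnStatus, DateTime.Now.AddDays(-1)); // partial crawl: content published in the last day,
                                                                                     // plus deletion of trashed/archived items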