/// <summary>
/// Main recursive workhorse for page indexing. Recursion stops once
/// DBSearchResult.GoneFarEnough reports the configured depth limit
/// (NUMBER_OF_LEVELS, read from web.config) has been reached for this site index.
/// </summary>
/// <param name="parentID">The ID of the page which contains the link.
/// NOTE(review): this parameter is never read in the body — the parent used for each
/// child is sr.ParentID from the saved link data; confirm whether it can be removed.</param>
/// <param name="siteIndexID">The ID assigned to the site (group of related pages) being indexed.</param>
/// <returns>JSON totals of indexed pages for the site index (always computed in finally).</returns>
public JsonResult doPageIndexing(int parentID, int siteIndexID)
{
    SearchTotal finalCount;
    try
    {
        // This method runs recursively until the limit is reached.
        // ConcurrentBag because Parallel.ForEach adds from multiple threads.
        ConcurrentBag<ContentSearchResult> searchResults = new ConcurrentBag<ContentSearchResult>();

        // Get the links saved so far and check whether we have walked deep enough.
        bool limitReached = DBSearchResult.GoneFarEnough(NUMBER_OF_LEVELS, siteIndexID);
        if (!limitReached)
        {
            List<LinkedPageData> pageLinksMain = DBSearchResult.GetLinkDataForSiteIndexID(siteIndexID);

            // Download page content in parallel; only the (thread-safe) bag is shared.
            Parallel.ForEach(pageLinksMain, (sr) =>
            {
                string fullURL = string.Concat(sr.PageDirectory, sr.PageName);
                ContentSearchResult csr = SearchLibrary.LoadPageContent(fullURL, sr.ParentID, siteIndexID);
                searchResults.Add(csr);
            });

            // Now that all the links have content, parse and save sequentially
            // (DB writes), then recurse into each newly saved page.
            foreach (ContentSearchResult csr in searchResults)
            {
                SearchLibrary.GetLinksAndKeywords(csr);
                csr.PageID = DBSearchResult.SaveSearchResults(csr);
                doPageIndexing(csr.PageID, siteIndexID);
            }
        }
    }
    // FIX: the original had separate DbEntityValidationException and Exception
    // handlers with identical bodies; a single Exception handler covers both.
    catch (Exception ex)
    {
        MessageLogger.LogThis(ex);
        Server.ClearError();
    }
    finally
    {
        // Totals are reported even when this level failed part-way through.
        finalCount = DBSearchResult.GetIndexedPageTotals(siteIndexID);
    }
    return Json(finalCount, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// ContentSearch based on search text and templates (facets).
/// Facets are captured at three points: before any filtering (all possible facets),
/// after the search-text query (per-facet counts), and after the template filter
/// (the final result set). The order of these steps is significant.
/// </summary>
/// <param name="filter">The ContentSearchFilter; may be null, in which case no
/// search-text or template filtering is applied.</param>
/// <returns>The populated ContentSearchResult (facets, items, counts, paging).</returns>
public ContentSearchResult SearchWithFacets(ContentSearchFilter filter)
{
    var contentSearchResult = new ContentSearchResult { Paging = new Paging(), ContentSearchResultItems = new List<ContentSearchResultItem>() };
    string index = GetSearchIndexName();
    using (var context = ContentSearchManager.GetIndex(index).CreateSearchContext())
    {
        // Null query, will be updated with search text and template filter.
        var query = context.GetQueryable<SearchResultItem>();

        // Retrieve all possible facets before a query is done with the search text.
        var facetResults = query.FacetOn(item => item.TemplateId).GetFacets();

        // Stays null when filter is null; UpdateResultWithFacets handles that case.
        FacetResults facetResultsAfterSearch = null;
        if (filter != null)
        {
            ApplySearchStringQuery(filter.SearchText, ref query);
            // Facets after the text query but BEFORE the template filter,
            // so counts per facet reflect the text search only.
            facetResultsAfterSearch = query.FacetOn(item => item.TemplateId).GetFacets();
            ApplyTemplateQuery(filter.Templates, ref query);
        }

        var searchResult = query.FacetOn(item => item.TemplateId).GetResults();
        UpdateResultWithFacets(facetResults, facetResultsAfterSearch, searchResult, contentSearchResult);

        // Distinct() guards against duplicate hits before mapping to items.
        var distinctHits = searchResult.Hits.Distinct().ToList();
        // Only news and event templates are mapped into result items.
        UpdateResultWithItem(contentSearchResult, distinctHits, NewsTemplate.ID);
        UpdateResultWithItem(contentSearchResult, distinctHits, EventTemplate.ID);

        // Count BEFORE paging so the total reflects all matches, not just one page.
        contentSearchResult.ResultCount = contentSearchResult.ContentSearchResultItems.Count;
        int numberToSkip;
        UpdateResultWithPaging(contentSearchResult, filter, out numberToSkip);
        contentSearchResult.ContentSearchResultItems = contentSearchResult.ContentSearchResultItems.Skip(numberToSkip).Take(contentSearchResult.Paging.PageSize).ToList();
        return(contentSearchResult);
    }
}
// GET: SearchResults
// Runs an Umbraco full-text search for the keyword and renders the
// current template with the mapped hits.
public ActionResult Index(RenderModel model, string keyword)
{
    var currentPage = UmbracoContext.PublishedContentRequest.PublishedContent;
    var searchResults = new ContentSearchResult(currentPage);

    var hits = new List<Models.ContentResult>();
    foreach (var hit in Umbraco.TypedSearch(keyword, true))
    {
        hits.Add(new Models.ContentResult
        {
            ContentId = hit.Id.ToString(),
            ContentName = hit.Name,
            ContentType = hit.DocumentTypeAlias,
            ContentUrl = hit.Url
        });
    }

    searchResults.Result = hits;
    return CurrentTemplate(searchResults);
}
// Picks a few random contents and verifies they can be downloaded
// concurrently with overwrite enabled.
public async Task ShouldDownloadMultiple()
{
    const int maxNumberOfDownloadFiles = 3;

    ContentSearchResult result = await _fixture.GetRandomContentsAsync(string.Empty, maxNumberOfDownloadFiles);
    Assert.True(result.Results.Count > 0);

    await _client.Contents.DownloadFilesAsync(
        result,
        _fixture.TempDirectory,
        true,
        successDelegate: content => Console.WriteLine(content.GetFileMetadata().FileName),
        errorDelegate: error => Console.WriteLine(error));
}
// Returns the ID of one randomly chosen content from a search
// (optionally narrowed by searchString), or an empty string when
// the search produced no results.
public static async Task<string> GetRandomContentIdAsync(IPictureparkService client, string searchString, int limit)
{
    var request = new ContentSearchRequest { Limit = limit };
    if (!string.IsNullOrEmpty(searchString))
    {
        request.SearchString = searchString;
    }

    ContentSearchResult result = await client.Content.SearchAsync(request);
    if (result.Results.Count == 0)
    {
        return string.Empty;
    }

    int pick = SafeRandomNext(result.Results.Count);
    return result.Results.ElementAt(pick).Id;
}
// Maps every search hit matching the given template to a
// ContentSearchResultItem and appends it to the result; hits whose
// Sitecore item cannot be resolved are skipped.
private static void UpdateResultWithItem(
    ContentSearchResult contentSearchResult,
    IEnumerable<SearchHit<SearchResultItem>> searchHitCollection,
    ID templateId)
{
    foreach (var hit in searchHitCollection)
    {
        if (hit.Document.TemplateId != templateId)
        {
            continue;
        }

        var item = ItemHelper.GetItem(hit.Document.ItemId);
        if (item == null)
        {
            continue;
        }

        contentSearchResult.ContentSearchResultItems.Add(new ContentSearchResultItem
        {
            Title = item[PublicationBaseTemplate.Fields.Title],
            Url = LinkManager.GetItemUrl(item)
        });
    }
}
// Runs the query against every configured search root, merges the hits,
// and maps the top results (by score) to provider SearchResults.
public override IEnumerable<SearchResult> Search(Query query)
{
    CultureInfo language = Language.GetRequestLanguage();

    // Fall back to the provider's own root when none were supplied (or when forced).
    if (!query.SearchRoots.Any() || ForceRootLookup)
    {
        query.SearchRoots = new[] { GetSearchRoot() };
    }

    var hits = new List<ContentSearchHit<TContentData>>();

    foreach (string searchRoot in query.SearchRoots)
    {
        int searchRootId;
        if (!Int32.TryParse(searchRoot, out searchRootId) && searchRoot.Contains("__"))
        {
            // Roots may be encoded as "<id>__<suffix>"; use the id part.
            Int32.TryParse(searchRoot.Split(new[] { "__" }, StringSplitOptions.None)[0], out searchRootId);
        }

        if (searchRootId == 0)
        {
            continue;
        }

        IElasticSearchService<TContentData> searchQuery = CreateQuery(query, language, searchRootId);
        ContentSearchResult<TContentData> contentSearchResult = searchQuery.GetContentResults(false, GetProviderKeys());
        hits.AddRange(contentSearchResult.Hits);
    }

    return hits
        .OrderByDescending(hit => hit.Score)
        .Take(_elasticSearchSettings.ProviderMaxResults)
        .Select(hit => CreateSearchResult(hit.Content));
}
// Save each keyword and the number of times it occurs on a parent page.
// Any persistence failure is logged together with the serialized keyword
// list rather than rethrown (best-effort save).
public static void SaveTheKeywords(ContentSearchResult searchResults, IndexedPage pg)
{
    List<PageKeyWord> rows = new List<PageKeyWord>();
    try
    {
        // Build one row per keyword ranking for this page.
        foreach (KeywordRanking ranking in searchResults.KeyWordRankingList)
        {
            rows.Add(new PageKeyWord
            {
                PageID = pg.PageID,
                Keyword = ranking.Keyword,
                KeywordCount = ranking.Rank
            });
        }

        DB.PageKeyWords.AddRange(rows);
        DB.SaveChanges();
    }
    catch (Exception ex)
    {
        // Include the offending data in the log for diagnosis.
        string data = Services.SerializeIt.SerializeThis(searchResults.KeyWordRankingList);
        MessageLogger.LogThis(ex, data);
    }
}
// Creates one download link per content in the search result and returns
// the collected URLs. Links are requested sequentially, one per item.
private async Task<List<string>> GetContentsDownloadUrls(ContentSearchResult contentSearchResult)
{
    var downloadUrls = new List<string>();

    foreach (var content in contentSearchResult.Results)
    {
        var request = new ContentDownloadLinkCreateRequest()
        {
            Contents = new List<ContentDownloadRequestItem>
            {
                new ContentDownloadRequestItem
                {
                    ContentId = content.Id,
                    OutputFormatId = "Original",
                }
            }
        };

        var link = await _client.Content.CreateDownloadLinkAsync(request).ConfigureAwait(false);
        downloadUrls.Add(link.DownloadUrl);
    }

    return downloadUrls;
}
/// <summary>Downloads multiple files, throttled to a maximum number of
/// concurrent downloads. Per-file failures are routed to
/// <paramref name="errorDelegate"/> rather than faulting the whole batch.</summary>
/// <param name="contents">The files to download.</param>
/// <param name="exportDirectory">The directory to store the downloaded files (created if missing).</param>
/// <param name="overwriteIfExists">Specifies whether to overwrite files.</param>
/// <param name="concurrentDownloads">Specifies the number of concurrent downloads.</param>
/// <param name="outputFormat">The output format name (e.g. 'Original').</param>
/// <param name="outputExtension">The expected output file extension.</param>
/// <param name="successDelegate">The success delegate/callback.</param>
/// <param name="errorDelegate">The error delegate/callback.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The task.</returns>
public async Task DownloadFilesAsync(
    ContentSearchResult contents,
    string exportDirectory,
    bool overwriteIfExists,
    int concurrentDownloads = 4,
    string outputFormat = "Original",
    string outputExtension = "",
    Action<ContentDetail> successDelegate = null,
    Action<Exception> errorDelegate = null,
    CancellationToken cancellationToken = default(CancellationToken))
{
    List<Task> allTasks = new List<Task>();

    // Limits concurrent downloads.
    // FIX: SemaphoreSlim is IDisposable; dispose it once the batch completes.
    using (SemaphoreSlim throttler = new SemaphoreSlim(concurrentDownloads))
    {
        // Create directory if it does not exist.
        if (!Directory.Exists(exportDirectory))
        {
            Directory.CreateDirectory(exportDirectory);
        }

        foreach (var content in contents.Results)
        {
            // Acquire a slot before scheduling the next download.
            await throttler.WaitAsync(cancellationToken).ConfigureAwait(false);

            allTasks.Add(Task.Run(async () =>
            {
                try
                {
                    var contentDetail = await GetAsync(content.Id, cancellationToken).ConfigureAwait(false);
                    var metadata = contentDetail.GetFileMetadata();
                    string fileNameOriginal = metadata.FileName;

                    try
                    {
                        // NOTE(review): Replace() swaps EVERY occurrence of the extension
                        // substring in the name (e.g. "a.txt.txt"), not just the trailing
                        // one; kept as-is to preserve existing behavior — confirm intent.
                        var fileName = string.IsNullOrEmpty(outputExtension)
                            ? fileNameOriginal
                            : fileNameOriginal.Replace(Path.GetExtension(fileNameOriginal), outputExtension);

                        if (string.IsNullOrEmpty(fileName))
                        {
                            throw new Exception("Filename empty: " + metadata);
                        }

                        var filePath = Path.Combine(exportDirectory, fileName);
                        if (!new FileInfo(filePath).Exists || overwriteIfExists)
                        {
                            try
                            {
                                using (var response = await DownloadAsync(content.Id, outputFormat, cancellationToken: cancellationToken).ConfigureAwait(false))
                                using (var fileStream = File.Create(filePath))
                                {
                                    // FIX: was the blocking Stream.CopyTo; use the async copy
                                    // so the worker thread is not blocked on file I/O.
                                    await response.Stream.CopyToAsync(fileStream).ConfigureAwait(false);
                                }

                                successDelegate?.Invoke(contentDetail);
                            }
                            catch (Exception ex)
                            {
                                errorDelegate?.Invoke(ex);
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        errorDelegate?.Invoke(ex);
                    }
                }
                finally
                {
                    // Always free the slot, even when GetAsync/metadata failed
                    // (those exceptions still propagate through Task.WhenAll).
                    throttler.Release();
                }
            }));
        }

        await Task.WhenAll(allTasks).ConfigureAwait(false);
    }
}
/// <summary>
/// Updates the content search result with facets.
/// </summary>
/// <param name="facetResults">All possible facets.</param>
/// <param name="facetResultsAfterSearch">Facets that contain results after querying using the search text (may be null when no filter was applied); used to get the result count per facet.</param>
/// <param name="searchResults">The collection of SearchResultItems after the search on searchtext and filtering on templates.</param>
/// <param name="contentSearchResult">Result object which is updated with facets.</param>
private void UpdateResultWithFacets(FacetResults facetResults, FacetResults facetResultsAfterSearch, SearchResults<SearchResultItem> searchResults, ContentSearchResult contentSearchResult)
{
    var allFacets = facetResults.Categories.FirstOrDefault();
    FacetCategory searchFacets = facetResultsAfterSearch?.Categories.FirstOrDefault();
    var searchResultFacets = searchResults.Facets.Categories.FirstOrDefault();

    // Without both the full facet set and the post-search facets
    // there is nothing meaningful to expose; leave Facets untouched.
    if (allFacets == null || searchFacets == null)
    {
        return;
    }

    contentSearchResult.Facets = allFacets.Values
        .Select(facet => GetFacet(facet, searchFacets, searchResultFacets))
        .OrderBy(facet => facet.Name)
        .ToList();
}