// Full-text search weighted toward the document title. Builds a bool/should query with
// two multi_match clauses:
//   1) an exact-analysis clause over Title (plus its Stemming/Original/Shingles
//      sub-fields) and Text, boosted x2, and
//   2) a fuzzy clause (auto fuzziness) over the Title sub-fields only.
// Per-index tuning (min_should_match, prefix length, max expansions) is loaded into
// request.SearchParams before the query is built.
// NOTE(review): Stemming/Original/Shingles are presumably analyzer-specific sub-field
// suffixes configured in the index mapping — confirm against the index template.
public Task<IList<TResponse>> SearchWithTitleAsync<TResponse>(ElasticSearchRequest request) where TResponse : SearchableWithTitle
{
    // Resolve per-index search tuning before constructing the query clauses.
    request.SearchParams = GetIndexSearchParams(request.Index);
    return (_elasticsearchClient.SearchAsync<TResponse>(request, q => q.Bool(s => s
        .Should(
            // Clause 1: exact matching across title variants and body text, boosted.
            qs => qs.MultiMatch(c => c
                .Query(request.Text)
                .MinimumShouldMatch(request.SearchParams.MinShouldMatchPercentage)
                .Type(TextQueryType.MostFields)
                .Fields(f => f
                    .Field(p => p.Title)
                    .Field(p => p.Title.Suffix(Stemming))
                    .Field(p => p.Title.Suffix(Original))
                    .Field(p => p.Title.Suffix(Shingles))
                    .Field(p => p.Text))
                .Boost(2)),
            // Clause 2: typo-tolerant matching restricted to title sub-fields.
            qs => qs.MultiMatch(c => c
                .Query(request.Text)
                .MinimumShouldMatch(request.SearchParams.MinShouldMatchPercentage)
                .Fuzziness(Fuzziness.Auto)
                .PrefixLength(request.SearchParams.PrefixLength)
                .MaxExpansions(request.SearchParams.MaxExpansions)
                .Type(TextQueryType.MostFields)
                .Fields(f => f
                    .Field(p => p.Title.Suffix(Stemming))
                    .Field(p => p.Title.Suffix(Original))
                    .Field(p => p.Title.Suffix(Shingles))))
        ))));
}
/// <summary>
/// Exports every comment matching the request's query/sort as a downloadable CSV file.
/// </summary>
public IActionResult ExportAsCSV([FromBody] ElasticSearchRequest request)
{
    // Unpaged fetch: the export always covers the full result set.
    IEnumerable<ScrapedComment> comments = CommentScraper.All(request.Query, request.Sort).Data;
    byte[] csvBytes = CsvSerialization.Serialize(comments, CsvSerialization.MapComment);
    return File(csvBytes, "text/csv", "export.csv");
}
/// <summary>
/// Returns one page of page metadata, re-sorted alphabetically by name for display.
/// </summary>
public PagedResponse AllPages([FromBody] ElasticSearchRequest request)
{
    var page = PageRepository.Paged(request.PageNumber, request.PageSize, request.Query, request.Sort);
    // The repository's sort order is overridden with a stable by-name ordering.
    page.Data = page.Data.OrderBy(p => p.Name);
    return page;
}
/// <summary>
/// Exports the full page-scrape history matching the request's query/sort as a CSV download.
/// </summary>
public IActionResult ExportPagesAsCSV([FromBody] ElasticSearchRequest request)
{
    // Unpaged fetch: exports always cover the entire matching history.
    IEnumerable<PageScrapeHistory> scrapeHistory = PageScrapeHistoryRepository.All(request.Query, request.Sort).Data;
    byte[] csvBytes = CsvSerialization.Serialize(scrapeHistory, CsvSerialization.MapPageScrape);
    return File(csvBytes, "text/csv", "export.csv");
}
// Integration test: runs a live search against the "classification" index, then issues
// a follow-up request that resumes from the last hit's "sort" values (Elasticsearch
// search_after paging).
// NOTE(review): requires the ELASTICSEARCH_* environment variables and network access
// to the cluster; blocking on .Result can deadlock in sync-context environments, and
// the test asserts nothing — it only verifies the two calls complete without throwing.
// Consider making it async Task and asserting on the second page's hits.
public void SearchAfterTest()
{
    var elasticClient = new ElasticSearchClient(
        new HttpClient(),
        Environment.GetEnvironmentVariable("ELASTICSEARCH_API_ENDPOINT_FOUNDATION"),
        Environment.GetEnvironmentVariable("ELASTICSEARCH_API_KEY_GONZALEZ_ART_FOUNDATION_ADMIN"));

    // First page: no search_after cursor (null).
    var result = elasticClient.SendToElasticSearch(
        HttpMethod.Get,
        "/classification/_search",
        ElasticSearchRequest.GetSearchRequestBody(
            "",
            "Sir Lawrence Alma-Tadema",
            100,
            null)
    ).Result;

    // Second page: pass the last hit's sort values to resume after it.
    var resultAfter = elasticClient.SendToElasticSearch(
        HttpMethod.Get,
        "/classification/_search",
        ElasticSearchRequest.GetSearchRequestBody(
            "",
            "Sir Lawrence Alma-Tadema",
            100,
            JObject.Parse(result)["hits"]["hits"].Last["sort"]
        )
    ).Result;
}
/// <summary>
/// Exports every comment matching the request's query/sort as a downloadable JSON file.
/// </summary>
public IActionResult ExportAsJson([FromBody] ElasticSearchRequest request)
{
    // Unpaged fetch: the export covers the full matching result set.
    IEnumerable<ScrapedComment> comments = CommentScraper.All(request.Query, request.Sort).Data;
    byte[] payload = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(comments));
    return File(payload, "application/json-download", "export.json");
}
/// <summary>
/// Full-text product search: matches against the product name plus the nested
/// supplier/category company names, applies filters derived from the search args,
/// and maps the raw index hits to API models.
/// </summary>
public async Task<PartialCollectionModel<SearchableProduct>> SearchProducts(SearchArgs searchArgs)
{
    // Fields to match; Path marks nested objects so the query targets the nested
    // documents rather than the root document.
    var fieldsToSearch = new Collection<TextSearchField<ProductIndexItem>>
    {
        new() { Field = field => field.ProductName },
        new() { Path = path => path.Supplier, Field = field => field.Supplier.CompanyName },
        new() { Path = path => path.Category, Field = field => field.Category.CategoryName }
    };
    var filters = _searchFiltersService.CreateFilters(searchArgs);
    // Fluent pipeline: query -> sort -> built request.
    var request = await ElasticSearchRequest<ProductIndexItem>
        .Init(_elasticSearchService)
        .Pipe(searchRequest => searchRequest.CreateSearchRequestQuery(searchArgs, fieldsToSearch, filters))
        .CreateSort(searchArgs)
        .PipeAsync(async searchRequest => await searchRequest.BuildAsync());
    var response = await _elasticSearchService.SearchAsync(request);
    var products = response.SearchResults.Documents;
    return (new PartialCollectionModel<SearchableProduct>
    {
        Values = _mapper.Map<IReadOnlyCollection<SearchableProduct>>(products),
        Count = response.SearchResults.Total,
        // NOTE(review): Offset is echoed back as searchArgs.Offset + 1 — presumably a
        // zero-based to one-based conversion; confirm with the API's consumers.
        Offset = searchArgs.Offset + 1,
        Limit = response.SearchResults.Hits.Count
    });
}
}
// A freshly constructed request starts at offset zero with no size cap, no fields,
// no sort options and no filter.
public void ConstructorHasSensibleDefaultValues()
{
    var sut = new ElasticSearchRequest();

    Assert.Equal(0, sut.From);
    Assert.Null(sut.Size);
    Assert.Empty(sut.Fields);
    Assert.Empty(sut.SortOptions);
    Assert.Null(sut.Filter);
}
// The translate result must expose exactly the instances it was constructed with.
public void ConstructorSetsProperties()
{
    var searchRequest = new ElasticSearchRequest { Type = "someType" };
    var materializer = new ElasticManyHitsMaterializer(o => o, typeof(ElasticConnectionTests));

    var sut = new ElasticTranslateResult(searchRequest, materializer);

    Assert.Same(searchRequest, sut.SearchRequest);
    Assert.Same(materializer, sut.Materializer);
}
// A connection constructed without credentials must not attach an Authorization header.
public static async Task NoAuthorizationWithEmptyUserName()
{
    var handler = new SpyMessageHandler();
    var connection = new ElasticConnection(handler, new Uri("http://localhost"));
    var processor = new ElasticRequestProcessor(connection, mapping, log, retryPolicy);

    await processor.SearchAsync(new ElasticSearchRequest { Type = "docType" });

    Assert.Null(handler.Request.Headers.Authorization);
}
// Golden-master test: the generated search body for the Rijksmuseum source must match
// the expected Elasticsearch DSL exactly — a multi_match over artist^2/name/date, a
// source.keyword term filter, track_total_hits, and a deterministic _score + _id sort
// (the _id tiebreaker makes search_after paging stable).
public void TestSearchJson()
{
    var json = ElasticSearchRequest.GetSearchRequestBody(RijksmuseumIndexer.Source, "lawrence", 100, null);
    Console.WriteLine(json);
    // NOTE: this comparison is whitespace-sensitive; any formatting change in the
    // serializer will fail the assertion.
    var expected = @"{ ""track_total_hits"": true, ""query"": { ""bool"": { ""must"": { ""multi_match"": { ""query"": ""lawrence"", ""type"": ""best_fields"", ""fields"": [ ""artist^2"", ""name"", ""date"" ] } }, ""filter"": { ""bool"": { ""must"": [ { ""term"": { ""source.keyword"": ""https://www.rijksmuseum.nl"" } } ] } } } }, ""size"": 100, ""sort"": [ { ""_score"": { ""order"": ""desc"" } }, { ""_id"": { ""order"": ""asc"" } } ] }";
    Assert.AreEqual(expected, json.ToString());
}
// A non-2xx response from the cluster must surface as an HttpRequestException with
// the standard status-code message.
public static void NonSuccessfulHttpRequestThrows()
{
    var handler = new SpyMessageHandler();
    handler.Response.StatusCode = HttpStatusCode.NotFound;
    var connection = new ElasticConnection(handler, new Uri("http://localhost"), "myUser", "myPass");
    var processor = new ElasticRequestProcessor(connection, mapping, log, retryPolicy);
    var searchRequest = new ElasticSearchRequest { Type = "docType" };

    // Synchronous test method, so the async call is drained deliberately here.
    var ex = Record.Exception(() => processor.SearchAsync(searchRequest).GetAwaiter().GetResult());

    Assert.IsType<HttpRequestException>(ex);
    Assert.Equal("Response status code does not indicate success: 404 (Not Found).", ex.Message);
}
// When a username/password pair is supplied, the outgoing request must carry a Basic
// Authorization header encoding exactly "user:pass".
public static async Task ForcesBasicAuthorizationWhenProvidedWithUsernameAndPassword()
{
    var handler = new SpyMessageHandler();
    var connection = new ElasticConnection(handler, new Uri("http://localhost"), "myUser", "myPass");
    var processor = new ElasticRequestProcessor(connection, mapping, log, retryPolicy);

    await processor.SearchAsync(new ElasticSearchRequest { Type = "docType" });

    var auth = handler.Request.Headers.Authorization;
    Assert.NotNull(auth);
    Assert.Equal("Basic", auth.Scheme);
    // Decode the parameter to verify the credential pair rather than the base64 form.
    Assert.Equal("myUser:myPass", Encoding.ASCII.GetString(Convert.FromBase64String(auth.Parameter)));
}
/// <summary>
/// Full-text supplier search over the company name, sorted per the search args,
/// with the raw index hits mapped to API models.
/// </summary>
public async Task<IList<SearchableSupplier>> GetSuppliers(SearchArgs searchArgs)
{
    // Only the company name participates in text matching.
    var searchFields = new Collection<TextSearchField<SupplierIndexItem>>
    {
        new() { Field = field => field.CompanyName }
    };

    // Fluent pipeline: query -> sort -> built request.
    var request = await ElasticSearchRequest<SupplierIndexItem>
        .Init(_elasticSearchService)
        .CreateSearchRequestQuery(searchArgs, searchFields)
        .CreateSort(searchArgs)
        .PipeAsync(async searchRequest => await searchRequest.BuildAsync());

    var response = await _elasticSearchService.SearchAsync(request);
    return _mapper.Map<IList<SearchableSupplier>>(response.SearchResults.Documents);
}
/// <summary>
/// Translates an ElasticSearchRequest into a NEST search descriptor: index, query,
/// sort, paging (From/Take) and optional aggregations.
/// </summary>
private static ISearchRequest CreateSearchDescriptor<TEntity>(ElasticSearchRequest<TEntity> elasticSearchRequest, SearchDescriptor<TEntity> descriptor, string indexName) where TEntity : class
{
    var sort = elasticSearchRequest.Sort;
    var searchDescriptor = descriptor
        .Index(indexName)
        .Query(elasticSearchRequest.Query)
        .Sort(sort)
        .From(elasticSearchRequest.Offset)
        .Take(elasticSearchRequest.Limit);
    if (elasticSearchRequest.Aggregation != null)
    {
        // NOTE(review): the return value of Aggregations(...) is discarded — this
        // relies on NEST descriptors mutating in place; confirm against the NEST
        // version in use.
        searchDescriptor.Aggregations(elasticSearchRequest.Aggregation);
    }

    return (searchDescriptor);
}
// Verifies the request processor emits exactly four VERBOSE log entries per search:
// the request line, the request body, the response status (with timing), and the
// deserialization statistics.
public static async Task LogsDebugMessagesDuringExecution()
{
    var responseString = BuildResponseString(2, 1, 1, 0.3141, "testIndex", "testType", "testId");
    var messageHandler = new SpyMessageHandler();
    var log = new SpyLog();
    messageHandler.Response.Content = new StringContent(responseString);
    // index: "SearchIndex" feeds into the request URL asserted below.
    var localConnection = new ElasticConnection(messageHandler, new Uri("http://localhost"), "myUser", "myPass", index: "SearchIndex");
    var processor = new ElasticRequestProcessor(localConnection, mapping, log, retryPolicy);
    var request = new ElasticSearchRequest { Type = "abc123", Size = 2112 };

    await processor.SearchAsync(request);

    Assert.Equal(4, log.Messages.Count);
    // URL is composed from the connection's index and the request type.
    Assert.Equal(@"[VERBOSE] Request: POST http://localhost/SearchIndex/abc123/_search", log.Messages[0]);
    Assert.Equal(@"[VERBOSE] Body: {""size"":2112,""timeout"":""10s""}", log.Messages[1]);
    // Timing values vary per run, so these two are matched with regexes.
    Assert.True(new Regex(@"\[VERBOSE\] Response: 200 OK \(in \d+ms\)").Match(log.Messages[2]).Success);
    Assert.True(new Regex(@"\[VERBOSE\] De-serialized \d+ bytes into 1 hits in \d+ms").Match(log.Messages[3]).Success);
}
/// <summary>
/// Executes the search against the index derived from <typeparamref name="TEntity"/>.
/// On success the raw NEST response is attached; on failure the response carries
/// IsOk = false plus the originating exception and its message.
/// </summary>
public async Task<ElasticSearchResponse<TEntity>> SearchAsync<TEntity>(ElasticSearchRequest<TEntity> elasticSearchRequest) where TEntity : class
{
    var indexName = ElasticSearchExtensions.GetIndexNameFrom<TEntity>();
    var searchResponse = await _elasticClient
        .SearchAsync<TEntity>(s => CreateSearchDescriptor(elasticSearchRequest, s, indexName))
        .ConfigureAwait(false);

    var response = new ElasticSearchResponse<TEntity>();
    if (searchResponse.IsValid)
    {
        response.SearchResults = searchResponse;
        return response;
    }

    // Invalid response: surface the underlying failure to the caller.
    response.IsOk = false;
    response.ErrorMessage = searchResponse.OriginalException?.Message;
    response.Exception = searchResponse.OriginalException;
    return response;
}
/// <summary>
/// Runs the supplied query against the request's index and returns the matching
/// documents, using the request's Offset/Count for paging.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the request or its index is missing.</exception>
/// <exception cref="ElasticException">Thrown when the cluster returns a non-200 status.</exception>
public async Task<IList<TResponse>> SearchAsync<TResponse>(ElasticSearchRequest request, Func<QueryContainerDescriptor<TResponse>, QueryContainer> query) where TResponse : SearchableText
{
    // BUG FIX: the original passed nameof(request.Index) ("Index") as the
    // ArgumentException *message*; supply a real message and the parameter name.
    if (string.IsNullOrEmpty(request?.Index))
    {
        throw new ArgumentException("The search request must specify a non-empty Index.", nameof(request));
    }

    var searchResponse = await _elasticClient.SearchAsync<TResponse>(s => s
        .Index(request.Index)
        .Type(DefaultType)
        .From(request.Offset)
        .Size(request.Count)
        .Query(query));

    if (searchResponse.ApiCall.HttpStatusCode != (int)HttpStatusCode.OK)
    {
        throw new ElasticException(
            $"{searchResponse.ApiCall.Uri} error: response status code {searchResponse.ApiCall.HttpStatusCode}");
    }

    // Never hand callers a null collection — fall back to an empty list.
    return searchResponse.Documents?.ToList() ?? new List<TResponse>();
}
/// <summary>
/// Captures the connection and search request shared by all request formatters.
/// </summary>
protected RequestFormatter(ElasticConnection connection, ElasticSearchRequest searchRequest)
{
    SearchRequest = searchRequest;
    Connection = connection;
}
/// <summary>
/// Returns one page of page-scrape history matching the request's query and sort.
/// </summary>
public PagedResponse<PageScrapeHistory> AllScrapeHistory([FromBody] ElasticSearchRequest request)
{
    var paged = PageScrapeHistoryRepository.Paged(request.PageNumber, request.PageSize, request.Query, request.Sort);
    return paged;
}
/// <summary>
/// Returns one page of scraped comments matching the request's query and sort.
/// </summary>
public PagedResponse<ScrapedComment> AllComments([FromBody] ElasticSearchRequest request)
{
    var paged = CommentScraper.Paged(request.PageNumber, request.PageSize, request.Query, request.Sort);
    return paged;
}
/// <summary>
/// Formatter that renders the search request as a POST body.
/// </summary>
public PostBodyRequestFormatter(ElasticConnection connection, IElasticMapping mapping, ElasticSearchRequest searchRequest)
    : base(connection, searchRequest)
{
    this.mapping = mapping;

    // Build the JSON payload lazily so it is produced at most once, on first use.
    body = new Lazy<string>(() => CreateJsonPayload().ToString(Formatting.None));
}
/// <summary>
/// Returns one page of page scrapes matching the request's query and sort.
/// </summary>
public PagedResponse AllScrapes([FromBody] ElasticSearchRequest request)
{
    var paged = PageScraper.Paged(request.PageNumber, request.PageSize, request.Query, request.Sort);
    return paged;
}
/// <summary>
/// Pairs a translated search request with the materializer that will turn its
/// hits back into CLR objects.
/// </summary>
public ElasticTranslateResult(ElasticSearchRequest searchRequest, IElasticMaterializer materializer)
{
    this.materializer = materializer;
    this.searchRequest = searchRequest;
}
/// <summary>
/// Creates a formatter that serializes the search request into a POST body.
/// </summary>
public PostBodyRequestFormatter(ElasticConnection connection, IElasticMapping mapping, ElasticSearchRequest searchRequest)
    : base(connection, searchRequest)
{
    this.mapping = mapping;

    // Defer JSON generation until the body is first requested; cached thereafter.
    body = new Lazy<string>(() => CreateJsonPayload().ToString(Formatting.None));
}
/// <summary>
/// Returns one page of post-scrape history matching the request's query and sort.
/// </summary>
public PagedResponse AllScrapes(ElasticSearchRequest request)
{
    var paged = PostScrapeHistoryRepository.Paged(request.PageNumber, request.PageSize, request.Query, request.Sort);
    return paged;
}