public void TermsStatsScript()
{
    // terms_stats facet driven by key/value scripts with scripted params.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .QueryRawJson(@"{ raw : ""query""}")
        .FacetTermsStats("date_minute", facet => facet
            .KeyScript("doc['date'].date.minuteOfHour * factor1")
            .ValueScript("doc['num1'].value * factor2")
            .Order(TermsStatsOrder.reverse_max)
            .Params(parameters => parameters
                .Add("factor1", 2)
                .Add("factor2", 3)
                .Add("randomString", "stringy")
            )
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""date_minute"" : { terms_stats : { key_script : ""doc['date'].date.minuteOfHour * factor1"", value_script : ""doc['num1'].value * factor2"", order : ""reverse_max"", params : { factor1 : 2, factor2 : 3, randomString: ""stringy"" } } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void GeoDistanceScript()
{
    // geo_distance facet whose bucket values come from a parameterized script.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .QueryRaw(@"{ raw : ""query""}")
        .FacetGeoDistance("geo1", facet => facet
            .OnField(f => f.Origin)
            .OnValueScript("doc['num1'].value * factor")
            .Params(p => p.Add("factor", 5))
            .PinTo(40, -70)
            .Unit(GeoUnit.mi)
            .DistanceType(GeoDistance.arc)
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""geo1"" : { geo_distance : { ""origin"" : ""40, -70"", value_script: ""doc['num1'].value * factor"", params: { factor: 5 }, unit: ""mi"", distance_type: ""arc"" } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void DateHistogramTimeZone()
{
    // date_histogram facet with rounding and an explicit time_zone offset.
    var search = new SearchDescriptor<ElasticsearchProject>()
        .From(0)
        .Size(10)
        .QueryRaw(@"{ raw : ""query""}")
        .FacetDateHistogram(facet => facet
            .OnField(f => f.StartedOn)
            .Interval(DateInterval.Day, DateRounding.Half_Floor)
            .TimeZone("-2")
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""startedOn"" : { date_histogram : { field : ""startedOn"", interval : ""day:half_floor"", time_zone : ""-2"" } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void GeoDistanceUsingHashAndOptions()
{
    // geo_distance facet pinned to a geohash, with unit and distance-type options.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .QueryRawJson(@"{ raw : ""query""}")
        .FacetGeoDistance("geo1", facet => facet
            .OnValueField(f => f.Origin)
            .PinTo("drm3btev3e86")
            .Unit(GeoUnit.mi)
            .DistanceType(GeoDistance.arc)
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""geo1"" : { geo_distance : { ""pin.location"" : ""drm3btev3e86"", value_field: ""origin"", unit: ""mi"", distance_type: ""arc"" } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void IndicesQuery()
{
    // indices query scoped to three indexes, with a match_all fallback for the rest.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .Indices(indices => indices
                .Indices(new[] { "elasticsearchprojects", "people", "randomindex" })
                .Query(iq => iq.Term(f => f.Name, "elasticsearch.pm"))
                .NoMatchQuery(iq => iq.MatchAll())
            )
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { indices: { query: { term : { name : { value : ""elasticsearch.pm"" } } }, no_match_query: { match_all: {} }, indices: [ ""elasticsearchprojects"", ""people"", ""randomindex"" ] } }}";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void OrFilterCacheNamed()
{
    // or filter combining match_all and missing, with _cache and _name set.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Filter(filter => filter.Cache(true).Name("and_filter")
            .Or(
                f => f.MatchAll(),
                f => f.Missing(p => p.LOC)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, filter : { ""or"": { ""filters"": [ { ""match_all"": {} }, { ""missing"": { ""field"": ""loc"" } } ], _cache:true, _name:""and_filter"" } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void DateHistogram()
{
    // date_histogram facet with a day interval and a factor multiplier.
    var search = new SearchDescriptor<ElasticsearchProject>()
        .From(0)
        .Size(10)
        .QueryRaw(@"{ raw : ""query""}")
        .FacetDateHistogram(facet => facet
            .OnField(f => f.StartedOn)
            .Interval(DateInterval.Day)
            .Factor(1000)
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""startedOn"" : { date_histogram : { field : ""startedOn"", interval : ""day"", factor: 1000 } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void NumericRange()
{
    // numeric_range filter with an exclusive lower bound.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Filter(filter => filter
            .NumericRange(range => range
                .OnField(f => f.LOC)
                .From(10)
                .To(20)
                .FromExclusive()
            )
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, filter : { numeric_range: { ""loc.sort"": { from: 10, to: 20, from_inclusive: false } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void NumericRangeDates()
{
    // numeric_range filter over dates serialized with a custom format string.
    var format = "yyyy/MM/dd";
    var lowerBound = DateTime.UtcNow.AddYears(-1);
    var upperBound = DateTime.UtcNow.AddYears(1);

    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Filter(filter => filter
            .NumericRange(range => range
                .OnField(f => f.StartedOn)
                .From(lowerBound, format)
                .To(upperBound, format)
                .FromExclusive()
            )
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, filter : { numeric_range: { ""startedOn"": { from: """ + lowerBound.ToString(format) + @""", to: """ + upperBound.ToString(format) + @""", from_inclusive: false } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void GeoBoundingBoxFilterCacheNamed()
{
    // geo_bounding_box filter with indexed execution, cached and named.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Filter(filter => filter
            .Cache(true)
            .Name("my_geo_filter")
            .GeoBoundingBox(f => f.Origin,
                topLeftX: 40.73, topLeftY: -74.1,
                bottomRightX: 40.717, bottomRightY: -73.99,
                Type: GeoExecution.indexed
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, filter : { geo_bounding_box: { origin : { top_left: ""40.73, -74.1"", bottom_right: ""40.717, -73.99"" }, _cache : true, _name : ""my_geo_filter"", type: ""indexed"" } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void GeoDistanceFilter()
{
    // geo_distance filter with distance, unit and bbox optimization, cached and named.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Filter(filter => filter
            .Cache(true)
            .Name("my_geo_filter")
            .GeoDistance(f => f.Origin, distance => distance
                .Location(Lat: 40, Lon: -70)
                .Distance(12, GeoUnit.km)
                .Optimize(GeoOptimizeBBox.memory)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, filter : { geo_distance: { distance: 12.0, unit: ""km"", optimize_bbox: ""memory"", origin: ""40, -70"", _cache: true, _name: ""my_geo_filter"" } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void RangeInts()
{
    // range query over integers with an exclusive lower bound.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .Range(range => range
                .OnField(f => f.LOC)
                .From(10)
                .To(20)
                .FromExclusive()
            )
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { range: { ""loc.sort"": { from: 10, to: 20, include_lower: false } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void RangeDoubles()
{
    // range query over doubles using gte/lte bounds.
    var search = new SearchDescriptor<ElasticsearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .Range(range => range
                .OnField(f => f.LOC)
                .GreaterOrEquals(10.1)
                .LowerOrEquals(20.1)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { range: { ""loc"": { gte: ""10.1"", lte: ""20.1"", } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void TestRangeDateFacet()
{
    // range facet over a date field with a single upper-bounded range.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .QueryRaw(@"{ raw : ""query""}")
        .FacetRange<DateTime>(facet => facet
            .OnField(f => f.StartedOn)
            .Ranges(
                r => r.To(new DateTime(1990, 1, 1).Date)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""startedOn"" : { range : { field : ""startedOn"", ranges: [ { to: ""1990-01-01T00:00:00"" } ] } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void TestRangeDateFacetKeyField()
{
    // explicitly named range facet using key_field and value_field names.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .QueryRaw(@"{ raw : ""query""}")
        .FacetRange<DateTime>("needs_a_name", facet => facet
            .KeyField("field_name")
            .ValueField("another_field_name")
            .Ranges(
                r => r.To(new DateTime(1990, 1, 1).Date)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""needs_a_name"" : { range : { key_field : ""field_name"", value_field : ""another_field_name"", ranges: [ { to: ""1990-01-01T00:00:00"" } ] } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void HistogramTestKeyScript()
{
    // histogram facet driven by key and value scripts.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .QueryRaw(@"{ raw : ""query""}")
        .FacetHistogram("needs_a_name", facet => facet
            .KeyScript("doc['date'].date.minuteOfHour")
            .ValueScript("doc['num1'].value")
            .Interval(100)
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""needs_a_name"" : { histogram : { key_script : ""doc['date'].date.minuteOfHour"", value_script : ""doc['num1'].value"", interval : 100 } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void QueryFacetScoped()
{
    // term facet carrying a _scope plus a facet_filter.
    var search = new SearchDescriptor<ElasticsearchProject>()
        .From(0)
        .Size(10)
        .QueryRaw(@"{ raw : ""query""}")
        .FacetTerm(facet => facet
            .Scope("some_nested_query")
            .OnField(f => f.Name)
            .FacetFilter(ff => ff.Exists(f => f.Name))
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""name"" : { _scope: ""some_nested_query"", terms: { field: ""name"" }, facet_filter: { exists: { field: ""name"" } } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void SpanFirstQuery()
{
    // span_first query wrapping a boosted span_term match.
    var search = new SearchDescriptor<ElasticsearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .SpanFirst(span => span
                .MatchTerm(f => f.Name, "elasticsearch.pm", 1.1)
                .End(3)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { span_first: { match: { span_term: { name: { value: ""elasticsearch.pm"", boost: 1.1 } } }, end:3 }}}";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void HistogramTestKeyField()
{
    // histogram facet using explicit key_field and value_field names.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .QueryRaw(@"{ raw : ""query""}")
        .FacetHistogram("needs_a_name", facet => facet
            .KeyField("key_field_name")
            .ValueField("value_field_name")
            .Interval(100)
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""needs_a_name"" : { histogram : { key_field : ""key_field_name"", value_field : ""value_field_name"", interval : 100 } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void NestedQuery()
{
    // nested query on the followers path with an inner term query.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .Nested(nested => nested
                .Path(f => f.Followers[0])
                .Query(nq => nq.Term(f => f.Followers[0].FirstName, "elasticsearch.pm"))
            )
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { nested: { query: { term: { ""followers.firstName"": { value: ""elasticsearch.pm"" } } }, path: ""followers"" } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void NuRangeGtLtWithCache()
{
    // range filter with inclusive string bounds, cached and named.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Filter(filter => filter.Cache(true).Name("my_name")
            .Range(range => range
                .OnField(f => f.LOC)
                .GreaterOrEquals("10")
                .LowerOrEquals("20")
            )
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, filter : { range: { ""loc.sort"": { from: ""10"", to: ""20"", include_lower: true, include_upper: true }, _cache: true, _name : ""my_name"" } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void TextPhraseQuery()
{
    // text query of type phrase on a single field.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .TextPhrase(text => text
                .OnField(f => f.Name)
                .Query("this is a test")
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { text: { name : { type: ""phrase"", query : ""this is a test"" } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void QueryFacetGlobal()
{
    // term facet marked global, with a facet_filter applied.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(@"{ raw : ""query""}")
        .FacetTerm(facet => facet
            .OnField(f => f.Name)
            .Global()
            .FacetFilter(ff => ff.Exists(f => f.Name))
        );

    var serialized = ElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets : { ""name.sort"" : { global: true, terms: { field: ""name.sort"" }, facet_filter: { exists: { field: ""name"" } } } }, query : { raw : ""query""} }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void TestFuzzyLikeThisAllQuery()
{
    // fuzzy_like_this (flt) query exercising every available option.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .FuzzyLikeThis(flt => flt
                .OnFields(f => f.Name)
                .LikeText("elasticsearcc")
                .PrefixLength(3)
                .MaxQueryTerms(25)
                .MinimumSimilarity(3)
                .IgnoreTermFrequency(true)
                .Boost(1.1)
                .Analyzer("my_analyzer")
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { flt: { fields : [""name"" ], like_text : ""elasticsearcc"", ignore_tf: true, max_query_terms: 25, min_similarity: 3, prefix_length: 3, boost: 1.1, analyzer: ""my_analyzer"" }}}";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void AndFilter()
{
    // and filter combining match_all and missing.
    var search = new SearchDescriptor<ElasticsearchProject>()
        .From(0)
        .Size(10)
        .Filter(filter => filter
            .And(
                f => f.MatchAll(),
                f => f.Missing(p => p.LOC)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, filter : { ""and"": { ""filters"": [ { ""match_all"": {} }, { ""missing"": { ""field"": ""loc"" } } ] } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void HasChildThisQuery()
{
    // has_child query with a scope and avg score_type.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .HasChild<Person>(child => child
                .Query(cq => cq.Term(f => f.FirstName, "john"))
                .Scope("my_scope")
                .Score(ChildScoreType.avg)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { has_child: { type: ""people"", _scope: ""my_scope"", score_type: ""avg"", query: { term: { firstName: { value: ""john"" } } } }}}";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void HasChildFilter()
{
    // has_child filter wrapping a term query on the child type.
    var search = new SearchDescriptor<ElasticsearchProject>()
        .From(0)
        .Size(10)
        .Filter(filter => filter
            .HasChild<Person>(child => child
                .Query(q => q.Term(p => p.FirstName, "value"))
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, filter : { ""has_child"": { ""type"": ""person"", ""query"": { ""term"": { ""firstName"": { ""value"": ""value"" } } } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void TermsFilterWithConditionlessQueryWithCache()
{
    // a conditionless facet_filter (term with an empty value) should drop out of the output.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .FacetTerm(facet => facet
            .FacetFilter(ff => ff.Query(fq => fq.Term("somefield", "")))
            .AllTerms()
            .OnField(p => p.Name)
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, facets: { name: { terms: { field: ""name"", all_terms: true } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
public void RangeDatesCustom()
{
    // range query over dates serialized with a custom format string.
    var format = "yyyy/MM/dd";
    var lowerBound = DateTime.UtcNow.AddYears(-1);
    var upperBound = DateTime.UtcNow.AddYears(1);

    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .Range(range => range
                .OnField(f => f.StartedOn)
                .From(lowerBound, format)
                .To(upperBound, format)
                .FromExclusive()
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { range: { ""startedOn"": { from: """ + lowerBound.ToString(format) + @""", to: """ + upperBound.ToString(format) + @""", include_lower: false } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized + Environment.NewLine + expectedJson);
}
public void TextPhraseQuerySomeOptions()
{
    // text phrase query with fuzziness, analyzer and prefix_length options.
    var search = new SearchDescriptor<ElasticSearchProject>()
        .From(0)
        .Size(10)
        .Query(q => q
            .TextPhrase(text => text
                .OnField(f => f.Name)
                .Query("this is a test")
                .Fuzziness(1.0)
                .Analyzer("my_analyzer")
                .PrefixLength(2)
            )
        );

    var serialized = TestElasticClient.Serialize(search);
    var expectedJson = @"{ from: 0, size: 10, query : { text: { name : { type: ""phrase"", query : ""this is a test"", analyzer : ""my_analyzer"", fuzziness: 1.0, prefix_length: 2 } } } }";

    Assert.True(serialized.JsonEquals(expectedJson), serialized);
}
/// <summary>
/// Runs a count-only search that buckets matching events into a date_histogram
/// (registered under the name "timelime" — note: this misspelling is used consistently
/// for both registration and lookup, so it is harmless) plus min/max occurrence-date
/// aggregations, then projects the response into a <see cref="NumbersTimelineStatsResult"/>.
/// </summary>
/// <param name="fields">Field aggregations evaluated per bucket and for the overall totals.</param>
/// <param name="utcStart">Start of the UTC range; replaced by the first event date when the filter carries no explicit start.</param>
/// <param name="utcEnd">End of the UTC range.</param>
/// <param name="systemFilter">System-level filter applied to the query.</param>
/// <param name="userFilter">Optional user-supplied filter expression.</param>
/// <param name="displayTimeOffset">Offset applied to returned dates for display; defaults to zero.</param>
/// <param name="desiredDataPoints">Approximate bucket count used to choose the histogram interval.</param>
/// <exception cref="ApplicationException">Thrown when the search response is not valid.</exception>
public async Task<NumbersTimelineStatsResult> GetNumbersTimelineStatsAsync(IEnumerable<FieldAggregation> fields, DateTime utcStart, DateTime utcEnd, IExceptionlessSystemFilterQuery systemFilter, string userFilter = null, TimeSpan? displayTimeOffset = null, int desiredDataPoints = 100) {
    if (!displayTimeOffset.HasValue) {
        displayTimeOffset = TimeSpan.Zero;
    }

    var filter = new ElasticQuery()
        .WithSystemFilter(systemFilter)
        .WithFilter(userFilter)
        .WithDateRange(utcStart, utcEnd, EventIndexType.Fields.Date)
        .WithIndexes(utcStart, utcEnd);

    // if no start date then figure out first event date
    if (!filter.DateRanges.First().UseStartDate) {
        await UpdateFilterStartDateRangesAsync(filter, utcEnd).AnyContext();
    }

    // re-read the (possibly adjusted) effective range from the filter
    utcStart = filter.DateRanges.First().GetStartDate();
    utcEnd = filter.DateRanges.First().GetEndDate();
    var interval = GetInterval(utcStart, utcEnd, desiredDataPoints);

    var descriptor = new SearchDescriptor<PersistentEvent>()
        .SearchType(SearchType.Count)
        .IgnoreUnavailable()
        .Indices(_configuration.Events.Event.GetIndexesByQuery(filter))
        .Type(_configuration.Events.Event.Name)
        .Aggregations(agg => BuildAggregations(agg
            .DateHistogram("timelime", t => t
                .Field(ev => ev.Date)
                .MinimumDocumentCount(0)
                .Interval(interval.Item1)
                .TimeZone(HoursAndMinutes(displayTimeOffset.Value))
                .Aggregations(agg2 => BuildAggregations(agg2, fields))
            )
            .Min("first_occurrence", t => t.Field(ev => ev.Date))
            .Max("last_occurrence", t => t.Field(ev => ev.Date)), fields)
        );

    _configuration.Events.Event.QueryBuilder.ConfigureSearch(filter, GetQueryOptions(), descriptor);
    var response = await _configuration.Client.SearchAsync<PersistentEvent>(descriptor).AnyContext();
    _logger.Trace(() => response.GetRequest());

    if (!response.IsValid) {
        string message = $"Retrieving stats failed: {response.GetErrorMessage()}";
        _logger.Error().Exception(response.ConnectionStatus.OriginalException).Message(message).Property("request", response.GetRequest()).Write();
        throw new ApplicationException(message, response.ConnectionStatus.OriginalException);
    }

    var stats = new NumbersTimelineStatsResult {
        Total = response.Total,
        Numbers = GetNumbers(response.Aggs, fields)
    };

    var timeline = response.Aggs.DateHistogram("timelime");
    if (timeline != null) {
        stats.Timeline.AddRange(timeline.Items.Select(i => new NumbersTimelineItem {
            Date = i.Date,
            Total = i.DocCount,
            Numbers = GetNumbers(i, fields)
        }));
    }

    // shift boundaries into display time; fall back to the requested start when no buckets matched
    stats.Start = stats.Timeline.Count > 0 ? stats.Timeline.Min(tl => tl.Date).SafeAdd(displayTimeOffset.Value) : utcStart.SafeAdd(displayTimeOffset.Value);
    stats.End = utcEnd.SafeAdd(displayTimeOffset.Value);

    var totalHours = stats.End.Subtract(stats.Start).TotalHours;
    if (totalHours > 0.0) {
        stats.AvgPerHour = stats.Total / totalHours;
    }

    // first/last occurrence only make sense when at least one bucket matched
    if (stats.Timeline.Count <= 0) {
        return (stats);
    }

    var firstOccurrence = response.Aggs.Min("first_occurrence");
    if (firstOccurrence?.Value != null) {
        stats.FirstOccurrence = firstOccurrence.Value.Value.ToDateTime().SafeAdd(displayTimeOffset.Value);
    }

    var lastOccurrence = response.Aggs.Max("last_occurrence");
    if (lastOccurrence?.Value != null) {
        stats.LastOccurrence = lastOccurrence.Value.Value.ToDateTime().SafeAdd(displayTimeOffset.Value);
    }

    return (stats);
}
/// <summary>
/// Pins the supplied descriptor to the requested index and executes it unchanged.
/// </summary>
public virtual async Task<ISearchResponse<T>> SimpleSearchAsync<T, TKey>(string indexName, SearchDescriptor<T> query) where T : ElasticEntity<TKey>
{
    query.Index(indexName);
    return await ElasticSearchClient.SearchAsync<T>(query);
}
/// <summary>
/// Searches the specified query.
/// </summary>
/// <param name="query">The query.</param>
/// <param name="searchType">Type of the search.</param>
/// <param name="entities">The entities.</param>
/// <param name="fieldCriteria">The field criteria.</param>
/// <param name="size">The size.</param>
/// <param name="from">From.</param>
/// <param name="totalResultsAvailable">The total results available.</param>
/// <returns></returns>
// NOTE(review): in the ExactMatch branch, query.Contains("@") and query.IsDigitsOnly() run
// outside the IsNullOrWhiteSpace guard — a null query would throw; confirm callers never pass null.
// NOTE(review): results.Total is read before the `results != null` check below — if searchType
// matches no case, results stays null and this throws; confirm all enum values are covered.
public override List<IndexModelBase> Search(string query, SearchType searchType, List<int> entities, SearchFieldCriteria fieldCriteria, int? size, int? from, out long totalResultsAvailable) {
    List<IndexModelBase> documents = new List<IndexModelBase>();
    totalResultsAvailable = 0;

    if (_client != null) {
        ISearchResponse<dynamic> results = null;
        List<SearchResultModel> searchResults = new List<SearchResultModel>();
        QueryContainer queryContainer = new QueryContainer();

        // add and field constraints
        var searchDescriptor = new SearchDescriptor<dynamic>().AllIndices();

        if (entities == null || entities.Count == 0) {
            searchDescriptor = searchDescriptor.AllTypes();
        } else {
            var entityTypes = new List<string>();
            foreach (var entityId in entities) {
                // get entities search model name
                var entityType = new EntityTypeService(new RockContext()).Get(entityId);
                entityTypes.Add(entityType.IndexModelType.Name.ToLower());

                // check if this is a person model, if so we need to add two model types one for person and the other for businesses
                // wish there was a cleaner way to do this
                if (entityType.Guid == SystemGuid.EntityType.PERSON.AsGuid()) {
                    entityTypes.Add("businessindex");
                }
            }

            searchDescriptor = searchDescriptor.Type(string.Join(",", entityTypes)); // todo: consider adding indexmodeltype to the entity cache
        }

        QueryContainer matchQuery = null;
        if (fieldCriteria != null && fieldCriteria.FieldValues?.Count > 0) {
            foreach (var match in fieldCriteria.FieldValues) {
                if (fieldCriteria.SearchType == CriteriaSearchType.Or) {
                    matchQuery |= new MatchQuery { Field = match.Field, Query = match.Value, Boost = match.Boost };
                } else {
                    matchQuery &= new MatchQuery { Field = match.Field, Query = match.Value };
                }
            }
        }

        switch (searchType) {
            case SearchType.ExactMatch: {
                if (!string.IsNullOrWhiteSpace(query)) {
                    queryContainer &= new QueryStringQuery { Query = query, AnalyzeWildcard = true };
                }

                // special logic to support emails
                if (query.Contains("@")) {
                    queryContainer |= new QueryStringQuery { Query = "email:" + query, Analyzer = "whitespace" }; // analyzer = whitespace to keep the email from being parsed into 3 variables because the @ will act as a delimitor by default
                }

                // special logic to support phone search
                if (query.IsDigitsOnly()) {
                    queryContainer |= new QueryStringQuery { Query = "phone:*" + query + "*", AnalyzeWildcard = true };
                }

                // add a search for all the words as one single search term
                queryContainer |= new QueryStringQuery { Query = query, AnalyzeWildcard = true, PhraseSlop = 0 };

                if (matchQuery != null) {
                    queryContainer &= matchQuery;
                }

                if (size.HasValue) {
                    searchDescriptor.Size(size.Value);
                }

                if (from.HasValue) {
                    searchDescriptor.From(from.Value);
                }

                searchDescriptor.Query(q => queryContainer);
                results = _client.Search<dynamic>(searchDescriptor);
                break;
            }
            case SearchType.Fuzzy: {
                results = _client.Search<dynamic>(d => d.AllIndices().AllTypes()
                    .Query(q => q.Fuzzy(f => f.Value(query)
                    .Rewrite(MultiTermQueryRewrite.TopTerms(size ?? 10)))));
                break;
            }
            case SearchType.Wildcard: {
                bool enablePhraseSearch = true;

                if (!string.IsNullOrWhiteSpace(query)) {
                    QueryContainer wildcardQuery = null;

                    // break each search term into a separate query and add the * to the end of each
                    var queryTerms = query.Split(' ').Select(p => p.Trim()).ToList();

                    // special logic to support emails
                    if (queryTerms.Count == 1 && query.Contains("@")) {
                        wildcardQuery |= new QueryStringQuery { Query = "email:*" + query + "*", Analyzer = "whitespace" };
                        enablePhraseSearch = false;
                    } else {
                        // We want to require each of the terms to exists for a result to be returned.
                        var searchString = "+" + queryTerms.JoinStrings("* +") + "*";
                        wildcardQuery &= new QueryStringQuery { Query = searchString, Analyzer = "whitespace", MinimumShouldMatch = "100%", MultiTermQueryRewrite = MultiTermQueryRewrite.ScoringBoolean };

                        // add special logic to help boost last names
                        if (queryTerms.Count > 1) {
                            QueryContainer nameQuery = null;
                            nameQuery &= new QueryStringQuery { Query = "lastName:" + queryTerms.Last() + "*", Analyzer = "whitespace", Boost = 30 };
                            nameQuery &= new QueryStringQuery { Query = "firstName:" + queryTerms.First() + "*", Analyzer = "whitespace" };
                            wildcardQuery |= nameQuery;
                        }

                        // special logic to support phone search
                        if (query.IsDigitsOnly()) {
                            wildcardQuery |= new QueryStringQuery { Query = "phoneNumbers:*" + query, Analyzer = "whitespace" };
                        }
                    }

                    queryContainer &= wildcardQuery;

                    // add a search for all the words as one single search term
                    if (enablePhraseSearch) {
                        var searchString = "+" + queryTerms.JoinStrings(" +");
                        queryContainer |= new QueryStringQuery { Query = searchString, AnalyzeWildcard = true, PhraseSlop = 0 };
                    }
                }

                if (matchQuery != null) {
                    queryContainer &= matchQuery;
                }

                if (size.HasValue) {
                    searchDescriptor.Size(size.Value);
                }

                if (from.HasValue) {
                    searchDescriptor.From(from.Value);
                }

                searchDescriptor.Query(q => queryContainer);

                var indexBoost = GlobalAttributesCache.Value("UniversalSearchIndexBoost");
                if (indexBoost.IsNotNullOrWhiteSpace()) {
                    var boostItems = indexBoost.Split(new char[] { '|' }, StringSplitOptions.RemoveEmptyEntries);
                    foreach (var boostItem in boostItems) {
                        var boostParms = boostItem.Split(new char[] { '^' });
                        if (boostParms.Length == 2) {
                            int boost = 1;
                            Int32.TryParse(boostParms[1], out boost);
                            searchDescriptor.IndicesBoost(b => b.Add(boostParms[0], boost));
                        }
                    }
                }

                results = _client.Search<dynamic>(searchDescriptor);
                break;
            }
        }

        totalResultsAvailable = results.Total;

        // normalize the results to rock search results
        if (results != null) {
            foreach (var hit in results.Hits) {
                IndexModelBase document = new IndexModelBase();
                try {
                    if (hit.Source != null) {
                        Type indexModelType = Type.GetType($"{ ( ( string ) ( ( JObject ) hit.Source )["indexModelType"] )}, { ( ( string ) ( ( JObject ) hit.Source )["indexModelAssembly"] )}");
                        if (indexModelType != null) {
                            document = ( IndexModelBase )(( JObject )hit.Source).ToObject(indexModelType); // return the source document as the derived type
                        } else {
                            document = (( JObject )hit.Source).ToObject <IndexModelBase>(); // return the source document as the base type
                        }
                    }

                    if (hit.Explanation != null) {
                        document["Explain"] = hit.Explanation.ToJson();
                    }

                    document.Score = hit.Score;
                    documents.Add(document);
                } catch {
                    // ignore if the result if an exception resulted (most likely cause is getting a result from a non-rock index)
                }
            }
        }
    }

    return (documents);
}
protected override SearchDescriptor<Person> BuildQuery(SearchDescriptor<Person> descriptor)
{
    // No criteria to apply: match every person document.
    return descriptor.MatchAll();
}
public void StartFilmsSearch(SearchDescriptor searchDescriptor)
{
    // Remember the descriptor for the worker, then run the search on a background task.
    _searchDescriptor = searchDescriptor;
    var searchTask = new Task(StartSearch);
    searchTask.Start();
}
// Scratch/demo entry point: builds two equivalent SKU searches against an Elasticsearch
// node, prints one serialized request body, then dumps the hits of the other.
// NOTE(review): new Uri("") throws UriFormatException at runtime — a real node address
// must be supplied before this can run.
static void Main(string[] args) {
    var nodeURI = new Uri("");
    var settings = new ConnectionSettings( nodeURI ).DefaultFieldNameInferrer(p => p);
    var client = new ElasticClient(settings);

    //var person = new Person
    //{
    //    Id = "1",
    //    Firstname = "Martijn",
    //    Lastname = "Laarman"
    //};
    // var index = client.Index(person);

    //var searchResults = client.Search<Product>(p => p
    //    .From(0).Size(10)
    //    .Query(q => q
    //        .Bool(t => t
    //            .Must(u => u
    //                .Bool(v => v
    //                    .Should(
    //                        w => w.Match(x => x.Field("SKU").Query("1508017")),
    //                        w => w.Match(x => x.Field("GTIN").Query("00886860404635"))
    //                    )
    //                )
    //            )
    //        )
    //    )
    //);

    // delegate form of the SKU query, with an extra terms clause on Status
    Func<SearchDescriptor<Product>, ISearchRequest> query = p => p
        .From(0).Size(100)
        .Query(q => q
            .Bool(t => t
                .Must(u => u
                    .Bool(v => v
                        .Should(
                            SkuBuilder("1508017", "1003319")
                        )
                    ), w => w.Terms(z => z.Field(a => a.Status).Terms("active"))
                )
            )
        );

    // descriptor form of the same SKU query (without the Status clause)
    SearchDescriptor<Product> sd = new SearchDescriptor<Product>()
        .From(0).Size(100)
        .Query(q => q
            .Bool(t => t
                .Must(u => u
                    .Bool(v => v
                        .Should(
                            SkuBuilder("1508017", "1003319")
                        )
                    )
                )
            )
        );

    // dump the serialized request body of the descriptor form
    using (MemoryStream mStream = new MemoryStream()) {
        client.Serializer.Serialize(sd, mStream);
        Console.WriteLine(Encoding.ASCII.GetString(mStream.ToArray()));
    }

    //Func<SearchDescriptor<Product>, ISearchRequest> q2 = (x) => x.
    var temo = client.Search<Product>(s => sd);
    var searchResults = client.Search(query);
    // Console.WriteLine(System.Text.Encoding.UTF8.GetString(searchResults.CallDetails.RequestBodyInBytes));

    //var searchResults = client.Search<Product>(s => s
    //    .Query(q => q
    //        .QueryString(qs => qs
    //            .Fields(f => f.Field(ff => ff.SKU))
    //            .Query("1508017")
    //        )
    //    )
    //);

    //searchResults = client.Search<Person>(p => p
    //    .From(0)
    //    .Size(10)
    //    .Query(q => q
    //        .Bool(r => r
    //            .Filter(s => s
    //                .Term(t => t.Name("Status").Value("active")))))
    //);

    foreach (var hit in searchResults.Hits) {
        Console.WriteLine(hit.Source.SKU);
    }

    Console.WriteLine("Total Hits: " + searchResults.Total);
}
/// <summary>
/// Runs a count-only search that buckets matching events by the given term, with min/max
/// occurrence-date sub-aggregations per bucket, and projects the response into a
/// <see cref="NumbersTermStatsResult"/>.
/// </summary>
/// <param name="term">Bucket field; must be one of organization_id, project_id, stack_id, tags or version.</param>
/// <param name="fields">Field aggregations evaluated per bucket and for the overall totals.</param>
/// <param name="utcStart">Start of the UTC range; replaced by the first event date when the filter carries no explicit start.</param>
/// <param name="utcEnd">End of the UTC range.</param>
/// <param name="systemFilter">System-level filter applied to the query.</param>
/// <param name="userFilter">Optional user-supplied filter expression.</param>
/// <param name="displayTimeOffset">Offset applied to returned dates for display; defaults to zero.</param>
/// <param name="max">Maximum number of term buckets returned.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="term"/> is not an allowed term.</exception>
/// <exception cref="ApplicationException">Thrown when the search response is not valid.</exception>
public async Task<NumbersTermStatsResult> GetNumbersTermsStatsAsync(string term, IEnumerable<FieldAggregation> fields, DateTime utcStart, DateTime utcEnd, IExceptionlessSystemFilterQuery systemFilter, string userFilter = null, TimeSpan? displayTimeOffset = null, int max = 25) {
    var allowedTerms = new[] { "organization_id", "project_id", "stack_id", "tags", "version" };
    if (!allowedTerms.Contains(term)) {
        throw new ArgumentException("Must be a valid term.", nameof(term));
    }

    if (!displayTimeOffset.HasValue) {
        displayTimeOffset = TimeSpan.Zero;
    }

    var filter = new ElasticQuery()
        .WithSystemFilter(systemFilter)
        .WithFilter(userFilter)
        .WithDateRange(utcStart, utcEnd, EventIndexType.Fields.Date)
        .WithIndexes(utcStart, utcEnd);

    // if no start date then figure out first event date
    if (!filter.DateRanges.First().UseStartDate) {
        await UpdateFilterStartDateRangesAsync(filter, utcEnd).AnyContext();
    }

    // re-read the (possibly adjusted) effective range from the filter
    utcStart = filter.DateRanges.First().GetStartDate();
    utcEnd = filter.DateRanges.First().GetEndDate();

    var descriptor = new SearchDescriptor<PersistentEvent>()
        .SearchType(SearchType.Count)
        .IgnoreUnavailable()
        .Indices(_configuration.Events.Event.GetIndexesByQuery(filter))
        .Type(_configuration.Events.Event.Name)
        .Aggregations(agg => BuildAggregations(agg
            .Terms("terms", t => BuildTermSort(t
                .Field(term)
                .Size(max)
                .Aggregations(agg2 => BuildAggregations(agg2
                    .Min("first_occurrence", o => o.Field(ev => ev.Date))
                    .Max("last_occurrence", o => o.Field(ev => ev.Date)), fields)
                ), fields)
            ), fields)
        );

    _configuration.Events.Event.QueryBuilder.ConfigureSearch(filter, GetQueryOptions(), descriptor);
    var response = await _configuration.Client.SearchAsync<PersistentEvent>(descriptor).AnyContext();
    _logger.Trace(() => response.GetRequest());

    if (!response.IsValid) {
        string message = $"Retrieving stats failed: {response.GetErrorMessage()}";
        _logger.Error().Exception(response.ConnectionStatus.OriginalException).Message(message).Property("request", response.GetRequest()).Write();
        throw new ApplicationException(message, response.ConnectionStatus.OriginalException);
    }

    var stats = new NumbersTermStatsResult {
        Total = response.Total,
        Start = utcStart.SafeAdd(displayTimeOffset.Value),
        End = utcEnd.SafeAdd(displayTimeOffset.Value),
        Numbers = GetNumbers(response.Aggs, fields)
    };

    var terms = response.Aggs.Terms("terms");
    if (terms != null) {
        stats.Terms.AddRange(terms.Items.Select(i => {
            var item = new NumbersTermStatsItem {
                Total = i.DocCount,
                Term = i.Key,
                Numbers = GetNumbers(i, fields)
            };

            // per-bucket first/last occurrence, shifted into display time
            var termFirstOccurrence = i.Min("first_occurrence");
            if (termFirstOccurrence?.Value != null) {
                item.FirstOccurrence = termFirstOccurrence.Value.Value.ToDateTime().SafeAdd(displayTimeOffset.Value);
            }

            var termLastOccurrence = i.Max("last_occurrence");
            if (termLastOccurrence?.Value != null) {
                item.LastOccurrence = termLastOccurrence.Value.Value.ToDateTime().SafeAdd(displayTimeOffset.Value);
            }

            return (item);
        }));
    }

    return (stats);
}
protected virtual void ApplySort<T>(SearchDescriptor<T> searchDescriptor) where T : class
{
    // Default ordering: best relevance (_score) first.
    searchDescriptor.Sort(sort => sort.Descending("_score"));
}
public static SearchDescriptor<T> FilteredOn<T>(this SearchDescriptor<T> searchDescriptor, FilterContainer container) where T : class
{
    // Wrap the prebuilt filter container in a filtered query on the descriptor.
    return searchDescriptor.Query(q => q.Filtered(filtered => filtered.Filter(f => container)));
}
protected virtual void ApplyPaging<T>(SearchDescriptor<T> searchDescriptor, SearchTextQuery query) where T : class
{
    // Only the page size is applied here; any offset is handled elsewhere.
    searchDescriptor.Take(query.Take);
}
/// <summary>
/// Translates the predicate body into an Elasticsearch filter and attaches it
/// as a top-level filter on the descriptor.
/// </summary>
public static SearchDescriptor<T> FilterOn<T>(this SearchDescriptor<T> searchDescriptor, Expression<Func<T, bool>> filterRule) where T : class
{
    var generatedFilter = GenerateFilterDescription<T>(filterRule.Body);
    return searchDescriptor.Filter(filter => generatedFilter);
}
/// <summary>
/// Executes a paged search against <paramref name="indexName"/> with optional
/// keyword highlighting and source-field filtering.
/// </summary>
/// <typeparam name="T">Document type.</typeparam>
/// <typeparam name="TKey">Key type (unused by the body; kept for interface compatibility).</typeparam>
/// <param name="indexName">Index to search.</param>
/// <param name="query">Pre-built descriptor; paging/highlight/source options are applied to it.</param>
/// <param name="skip">Number of hits to skip.</param>
/// <param name="size">Number of hits to return.</param>
/// <param name="includeFields">Source fields to include, or null for all.</param>
/// <param name="preTags">Opening highlight tag.</param>
/// <param name="postTags">Closing highlight tag.</param>
/// <param name="disableHigh">When true, highlight tags are emptied (the highlight section is still sent).</param>
/// <param name="highField">Fields to highlight.</param>
/// <returns>The raw search response.</returns>
public virtual async Task<ISearchResponse<T>> SearchAsync<T, TKey>(string indexName, SearchDescriptor<T> query, int skip, int size, string[] includeFields = null, string preTags = "<strong style=\"color: red;\">", string postTags = "</strong>", bool disableHigh = false, params string[] highField) where T : class
{
    query.Index(indexName);

    // Pagination.
    query.Skip(skip).Take(size);

    // Disabling highlighting is modelled as empty tags rather than omitting the section.
    if (disableHigh)
    {
        preTags = "";
        postTags = "";
    }

    var highlightDescriptor = new HighlightDescriptor<T>();
    highlightDescriptor.PreTags(preTags).PostTags(postTags);

    // Build one highlight-field selector per requested field.
    var fieldSelectors = new List<Func<HighlightFieldDescriptor<T>, IHighlightField>>();
    if (highField != null && highField.Length > 0)
    {
        foreach (var fieldName in highField)
        {
            fieldSelectors.Add(f => f.Field(fieldName));
        }
    }

    highlightDescriptor.Fields(fieldSelectors.ToArray());
    query.Highlight(h => highlightDescriptor);

    if (includeFields != null)
    {
        query.Source(source => source.Includes(fields => fields.Fields(includeFields.ToArray())));
    }

    var response = await _esClient.SearchAsync<T>(query);
    return response;
}
/// <summary>
/// Creates event args carrying the search descriptor being built.
/// </summary>
public SearchDescriptorEventArgs(SearchDescriptor searchDescriptor) => SearchDescriptor = searchDescriptor;
/// <summary>
/// Adds a filtered query generated from a binary predicate (e.g. <c>x =&gt; x.Age &gt; 5</c>).
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when the predicate body is not a binary expression.
/// </exception>
public static SearchDescriptor<T> FilteredOn<T>(this SearchDescriptor<T> searchDescriptor, Expression<Func<T, bool>> filterRule) where T : class
{
    // Fail fast instead of letting the unchecked `as` cast pass null into
    // GenerateFilterDescription (which previously produced a null filter or
    // a NullReferenceException downstream).
    var binaryExpression = filterRule.Body as BinaryExpression;
    if (binaryExpression == null)
        throw new ArgumentException("The filter rule must be a binary expression (e.g. x => x.Age > 5).", nameof(filterRule));

    return searchDescriptor.Query(q => q.Filtered(fil => fil.Filter(f => GenerateFilterDescription<T>(binaryExpression))));
}
/// <summary>
/// Restricts the search to a single hit filtered on the stored Discord id.
/// </summary>
public SearchDescriptor<DbUser> Process(SearchDescriptor<DbUser> descriptor)
{
    var singleHit = descriptor.Take(1);
    return singleHit.MultiQuery(q => q.Filter((FilterQuery<ulong>)_id, u => u.DiscordId));
}
/// <summary>
/// Executes the configured search (Lucene query, query container, raw request, or selector —
/// in that precedence order), records paging state and totals, and returns the response.
/// </summary>
/// <param name="skip">Number of hits to skip; null means start from the beginning.</param>
private ISearchResponse<Document> DoSearch(int? skip) {
    // Remember the last skip so callers can resume paging.
    lastskip = skip ?? 0;
    ISearchResponse<Document> searchResult;

    if (_luceneQuery != null) {
        var extractTermsSupported = CheckQueryForExtractTerms(_luceneQuery);
        if (extractTermsSupported) {
            //This try catch is because analyzers strip out stop words and sometimes leave the query
            //with null values. This simply tries to extract terms, if it fails with a null
            //reference then its an invalid null query, NotSupporteException occurs when the query is
            //valid but the type of query can't extract terms.
            //This IS a work-around, theoretically Lucene itself should check for null query parameters
            //before throwing exceptions.
            try {
                var set = new HashSet<Term>();
                _luceneQuery.ExtractTerms(set);
            } catch (NullReferenceException) {
                //this means that an analyzer has stipped out stop words and now there are
                //no words left to search on
                //it could also mean that potentially a IIndexFieldValueType is throwing a null ref
                TotalItemCount = 0;
                return(new SearchResponse<Document>() { });
            } catch (NotSupportedException) {
                //swallow this exception, we should continue if this occurs.
            }
        }

        // Convert the Lucene query into an ES query_string query with wildcard analysis.
        _queryContainer = new QueryContainer(new QueryStringQuery() { Query = _luceneQuery.ToString(), AnalyzeWildcard = true });
    }

    if (_queryContainer != null) {
        SearchDescriptor<Document> searchDescriptor = new SearchDescriptor<Document>();
        searchDescriptor.Index(_indexName)
            .Skip(skip)
            .Size(_maxResults)
            .Query(q => _queryContainer)
            .Sort(s => _sortDescriptor);

        // NOTE(review): `json` is only useful for debugging the serialized request;
        // its value is not otherwise used here.
        var json = _client.RequestResponseSerializer.SerializeToString(searchDescriptor);
        searchResult = _client.Search<Document>(searchDescriptor);
    } else if (_searchRequest != null) {
        searchResult = _client.Search<Document>(_searchRequest);
    } else {
        searchResult = _client.Search<Document>(_searchSelector);
    }

    // Expose totals and aggregations to callers via instance state.
    TotalItemCount = searchResult.Total;
    Aggregation = searchResult.Aggregations;
    return(searchResult);
}
/// <summary>
/// Runs the supplied search descriptor against a specific index with no extra options.
/// </summary>
public async Task<ISearchResponse<T>> SimpleSearchAsync<T>(string indexName, SearchDescriptor<T> query) where T : class, new()
{
    query.Index(indexName);
    return await ElasticSearchClient.SearchAsync<T>(query);
}
/// <summary>
/// Adds highlighting for matched keywords, wrapping them in the given tags.
/// </summary>
/// <typeparam name="T">Document type.</typeparam>
/// <param name="sd">Descriptor to configure.</param>
/// <param name="pre">Opening tag (default &lt;em&gt;).</param>
/// <param name="after">Closing tag (default &lt;/em&gt;).</param>
public static void AddHighlightWrapper<T>(this SearchDescriptor<T> sd, string pre = "<em>", string after = "</em>") where T : class
{
    sd.Highlight(highlight => highlight.PreTags(pre).PostTags(after));
}
/// <summary>
/// Attaches a query built through the wrapper-based fluent API to the descriptor.
/// </summary>
public static SearchDescriptor<T> MultiQuery<T>(this SearchDescriptor<T> searchDesc, Func<QueryWrapper<T>, QueryWrapper<T>> query) where T : class
{
    return searchDesc.Query(inner => inner.MultiQueryInternal(query));
}
/// <summary>
/// Builds the concrete search for <typeparamref name="TDocument"/>.
/// Implementations receive a descriptor and return it fully configured.
/// </summary>
protected abstract SearchDescriptor<TDocument> BuildQuery(SearchDescriptor<TDocument> descriptor);
/// <summary>
/// Runs a search built from <paramref name="options"/> and maps the hits to
/// <typeparamref name="TModel"/>, with optional read-through caching.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="options"/> is null.</exception>
/// <exception cref="ApplicationException">Thrown when the search response is invalid.</exception>
protected ICollection<TModel> FindAs<TModel>(ElasticSearchOptions<T> options) where TModel : class, new() {
    if (options == null) {
        throw new ArgumentNullException("options");
    }

    ICollection<TModel> result;

    // Read-through cache: return the cached collection when present.
    if (EnableCache && options.UseCache) {
        result = Cache.Get<ICollection<TModel>>(GetScopedCacheKey(options.CacheKey));
        if (result != null) {
            return(result);
        }
    }

    var searchDescriptor = new SearchDescriptor<T>().Filter(options.GetElasticSearchFilter());
    searchDescriptor.Indices(options.Indices);
    searchDescriptor.IgnoreUnavailable();
    if (options.UsePaging) {
        searchDescriptor.Skip(options.GetSkip());
    }
    searchDescriptor.Size(options.GetLimit());
    searchDescriptor.Type(typeof(T));

    // Either project only the requested fields, or exclude the internal "idx" field.
    if (options.Fields.Count > 0) {
        searchDescriptor.Source(s => s.Include(options.Fields.ToArray()));
    } else {
        searchDescriptor.Source(s => s.Exclude("idx"));
    }

    if (options.SortBy.Count > 0) {
        foreach (var sort in options.SortBy) {
            searchDescriptor.Sort(sort);
        }
    }

    // Tracing is enabled only around the request itself.
    _elasticClient.EnableTrace();
    var results = _elasticClient.Search<T>(searchDescriptor);
    _elasticClient.DisableTrace();

    if (!results.IsValid) {
        throw new ApplicationException(String.Format("ElasticSearch error code \"{0}\".", results.ConnectionStatus.HttpStatusCode), results.ConnectionStatus.OriginalException);
    }

    // More results exist when a limit was applied and the total exceeds it.
    options.HasMore = options.UseLimit && results.Total > options.GetLimit();

    var items = results.Documents.ToList();
    if (typeof(T) != typeof(TModel)) {
        // Lazily create the AutoMapper mapping the first time this pairing is used.
        if (Mapper.FindTypeMapFor<T, TModel>() == null) {
            Mapper.CreateMap<T, TModel>();
        }
        result = Enumerable.ToList(items.Select(Mapper.Map<T, TModel>));
    } else {
        result = items as List<TModel>;
    }

    if (EnableCache && options.UseCache) {
        Cache.Set(GetScopedCacheKey(options.CacheKey), result, options.GetCacheExpirationDate());
    }
    return(result);
}
/// <summary>
/// Searches log entries, optionally filtered by time range, keyword and logger name,
/// with aggregations on logger name / level / domain and optional keyword highlighting.
/// </summary>
/// <param name="highlight">Whether to wrap matched keywords in highlight tags.</param>
/// <param name="start">Inclusive lower bound on update time, or null for no bound.</param>
/// <param name="end">Exclusive upper bound on update time, or null for no bound.</param>
/// <param name="keyword">Full-text keyword to match against the message field.</param>
/// <param name="logger_name">Exact logger name to filter on.</param>
/// <param name="page">1-based page number.</param>
/// <param name="pagesize">Page size.</param>
/// <returns>Paged log lines plus aggregation extras.</returns>
public static async Task<PagerData<ESLogLine, QueryExtData>> Search(
    bool highlight  = true,
    DateTime? start = null,
    DateTime? end   = null,
    string keyword     = null,
    string logger_name = null,
    int page     = 1,
    int pagesize = 10) {
    var sd = new SearchDescriptor<ESLogLine>();
    sd = sd.Index(IndexName);
    var query = new QueryContainer();

    if (start != null) {
        query &= new DateRangeQuery() { Field = nameof(temp.UpdateTime), GreaterThanOrEqualTo = start.Value };
    }
    if (end != null) {
        query &= new DateRangeQuery() { Field = nameof(temp.UpdateTime), LessThan = end.Value };
    }
    if (ValidateHelper.IsPlumpString(keyword)) {
        // MinimumShouldMatch of 100% means every analyzed term must match.
        query &= new MatchQuery() { Field = nameof(temp.Message), Query = keyword, Operator = Operator.Or, MinimumShouldMatch = "100%" };
    }
    if (ValidateHelper.IsPlumpString(logger_name)) {
        query &= new TermQuery() { Field = nameof(temp.LoggerName), Value = logger_name };
    }

    // Query conditions
    sd = sd.Query(_ => query);

    // Aggregations: top values for logger name, level and domain.
    sd = sd.Aggregations(x => x.Terms(nameof(temp.LoggerName), av => av.Field(nameof(temp.LoggerName)).Size(1000))
        .Terms(nameof(temp.Level), av => av.Field(nameof(temp.Level)).Size(1000))
        .Terms(nameof(temp.Domain), av => av.Field(nameof(temp.Domain)).Size(1000)));

    // Highlighting
    if (highlight) {
        sd = sd.AddHighlightWrapper("<em class='kwd'>", "</em>", x => x.Field(nameof(temp.Message)));
    }

    // Sorting: newest first.
    var sort = new SortDescriptor<ESLogLine>();
    sort = sort.Descending(x => x.UpdateTime);
    // NOTE(review): sorting on an empty field name looks unintentional — verify
    // whether this secondary sort should name a real field or be removed.
    sort = sort.Descending(new Field("", boost: null));
    sd = sd.Sort(_ => sort);

    // Paging
    sd = sd.QueryPage_(page, pagesize);

    // Execute against the server.
    var client = new ElasticClient(ElasticsearchClientManager.Instance.DefaultClient);
    var re = await client.SearchAsync<ESLogLine>(_ => sd);
    re.ThrowIfException();

    var data = new PagerData<ESLogLine, QueryExtData>();
    data.ItemCount = (int)re.Total;
    data.DataList = re.Hits.Select(x => x.Source).ToList();

    // Aggregation data
    data.ExtData = new QueryExtData();

    return(data);
}
/// <summary>
/// Applies ordering to the descriptor based on the query's ordering string.
/// </summary>
public virtual void Apply(SearchDescriptor<T> searchDescriptor, SearchTextQuery query)
    => SortByName(searchDescriptor, query.OrderingString);
/// <summary>
/// Finds a single document matching <paramref name="options"/> and maps it to
/// <typeparamref name="TModel"/>, with optional read-through caching.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="options"/> is null.</exception>
protected TModel FindOneAs<TModel>(OneOptions options) where TModel : class, new() {
    if (options == null) {
        throw new ArgumentNullException("options");
    }

    TModel result = null;

    // Read-through cache with hit/miss trace logging.
    if (EnableCache) {
        if (options.UseCache) {
            result = Cache.Get<TModel>(GetScopedCacheKey(options.CacheKey));
        }
        if (options.UseCache && result != null) {
            Log.Trace().Message("Cache hit: type={0}", _entityType).Write();
        } else if (options.UseCache) {
            Log.Trace().Message("Cache miss: type={0}", _entityType).Write();
        }
        if (result != null) {
            return(result);
        }
    }

    // Size(1): only the first matching document is wanted.
    var searchDescriptor = new SearchDescriptor<T>().Filter(options.GetElasticSearchFilter<T>()).Size(1);

    // Either project only the requested fields, or exclude the internal "idx" field.
    if (options.Fields.Count > 0) {
        searchDescriptor.Source(s => s.Include(options.Fields.ToArray()));
    } else {
        searchDescriptor.Source(s => s.Exclude("idx"));
    }

    // Indices and sorting only apply when elastic-specific options were supplied.
    var elasticSearchOptions = options as ElasticSearchOptions<T>;
    if (elasticSearchOptions != null && elasticSearchOptions.SortBy.Count > 0) {
        searchDescriptor.Indices(elasticSearchOptions.Indices);
        foreach (var sort in elasticSearchOptions.SortBy) {
            searchDescriptor.Sort(sort);
        }
    }

    // Tracing is enabled only around the request itself.
    _elasticClient.EnableTrace();
    var item = _elasticClient.Search<T>(searchDescriptor).Documents.FirstOrDefault();
    _elasticClient.DisableTrace();

    if (typeof(T) != typeof(TModel)) {
        // Lazily create the AutoMapper mapping the first time this pairing is used.
        if (Mapper.FindTypeMapFor<T, TModel>() == null) {
            Mapper.CreateMap<T, TModel>();
        }
        result = Mapper.Map<T, TModel>(item);
    } else {
        result = item as TModel;
    }

    if (EnableCache && result != null && options.UseCache) {
        Cache.Set(GetScopedCacheKey(options.CacheKey), result, options.GetCacheExpirationDate());
    }
    return(result);
}
/// <summary>
/// Executes the repository query and returns results mapped to <typeparamref name="TResult"/>,
/// supporting regular limit/offset paging, snapshot (scan/scroll) paging, and result caching.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="query"/> is null.</exception>
/// <exception cref="ApplicationException">Thrown when a search or scroll response is invalid.</exception>
protected async Task<FindResults<TResult>> FindAsAsync<TResult>(IRepositoryQuery query) where TResult : class, new() {
    if (query == null) {
        throw new ArgumentNullException(nameof(query));
    }

    var pagableQuery = query as IPagableQuery;
    var pagingOptions = pagableQuery?.Options as IPagingOptions;
    var elasticPagingOptions = pagableQuery?.Options as ElasticPagingOptions;
    bool useSnapshotPaging = elasticPagingOptions?.UseSnapshotPaging ?? false;

    // don't use caching with snapshot paging.
    bool allowCaching = IsCacheEnabled && useSnapshotPaging == false;

    var queryOptions = GetQueryOptions();
    await OnBeforeQueryAsync(query, queryOptions, typeof(TResult)).AnyContext();

    // Continuation used to fetch the next page: scroll when a scroll id is present,
    // otherwise bump the page number and re-run the query.
    Func<FindResults<TResult>, Task<FindResults<TResult>>> getNextPageFunc = async r => {
        var previousResults = r;
        if (previousResults == null) {
            throw new ArgumentException(nameof(r));
        }

        if (!String.IsNullOrEmpty(previousResults.GetScrollId())) {
            var scrollResponse = await _client.ScrollAsync<TResult>(pagableQuery.GetLifetime(), previousResults.GetScrollId()).AnyContext();
            _logger.Trace(() => scrollResponse.GetRequest());

            var results = scrollResponse.ToFindResults();
            results.Page = previousResults.Page + 1;
            // A full page implies more results may follow.
            results.HasMore = scrollResponse.Hits.Count() >= pagableQuery.GetLimit();
            return(results);
        }

        if (pagableQuery == null) {
            return(new FindResults<TResult>());
        }

        if (pagingOptions != null) {
            pagingOptions.Page = pagingOptions.Page == null ? 2 : pagingOptions.Page + 1;
        }
        return(await FindAsAsync<TResult>(query).AnyContext());
    };

    // Cache key suffix encodes page and limit so each page caches separately.
    string cacheSuffix = pagableQuery?.ShouldUseLimit() == true?String.Concat(pagingOptions.Page?.ToString() ?? "1", ":", pagableQuery.GetLimit().ToString()) : String.Empty;

    FindResults<TResult> result;
    if (allowCaching) {
        result = await GetCachedQueryResultAsync<FindResults<TResult>>(query, cacheSuffix : cacheSuffix).AnyContext();
        if (result != null) {
            // Rewire the next-page continuation; it is not serialized with the cached value.
            ((IGetNextPage<TResult>)result).GetNextPageFunc = async r => await getNextPageFunc(r).AnyContext();
            return(result);
        }
    }

    ISearchResponse<TResult> response = null;

    // Issue a fresh search unless we are resuming an existing scroll.
    if (useSnapshotPaging == false || String.IsNullOrEmpty(elasticPagingOptions?.ScrollId)) {
        SearchDescriptor<T> searchDescriptor = await CreateSearchDescriptorAsync(query, queryOptions).AnyContext();
        if (useSnapshotPaging) {
            searchDescriptor.SearchType(SearchType.Scan).Scroll(pagableQuery.GetLifetime());
        }

        response = await _client.SearchAsync<TResult>(searchDescriptor).AnyContext();
        _logger.Trace(() => response.GetRequest());

        if (!response.IsValid) {
            // A missing index is treated as an empty result, not an error.
            if (response.ConnectionStatus.HttpStatusCode.GetValueOrDefault() == 404) {
                return(new FindResults<TResult>());
            }

            string message = response.GetErrorMessage();
            _logger.Error().Exception(response.ConnectionStatus.OriginalException).Message(message).Property("request", response.GetRequest()).Write();
            throw new ApplicationException(message, response.ConnectionStatus.OriginalException);
        }
    }

    if (useSnapshotPaging) {
        // The response might have returned 0 search results.
        if (response?.Total == 0) {
            return(response.ToFindResults());
        }

        var scrollResponse = await _client.ScrollAsync<TResult>(pagableQuery.GetLifetime(), response?.ScrollId ?? elasticPagingOptions?.ScrollId).AnyContext();
        _logger.Trace(() => scrollResponse.GetRequest());

        if (!scrollResponse.IsValid) {
            string message = scrollResponse.GetErrorMessage();
            _logger.Error().Exception(scrollResponse.ConnectionStatus.OriginalException).Message(message).Property("request", scrollResponse.GetRequest()).Write();
            throw new ApplicationException(message, scrollResponse.ConnectionStatus.OriginalException);
        }

        result = scrollResponse.ToFindResults();
        result.HasMore = scrollResponse.Hits.Count() >= pagableQuery.GetLimit();
        ((IGetNextPage<TResult>)result).GetNextPageFunc = getNextPageFunc;
    } else if (pagableQuery?.ShouldUseLimit() == true) {
        result = response.ToFindResults(pagableQuery.GetLimit());
        // Strictly greater: one extra hit beyond the limit signals more pages.
        result.HasMore = response.Hits.Count() > pagableQuery.GetLimit();
        ((IGetNextPage<TResult>)result).GetNextPageFunc = getNextPageFunc;
    } else {
        result = response.ToFindResults();
    }

    result.Page = pagingOptions?.Page ?? 1;

    if (!allowCaching) {
        return(result);
    }

    // Detach the continuation while caching (it is not serializable), then restore it.
    var nextPageFunc = ((IGetNextPage<TResult>)result).GetNextPageFunc;
    ((IGetNextPage<TResult>)result).GetNextPageFunc = null;
    await SetCachedQueryResultAsync(query, result, cacheSuffix : cacheSuffix).AnyContext();
    ((IGetNextPage<TResult>)result).GetNextPageFunc = nextPageFunc;

    return(result);
}
/// <summary>
/// Builds a minimal paged search descriptor (first page, ten hits).
/// </summary>
public void Test()
{
    var searchDescriptor = new SearchDescriptor();
    searchDescriptor = searchDescriptor.From(0).Size(10);
}
/// <summary>
/// Runs the supplied framework-provider query with standard (page - 1) * size paging.
/// Any non-200 response yields an empty result rather than throwing.
/// </summary>
private ProviderApprenticeshipLocationSearchResult PerformFrameworkProviderSearchWithQuery(SearchDescriptor<FrameworkProviderSearchResultsItem> qryStr, int page, int pageSize)
{
    var skipAmount = pageSize * (page - 1);
    var results = _elasticsearchCustomClient.Search<FrameworkProviderSearchResultsItem>(_ => qryStr.Skip(skipAmount).Take(pageSize));

    return results.ApiCall?.HttpStatusCode == 200
        ? MapToProviderApprenticeshipLocationSearchResult(results, page, pageSize)
        : new ProviderApprenticeshipLocationSearchResult();
}
/// <summary>
/// Paged query with optional keyword search and keyword highlighting.
/// The method is a bit long and could use refactoring (original author's note).
/// </summary>
/// <typeparam name="T">Document type.</typeparam>
/// <param name="pageParams">Paging/search parameters; defaults to page 0, size 20 when null.</param>
/// <param name="index">Index name, or null for the default index.</param>
/// <returns>Matched documents (with highlights applied when requested) plus took/total.</returns>
public IQueryResult<T> Query<T>(IPageParam pageParams, string index = null) where T : class {
    if (pageParams == null) {
        pageParams = new PageParam { PageIndex = 0, PageSize = 20 };
    }

    SearchDescriptor<T> searchDescriptor = new SearchDescriptor<T>()
        .Type(_defaultType)
        .Index(index ?? _defaultIndex)
        .From(pageParams.From)
        .Size(pageParams.PageSize);

    // Search-aware params restrict the query_string to specific fields; plain params search all.
    if (pageParams is PageParamWithSearch) {
        PageParamWithSearch pageParamsSearch = pageParams as PageParamWithSearch;
        searchDescriptor = searchDescriptor.Query(q => q.QueryString(qs => qs.Fields(pageParamsSearch.SearchKeys)
            .Query(pageParamsSearch.KeyWord)
            .DefaultOperator(pageParamsSearch.Operator)));
    } else if (pageParams is PageParam) {
        searchDescriptor = searchDescriptor.Query(q => q.QueryString(qs => qs.Query(pageParams.KeyWord)
            .DefaultOperator(pageParams.Operator)));
    }

    // Whether highlighting is required.
    bool hasHighlight = pageParams.Highlight?.Keys?.Length > 0;
    if (hasHighlight) {
        //TODO
        BuildHighLightQuery<T>(pageParams, ref searchDescriptor);
    }

    // Execute the query once all conditions are configured.
    ISearchResponse<T> response = _builder?.Client.Search<T>(s => searchDescriptor);
    var list = response.Documents;
    var listWithHightlight = new List<T>();

    if (hasHighlight) {
        // Copy each hit's highlight fragments back onto the source object via reflection.
        response.Hits.ToList().ForEach(x => {
            if (x.Highlights?.Count > 0) {
                PropertyInfo[] properties = typeof(T).GetProperties();
                foreach (string key in pageParams.Highlight?.Keys) {
                    // First get the replacement content.
                    if (x.Highlights.ContainsKey(key)) {
                        string value = string.Join("", x.Highlights[key]?.Highlights);
                        PropertyInfo info = properties.FirstOrDefault(p => p.Name == pageParams.Highlight.PrefixOfKey + key);
                        // No prefixed property found: fall back to replacing the original one.
                        if (info == null && pageParams.Highlight.ReplaceAuto) {
                            var filed = key[0].ToString().ToUpper() + key.Substring(1, key.Length - 1);
                            info = properties.FirstOrDefault(p => p.Name == filed);
                        }
                        if (info?.CanWrite == true) {
                            // Only assign when the highlight fragment is non-empty.
                            if (!string.IsNullOrEmpty(value)) {
                                info.SetValue(x.Source, value);
                            }
                        }
                    }
                }
            }
            listWithHightlight.Add(x.Source);
        });
    }

    IQueryResult<T> result = new CustomQueryResult<T> {
        List = listWithHightlight,
        Took = response.Took,
        Total = response.Total
    };
    return(result);
}
/// <summary>
/// Executes a paged search against <paramref name="indexName"/> with optional
/// keyword highlighting and source-field filtering.
/// </summary>
/// <typeparam name="T">Document type (an elastic entity).</typeparam>
/// <typeparam name="TKey">Key type of the elastic entity.</typeparam>
/// <param name="indexName">Index to search.</param>
/// <param name="query">Pre-built descriptor; paging/highlight/source options are applied to it.</param>
/// <param name="skip">Number of hits to skip.</param>
/// <param name="size">Number of hits to return.</param>
/// <param name="includeFields">Source fields to include, or null for all.</param>
/// <param name="preTags">Opening highlight tag.</param>
/// <param name="postTags">Closing highlight tag.</param>
/// <param name="disableHigh">When true, highlight tags are emptied (the highlight section is still sent).</param>
/// <param name="highField">Fields to highlight.</param>
/// <returns>The raw search response.</returns>
public virtual async Task<ISearchResponse<T>> SearchAsync<T, TKey>(string indexName, SearchDescriptor<T> query, int skip, int size, string[] includeFields = null, string preTags = "<strong style=\"color: red;\">", string postTags = "</strong>", bool disableHigh = false, params string[] highField) where T : ElasticEntity<TKey>
{
    query.Index(indexName);

    var highdes = new HighlightDescriptor<T>();
    if (disableHigh)
    {
        preTags = "";
        postTags = "";
    }

    highdes.PreTags(preTags).PostTags(postTags);
    var ishigh = highField != null && highField.Length > 0;
    var hfs = new List<Func<HighlightFieldDescriptor<T>, IHighlightField>>();

    // Pagination
    query.Skip(skip).Take(size);

    // Keyword highlighting
    if (ishigh)
    {
        foreach (var s in highField)
        {
            hfs.Add(f => f.Field(s));
        }
    }

    highdes.Fields(hfs.ToArray());
    query.Highlight(h => highdes);

    if (includeFields != null)
    {
        query.Source(ss => ss.Includes(ff => ff.Fields(includeFields.ToArray())));
    }

    // Removed: a JsonConvert.SerializeObject(query) call whose result was never
    // used — it serialized the whole descriptor on every request for nothing.
    var response = await ElasticSearchClient.SearchAsync<T>(query);
    return response;
}
/// <summary>
/// Paged query against the given index, with optional keyword search and highlighting.
/// </summary>
/// <typeparam name="T">Document type.</typeparam>
/// <param name="param">Paging parameters; defaults to page 1, size 20 when null.</param>
/// <param name="indexName">Index name.</param>
/// <returns>Documents for the requested page plus took/total metadata.</returns>
public async Task<IQueryResult<T>> PageQueryAsync<T>(IPageParam param, string indexName) where T : class
{
    if (param == null)
    {
        param = new PageParam() { Page = 1, PageSize = 20 };
    }

    var searchRequest = new SearchDescriptor<T>()
        .Index(indexName)
        .From(param.GetSkipCount())
        .Size(param.PageSize);

    // Search-aware params get the richer configuration; plain paging params the basic one.
    if (param is PageParamWithSearch pageSearch)
    {
        ConfigPageRequest(pageSearch, ref searchRequest);
    }
    else if (param is PageParam pageParam)
    {
        ConfigPageRequest(pageParam, ref searchRequest);
    }

    // Apply highlighting only when highlight keys were supplied.
    bool hasHighlight = param.Highlight?.Keys?.Length > 0;
    if (hasHighlight)
    {
        BuildHighLightQuery(param, ref searchRequest);
    }

    var client = await _builder.GetClientAsync();
    var response = await client.SearchAsync<T>(x => searchRequest);

    // Removed: a large block of commented-out highlight post-processing code;
    // highlighting is handled by BuildHighLightQuery above.
    return new CustomQueryResult<T>()
    {
        Data = response.Documents,
        Took = response.Took,
        TotalCount = response.Total
    };
}