/// <summary>
/// Reads a multi-bucket aggregation from a Json.NET reader. On entry the reader is positioned
/// on a property name inside the aggregation object; optional "doc_count_error_upper_bound"
/// and "sum_other_doc_count" properties are consumed before the "buckets" payload.
/// When "buckets" is an object the aggregation is a named-filters aggregation and a
/// <c>FiltersAggregate</c> is returned instead of a <c>BucketAggregate</c>.
/// </summary>
/// <param name="reader">Reader positioned on the first property name of the aggregation body.</param>
/// <param name="serializer">Serializer used for nested aggregates/buckets.</param>
/// <returns>A <c>BucketAggregate</c>, a <c>FiltersAggregate</c>, or null if no bucket array follows.</returns>
private IAggregate GetMultiBucketAggregate(JsonReader reader, JsonSerializer serializer)
{
    var bucket = new BucketAggregate();
    var propertyName = (string)reader.Value;
    if (propertyName == Parser.DocCountErrorUpperBound)
    {
        reader.Read(); // move onto the numeric value
        bucket.DocCountErrorUpperBound = reader.Value as long?;
        reader.Read(); // advance to the next property name
    }
    propertyName = (string)reader.Value;
    if (propertyName == Parser.SumOtherDocCount)
    {
        reader.Read(); // move onto the numeric value
        bucket.SumOtherDocCount = reader.Value as long?;
        reader.Read(); // advance to the next property name (expected: "buckets")
    }
    var items = new List<IBucket>();
    reader.Read(); // step past the "buckets" property name onto its value
    if (reader.TokenType == JsonToken.StartObject)
    {
        // "buckets" is an object: named buckets, i.e. a filters aggregation with named filters.
        reader.Read();
        var aggs = new Dictionary<string, IAggregate>();
        while (reader.TokenType != JsonToken.EndObject)
        {
            var name = reader.Value.ToString();
            reader.Read();
            var innerAgg = this.ReadAggregate(reader, serializer);
            aggs.Add(name, innerAgg);
            reader.Read(); // move past the inner aggregate onto the next name / EndObject
        }
        reader.Read(); // consume the EndObject of the buckets object
        return (new FiltersAggregate(aggs));
    }
    if (reader.TokenType != JsonToken.StartArray)
    {
        // unexpected token shape -- bail out rather than guess
        return (null);
    }
    reader.Read(); //move from start array to start object
    if (reader.TokenType == JsonToken.EndArray)
    {
        // empty buckets array
        reader.Read();
        bucket.Items = EmptyReadOnly<IBucket>.Collection;
        return (bucket);
    }
    // non-empty array: read buckets until the closing array token
    do
    {
        var item = this.ReadBucket(reader, serializer);
        items.Add(item);
        reader.Read();
    } while (reader.TokenType != JsonToken.EndArray);
    bucket.Items = items;
    reader.Read(); // consume the EndArray
    return (bucket);
}
/// <summary>
/// Recursively flattens a nested "levelN" terms aggregation into flat
/// "key0_key1_..." entries in <paramref name="values"/>.
/// </summary>
/// <param name="values">Target map from the flattened key path to the aggregated figure.</param>
/// <param name="bucket">Bucket aggregate of the current level; items are expected to be keyed buckets.</param>
/// <param name="i">Current nesting level; the next level is looked up as "level{i+1}".</param>
/// <param name="key">Key path accumulated from the outer levels ("" at the top level).</param>
/// <param name="docCount">When true record the bucket's doc count, otherwise the "summe" sub-aggregation value.</param>
private static void ExtractKeyAndValues(Dictionary<string, float> values, Nest.BucketAggregate bucket, int i, string key, bool docCount)
{
    i++;
    foreach (var lvl in bucket.Items)
    {
        var nestTag = lvl as Nest.KeyedBucket;
        if (nestTag == null)
        {
            // NOTE(review): non-keyed buckets are skipped instead of crashing with an NRE.
            continue;
        }
        // Prefer the string form of the key when Elasticsearch supplies one.
        var bucketKey = (nestTag.KeyAsString != null) ? nestTag.KeyAsString : nestTag.Key;
        // Only insert the separator when a path has already been accumulated,
        // so top-level entries do not start with a stray '_'.
        var nestTagKey = key + ((key.Length > 0) ? "_" : "") + bucketKey;
        if (nestTag.Aggregations != null && nestTag.Aggregations.ContainsKey("level" + i))
        {
            var nested = nestTag.Aggregations["level" + i] as Nest.BucketAggregate;
            if (nested != null)
            {
                ExtractKeyAndValues(values, nested, i, nestTagKey, docCount);
            }
        }
        else
        {
            if (docCount)
            {
                // BUGFIX: use the already-built nestTagKey instead of re-concatenating
                // key + "_" + ..., which produced a leading '_' for top-level buckets.
                values.Add(nestTagKey, (float)nestTag.DocCount);
            }
            else
            {
                double sum = 0.0;
                var sumAggs = (nestTag.Aggregations["summe"] as Nest.ValueAggregate);
                if (sumAggs != null && sumAggs.Value.HasValue)
                {
                    sum = sumAggs.Value.Value;
                }
                values.Add(nestTagKey, (float)sum);
            }
        }
    }
}
/// <summary>
/// Reads a multi-bucket aggregation from a Utf8Json reader. On entry
/// <paramref name="propertyName"/> holds the current raw property name; optional
/// "doc_count_error_upper_bound" and "sum_other_doc_count" properties are consumed
/// before the "buckets" payload. An object-shaped "buckets" yields a
/// <c>FiltersAggregate</c> of named filters, an array yields a <c>BucketAggregate</c>.
/// </summary>
/// <param name="reader">Reader positioned just after the property name's ':' separator.</param>
/// <param name="formatterResolver">Resolver for nested aggregate formatting.</param>
/// <param name="propertyName">Raw bytes of the current property name; updated as names are consumed.</param>
private IAggregate GetMultiBucketAggregate(ref JsonReader reader, IJsonFormatterResolver formatterResolver, ref ArraySegment<byte> propertyName)
{
    var bucket = new BucketAggregate();
    if (propertyName.EqualsBytes(DocCountErrorUpperBound))
    {
        bucket.DocCountErrorUpperBound = reader.ReadNullableLong();
        reader.ReadIsValueSeparatorWithVerify();
        propertyName = reader.ReadPropertyNameSegmentRaw();
    }
    if (propertyName.EqualsBytes(SumOtherDocCount))
    {
        bucket.SumOtherDocCount = reader.ReadNullableLong();
        reader.ReadIsValueSeparatorWithVerify();
        reader.ReadNext(); // "buckets"
        reader.ReadNext(); // :
    }
    var items = new List<IBucket>();
    var count = 0;
    var token = reader.GetCurrentJsonToken();
    if (token == JsonToken.BeginObject)
    {
        // object-shaped buckets: named filters aggregation
        var filterAggregates = new Dictionary<string, IAggregate>();
        while (reader.ReadIsInObject(ref count))
        {
            var name = reader.ReadPropertyName();
            var innerAgg = ReadAggregate(ref reader, formatterResolver);
            filterAggregates[name] = innerAgg;
        }
        return (new FiltersAggregate(filterAggregates));
    }
    // array-shaped buckets: read each bucket until the array ends
    while (reader.ReadIsInArray(ref count))
    {
        var item = ReadBucket(ref reader, formatterResolver);
        items.Add(item);
    }
    bucket.Items = items;
    reader.ReadNext(); // close outer }
    return (bucket);
}
/// <summary>
/// Reads a multi-bucket aggregation from a Utf8Json reader, carrying aggregation
/// metadata. Consumes optional "doc_count_error_upper_bound" and
/// "sum_other_doc_count" properties, then the "buckets" payload: an object yields
/// a <c>FiltersAggregate</c> of named filters, an array yields a
/// <c>BucketAggregate</c>. A trailing "interval" property (e.g. from
/// auto_date_histogram) is deserialized onto the bucket; any other trailing
/// property is skipped.
/// </summary>
/// <param name="reader">Reader positioned just after the property name's ':' separator.</param>
/// <param name="formatterResolver">Resolver for nested aggregate formatting.</param>
/// <param name="propertyName">Raw bytes of the current property name; updated as names are consumed.</param>
/// <param name="meta">Aggregation metadata attached to the returned aggregate.</param>
private IAggregate GetMultiBucketAggregate(ref JsonReader reader, IJsonFormatterResolver formatterResolver, ref ArraySegment<byte> propertyName, IReadOnlyDictionary<string, object> meta)
{
    var bucket = new BucketAggregate { Meta = meta };
    if (propertyName.EqualsBytes(DocCountErrorUpperBound))
    {
        bucket.DocCountErrorUpperBound = reader.ReadNullableLong();
        reader.ReadIsValueSeparatorWithVerify();
        propertyName = reader.ReadPropertyNameSegmentRaw();
    }
    if (propertyName.EqualsBytes(SumOtherDocCount))
    {
        bucket.SumOtherDocCount = reader.ReadNullableLong();
        reader.ReadIsValueSeparatorWithVerify();
        reader.ReadNext(); // "buckets"
        reader.ReadNext(); // :
    }
    var items = new List<IBucket>();
    bucket.Items = items;
    var count = 0;
    var token = reader.GetCurrentJsonToken();
    if (token == JsonToken.BeginObject)
    {
        // object-shaped buckets: named filters aggregation
        var filterAggregates = new Dictionary<string, IAggregate>();
        while (reader.ReadIsInObject(ref count))
        {
            var name = reader.ReadPropertyName();
            var innerAgg = ReadAggregate(ref reader, formatterResolver);
            filterAggregates[name] = innerAgg;
        }
        return (new FiltersAggregate(filterAggregates) { Meta = meta });
    }
    // array-shaped buckets: read each bucket until the array ends
    while (reader.ReadIsInArray(ref count))
    {
        var item = ReadBucket(ref reader, formatterResolver);
        items.Add(item);
    }
    // A property may follow the buckets array (e.g. "interval").
    token = reader.GetCurrentJsonToken();
    if (token == JsonToken.ValueSeparator)
    {
        reader.ReadNext();
        propertyName = reader.ReadPropertyNameSegmentRaw();
        if (propertyName.EqualsBytes(JsonWriter.GetEncodedPropertyNameWithoutQuotation("interval")))
        {
            bucket.Interval = formatterResolver.GetFormatter<DateMathTime>().Deserialize(ref reader, formatterResolver);
        }
        else
        {
            // skip for now
            reader.ReadNextBlock();
        }
    }
    return (bucket);
}
/// <summary>
/// Aggregates contracts per year and per "oblast" (classification type):
/// a date histogram per year, terms on classification.class1.typeValue within each
/// year, and a VAT-price sum per term.
/// </summary>
/// <param name="query">Full-text/filter query passed to the contract search.</param>
/// <param name="interestedInYearsOnly">When non-null, only these years are returned
/// (missing years map to an empty dictionary); when null, every year found is returned.</param>
/// <returns>Map of year -&gt; (oblast id -&gt; aggregated BasicData).</returns>
public static Dictionary<int, Dictionary<int, BasicData>> OblastiPerYear(string query, int[] interestedInYearsOnly)
{
    AggregationContainerDescriptor<HlidacStatu.Lib.Data.Smlouva> aggYSum = new AggregationContainerDescriptor<HlidacStatu.Lib.Data.Smlouva>()
        .DateHistogram("x-agg", h => h
            .Field(f => f.datumUzavreni)
            .CalendarInterval(Nest.DateInterval.Year)
            .Aggregations(aggObor => aggObor
                .Terms("x-obor", oborT => oborT
                    .Field("classification.class1.typeValue")
                    .Size(150)
                    .Aggregations(agg => agg
                        .Sum("sumincome", s => s
                            .Field(ff => ff.CalculatedPriceWithVATinCZK)
                        )
                    )
                )
            )
        );
    var res = HlidacStatu.Lib.Data.Smlouva.Search.SimpleSearch(query, 1, 0,
        HlidacStatu.Lib.Data.Smlouva.Search.OrderResult.FastestForScroll,
        aggYSum, exactNumOfResults: true);

    Dictionary<int, Dictionary<int, BasicData>> result = new Dictionary<int, Dictionary<int, BasicData>>();
    if (interestedInYearsOnly != null)
    {
        // Pre-seed the requested years so absent years come back as empty dictionaries.
        foreach (int year in interestedInYearsOnly)
        {
            result.Add(year, new Dictionary<int, BasicData>());
        }
        foreach (Nest.DateHistogramBucket val in ((BucketAggregate)res.ElasticResults.Aggregations["x-agg"]).Items)
        {
            // Keep only buckets for the requested years.
            if (result.ContainsKey(val.Date.Year))
            {
                result[val.Date.Year] = ExtractOblastiPerYearBucket(val);
            }
        }
    }
    else
    {
        foreach (Nest.DateHistogramBucket val in ((BucketAggregate)res.ElasticResults.Aggregations["x-agg"]).Items)
        {
            // BUGFIX: the original tested ContainsKey on a still-empty dictionary,
            // which was always false, so the null-years path returned an empty result.
            if (!result.ContainsKey(val.Date.Year))
            {
                result.Add(val.Date.Year, ExtractOblastiPerYearBucket(val));
            }
        }
    }
    return (result);
}

/// <summary>Projects one year's histogram bucket into a map of oblast id -&gt; BasicData.</summary>
private static Dictionary<int, BasicData> ExtractOblastiPerYearBucket(Nest.DateHistogramBucket val)
{
    Nest.BucketAggregate vals = (Nest.BucketAggregate)val.Values.FirstOrDefault();
    return vals.Items
        .Select(m => (Nest.KeyedBucket<object>)m)
        .ToDictionary(
            m => Convert.ToInt32(m.Key),
            m => new BasicData()
            {
                CelkemCena = (decimal)((Nest.ValueAggregate)m.Values.FirstOrDefault()).Value,
                Pocet = m.DocCount ?? 0
            });
}
// level<category, value>
/// <summary>
/// Runs a nested terms aggregation ("level0".."levelN") over the given categories
/// with a metric sub-aggregation named "summe" on <paramref name="fact"/>, then
/// flattens the response into "cat0_cat1_..." -&gt; value entries.
/// </summary>
/// <param name="categories">Category field names, outermost first; must be non-empty.</param>
/// <param name="fact">Field the metric ("summe") is computed on; must be non-empty.</param>
/// <param name="query">Raw query string handed to BuildQueryContainer.</param>
/// <param name="aggFunc">"avg" | "min" | "max" | "count" | anything else = sum.</param>
/// <param name="diffFromDays">Optional lower bound (days) for the date filter.</param>
/// <param name="diffToDays">Optional upper bound (days) for the date filter.</param>
/// <returns>Flattened key path -&gt; aggregated value; empty on invalid input or error.</returns>
public static Dictionary<string, float> Aggregate(List<string> categories, string fact, string query, string aggFunc, int? diffFromDays, int? diffToDays)
{
    var values = new Dictionary<string, float>();
    try
    {
        if (categories != null && categories.Count > 0 && !string.IsNullOrEmpty(fact))
        {
            var searchQuery = new SearchDescriptor<ExpandoObject>()
                .Index("docs")
                .From(0)
                .Size(1000)
                .Query(q => q
                    .Bool(b => b
                        .Must(BuildQueryContainer(query, diffFromDays, diffToDays))
                    )
                )
                .Aggregations(a =>
                {
                    if (categories.Count == 1)
                    {
                        // Single level: one terms aggregation with the metric sub-aggregation.
                        return (a.Terms("level0", ta => ta.Size(1000).Field(categories[0].ToLower())
                            .Aggregations(aa =>
                            {
                                if (aggFunc == "avg") { return aa.Average("summe", ts => ts.Field(fact.ToLower())); }
                                if (aggFunc == "min") { return aa.Min("summe", ts => ts.Field(fact.ToLower())); }
                                if (aggFunc == "max") { return aa.Max("summe", ts => ts.Field(fact.ToLower())); }
                                return aa.Sum("summe", ts => ts.Field(fact.ToLower()));
                            })
                        ));
                    }
                    else
                    {
                        // Multiple levels: build the nested container recursively.
                        return (BuildAggregationContainer(a, 0, categories.Count - 1, categories, fact.ToLower(), aggFunc));
                    }
                });

            // NOTE(review): debug dumps to hardcoded local paths; consider removing
            // or making configurable before deploying outside a dev machine.
            var filenameQuery = @"d:\query.txt";
            using (FileStream SourceStream = File.Open(filenameQuery, FileMode.Create))
            {
                ElasticClientFactory.Client.Serializer.Serialize(searchQuery, SourceStream);
            }
            var res = ElasticClientFactory.Client.Search<ExpandoObject>(searchQuery);
            var filenameResult = @"d:\result.txt";
            using (FileStream SourceStream = File.Open(filenameResult, FileMode.Create))
            {
                ElasticClientFactory.Client.Serializer.Serialize(res.Aggregations, SourceStream);
            }
            Nest.BucketAggregate firstBucketAggregate = res.Aggregations["level0"] as Nest.BucketAggregate;
            ExtractKeyAndValues(values, firstBucketAggregate, 0, "", (aggFunc == "count"));
        }
    }
    catch (Exception e)
    {
        // BUGFIX: AppendAllText creates the file when it does not exist, so a single
        // call suffices; the original WriteAllText + AppendAllText logged the first
        // error twice. Also fixed the timestamp format: "hh:MM:ss" printed the MONTH
        // in the minutes slot and a 12-hour hour without AM/PM.
        var filenameError = @"d:\error.txt";
        File.AppendAllText(filenameError,
            "\n" + String.Format("{0:dd.MM.yyyy HH:mm:ss}", DateTime.Now) + "\n" + e.Message + ": \n" + e.StackTrace);
    }
    return (values);
}