/// <summary>
/// Deserializes a terms query object. Reserved property names (disable_coord,
/// minimum_should_match, boost) configure the query itself; any other property
/// name is taken as the field under query, whose value holds the terms.
/// </summary>
/// <returns>The populated descriptor, or null when not positioned on an object.</returns>
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
    var filter = new TermsQueryDescriptor<object, object>();
    ITermsQuery f = filter;
    if (reader.TokenType != JsonToken.StartObject) return null;

    // Walk the object's properties until we pop back above its depth.
    var depth = reader.Depth;
    while (reader.Read() && reader.Depth >= depth && reader.Value != null)
    {
        var property = reader.Value as string;
        switch (property)
        {
            case "disable_coord":
                reader.Read();
                f.DisableCoord = reader.Value as bool?;
                break;
            case "minimum_should_match":
                f.MinimumShouldMatch = reader.ReadAsString();
                break;
            case "boost":
                reader.Read();
                f.Boost = reader.Value as double?;
                break;
            default:
                // Unreserved name: the field being queried; ReadTerms consumes
                // either the terms array or an external-field lookup object.
                f.Field = property;
                ReadTerms(f, reader);
                break;
        }
    }
    return filter;
}
/// <summary>
/// Deserializes a filter aggregator from JSON of the shape { "filter": { ... } }.
/// </summary>
/// <returns>The populated <c>FilterAggregator</c>, or null when the reader is not
/// positioned on an object whose first property is "filter".</returns>
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
    if (reader.TokenType != JsonToken.StartObject) return null;
    reader.Read();
    if (reader.TokenType != JsonToken.PropertyName) return null;
    // Only the "filter" wrapper property is recognized here.
    // (Removed unused local that shadowed this read.)
    if ((string)reader.Value != "filter") return null;
    reader.Read();
    var agg = new FilterAggregator();
    serializer.Populate(reader, agg);
    return agg;
}
/// <summary>
/// Reads the keyed object of a percentiles aggregation response. Each property
/// name is the percentile (e.g. "95.0", parsed culture-invariantly) and its
/// value is the computed metric for that percentile.
/// </summary>
private IAggregation GetPercentilesMetricAggregation(JsonReader reader, JsonSerializer serializer)
{
    var items = new List<PercentileItem>();
    if (reader.TokenType == JsonToken.StartObject)
        reader.Read();

    // One property/value pair per percentile until the object closes.
    for (; reader.TokenType != JsonToken.EndObject; reader.Read())
    {
        var pct = double.Parse(reader.Value as string, CultureInfo.InvariantCulture);
        reader.Read();
        var metricValue = reader.Value as double?;
        items.Add(new PercentileItem
        {
            Percentile = pct,
            Value = metricValue.GetValueOrDefault(0)
        });
    }

    var metric = new PercentilesMetric();
    metric.Items = items;
    return metric;
}
/// <summary>
/// Dispatches to the correct aggregation parser based on the first property name
/// of the aggregation object the reader is positioned on.
/// </summary>
/// <returns>The parsed aggregation, or null for an unrecognized shape.</returns>
private IAggregation ReadAggregation(JsonReader reader, JsonSerializer serializer)
{
    if (reader.TokenType != JsonToken.StartObject) return null;
    reader.Read();
    if (reader.TokenType != JsonToken.PropertyName) return null;

    var firstProperty = reader.Value as string;

    // A purely numeric property name indicates a keyed percentiles response.
    if (_numeric.IsMatch(firstProperty))
        return GetPercentilesMetricAggregation(reader, serializer);

    if (firstProperty == "values")
    {
        // Step over the "values" property name and its opening token.
        reader.Read();
        reader.Read();
        return GetPercentilesMetricAggregation(reader, serializer);
    }
    if (firstProperty == "value") return GetValueMetricOrAggregation(reader, serializer);
    if (firstProperty == "buckets") return GetBucketAggregation(reader, serializer);
    if (firstProperty == "key") return GetKeyedBucketItem(reader, serializer);
    if (firstProperty == "from" || firstProperty == "to") return GetRangeAggregation(reader, serializer);
    if (firstProperty == "key_as_string") return GetDateHistogramAggregation(reader, serializer);
    if (firstProperty == "count") return GetStatsAggregation(reader, serializer);
    if (firstProperty == "doc_count") return GetSingleBucketAggregation(reader, serializer);
    return null;
}
/// <summary>
/// Deserializes a geo_bounding_box filter. Reserved property names (_cache,
/// _name, _cache_key, type) configure the filter; any other property name is the
/// field holding the bounding box, which ReadBox consumes.
/// </summary>
/// <returns>The populated descriptor, or null when not positioned on an object.</returns>
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
    var descriptor = new GeoBoundingBoxFilterDescriptor();
    IGeoBoundingBoxFilter bbFilter = descriptor;
    if (reader.TokenType != JsonToken.StartObject) return null;

    var startDepth = reader.Depth;
    while (reader.Read() && reader.Depth >= startDepth && reader.Value != null)
    {
        var propertyName = reader.Value as string;
        if (propertyName == "_cache")
        {
            reader.Read();
            bbFilter.Cache = reader.Value as bool?;
        }
        else if (propertyName == "_name")
        {
            reader.Read();
            bbFilter.FilterName = reader.Value as string;
        }
        else if (propertyName == "_cache_key")
        {
            reader.Read();
            bbFilter.CacheKey = reader.Value as string;
        }
        else if (propertyName == "type")
        {
            reader.Read();
            bbFilter.GeoExecution = (reader.Value as string).ToEnum<GeoExecution>();
        }
        else
        {
            // Unreserved name: the field under filter; its value is the box.
            bbFilter.Field = propertyName;
            ReadBox(bbFilter, reader);
        }
    }
    return descriptor;
}
/// <summary>
/// Deserializes one bulk response item keyed by its operation name
/// ("delete", "update", "index" or "create") into the matching item type,
/// stamping the operation name onto the result.
/// </summary>
/// <returns>The populated item, or null for an unrecognized operation key.</returns>
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
    reader.Read();
    if (reader.TokenType != JsonToken.PropertyName) return null;
    var key = reader.Value as string;
    reader.Read();
    // Note: `new T()` can never yield null, so the original per-case
    // `if (item != null)` guards were dead code and have been removed.
    switch (key)
    {
        case "delete":
            var deleteItem = new BulkDeleteResponseItem();
            serializer.Populate(reader, deleteItem);
            deleteItem.Operation = key;
            reader.Read();
            return deleteItem;
        case "update":
            var updateItem = new BulkUpdateResponseItem();
            serializer.Populate(reader, updateItem);
            updateItem.Operation = key;
            reader.Read();
            return updateItem;
        case "index":
            var indexItem = new BulkIndexResponseItem();
            serializer.Populate(reader, indexItem);
            indexItem.Operation = key;
            reader.Read();
            return indexItem;
        case "create":
            var createItem = new BulkCreateResponseItem();
            serializer.Populate(reader, createItem);
            createItem.Operation = key;
            reader.Read();
            return createItem;
    }
    return null;
}
/// <summary>
/// Reads one range (or date_range) bucket into a <c>RangeItem</c>. Consumes the
/// from/to/key/from_as_string/to_as_string/doc_count properties in whatever
/// order they appear, then hands the reader to GetNestedAggregations for any
/// sub-aggregations.
/// </summary>
/// <param name="key">Optional bucket key already parsed by the caller (keyed form).</param>
public IAggregation GetRangeAggregation(JsonReader reader, JsonSerializer serializer, string key = null)
{
    string fromAsString = null, toAsString = null;
    long? docCount = null;
    double? toDouble = null, fromDouble = null;

    var readExpectedProperty = true;
    while (readExpectedProperty)
    {
        switch (reader.Value as string)
        {
            case "from":
                reader.Read();
                // NOTE(review): only double-typed values are captured; an integral
                // "from" boxed as long would be silently dropped — confirm intended.
                if (reader.ValueType == typeof (double)) fromDouble = (double) reader.Value;
                reader.Read();
                break;
            case "to":
                reader.Read();
                if (reader.ValueType == typeof (double)) toDouble = (double) reader.Value;
                reader.Read();
                break;
            case "key":
                reader.Read();
                key = reader.Value as string;
                reader.Read();
                break;
            case "from_as_string":
                reader.Read();
                fromAsString = reader.Value as string;
                reader.Read();
                break;
            case "to_as_string":
                reader.Read();
                toAsString = reader.Value as string;
                reader.Read();
                break;
            case "doc_count":
                reader.Read();
                docCount = (reader.Value as long?).GetValueOrDefault(0);
                reader.Read();
                break;
            default:
                // First unrecognized property ends the bucket's own fields.
                readExpectedProperty = false;
                break;
        }
    }
    var bucket = new RangeItem
    {
        Key = key,
        From = fromDouble,
        To = toDouble,
        DocCount = docCount.GetValueOrDefault(),
        FromAsString = fromAsString,
        ToAsString = toAsString
    };
    // Any remaining properties are nested sub-aggregations.
    bucket.Aggregations = this.GetNestedAggregations(reader, serializer);
    return bucket;
}
/// <summary>
/// Reads a single "value" metric. The raw token may be a double or a long;
/// integral JSON numbers are boxed as long, so the double cast yields null and
/// the long is widened into the metric's nullable double instead.
/// </summary>
private IAggregation GetValueMetricOrAggregation(JsonReader reader, JsonSerializer serializer)
{
    reader.Read();
    var valueMetric = new ValueMetric { Value = reader.Value as double? };
    if (valueMetric.Value == null && reader.ValueType == typeof(long))
        valueMetric.Value = reader.Value as long?;
    reader.Read();
    return valueMetric;
}
/// <summary>
/// Reads the "buckets" array of a multi-bucket aggregation into a <c>Bucket</c>.
/// </summary>
/// <returns>The bucket (possibly with an empty item list), or null when the
/// expected array-start token is missing.</returns>
private IAggregation GetBucketAggregation(JsonReader reader, JsonSerializer serializer)
{
    var result = new Bucket();
    reader.Read();
    if (reader.TokenType != JsonToken.StartArray) return null;

    reader.Read(); // advance from [ to the first bucket object (or to ])
    if (reader.TokenType == JsonToken.EndArray)
    {
        reader.Read();
        result.Items = Enumerable.Empty<IAggregation>();
        return result;
    }

    var parsed = new List<IAggregation>();
    while (true)
    {
        parsed.Add(this.ReadAggregation(reader, serializer));
        reader.Read();
        if (reader.TokenType == JsonToken.EndArray) break;
    }
    result.Items = parsed;
    reader.Read(); // consume the closing ]
    return result;
}
/// <summary>
/// Writes a JavaScript "new Date(ticks)" constructor read from
/// <paramref name="reader"/> as a real date value, throwing when the
/// constructor is malformed or truncated.
/// </summary>
private void WriteConstructorDate(JsonReader reader)
{
    if (!reader.Read())
        throw JsonWriterException.Create(this, "Unexpected end when reading date constructor.", null);
    if (reader.TokenType != JsonToken.Integer)
        throw JsonWriterException.Create(this, "Unexpected token when reading date constructor. Expected Integer, got " + reader.TokenType, null);

    // The single constructor argument is the date in JavaScript ticks.
    var javaScriptTicks = (long)reader.Value;
    var converted = DateTimeUtils.ConvertJavaScriptTicksToDateTime(javaScriptTicks);

    if (!reader.Read())
        throw JsonWriterException.Create(this, "Unexpected end when reading date constructor.", null);
    if (reader.TokenType != JsonToken.EndConstructor)
        throw JsonWriterException.Create(this, "Unexpected token when reading date constructor. Expected EndConstructor, got " + reader.TokenType, null);

    WriteValue(converted);
}
/// <summary>
/// Reads a bucket that starts with a "key" property. Depending on what follows
/// the key this produces a range bucket, a significant-terms bucket, or a plain
/// key bucket with optional nested aggregations.
/// </summary>
private IAggregation GetKeyedBucketItem(JsonReader reader, JsonSerializer serializer)
{
    reader.Read();
    var key = reader.Value;
    reader.Read();
    var property = reader.Value as string;
    // A from/to property right after the key marks a keyed range bucket;
    // delegate with the key we already consumed.
    if (property == "from" || property == "to")
        return GetRangeAggregation(reader, serializer, key.ToString());

    var keyItem = new KeyItem();
    keyItem.Key = key.ToString();

    if (property == "key_as_string")
    {
        // Skip key_as_string property
        reader.Read();
        reader.Read();
    }

    reader.Read(); //doc_count;
    var docCount = reader.Value as long?;
    keyItem.DocCount = docCount.GetValueOrDefault(0);
    reader.Read();

    // A "score" property marks a significant_terms bucket built on this key.
    var nextProperty = reader.Value as string;
    if (nextProperty == "score")
    {
        return GetSignificantTermItem(reader, serializer, keyItem);
    }

    keyItem.Aggregations = this.GetNestedAggregations(reader, serializer);
    return keyItem;
}
/// <summary>
/// Reads one date_histogram bucket: key_as_string, key, doc_count, then any
/// nested sub-aggregations. Relies on the response emitting those properties
/// in exactly that order.
/// </summary>
private IAggregation GetDateHistogramAggregation(JsonReader reader, JsonSerializer serializer)
{
    reader.Read();
    var keyText = reader.Value as string;
    reader.Read(); // skip next property name
    reader.Read();
    var bucketKey = (reader.Value as long?).GetValueOrDefault(0);
    reader.Read(); // skip next property name
    reader.Read();
    var count = (reader.Value as long?).GetValueOrDefault(0);
    reader.Read();

    var item = new HistogramItem
    {
        Key = bucketKey,
        KeyAsString = keyText,
        DocCount = count
    };
    item.Aggregations = this.GetNestedAggregations(reader, serializer);
    return item;
}
/// <summary>
/// Reads a stats aggregation (count, min, max, avg, sum, per the locals below).
/// If the object continues past the fifth value, the three additional values
/// upgrade the result to an extended_stats metric.
/// </summary>
private IAggregation GetStatsAggregation(JsonReader reader, JsonSerializer serializer)
{
    reader.Read();
    var count = (reader.Value as long?).GetValueOrDefault(0);
    reader.Read(); // skip next property name
    reader.Read();
    var min = (reader.Value as double?);
    reader.Read(); // skip next property name
    reader.Read();
    var max = (reader.Value as double?);
    reader.Read(); // skip next property name
    reader.Read();
    var average = (reader.Value as double?);
    reader.Read(); // skip next property name
    reader.Read();
    var sum = (reader.Value as double?);
    reader.Read();
    // Plain stats object ends here; anything further is extended_stats.
    if (reader.TokenType == JsonToken.EndObject)
        return new StatsMetric()
        {
            Average = average,
            Count = count,
            Max = max,
            Min = min,
            Sum = sum
        };
    reader.Read();
    var sumOfSquares = (reader.Value as double?);
    reader.Read(); // skip next property name
    reader.Read();
    var variance = (reader.Value as double?);
    reader.Read(); // skip next property name
    reader.Read();
    var stdVariation = (reader.Value as double?);
    reader.Read();
    return new ExtendedStatsMetric()
    {
        Average = average,
        Count = count,
        Max = max,
        Min = min,
        StdDeviation = stdVariation,
        Sum = sum,
        SumOfSquares = sumOfSquares,
        Variance = variance
    };
}
/// <summary>
/// Reads the value of a terms query field: either a plain array of term strings,
/// or an object describing an external (indexed document) field lookup.
/// </summary>
private void ReadTerms(ITermsQuery termsQuery, JsonReader reader)
{
    reader.Read();
    if (reader.TokenType == JsonToken.StartObject)
    {
        // Lookup form: { id, index, type, path }
        var external = new ExternalFieldDeclaration();
        var startDepth = reader.Depth;
        while (reader.Read() && reader.Depth >= startDepth && reader.Value != null)
        {
            var name = reader.Value as string;
            if (name == "id")
            {
                reader.Read();
                external.Id = reader.Value as string;
            }
            else if (name == "index")
            {
                reader.Read();
                external.Index = reader.Value as string;
            }
            else if (name == "type")
            {
                reader.Read();
                external.Type = reader.Value as string;
            }
            else if (name == "path")
            {
                reader.Read();
                external.Path = reader.Value as string;
            }
        }
        termsQuery.ExternalField = external;
    }
    else if (reader.TokenType == JsonToken.StartArray)
    {
        // Plain form: an array of terms.
        termsQuery.Terms = JArray.Load(reader).Values<string>();
    }
}
/// <summary>
/// Reads a single-bucket aggregation: doc_count plus optional sub-aggregations.
/// A "buckets" property immediately after doc_count upgrades the result to a
/// <c>BucketWithDocCount</c> wrapping the inner bucket's items.
/// </summary>
private IAggregation GetSingleBucketAggregation(JsonReader reader, JsonSerializer serializer)
{
    reader.Read();
    var count = (reader.Value as long?).GetValueOrDefault(0);
    var single = new SingleBucket { DocCount = count };
    reader.Read();

    var hasBuckets = reader.TokenType == JsonToken.PropertyName
        && ((string)reader.Value) == "buckets";
    if (hasBuckets)
    {
        var inner = this.GetBucketAggregation(reader, serializer) as Bucket;
        return new BucketWithDocCount
        {
            DocCount = count,
            Items = inner.Items
        };
    }

    single.Aggregations = this.GetNestedAggregations(reader, serializer);
    return single;
}
/// <summary>
/// Reads the bounding-box value of a geo_bounding_box filter. Supports the
/// top_left/bottom_right corner form (as strings, numeric arrays, or lat/lon
/// objects) and the top/left/bottom/right vertices form. Corners are stored
/// back on the filter as comma-separated strings.
/// </summary>
private void ReadBox(IGeoBoundingBoxFilter filter, JsonReader reader)
{
    var c = CultureInfo.InvariantCulture;
    reader.Read();
    if (reader.TokenType != JsonToken.StartObject) return;
    reader.Read();
    var firstProperty = reader.Value as string;
    if (firstProperty == "top_left")
    {
        reader.Read();
        if (reader.ValueType == typeof(string))
        {
            // Corners given directly as strings.
            filter.TopLeft = reader.Value as string;
            reader.Read(); // skip next property name
            reader.Read();
            filter.BottomRight = reader.Value as string;
        }
        else if (reader.TokenType == JsonToken.StartArray)
        {
            // Corners as numeric arrays, joined into comma-separated strings.
            var values = JArray.Load(reader).Values<double>();
            filter.TopLeft = string.Join(", ", values.Select(v=>v.ToString(c)));
            reader.Read();
            reader.Read();
            values = JArray.Load(reader).Values<double>();
            filter.BottomRight =string.Join(", ", values.Select(v=>v.ToString(c)));
        }
        else if (reader.TokenType == JsonToken.StartObject)
        {
            // Corners as LatLon objects; note the output order is "lon, lat".
            var latlon = JObject.Load(reader).ToObject<LatLon>();
            filter.TopLeft = "{0}, {1}".F(latlon.Lon, latlon.Lat);
            reader.Read();
            reader.Read();
            latlon = JObject.Load(reader).ToObject<LatLon>();
            filter.BottomRight = "{0}, {1}".F(latlon.Lon, latlon.Lat);
        }
    }
    //vertices
    else if (firstProperty == "top")
    {
        // Four scalar properties, read in top/left/bottom/right order.
        reader.Read();
        var top = reader.Value as double?;
        reader.Read();
        reader.Read();
        var left = reader.Value as double?;
        reader.Read();
        reader.Read();
        var bottom = reader.Value as double?;
        reader.Read();
        reader.Read();
        var right = reader.Value as double?;
        // Reassembled as "top, left" / "bottom, right".
        filter.TopLeft = "{0}, {1}".F(top, left);
        filter.BottomRight = "{0}, {1}".F(bottom, right);
    }
    reader.Read(); // consume the end of the box object
}
/// <summary>
/// Completes a significant_terms bucket from an already-parsed key/doc_count
/// pair, reading the score and background-count values the reader is
/// positioned on, then any nested sub-aggregations.
/// </summary>
private IAggregation GetSignificantTermItem(JsonReader reader, JsonSerializer serializer, KeyItem keyItem)
{
    reader.Read();
    var scoreValue = reader.Value as double?;
    reader.Read(); // skip next property name
    reader.Read();
    var backgroundCount = reader.Value as long?;

    var item = new SignificantTermItem
    {
        Key = keyItem.Key,
        DocCount = keyItem.DocCount,
        BgCount = backgroundCount.GetValueOrDefault(0),
        Score = scoreValue.GetValueOrDefault(0)
    };
    reader.Read();
    item.Aggregations = this.GetNestedAggregations(reader, serializer);
    return item;
}
/// <summary>
/// Reads sibling property-name/aggregation pairs at the current depth into a
/// dictionary of nested aggregations.
/// </summary>
/// <returns>The parsed dictionary, or null when the reader is not positioned
/// on a property name.</returns>
private IDictionary<string, IAggregation> GetNestedAggregations(JsonReader reader, JsonSerializer serializer)
{
    if (reader.TokenType != JsonToken.PropertyName) return null;

    var nested = new Dictionary<string, IAggregation>();
    var startDepth = reader.Depth;
    while (true)
    {
        var name = reader.Value as string;
        reader.Read();
        nested.Add(name, this.ReadAggregation(reader, serializer));
        reader.Read();

        // Stop when the object at our depth closes, or we've popped above it
        // (explicit parens preserve the original &&-over-|| precedence).
        var closedHere = (reader.Depth == startDepth && reader.TokenType == JsonToken.EndObject);
        if (closedHere || reader.Depth < startDepth)
            break;
    }
    return nested;
}
/// <summary>
/// Copies tokens from <paramref name="reader"/> to this writer, one token per
/// iteration, optionally descending into children until the reader pops back
/// to <paramref name="initialDepth"/>.
/// </summary>
/// <param name="reader">Positioned on the token to copy.</param>
/// <param name="initialDepth">Depth at which copying should stop.</param>
/// <param name="writeChildren">Whether to keep reading and writing child tokens.</param>
/// <param name="writeDateConstructorAsDate">When true, a "Date" constructor is
/// written as a real date value instead of a constructor token.</param>
internal void WriteToken(JsonReader reader, int initialDepth, bool writeChildren, bool writeDateConstructorAsDate)
{
    do
    {
        switch (reader.TokenType)
        {
            case JsonToken.None:
                // read to next
                break;
            case JsonToken.StartObject:
                WriteStartObject();
                break;
            case JsonToken.StartArray:
                WriteStartArray();
                break;
            case JsonToken.StartConstructor:
                string constructorName = reader.Value.ToString();
                // write a JValue date when the constructor is for a date
                if (writeDateConstructorAsDate && string.Equals(constructorName, "Date", StringComparison.Ordinal))
                    WriteConstructorDate(reader);
                else
                    WriteStartConstructor(reader.Value.ToString());
                break;
            case JsonToken.PropertyName:
                WritePropertyName(reader.Value.ToString());
                break;
            case JsonToken.Comment:
                WriteComment((reader.Value != null) ? reader.Value.ToString() : null);
                break;
            case JsonToken.Integer:
#if !(NET20 || NET35 || PORTABLE || PORTABLE40)
                // BigInteger is only available on full framework targets.
                if (reader.Value is BigInteger)
                {
                    WriteValue((BigInteger)reader.Value);
                }
                else
#endif
                {
                    WriteValue(Convert.ToInt64(reader.Value, CultureInfo.InvariantCulture));
                }
                break;
            case JsonToken.Float:
                // Preserve the boxed numeric type rather than always widening.
                object value = reader.Value;
                if (value is decimal)
                    WriteValue((decimal)value);
                else if (value is double)
                    WriteValue((double)value);
                else if (value is float)
                    WriteValue((float)value);
                else
                    WriteValue(Convert.ToDouble(value, CultureInfo.InvariantCulture));
                break;
            case JsonToken.String:
                WriteValue(reader.Value.ToString());
                break;
            case JsonToken.Boolean:
                WriteValue(Convert.ToBoolean(reader.Value, CultureInfo.InvariantCulture));
                break;
            case JsonToken.Null:
                WriteNull();
                break;
            case JsonToken.Undefined:
                WriteUndefined();
                break;
            case JsonToken.EndObject:
                WriteEndObject();
                break;
            case JsonToken.EndArray:
                WriteEndArray();
                break;
            case JsonToken.EndConstructor:
                WriteEndConstructor();
                break;
            case JsonToken.Date:
#if !NET20
                // DateTimeOffset is not available on NET20.
                if (reader.Value is DateTimeOffset)
                    WriteValue((DateTimeOffset)reader.Value);
                else
#endif
                    WriteValue(Convert.ToDateTime(reader.Value, CultureInfo.InvariantCulture));
                break;
            case JsonToken.Raw:
                WriteRawValue((reader.Value != null) ? reader.Value.ToString() : null);
                break;
            case JsonToken.Bytes:
                WriteValue((byte[])reader.Value);
                break;
            default:
                throw MiscellaneousUtils.CreateArgumentOutOfRangeException("TokenType", reader.TokenType, "Unexpected token type.");
        }
    } while (
        // stop if we have reached the end of the token being read
        initialDepth - 1 < reader.Depth - (IsEndToken(reader.TokenType) ? 1 : 0)
        && writeChildren
        && reader.Read());
}