/// <summary>
/// Reads the configured template file, merges template parameters, and compiles
/// it with the Razor service against a SolrTemplateModel. Returns the rendered
/// output, or an empty string when the template cannot be read or compiled.
/// </summary>
public string Render() {

   var readerParameters = new Dictionary<string, string>();
   var logger = new Cfg.Net.Loggers.MemoryLogger();

   // load the raw template content; the reader may contribute parameters
   _context.Debug(() => $"Reading {_template.File}");
   var templateContent = _templateReader.Read(_template.File, readerParameters, logger);

   if (logger.Errors().Any()) {
      foreach (var error in logger.Errors()) {
         _context.Error(error);
      }
      return string.Empty;
   }

   // collect parameters (other than process); reader-supplied values override template-declared ones
   var parameters = new ExpandoObject();
   var parameterDictionary = (IDictionary<string, object>)parameters;
   foreach (var parameter in _template.Parameters) {
      parameterDictionary.Add(parameter.Name, parameter.Value);
   }
   foreach (var pair in readerParameters) {
      parameterDictionary[pair.Key] = pair.Value;
   }

   try {
      _context.Debug(() => $"Compiling {_template.Name}.");
      var model = new SolrTemplateModel {
         Context = _context,
         Process = _context.Process,
         Parameters = parameters
      };
      return _service.RunCompile(templateContent, _template.Name, typeof(SolrTemplateModel), model);
   } catch (TemplateCompilationException ex) {
      _context.Error($"Error parsing template {_template.Name}.");
      _context.Error($"There are {ex.CompilerErrors.Count} errors.");
      foreach (var error in ex.CompilerErrors) {
         _context.Error(error.ErrorText);
      }
      Utility.CodeToError(_context, ex.SourceCode);
      return string.Empty;
   }
}
/// <summary>
/// Streams rows from the configured table or view. Buffers all rows first when
/// the connection requests buffering; otherwise yields as the reader advances.
/// Yields nothing (after logging) when the query fails.
/// </summary>
public IEnumerable<IRow> Read() {
   using (var cn = _cf.GetConnection()) {
      cn.Open();
      // FIX: command and reader are now disposed deterministically (they leaked before)
      using (var cmd = cn.CreateCommand()) {
         cmd.CommandTimeout = 0;
         cmd.CommandType = CommandType.Text;
         cmd.CommandText = $@" SELECT {string.Join(",", _fields.Select(f => _readFrom == ReadFrom.Output ? _cf.Enclose(f.FieldName()) : _cf.Enclose(f.Name)))} FROM {_schemaPrefix}{_cf.Enclose(_tableOrView)} {(_connection.Provider == "sqlserver" && _context.Entity.NoLock ? "WITH (NOLOCK)" : string.Empty)} {_filter};";

         _context.Debug(() => cmd.CommandText);

         IDataReader reader = null;
         try {
            reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess);
         } catch (System.Data.Common.DbException e) {
            _context.Error($"Error reading data from {_connection.Name}, {_tableOrView}.");
            _context.Error(e.Message);
         }

         if (reader == null) {
            yield break; // query failed; error already logged
         }

         using (reader) {
            if (_context.Connection.Buffer) {
               // drain the reader completely before handing rows to the caller
               var buffer = new List<IRow>();
               while (reader.Read()) {
                  _rowCount++;
                  buffer.Add(_rowCreator.Create(reader, _fields));
               }
               foreach (var row in buffer) {
                  yield return row;
               }
            } else {
               while (reader.Read()) {
                  _rowCount++;
                  yield return _rowCreator.Create(reader, _fields);
               }
            }
         }

         _context.Info("{0} from {1}", _rowCount, _connection.Name);
      }
   }
}
/// <summary>
/// Renders the connection's Razor template with a RazorModel (process, entity, rows)
/// and streams the result to the connection's file. Logs template-read errors and
/// writes nothing when the template cannot be loaded.
/// </summary>
public void Write(IEnumerable<IRow> rows) {

   var logger = new Cfg.Net.Loggers.MemoryLogger();

   _output.Debug(() => $"Loading template {_output.Connection.Template}");
   var templateContent = _templateReader.Read(_output.Connection.Template, new Dictionary<string, string>(), logger);

   if (logger.Errors().Any()) {
      foreach (var error in logger.Errors()) {
         _output.Error(error);
      }
      return;
   }

   using (var service = RazorEngineService.Create(_config)) {
      using (var file = new StreamWriter(_output.Connection.File)) {
         // render straight into the file stream; the template must set Model.Entity.Inserts
         service.RunCompile(templateContent, _output.Connection.Name, file, typeof(RazorModel), new RazorModel(_output.Process, _output.Entity, rows));
      }
   }
}
/// <summary>
/// Queries the elasticsearch mapping for every type in the index and yields one
/// Entity per type, populating fields from the type's "properties" mapping.
/// Logs (but does not throw) when mappings, types, or properties are missing.
/// </summary>
public IEnumerable<Entity> GetEntities() {
   var response = _client.IndicesGetMapping<DynamicResponse>(_index, "_all");
   if (response.Success) {
      var mappings = response.Body[_index]["mappings"] as ElasticsearchDynamicValue;
      if (mappings != null && mappings.HasValue) {
         var types = mappings.Value as IDictionary<string, object>;
         if (types != null) {
            foreach (var pair in types) {
               var e = new Entity { Name = pair.Key }.WithDefaults();
               var attributes = pair.Value as IDictionary<string, object>;
               if (attributes != null && attributes.ContainsKey("properties")) {
                  e.Fields = PropertiesToFields(pair.Key, attributes["properties"] as IDictionary<string, object>).ToList();
               } else {
                  // FIX: was passing _input (the context) as arg {0}; the message expects the index name
                  _input.Error("Could not find properties for index {0} type {1}.", _index, pair.Key);
               }
               yield return e;
            }
         } else {
            _input.Error("Could not find types in index {0}.", _index);
         }
      } else {
         _input.Error("Could not find mappings for index {0}.", _index);
      }
   } else {
      _input.Error(response.ToString());
   }
}
/// <summary>
/// Retrieves the fields for a mapped type in the elasticsearch index,
/// handling the version-7 requirement of include_type_name=true.
/// Returns an empty sequence (after logging) when the request or lookup fails.
/// </summary>
public IEnumerable<Field> GetFields(string name) {

   var version = ElasticVersionParser.ParseVersion(_input);

   // elasticsearch 7+ only returns typed mappings when include_type_name=true is requested
   var response = version.Major >= 7
      ? _client.IndicesGetMapping<DynamicResponse>(_index, name, qs => qs.AddQueryString("include_type_name", "true"))
      : _client.IndicesGetMapping<DynamicResponse>(_index, name);

   if (!response.Success) {
      _input.Error(response.ToString());
      return Enumerable.Empty<Field>();
   }

   var properties = response.Body[_index]["mappings"][name]["properties"] as ElasticsearchDynamicValue;
   if (properties == null || !properties.HasValue) {
      _input.Error("Could not find properties for index {0} type {1}.", _index, name);
      return Enumerable.Empty<Field>();
   }

   return PropertiesToFields(name, properties.Value as IDictionary<string, object>);
}
/// <summary>
/// Discovers fields by running either the supplied query or a schema-only
/// SELECT against the named table, then translating the schema table rows
/// (name, ordinal, type, key, length/precision) into Field definitions.
/// Hidden columns are skipped; duplicate names update the existing field.
/// </summary>
private IEnumerable<Field> GetFields(string name, string query, string schema) {
   var fields = new List<Field>();
   using (var cn = _cf.GetConnection()) {
      cn.Open();
      DataTable table = null;
      // FIX: command and schema reader are now disposed (they leaked before)
      using (var cmd = cn.CreateCommand()) {
         try {
            // WHERE 1=2 returns no rows; we only want the schema
            cmd.CommandText = query == string.Empty
               ? $"SELECT * FROM {(string.IsNullOrEmpty(schema) ? string.Empty : _cf.Enclose(schema) + ".")}{_cf.Enclose(name)} WHERE 1=2;"
               : query;
            using (var reader = cmd.ExecuteReader(CommandBehavior.KeyInfo | CommandBehavior.SchemaOnly)) {
               table = reader.GetSchemaTable();
            }
         } catch (DbException ex) {
            _c.Error(ex.Message);
         }
      }

      if (table == null) {
         return fields;
      }

      foreach (DataRow row in table.Rows) {
         var column = row["ColumnName"].ToString();
         var ordinal = Convert.ToInt16(row["ColumnOrdinal"]);
         var isHidden = row["IsHidden"] != DBNull.Value && Convert.ToBoolean(row["IsHidden"]);
         var dataType = row["DataType"] == DBNull.Value ? "string" : ((Type)row["DataType"]).Name.ToLower();
         var isKey = row["IsKey"] != DBNull.Value && Convert.ToBoolean(row["IsKey"]);
         var field = fields.FirstOrDefault(f => f.Name.Equals(column, StringComparison.OrdinalIgnoreCase));
         if (!isHidden) {
            if (field == null) {
               field = new Field { Name = column, Alias = column, Ordinal = ordinal, Type = dataType, PrimaryKey = isKey };
               AddLengthAndPrecision(field, row);
               fields.Add(field);
            } else {
               // same column reported twice: refresh the type and length/precision
               field.Type = dataType;
               AddLengthAndPrecision(field, row);
            }
         }
      }
   }
   return fields;
}
/// <summary>
/// Renders the connection's template with RazorEngineCore and writes the output
/// to the connection's file. Compilation and aggregate errors are logged (with
/// braces escaped for the formatted logger) rather than thrown.
/// </summary>
public void Write(IEnumerable<IRow> rows) {
   var l = new Cfg.Net.Loggers.MemoryLogger();
   _output.Debug(() => $"Loading template {_output.Connection.Template}");
   var template = _templateReader.Read(_output.Connection.Template, new Dictionary<string, string>(), l);
   // strip any @model directive; the model type is supplied below via RazorEngineTemplateBase<RazorModel>
   template = Regex.Replace(template, "^@model .+$", string.Empty, RegexOptions.Multiline);
   if (l.Errors().Any()) {
      foreach (var error in l.Errors()) {
         _output.Error(error);
      }
   } else {
      var engine = new RazorEngine();
      IRazorEngineCompiledTemplate<RazorEngineTemplateBase<RazorModel>> compiledTemplate;
      try {
         compiledTemplate = engine.Compile<RazorEngineTemplateBase<RazorModel>>(template, builder => {
            builder.AddAssemblyReference(typeof(Configuration.Process));
            builder.AddAssemblyReference(typeof(Cfg.Net.CfgNode));
            builder.AddAssemblyReferenceByName("System.Collections");
         });
         // doesn't appear to be a way to stream output yet (in this library), so will just write to string and then file
         var output = compiledTemplate.Run(instance => {
            instance.Model = new RazorModel() {
               Process = _output.Process,
               Entity = _output.Entity,
               Rows = rows
            };
         });
         File.WriteAllText(_output.Connection.File, output);
      } catch (RazorEngineCompilationException ex) {
         // report each compiler error with its template location
         foreach (var error in ex.Errors) {
            var line = error.Location.GetLineSpan();
            _output.Error($"C# error on line {line.StartLinePosition.Line}, column {line.StartLinePosition.Character}.");
            _output.Error(error.GetMessage());
         }
         // braces are doubled so the formatted logger doesn't treat them as placeholders
         _output.Error(ex.Message.Replace("{", "{{").Replace("}", "}}"));
         Utility.CodeToError(_output, template);
      } catch (System.AggregateException ex) {
         _output.Error(ex.Message.Replace("{", "{{").Replace("}", "}}"));
         foreach (var error in ex.InnerExceptions) {
            _output.Error(error.Message.Replace("{", "{{").Replace("}", "}}"));
         }
         Utility.CodeToError(_output, template);
      }
      // the template must set Model.Entity.Inserts
   }
}
/// <summary>
/// Matches incoming rows against the database inside one transaction: creates a
/// temp key table, bulk-inserts the input keys (tagged with TflIndex), joins via
/// SqlQuery() to fill the batch keyed by that index, then drops the temp table.
/// On any failure the transaction is rolled back and an empty/partial batch returned.
/// </summary>
public Batch Read(IEnumerable<IRow> input) {
   var batch = new Batch();
   using (var cn = _cf.GetConnection()) {
      cn.Open();
      _context.Debug(() => "begin transaction");
      // FIX: transaction is now disposed; an undisposed transaction can pin locks until GC
      using (var trans = cn.BeginTransaction()) {
         try {
            var createSql = SqlCreateKeysTable(_tempTable);
            cn.Execute(createSql, null, trans);

            // project each input row to its key object, tagging it with its ordinal position
            var index = 0;
            var keys = new List<ExpandoObject>();
            foreach (var row in input) {
               var obj = row.ToExpandoObject(_keys);
               ((IDictionary<string, object>)obj)["TflIndex"] = index;
               keys.Add(obj);
               ++index;
            }

            var insertSql = SqlInsertTemplate(_context, _tempTable, _keys);
            cn.Execute(insertSql, keys, trans, 0, System.Data.CommandType.Text);

            // TflIndex comes back as the column after the selected fields
            var i = _fields.Length;
            using (var reader = cn.ExecuteReader(SqlQuery(), null, trans, 0, System.Data.CommandType.Text)) {
               while (reader.Read()) {
                  batch[reader.GetInt32(i)] = _rowCreator.Create(reader, _fields);
               }
            }

            var sqlDrop = SqlDrop(_tempTable);
            cn.Execute(sqlDrop, null, trans);

            _context.Debug(() => "commit transaction");
            trans.Commit();
         } catch (Exception ex) {
            _context.Error(ex.Message);
            _context.Warn("rollback transaction");
            trans.Rollback();
         }
      }
   }
   return batch;
}
/// <summary>
/// Retrieves the fields for a mapped type in the elasticsearch index.
/// Returns an empty sequence (after logging) when the request fails or
/// the type has no "properties" mapping.
/// </summary>
public IEnumerable<Field> GetFields(string name) {

   var response = _client.IndicesGetMapping<DynamicResponse>(_index, name);

   if (!response.Success) {
      _input.Error(response.ToString());
      return Enumerable.Empty<Field>();
   }

   var properties = response.Body[_index]["mappings"][name]["properties"] as ElasticsearchDynamicValue;
   if (properties == null || !properties.HasValue) {
      _input.Error("Could not find properties for index {0} type {1}.", _index, name);
      return Enumerable.Empty<Field>();
   }

   return PropertiesToFields(name, properties.Value as IDictionary<string, object>);
}
/// <summary>
/// Reads rows matching the input rows' keys inside one transaction: creates a
/// temp key table, bulk-inserts the input keys, runs SqlQuery() joined against
/// it, and drops the temp table. On failure the transaction is rolled back and
/// whatever was collected so far is returned.
/// </summary>
public IEnumerable<IRow> Read(IEnumerable<IRow> input) {
   var results = new List<IRow>();
   using (var cn = _cf.GetConnection()) {
      cn.Open();
      _context.Debug(() => "begin transaction");
      // FIX: transaction is now disposed; an undisposed transaction can pin locks until GC
      using (var trans = cn.BeginTransaction()) {
         try {
            var createSql = SqlCreateKeysTable(_tempTable);
            cn.Execute(createSql, null, trans);

            // lazily projected; enumerated by the Execute below
            var keys = input.Select(r => r.ToExpandoObject(_keys));
            var insertSql = SqlInsertTemplate(_context, _tempTable, _keys);
            cn.Execute(insertSql, keys, trans, 0, System.Data.CommandType.Text);

            using (var reader = cn.ExecuteReader(SqlQuery(), null, trans, 0, System.Data.CommandType.Text)) {
               while (reader.Read()) {
                  var row = _rowCreator.Create(reader, _fields);
                  results.Add(row);
               }
            }

            var sqlDrop = SqlDrop(_tempTable, _cf);
            cn.Execute(sqlDrop, null, trans);

            _context.Debug(() => "commit transaction");
            trans.Commit();
         } catch (Exception ex) {
            _context.Error(ex.Message);
            _context.Warn("rollback transaction");
            trans.Rollback();
         }
      }
   }
   return results;
}
/// <summary>
/// Renders the connection's NVelocity template with a VelocityModel
/// (process, entity, rows) straight into the connection's file.
/// Logs template-read errors and writes nothing when loading fails.
/// </summary>
public void Write(IEnumerable<IRow> rows) {

   var logger = new Cfg.Net.Loggers.MemoryLogger();

   _output.Debug(() => $"Loading template {_output.Connection.Template}");
   var template = _templateReader.Read(_output.Connection.Template, new Dictionary<string, string>(), logger);

   if (logger.Errors().Any()) {
      foreach (var error in logger.Errors()) {
         _output.Error(error);
      }
      return;
   }

   var context = new VelocityContext();
   context.Put("Model", new VelocityModel(_output.Process, _output.Entity, rows));

   using (var file = new StreamWriter(_output.Connection.File)) {
      NVelocity.App.Velocity.Evaluate(context, file, RuntimeConstants.RUNTIME_LOG_LOGSYSTEM, template);
   }
}
/// <summary>
/// Inspects a delimited file and builds a serialized read-only "FileInspector"
/// process describing it: detects the delimiter, derives field names from the
/// first line (generating names when headers are invalid, and excel-style names
/// for blank headers), and reports any load warnings/errors through the context.
/// </summary>
public string Create() {

   var identifier = Utility.Identifier(_fileInfo.Name.Replace(_fileInfo.Extension, string.Empty));
   var quoted = _fileInfo.Extension.ToLower() == ".csv";
   var lines = new FileLineReader(_fileInfo, _lines).Read().ToArray();

   // candidate delimiters: configured ones, or a single fallback from the connection
   var candidates = _context.Connection.Delimiters.Any()
      ? _context.Connection.Delimiters
      : new List<Delimiter> {
         new Delimiter {
            Character = _context.Connection.Delimiter.Length == 0 ? ',' : _context.Connection.Delimiter[0],
            Name = "Delimiter"
         }
      };
   var delimiter = Utility.FindDelimiter(lines, candidates, quoted);

   var headers = lines.First()
      .SplitLine(delimiter, quoted)
      .Select(h => h.Trim('"'))
      .Select(h => h.Trim())
      .ToArray();

   // substitute blank headers with excel column names (useful when some headers are blank)
   for (var i = 0; i < headers.Length; i++) {
      if (headers[i] == string.Empty) {
         headers[i] = Utility.GetExcelName(i);
      }
   }

   var hasColumnNames = ColumnNames.AreValid(_context, headers);
   var fieldNames = hasColumnNames ? headers : ColumnNames.Generate(headers.Length).ToArray();

   var connection = new Connection {
      Name = "input",
      Provider = "file",
      File = _fileInfo.FullName,
      Delimiter = delimiter == default(char) ? "," : delimiter.ToString(),
      Start = hasColumnNames ? 2 : 1, // skip the header row when it held names
      Types = _context.Connection.Types
   };

   var process = new Process {
      Name = "FileInspector",
      ReadOnly = true,
      Connections = new List<Connection> { connection }
   };

   process.Entities.Add(new Entity {
      Name = identifier,
      Input = "input",
      PrependProcessNameToOutputName = false,
      Sample = Convert.ToInt32(_context.Connection.Sample)
   });

   foreach (var name in fieldNames) {
      process.Entities[0].Fields.Add(new Field {
         Name = name,
         Alias = Constants.InvalidFieldNames.Contains(name) ? identifier + name : name,
         Length = "max"
      });
   }

   process.Load();

   foreach (var warning in process.Warnings()) {
      _context.Warn(warning);
   }
   foreach (var error in process.Errors()) {
      _context.Error(error);
   }

   return process.Serialize();
}
/// <summary>
/// Creates an IRow from the current record of the reader. On the FIRST call it
/// also caches per-field state (_fieldCount, _conversions, _errors) by comparing
/// each reader column's CLR type with the field's declared type; subsequent calls
/// reuse that cache on a fast path without the FormatException guard.
/// </summary>
public IRow Create(IDataReader reader, Field[] fields) {
   var row = _rowFactory.Create();
   if (_fieldCount == 0) {
      // first call: build the conversion cache
      _fieldCount = Math.Min(reader.FieldCount, fields.Length);
      _conversions = new List<Func<object, object>>(_fieldCount);
      for (var i = 0; i < _fieldCount; i++) {
         _conversions.Add(null);
      }
      _errors = new bool[fields.Length];
      for (var i = 0; i < _fieldCount; i++) {
         var inputType = reader.GetFieldType(i);
         // a "mismatch" means the provider's CLR type differs from the field's declared type
         _errors[i] = inputType != _typeMap[fields[i].Type];
         if (_errors[i]) {
            if (fields[i].Transforms.Any() && fields[i].Transforms.First().Method == "convert") {
               _conversions[i] = o => o; // the user has set a conversion
            } else {
               _conversions[i] = fields[i].Convert;
               _context.Warn("Type mismatch for {0}. Expected {1}, but read {2}. Change type or add conversion.", fields[i].Name, fields[i].Type, inputType);
            }
         } else {
            _conversions[i] = o => o;
         }
      }
      // first row: convert defensively, reporting values that cannot be parsed
      for (var i = 0; i < _fieldCount; i++) {
         if (reader.IsDBNull(i)) {
            continue;
         }
         if (_errors[i]) {
            var value = reader.GetValue(i);
            try {
               row[fields[i]] = fields[i].Type == "object" ? value : _conversions[i](value);
            } catch (FormatException) {
               _context.Error($"Could not convert value {value} in field {fields[i].Alias} to {fields[i].Type}");
            }
         } else {
            row[fields[i]] = reader.GetValue(i);
         }
      }
   } else {
      // fast path: cache already built; no FormatException guard here
      for (var i = 0; i < _fieldCount; i++) {
         if (reader.IsDBNull(i)) {
            continue;
         }
         if (_errors[i]) {
            row[fields[i]] = fields[i].Type == "object" ? reader.GetValue(i) : _conversions[i](reader.GetValue(i));
         } else {
            row[fields[i]] = reader.GetValue(i);
         }
      }
   }
   return (row);
}
/// <summary>
/// Streams rows from elasticsearch. Page requests query directly; otherwise a
/// pre-query discovers the total hit count, and when the result set exceeds the
/// 10000-document window the scroll API is used to page through the remainder.
/// Facet filters with maps also get their map items populated from aggregations.
/// </summary>
public IEnumerable<IRow> Read() {
   ElasticsearchResponse<DynamicResponse> response;
   ElasticsearchDynamicValue hits;
   var from = 1;
   var size = 10;
   string body;
   if (_context.Entity.IsPageRequest()) {
      from = (_context.Entity.Page * _context.Entity.PageSize) - _context.Entity.PageSize;
      body = WriteQuery(_fields, _readFrom, _context, from, _context.Entity.PageSize);
   } else {
      // pre-query with size 0 to learn the total, then re-write the query for that size (capped at 10000)
      body = WriteQuery(_fields, _readFrom, _context, 0, 0);
      response = _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
      if (response.Success) {
         hits = response.Body["hits"] as ElasticsearchDynamicValue;
         if (hits != null && hits.HasValue) {
            var properties = hits.Value as IDictionary<string, object>;
            if (properties != null && properties.ContainsKey("total")) {
               size = Convert.ToInt32(properties["total"]);
               body = WriteQuery(_fields, _readFrom, _context, 0, size > 10000 ? 10000 : size);
            }
         }
      }
   }
   _context.Debug(() => body);
   _context.Entity.Query = body;
   // move 10000 to configurable limit; beyond that window we must scroll
   response = from + size > 10000 ?
      _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body, p => p.Scroll(TimeSpan.FromMinutes(1.0))) :
      _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
   if (!response.Success) {
      LogError(response);
      yield break;
   }
   _context.Entity.Hits = Convert.ToInt32((response.Body["hits"]["total"] as ElasticsearchDynamicValue).Value);
   hits = response.Body["hits"]["hits"] as ElasticsearchDynamicValue;
   if (hits == null || !hits.HasValue) {
      _context.Warn("No hits from elasticsearch");
      yield break;
   }
   var docs = hits.Value as IList<object>;
   if (docs == null) {
      _context.Error("No documents returned from elasticsearch!");
      yield break;
   }
   // if any of the fields do not exist, yield break
   if (docs.Count > 0) {
      var doc = docs.First() as IDictionary<string, object>;
      var source = doc?["_source"] as IDictionary<string, object>;
      if (source == null) {
         _context.Error("Missing _source from elasticsearch response!");
         yield break;
      }
      for (var i = 0; i < _fields.Length; i++) {
         if (source.ContainsKey(_fieldNames[i])) {
            continue;
         }
         _context.Error($"Field {_fieldNames[i]} does not exist!");
         yield break;
      }
   }
   // yield the first page of documents
   var count = 0;
   foreach (var d in docs) {
      var doc = (IDictionary<string, object>)d;
      var row = _rowFactory.Create();
      var source = (IDictionary<string, object>)doc["_source"];
      for (var i = 0; i < _fields.Length; i++) {
         row[_fields[i]] = _fields[i].Convert(source[_fieldNames[i]]);
      }
      _context.Increment();
      yield return (row);
   }
   count += docs.Count;
   // get this from first search response (maybe), unless you have to aggregate it from all...
   // populate map items from facet aggregations
   foreach (var filter in _context.Entity.Filter.Where(f => f.Type == "facet" && !string.IsNullOrEmpty(f.Map))) {
      var map = _context.Process.Maps.First(m => m.Name == filter.Map);
      var buckets = response.Body["aggregations"][filter.Key]["buckets"] as ElasticsearchDynamicValue;
      if (buckets == null || !buckets.HasValue) {
         continue;
      }
      var items = buckets.Value as IEnumerable<object>;
      if (items == null) {
         continue;
      }
      foreach (var item in items.OfType<IDictionary<string, object>>()) {
         map.Items.Add(new MapItem { From = $"{item["key"]} ({item["doc_count"]})", To = item["key"] });
      }
   }
   if (!response.Body.ContainsKey("_scroll_id")) {
      yield break;
   }
   if (size == count) {
      // everything arrived in the first page; release the scroll context
      _client.ClearScroll<DynamicResponse>(new PostData<object>(new { scroll_id = response.Body["_scroll_id"].Value }));
      yield break;
   }
   // keep scrolling (1 minute keep-alive) until we've read `size` documents or a request fails
   var scrolls = new HashSet<string>();
   do {
      var scrollId = response.Body["_scroll_id"].Value;
      scrolls.Add(scrollId);
      response = _client.Scroll<DynamicResponse>(new PostData<object>(new { scroll = "1m", scroll_id = scrollId }));
      if (response.Success) {
         docs = (IList<object>)response.Body["hits"]["hits"].Value;
         foreach (var d in docs) {
            var doc = (IDictionary<string, object>)d;
            var row = _rowFactory.Create();
            var source = (IDictionary<string, object>)doc["_source"];
            for (var i = 0; i < _fields.Length; i++) {
               row[_fields[i]] = _fields[i].Convert(source[_fieldNames[i]]);
            }
            _context.Increment();
            yield return (row);
         }
         count += docs.Count;
      } else {
         LogError(response);
      }
   } while (response.Success && count < size);
   // release all scroll contexts we accumulated
   _client.ClearScroll<DynamicResponse>(new PostData<object>(new { scroll_id = scrolls.ToArray() }));
}
/// <summary>
/// Streams rows from elasticsearch without scrolling. Page requests query
/// directly; otherwise a size-0 pre-query discovers the total and the query is
/// re-written to fetch total+1 documents in one request. Facet filters with
/// maps also get their map items populated from the aggregations.
/// </summary>
public IEnumerable<IRow> Read() {
   ElasticsearchResponse<DynamicResponse> response;
   string body;
   if (_context.Entity.IsPageRequest()) {
      var from = (_context.Entity.Page * _context.Entity.PageSize) - _context.Entity.PageSize;
      body = WriteQuery(_fields, _readFrom, _context, from, _context.Entity.PageSize);
   } else {
      // pre-query with size 0 to learn the total, then ask for all of it (+1)
      body = WriteQuery(_fields, _readFrom, _context, 0, 0);
      response = _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
      if (response.Success) {
         var hits = response.Body["hits"] as ElasticsearchDynamicValue;
         if (hits != null && hits.HasValue) {
            var properties = hits.Value as IDictionary<string, object>;
            if (properties != null && properties.ContainsKey("total")) {
               var size = Convert.ToInt32(properties["total"]) + 1;
               body = WriteQuery(_fields, _readFrom, _context, 0, size);
            }
         }
      }
   }
   _context.Debug(() => body);
   _context.Entity.Query = body;
   response = _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
   if (response.Success) {
      _context.Entity.Hits = Convert.ToInt32((response.Body["hits"]["total"] as ElasticsearchDynamicValue).Value);
      var hits = response.Body["hits"]["hits"] as ElasticsearchDynamicValue;
      if (hits != null && hits.HasValue) {
         var docs = hits.Value as IEnumerable<object>;
         if (docs != null) {
            foreach (var doc in docs.OfType<IDictionary<string, object>>()) {
               var row = _rowFactory.Create();
               // documents missing _source still yield an (empty) row
               if (doc != null && doc.ContainsKey("_source")) {
                  var source = doc["_source"] as IDictionary<string, object>;
                  if (source != null) {
                     for (var i = 0; i < _fields.Length; i++) {
                        var field = _fields[i];
                        row[field] = field.Convert(source[_fieldNames[i]]);
                     }
                  }
               }
               _context.Increment();
               yield return (row);
            }
         }
      }
      // populate map items from facet aggregations
      foreach (var filter in _context.Entity.Filter.Where(f => f.Type == "facet" && !string.IsNullOrEmpty(f.Map))) {
         var map = _context.Process.Maps.First(m => m.Name == filter.Map);
         var buckets = response.Body["aggregations"][filter.Key]["buckets"] as ElasticsearchDynamicValue;
         if (buckets == null || !buckets.HasValue) {
            continue;
         }
         var items = buckets.Value as IEnumerable<object>;
         if (items == null) {
            continue;
         }
         foreach (var item in items.OfType<IDictionary<string, object>>()) {
            map.Items.Add(new MapItem { From = $"{item["key"]} ({item["doc_count"]})", To = item["key"] }.WithDefaults());
         }
      }
   } else {
      _context.Error(response.DebugInformation);
   }
}
/// <summary>
/// Streams rows from elasticsearch, version-aware (v7 changed hits.total from a
/// number to an object with "value"/"relation"). Page requests query directly;
/// otherwise a size-0 pre-query discovers the total and the scroll API pages
/// through results using the connection's scroll keep-alive. Facet filters with
/// maps also get their map items populated from aggregations.
/// </summary>
public IEnumerable<IRow> Read() {
   ElasticsearchResponse<DynamicResponse> response;
   ElasticsearchDynamicValue hits;
   var from = 0;
   var size = 10;
   string body;
   bool warned = false;
   // scroll only when the caller didn't request a specific page
   var scroll = !_context.Entity.IsPageRequest();
   if (!scroll) {
      from = (_context.Entity.Page * _context.Entity.Size) - _context.Entity.Size;
      body = WriteQuery(_fields, _readFrom, _context, scroll: false, from: from, size: _context.Entity.Size);
   } else {
      // pre-query with size 0 to learn the total, then re-write the query for scrolling
      body = WriteQuery(_fields, _readFrom, _context, scroll: false, from: 0, size: 0);
      response = _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
      if (response.Success) {
         hits = response.Body["hits"] as ElasticsearchDynamicValue;
         if (hits != null && hits.HasValue) {
            var total = hits["total"];
            try {
               if (_version.Major >= 7) // version 7 changed total to an object with "value" and "relation" properties
               {
                  size = Convert.ToInt32(total["value"].Value);
               } else {
                  size = Convert.ToInt32(total.Value);
               }
            } catch (Exception ex) {
               warned = true;
               _context.Debug(() => total);
               _context.Warn($"Could not get total number of matching documents from the elasticsearch response. Are you sure you using version {_version}?");
               _context.Error(ex, ex.Message);
            }
            body = WriteQuery(_fields, _readFrom, _context, scroll: true, from: 0, size: size > ElasticsearchDefaultSizeLimit ? DefaultSize : size);
         }
      }
   }
   _context.Debug(() => body);
   _context.Entity.Query = body;
   response = scroll ?
      _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body, p => p.AddQueryString("scroll", _context.Connection.Scroll)) :
      _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
   if (!response.Success) {
      LogError(response);
      yield break;
   }
   try {
      if (_version.Major >= 7) // version 7 changed total to an object with "value" and "relation" properties
      {
         _context.Entity.Hits = Convert.ToInt32(response.Body["hits"]["total"]["value"].Value);
      } else {
         _context.Entity.Hits = Convert.ToInt32(response.Body["hits"]["total"].Value);
      }
   } catch (Exception ex) {
      // only warn once; the pre-query may have already reported this
      if (!warned) {
         _context.Debug(() => response.Body["hits"]);
         _context.Warn($"Could not get total number of matching documents from the elasticsearch response. Are you sure you using version {_version}?");
         _context.Error(ex.Message);
      }
   }
   hits = response.Body["hits"]["hits"] as ElasticsearchDynamicValue;
   if (hits == null || !hits.HasValue) {
      _context.Warn("No hits from elasticsearch");
      yield break;
   }
   var docs = hits.Value as IList<object>;
   if (docs == null) {
      _context.Error("No documents returned from elasticsearch!");
      yield break;
   }
   // if any of the fields do not exist, yield break
   if (docs.Count > 0) {
      var doc = docs.First() as IDictionary<string, object>;
      var source = doc?["_source"] as IDictionary<string, object>;
      if (source == null) {
         _context.Error("Missing _source from elasticsearch response!");
         yield break;
      }
      for (var i = 0; i < _fields.Length; i++) {
         if (source.ContainsKey(_fieldNames[i])) {
            continue;
         }
         _context.Error($"Field {_fieldNames[i]} does not exist!");
         yield break;
      }
   }
   // yield the first page of documents
   var count = 0;
   foreach (var d in docs) {
      var doc = (IDictionary<string, object>)d;
      var row = _rowFactory.Create();
      var source = (IDictionary<string, object>)doc["_source"];
      for (var i = 0; i < _fields.Length; i++) {
         row[_fields[i]] = _fields[i].Convert(source[_fieldNames[i]]);
      }
      yield return (row);
   }
   count += docs.Count;
   // get this from first search response (maybe), unless you have to aggregate it from all...
   // populate map items from facet aggregations
   foreach (var filter in _context.Entity.Filter.Where(f => f.Type == "facet" && !string.IsNullOrEmpty(f.Map))) {
      var map = _context.Process.Maps.First(m => m.Name == filter.Map);
      var buckets = response.Body["aggregations"][filter.Key]["buckets"] as ElasticsearchDynamicValue;
      if (buckets == null || !buckets.HasValue) {
         continue;
      }
      var items = buckets.Value as IEnumerable<object>;
      if (items == null) {
         continue;
      }
      foreach (var item in items.OfType<IDictionary<string, object>>()) {
         map.Items.Add(new MapItem { From = $"{item["key"]} ({item["doc_count"]})", To = item["key"] });
      }
   }
   if (!response.Body.ContainsKey("_scroll_id")) {
      yield break;
   }
   if (size == count) {
      // everything arrived in the first page; release the scroll context
      _client.ClearScroll<DynamicResponse>(new PostData<object>(new { scroll_id = response.Body["_scroll_id"].Value }));
      yield break;
   }
   // keep scrolling until we've read `size` documents or a request fails
   var scrolls = new HashSet<string>();
   do {
      var scrollId = response.Body["_scroll_id"].Value;
      scrolls.Add(scrollId);
      response = _client.Scroll<DynamicResponse>(new PostData<object>(new { scroll = _context.Connection.Scroll, scroll_id = scrollId }));
      if (response.Success) {
         docs = (IList<object>)response.Body["hits"]["hits"].Value;
         foreach (var d in docs) {
            var doc = (IDictionary<string, object>)d;
            var row = _rowFactory.Create();
            var source = (IDictionary<string, object>)doc["_source"];
            for (var i = 0; i < _fields.Length; i++) {
               row[_fields[i]] = _fields[i].Convert(source[_fieldNames[i]]);
            }
            yield return (row);
         }
         count += docs.Count;
      } else {
         LogError(response);
      }
   } while (response.Success && count < size);
   // release all scroll contexts we accumulated
   _client.ClearScroll<DynamicResponse>(new PostData<object>(new { scroll_id = scrolls.ToArray() }));
}