public FolderReader(IConnectionContext input, IRowFactory rowFactory) {
   var readers = new List<IRead>();
   var searchOption = (SearchOption)Enum.Parse(typeof(SearchOption), input.Connection.SearchOption, true);

   input.Debug(() => $"Searching folder: {input.Connection.Folder}");
   var files = new DirectoryInfo(input.Connection.Folder).GetFiles(input.Connection.SearchPattern, searchOption).OrderBy(f => f.CreationTime).ToArray();
   input.Debug(() => $"Found {files.Length} files.");

   foreach (var file in files) {
      input.Debug(() => $"Found file: {file.Name}");
      var context = new PipelineContext(input.Logger, input.Process, input.Entity, input.Field, input.Transform);
      var fileConnection = input.Connection.Clone();
      fileConnection.Provider = "file";
      fileConnection.File = file.FullName;
      var fileInput = new InputContext(context, new Incrementer(context)) { Connection = fileConnection };
      if (file.Extension.ToLower().Contains("xls")) {
         readers.Add(new ExcelReader(fileInput, rowFactory));
      } else {
         readers.Add(new DelimitedFileReader(fileInput, rowFactory));
      }
   }
   _reader = new CompositeReader(readers);
}
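// The CompositeReader assigned above is not shown in this listing. Below is a minimal,
// self-contained sketch (an assumption, not the project's actual implementation) of the
// composite pattern it presumably follows: drain each child reader in order and stream the
// rows out as one sequence. The interface and row type are illustrative stand-ins.
using System.Collections.Generic;
using System.Linq;

public interface IRowSource {
   IEnumerable<IDictionary<string, object>> Read();
}

public sealed class CompositeRowSource : IRowSource {
   private readonly IRowSource[] _sources;

   public CompositeRowSource(IEnumerable<IRowSource> sources) {
      _sources = sources.ToArray();
   }

   // lazily concatenates the child sequences; nothing is read until the caller enumerates
   public IEnumerable<IDictionary<string, object>> Read() {
      return _sources.SelectMany(s => s.Read());
   }
}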
private string SqlDrop(string tempTable) {
   var sql = $"DROP TABLE {_cf.Enclose(tempTable)}";
   _context.Debug(() => sql);
   return sql;
}

string SqlDrop(string tempTable, IConnectionFactory cf) {
   var sql = $"DROP TABLE {cf.Enclose(tempTable)}";
   _context.Debug(() => sql);
   return sql;
}
public string Render() {

   var p = new Dictionary<string, string>();
   var l = new Cfg.Net.Loggers.MemoryLogger();

   // get template
   _context.Debug(() => $"Reading {_template.File}");
   var templateContent = _templateReader.Read(_template.File, p, l);
   if (l.Errors().Any()) {
      foreach (var error in l.Errors()) {
         _context.Error(error);
      }
      return string.Empty;
   }

   // get parameters (other than process)
   var parameters = new ExpandoObject();
   foreach (var parameter in _template.Parameters) {
      ((IDictionary<string, object>)parameters).Add(parameter.Name, parameter.Value);
   }
   if (p.Any()) {
      foreach (var parameter in p) {
         ((IDictionary<string, object>)parameters)[parameter.Key] = parameter.Value;
      }
   }

   try {
      _context.Debug(() => $"Compiling {_template.Name}.");
      return _service.RunCompile(templateContent, _template.Name, typeof(SolrTemplateModel), new SolrTemplateModel {
         Context = _context,
         Process = _context.Process,
         Parameters = parameters
      });
   } catch (TemplateCompilationException ex) {
      _context.Error($"Error parsing template {_template.Name}.");
      _context.Error($"There are {ex.CompilerErrors.Count} errors.");
      foreach (var error in ex.CompilerErrors) {
         _context.Error(error.ErrorText);
      }
      Utility.CodeToError(_context, ex.SourceCode);
      return string.Empty;
   }
}
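// Render() above merges template parameters by treating the same ExpandoObject both as a
// dynamic model and as an IDictionary<string, object>. A small self-contained sketch of that
// technique; the parameter names here are made up for illustration.
using System;
using System.Collections.Generic;
using System.Dynamic;

public static class ExpandoMergeDemo {
   public static void Main() {
      dynamic parameters = new ExpandoObject();
      var bag = (IDictionary<string, object>)parameters;

      bag.Add("core", "orders");        // value declared on the template
      bag["core"] = "orders-2024";      // values gathered while reading the template win, as in Render()

      Console.WriteLine(parameters.core); // prints: orders-2024
   }
}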
static string SqlDrop(IConnectionContext context, string tempTable) {
   var sql = string.Format("DROP TABLE #{0};", tempTable);
   context.Debug(sql);
   return sql;
}

static string SqlDrop(IConnectionContext context, string tempTable) {
   var sql = $"DROP TABLE #{tempTable};";
   context.Debug(() => sql);
   return sql;
}
public void Write(IEnumerable<IRow> rows) {
   var l = new Cfg.Net.Loggers.MemoryLogger();
   _output.Debug(() => $"Loading template {_output.Connection.Template}");
   var template = _templateReader.Read(_output.Connection.Template, new Dictionary<string, string>(), l);

   if (l.Errors().Any()) {
      foreach (var error in l.Errors()) {
         _output.Error(error);
      }
   } else {
      using (var service = RazorEngineService.Create(_config)) {
         //File.WriteAllText(_output.Connection.File, service.RunCompile(template, _output.Connection.Name, typeof(RazorModel), new RazorModel(_output.Process, _output.Entity, rows)));
         using (var file = new StreamWriter(_output.Connection.File)) {
            service.RunCompile(template, _output.Connection.Name, file, typeof(RazorModel), new RazorModel(_output.Process, _output.Entity, rows));
         }
         // the template must set Model.Entity.Inserts
      }
   }
}
string SqlInsertTemplate(IConnectionContext context, string tempTable) {
   var sql = $"INSERT #{tempTable} VALUES ({string.Join(",", _keys.Select(k => "@" + k.FieldName()))});";
   context.Debug(() => sql);
   return sql;
}

static string SqlInsertTemplate(IConnectionContext context, string tempTable, Field[] keys) {
   var sql = string.Format("INSERT #{0} VALUES ({1});", tempTable, string.Join(",", keys.Select(k => "@" + k.FieldName())));
   context.Debug(sql);
   return sql;
}

static string SqlCreateKeysTable(IConnectionContext context, IConnectionFactory cf, string tempTable) {
   var columnsAndDefinitions = string.Join(",", context.Entity.GetPrimaryKey().Select(f => cf.Enclose(f.FieldName()) + " " + cf.SqlDataType(f) + " NOT NULL"));
   var sql = $@"CREATE TABLE #{tempTable}({columnsAndDefinitions})";
   context.Debug(() => sql);
   return sql;
}

static string SqlQuery(Field[] keys, IConnectionContext context, string tempTable, Field hashCode) {
   var names = string.Join(",", keys.Select(f => "k.[" + f.FieldName() + "]"));
   var table = context.Entity.OutputTableName(context.Process.Name);
   var joins = string.Join(" AND ", keys.Select(f => "o.[" + f.FieldName() + "] = k.[" + f.FieldName() + "]"));
   var sql = string.Format("SELECT {0},o.[{1}] FROM #{2} k WITH (NOLOCK) INNER JOIN [{3}] o WITH (NOLOCK) ON ({4})", names, hashCode.FieldName(), tempTable, table, joins);
   context.Debug(sql);
   return sql;
}

static string SqlCreateKeysTable(IConnectionContext context, string tempTable) {
   var columnsAndDefinitions = string.Join(",", context.Entity.GetPrimaryKey().Select(f => "[" + f.FieldName() + "] " + f.SqlDataType() + " NOT NULL"));
   var sql = string.Format(@"CREATE TABLE #{0}({1})", tempTable, columnsAndDefinitions);
   context.Debug(sql);
   return sql;
}
public void Write(IEnumerable<IRow> rows) {

   var l = new Cfg.Net.Loggers.MemoryLogger();
   _output.Debug(() => $"Loading template {_output.Connection.Template}");
   var template = _templateReader.Read(_output.Connection.Template, new Dictionary<string, string>(), l);
   template = Regex.Replace(template, "^@model .+$", string.Empty, RegexOptions.Multiline);

   if (l.Errors().Any()) {
      foreach (var error in l.Errors()) {
         _output.Error(error);
      }
   } else {
      var engine = new RazorEngine();
      IRazorEngineCompiledTemplate<RazorEngineTemplateBase<RazorModel>> compiledTemplate;
      try {
         compiledTemplate = engine.Compile<RazorEngineTemplateBase<RazorModel>>(template, builder => {
            builder.AddAssemblyReference(typeof(Configuration.Process));
            builder.AddAssemblyReference(typeof(Cfg.Net.CfgNode));
            builder.AddAssemblyReferenceByName("System.Collections");
         });

         // doesn't appear to be a way to stream output yet (in this library), so will just write to string and then file
         var output = compiledTemplate.Run(instance => {
            instance.Model = new RazorModel() {
               Process = _output.Process,
               Entity = _output.Entity,
               Rows = rows
            };
         });

         File.WriteAllText(_output.Connection.File, output);
      } catch (RazorEngineCompilationException ex) {
         foreach (var error in ex.Errors) {
            var line = error.Location.GetLineSpan();
            _output.Error($"C# error on line {line.StartLinePosition.Line}, column {line.StartLinePosition.Character}.");
            _output.Error(error.GetMessage());
         }
         _output.Error(ex.Message.Replace("{", "{{").Replace("}", "}}"));
         Utility.CodeToError(_output, template);
      } catch (System.AggregateException ex) {
         _output.Error(ex.Message.Replace("{", "{{").Replace("}", "}}"));
         foreach (var error in ex.InnerExceptions) {
            _output.Error(error.Message.Replace("{", "{{").Replace("}", "}}"));
         }
         Utility.CodeToError(_output, template);
      }
      // the template must set Model.Entity.Inserts
   }
}
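// The Regex.Replace call above removes any "@model ..." directive so the template can be
// recompiled against RazorEngineTemplateBase<RazorModel>. A self-contained sketch of that
// substitution; the template text is invented for illustration.
using System;
using System.Text.RegularExpressions;

public static class StripModelDirectiveDemo {
   public static void Main() {
      var template = "@model MyApp.RazorModel\n<h1>@Model.Process.Name</h1>";
      // ^ and $ match per line because of RegexOptions.Multiline, so only the directive line is blanked
      var cleaned = Regex.Replace(template, "^@model .+$", string.Empty, RegexOptions.Multiline);
      Console.WriteLine(cleaned); // the directive line is now empty; the markup is untouched
   }
}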
static string SqlQuery(Field[] keys, IConnectionContext context, string tempTable, Field hashCode) {
   var names = string.Join(",", keys.Select(f => "k.[" + f.FieldName() + "]"));
   var table = context.Entity.OutputTableName(context.Process.Name);
   var joins = string.Join(" AND ", keys.Select(f => "o.[" + f.FieldName() + "] = k.[" + f.FieldName() + "]"));
   var sql = string.Format("SELECT {0},o.[{1}] FROM #{2} k WITH (NOLOCK) INNER JOIN [{3}] o WITH (NOLOCK) ON ({4})", names, hashCode.FieldName(), tempTable, table, joins);
   context.Debug(sql);
   return sql;
}
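// Taken together, the helpers above (SqlCreateKeysTable, SqlInsertTemplate, SqlQuery and
// SqlDrop) suggest a temp-table matching pattern: stage the incoming keys, join them to the
// output table to pull back stored hash codes, then drop the staging table. A self-contained
// sketch that prints the statements that sequence would produce for a hypothetical two-column
// key; the table, column and hash-code names are invented for illustration.
using System;
using System.Linq;

public static class TempTableMatchSketch {
   public static void Main() {
      var tempTable = "Keys_OrderDetail";
      var keys = new[] { "OrderId", "LineId" };
      var output = "OrderDetailStar";
      var hashCode = "TflHashCode";

      var create = $"CREATE TABLE #{tempTable}({string.Join(",", keys.Select(k => $"[{k}] INT NOT NULL"))})";
      var insert = $"INSERT #{tempTable} VALUES ({string.Join(",", keys.Select(k => "@" + k))});";
      var select = $"SELECT {string.Join(",", keys.Select(k => $"k.[{k}]"))},o.[{hashCode}] " +
                   $"FROM #{tempTable} k WITH (NOLOCK) INNER JOIN [{output}] o WITH (NOLOCK) " +
                   $"ON ({string.Join(" AND ", keys.Select(k => $"o.[{k}] = k.[{k}]"))})";
      var drop = $"DROP TABLE #{tempTable};";

      Console.WriteLine(string.Join(Environment.NewLine, create, insert, select, drop));
   }
}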
public IEnumerable<IRow> Read() {
   using (var cn = _cf.GetConnection()) {
      cn.Open();
      var cmd = cn.CreateCommand();
      cmd.CommandTimeout = 0;
      cmd.CommandType = CommandType.Text;
      cmd.CommandText = $@"
SELECT {string.Join(",", _fields.Select(f => _readFrom == ReadFrom.Output ? _cf.Enclose(f.FieldName()) : _cf.Enclose(f.Name)))}
FROM {_schemaPrefix}{_cf.Enclose(_tableOrView)} {(_connection.Provider == "sqlserver" && _context.Entity.NoLock ? "WITH (NOLOCK)" : string.Empty)}
{_filter};";

      _context.Debug(() => cmd.CommandText);

      IDataReader reader;
      try {
         reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess);
      } catch (System.Data.Common.DbException e) {
         _context.Error($"Error reading data from {_connection.Name}, {_tableOrView}.");
         _context.Error(e.Message);
         yield break;
      }

      if (_context.Connection.Buffer) {
         var buffer = new List<IRow>();
         while (reader.Read()) {
            _rowCount++;
            buffer.Add(_rowCreator.Create(reader, _fields));
         }
         foreach (var row in buffer) {
            yield return row;
         }
      } else {
         while (reader.Read()) {
            _rowCount++;
            yield return _rowCreator.Create(reader, _fields);
         }
      }

      _context.Info("{0} from {1}", _rowCount, _connection.Name);
   }
}
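// Read() above either buffers every row before yielding (Connection.Buffer = true) or streams
// rows as the data reader produces them. A small self-contained sketch of the difference; the
// source sequence stands in for the IDataReader and the names are illustrative.
using System;
using System.Collections.Generic;
using System.Linq;

public static class BufferVersusStreamDemo {
   // buffered: the whole source is materialized first, so the underlying reader/connection
   // is finished with before the caller sees the first row
   public static IEnumerable<int> Buffered(IEnumerable<int> source) {
      var buffer = source.ToList();
      foreach (var item in buffer) {
         yield return item;
      }
   }

   // streamed: each row is handed to the caller immediately, keeping the reader open
   // for as long as the caller enumerates
   public static IEnumerable<int> Streamed(IEnumerable<int> source) {
      foreach (var item in source) {
         yield return item;
      }
   }

   public static void Main() {
      Console.WriteLine(Buffered(Enumerable.Range(1, 3)).Count()); // 3
      Console.WriteLine(Streamed(Enumerable.Range(1, 3)).Count()); // 3
   }
}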
public void Write(IEnumerable<IRow> rows) {
   var l = new Cfg.Net.Loggers.MemoryLogger();
   _output.Debug(() => $"Loading template {_output.Connection.Template}");
   var template = _templateReader.Read(_output.Connection.Template, new Dictionary<string, string>(), l);

   if (l.Errors().Any()) {
      foreach (var error in l.Errors()) {
         _output.Error(error);
      }
   } else {
      var context = new VelocityContext();
      context.Put("Model", new VelocityModel(_output.Process, _output.Entity, rows));
      using (var file = new StreamWriter(_output.Connection.File)) {
         NVelocity.App.Velocity.Evaluate(context, file, RuntimeConstants.RUNTIME_LOG_LOGSYSTEM, template);
      }
   }
}
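// A hedged sketch of evaluating a small Velocity template the same way the writer above does,
// against an anonymous stand-in for VelocityModel. It assumes the NVelocity package referenced
// by the writer and that Velocity.Init() is required before Evaluate(); the model property
// names here are illustrative, inferred from the constructor arguments above.
using System;
using System.IO;
using NVelocity;
using NVelocity.App;
using NVelocity.Runtime;

public static class VelocityWriteDemo {
   public static void Main() {
      Velocity.Init();

      var context = new VelocityContext();
      context.Put("Model", new { Name = "orders", Rows = new[] { "a", "b" } });

      const string template = "#foreach($row in $Model.Rows)$Model.Name: $row\n#end";

      using (var writer = new StringWriter()) {
         Velocity.Evaluate(context, writer, RuntimeConstants.RUNTIME_LOG_LOGSYSTEM, template);
         Console.Write(writer.ToString());
      }
   }
}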
public IEnumerable<IRow> Read() {

   if (_parent.Entities.Sum(e => e.Inserts + e.Updates + e.Deletes) == 0) {
      yield break;
   }

   var batches = _parent.Entities.Select(e => e.BatchId).ToArray();
   var minBatchId = batches.Min();
   var maxBatchId = batches.Max();
   _output.Info("Batch Range: {0} to {1}.", minBatchId, maxBatchId);

   var threshold = minBatchId - 1;

   var sql = string.Empty;
   if (_cf.AdoProvider == AdoProvider.SqlCe) {
      // because SqlCe doesn't support views, re-construct the parent view's definition
      var ctx = new PipelineContext(_output.Logger, _parent);
      var master = _parent.Entities.First(e => e.IsMaster);
      var builder = new StringBuilder();
      builder.AppendLine($"SELECT {string.Join(",", _output.Entity.Fields.Where(f => f.Output).Select(f => _cf.Enclose(f.Source.Split('.')[0]) + "." + _cf.Enclose(f.Source.Split('.')[1])))}");
      foreach (var from in ctx.SqlStarFroms(_cf)) {
         builder.AppendLine(@from);
      }
      builder.AppendLine($"WHERE {_cf.Enclose(Utility.GetExcelName(master.Index))}.{_cf.Enclose(master.TflBatchId().FieldName())} > @Threshold;");
      sql = builder.ToString();
   } else {
      sql = $@"
SELECT {string.Join(",", _output.Entity.Fields.Where(f => f.Output).Select(f => _cf.Enclose(f.Alias)))}
FROM {_cf.Enclose(_output.Process.Star)} {(_cf.AdoProvider == AdoProvider.SqlServer ? "WITH (NOLOCK)" : string.Empty)}
WHERE {_cf.Enclose(Constants.TflBatchId)} > @Threshold;";
   }

   _output.Debug(() => sql);

   using (var cn = _cf.GetConnection()) {
      cn.Open();
      var cmd = cn.CreateCommand();
      cmd.CommandTimeout = 0;
      cmd.CommandType = CommandType.Text;
      cmd.CommandText = sql;

      var min = cmd.CreateParameter();
      min.ParameterName = "@Threshold";
      min.Value = threshold;
      min.Direction = ParameterDirection.Input;
      min.DbType = DbType.Int32;
      cmd.Parameters.Add(min);

      var reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess);
      var rowCount = 0;
      var fieldArray = _output.Entity.Fields.ToArray();
      while (reader.Read()) {
         rowCount++;
         _output.Increment();
         yield return _rowCreator.Create(reader, fieldArray);
      }
      _output.Info("{0} from {1}", rowCount, _output.Connection.Name);
   }
}
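// The @Threshold parameter above is created through cmd.CreateParameter() so the same code
// works across ADO providers. A self-contained helper sketch of that pattern; the helper
// name is illustrative, not from the source.
using System.Data;

public static class AdoParameterSketch {
   // attaches an input parameter to any IDbCommand without referencing a concrete provider
   public static IDbDataParameter AddInt32(IDbCommand cmd, string name, int value) {
      var p = cmd.CreateParameter();
      p.ParameterName = name;
      p.Value = value;
      p.Direction = ParameterDirection.Input;
      p.DbType = DbType.Int32;
      cmd.Parameters.Add(p);
      return p;
   }
}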
string SqlInsertTemplate(IConnectionContext context, string tempTable) {
   var sql = string.Format("INSERT #{0} VALUES ({1});", tempTable, string.Join(",", _keys.Select(k => "@" + k.FieldName())));
   context.Debug(sql);
   return sql;
}

static string SqlCreateKeysTable(IConnectionContext context, string tempTable) {
   var columnsAndDefinitions = string.Join(",", context.Entity.GetPrimaryKey().Select(f => "[" + f.FieldName() + "] " + f.SqlDataType() + " NOT NULL"));
   var sql = string.Format(@"CREATE TABLE #{0}({1})", tempTable, columnsAndDefinitions);
   context.Debug(sql);
   return sql;
}

static string SqlDrop(IConnectionContext context, string tempTable) {
   var sql = string.Format("DROP TABLE #{0};", tempTable);
   context.Debug(sql);
   return sql;
}
public IEnumerable<IRow> Read() {

   ElasticsearchResponse<DynamicResponse> response;
   ElasticsearchDynamicValue hits;
   var from = 0;
   var size = 10;
   string body;
   bool warned = false;
   var scroll = !_context.Entity.IsPageRequest();

   if (!scroll) {
      from = (_context.Entity.Page * _context.Entity.Size) - _context.Entity.Size;
      body = WriteQuery(_fields, _readFrom, _context, scroll: false, from: from, size: _context.Entity.Size);
   } else {
      body = WriteQuery(_fields, _readFrom, _context, scroll: false, from: 0, size: 0);
      response = _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
      if (response.Success) {
         hits = response.Body["hits"] as ElasticsearchDynamicValue;
         if (hits != null && hits.HasValue) {
            var total = hits["total"];
            try {
               if (_version.Major >= 7) {
                  // version 7 changed total to an object with "value" and "relation" properties
                  size = Convert.ToInt32(total["value"].Value);
               } else {
                  size = Convert.ToInt32(total.Value);
               }
            } catch (Exception ex) {
               warned = true;
               _context.Debug(() => total);
               _context.Warn($"Could not get total number of matching documents from the elasticsearch response. Are you sure you're using version {_version}?");
               _context.Error(ex, ex.Message);
            }
            body = WriteQuery(_fields, _readFrom, _context, scroll: true, from: 0, size: size > ElasticsearchDefaultSizeLimit ? DefaultSize : size);
         }
      }
   }

   _context.Debug(() => body);
   _context.Entity.Query = body;

   response = scroll ?
      _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body, p => p.AddQueryString("scroll", _context.Connection.Scroll)) :
      _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);

   if (!response.Success) {
      LogError(response);
      yield break;
   }

   try {
      if (_version.Major >= 7) {
         // version 7 changed total to an object with "value" and "relation" properties
         _context.Entity.Hits = Convert.ToInt32(response.Body["hits"]["total"]["value"].Value);
      } else {
         _context.Entity.Hits = Convert.ToInt32(response.Body["hits"]["total"].Value);
      }
   } catch (Exception ex) {
      if (!warned) {
         _context.Debug(() => response.Body["hits"]);
         _context.Warn($"Could not get total number of matching documents from the elasticsearch response. Are you sure you're using version {_version}?");
         _context.Error(ex.Message);
      }
   }

   hits = response.Body["hits"]["hits"] as ElasticsearchDynamicValue;
   if (hits == null || !hits.HasValue) {
      _context.Warn("No hits from elasticsearch");
      yield break;
   }

   var docs = hits.Value as IList<object>;
   if (docs == null) {
      _context.Error("No documents returned from elasticsearch!");
      yield break;
   }

   // if any of the fields do not exist, yield break
   if (docs.Count > 0) {
      var doc = docs.First() as IDictionary<string, object>;
      var source = doc?["_source"] as IDictionary<string, object>;
      if (source == null) {
         _context.Error("Missing _source from elasticsearch response!");
         yield break;
      }
      for (var i = 0; i < _fields.Length; i++) {
         if (source.ContainsKey(_fieldNames[i])) {
            continue;
         }
         _context.Error($"Field {_fieldNames[i]} does not exist!");
         yield break;
      }
   }

   var count = 0;
   foreach (var d in docs) {
      var doc = (IDictionary<string, object>)d;
      var row = _rowFactory.Create();
      var source = (IDictionary<string, object>)doc["_source"];
      for (var i = 0; i < _fields.Length; i++) {
         row[_fields[i]] = _fields[i].Convert(source[_fieldNames[i]]);
      }
      yield return row;
   }
   count += docs.Count; // get this from first search response (maybe), unless you have to aggregate it from all...

   foreach (var filter in _context.Entity.Filter.Where(f => f.Type == "facet" && !string.IsNullOrEmpty(f.Map))) {
      var map = _context.Process.Maps.First(m => m.Name == filter.Map);
      var buckets = response.Body["aggregations"][filter.Key]["buckets"] as ElasticsearchDynamicValue;
      if (buckets == null || !buckets.HasValue) {
         continue;
      }
      var items = buckets.Value as IEnumerable<object>;
      if (items == null) {
         continue;
      }
      foreach (var item in items.OfType<IDictionary<string, object>>()) {
         map.Items.Add(new MapItem { From = $"{item["key"]} ({item["doc_count"]})", To = item["key"] });
      }
   }

   if (!response.Body.ContainsKey("_scroll_id")) {
      yield break;
   }

   if (size == count) {
      _client.ClearScroll<DynamicResponse>(new PostData<object>(new { scroll_id = response.Body["_scroll_id"].Value }));
      yield break;
   }

   var scrolls = new HashSet<string>();
   do {
      var scrollId = response.Body["_scroll_id"].Value;
      scrolls.Add(scrollId);
      response = _client.Scroll<DynamicResponse>(new PostData<object>(new { scroll = _context.Connection.Scroll, scroll_id = scrollId }));
      if (response.Success) {
         docs = (IList<object>)response.Body["hits"]["hits"].Value;
         foreach (var d in docs) {
            var doc = (IDictionary<string, object>)d;
            var row = _rowFactory.Create();
            var source = (IDictionary<string, object>)doc["_source"];
            for (var i = 0; i < _fields.Length; i++) {
               row[_fields[i]] = _fields[i].Convert(source[_fieldNames[i]]);
            }
            yield return row;
         }
         count += docs.Count;
      } else {
         LogError(response);
      }
   } while (response.Success && count < size);

   _client.ClearScroll<DynamicResponse>(new PostData<object>(new { scroll_id = scrolls.ToArray() }));
}
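// The do/while above keeps requesting the next scroll page until the number of documents read
// reaches the reported total, remembering every scroll id so they can all be cleared at the
// end. A self-contained sketch of that loop shape against a fake page source; the Elasticsearch
// client, scroll ids and page contents here are all stand-ins.
using System;
using System.Collections.Generic;

public static class ScrollLoopSketch {
   public static void Main() {
      var pages = new Queue<string[]>(new[] {
         new[] { "doc1", "doc2" },
         new[] { "doc3" }
      });
      var total = 3;
      var count = 0;
      var scrollIds = new HashSet<string>();

      do {
         var scrollId = Guid.NewGuid().ToString("N");   // stand-in for response.Body["_scroll_id"]
         scrollIds.Add(scrollId);
         var docs = pages.Count > 0 ? pages.Dequeue() : Array.Empty<string>();
         foreach (var doc in docs) {
            Console.WriteLine(doc);                     // stand-in for yield return row
         }
         count += docs.Length;
      } while (count < total);

      Console.WriteLine($"clearing {scrollIds.Count} scroll id(s)"); // stand-in for ClearScroll
   }
}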
public IEnumerable<IRow> Read() {

   ElasticsearchResponse<DynamicResponse> response;
   string body;

   if (_context.Entity.IsPageRequest()) {
      var from = (_context.Entity.Page * _context.Entity.PageSize) - _context.Entity.PageSize;
      body = WriteQuery(_fields, _readFrom, _context, from, _context.Entity.PageSize);
   } else {
      body = WriteQuery(_fields, _readFrom, _context, 0, 0);
      response = _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
      if (response.Success) {
         var hits = response.Body["hits"] as ElasticsearchDynamicValue;
         if (hits != null && hits.HasValue) {
            var properties = hits.Value as IDictionary<string, object>;
            if (properties != null && properties.ContainsKey("total")) {
               var size = Convert.ToInt32(properties["total"]) + 1;
               body = WriteQuery(_fields, _readFrom, _context, 0, size);
            }
         }
      }
   }

   _context.Debug(() => body);
   _context.Entity.Query = body;
   response = _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);

   if (response.Success) {
      _context.Entity.Hits = Convert.ToInt32((response.Body["hits"]["total"] as ElasticsearchDynamicValue).Value);
      var hits = response.Body["hits"]["hits"] as ElasticsearchDynamicValue;
      if (hits != null && hits.HasValue) {
         var docs = hits.Value as IEnumerable<object>;
         if (docs != null) {
            foreach (var doc in docs.OfType<IDictionary<string, object>>()) {
               var row = _rowFactory.Create();
               if (doc != null && doc.ContainsKey("_source")) {
                  var source = doc["_source"] as IDictionary<string, object>;
                  if (source != null) {
                     for (var i = 0; i < _fields.Length; i++) {
                        var field = _fields[i];
                        row[field] = field.Convert(source[_fieldNames[i]]);
                     }
                  }
               }
               _context.Increment();
               yield return row;
            }
         }
      }

      foreach (var filter in _context.Entity.Filter.Where(f => f.Type == "facet" && !string.IsNullOrEmpty(f.Map))) {
         var map = _context.Process.Maps.First(m => m.Name == filter.Map);
         var buckets = response.Body["aggregations"][filter.Key]["buckets"] as ElasticsearchDynamicValue;
         if (buckets == null || !buckets.HasValue) {
            continue;
         }
         var items = buckets.Value as IEnumerable<object>;
         if (items == null) {
            continue;
         }
         foreach (var item in items.OfType<IDictionary<string, object>>()) {
            map.Items.Add(new MapItem { From = $"{item["key"]} ({item["doc_count"]})", To = item["key"] }.WithDefaults());
         }
      }
   } else {
      _context.Error(response.DebugInformation);
   }
}
public IEnumerable<IRow> Read() {

   ElasticsearchResponse<DynamicResponse> response;
   ElasticsearchDynamicValue hits;
   var from = 1;
   var size = 10;
   string body;

   if (_context.Entity.IsPageRequest()) {
      from = (_context.Entity.Page * _context.Entity.PageSize) - _context.Entity.PageSize;
      body = WriteQuery(_fields, _readFrom, _context, from, _context.Entity.PageSize);
   } else {
      body = WriteQuery(_fields, _readFrom, _context, 0, 0);
      response = _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);
      if (response.Success) {
         hits = response.Body["hits"] as ElasticsearchDynamicValue;
         if (hits != null && hits.HasValue) {
            var properties = hits.Value as IDictionary<string, object>;
            if (properties != null && properties.ContainsKey("total")) {
               size = Convert.ToInt32(properties["total"]);
               body = WriteQuery(_fields, _readFrom, _context, 0, size > 10000 ? 10000 : size);
            }
         }
      }
   }

   _context.Debug(() => body);
   _context.Entity.Query = body;

   // move 10000 to configurable limit
   response = from + size > 10000 ?
      _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body, p => p.Scroll(TimeSpan.FromMinutes(1.0))) :
      _client.Search<DynamicResponse>(_context.Connection.Index, _typeName, body);

   if (!response.Success) {
      LogError(response);
      yield break;
   }

   _context.Entity.Hits = Convert.ToInt32((response.Body["hits"]["total"] as ElasticsearchDynamicValue).Value);

   hits = response.Body["hits"]["hits"] as ElasticsearchDynamicValue;
   if (hits == null || !hits.HasValue) {
      _context.Warn("No hits from elasticsearch");
      yield break;
   }

   var docs = hits.Value as IList<object>;
   if (docs == null) {
      _context.Error("No documents returned from elasticsearch!");
      yield break;
   }

   // if any of the fields do not exist, yield break
   if (docs.Count > 0) {
      var doc = docs.First() as IDictionary<string, object>;
      var source = doc?["_source"] as IDictionary<string, object>;
      if (source == null) {
         _context.Error("Missing _source from elasticsearch response!");
         yield break;
      }
      for (var i = 0; i < _fields.Length; i++) {
         if (source.ContainsKey(_fieldNames[i])) {
            continue;
         }
         _context.Error($"Field {_fieldNames[i]} does not exist!");
         yield break;
      }
   }

   var count = 0;
   foreach (var d in docs) {
      var doc = (IDictionary<string, object>)d;
      var row = _rowFactory.Create();
      var source = (IDictionary<string, object>)doc["_source"];
      for (var i = 0; i < _fields.Length; i++) {
         row[_fields[i]] = _fields[i].Convert(source[_fieldNames[i]]);
      }
      _context.Increment();
      yield return row;
   }
   count += docs.Count; // get this from first search response (maybe), unless you have to aggregate it from all...

   foreach (var filter in _context.Entity.Filter.Where(f => f.Type == "facet" && !string.IsNullOrEmpty(f.Map))) {
      var map = _context.Process.Maps.First(m => m.Name == filter.Map);
      var buckets = response.Body["aggregations"][filter.Key]["buckets"] as ElasticsearchDynamicValue;
      if (buckets == null || !buckets.HasValue) {
         continue;
      }
      var items = buckets.Value as IEnumerable<object>;
      if (items == null) {
         continue;
      }
      foreach (var item in items.OfType<IDictionary<string, object>>()) {
         map.Items.Add(new MapItem { From = $"{item["key"]} ({item["doc_count"]})", To = item["key"] });
      }
   }

   if (!response.Body.ContainsKey("_scroll_id")) {
      yield break;
   }

   if (size == count) {
      _client.ClearScroll<DynamicResponse>(new PostData<object>(new { scroll_id = response.Body["_scroll_id"].Value }));
      yield break;
   }

   var scrolls = new HashSet<string>();
   do {
      var scrollId = response.Body["_scroll_id"].Value;
      scrolls.Add(scrollId);
      response = _client.Scroll<DynamicResponse>(new PostData<object>(new { scroll = "1m", scroll_id = scrollId }));
      if (response.Success) {
         docs = (IList<object>)response.Body["hits"]["hits"].Value;
         foreach (var d in docs) {
            var doc = (IDictionary<string, object>)d;
            var row = _rowFactory.Create();
            var source = (IDictionary<string, object>)doc["_source"];
            for (var i = 0; i < _fields.Length; i++) {
               row[_fields[i]] = _fields[i].Convert(source[_fieldNames[i]]);
            }
            _context.Increment();
            yield return row;
         }
         count += docs.Count;
      } else {
         LogError(response);
      }
   } while (response.Success && count < size);

   _client.ClearScroll<DynamicResponse>(new PostData<object>(new { scroll_id = scrolls.ToArray() }));
}
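// The facet handling above turns each aggregation bucket into a map item whose display text
// carries the document count. A self-contained sketch of that projection; the bucket data and
// the (From, To) pair are illustrative stand-ins for MapItem.
using System;
using System.Collections.Generic;

public static class FacetBucketSketch {
   public static void Main() {
      var buckets = new List<IDictionary<string, object>> {
         new Dictionary<string, object> { ["key"] = "open",   ["doc_count"] = 41 },
         new Dictionary<string, object> { ["key"] = "closed", ["doc_count"] = 7 }
      };

      foreach (var bucket in buckets) {
         var from = $"{bucket["key"]} ({bucket["doc_count"]})";   // text shown to the user
         var to = bucket["key"];                                  // value actually filtered on
         Console.WriteLine($"{from} -> {to}");
      }
   }
}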