/// <summary>
/// Wraps a parent reader and applies process parameters to entity fields.
/// When a parameter's value cannot be converted to the target field's type,
/// the field's validators are cleared so validation doesn't fire on a bad value.
/// </summary>
/// <param name="context">pipeline context providing the process and entity</param>
/// <param name="parentReader">the reader whose rows parameters are applied to</param>
/// <param name="rowFactory">optional; when null a NullReader stands in for the default row reader</param>
public ParameterRowReader(IContext context, IRead parentReader, IRowFactory rowFactory = null) {
   _defaultRowReader = rowFactory == null ? (IRead)new NullReader(context) : new DefaultRowReader(context, rowFactory);
   _context = context;
   _parentReader = parentReader;

   // index parameters by name; later duplicates overwrite earlier ones
   foreach (var p in context.Process.Parameters) {
      _parameters[p.Name] = p;
   }

   // attempt to disable validation if parameter can't be converted to field's type
   foreach (var field in _context.Entity.GetAllFields()) {
      // single TryGetValue lookups (alias preferred over name) instead of the
      // previous ContainsKey + indexer pattern, which hashed each key twice
      if (!_parameters.TryGetValue(field.Alias, out var p) && !_parameters.TryGetValue(field.Name, out p)) {
         continue;
      }
      if (p != null && p.Value != null && !Constants.CanConvert()[field.Type](p.Value)) {
         field.Validators.Clear();
      }
   }
}
/// <summary>
/// Creates rows from ADO data readers using the supplied row factory
/// and the standard type map.
/// </summary>
/// <param name="context">connection-level pipeline context</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public AdoRowCreator(IConnectionContext context, IRowFactory rowFactory) {
   _typeMap = Constants.TypeSystem();
   _rowFactory = rowFactory;
   _context = context;
   _errors = null;
}
// Transform that parses XML held in a single input field and projects it into
// multiple output fields, optionally starting at a named root element.
public FromXmlTransform(IContext context, IRowFactory rowFactory) : base(context, "object") {
   _rowFactory = rowFactory;
   _input = SingleInputForMultipleOutput();
   var output = MultipleOutput();
   _fields = context.GetAllEntityFields().ToArray();
   // outer fields: non-system entity fields this transform does not produce
   _outerFields = _fields.Except(output).Where(f => !f.System).ToList();
   if (!_input.Output) {
      // the raw XML field is not part of the output, so don't carry it forward
      _outerFields.Remove(_input);
   }
   _typeDefaults = Constants.TypeDefaults();
   _root = context.Operation.Root;
   _findRoot = !string.IsNullOrEmpty(context.Operation.Root);
   foreach (var field in output) {
      // only scan XML attributes if at least one output field reads from an attribute node
      if (!_searchAttributes && field.NodeType.Equals("attribute", Ic)) {
         _searchAttributes = true;
      }
      _nameMap[field.Name] = field;
   }
   // machinery for stamping system fields and the entity hash onto produced rows
   _setSystemFields = new SetSystemFields(context);
   _hashCode = context.Entity.TflHashCode();
   _fieldsToHash = _fields.Where(f => !f.System).ToArray();
}
/// <summary>
/// Composes a reader over every file in the configured folder (ordered by
/// creation time), choosing an Excel reader for *.xls* files and a delimited
/// file reader for everything else.
/// </summary>
/// <param name="input">connection context carrying folder, pattern, and search option</param>
/// <param name="rowFactory">factory used by each per-file reader</param>
public FolderReader(IConnectionContext input, IRowFactory rowFactory) {
   var searchOption = (SearchOption)Enum.Parse(typeof(SearchOption), input.Connection.SearchOption, true);

   input.Debug(() => $"Searching folder: {input.Connection.Folder}");
   var files = new DirectoryInfo(input.Connection.Folder)
      .GetFiles(input.Connection.SearchPattern, searchOption)
      .OrderBy(f => f.CreationTime)
      .ToArray();
   input.Debug(() => $"Found {files.Length} files.");

   var readers = new List<IRead>();
   foreach (var file in files) {
      input.Debug(() => $"Found file: {file.Name}");

      var context = new PipelineContext(input.Logger, input.Process, input.Entity, input.Field, input.Transform);

      // each file gets its own cloned connection pointing at that file
      var fileConnection = input.Connection.Clone();
      fileConnection.Provider = "file";
      fileConnection.File = file.FullName;

      var fileInput = new InputContext(context, new Incrementer(context)) { Connection = fileConnection };

      if (file.Extension.ToLower().Contains("xls")) {
         readers.Add(new ExcelReader(fileInput, rowFactory));
      } else {
         readers.Add(new DelimitedFileReader(fileInput, rowFactory));
      }
   }
   _reader = new CompositeReader(readers);
}
/// <summary>
/// Composes a reader over every file in the configured folder (ordered by
/// creation time). A plain FileReader is used when there is no delimiter and
/// exactly one input field; otherwise a delimited file reader is used.
/// </summary>
/// <param name="input">connection context carrying folder, pattern, and search option</param>
/// <param name="rowFactory">factory used by each per-file reader</param>
public FolderReader(IConnectionContext input, IRowFactory rowFactory) {
   var readers = new List<IRead>();
   var searchOption = (SearchOption)Enum.Parse(typeof(SearchOption), input.Connection.SearchOption, true);

   input.Info($"Searching folder: {input.Connection.Folder}");
   var files = new DirectoryInfo(input.Connection.Folder)
      .GetFiles(input.Connection.SearchPattern, searchOption)
      .OrderBy(f => f.CreationTime)
      .ToArray();
   input.Info($"Found {files.Length} files.");

   foreach (var file in files) {
      input.Info($"Found file: {file.Name}");

      var context = new PipelineContext(input.Logger, input.Process, input.Entity, input.Field, input.Operation);

      // FIX: previously the shared input.Connection.File was also mutated here,
      // leaking the last file name into the caller's connection. The clone below
      // is explicitly given the file name, so only the clone is touched now.
      var fileConnection = input.Connection.Clone();
      fileConnection.Provider = "file";
      fileConnection.File = file.FullName;

      var fileInput = new InputContext(context) { Connection = fileConnection };

      if (input.Connection.Delimiter == string.Empty && input.Entity.Fields.Count(f => f.Input) == 1) {
         readers.Add(new FileReader(fileInput, rowFactory));
      } else {
         readers.Add(new DelimitedFileReader(fileInput, rowFactory));
      }
   }
   _reader = new CompositeReader(readers);
}
/// <summary>
/// Reads the delimited file named by the connection; captures the optional
/// TflFile system field if the entity declares one.
/// </summary>
/// <param name="context">input context naming the file to read</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public DelimitedFileReader(InputContext context, IRowFactory rowFactory) {
   _context = context;
   _rowFactory = rowFactory;
   _fileInfo = new FileInfo(context.Connection.File);
   // optional system field recording which file a row came from
   context.Entity.TryGetField("TflFile", out _fileField);
}
/// <summary>
/// Reads the delimited file named by the connection, building a FileHelpers
/// record class whose fields mirror the entity's input fields (all read as
/// optionally-quoted strings). Rows are filtered by the supplied condition.
/// </summary>
/// <param name="context">input context with delimiter, qualifier, and start line</param>
/// <param name="rowFactory">factory used to allocate rows</param>
/// <param name="rowCondition">predicate applied to each row read</param>
public DelimitedFileReader(InputContext context, IRowFactory rowFactory, IRowCondition rowCondition) {
   _context = context;
   _rowFactory = rowFactory;
   _rowCondition = rowCondition;
   _fileInfo = new FileInfo(context.Connection.File);

   var identifier = Utility.Identifier(context.Entity.OutputTableName(context.Process.Name));
   _builder = new DelimitedClassBuilder(identifier) {
      IgnoreEmptyLines = true,
      Delimiter = context.Connection.Delimiter,
      IgnoreFirstLines = context.Connection.Start
   };

   foreach (var field in context.InputFields) {
      // everything is read as a string; conversion happens downstream
      var fb = _builder.AddField(field.FieldName(), typeof(string));
      fb.FieldQuoted = true;
      fb.QuoteChar = context.Connection.TextQualifier;
      fb.QuoteMode = QuoteMode.OptionalForRead;
      fb.FieldOptional = field.Optional;
   }
}
/// <summary>
/// Reads rows from Elasticsearch. Field names and the type name depend on
/// whether we read from the input (entity names) or the output (lowered aliases).
/// Applies a default read size and warns when it exceeds Elasticsearch's limit.
/// </summary>
/// <param name="context">connection-level pipeline context</param>
/// <param name="fields">fields to materialize per row</param>
/// <param name="client">low-level Elasticsearch client</param>
/// <param name="rowFactory">factory used to allocate rows</param>
/// <param name="readFrom">whether we read the input source or the output index</param>
public ElasticReader(
   IConnectionContext context,
   Field[] fields,
   IElasticLowLevelClient client,
   IRowFactory rowFactory,
   ReadFrom readFrom
) {
   _context = context;
   _fields = fields;
   _client = client;
   _rowFactory = rowFactory;
   _readFrom = readFrom;

   // BUG FIX: this previously tested the _readFrom FIELD before it was assigned
   // (still default(ReadFrom)), so the Output case silently produced input-style
   // field names; test the readFrom parameter instead.
   _fieldNames = fields.Select(f => readFrom == ReadFrom.Input ? f.Name : f.Alias.ToLower()).ToArray();
   _typeName = readFrom == ReadFrom.Input ? context.Entity.Name : context.Entity.Alias.ToLower();

   // apply the default page size when none was configured
   _context.Entity.ReadSize = _context.Entity.ReadSize == 0 ? DefaultSize : _context.Entity.ReadSize;
   if (_context.Entity.ReadSize > ElasticsearchDefaultSizeLimit) {
      _context.Warn("Elasticsearch's default size limit is 10000. {0} may be too high.", _context.Entity.ReadSize);
   }
   _version = ElasticVersionParser.ParseVersion(_context);
}
/// <summary>
/// Produces rows from the supplied parameter dictionary, limited to the
/// entity's input fields.
/// </summary>
/// <param name="input">input context</param>
/// <param name="fields">candidate fields; only those marked Input are kept</param>
/// <param name="rowFactory">factory used to allocate rows</param>
/// <param name="parameters">name/value pairs that seed the row</param>
public InternalParameterReader(InputContext input, IEnumerable<Field> fields, IRowFactory rowFactory, IDictionary<string, string> parameters) {
   _input = input;
   _parameters = parameters;
   _rowFactory = rowFactory;
   _fields = fields.Where(f => f.Input).ToArray();
}
/// <summary>
/// Reads Amazon Connect instances. Field types are validated up front,
/// before any AWS request is issued.
/// </summary>
/// <param name="context">input context</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public InstancesReader(InputContext context, IRowFactory rowFactory) {
   _context = context;
   _rowFactory = rowFactory;
   _request = new ListInstancesRequest();
   _client = new AmazonConnectClient();
   // fail fast on unsupported entity field types
   CheckFieldTypes();
}
/// <summary>
/// Opens the JSON file named by the connection and delegates row production
/// to a JsonStreamReader over the opened stream.
/// </summary>
/// <param name="context">input context naming the file</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public JsonFileReader(InputContext context, IRowFactory rowFactory) {
   _context = context;
   var fileInfo = FileUtility.Find(context.Connection.File);
   // ReadWrite share so other processes may still open the file while we read
   _stream = new FileStream(fileInfo.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
   _streamWriter = new JsonStreamReader(context, _stream, rowFactory);
}
/// <summary>
/// Reads rows from a JSON stream, mapping JSON property names to the entity's
/// non-system input fields.
/// </summary>
/// <param name="context">input context supplying entity fields</param>
/// <param name="stream">open stream of JSON content</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public JsonStreamReader(InputContext context, Stream stream, IRowFactory rowFactory) {
   _context = context;
   _stream = stream;
   _rowFactory = rowFactory;
   // only non-system input fields are read from the stream
   _fields = context.GetAllEntityFields().Where(f => f.Input && !f.System).ToArray();
   _fieldLookup = _fields.ToDictionary(f => f.Name);
}
/// <summary>
/// Assembles a stream reader from its collaborators: a row factory,
/// a tokenizer, and the file's metadata.
/// </summary>
/// <param name="rowFactory">factory used to allocate rows</param>
/// <param name="tokenizer">splits lines into field tokens</param>
/// <param name="fileMetaData">describes delimiters, encoding, and header layout</param>
public StreamReader(IRowFactory rowFactory, ITokenizer tokenizer, IFileMetaData fileMetaData) {
   this.rowFactory = rowFactory;
   this.tokenizer = tokenizer;
   this.fileMetaData = fileMetaData;
}
/// <summary>
/// Reads CSV content from a web address. When a user is configured, the
/// client carries both network credentials and a Basic authorization header;
/// anonymous requests carry neither.
/// </summary>
/// <param name="context">input context with connection user/password</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public WebCsvReader(InputContext context, IRowFactory rowFactory) {
   _context = context;
   _rowFactory = rowFactory;

   if (string.IsNullOrEmpty(context.Connection.User)) {
      _client = new WebClient();
   } else {
      _client = new WebClient {
         Credentials = new NetworkCredential(context.Connection.User, context.Connection.Password)
      };
      // BUG FIX: the Basic header used to be added unconditionally, so anonymous
      // requests sent a bogus "Basic" token (base64 of ":"). It is now only set
      // when credentials are actually configured.
      var token = Convert.ToBase64String(System.Text.Encoding.Default.GetBytes($"{context.Connection.User}:{context.Connection.Password}"));
      _client.Headers[HttpRequestHeader.Authorization] = $"Basic {token}";
   }
}
/// <summary>
/// Runs a query against Elasticsearch and materializes rows for the
/// entity's input fields, which are indexed by name for fast lookup.
/// </summary>
/// <param name="context">input context supplying the input fields</param>
/// <param name="client">low-level Elasticsearch client</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public ElasticQueryReader(InputContext context, IElasticLowLevelClient client, IRowFactory rowFactory) {
   _context = context;
   _client = client;
   _rowFactory = rowFactory;
   _fields = context.InputFields.ToDictionary(f => f.Name, f => f);
}
/// <summary>
/// Reads CloudWatch log groups, optionally limited in count and filtered by
/// a name prefix. Field types are validated before the request is built.
/// </summary>
/// <param name="context">input context</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public LogGroupsReader(InputContext context, IRowFactory rowFactory) {
   _context = context;
   _rowFactory = rowFactory;
   _client = new AmazonCloudWatchLogsClient();
   // fail fast on unsupported entity field types
   CheckFieldTypes();
   var request = new DescribeLogGroupsRequest {
      Limit = GetLimit(),
      LogGroupNamePrefix = GetPrefix()
   };
   _request = request;
}
// Shared fixture setup: mocks metadata for a tab-delimited, newline-terminated,
// UTF-8 file with one header row and an "id" field at index 0, then builds the
// real RowFactory and Tokenizer under test against that mock.
public FileReaderTests() {
   fileMetaDataMock.Setup(n => n.FieldsEnclosedBy).Returns("");
   fileMetaDataMock.Setup(n => n.FieldsTerminatedBy).Returns("\t");
   fileMetaDataMock.Setup(n => n.LinesTerminatedBy).Returns("\n");
   fileMetaDataMock.Setup(n => n.HeaderRowCount).Returns(1);
   fileMetaDataMock.Setup(n => n.Encoding).Returns(Encoding.UTF8);
   fileMetaDataMock.Setup(n => n.Fields.IndexOf("id")).Returns(0);
   fileMetaDataMock.Setup(n => n.LineTerminatorLength).Returns(1);
   rowFactory = new RowFactory();
   tokenizer = new Tokenizer(fileMetaDataMock.Object);
}
/// <summary>
/// Prepares a file reader: validates the configured line endings up front,
/// then wires a StreamReader from the supplied collaborators.
/// </summary>
/// <param name="fileName">path of the file to read</param>
/// <param name="rowFactory">factory used to allocate rows</param>
/// <param name="tokenizer">splits lines into field tokens</param>
/// <param name="fileMetaData">describes delimiters, encoding, and header layout</param>
/// <param name="config">reader configuration</param>
public FileReader(string fileName, IRowFactory rowFactory, ITokenizer tokenizer, IFileMetaData fileMetaData, FileReaderConfiguration config) {
   // reject unsupported line endings before anything else is wired up
   FileReaderUtils.ValidateLineEnds(fileMetaData.LinesTerminatedBy);
   this.config = config;
   FileName = fileName;
   FileMetaData = fileMetaData;
   streamReader = new StreamReader(rowFactory, tokenizer, fileMetaData);
}
/// <summary>
/// Reads a file into the entity's first input field. Any "line" transforms
/// on the entity contribute line numbers that restrict which lines are kept.
/// </summary>
/// <param name="context">input context supplying entity fields and transforms</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public FileReader(InputContext context, IRowFactory rowFactory) {
   _context = context;
   _rowFactory = rowFactory;
   _field = context.Entity.GetAllFields().First(f => f.Input);

   // collect line numbers from "line" transforms; non-numeric values are ignored
   var lineTransforms = context.Entity.GetAllTransforms().Where(t => t.Method == "line");
   foreach (var t in lineTransforms) {
      if (int.TryParse(t.Value, out var number)) {
         _linesToKeep.Add(number);
      }
   }
}
/// <summary>
/// Reads CSV rows from an open stream via CsvHelper. Every non-string input
/// field that doesn't already begin with a convert transform gets an implicit
/// one, so downstream code receives properly typed values.
/// </summary>
/// <param name="context">input context supplying entity fields</param>
/// <param name="streamReader">open reader over the CSV content</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public CsvHelperStreamReader(InputContext context, StreamReader streamReader, IRowFactory rowFactory) {
   _context = context;
   _streamReader = streamReader;
   _rowFactory = rowFactory;

   var needsConvert = context.Entity.Fields.Where(f =>
      f.Input &&
      f.Type != "string" &&
      (!f.Transforms.Any() || f.Transforms.First().Method != "convert")
   );
   foreach (var field in needsConvert) {
      var fieldContext = new PipelineContext(context.Logger, context.Process, context.Entity, field, new Operation { Method = "convert" });
      _transforms.Add(new ConvertTransform(fieldContext));
   }
}
/// <summary>
/// Reads rows from a Solr index, requesting the supplied fields by name.
/// </summary>
/// <param name="solr">read-only Solr operations over dictionary documents</param>
/// <param name="context">input context</param>
/// <param name="fields">fields to materialize per row</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public SolrInputReader(
   ISolrReadOnlyOperations<Dictionary<string, object>> solr,
   InputContext context,
   Field[] fields,
   IRowFactory rowFactory
) {
   _solr = solr;
   _context = context;
   _fields = fields;
   _rowFactory = rowFactory;
   var names = fields.Select(f => f.Name).ToList();
   _fieldNames = new Collection<string>(names);
}
/// <summary>
/// Reads rows over ADO. When reading from the output, it targets the entity's
/// output table on the "output" connection and filters out soft-deleted rows;
/// otherwise it targets the entity's source table/view on its own connection.
/// </summary>
/// <param name="context">pipeline context supplying process and entity</param>
/// <param name="fields">fields to materialize per row</param>
/// <param name="cf">connection factory used for identifier enclosure</param>
/// <param name="rowFactory">factory used to allocate rows</param>
/// <param name="readFrom">whether we read the input source or the output table</param>
public AdoReader(IContext context, Field[] fields, IConnectionFactory cf, IRowFactory rowFactory, ReadFrom readFrom) {
   _context = context;
   _cf = cf;
   _fields = fields;
   _readFrom = readFrom;
   _rowCreator = new AdoRowCreator(context, rowFactory);

   if (readFrom == ReadFrom.Output) {
      _connection = context.Process.Connections.First(c => c.Name == "output");
      _tableOrView = context.Entity.OutputTableName(context.Process.Name);
      _schemaPrefix = string.Empty;
      // exclude soft-deleted rows when reading back from the output
      _filter = $"WHERE {cf.Enclose(_context.Entity.TflDeleted().FieldName())} != 1";
   } else {
      _connection = context.Process.Connections.First(c => c.Name == context.Entity.Connection);
      _tableOrView = context.Entity.Name;
      _schemaPrefix = context.Entity.Schema == string.Empty ? string.Empty : cf.Enclose(context.Entity.Schema) + ".";
      _filter = string.Empty;
   }
}
/// <summary>
/// Reads internally-defined rows for the entity's input fields. Non-string
/// fields that don't already begin with a convert transform get an implicit
/// one so values arrive properly typed.
/// </summary>
/// <param name="input">input context</param>
/// <param name="fields">candidate fields; only those marked Input are kept</param>
/// <param name="rowFactory">factory used to allocate rows</param>
public InternalReader(InputContext input, IEnumerable<Field> fields, IRowFactory rowFactory) {
   _input = input;
   _rowFactory = rowFactory;
   _missing = new HashSet<string>();
   _fields = fields.Where(f => f.Input).ToArray();

   foreach (var field in _fields) {
      if (field.Type == "string") { continue; }
      if (field.Transforms.Any() && field.Transforms.First().Method == "convert") { continue; }
      var fieldContext = new PipelineContext(input.Logger, input.Process, input.Entity, field, new Operation { Method = "convert" });
      _transforms.Add(new ConvertTransform(fieldContext));
   }
}
/// <summary>
/// Reads rows from Elasticsearch. Field names and the type name depend on
/// whether we read from the input (entity names) or the output (lowered aliases).
/// </summary>
/// <param name="context">connection-level pipeline context</param>
/// <param name="fields">fields to materialize per row</param>
/// <param name="client">low-level Elasticsearch client</param>
/// <param name="rowFactory">factory used to allocate rows</param>
/// <param name="readFrom">whether we read the input source or the output index</param>
public ElasticReader(
   IConnectionContext context,
   Field[] fields,
   IElasticLowLevelClient client,
   IRowFactory rowFactory,
   ReadFrom readFrom
) {
   _context = context;
   _fields = fields;
   _client = client;
   _rowFactory = rowFactory;
   _readFrom = readFrom;

   // BUG FIX: _fieldNames previously consulted the _readFrom FIELD before it was
   // assigned (still default(ReadFrom)), so the Output case silently produced
   // input-style field names; use the readFrom parameter instead.
   _fieldNames = fields.Select(f => readFrom == ReadFrom.Input ? f.Name : f.Alias.ToLower()).ToArray();
   _typeName = readFrom == ReadFrom.Input ? context.Entity.Name : context.Entity.Alias.ToLower();
}
// Shared fixture setup: two metadata mocks describe the same tab-delimited,
// newline-terminated, UTF-8 layout — one as an Encoding object (IFileMetaData,
// with 24 fields), one as an encoding name string (file attributes). The real
// RowFactory and Tokenizer under test are built against the metadata mock.
public StreamEnumeratorTests() {
   fileMetaDataMock.Setup(n => n.FieldsEnclosedBy).Returns("");
   fileMetaDataMock.Setup(n => n.FieldsTerminatedBy).Returns("\t");
   fileMetaDataMock.Setup(n => n.LinesTerminatedBy).Returns("\n");
   fileMetaDataMock.Setup(n => n.Encoding).Returns(Encoding.UTF8);
   fileMetaDataMock.Setup(n => n.Fields.Length).Returns(24);
   fileAttributesMock.Setup(n => n.FieldsEnclosedBy).Returns("");
   fileAttributesMock.Setup(n => n.FieldsTerminatedBy).Returns("\t");
   fileAttributesMock.Setup(n => n.LinesTerminatedBy).Returns("\n");
   fileAttributesMock.Setup(n => n.Encoding).Returns("UTF-8");
   rowFactory = new RowFactory();
   tokenizer = new Tokenizer(fileMetaDataMock.Object);
}
// Test setup: builds Darwin Core taxon metadata (24 terms plus "id"), wraps it
// in a CoreFileType, and derives the file metadata and row factory under test.
// Also prepares a random permutation of field indexes for shuffle-based tests.
public void Setup() {
   Random rand = new Random();
   // NOTE(review): Range(0, fields.Length - 1) yields fields.Length - 1 indexes,
   // i.e. the last field index is never included in the shuffled sequence —
   // confirm whether that exclusion is intentional.
   sequence = Enumerable.Range(0, fields.Length - 1)
      .OrderBy(n => rand.Next())
      .ToArray();
   // declare the core file's columns in Darwin Core term order, auto-indexed
   var metaData = FieldsMetaDataBuilder.Fields()
      .AutomaticallyIndex()
      .AddField(_ => _.Term("id"))
      .AddField(_ => _.Term(Terms.taxonID))
      .AddField(_ => _.Term(Terms.acceptedNameUsageID))
      .AddField(_ => _.Term(Terms.parentNameUsageID))
      .AddField(_ => _.Term(Terms.nameAccordingToID))
      .AddField(_ => _.Term(Terms.scientificName))
      .AddField(_ => _.Term(Terms.acceptedNameUsage))
      .AddField(_ => _.Term(Terms.parentNameUsage))
      .AddField(_ => _.Term(Terms.nameAccordingTo))
      .AddField(_ => _.Term(Terms.higherClassification))
      .AddField(_ => _.Term(Terms.@class))
      .AddField(_ => _.Term(Terms.order))
      .AddField(_ => _.Term(Terms.family))
      .AddField(_ => _.Term(Terms.genus))
      .AddField(_ => _.Term(Terms.subgenus))
      .AddField(_ => _.Term(Terms.specificEpithet))
      .AddField(_ => _.Term(Terms.infraspecificEpithet))
      .AddField(_ => _.Term(Terms.taxonRank))
      .AddField(_ => _.Term(Terms.scientificNameAuthorship))
      .AddField(_ => _.Term(Terms.taxonomicStatus))
      .AddField(_ => _.Term(Terms.modified))
      .AddField(_ => _.Term(Terms.license))
      .AddField(_ => _.Term(Terms.bibliographicCitation))
      .AddField(_ => _.Term(Terms.references))
      .Build();
   var coreFileType = new CoreFileType();
   foreach (var m in metaData) {
      coreFileType.Field.Add(m);
   }
   // derive the objects under test from the assembled file type
   var defaultFactory = new DefaultFactory();
   fileMetaData = defaultFactory.CreateCoreMetaData(coreFileType);
   rowFactory = defaultFactory.CreateRowFactory();
}
/// <summary>
/// Describes one level of a hierarchical row mapping: its item type, the key
/// columns linking it to its parent, the factory that builds its rows, and
/// whether the level is optional and/or an array.
/// </summary>
/// <param name="parent">the enclosing level, or null at the root</param>
/// <param name="itemType">CLR type of items at this level</param>
/// <param name="parentKeyColumnName">column naming the parent's key</param>
/// <param name="itemKeyColumnName">column naming this level's key</param>
/// <param name="rowFactory">factory used to build rows at this level</param>
/// <param name="isOptional">whether this level may be absent</param>
/// <param name="isArray">whether this level holds a collection of items</param>
public HierarchicalRowInfo(
   HierarchicalRowInfo parent = null,
   Type itemType = null,
   string parentKeyColumnName = null,
   string itemKeyColumnName = null,
   IRowFactory rowFactory = null,
   bool isOptional = false,
   bool isArray = false) {
   IsArray = isArray;
   IsOptional = isOptional;
   RowFactory = rowFactory;
   ItemKeyColumnName = itemKeyColumnName;
   ParentKeyColumnName = parentKeyColumnName;
   ItemType = itemType;
   this.parent = parent;
}
/// <summary>
/// Reads rows from a Lucene index using the supplied searcher, analyzer,
/// and index-reader factories.
/// </summary>
/// <param name="context">connection-level pipeline context</param>
/// <param name="fields">fields to materialize per row</param>
/// <param name="searcherFactory">creates index searchers</param>
/// <param name="analyzer">analyzer used for query parsing</param>
/// <param name="readerFactory">creates index readers</param>
/// <param name="rowFactory">factory used to allocate rows</param>
/// <param name="readFrom">whether we read the input source or the output index</param>
public LuceneReader(
   IConnectionContext context,
   IEnumerable<Field> fields,
   SearcherFactory searcherFactory,
   Analyzer analyzer,
   IndexReaderFactory readerFactory,
   IRowFactory rowFactory,
   ReadFrom readFrom) {
   _readFrom = readFrom;
   _rowFactory = rowFactory;
   _readerFactory = readerFactory;
   _analyzer = analyzer;
   _searcherFactory = searcherFactory;
   _fields = fields;
   _context = context;
}
// Short-transform variant: resolves its own row factory from the process's
// entity input context, then performs the same XML-parsing setup as the
// two-argument overload. System-field/hash wiring is skipped for read-only
// processes.
public FromXmlTransform(IContext context = null) : base(context, null) {
   if (IsMissingContext()) {
      return;
   }
   ProducesFields = true;
   // locate this entity's input context so we can build a matching row factory
   var factory = new ContextFactory(Context.Process, Context.Logger);
   var input = factory.GetEntityInputContext().First(c => c.Entity.Equals(Context.Entity));
   _rowFactory = factory.GetEntityInputRowFactory(input, (c) => c.GetAllEntityFields().Count());
   _input = SingleInputForMultipleOutput();
   var output = MultipleOutput();
   _fields = Context.GetAllEntityFields().ToArray();
   // outer fields: non-system entity fields this transform does not produce
   _outerFields = _fields.Except(output).Where(f => !f.System).ToList();
   if (!_input.Output) {
      // the raw XML field is not part of the output, so don't carry it forward
      _outerFields.Remove(_input);
   }
   _typeDefaults = Constants.TypeDefaults();
   _root = Context.Operation.Root;
   _findRoot = !string.IsNullOrEmpty(Context.Operation.Root);
   foreach (var field in output) {
      // only scan XML attributes if at least one output field reads from an attribute node
      if (!_searchAttributes && field.NodeType.Equals("attribute", Ic)) {
         _searchAttributes = true;
      }
      _nameMap[field.Name] = field;
   }
   // read-only processes don't get system fields or entity hashing
   if (!Context.Process.ReadOnly) {
      _fieldsToHash = _fields.Where(f => !f.System).ToArray();
      _setSystemFields = new SetSystemFields(Context);
      _hashCode = Context.Entity.TflHashCode();
   }
}
// Converts an array produced by the previous operation into rows. Disables
// itself (Run = false) with an error when the preceding operation is missing,
// does not produce an array, or no row factory was supplied.
public ToRowTransform(IContext context = null, IRowFactory rowFactory = null) : base(context, null) {
   if (IsMissingContext()) {
      return;
   }
   var lastOperation = LastOperation();
   if (lastOperation == null) {
      Error($"The toRow operation should receive an array. You may want proceed it with a split operation.");
      Run = false;
      return;
   }
   if (!lastOperation.ProducesArray) {
      Error($"The toRow operation should receive an array. The {lastOperation.Method} method is not producing an array.");
      Run = false;
      return;
   }
   if (rowFactory == null) {
      Run = false;
      Context.Error("The toRow() method did not receive a row factory.");
      return;
   }
   ProducesRows = true;
   _rowFactory = rowFactory;
   _fields = Context.Entity.GetAllFields().ToArray();
   _input = SingleInput();
   // this bit can be encapsulated ,it is always needed for producing rows (it's in FromXml too)
   // NOTE(review): passes the `context` PARAMETER (default null) rather than the
   // Context property used elsewhere in this block — confirm IsMissingContext()
   // guarantees the parameter is non-null by this point.
   _setSystemFields = new SetSystemFields(context);
   _hashCode = Context.Entity.TflHashCode();
   _fieldsToHash = _fields.Where(f => !f.System).ToArray();
}