public override IEtlOperationResult Execute(EtlPipelineContext context) { var log = context.GetLogger(GetType().FullName); log.Debug("Opening connection to Redshift."); using (var con = context.CreateNamedDbConnection(_connectionName)) { con.Open(); foreach (var redshiftCommand in _commands) { log.Trace($"Executing Redshift command: {redshiftCommand}"); try { using (var cmd = con.CreateCommand()) { cmd.CommandType = CommandType.Text; cmd.CommandText = redshiftCommand; cmd.ExecuteNonQuery(); } } catch (Exception e) { log.Error(e.Message, e); return(new EtlOperationResult(false) .WithError(this, e, redshiftCommand)); } } } return(new EtlOperationResult(true)); }
public override IEtlOperationResult Execute(EtlPipelineContext context) { var log = context.GetLogger(GetType().FullName); log.Debug($"Ensuring existance of directory '{_path}'."); Directory.CreateDirectory(_path); return(new EtlOperationResult(Directory.Exists(_path))); }
public override void OnExecute(EtlPipelineContext context)
{
    var logger = context.GetLogger(GetType().FullName);

    try
    {
        using (var con = new SqlConnection(_connectionString))
        {
            con.Open();

            using (var trx = con.BeginTransaction(_isolationLevel))
            using (var cmd = con.CreateCommand())
            {
                cmd.CommandText = _commandText;
                cmd.CommandType = CommandType.Text;
                cmd.Transaction = trx;

                foreach (var param in _parameters)
                {
                    var p = cmd.CreateParameter();
                    p.ParameterName = param.Key;
                    p.Value = param.Value();
                    cmd.Parameters.Add(p);
                }

                using (var reader = cmd.ExecuteReader(CommandBehavior.CloseConnection))
                {
                    if (!reader.HasRows)
                        return; // the finally block below signals end of output

                    while (reader.Read())
                    {
                        var row = new Row();
                        for (var i = 0; i < reader.FieldCount; i++)
                        {
                            row[reader.GetName(i)] = reader[i] is DBNull ? null : reader[i];
                        }

                        Emit(row);
                    }
                }
            }
        }
    }
    catch (SqlException e)
    {
        logger.Error($"Error while executing query: \"{_commandText}\"", e);
        throw;
    }
    finally
    {
        SignalEnd();
    }
}
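// A minimal sketch (assumed shape) of the _parameters field used above: each parameter
// name maps to a value factory (Func<object>) that is re-evaluated when the command is
// built. The parameter names and the query text below are hypothetical.
var parameters = new Dictionary<string, Func<object>>
{
    ["@modifiedSince"] = () => DateTime.UtcNow.Date.AddDays(-1),
    ["@isActive"] = () => true
};
// _commandText would then reference the same names, e.g.:
// "SELECT id, name FROM dbo.Customers WHERE modified_on >= @modifiedSince AND is_active = @isActive"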
public override IEtlOperationResult Execute(EtlPipelineContext context)
{
    var log = context.GetLogger(GetType().Name);

    if (!_predicate(context))
    {
        log.Info($"Predicate evaluated to false for running of '{Name}', skipping.");
        return new EtlOperationResult(true);
    }

    log.Info($"Predicate evaluated to true for running of '{Name}', executing.");
    return _operation.Execute(context);
}
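// A minimal sketch (assumed shape) of the _predicate field above: a
// Func<EtlPipelineContext, bool> evaluated at execution time. The config key is
// hypothetical; context.Config is read the same way in the S3 writer further below.
Func<EtlPipelineContext, bool> predicate =
    ctx => ctx.Config.ContainsKey("EnableNightlyMaintenance");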
public override void OnExecute(EtlPipelineContext context)
{
    var log = context.GetLogger(GetType().FullName);

    var connectionInfo = new ConnectionInfo(_host, _port, _username);
    if (_passwordAuthentication != null)
    {
        connectionInfo.AuthenticationMethods.Add(_passwordAuthentication);
    }
    if (_privateKeyAuthentication != null)
    {
        connectionInfo.AuthenticationMethods.Add(_privateKeyAuthentication);
    }

    var localDirectory = _localDirectory ?? Path.GetTempPath();
    if (_localDirectory == null)
    {
        log.Warn("No explicit download path was specified, using current user's temp directory.");
    }

    log.Debug("Opening connection to SFTP server.");
    using (var client = new SftpClient(connectionInfo))
    {
        client.Connect();

        log.Debug($"Listing contents of remote directory '{_remoteDirectory}' matching filename prefix '{_filenamePrefix}'.");
        var remoteFiles = client.ListDirectory(_remoteDirectory)
            .Where(x => x.Name.StartsWith(_filenamePrefix));

        foreach (var remoteFile in remoteFiles)
        {
            var localFile = Path.Combine(localDirectory, remoteFile.Name);
            log.Debug($"Downloading remote file '{remoteFile.FullName}' to '{localFile}'.");

            using (var file = File.OpenWrite(localFile))
            {
                client.DownloadFile(remoteFile.FullName, file);
            }

            Emit(new NodeOutputWithFilePath(localFile));
        }

        client.Disconnect();
    }

    SignalEnd();
}
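// A minimal sketch (assumed usage) of how the SSH.NET authentication methods held in
// _passwordAuthentication / _privateKeyAuthentication could be constructed.
// The username, password, key path and passphrase below are hypothetical.
var passwordAuth = new PasswordAuthenticationMethod("etl-user", "secret");
var privateKeyAuth = new PrivateKeyAuthenticationMethod(
    "etl-user",
    new PrivateKeyFile("/home/etl-user/.ssh/id_rsa", "key-passphrase"));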
public override void OnExecute(EtlPipelineContext context) { var log = context.GetLogger("EtlLib.Nodes.CsvWriterNode"); var first = true; var columns = new List<string>(); using (var file = File.OpenWrite(_filePath)) using (var sw = new StreamWriter(file, _encoding)) using (var writer = new CsvWriter(sw)) { writer.Configuration.QuoteAllFields = _quoteAllFields; writer.Configuration.CultureInfo = _culture; foreach (var row in Input) { if (first && _includeHeader) { foreach (var column in row) { writer.WriteField(column.Key); columns.Add(column.Key); } writer.NextRecord(); first = false; } foreach (var column in row) { writer.WriteField(column.Value ?? _nullAs); } writer.NextRecord(); context.ObjectPool.Return(row); _writtenRowCount++; } writer.Flush(); } if (_includeHeader) _writtenRowCount++; log.Debug($"{this} wrote {_writtenRowCount} rows to '{_filePath}"); Emit(new CsvWriterNodeResult(_filePath, _writtenRowCount, columns.ToArray(), _includeHeader)); SignalEnd(); }
public override IEnumerableEtlOperationResult<AmazonS3WriterResult> ExecuteWithResult(EtlPipelineContext context)
{
    var logger = context.GetLogger(GetType().FullName);

    if ((_awsCredentials == null || _awsCredentials is AnonymousAWSCredentials) &&
        context.Config.ContainsKey(Constants.S3AccessKeyId))
    {
        _awsCredentials = new BasicAWSCredentials(
            context.Config[Constants.S3AccessKeyId],
            context.Config[Constants.S3SecretAccessKey]);
    }

    var results = new List<AmazonS3WriterResult>();
    var tasks = new List<Task>();

    using (var client = new TransferUtility(new AmazonS3Client(_awsCredentials, _awsRegionEndpoint)))
    {
        foreach (var file in _files)
        {
            var objectKey = Path.GetFileName(file);
            var request = new TransferUtilityUploadRequest
            {
                BucketName = _bucketName,
                Key = objectKey,
                FilePath = file,
                StorageClass = _storageClass
            };

            // Track the upload start time and last reported percentage for progress logging.
            var startTime = DateTime.Now;
            var progress = 0;

            request.UploadProgressEvent += (sender, e) =>
            {
                if (e.PercentDone == 100)
                {
                    logger.Info($"Upload of '{e.FilePath}' completed.");
                    return;
                }

                // Log at most once per 5% step, and only once some time has elapsed.
                if (progress == e.PercentDone || e.PercentDone % 5 != 0 ||
                    !((DateTime.Now - startTime).TotalSeconds > 0))
                {
                    return;
                }

                var bs = e.TransferredBytes / (DateTime.Now - startTime).TotalSeconds;
                var kbs = bs / 1024;
                logger.Info($"Uploading '{Path.GetFileName(e.FilePath)}'; Progress: {e.PercentDone.ToString().PadLeft(3)}%, {kbs:0.00} KiB/s");
                progress = e.PercentDone;
            };

            logger.Info($"Beginning upload of '{file}'.");
            var task = client.UploadAsync(request);
            task.GetAwaiter().OnCompleted(() => { results.Add(new AmazonS3WriterResult(objectKey)); });
            tasks.Add(task);
        }

        // Wait for all uploads to finish before the transfer client is disposed.
        Task.WaitAll(tasks.ToArray());
    }

    return new EnumerableEtlOperationResult<AmazonS3WriterResult>(true, results);
}
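// A minimal sketch of the throughput math used in the progress handler above:
// KiB/s is derived from the bytes transferred since the upload started. The helper
// name is hypothetical; it is not part of the operation itself.
static double ToKiBPerSecond(long transferredBytes, DateTime startedAt)
{
    var elapsedSeconds = (DateTime.Now - startedAt).TotalSeconds;
    return elapsedSeconds > 0 ? transferredBytes / elapsedSeconds / 1024 : 0;
}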