protected override void Load(ContainerBuilder builder) {
    // Registers a template engine per enabled template, for every process in the root.
    foreach (var process in _root.Processes) {
        // Using Cfg-Net.Reader to read templates, for now.
        if (process.Templates.Any()) {
            // Reader stack: a source detector chooses between the named file and web readers.
            builder.RegisterType<SourceDetector>().As<ISourceDetector>();
            builder.RegisterType<FileReader>().Named<IReader>("file");
            builder.RegisterType<WebReader>().Named<IReader>("web");
            builder.Register<IReader>(ctx => new DefaultReader(
                ctx.Resolve<ISourceDetector>(),
                ctx.ResolveNamed<IReader>("file"),
                ctx.ResolveNamed<IReader>("web")
            ));
        }
        foreach (var t in process.Templates.Where(t => t.Enabled)) {
            var template = t; // loop-local copy captured by the registration lambda below
            builder.Register<ITemplateEngine>(ctx => {
                var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), process);
                switch (template.Engine) {
                    case "razor":
                        return new RazorTemplateEngine(context, template, ctx.Resolve<IReader>());
                    default:
                        // Any unrecognized engine becomes a no-op engine.
                        return new NullTemplateEngine();
                }
            }).Named<ITemplateEngine>(t.Key);
        }
    }
}
public ToStringTransform(PipelineContext context) : base(context) {
    _input = SingleInput();
    // No format configured: plain ToString() regardless of the input type.
    if (context.Transform.Format == string.Empty) {
        _toString = o => o.ToString();
        return;
    }
    // A format is configured: cast to the input's native type so the
    // format-aware ToString overload is used; unknown types fall back to plain ToString().
    switch (_input.Type) {
        case "int32":
        case "int":
            _toString = o => ((int)o).ToString(context.Transform.Format);
            break;
        case "double":
            _toString = o => ((double)o).ToString(context.Transform.Format);
            break;
        case "short":
        case "int16":
            _toString = o => ((short)o).ToString(context.Transform.Format);
            break;
        case "long":
        case "int64":
            _toString = o => ((long)o).ToString(context.Transform.Format);
            break;
        case "datetime":
        case "date":
            _toString = o => ((DateTime)o).ToString(context.Transform.Format);
            break;
        default:
            _toString = o => o.ToString();
            break;
    }
}
public void Warn(PipelineContext context, string message, params object[] args) {
    // Skip the formatting work entirely when warn-level logging is disabled.
    if (WarnEnabled) {
        var custom = string.Format(message, args);
        Console.ForegroundColor = ConsoleColor.Yellow;
        Console.WriteLine(FORMAT, DateTime.UtcNow, ForLog(context), "warn ", custom);
        Console.ResetColor(); // FIX: restore the console color so later non-logger output isn't tinted yellow
    }
}
public override void LoadEntity(ContainerBuilder builder, Process process, Entity entity) {
    // Registers a delete handler (keyed by entity) only for entities flagged for deletes.
    if (entity.Delete) {
        builder.Register<IEntityDeleteHandler>(ctx => {
            var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), process, entity);
            var inputConnection = process.Connections.First(c => c.Name == entity.Connection);
            // Input side: read the entity's primary keys from the source.
            // Only sqlserver is supported; anything else stays a null reader.
            IRead input = new NullReader(context);
            switch (inputConnection.Provider) {
                case "sqlserver":
                    input = new SqlReader(context, entity.GetPrimaryKey(), ReadFrom.Input);
                    break;
            }
            // Output side: read existing keys and delete the ones no longer present in the input.
            IRead output = new NullReader(context);
            IDelete deleter = new NullDeleter(context);
            var outputConnection = process.Connections.First(c => c.Name == "output");
            switch (outputConnection.Provider) {
                case "sqlserver":
                    output = new SqlReader(context, entity.GetPrimaryKey(), ReadFrom.Output);
                    deleter = new SqlDeleter(new OutputContext(context, new Incrementer(context)));
                    break;
            }
            return new ParallelDeleteHandler(new DefaultDeleteHandler(entity, input, output, deleter));
        }).Named<IEntityDeleteHandler>(entity.Key);
    }
}
public void Error(PipelineContext context, string message, params object[] args) {
    // Skip the formatting work entirely when error-level logging is disabled.
    if (ErrorEnabled) {
        var custom = string.Format(message, args);
        Console.ForegroundColor = ConsoleColor.Red;
        Console.Error.WriteLine(FORMAT, DateTime.UtcNow, ForLog(context), "error", custom);
        Console.ResetColor(); // FIX: restore the console color so later non-logger output isn't tinted red
    }
}
public void Info(PipelineContext context, string message, params object[] args) {
    // Skip the formatting work entirely when info-level logging is disabled.
    if (InfoEnabled) {
        var custom = string.Format(message, args);
        Console.ForegroundColor = ConsoleColor.Gray;
        Console.WriteLine(FORMAT, DateTime.UtcNow, ForLog(context), "info ", custom);
        Console.ResetColor(); // FIX: restore the console color for subsequent non-logger output
    }
}
public void Debug(PipelineContext context, string message, params object[] args) {
    // Skip the formatting work entirely when debug-level logging is disabled.
    if (DebugEnabled) {
        var custom = string.Format(message, args);
        Console.ForegroundColor = ConsoleColor.Cyan;
        Console.WriteLine(FORMAT, DateTime.UtcNow, ForLog(context), "debug", custom);
        Console.ResetColor(); // FIX: restore the console color for subsequent non-logger output
    }
}
public override void LoadEntity(ContainerBuilder builder, Process process, Entity entity) {
    // Registers the entity's input reader, keyed by entity, based on the input provider.
    builder.Register<IRead>(ctx => {
        var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), process, entity);
        var input = new InputContext(context, new Incrementer(context));
        switch (input.Connection.Provider) {
            case "internal":
                context.Debug("Registering {0} provider", input.Connection.Provider);
                return new DataSetEntityReader(input);
            case "sqlserver":
                // ReadSize == 0 means read everything in one pass; otherwise read in batches.
                if (input.Entity.ReadSize == 0) {
                    context.Debug("Registering {0} reader", input.Connection.Provider);
                    return new SqlInputReader(input, input.InputFields);
                }
                context.Debug("Registering {0} batch reader", input.Connection.Provider);
                return new SqlInputBatchReader(
                    input,
                    new SqlInputReader(input, input.Entity.GetPrimaryKey())
                );
            default:
                // FIX: the message had no {0} placeholder, so the provider argument was
                // silently dropped; include it so the warning names the unsupported provider.
                context.Warn("Registering null reader for provider {0}", input.Connection.Provider);
                return new NullEntityReader();
        }
    }).Named<IRead>(entity.Key);
}
protected override void RegisterProcess(ContainerBuilder builder, Process original) {
    // Composes the process controller: entity pipelines, delete handlers,
    // init-mode actions, template renders, and pre/post actions, keyed by process.
    builder.Register<IProcessController>(ctx => {
        var pipelines = new List<IPipeline>();
        var deleteHandlers = new List<IEntityDeleteHandler>();
        // entity-level pipelines
        foreach (var entity in original.Entities) {
            pipelines.Add(ctx.ResolveNamed<IPipeline>(entity.Key));
            if (entity.Delete) {
                deleteHandlers.Add(ctx.ResolveNamed<IEntityDeleteHandler>(entity.Key));
            }
        }
        // process-level pipeline
        pipelines.Add(ctx.ResolveNamed<IPipeline>(original.Key));
        var outputProvider = original.Connections.First(c => c.Name == "output").Provider;
        var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), original);
        var controller = new ProcessController(pipelines, deleteHandlers);
        // init mode: create the output structures before the run, the star view after it
        if (original.Mode == "init") {
            switch (outputProvider) {
                case "sqlserver":
                    var output = new OutputContext(context, new Incrementer(context));
                    controller.PreActions.Add(new SqlInitializer(output));
                    controller.PostActions.Add(new SqlStarViewCreator(output));
                    break;
            }
        }
        // templates: render each enabled template, then hook up its mode-matching actions
        foreach (var template in original.Templates.Where(t => t.Enabled)) {
            controller.PreActions.Add(new RenderTemplateAction(template, ctx.ResolveNamed<ITemplateEngine>(template.Key)));
            foreach (var action in template.Actions.Where(a => a.GetModes().Any(m => m == original.Mode))) {
                // An action may run both before and after (flags are not mutually exclusive).
                if (action.Before) {
                    controller.PreActions.Add(ctx.ResolveNamed<IAction>(action.Key));
                }
                if (action.After) {
                    controller.PostActions.Add(ctx.ResolveNamed<IAction>(action.Key));
                }
            }
        }
        // actions (process-level, mode-matching)
        foreach (var action in original.Actions.Where(a => a.GetModes().Any(m => m == original.Mode))) {
            if (action.Before) {
                controller.PreActions.Add(ctx.ResolveNamed<IAction>(action.Key));
            }
            if (action.After) {
                controller.PostActions.Add(ctx.ResolveNamed<IAction>(action.Key));
            }
        }
        return controller;
    }).Named<IProcessController>(original.Key);
}
public void Error(PipelineContext context, Exception exception, string message, params object[] args) {
    // Writes the formatted message plus the exception details to the debug listeners.
    if (!ErrorEnabled) {
        return;
    }
    var formatted = string.Format(message, args);
    System.Diagnostics.Debug.WriteLine(FORMAT, DateTime.UtcNow, ForLog(context), "error", formatted);
    System.Diagnostics.Debug.WriteLine(exception.Message);
    System.Diagnostics.Debug.WriteLine(exception.StackTrace);
}
public IsValidator(PipelineContext context) : base(context) {
    _input = SingleInput();
    // When the result field is boolean, yield the raw can-convert flag;
    // otherwise yield an empty string on success or an error message on failure.
    // (The two branches produce different result types on purpose.)
    if (context.Field.Type.StartsWith("bool", StringComparison.Ordinal)) {
        _canConvert = v => Constants.CanConvert()[context.Transform.Type](v);
    } else {
        _canConvert = v => Constants.CanConvert()[context.Transform.Type](v) ? string.Empty : string.Format("The value {0} can not be converted to a {1}.", v, context.Transform.Type);
    }
}
public CompositeValidator(PipelineContext context, IEnumerable<ITransform> transforms) : base(context) {
    _transforms = transforms.ToArray();
    // Boolean result field: valid only if every inner transform reports true.
    // Otherwise: concatenate each inner transform's message, space-separated and trimmed
    // (an empty string therefore means all validators passed).
    if (context.Field.Type.StartsWith("bool", StringComparison.Ordinal)) {
        _validate = r => _transforms.All(t => (bool)t.Transform(r)[context.Field]);
    } else {
        _validate = r => string.Concat(_transforms.Select(t => t.Transform(r)[context.Field] + " ")).Trim();
    }
}
public IEnumerable<MapItem> Read(PipelineContext context) {
    // Resolve the configured map and its connection, then run the map query via Dapper.
    var map = context.Process.Maps.First(m => m.Name == context.Transform.Map);
    var connection = context.Process.Connections.First(cn => cn.Name == map.Connection);
    var items = new List<MapItem>();
    using (var sqlConnection = new SqlConnection(connection.GetConnectionString())) {
        sqlConnection.Open();
        var results = sqlConnection.Query<MapItem>(map.Query, commandTimeout: connection.Timeout, commandType: System.Data.CommandType.Text);
        items.AddRange(results);
    }
    return items;
}
public TimeZoneOperation(PipelineContext context) : base(context) {
    _input = SingleInput();
    _output = context.Field;
    // Precompute the base-UTC-offset difference between the configured zones.
    // Throws TimeZoneNotFoundException if either configured id is unknown.
    var fromTimeZoneInfo = TimeZoneInfo.FindSystemTimeZoneById(context.Transform.FromTimeZone);
    _toTimeZoneInfo = TimeZoneInfo.FindSystemTimeZoneById(context.Transform.ToTimeZone);
    _adjustment = _toTimeZoneInfo.BaseUtcOffset - fromTimeZoneInfo.BaseUtcOffset;
    // NOTE(review): assumes daylight saving always shifts by exactly +1 hour; that holds
    // for most zones but not all (some use 30-minute offsets) — confirm if that matters.
    _daylightAdjustment = _adjustment.Add(new TimeSpan(0, 1, 0, 0));
}
public RazorTemplateEngine(PipelineContext context, Configuration.Template template, IReader templateReader) {
    // Capture collaborators, then build a RazorEngine service configured for this template.
    _context = context;
    _template = template;
    _templateReader = templateReader;
    // Html templates get html-encoded output; everything else is emitted raw.
    var encoding = _template.ContentType == "html" ? Encoding.Html : Encoding.Raw;
    var config = new FluentTemplateServiceConfiguration(
        c => c.WithEncoding(encoding).WithCodeLanguage(Language.CSharp)
    );
    _service = RazorEngineService.Create(config);
}
static void Main(string[] args) {
    // Bootstrap context used only for start-up logging before the real container exists.
    var context = new PipelineContext(new ConsoleLogger(Logging.LogLevel.Debug), new Configuration.Process() { Name = "Command" });
    if (args == null || args.Length == 0) {
        context.Error("Please pass in a configuration.");
        System.Environment.Exit(1);
    }
    // Phase 1: load and validate the configuration in a throw-away container.
    var builder = new ContainerBuilder();
    builder.RegisterModule(new ConfigurationModule(args[0], "Shorthand.xml"));
    var cfg = builder.Build();
    var root = cfg.Resolve<Root>();
    if (root.Warnings().Any()) {
        foreach (var warning in root.Warnings()) {
            context.Warn(warning);
        }
    }
    // Any configuration error is fatal.
    if (root.Errors().Any()) {
        foreach (var error in root.Errors()) {
            context.Error(error);
        }
        System.Environment.Exit(1);
    }
    context.Info("Configuration is Ok");
    cfg.Dispose();
    // Phase 2: register the pipeline modules in a fresh container.
    // register pipeline
    builder = new ContainerBuilder();
    builder.Register<IPipelineLogger>(ctx => new ConsoleLogger(LogLevel.Info)).SingleInstance();
    builder.RegisterModule(new MapModule(root));
    builder.RegisterModule(new TemplateModule(root));
    builder.RegisterModule(new ActionModule(root));
    builder.RegisterModule(new EntityControlModule(root));
    builder.RegisterModule(new EntityInputModule(root));
    builder.RegisterModule(new EntityOutputModule(root));
    builder.RegisterModule(new EntityMasterUpdateModule(root));
    builder.RegisterModule(new EntityDeleteModule(root));
    builder.RegisterModule(new EntityPipelineModule(root));
    builder.RegisterModule(new ProcessPipelineModule(root));
    builder.RegisterModule(new ProcessControlModule(root));
    // Phase 3: run every process controller inside one lifetime scope.
    using (var c = builder.Build().BeginLifetimeScope()) {
        // resolve, run, and release
        var container = c;
        foreach (var controller in root.Processes.Select(process => container.ResolveNamed<IProcessController>(process.Key))) {
            controller.PreExecute();
            controller.Execute();
            controller.PostExecute();
        }
    }
}
public ContainsValidater(PipelineContext context) : base(context) {
    _input = SingleInput();
    // Boolean target fields get the raw contains flag; any other type gets an
    // empty string on success or a descriptive message on failure.
    // (The two branches intentionally produce different result types.)
    if (context.Field.Type.StartsWith("bool", StringComparison.Ordinal)) {
        _contains = s => s.Contains(context.Transform.Value);
    } else {
        _contains = s => s.Contains(context.Transform.Value) ? String.Empty : String.Format("{0} does not contain {1}.", _input.Alias, context.Transform.Value);
    }
}
private static IAction SwitchAction(IComponentContext ctx, Process process, Action action) {
    // Maps a configured action name to its runtime implementation.
    var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), process);
    if (action.Name == "copy") {
        // In-template copies render content to a file; otherwise it's a file-to-file copy.
        if (action.InTemplate) {
            return new ContentToFileAction(context, action);
        }
        return new FileToFileAction(context, action);
    }
    if (action.Name == "web") {
        return new WebAction(context, action);
    }
    // Unknown action names are logged and replaced with a no-op.
    context.Error("{0} action is not registered.", action.Name);
    return new NullAction();
}
public DefaultTransform(PipelineContext context, IEnumerable<Field> fields) : base(context) {
    _fields = fields.ToArray();
    _typeDefaults = Constants.TypeDefaults();
    // Master entities address fields by MasterIndex; everything else by Index.
    if (context.Entity.IsMaster) {
        _index = f => f.MasterIndex;
    } else {
        _index = f => f.Index;
    }
    // Register a lazy default producer per field: either the type's default
    // or the field's own configured default, converted to the field's type.
    foreach (var current in _fields) {
        var field = current; // loop-local copy for the closure below
        _getDefaultFor[_index(field)] = () => field.Default == Constants.DefaultSetting ? _typeDefaults[field.Type] : field.Convert(field.Default);
    }
}
public virtual PipelineExecutionResult Execute(PipelineContext context) {
    // Runs each step in order, stopping at the first failure;
    // the aggregate result contains every step result that ran.
    if (context == null) {
        throw new ArgumentNullException("context");
    }
    var results = new List<StepExecutionResult>();
    foreach (var step in Steps) {
        var result = step.Execute(context);
        results.Add(result);
        if (!result.Success) {
            break;
        }
    }
    return new PipelineExecutionResult(results);
}
public override void LoadEntity(ContainerBuilder builder, Process process, Entity entity) {
    //master updater
    builder.Register<IUpdate>(ctx => {
        var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), process, entity);
        var output = new OutputContext(context, new Incrementer(context));
        // Only sqlserver gets a real master updater; anything else is a no-op.
        if (output.Connection.Provider == "sqlserver") {
            context.Debug("Registering {0} master updater", output.Connection.Provider);
            return new SqlMasterUpdater(output);
        }
        context.Warn("Registering null updater");
        return new NullMasterUpdater();
    }).Named<IUpdate>(entity.Key);
}
public FromXmlTransform(PipelineContext context) : base(context) {
    // One input field holds the XML; multiple output fields are extracted from it.
    _input = SingleInputForMultipleOutput();
    var output = MultipleOutput();
    // Partition output fields by whether they come from XML attributes or elements.
    // IC is presumably a case-insensitive StringComparison — confirm at its declaration.
    foreach (var f in output) {
        if (f.NodeType.Equals("attribute", IC)) {
            _attributes[f.Name] = f;
        } else {
            _elements[f.Name] = f;
        }
    }
    // Only scan attributes while parsing when at least one attribute field exists.
    _searchAttributes = _attributes.Count > 0;
    _total = _elements.Count + _attributes.Count;
}
public override XmlTextReader ProcessResponse(XmlTextReader reader, PipelineContext context) {
    // Preprocesses GetList responses; any other command passes the reader through untouched.
    XmlTextReader xReader = reader;
    string command = context.Parameters["command"] as String;
    if (command == "GetList") // Code runs on every GetList
    {
        try {
            Trace.Write("==========================Start PreprocessListItems " + System.DateTime.Now.ToShortDateString() + ", " + System.DateTime.Now.ToLongTimeString() + Environment.NewLine);
            xReader = PreprocessListItems(reader, context);
            Trace.Write("==========================Stop PreprocessListItems " + System.DateTime.Now.ToShortDateString() + ", " + System.DateTime.Now.ToLongTimeString() + Environment.NewLine);
        } catch (Exception ex) {
            // FIX: the original bare catch swallowed every exception silently.
            // Preprocessing stays best-effort (the original reader is returned),
            // but the failure is now traced instead of vanishing.
            Trace.Write("PreprocessListItems failed: " + ex + Environment.NewLine);
        }
    }
    return xReader;
}
public override void LoadEntity(ContainerBuilder builder, Process process, Entity entity) {
    builder.Register<IEntityController>(ctx => {
        var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), process, entity);
        var output = new OutputContext(context, new Incrementer(context));
        // Anything other than sqlserver gets a no-op controller.
        if (output.Connection.Provider != "sqlserver") {
            context.Debug("Registering null controller");
            return new NullEntityController();
        }
        context.Debug("Registering sql server controller");
        // Output structures are only created when the process runs in init mode.
        var initializer = process.Mode == "init" ? (IAction)new SqlEntityInitializer(output) : new NullInitializer();
        return new SqlEntityController(output, initializer);
    }).Named<IEntityController>(entity.Key);
}
public MapTransform(PipelineContext context, IMapReader mapReader) : base(context) {
    _input = SingleInput();
    // Build the lookup: "from" values (converted to the input's type) map to result producers.
    foreach (var item in mapReader.Read(context)) {
        var from = _input.Convert(item.From);
        if (item.To == string.Empty) {
            // Empty "to" means: read the value from another field (named by Parameter) at run time.
            var field = context.Entity.GetField(item.Parameter);
            _map[from] = (r) => r[field];
        } else {
            // Otherwise pre-convert the constant "to" once and return it for every row.
            var to = context.Field.Convert(item.To);
            _map[from] = (r) => to;
        }
    }
    // Guarantee a catch-all entry: unmatched inputs fall back to the field's default.
    if (!_map.ContainsKey(CATCH_ALL)) {
        var value = context.Field.Convert(context.Field.Default);
        _map[CATCH_ALL] = (r) => value;
    }
}
public void build_the_object() {
    // Arrange: seed the graph with constructor arguments, both by type and by name.
    var objectGraph = new ObjectGraph<ClassWithMultipleCtorArguments>();
    objectGraph.ByType[typeof (Color)] = "Red";
    objectGraph.ByName["name"] = "Jeremy";
    objectGraph.ByType[typeof (int)] = "36";
    objectGraph.ByName["day"] = DateTime.Today;
    var pipeline = new PipelineContext();
    var pluginGraph = new PluginGraph();
    var graphBuilder = new ObjectGraphBuilder(objectGraph, pluginGraph);
    var pluginType = pluginGraph.PluginTypeFor(typeof (ClassWithMultipleCtorArguments));
    // Act: build the instance through the plugin pipeline.
    var built = graphBuilder.Build(pluginType, pipeline);
    var target = built.ShouldBeOfType<ClassWithMultipleCtorArguments>();
    // Assert: every constructor argument landed on the right property,
    // including the string-to-enum and string-to-int conversions.
    target.Color.ShouldEqual(Color.Red);
    target.Name.ShouldEqual("Jeremy");
    target.Age.ShouldEqual(36);
    target.Day.ShouldEqual(DateTime.Today);
}
public override void LoadEntity(ContainerBuilder builder, Process process, Entity entity) {
    // An entity defers to the process pipeline type unless the process says "defer".
    var type = process.Pipeline == "defer" ? entity.Pipeline : process.Pipeline;
    builder.Register<IPipeline>((ctx) => {
        var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), process, entity);
        IPipeline pipeline;
        switch (type) {
            case "parallel.linq":
                context.Debug("Registering {0} pipeline.", type);
                pipeline = new ParallelPipeline(new DefaultPipeline(ctx.ResolveNamed<IEntityController>(entity.Key), context));
                break;
            default:
                // FIX: the message had no {0} placeholder, so the stray "type" argument
                // was silently ignored by string.Format; it has been removed.
                context.Debug("Registering linq pipeline.");
                pipeline = new DefaultPipeline(ctx.ResolveNamed<IEntityController>(entity.Key), context);
                break;
        }
        var provider = process.Connections.First(c => c.Name == "output").Provider;
        // extract
        pipeline.Register(ctx.ResolveNamed<IRead>(entity.Key));
        // transform
        pipeline.Register(new DefaultTransform(context, context.GetAllEntityFields()));
        pipeline.Register(new TflHashCodeTransform(context));
        pipeline.Register(TransformFactory.GetTransforms(ctx, process, entity, entity.GetAllFields().Where(f => f.Transforms.Any())));
        pipeline.Register(new StringTruncateTransfom(context));
        if (provider == "sqlserver") {
            // SQL Server DATETIME cannot store values before 1753-01-01.
            pipeline.Register(new MinDateTransform(context, new DateTime(1753, 1, 1)));
        }
        //load
        pipeline.Register(ctx.ResolveNamed<IWrite>(entity.Key));
        pipeline.Register(ctx.ResolveNamed<IUpdate>(entity.Key));
        return pipeline;
    }).Named<IPipeline>(entity.Key);
}
public void GetTypedDataSet() {
    // Arrange: build the process from config and point the input context at the last entity.
    var cfg = File.ReadAllText(@"Files\PersonAndPet.xml");
    var shorthand = File.ReadAllText(@"Files\Shorthand.xml");
    var process = new Root(cfg, shorthand, new Cfg.Net.Validators("js", new JintParser())).Processes.First();
    var personContext = new PipelineContext(new DebugLogger(), process, process.Entities.Last());
    var entityInput = new InputContext(personContext, new Incrementer(personContext));

    // Act: read the internal data set.
    var rows = new DataSetEntityReader(entityInput).Read().ToArray();

    // Assert: three typed rows with the expected id/name values.
    Assert.IsInstanceOf<IEnumerable<Row>>(rows);
    Assert.AreEqual(3, rows.Length);
    var firstRow = rows[0];
    var secondRow = rows[1];
    Assert.IsInstanceOf<int>(firstRow[FieldAt(0)]);
    Assert.AreEqual(1, firstRow[FieldAt(0)]);
    Assert.AreEqual("Dale", firstRow[FieldAt(1)]);
    Assert.AreEqual("Michael", secondRow[FieldAt(1)]);
    foreach (var row in rows) {
        Console.WriteLine(row);
    }
}
public void InventoryQuery() {
    // Expected SELECT, produced by formatting the generated input SQL.
    const string expected = @"SELECT [InventoryKey] ,[Id] ,[Timestamp] ,[StatusChangeTimestamp] ,[PartKey] ,[StorageLocationKey] ,[SerialNo1] ,[SerialNo2] ,[SerialNo3] ,[SerialNo4] ,[Pallet] ,[Lot] ,[ShipmentOrder] ,[DateReceived] ,[DateInstalled] ,[LocationInstalled] ,[Notes] ,[InventoryStatusId] ,[Hide] ,[SS_RowVersion] FROM [Inventory] WHERE ([InventoryStatusId] = 80) ";
    // Arrange: load the Inventory process from a local config file.
    // NOTE(review): depends on C:\temp\Inventory.xml existing on the test machine.
    var builder = new ContainerBuilder();
    builder.RegisterModule(new ConfigurationModule(@"C:\temp\Inventory.xml", @"Files\Shorthand.xml"));
    var container = builder.Build();
    var root = container.Resolve<Root>();
    var process = root.Processes[0];
    var context = new PipelineContext(new ConsoleLogger(), process, process.Entities[0]);
    // Add the filter that should surface in the generated WHERE clause.
    process.Entities[0].Filter.Add(new Filter { Left = "InventoryStatusId", Right = "80" });
    // Act: generate and format the input SQL for all input fields.
    var sql = new SqlFormattingManager().Format(context.SqlSelectInput(process.Entities[0].GetAllFields().Where(f => f.Input).ToArray()));
    Assert.AreEqual(expected, sql);
}
public static ITransform SwitchTransform(IComponentContext ctx, PipelineContext context) {
    // Factory: maps a configured transform method name to its implementation.
    // Unknown methods log a warning and become a pass-through null transformer.
    context.Activity = PipelineActivity.Transform;
    switch (context.Transform.Method) {
        case "format": return new FormatTransform(context);
        case "left": return new LeftTransform(context);
        case "right": return new RightTransform(context);
        case "copy": return new CopyTransform(context);
        case "concat": return new ConcatTransform(context);
        case "fromxml": return new FromXmlTransform(context);
        case "fromsplit": return new FromSplitTransform(context);
        // html and xml decoding share one implementation
        case "htmldecode": return new DecodeTransform(context);
        case "xmldecode": return new DecodeTransform(context);
        case "hashcode": return new HashcodeTransform(context);
        case "padleft": return new PadLeftTransform(context);
        case "padright": return new PadRightTransform(context);
        case "splitlength": return new SplitLengthTransform(context);
        case "timezone": return new TimeZoneOperation(context);
        case "trim": return new TrimTransform(context);
        case "trimstart": return new TrimStartTransform(context);
        case "trimend": return new TrimEndTransform(context);
        case "javascript": return new JintTransform(context);
        case "tostring": return new ToStringTransform(context);
        case "toupper": return new ToUpperTransform(context);
        case "tolower": return new ToLowerTransform(context);
        case "join": return new JoinTransform(context);
        // the map transform needs its map reader, resolved by the map's key
        case "map": return new MapTransform(context, ctx.ResolveNamed<IMapReader>(context.Process.Maps.First(m => m.Name == context.Transform.Map).Key));
        case "decompress": return new DecompressTransform(context);
        // validators are registered through the same factory
        case "contains": return new ContainsValidater(context);
        case "is": return new IsValidator(context);
        default:
            context.Warn("The {0} method is undefined.", context.Transform.Method);
            return new NullTransformer(context);
    }
}
void IPixelTransformInternal.Init(PipelineContext ctx) {
    // Give any externally-registered format converter a chance to run first.
    MagicTransforms.AddExternalFormatConverter(ctx);
    // If the source still isn't in the required pixel format, insert a conversion
    // transform; AddDispose registers it with the context for cleanup.
    if (ctx.Source.Format.FormatGuid != outFormat)
        ctx.Source = ctx.AddDispose(new ConversionTransform(ctx.Source, null, null, PixelFormat.FromGuid(outFormat)));
}
public void TestInitialise() {
    // Fresh environment (case-insensitive keys) and pipeline context for every test.
    _Environment = new Dictionary <string, object>(StringComparer.OrdinalIgnoreCase);
    _Context = new PipelineContext(_Environment);
}
public static Parser CreateDefaultParser(Command command, PipelineContext context)
{
    // Build a parser for the command with the standard default middleware applied.
    var builder = CreateParserBuilder(command, context);
    return builder.UseDefaults().Build();
}
public SqlTableToFlatFilePipeline(PipelineContext context) {
    m_Context = context;
    // Fail fast: the source must be set and the target must be a csv or txt file.
    ConfirmSourceAndTargetAreSetCorrectly(m_Context);
}
public void BeforeInitialisation(PipelineContext context) {
    // Intentionally empty: this implementation has no work to do before initialisation.
}
public IEnumerable <MapItem> Read(PipelineContext context) {
    // Internal maps carry their items directly in the configuration; just look the map up by name.
    var map = context.Process.Maps.First(m => m.Name == context.Transform.Map);
    return map.Items;
}
protected virtual void ProcessPipelines(PipelineStep pipelineStep, ICollection <Pipeline> subPipelines, PipelineContext pipelineContext) {
    // Runs every assigned sub-pipeline for the given step; aborts with an error
    // when the step has no sub-pipelines.
    if (pipelineStep == null) {
        throw new ArgumentNullException("pipelineStep");
    }
    if (subPipelines == null) {
        throw new ArgumentNullException("subPipelines");
    }
    if (pipelineContext == null) {
        throw new ArgumentNullException("pipelineContext");
    }
    ILogger logger = pipelineContext.PipelineBatchContext.Logger;
    if (!subPipelines.Any()) {
        logger.Error("Pipeline step processing will abort because no pipelines are assigned to the pipeline step. (pipeline step: {0})", (object)pipelineStep.Name);
    } else {
        // FIX: removed an unused List<Pipeline> that was allocated and never read,
        // along with a redundant IEnumerable<Pipeline> cast on the loop source.
        foreach (Pipeline subPipeline in subPipelines) {
            RunSubPipelines(pipelineContext, subPipeline);
        }
    }
}
public FileToFileAction(PipelineContext context, Action action) {
    // Capture the pipeline context and the copy action for later execution.
    _action = action;
    _context = context;
}
public void FlatSql() {
    // Arrange: compose the Northwind process and confirm the configuration has no errors.
    var composer = new CompositionRoot();
    var controller = composer.Compose(@"Files\Northwind.xml");
    Assert.AreEqual(0, composer.Process.Errors().Length);
    // Act: generate and format the CREATE TABLE statement for the flat output table.
    var pipe = new PipelineContext(new TraceLogger(), composer.Process);
    var actual = new SqlFormattingManager().Format(pipe.SqlCreateFlatTable(new SqlServerConnectionFactory(new Connection())));
    Assert.IsNotNull(controller);
    // Expected DDL: Tfl bookkeeping columns first, then every denormalized entity column.
    const string expected = @"CREATE TABLE [NorthWindFlat] ( [TflKey] INT NOT NULL ,[TflBatchId] INT NOT NULL ,[TflHashCode] INT NOT NULL ,[TflDeleted] BIT NOT NULL ,[OrderDetailsDiscount] REAL NOT NULL ,[OrderDetailsOrderID] INT NOT NULL ,[OrderDetailsProductID] INT NOT NULL ,[OrderDetailsQuantity] SMALLINT NOT NULL ,[OrderDetailsRowVersion] VARBINARY(8) NOT NULL ,[OrderDetailsUnitPrice] DECIMAL(19, 4) NOT NULL ,[OrderDetailsExtendedPrice] DECIMAL(19, 4) NOT NULL ,[CountryExchange] NVARCHAR(128) NOT NULL ,[Test] NVARCHAR(64) NOT NULL ,[OrdersCustomerID] NVARCHAR(5) NOT NULL ,[OrdersEmployeeID] INT NOT NULL ,[ProductsSupplierID] INT NOT NULL ,[ProductsCategoryID] INT NOT NULL ,[OrdersShipVia] INT NOT NULL ,[OrdersFreight] DECIMAL(19, 4) NOT NULL ,[OrdersOrderDate] DATETIME NOT NULL ,[OrdersRequiredDate] DATETIME NOT NULL ,[OrdersShipAddress] NVARCHAR(60) NOT NULL ,[OrdersShipCity] NVARCHAR(15) NOT NULL ,[OrdersShipCountry] NVARCHAR(15) NOT NULL ,[OrdersShipName] NVARCHAR(40) NOT NULL ,[OrdersShippedDate] DATETIME NOT NULL ,[OrdersShipPostalCode] NVARCHAR(10) NOT NULL ,[OrdersShipRegion] NVARCHAR(15) NOT NULL ,[TimeOrderMonth] NVARCHAR(6) NOT NULL ,[TimeOrderDate] NVARCHAR(10) NOT NULL ,[TimeOrderYear] NVARCHAR(4) NOT NULL ,[CustomersAddress] NVARCHAR(60) NOT NULL ,[CustomersCity] NVARCHAR(15) NOT NULL ,[CustomersCompanyName] NVARCHAR(40) NOT NULL ,[CustomersContactName] NVARCHAR(30) NOT NULL ,[CustomersContactTitle] NVARCHAR(30) NOT NULL ,[CustomersCountry] NVARCHAR(15) NOT NULL ,[CustomersFax] NVARCHAR(24) NOT NULL ,[CustomersPhone] NVARCHAR(24) NOT NULL ,[CustomersPostalCode] 
NVARCHAR(10) NOT NULL ,[CustomersRegion] NVARCHAR(15) NOT NULL ,[EmployeesAddress] NVARCHAR(60) NOT NULL ,[EmployeesBirthDate] DATETIME NOT NULL ,[EmployeesCity] NVARCHAR(15) NOT NULL ,[EmployeesCountry] NVARCHAR(15) NOT NULL ,[EmployeesExtension] NVARCHAR(4) NOT NULL ,[EmployeesFirstName] NVARCHAR(10) NOT NULL ,[EmployeesHireDate] DATETIME NOT NULL ,[EmployeesHomePhone] NVARCHAR(24) NOT NULL ,[EmployeesLastName] NVARCHAR(20) NOT NULL ,[EmployeesNotes] NVARCHAR(max) NOT NULL ,[EmployeesPostalCode] NVARCHAR(10) NOT NULL ,[EmployeesRegion] NVARCHAR(15) NOT NULL ,[EmployeesTitle] NVARCHAR(30) NOT NULL ,[EmployeesTitleOfCourtesy] NVARCHAR(25) NOT NULL ,[EmployeesReportsTo] INT NOT NULL ,[EmployeesManager] NVARCHAR(64) NOT NULL ,[Employee] NVARCHAR(64) NOT NULL ,[ProductsDiscontinued] BIT NOT NULL ,[ProductsProductName] NVARCHAR(40) NOT NULL ,[ProductsQuantityPerUnit] NVARCHAR(20) NOT NULL ,[ProductsReorderLevel] SMALLINT NOT NULL ,[ProductsUnitPrice] DECIMAL(19, 4) NOT NULL ,[ProductsUnitsInStock] SMALLINT NOT NULL ,[ProductsUnitsOnOrder] SMALLINT NOT NULL ,[SuppliersAddress] NVARCHAR(60) NOT NULL ,[SuppliersCity] NVARCHAR(15) NOT NULL ,[SuppliersCompanyName] NVARCHAR(40) NOT NULL ,[SuppliersContactName] NVARCHAR(30) NOT NULL ,[SuppliersContactTitle] NVARCHAR(30) NOT NULL ,[SuppliersCountry] NVARCHAR(15) NOT NULL ,[SuppliersFax] NVARCHAR(24) NOT NULL ,[SuppliersHomePage] NVARCHAR(max) NOT NULL ,[SuppliersPhone] NVARCHAR(24) NOT NULL ,[SuppliersPostalCode] NVARCHAR(10) NOT NULL ,[SuppliersRegion] NVARCHAR(15) NOT NULL ,[CategoriesCategoryName] NVARCHAR(15) NOT NULL ,[CategoriesDescription] NVARCHAR(max) NOT NULL ,[ShippersCompanyName] NVARCHAR(40) NOT NULL ,[ShippersPhone] NVARCHAR(24) NOT NULL ,CONSTRAINT pk_NorthWindFlat_tflkey PRIMARY KEY ([TflKey]) ); ";
    Assert.AreEqual(expected, actual);
}
// NOTE(review): pipeline middleware stub — not implemented yet; callers will get
// a NotImplementedException instead of the next-delegate being invoked.
public Task <List <List <string> > > InvokeAsync(GetARequest request, PipelineContext <List <List <string> > > context, NextAsync <List <List <string> > > next, CancellationToken cancellationToken = default) {
    throw new NotImplementedException();
}
public override void Process(PipelineStep pipelineStep, PipelineContext pipelineContext) {
    // Iterates the step's data elements, running the configured sub-pipelines for each
    // element on its own task, in a child context linked back to this one.
    ILogger logger = pipelineContext.PipelineBatchContext.Logger;
    if (!this.CanProcess(pipelineStep, pipelineContext)) {
        logger.Error("Pipeline step processing will abort because the pipeline step cannot be processed. (pipeline step: {0})", (object)pipelineStep.Name);
        return;
    }
    PipelinesSettings pipelinesSettings = pipelineStep.GetPipelinesSettings();
    if (pipelinesSettings == null || !pipelinesSettings.Pipelines.Any()) {
        logger.Error("Pipeline step processing will abort because the pipeline step has no sub-pipelines assigned. (pipeline step: {0})", (object)pipelineStep.Name);
        return;
    }
    IterableDataSettings iterableDataSettings = pipelineContext.GetIterableDataSettings();
    if (iterableDataSettings == null || iterableDataSettings.Data == null) {
        return;
    }
    int num = 0;
    try {
        List<Task> tasks = new List<Task>();
        foreach (object element in iterableDataSettings.Data) {
            Task task = Task.Factory.StartNew(() => {
                if (!pipelineContext.PipelineBatchContext.Stopped) {
                    // Each element gets a fresh child context carrying its own
                    // synchronization settings plus a link to the parent context.
                    PipelineContext pipelineContext1 = new PipelineContext(pipelineContext.PipelineBatchContext);
                    SynchronizationSettings synchronizationSettings = this.ResolveSynchronizationSettingsAndSetElement(pipelineStep, pipelineContext, element);
                    pipelineContext1.Plugins.Add((IPlugin)synchronizationSettings);
                    ParentPipelineContextSettings pipelineContextSettings = new ParentPipelineContextSettings() {
                        ParentPipelineContext = pipelineContext
                    };
                    pipelineContext1.Plugins.Add((IPlugin)pipelineContextSettings);
                    this.ProcessPipelines(pipelineStep, pipelinesSettings.Pipelines, pipelineContext1);
                }
            });
            // BUG FIX: tasks were created but never collected, so the WaitAll below
            // waited on an empty array and the method returned before the work finished.
            tasks.Add(task);
            num++;
        }
        Task.WaitAll(tasks.ToArray());
        // FIX: dropped a stray 4th argument (the context) that had no matching placeholder.
        logger.Info("{0} elements were iterated. (pipeline: {1}, pipeline step: {2})", (object)num, (object)pipelineContext.CurrentPipeline.Name, (object)pipelineContext.CurrentPipelineStep.Name);
    } catch (Exception ex) {
        logger.Error(ex.Message);
        logger.Error(ex.StackTrace);
        pipelineContext.CriticalError = true;
    }
}
public void PipelineContext_GetOrSetTranslation_Throws_If_Translation_Key_Is_Null() {
    // NOTE(review): no expected-exception assertion is visible here — this test
    // presumably relies on an [ExpectedException]-style attribute above it; confirm,
    // otherwise the call should be wrapped in an explicit exception assertion.
    PipelineContext.GetOrSetTranslation <string, string>(_Environment, "original", null, "1", () => "2");
}
public override void Process(PipelineStep pipelineStep, PipelineContext pipelineContext) {
    // Iterates a named group of data rows (resolved via the configured group field key),
    // running the sub-pipelines per row and collecting the produced child records.
    ILogger logger = pipelineContext.PipelineBatchContext.Logger;
    if (!this.CanProcess(pipelineStep, pipelineContext)) {
        logger.Error("Pipeline step processing will abort because the pipeline step cannot be processed. (pipeline step: {0})", (object)pipelineStep.Name);
        return;
    }
    PipelinesSettings pipelinesSettings = pipelineStep.GetPipelinesSettings();
    if (pipelinesSettings == null || !pipelinesSettings.Pipelines.Any()) {
        logger.Error("Pipeline step processing will abort because the pipeline step has no sub-pipelines assigned. (pipeline step: {0})", (object)pipelineStep.Name);
        return;
    }
    var iterateGroupedDataSettings = pipelineStep.GetPlugin<IterateThroughGroupedDataSettings>();
    if (iterateGroupedDataSettings == null || string.IsNullOrEmpty(iterateGroupedDataSettings.GroupFieldKey)) {
        logger.Error("No Iterated Group Settings was found with configured group field key");
        return;
    }
    GroupedDataSettings groupedDataSettings = pipelineContext.GetPlugin<GroupedDataSettings>();
    if (groupedDataSettings == null || groupedDataSettings.Data == null) {
        // Not on this context — fall back to the parent pipeline context.
        var parentSettings = pipelineContext.GetPlugin<ParentPipelineContextSettings>();
        if (parentSettings != null) {
            groupedDataSettings = parentSettings.ParentPipelineContext.GetPlugin<GroupedDataSettings>();
        }
        if (groupedDataSettings == null || groupedDataSettings.Data == null) {
            logger.Error("No Grouped Data Settings was found in the pipelineContext or parent Pipeline Context ");
            return;
        }
    }
    var parentSyncSettings = pipelineContext.GetPlugin<SynchronizationSettings>();
    // BUG FIX: parentSyncSettings was dereferenced without a null check, throwing a
    // NullReferenceException whenever the plugin was absent from the context.
    var source = parentSyncSettings == null ? null : parentSyncSettings.Source as Dictionary<string, string>;
    if (source == null || !source.ContainsKey(iterateGroupedDataSettings.GroupFieldKey)) {
        // FIX: corrected the "doesn;t" typo in the warning message.
        logger.Warn("Group Field Key {0} doesn't exist in source.", iterateGroupedDataSettings.GroupFieldKey);
        return;
    }
    var groupValue = source[iterateGroupedDataSettings.GroupFieldKey];
    int num = 0;
    try {
        var childRecordSettings = new ChildRecordSettings();
        foreach (object element in groupedDataSettings.Data[groupValue]) {
            if (!pipelineContext.PipelineBatchContext.Stopped) {
                // Fresh child context per row, carrying its own sync settings,
                // an empty target dictionary to fill, and a link to this context.
                PipelineContext pipelineContext1 = new PipelineContext(pipelineContext.PipelineBatchContext);
                SynchronizationSettings synchronizationSettings = this.ResolveSynchronizationSettingsAndSetElement(pipelineStep, pipelineContext, element);
                synchronizationSettings.Target = new Dictionary<string, string>();
                pipelineContext1.Plugins.Add((IPlugin)synchronizationSettings);
                ParentPipelineContextSettings pipelineContextSettings = new ParentPipelineContextSettings() {
                    ParentPipelineContext = pipelineContext
                };
                pipelineContext1.Plugins.Add((IPlugin)pipelineContextSettings);
                this.ProcessPipelines(pipelineStep, pipelinesSettings.Pipelines, pipelineContext1);
                // Harvest whatever the sub-pipelines wrote into the child's target.
                var record = pipelineContext1.GetPlugin<SynchronizationSettings>().Target as Dictionary<string, string>;
                if (record != null) {
                    childRecordSettings.Records.Add(record);
                }
            }
            num++;
        }
        pipelineContext.Plugins.Add(childRecordSettings);
        logger.Info("{0} elements were iterated. (pipeline: {1}, pipeline step: {2})", (object)num, (object)pipelineContext.CurrentPipeline.Name, (object)pipelineContext.CurrentPipelineStep.Name);
    } catch (Exception ex) {
        logger.Error(ex.Message);
        logger.Error(ex.StackTrace);
        pipelineContext.CriticalError = true;
    }
}
protected virtual void OnCriticalError(Pipeline errorPipeline, IEnumerable <Pipeline> completedPipelines, PipelineStep pipelineStep, PipelineContext pipelineContext) { }
/// <summary>
/// Pre-processes a configuration's parameters: if any parameter declares transforms, builds a
/// tiny single-entity "ParameterTransform" process, runs the transforms over the parameter
/// values, writes the results back into the parameters, and returns the re-serialized
/// configuration. Returns null when there is nothing to do (no short-hand registration, no
/// cfg available, no parameter transforms) or when the mini process has validation errors.
/// </summary>
private static string TransformConfiguration(IComponentContext ctx, IEnumerable<Parameter> p) {
    // short hand for parameters is defined, try to transform parameters in advance
    if (!ctx.IsRegisteredWithName<IDependency>(TransformModule.ParametersName)) {
        return (null);
    }
    // need either an explicit "cfg" parameter or a named "cfg" registration to work from
    if (!p.Any() && !ctx.IsRegisteredWithName<string>("cfg")) {
        return (null);
    }
    var dependencies = new List<IDependency> {
        ctx.Resolve<IReader>(),
        new DateMathModifier(),
        new ParameterModifier(new NullPlaceHolderReplacer()),
        ctx.ResolveNamed<IDependency>(TransformModule.ParametersName)
    };
    // parameter-supplied cfg wins; otherwise fall back to the container's named "cfg"
    var preCfg = (p.Any() ? p.Named<string>("cfg") : null) ?? ctx.ResolveNamed<string>("cfg");
    var preProcess = new ConfigurationFacade.Process(preCfg, new Dictionary<string, string>(), dependencies.ToArray());
    // nothing to transform — bail out
    if (!preProcess.Parameters.Any(pr => pr.Transforms.Any())) {
        return (null);
    }
    // project each parameter into a Field so the transform pipeline can operate on it
    var fields = preProcess.Parameters.Select(pr => new Field {
        Name = pr.Name,
        Alias = pr.Name,
        Default = pr.Value,
        Type = pr.Type,
        Transforms = pr.Transforms.Select(o => o.ToOperation()).ToList()
    }).ToList();
    var len = fields.Count;
    var entity = new Entity { Name = "Parameters", Alias = "Parameters", Fields = fields };
    var mini = new Process {
        Name = "ParameterTransform",
        ReadOnly = true,
        Entities = new List<Entity> { entity },
        Connections = preProcess.Connections.Select(c => c.ToConnection()).ToList(), // for map transforms that need connections
        Maps = preProcess.Maps.Select(m => m.ToMap()).ToList(), // for map transforms
        Scripts = preProcess.Scripts.Select(s => s.ToScript()).ToList() // for transforms that use scripts (e.g. js)
    };
    mini.Load(); // very important to check after creating, as it runs validation and even modifies!
    if (!mini.Errors().Any()) {
        // modifications in Load() do not make it out to local variables so overwrite them
        fields = mini.Entities.First().Fields;
        entity = mini.Entities.First();
        var transforms = TransformFactory.GetTransforms(ctx, new PipelineContext(ctx.Resolve<IPipelineLogger>(), mini, entity), fields);
        // make an input out of the parameters: one row, one cell per parameter
        var input = new List<IRow>();
        var row = new MasterRow(len);
        for (var i = 0; i < len; i++) {
            row[fields[i]] = preProcess.Parameters[i].Value;
        }
        input.Add(row);
        // run the single row through every transform and take the resulting row
        var output = transforms.Aggregate(input.AsEnumerable(), (rows, t) => t.Operate(rows)).ToList().First();
        // write transformed values back and strip the now-applied transforms
        for (var i = 0; i < len; i++) {
            var parameter = preProcess.Parameters[i];
            parameter.Value = output[fields[i]].ToString();
            parameter.T = string.Empty;
            parameter.Transforms.Clear();
        }
        return (preProcess.Serialize());
    }
    // validation failed: report every error through a pipeline context and give up
    var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), mini, entity);
    foreach (var error in mini.Errors()) {
        context.Error(error);
    }
    return (null);
}
private Task Endpoint1(PipelineContext context) { _event2.Set(); return(Task.CompletedTask); }
public PadLeftTransform(PipelineContext context) : base(context) { _input = SingleInput(); }
/// <summary>
/// Autofac module for ADO providers (sqlserver, mysql, postgresql, sqlite).
/// Registers, keyed by connection/entity key: connection factories, schema readers,
/// entity input readers and version detectors, and — when the process output is ADO —
/// output controllers, calculated-field reader/writer, initializer, row matchers,
/// master-update query writers/updaters, entity writers, and delete handlers.
/// </summary>
protected override void Load(ContainerBuilder builder) {
    if (_process == null) { return; }

    // connections
    foreach (var connection in _process.Connections.Where(c => c.Provider.In(_ado))) {
        var cn = connection; // avoid closure-over-loop-variable issues in the lambdas below

        // Connection Factory
        builder.Register<IConnectionFactory>(ctx => {
            switch (cn.Provider) {
                case "sqlserver": return (new SqlServerConnectionFactory(cn));
                case "mysql": return (new MySqlConnectionFactory(cn));
                case "postgresql": return (new PostgreSqlConnectionFactory(cn));
                case "sqlite": return (new SqLiteConnectionFactory(cn));
                default: return (new NullConnectionFactory());
            }
        }).Named<IConnectionFactory>(connection.Key).InstancePerLifetimeScope();

        // Schema Reader
        builder.Register<ISchemaReader>(ctx => {
            var factory = ctx.ResolveNamed<IConnectionFactory>(cn.Key);
            return (new AdoSchemaReader(ctx.ResolveNamed<IConnectionContext>(cn.Key), factory));
        }).Named<ISchemaReader>(connection.Key);
    }

    // Registration roadmap:
    //   ISchemaReader
    //   IOutputController
    //   IRead (Process for Calculated Columns)
    //   IWrite (Process for Calculated Columns)
    //   IInitializer (Process)
    //   Per Entity:
    //     IInputVersionDetector
    //     IRead (Input, per Entity)
    //     IOutputController
    //       -- ITakeAndReturnRows (for matching)
    //       -- IWriteMasterUpdateQuery (for updating)
    //     IUpdate
    //     IWrite
    //     IEntityDeleteHandler

    // entity input
    foreach (var entity in _process.Entities.Where(e => _process.Connections.First(c => c.Name == e.Connection).Provider.In(_ado))) {

        // INPUT READER
        builder.Register<IRead>(ctx => {
            var input = ctx.ResolveNamed<InputContext>(entity.Key);
            var rowFactory = ctx.ResolveNamed<IRowFactory>(entity.Key, new NamedParameter("capacity", input.RowCapacity));
            switch (input.Connection.Provider) {
                case "mysql":
                case "postgresql":
                case "sqlite":
                case "sqlserver":
                    return (new AdoInputReader(
                        input,
                        input.InputFields,
                        ctx.ResolveNamed<IConnectionFactory>(input.Connection.Key),
                        rowFactory
                    ));
                default:
                    return (new NullReader(input, false));
            }
        }).Named<IRead>(entity.Key);

        // INPUT VERSION DETECTOR (no-op for entities that are not updated)
        builder.Register<IInputVersionDetector>(ctx => {
            if (!entity.Update) { return (new NullVersionDetector()); }
            var input = ctx.ResolveNamed<InputContext>(entity.Key);
            switch (input.Connection.Provider) {
                case "mysql":
                case "postgresql":
                case "sqlite":
                case "sqlserver":
                    return (new AdoInputVersionDetector(input, ctx.ResolveNamed<IConnectionFactory>(input.Connection.Key)));
                default:
                    return (new NullVersionDetector());
            }
        }).Named<IInputVersionDetector>(entity.Key);
    }

    // entity output
    if (_process.Output().Provider.In(_ado)) {

        var calc = _process.ToCalculatedFieldsProcess();

        // PROCESS OUTPUT CONTROLLER — only does real work in "init" mode
        builder.Register<IOutputController>(ctx => {
            var output = ctx.Resolve<OutputContext>();
            if (_process.Mode != "init") { return (new NullOutputController()); }
            switch (output.Connection.Provider) {
                case "mysql":
                case "postgresql":
                case "sqlite":
                case "sqlserver":
                    var actions = new List<IAction> { new AdoStarViewCreator(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)) };
                    if (_process.Flatten) {
                        actions.Add(new AdoFlatTableCreator(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)));
                    }
                    return (new AdoStarController(output, actions));
                default:
                    return (new NullOutputController());
            }
        }).As<IOutputController>();

        // PROCESS CALCULATED READER
        builder.Register<IRead>(ctx => {
            var calcContext = new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, calc.Entities.First());
            var outputContext = new OutputContext(calcContext, new Incrementer(calcContext));
            var cf = ctx.ResolveNamed<IConnectionFactory>(outputContext.Connection.Key);
            var capacity = outputContext.Entity.Fields.Count + outputContext.Entity.CalculatedFields.Count;
            var rowFactory = new RowFactory(capacity, false, false);
            return (new AdoStarParametersReader(outputContext, _process, cf, rowFactory));
        }).As<IRead>();

        // PROCESS CALCULATED FIELD WRITER
        builder.Register<IWrite>(ctx => {
            var calcContext = new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, calc.Entities.First());
            var outputContext = new OutputContext(calcContext, new Incrementer(calcContext));
            var cf = ctx.ResolveNamed<IConnectionFactory>(outputContext.Connection.Key);
            return (new AdoCalculatedFieldUpdater(outputContext, _process, cf));
        }).As<IWrite>();

        // PROCESS INITIALIZER
        builder.Register<IInitializer>(ctx => {
            var output = ctx.Resolve<OutputContext>();
            return (new AdoInitializer(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)));
        }).As<IInitializer>();

        // ENTITIES
        foreach (var e in _process.Entities) {
            var entity = e; // avoid closure-over-loop-variable issues in the lambdas below

            // ENTITY OUTPUT CONTROLLER
            builder.Register<IOutputController>(ctx => {
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                switch (output.Connection.Provider) {
                    case "mysql":
                    case "postgresql":
                    case "sqlite":
                    case "sqlserver":
                        var initializer = _process.Mode == "init" ? (IAction)new AdoEntityInitializer(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)) : new NullInitializer();
                        return (new AdoOutputController(
                            output,
                            initializer,
                            ctx.ResolveNamed<IInputVersionDetector>(entity.Key),
                            new AdoOutputVersionDetector(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)),
                            ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)
                        ));
                    default:
                        return (new NullOutputController());
                }
            }).Named<IOutputController>(entity.Key);

            // OUTPUT ROW MATCHER (used to decide insert vs. update)
            builder.Register(ctx => {
                if (!entity.Update) { return (new NullTakeAndReturnRows()); }
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                var rowFactory = ctx.ResolveNamed<IRowFactory>(entity.Key, new NamedParameter("capacity", output.GetAllEntityFields().Count()));
                var cf = ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key);
                switch (output.Connection.Provider) {
                    case "sqlite":
                        // sqlite needs typed key comparison
                        return (new TypedEntityMatchingKeysReader(new AdoEntityMatchingKeysReader(output, cf, rowFactory), output));
                    default:
                        return ((ITakeAndReturnRows)new AdoEntityMatchingKeysReader(output, cf, rowFactory));
                }
            }).Named<ITakeAndReturnRows>(entity.Key);

            // MASTER UPDATE QUERY
            builder.Register<IWriteMasterUpdateQuery>(ctx => {
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                var factory = ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key);
                switch (output.Connection.Provider) {
                    case "mysql": return (new MySqlUpdateMasterKeysQueryWriter(output, factory));
                    case "postgresql": return (new PostgreSqlUpdateMasterKeysQueryWriter(output, factory));
                    default: return (new SqlServerUpdateMasterKeysQueryWriter(output, factory));
                }
            }).Named<IWriteMasterUpdateQuery>(entity.Key + "MasterKeys");

            // MASTER UPDATER
            builder.Register<IUpdate>(ctx => {
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                switch (output.Connection.Provider) {
                    case "mysql":
                    case "postgresql":
                    case "sqlserver":
                        return (new AdoMasterUpdater(
                            output,
                            ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key),
                            ctx.ResolveNamed<IWriteMasterUpdateQuery>(entity.Key + "MasterKeys")
                        ));
                    case "sqlite":
                        return (new AdoTwoPartMasterUpdater(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)));
                    default:
                        return (new NullMasterUpdater());
                }
            }).Named<IUpdate>(entity.Key);

            // WRITER
            builder.Register<IWrite>(ctx => {
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                var cf = ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key);
                switch (output.Connection.Provider) {
                    case "sqlserver":
                        return (new SqlServerWriter(
                            output,
                            cf,
                            ctx.ResolveNamed<ITakeAndReturnRows>(entity.Key),
                            entity.Update ? (IWrite)new AdoEntityUpdater(output, cf) : new NullWriter(output)
                        ));
                    case "mysql":
                    case "postgresql":
                    case "sqlite":
                        return (new AdoEntityWriter(
                            output,
                            ctx.ResolveNamed<ITakeAndReturnRows>(entity.Key),
                            new AdoEntityInserter(output, cf),
                            entity.Update ? (IWrite)new AdoEntityUpdater(output, cf) : new NullWriter(output)
                        ));
                    default:
                        return (new NullWriter(output));
                }
            }).Named<IWrite>(entity.Key);

            // DELETE HANDLER
            if (entity.Delete) {
                builder.Register<IEntityDeleteHandler>(ctx => {
                    var context = ctx.ResolveNamed<IContext>(entity.Key);
                    var inputContext = ctx.ResolveNamed<InputContext>(entity.Key);
                    var rowCapacity = inputContext.Entity.GetPrimaryKey().Count();
                    var rowFactory = new RowFactory(rowCapacity, false, true);
                    IRead input = new NullReader(context);
                    var primaryKey = entity.GetPrimaryKey();
                    switch (inputContext.Connection.Provider) {
                        case "mysql":
                        case "postgresql":
                        case "sqlite":
                        case "sqlserver":
                            input = new AdoReader(
                                inputContext,
                                primaryKey,
                                ctx.ResolveNamed<IConnectionFactory>(inputContext.Connection.Key),
                                rowFactory,
                                ReadFrom.Input
                            );
                            break;
                    }
                    IRead output = new NullReader(context);
                    IDelete deleter = new NullDeleter(context);
                    var outputConnection = _process.Output();
                    var outputContext = ctx.ResolveNamed<OutputContext>(entity.Key);
                    switch (outputConnection.Provider) {
                        case "mysql":
                        case "postgresql":
                        case "sqlite":
                        case "sqlserver":
                            var ocf = ctx.ResolveNamed<IConnectionFactory>(outputConnection.Key);
                            output = new AdoReader(context, entity.GetPrimaryKey(), ocf, rowFactory, ReadFrom.Output);
                            deleter = new AdoDeleter(outputContext, ocf);
                            break;
                    }
                    var handler = new DefaultDeleteHandler(context, input, output, deleter);
                    // since the primary keys from the input may have been transformed into the output, you have to transform before comparing
                    // feels a lot like entity pipeline on just the primary keys... may look at consolidating
                    handler.Register(new DefaultTransform(context, entity.GetPrimaryKey().ToArray()));
                    handler.Register(TransformFactory.GetTransforms(ctx, _process, entity, primaryKey));
                    handler.Register(new StringTruncateTransfom(context, primaryKey));
                    return (new ParallelDeleteHandler(handler));
                }).Named<IEntityDeleteHandler>(entity.Key);
            }
        }
    }
}
/// <summary>
/// Autofac module for the SQL Server Compact (SqlCe) provider. Reads the Process from the
/// builder's Properties bag and registers, keyed by connection/entity key: connection
/// factories, schema readers, entity input readers/providers, and — when the process output
/// is SqlCe — output providers/controllers, calculated-field reader/writer, initializer,
/// master updaters, and delete handlers.
/// </summary>
protected override void Load(ContainerBuilder builder) {
    if (!builder.Properties.ContainsKey("Process")) { return; }

    var process = (Process)builder.Properties["Process"];

    // connections
    foreach (var connection in process.Connections.Where(c => c.Provider == SqlCe)) {

        // Connection Factory
        builder.Register<IConnectionFactory>(ctx => new SqlCeConnectionFactory(connection)).Named<IConnectionFactory>(connection.Key).InstancePerLifetimeScope();

        // Schema Reader
        builder.Register<ISchemaReader>(ctx => {
            var factory = ctx.ResolveNamed<IConnectionFactory>(connection.Key);
            return (new AdoSchemaReader(ctx.ResolveNamed<IConnectionContext>(connection.Key), factory));
        }).Named<ISchemaReader>(connection.Key);
    }

    // entity input
    foreach (var entity in process.Entities.Where(e => process.Connections.First(c => c.Name == e.Input).Provider == SqlCe)) {

        // INPUT READER
        builder.Register<IRead>(ctx => {
            var input = ctx.ResolveNamed<InputContext>(entity.Key);
            var rowFactory = ctx.ResolveNamed<IRowFactory>(entity.Key, new NamedParameter("capacity", input.RowCapacity));
            var dataReader = new AdoInputReader(
                input,
                input.InputFields,
                ctx.ResolveNamed<IConnectionFactory>(input.Connection.Key),
                rowFactory
            );
            return (dataReader);
        }).Named<IRead>(entity.Key);

        // INPUT VERSION DETECTOR
        builder.Register<IInputProvider>(ctx => {
            var input = ctx.ResolveNamed<InputContext>(entity.Key);
            return (new AdoInputProvider(input, ctx.ResolveNamed<IConnectionFactory>(input.Connection.Key)));
        }).Named<IInputProvider>(entity.Key);
    }

    // entity output
    if (process.GetOutputConnection().Provider == SqlCe) {

        var calc = process.ToCalculatedFieldsProcess();

        // PROCESS OUTPUT CONTROLLER — only does real work in "init" mode
        builder.Register<IOutputController>(ctx => {
            var output = ctx.Resolve<OutputContext>();
            if (process.Mode != "init") { return (new NullOutputController()); }
            var actions = new List<IAction> { new AdoStarViewCreator(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)) };
            if (process.Flatten) {
                actions.Add(new AdoFlatTableCreator(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)));
            }
            return (new AdoStarController(output, actions));
        }).As<IOutputController>();

        // PROCESS CALCULATED READER
        builder.Register<IRead>(ctx => {
            var calcContext = new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, calc.Entities.First());
            var outputContext = new OutputContext(calcContext);
            var cf = ctx.ResolveNamed<IConnectionFactory>(outputContext.Connection.Key);
            var capacity = outputContext.Entity.Fields.Count + outputContext.Entity.CalculatedFields.Count;
            var rowFactory = new RowFactory(capacity, false, false);
            return (new AdoStarParametersReader(outputContext, process, cf, rowFactory));
        }).As<IRead>();

        // PROCESS CALCULATED FIELD WRITER
        builder.Register<IWrite>(ctx => {
            var calcContext = new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, calc.Entities.First());
            var outputContext = new OutputContext(calcContext);
            var cf = ctx.ResolveNamed<IConnectionFactory>(outputContext.Connection.Key);
            return (new AdoCalculatedFieldUpdater(outputContext, process, cf));
        }).As<IWrite>();

        // PROCESS INITIALIZER
        builder.Register<IInitializer>(ctx => {
            var output = ctx.Resolve<OutputContext>();
            return (new AdoInitializer(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)));
        }).As<IInitializer>();

        // ENTITIES
        foreach (var entity in process.Entities) {

            builder.Register<IOutputProvider>(ctx => {
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                var cf = ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key);
                var rowFactory = ctx.ResolveNamed<IRowFactory>(entity.Key, new NamedParameter("capacity", output.GetAllEntityFields().Count()));
                // matcher determines what's an update vs. and insert
                var matcher = entity.Update ? (IBatchReader)new AdoEntityMatchingKeysReader(output, cf, rowFactory) : new NullBatchReader();
                IWrite writer = new SqlCeWriter(
                    output,
                    cf,
                    matcher,
                    new AdoEntityUpdater(output, cf)
                );
                return (new AdoOutputProvider(output, cf, writer));
            }).Named<IOutputProvider>(entity.Key);

            // ENTITY OUTPUT CONTROLLER
            builder.Register<IOutputController>(ctx => {
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                var initializer = process.Mode == "init" ? (IAction)new AdoEntityInitializer(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)) : new NullInitializer();
                return (new AdoOutputController(
                    output,
                    initializer,
                    ctx.ResolveNamed<IInputProvider>(entity.Key),
                    ctx.ResolveNamed<IOutputProvider>(entity.Key),
                    ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)
                ));
            }).Named<IOutputController>(entity.Key);

            // MASTER UPDATE QUERY
            builder.Register<IWriteMasterUpdateQuery>(ctx => {
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                var factory = ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key);
                return (new AdoUpdateMasterKeysQueryWriter(output, factory));
            }).Named<IWriteMasterUpdateQuery>(entity.Key + "MasterKeys");

            // MASTER UPDATER
            builder.Register<IUpdate>(ctx => {
                var output = ctx.ResolveNamed<OutputContext>(entity.Key);
                return (new AdoTwoPartMasterUpdater(output, ctx.ResolveNamed<IConnectionFactory>(output.Connection.Key)));
            }).Named<IUpdate>(entity.Key);

            // DELETE HANDLER
            if (entity.Delete) {

                // register input keys and hashcode reader if necessary
                builder.Register(ctx => {
                    var inputContext = ctx.ResolveNamed<InputContext>(entity.Key);
                    var rowCapacity = inputContext.Entity.GetPrimaryKey().Count();
                    var rowFactory = new RowFactory(rowCapacity, false, true);
                    return (new AdoReader(
                        inputContext,
                        entity.GetPrimaryKey(),
                        ctx.ResolveNamed<IConnectionFactory>(inputContext.Connection.Key),
                        rowFactory,
                        ReadFrom.Input
                    ));
                }).Named<IReadInputKeysAndHashCodes>(entity.Key);

                // register output keys and hash code reader if necessary
                builder.Register((ctx => {
                    var context = ctx.ResolveNamed<OutputContext>(entity.Key);
                    var rowCapacity = context.Entity.GetPrimaryKey().Count();
                    var rowFactory = new RowFactory(rowCapacity, false, true);
                    var outputConnection = process.GetOutputConnection();
                    var ocf = ctx.ResolveNamed<IConnectionFactory>(outputConnection.Key);
                    return (new AdoReader(context, entity.GetPrimaryKey(), ocf, rowFactory, ReadFrom.Output));
                })).Named<IReadOutputKeysAndHashCodes>(entity.Key);

                builder.Register((ctx) => {
                    var outputConnection = process.GetOutputConnection();
                    var outputContext = ctx.ResolveNamed<OutputContext>(entity.Key);
                    var ocf = ctx.ResolveNamed<IConnectionFactory>(outputConnection.Key);
                    return (new AdoDeleter(outputContext, ocf));
                }).Named<IDelete>(entity.Key);

                builder.Register<IEntityDeleteHandler>(ctx => {
                    var context = ctx.ResolveNamed<IContext>(entity.Key);
                    var primaryKey = entity.GetPrimaryKey();
                    var handler = new DefaultDeleteHandler(
                        context,
                        ctx.ResolveNamed<IReadInputKeysAndHashCodes>(entity.Key),
                        ctx.ResolveNamed<IReadOutputKeysAndHashCodes>(entity.Key),
                        ctx.ResolveNamed<IDelete>(entity.Key)
                    );
                    // since the primary keys from the input may have been transformed into the output, you have to transform before comparing
                    // feels a lot like entity pipeline on just the primary keys... may look at consolidating
                    handler.Register(new DefaultTransform(context, entity.GetPrimaryKey().ToArray()));
                    handler.Register(TransformFactory.GetTransforms(ctx, context, primaryKey));
                    handler.Register(new StringTruncateTransfom(context, primaryKey));
                    return (handler);
                }).Named<IEntityDeleteHandler>(entity.Key);
            }
        }
    }
}
public void PipelineContext_GetOrSetTranslation_Throws_If_Original_And_Translation_Keys_Are_The_Same() { PipelineContext.GetOrSetTranslation <string, string>(_Environment, "key", "key", "1", () => "2"); }
public JintTransform(PipelineContext context) : base(context) { _input = MultipleInput(); }
public PipelineContext(PipelineContext original) { IsBroken = original.IsBroken; }
/// <summary>
/// Streams output rows whose batch id exceeds the minimum batch id recorded on the parent
/// process's entities (i.e. the rows touched by the current run). Yields nothing when no
/// entity reported inserts, updates, or deletes. For SqlCe the star view's SQL is
/// reconstructed inline because SqlCe does not support views.
/// </summary>
public IEnumerable<IRow> Read() {
    // nothing changed anywhere — nothing to read
    if (_parent.Entities.Sum(e => e.Inserts + e.Updates + e.Deletes) == 0) {
        yield break;
    }
    // FIX: removed a stray empty statement (";") that followed the block above.

    var batches = _parent.Entities.Select(e => e.BatchId).ToArray();
    var minBatchId = batches.Min();
    var maxBatchId = batches.Max();
    _output.Info("Batch Range: {0} to {1}.", minBatchId, maxBatchId);

    var threshold = minBatchId - 1;
    var sql = string.Empty;

    if (_cf.AdoProvider == AdoProvider.SqlCe) {
        // because SqlCe doesn't support views, re-construct the parent view's definition
        var ctx = new PipelineContext(_output.Logger, _parent);
        var master = _parent.Entities.First(e => e.IsMaster);
        var builder = new StringBuilder();
        builder.AppendLine($"SELECT {string.Join(",", _output.Entity.Fields.Where(f => f.Output).Select(f => _cf.Enclose(f.Source.Split('.')[0]) + "." + _cf.Enclose(f.Source.Split('.')[1])))}");
        foreach (var fromClause in ctx.SqlStarFroms(_cf)) {
            builder.AppendLine(fromClause);
        }
        builder.AppendLine($"WHERE {_cf.Enclose(Utility.GetExcelName(master.Index))}.{_cf.Enclose(master.TflBatchId().FieldName())} > @Threshold;");
        sql = builder.ToString();
    } else {
        sql = $@"
SELECT {string.Join(",", _output.Entity.Fields.Where(f => f.Output).Select(f => _cf.Enclose(f.Alias)))}
FROM {_cf.Enclose(_output.Process.Star)} {(_cf.AdoProvider == AdoProvider.SqlServer ? "WITH (NOLOCK)" : string.Empty)}
WHERE {_cf.Enclose(Constants.TflBatchId)} > @Threshold;";
    }

    _output.Debug(() => sql);

    using (var cn = _cf.GetConnection()) {
        cn.Open();
        var cmd = cn.CreateCommand();
        cmd.CommandTimeout = 0; // no timeout: batch reads can be long-running
        cmd.CommandType = CommandType.Text;
        cmd.CommandText = sql;

        var min = cmd.CreateParameter();
        min.ParameterName = "@Threshold";
        min.Value = threshold;
        min.Direction = ParameterDirection.Input;
        min.DbType = DbType.Int32;
        cmd.Parameters.Add(min);

        var rowCount = 0;
        var fieldArray = _output.Entity.Fields.ToArray();

        // FIX: the data reader was never disposed; wrap it in a using block.
        using (var reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess)) {
            while (reader.Read()) {
                rowCount++;
                _output.Increment();
                yield return _rowCreator.Create(reader, fieldArray);
            }
        }
        _output.Info("{0} from {1}", rowCount, _output.Connection.Name);
    }
}
/// <summary>
/// Reads rows from an HTTP resource endpoint — paging through results when paging is
/// configured — merges them into a single JArray, and attaches that data to the pipeline
/// context as an IterableDataSettings plugin.
/// Returns false (after logging an error) when any required setting is missing; a missing
/// access token is only warned about. Returns true on completion.
/// </summary>
protected async Task<bool> ReadData(Endpoint endpoint, PipelineStep pipelineStep, PipelineContext pipelineContext, ILogger logger) {
    if (endpoint == null) { throw new ArgumentNullException(nameof(endpoint)); }
    if (pipelineStep == null) { throw new ArgumentNullException(nameof(pipelineStep)); }
    if (pipelineContext == null) { throw new ArgumentNullException(nameof(pipelineContext)); }

    // validate everything we need before issuing any requests
    var repositorySettings = Context.GetPlugin<RepositorySettings>();
    if (repositorySettings == null) {
        logger.Error("No repository settings plugin is specified on the context (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        return (false);
    }
    if (repositorySettings.Client == null) {
        logger.Error("No client is specified on the repository settings (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        return (false);
    }
    var applicationEndpointSettings = endpoint.GetApplicationEndpointSettings();
    var applicationSettings = (ApplicationSettings)applicationEndpointSettings?.Application?.RefreshPlugin.Invoke();
    if (applicationSettings == null) {
        logger.Error("No application is specified on the endpoint (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        return (false);
    }
    if (string.IsNullOrWhiteSpace(applicationSettings.BaseUrl)) {
        logger.Error("No Base Url is specified on the endpoint (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        return (false);
    }
    if (string.IsNullOrWhiteSpace(applicationSettings.AccessToken)) {
        // deliberately only a warning — some endpoints may not require a token
        logger.Warn("No access token is specified on the endpoint (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        //return false;
    }
    var resourceSettings = pipelineStep.GetResourceSettings();
    if (resourceSettings == null) {
        logger.Error("No resource is specified on the pipeline step (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        return (false);
    }
    if (string.IsNullOrWhiteSpace(resourceSettings.Url)) {
        logger.Error("No url is specified on the resource (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        return (false);
    }
    if (string.IsNullOrWhiteSpace(resourceSettings.Method)) {
        logger.Error("No method is specified on the resource (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        return (false);
    }
    var readDataSettings = pipelineStep.GetReadResourceDataSettings();
    if (readDataSettings == null || string.IsNullOrWhiteSpace(readDataSettings.PathExpression)) {
        logger.Error("No path expression is specified on the pipeline step. (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        return (false);
    }

    var iterableData = new JArray();
    bool hasMore;
    do {
        hasMore = false;
        var response = await repositorySettings.Client.SendAsync(applicationSettings, resourceSettings);
        var json = await response.Content.ReadAsStringAsync();
        var jObject = JsonConvert.DeserializeObject<JObject>(json);
        if (jObject == null) {
            logger.Debug("No data returned from request. (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
        } else {
            // select the rows out of the response using the configured path expression
            var jArray = (JArray)jObject.SelectToken(readDataSettings.PathExpression, false);
            if (jArray == null) {
                logger.Debug("No data returned from path expression. (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
            } else if (jArray.Count == 0) {
                logger.Info("No items returned from request. (pipeline step: {0}, endpoint: {1})", pipelineStep.Name, endpoint.Name);
            } else {
                logger.Info("{0} rows were read from endpoint. (pipeline step: {1}, endpoint: {2})", jArray.Count, pipelineStep.Name, endpoint.Name);
                iterableData.Merge(jArray);
                if (resourceSettings.Paging != null) {
                    if (!string.IsNullOrEmpty(resourceSettings.Paging.NextTokenPathExpression)) {
                        // token-based paging: keep going while the response supplies a next token
                        var nextToken = jObject.SelectToken(resourceSettings.Paging.NextTokenPathExpression, false);
                        resourceSettings.Paging.NextToken = nextToken?.Value<string>();
                        hasMore = !string.IsNullOrEmpty(nextToken?.Value<string>());
                    } else {
                        // counter-based paging: advance the page while page*pageSize is below
                        // both the reported total and the configured maximum
                        var pageToken = jObject.SelectToken(resourceSettings.Paging.CurrentPagePathExpression, false);
                        var pageSizeToken = jObject.SelectToken(resourceSettings.Paging.PageSizePathExpression, false);
                        var totalCountToken = jObject.SelectToken(resourceSettings.Paging.TotalCountPathExpression, false);
                        var page = pageToken?.Value<int?>() ?? 0;
                        var pageSize = pageSizeToken?.Value<int?>() ?? resourceSettings.Paging.PageSize;
                        var totalCount = totalCountToken?.Value<int?>() ?? int.MinValue;
                        var maxCount = resourceSettings.Paging.MaximumCount;
                        resourceSettings.Paging.Page = page + 1;
                        resourceSettings.Paging.PageSize = pageSize;
                        resourceSettings.Paging.TotalCount = totalCount;
                        hasMore = page * pageSize > 0 && page * pageSize < totalCount && page * pageSize < maxCount;
                    }
                }
            }
        }
    } while (resourceSettings.Paging != null && hasMore);

    logger.Info("{0} total rows were read from endpoint. (pipeline step: {1}, endpoint: {2})", iterableData.Count, pipelineStep.Name, endpoint.Name);
    var dataSettings = new IterableDataSettings(iterableData);
    pipelineContext.AddPlugins(dataSettings);
    return (true);
}
/// <summary>
/// Composition root: builds the Autofac container for a process run and returns a fresh
/// lifetime scope. Registers, in order: the process and logger instances, transform and
/// validator short-hand modules, caller-supplied modules, process/entity contexts and row
/// factories, internal-provider readers/writers, per-entity pipelines, the process-level
/// calculated-fields pipeline, and finally the process controller that orchestrates them.
/// </summary>
public ILifetimeScope CreateScope(Process process, IPipelineLogger logger) {
    var builder = new ContainerBuilder();
#if PLUGINS
    builder.Properties["Process"] = process;
#endif
    builder.Register(ctx => process).As<Process>();
    builder.RegisterInstance(logger).As<IPipelineLogger>().SingleInstance();

    // register short-hand for t attribute
    var transformModule = new TransformModule(process, _methods, _shortHand, logger);
    foreach (var t in _transforms) {
        transformModule.AddTransform(t);
    }
    builder.RegisterModule(transformModule);

    // register short-hand for v attribute
    var validateModule = new ValidateModule(process, _methods, _shortHand, logger);
    foreach (var v in _validators) {
        validateModule.AddValidator(v);
    }
    builder.RegisterModule(validateModule);

#if PLUGINS
    // just in case other modules need to see these
    builder.Properties["ShortHand"] = _shortHand;
    builder.Properties["Methods"] = _methods;
#endif

    foreach (var module in _modules) {
        builder.RegisterModule(module);
    }

    // Process Context
    builder.Register<IContext>((ctx, p) => new PipelineContext(logger, process)).As<IContext>();

    // Process Output Context
    builder.Register(ctx => {
        var context = ctx.Resolve<IContext>();
        return (new OutputContext(context));
    }).As<OutputContext>();

    // Connection and Process Level Output Context
    foreach (var connection in process.Connections) {
        builder.Register(ctx => new ConnectionContext(ctx.Resolve<IContext>(), connection)).Named<IConnectionContext>(connection.Key);
        if (connection.Name != "output") { continue; }
        // register output for connection
        builder.Register(ctx => {
            var context = ctx.ResolveNamed<IConnectionContext>(connection.Key);
            return (new OutputContext(context));
        }).Named<OutputContext>(connection.Key);
    }

    // Entity Context and RowFactory
    foreach (var entity in process.Entities) {
        builder.Register<IContext>((ctx, p) => new PipelineContext(ctx.Resolve<IPipelineLogger>(), process, entity)).Named<IContext>(entity.Key);
        builder.Register(ctx => {
            var context = ctx.ResolveNamed<IContext>(entity.Key);
            return (new InputContext(context));
        }).Named<InputContext>(entity.Key);
        // row factory capacity is supplied by the resolver via the "capacity" named parameter
        builder.Register<IRowFactory>((ctx, p) => new RowFactory(p.Named<int>("capacity"), entity.IsMaster, false)).Named<IRowFactory>(entity.Key);
        builder.Register(ctx => {
            var context = ctx.ResolveNamed<IContext>(entity.Key);
            return (new OutputContext(context));
        }).Named<OutputContext>(entity.Key);
        var connection = process.Connections.First(c => c.Name == entity.Connection);
        builder.Register(ctx => new ConnectionContext(ctx.Resolve<IContext>(), connection)).Named<IConnectionContext>(entity.Key);
    }

    // internal entity input
    foreach (var entity in process.Entities.Where(e => process.Connections.First(c => c.Name == e.Connection).Provider == "internal")) {
        builder.RegisterType<NullInputProvider>().Named<IInputProvider>(entity.Key);
        // READER
        builder.Register<IRead>(ctx => {
            var input = ctx.ResolveNamed<InputContext>(entity.Key);
            var rowFactory = ctx.ResolveNamed<IRowFactory>(entity.Key, new NamedParameter("capacity", input.RowCapacity));
            return (new InternalReader(input, rowFactory));
        }).Named<IRead>(entity.Key);
    }

    // Internal Entity Output
    if (process.Output().Provider == "internal") {
        // PROCESS OUTPUT CONTROLLER
        builder.Register<IOutputController>(ctx => new NullOutputController()).As<IOutputController>();
        foreach (var entity in process.Entities) {
            builder.Register<IOutputController>(ctx => new NullOutputController()).Named<IOutputController>(entity.Key);
            builder.Register<IOutputProvider>(ctx => new InternalOutputProvider(ctx.ResolveNamed<OutputContext>(entity.Key), ctx.ResolveNamed<IWrite>(entity.Key))).Named<IOutputProvider>(entity.Key);
            // WRITER
            builder.Register<IWrite>(ctx => new InternalWriter(ctx.ResolveNamed<OutputContext>(entity.Key))).Named<IWrite>(entity.Key);
        }
    }

    // entity pipelines
    foreach (var entity in process.Entities) {
        builder.Register(ctx => {
            // "defer" lets each entity choose its own pipeline type
            var type = process.Pipeline == "defer" ? entity.Pipeline : process.Pipeline;
            var context = ctx.ResolveNamed<IContext>(entity.Key);
            IPipeline pipeline;
            context.Debug(() => $"Registering {type} for entity {entity.Alias}.");
            var outputController = ctx.IsRegisteredWithName<IOutputController>(entity.Key) ? ctx.ResolveNamed<IOutputController>(entity.Key) : new NullOutputController();
            switch (type) {
                case "parallel.linq":
                    pipeline = new ParallelPipeline(new DefaultPipeline(outputController, context));
                    break;
                default:
                    pipeline = new DefaultPipeline(outputController, context);
                    break;
            }
            // TODO: rely on IInputProvider's Read method instead (after every provider has one)
            pipeline.Register(ctx.IsRegisteredWithName(entity.Key, typeof(IRead)) ? ctx.ResolveNamed<IRead>(entity.Key) : null);
            pipeline.Register(ctx.IsRegisteredWithName(entity.Key, typeof(IInputProvider)) ? ctx.ResolveNamed<IInputProvider>(entity.Key) : null);
            // transforms
            if (!process.ReadOnly) {
                pipeline.Register(new SetSystemFields(new PipelineContext(ctx.Resolve<IPipelineLogger>(), process, entity)));
            }
            pipeline.Register(new IncrementTransform(context));
            pipeline.Register(new DefaultTransform(context, context.GetAllEntityFields().Where(f => !f.System)));
            pipeline.Register(TransformFactory.GetTransforms(ctx, context, entity.GetAllFields().Where(f => f.Transforms.Any())));
            pipeline.Register(ValidateFactory.GetValidators(ctx, context, entity.GetAllFields().Where(f => f.Validators.Any())));
            if (!process.ReadOnly) {
                pipeline.Register(new StringTruncateTransfom(new PipelineContext(ctx.Resolve<IPipelineLogger>(), process, entity)));
            }
            pipeline.Register(new Transformalize.Transforms.System.LogTransform(context));
            // writer, TODO: rely on IOutputProvider instead
            pipeline.Register(ctx.IsRegisteredWithName(entity.Key, typeof(IWrite)) ? ctx.ResolveNamed<IWrite>(entity.Key) : null);
            pipeline.Register(ctx.IsRegisteredWithName(entity.Key, typeof(IOutputProvider)) ? ctx.ResolveNamed<IOutputProvider>(entity.Key) : null);
            // updater
            pipeline.Register(process.ReadOnly || !ctx.IsRegisteredWithName(entity.Key, typeof(IUpdate)) ? new NullUpdater() : ctx.ResolveNamed<IUpdate>(entity.Key));
            return (pipeline);
        }).Named<IPipeline>(entity.Key);
    }

    // process pipeline (process-level calculated fields)
    builder.Register(ctx => {
        var calc = process.ToCalculatedFieldsProcess();
        var entity = calc.Entities.First();
        var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, entity);
        var outputContext = new OutputContext(context);
        IPipeline pipeline;
        context.Debug(() => $"Registering {process.Pipeline} pipeline.");
        var outputController = ctx.IsRegistered<IOutputController>() ? ctx.Resolve<IOutputController>() : new NullOutputController();
        switch (process.Pipeline) {
            case "parallel.linq":
                pipeline = new ParallelPipeline(new DefaultPipeline(outputController, context));
                break;
            default:
                pipeline = new DefaultPipeline(outputController, context);
                break;
        }
        // no updater necessary
        pipeline.Register(new NullUpdater(context, false));
        if (!process.CalculatedFields.Any()) {
            pipeline.Register(new NullReader(context, false));
            pipeline.Register(new NullWriter(context, false));
            return (pipeline);
        }
        // register transforms
        pipeline.Register(new IncrementTransform(context));
        pipeline.Register(new Transformalize.Transforms.System.LogTransform(context));
        pipeline.Register(new DefaultTransform(new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, entity), entity.CalculatedFields));
        pipeline.Register(TransformFactory.GetTransforms(ctx, context, entity.CalculatedFields));
        pipeline.Register(ValidateFactory.GetValidators(ctx, context, entity.GetAllFields().Where(f => f.Validators.Any())));
        pipeline.Register(new StringTruncateTransfom(new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, entity)));
        // register input and output
        pipeline.Register(ctx.IsRegistered<IRead>() ? ctx.Resolve<IRead>() : new NullReader(context));
        pipeline.Register(ctx.IsRegistered<IWrite>() ? ctx.Resolve<IWrite>() : new NullWriter(context));
        if (outputContext.Connection.Provider == "sqlserver") {
            // SQL Server's datetime type cannot hold dates before 1753-01-01
            pipeline.Register(new MinDateTransform(new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, entity), new DateTime(1753, 1, 1)));
        }
        return (pipeline);
    }).As<IPipeline>();

    // process controller
    builder.Register<IProcessController>(ctx => {
        var pipelines = new List<IPipeline>();
        // entity-level pipelines
        foreach (var entity in process.Entities) {
            var pipeline = ctx.ResolveNamed<IPipeline>(entity.Key);
            pipelines.Add(pipeline);
            if (entity.Delete && process.Mode != "init") {
                pipeline.Register(ctx.ResolveNamed<IEntityDeleteHandler>(entity.Key));
            }
        }
        // process-level pipeline for process level calculated fields
        if (ctx.IsRegistered<IPipeline>()) {
            pipelines.Add(ctx.Resolve<IPipeline>());
        }
        var context = ctx.Resolve<IContext>();
        var controller = new ProcessController(pipelines, context);
        // output initialization
        if (process.Mode == "init" && ctx.IsRegistered<IInitializer>()) {
            controller.PreActions.Add(ctx.Resolve<IInitializer>());
        }
        // flatten(ing) is first post-action
        var isAdo = Constants.AdoProviderSet().Contains(process.Output().Provider);
        if (process.Flatten && isAdo) {
            if (ctx.IsRegisteredWithName<IAction>(process.Output().Key)) {
                controller.PostActions.Add(ctx.ResolveNamed<IAction>(process.Output().Key));
            } else {
                context.Error($"Could not find ADO Flatten Action for provider {process.Output().Provider}.");
            }
        }
        // actions (mode "*" matches every mode)
        foreach (var action in process.Actions.Where(a => a.GetModes().Any(m => m == process.Mode || m == "*"))) {
            if (action.Before) {
                controller.PreActions.Add(ctx.ResolveNamed<IAction>(action.Key));
            }
            if (action.After) {
                controller.PostActions.Add(ctx.ResolveNamed<IAction>(action.Key));
            }
        }
        return (controller);
    }).As<IProcessController>();

    var build = builder.Build();
    return (build.BeginLifetimeScope());
}
/// <summary>
/// Autofac module load: registers the process-level (calculated-fields) pipeline
/// as the container's <c>IPipeline</c>. Registration order matters — the updater,
/// transforms, and reader/writer are registered in the order the pipeline runs them.
/// </summary>
/// <param name="builder">The Autofac container builder to register into.</param>
protected override void Load(ContainerBuilder builder) {

    // nothing to register when this module was constructed without a process
    if (_process == null) {
        return;
    }

    // reduce the process to one containing only its process-level calculated
    // fields; the reduced process carries a single entity hosting those fields
    var calc = _process.ToCalculatedFieldsProcess();
    var entity = calc.Entities.First();

    // I need a process keyed pipeline
    builder.Register(ctx => {

        var context = new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, entity);
        var outputContext = new OutputContext(context);
        IPipeline pipeline;
        context.Debug(() => $"Registering {_process.Pipeline} pipeline.");

        // fall back to a no-op controller when none is registered in the container
        var outputController = ctx.IsRegistered<IOutputController>() ? ctx.Resolve<IOutputController>() : new NullOutputController();

        // choose the pipeline implementation; anything other than "parallel.linq"
        // (including "defer" or empty) gets the default sequential pipeline
        switch (_process.Pipeline) {
            case "parallel.linq":
                pipeline = new ParallelPipeline(new DefaultPipeline(outputController, context));
                break;
            default:
                pipeline = new DefaultPipeline(outputController, context);
                break;
        }

        // no updater necessary
        pipeline.Register(new NullUpdater(context, false));

        // without calculated fields there is nothing to read, transform, or write
        if (!_process.CalculatedFields.Any()) {
            pipeline.Register(new NullReader(context, false));
            pipeline.Register(new NullWriter(context, false));
            return (pipeline);
        }

        // register transforms
        pipeline.Register(new IncrementTransform(context));
        pipeline.Register(new LogTransform(context));
        pipeline.Register(new DefaultTransform(new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, entity), entity.CalculatedFields));
        pipeline.Register(TransformFactory.GetTransforms(ctx, context, entity.CalculatedFields));
        pipeline.Register(new StringTruncateTransfom(new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, entity)));

        // register input and output
        switch (outputContext.Connection.Provider) {
            case "sqlserver":
                pipeline.Register(ctx.Resolve<IRead>());
                pipeline.Register(ctx.Resolve<IWrite>());
                // 1753-01-01 is SQL Server's DATETIME lower bound — presumably this
                // floors earlier dates so inserts don't fail; confirm against MinDateTransform
                pipeline.Register(new MinDateTransform(new PipelineContext(ctx.Resolve<IPipelineLogger>(), calc, entity), new DateTime(1753, 1, 1)));
                break;
            case "mysql":
            case "postgresql":
            case "sqlce":
            case "access":
            case "sqlite":
                pipeline.Register(ctx.Resolve<IRead>());
                pipeline.Register(ctx.Resolve<IWrite>());
                break;
            default:
                // unrecognized output providers get no-op input and output
                pipeline.Register(new NullReader(context));
                pipeline.Register(new NullWriter(context));
                break;
        }

        return (pipeline);
    }).As<IPipeline>();
}
/// <summary> /// Asynchronously process the pipeline context. /// </summary> /// <typeparam name="T">The type of the data entity.</typeparam> /// <param name="context">Contextual information for the request.</param> /// <param name="log">The logger.</param> /// <param name="cancellationToken"> /// A cancellation token that can be used by other objects or threads to receive notice of cancellation. /// </param> /// <returns>The asynchronous task.</returns> protected abstract Task _ProcessAsync <T>( PipelineContext <T> context, ILogger log, CancellationToken cancellationToken);
public static CommandLineBuilder CreateParserBuilder(Command command, PipelineContext context) => new CommandLineBuilder(command) .UseMiddleware(inv => inv.BindingContext.AddService(typeof(PipelineContext), () => context));
protected virtual SynchronizationSettings ResolveSynchronizationSettingsAndSetElement(PipelineStep pipelineStep, PipelineContext pipelineContext, object element) { DataLocationSettings locationSettings = pipelineStep.GetDataLocationSettings(); SynchronizationSettings synchronizationSettings = new SynchronizationSettings(); if (locationSettings.DataLocation == "Pipeline Context Source") { synchronizationSettings.Source = element; } if (locationSettings.DataLocation == "Pipeline Context Target") { synchronizationSettings.Target = element; } return(synchronizationSettings); }
public void PipelineContext_GetOrSetTranslation_Is_Case_Insensitive_When_Comparing_Keys() { PipelineContext.GetOrSetTranslation <string, string>(_Environment, "key", "KEY", "1", () => "2"); }