/// <summary>
/// Wires up all data-access helpers for bond information against the given connection type.
/// </summary>
/// <param name="type">SQL Server connection profile used by the readers/writers.</param>
public BondInfoRepository(Infrastructure.ConnectionType type)
{
    windReader = new WindReader();
    sqlReader = new SqlServerReader(type);
    sqlWriter = new SqlServerWriter(type);
    dateRepo = new TransactionDateTimeRepository(type);
}
/// <summary>
/// A task configured with "truncate": false must not emit a TRUNCATE TABLE statement.
/// </summary>
public void SqlWriterTest_TruncateTableStatement_NO()
{
    // Arrange
    var jsonSettings = @" { ""importFiles"": [ { ""file"": ""..\\..\\..\\..\\TestCsv\\simpleComma.csv"", ""batchSize"": 4, ""truncate"": false } ] }";
    var importTasks = (List<ImportFileOptions>)CsvToSql.Configuration.ImportTasks.ReadTasks(log, jsonSettings);
    var sqlWriter = new SqlServerWriter(log, "");
    var headers = new List<string>() { "head1", "head2" };

    // Act
    sqlWriter.Init(importTasks.First(), headers);
    var tableTruncSql = sqlWriter.GetTruncateTableStatement();

    // Assert
    // Fixed: use Assert.IsNotNull instead of IsTrue(x != null) for a clearer failure message.
    Assert.IsNotNull(sqlWriter);
    // With truncate disabled the statement is expected to be (near) empty.
    Assert.IsTrue(tableTruncSql.Length < 3);
    Assert.IsFalse(tableTruncSql.Contains("TRUNCATE TABLE"));
}
/// <summary>
/// Creates a daily stock-option repository backed by SQL Server and the supplied data source.
/// </summary>
public StockOptionDailyRepository(QuantitativeAnalysis.DataAccess.Infrastructure.ConnectionType type, IDataSource dataSource)
{
    this.dataSource = dataSource;
    dateRepo = new TransactionDateTimeRepository(type);
    sqlWriter = new SqlServerWriter(type);
    sqlReader = new SqlServerReader(type);
}
/// <summary>
/// Volatility-based stock strategy; takes its repositories via injection.
/// </summary>
/// <remarks>
/// NOTE(review): the writer targets Server84 while the reader targets Local —
/// confirm this asymmetry is intentional.
/// </remarks>
public StockWithVolatility2(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo, TransactionDateTimeRepository dateRepo)
{
    this.dateRepo = dateRepo;
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
}
/// <summary>
/// Dual-thrust strategy variant; builds its own date repository and SQL endpoints.
/// </summary>
public DualTrust2(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo)
{
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
}
/// <summary>
/// ETF arbitrage strategy for the instrument identified by <paramref name="code"/>.
/// </summary>
public ETFArbitrary(StockTickRepository stockRepo, StockDailyRepository stockDailyRepo, string code)
{
    this.code = code;
    this.stockRepo = stockRepo;
    this.stockDailyRepo = stockDailyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Server84);
}
/// <summary>
/// Daily ETF-constituent repository; combines SQL Server storage with a Redis cache layer.
/// </summary>
public ETFConsitituentDailyRepository(ConnectionType type, DefaultETFConstituentDailyDataSource ds)
{
    dataSource = ds;
    dateTimeRepo = new TransactionDateTimeRepository(type);
    sqlWriter = new SqlServerWriter(type);
    sqlReader = new SqlServerReader(type);
    redisReader = new RedisReader();
    redisWriter = new RedisWriter();
}
/// <summary>
/// Front/next-contract basis strategy for the given futures code.
/// </summary>
public BasisFrontNext(StockMinuteRepository stockMinutelyRepo, string code)
{
    this.stockMinutelyRepo = stockMinutelyRepo;
    this.code = code;
    // "IF" is benchmarked against 000300.SH; anything else against 000905.SH.
    if (code == "IF")
    {
        indexCode = "000300.SH";
    }
    else
    {
        indexCode = "000905.SH";
    }
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Server84);
}
/// <summary>
/// Minute-bar repository: SQL Server for persistence, Redis for caching,
/// an external <see cref="IDataSource"/> for fetching raw data.
/// </summary>
public StockMinuteRepository(ConnectionType type, IDataSource ds)
{
    dataSource = ds;
    dateTimeRepo = new TransactionDateTimeRepository(type);
    sqlWriter = new SqlServerWriter(type);
    sqlReader = new SqlServerReader(type);
    redisWriter = new RedisWriter();
    redisReader = new RedisReader();
}
/// <summary>
/// Minutely iVIX calculator.
/// </summary>
/// <param name="rate">Risk-free rate used in the calculation; defaults to 4%.</param>
public ivixMinutely(OptionInfoRepository infoRepo, StockMinuteRepository stockRepo, double rate = 0.04)
{
    this.rate = rate;
    this.stockRepo = stockRepo;
    this.infoRepo = infoRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
}
/// <summary>
/// Daily pair-trading strategy over the two instruments <paramref name="code1"/> and <paramref name="code2"/>.
/// </summary>
public pairtradingDaily(StockDailyRepository stockDailyRepo, string code1, string code2)
{
    this.code1 = code1;
    this.code2 = code2;
    this.stockDailyRepo = stockDailyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
}
/// <summary>
/// R-Breaker intraday strategy for the given instrument code.
/// </summary>
public RBreakStrategy(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo, string code)
{
    this.code = code;
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
}
/// <summary>
/// Tick-level repository: SQL Server persistence plus Redis cache, data pulled via the injected source.
/// </summary>
public StockTickRepository(QuantitativeAnalysis.DataAccess.Infrastructure.ConnectionType type, IDataSource ds)
{
    dataSource = ds;
    transDateRepo = new TransactionDateTimeRepository(type);
    sqlWriter = new SqlServerWriter(type);
    sqlReader = new SqlServerReader(type);
    redisReader = new RedisReader();
    redisWriter = new RedisWriter();
}
/// <summary>
/// Intraday strategy #1; all repositories are supplied by the caller.
/// </summary>
public Intraday1(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo, StockTickRepository tickRepo, TransactionDateTimeRepository dateRepo)
{
    this.tickRepo = tickRepo;
    this.dateRepo = dateRepo;
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
}
/// <summary>
/// Delta-hedging strategy for call options on <paramref name="code"/>.
/// </summary>
/// <param name="duration">Hedging horizon; unit is defined by the strategy implementation.</param>
public CallDeltaHedge(StockTickRepository stockRepo, StockDailyRepository stockDailyRepo, string code, int duration)
{
    this.code = code;
    this.duration = duration;
    this.stockRepo = stockRepo;
    this.stockDailyRepo = stockDailyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Server84);
}
/// <summary>
/// Implied-volatility calculator over option and underlying tick data.
/// </summary>
/// <param name="rate">Risk-free rate; defaults to 4%.</param>
public Impv(OptionInfoRepository infoRepo, StockOptionTickRepository optionRepo, StockTickRepository stockRepo, double rate = 0.04)
{
    this.rate = rate;
    this.infoRepo = infoRepo;
    this.stockRepo = stockRepo;
    this.optionRepo = optionRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Server84);
}
/// <summary>
/// Records market data into InfluxDB, reading from local SQL Server instances.
/// </summary>
/// <remarks>
/// NOTE(review): sqlReaderSource is also wired to ConnectionType.Local, identical to
/// sqlReaderLocal — its name suggests a different source connection was intended; confirm.
/// </remarks>
public InfluxdbRecord(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo, TransactionDateTimeRepository dateRepo, StockInfoRepository stockInfoRepo)
{
    this.dateRepo = dateRepo;
    this.stockInfoRepo = stockInfoRepo;
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    this.sqlReaderLocal = new SqlServerReader(ConnectionType.Local);
    this.sqlReaderSource = new SqlServerReader(ConnectionType.Local);
    this.sqlWriter = new SqlServerWriter(ConnectionType.Local);
}
// Constructor: moving price-ceiling strategy, version 2.
/// <summary>
/// Builds the strategy from minute/daily/tick repositories plus stock metadata,
/// and creates its own Wind reader and SQL endpoints.
/// </summary>
public priceCeilingMoving2(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo, StockTickRepository stockTickRepo, StockInfoRepository stockInfoRepo)
{
    this.stockInfoRepo = stockInfoRepo;
    this.stockTickRepo = stockTickRepo;
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    this.windReader = new WindReader();
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
}
/// <summary>
/// TD-sequential strategy for the given instrument; ensures its result
/// database/tables exist before any run.
/// </summary>
public TDstrategy(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo, string code)
{
    this.code = code;
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
    // Schema setup must run after the SQL writer is created.
    CreateDBOrTableIfNecessary(databaseName, tableName);
    CreateDBOrTableIfNecessary2(databaseName, tableName2);
}
/// <summary>
/// A column mapping with a "**&lt;filename&gt;" key adds a constant column (here DCSource)
/// whose value appears in the generated INSERT statements.
/// </summary>
public void SqlWriterTest_GetInsertStatmentsWithConkretImportFileName()
{
    // Arrange
    var jsonSettings = @" { ""importFiles"": [ { ""file"": ""..\\..\\..\\..\\TestCsv\\simpleComma.csv"", ""batchSize"": 4, ""columnMapping"": [ { ""head1"": ""NewHeadOne"", ""head2"": ""NewHeadTwo"", ""**20200911132530_20200907_Clients_SE.csv"": ""DCSource"" } ] } ] }";
    var importTasks = (List<ImportFileOptions>)CsvToSql.Configuration.ImportTasks.ReadTasks(log, jsonSettings);
    var sqlWriter = new SqlServerWriter(log, "");
    var headers = new List<string>() { "head1", "head2", "head3" };
    var linesToWrite = new List<List<string>>() {
        new List<string>() { "a0", "a1", "a2" },
        new List<string>() { "b0", "b1", "b2" }
    };
    sqlWriter.Init(importTasks.First(), headers);

    // Act
    string insertSql = sqlWriter.GetInsertStatements(linesToWrite);

    // Assert
    // Fixed: Assert.IsNotNull instead of IsTrue(x != null), and Assert.AreEqual
    // argument order corrected to (expected, actual) per MSTest convention.
    Assert.IsNotNull(sqlWriter);
    Assert.AreEqual(4, sqlWriter.GetHeaderFields().Count); // { "head1", "head2", "head3", "DCSource"}
    Assert.AreEqual("DCSource", sqlWriter.GetHeaderFields()[3].Name);
    Assert.IsTrue(insertSql.Contains("INSERT INTO"));
    Assert.IsTrue(insertSql.Contains("20200911132530_20200907_Clients_SE.csv"));
}
/// <summary>
/// Daily pair-trading strategy, version 3: operates on a whole stock board over a date range.
/// </summary>
public pairtradingDaily3(StockDailyRepository stockDailyRepo, string stockBoard, DateTime startTime, DateTime endTime)
{
    this.stockBoard = stockBoard;
    this.startTime = startTime;
    this.endTime = endTime;
    this.stockDailyRepo = stockDailyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
    // Precompute the trading calendar for the requested window.
    this.tradedays = dateRepo.GetStockTransactionDate(startTime, endTime);
}
/// <summary>
/// Ensures the on-disk SQL Server data directory exists, then creates the
/// common and daily-transaction databases/tables if they are missing.
/// </summary>
private static void CreateDBAndTableIfNecessary(ConnectionType type)
{
    var writer = new SqlServerWriter(type);
    var sqlLocation = ConfigurationManager.AppSettings["SqlServerLocation"];
    if (!Directory.Exists(sqlLocation))
    {
        Directory.CreateDirectory(sqlLocation);
    }
    CreateCommonDBAndTables(sqlLocation, writer);
    CreateDailyTransactionDBAndTables(sqlLocation, writer);
}
/// <summary>
/// Dual-thrust strategy on <paramref name="code"/> with the given underlying.
/// </summary>
public DualTrust(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo, string code, string underlyingCode)
{
    this.code = code;
    this.underlyingCode = underlyingCode;
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
    // 300 is presumably the IF (CSI 300 futures) contract multiplier — TODO confirm.
    if (code == "IF.CFE")
    {
        multiplicator = 300;
    }
}
/// <summary>
/// Minute-bar repository with an optional Redis cache layer and an extra
/// Server170 reader in addition to the primary connection.
/// </summary>
/// <param name="redis">When true, Redis reader/writer are created; otherwise they stay null.</param>
public StockMinuteRepository(QuantitativeAnalysis.DataAccess.Infrastructure.ConnectionType type, IDataSource ds, bool redis = false)
{
    dateTimeRepo = new TransactionDateTimeRepository(type);
    sqlWriter = new SqlServerWriter(type);
    sqlReader = new SqlServerReader(type);
    // Secondary reader is always pinned to Server170 regardless of the primary type.
    sqlReader170 = new SqlServerReader(Infrastructure.ConnectionType.Server170);
    dataSource = ds;
    this.redis = redis;
    // Idiom fix: "redis == true" simplified to "redis".
    if (redis)
    {
        redisReader = new RedisReader();
        redisWriter = new RedisWriter();
    }
}
/// <summary>
/// With no column mapping, the header fields mirror the CSV headers and the
/// writer emits plain INSERT statements.
/// </summary>
public void SqlWriterTest_GetInsertStatments()
{
    // Arrange
    var jsonSettings = @" { ""importFiles"": [ { ""file"": ""..\\..\\..\\..\\TestCsv\\simpleComma.csv"", ""batchSize"": 4 } ] }";
    var importTasks = (List<ImportFileOptions>)CsvToSql.Configuration.ImportTasks.ReadTasks(log, jsonSettings);
    var sqlWriter = new SqlServerWriter(log, "");
    var headers = new List<string>() { "head1", "head2", "head3" };
    var linesToWrite = new List<List<string>>() {
        new List<string>() { "a0", "a1", "a2" },
        new List<string>() { "b0", "b1", "b2" }
    };
    sqlWriter.Init(importTasks.First(), headers);

    // Act
    string insertSql = sqlWriter.GetInsertStatements(linesToWrite);

    // Assert
    // Fixed: Assert.IsNotNull instead of IsTrue(x != null), and Assert.AreEqual
    // argument order corrected to (expected, actual) per MSTest convention.
    Assert.IsNotNull(sqlWriter);
    Assert.AreEqual(3, sqlWriter.GetHeaderFields().Count); // { "head1", "head2", "head3"}
    Assert.IsTrue(insertSql.Contains("INSERT INTO"));
}
/// <summary>
/// A "##ImportDate" mapping key appends a DCImportDate column after the
/// (renamed) CSV headers.
/// </summary>
public void SqlWriterTest_InitHeadersWithcolumnMappingImportDate()
{
    // Arrange
    var jsonSettings = @" { ""importFiles"": [ { ""file"": ""..\\..\\..\\..\\TestCsv\\simpleComma.csv"", ""batchSize"": 4, ""saveMode"": true, ""columnMapping"": [ { ""head1"": ""NewHeadOne"", ""head2"": ""NewHeadTwo"", ""##ImportDate"" : ""DCImportDate"" } ] } ] }";
    var importTasks = (List<ImportFileOptions>)CsvToSql.Configuration.ImportTasks.ReadTasks(log, jsonSettings);
    var sqlWriter = new SqlServerWriter(log, "");
    var headers = new List<string>() { "head1", "head2", "head3" };

    // Act
    sqlWriter.Init(importTasks.First(), headers);
    var headerFields = sqlWriter.GetHeaderFields();

    // Assert
    // Fixed: Assert.AreEqual argument order corrected to (expected, actual) per
    // MSTest convention, and IsTrue(x != null) replaced with IsNotNull.
    Assert.AreEqual(4, headerFields.Count); // { "NewHeadOne", "NewHeadTwo", "head3", "DCImportDate"}
    Assert.AreEqual("NewHeadOne", headerFields[0].Name);
    Assert.AreEqual("NewHeadTwo", headerFields[1].Name);
    Assert.AreEqual("head3", headerFields[2].Name);
    Assert.AreEqual("DCImportDate", headerFields[3].Name);
    Assert.IsNotNull(sqlWriter);
}
// Creates the DailyTransaction database (data/log files rooted at sqlLocation) and its
// dbo.Stock table, if they do not already exist. The table is keyed on (Code, DateTime)
// and stores daily OHLCV bars plus adjustment factor and trade status.
// NOTE(review): the SQL is kept as a single verbatim string and executed as one script;
// sqlLocation is format-substituted into the file paths — it comes from configuration,
// not user input, so injection is not a concern here.
private static void CreateDailyTransactionDBAndTables(string sqlLocation, SqlServerWriter sqlWriter) { var sqlScript = string.Format(@"USE [master] if db_id('DailyTransaction') is null begin CREATE DATABASE [DailyTransaction] CONTAINMENT = NONE ON PRIMARY ( NAME = N'DailyTransaction', FILENAME = N'{0}\DailyTransaction.mdf' , SIZE = 5120KB , MAXSIZE = UNLIMITED, FILEGROWTH = 10%) LOG ON ( NAME = N'DailyTransaction_log', FILENAME = N'{0}\DailyTransaction_log.ldf' , SIZE = 2048KB , MAXSIZE = 2048GB , FILEGROWTH = 10%) ALTER DATABASE [DailyTransaction] SET COMPATIBILITY_LEVEL = 120 IF (1 = FULLTEXTSERVICEPROPERTY('IsFullTextInstalled')) begin EXEC [DailyTransaction].[dbo].[sp_fulltext_database] @action = 'enable' end end go if object_id('DailyTransaction.dbo.Stock') is null begin CREATE TABLE [DailyTransaction].[dbo].[Stock]( [Code] [varchar](20) NOT NULL, [DateTime] [date] NOT NULL, [OPEN] [decimal](12, 4) NULL, [HIGH] [decimal](12, 4) NULL, [LOW] [decimal](12, 4) NULL, [CLOSE] [decimal](12, 4) NULL, [VOLUME] [decimal](20, 0) NULL, [AMT] [decimal](20, 3) NULL, [ADJFACTOR] [decimal](20, 6) NULL, [TRADE_STATUS] [nvarchar](50) NULL, [UpdatedDateTime] [datetime] NULL CONSTRAINT [DF_Stock_UpdatedDateTime] DEFAULT (getdate()), CONSTRAINT [PK_Stock_1] PRIMARY KEY CLUSTERED ( [Code] ASC, [DateTime] ASC )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY] ) ON [PRIMARY] end", sqlLocation); sqlWriter.ExecuteSqlScript(sqlScript); }
// Trend-following T+0 backtest over the given board and date range; the constructor
// runs the whole pipeline (load stocks, fetch data, compute).
public trendT0(StockMinuteRepository stockMinutelyRepo, StockDailyRepository stockDailyRepo, string stockBoard, DateTime startDate, DateTime endDate)
{
    this.stockDailyRepo = stockDailyRepo;
    this.stockMinutelyRepo = stockMinutelyRepo;
    dateRepo = new TransactionDateTimeRepository(ConnectionType.Default);
    sqlWriter = new SqlServerWriter(ConnectionType.Server84);
    sqlReader = new SqlServerReader(ConnectionType.Local);
    this.stockBoard = stockBoard;
    this.startDate = startDate;
    this.endDate = endDate;
    // Trading calendar for the requested window.
    this.tradedays = dateRepo.GetStockTransactionDate(startDate, endDate);
    // NOTE(review): the result of searchAllStocks(...) is immediately discarded and
    // replaced with a single hard-coded "IF.CFE" entry — looks like a leftover
    // debugging override; confirm whether the full board scan should be restored.
    var list = searchAllStocks(stockBoard, startDate, endDate);
    list = new List <stockInfo>();
    stockInfo stock = new stockInfo();
    stock.code = "IF.CFE";
    stock.startDate = startDate;
    stock.endDate = endDate;
    list.Add(stock);
    getAllStocks(list);
    computeOnAllStocks(list);
}
/// <summary>
/// With "forceCreateTable": true the writer produces both DROP TABLE and
/// CREATE TABLE statements that include every header column.
/// </summary>
public void SqlWriterTest_CreateTableStatement()
{
    // Arrange
    var jsonSettings = @" { ""importFiles"": [ { ""file"": ""..\\..\\..\\..\\TestCsv\\simpleComma.csv"", ""batchSize"": 4, ""forceCreateTable"": true } ] }";
    var importTasks = (List<ImportFileOptions>)CsvToSql.Configuration.ImportTasks.ReadTasks(log, jsonSettings);
    var sqlWriter = new SqlServerWriter(log, "");
    var headers = new List<string>() { "head1", "head2" };

    // Act
    sqlWriter.Init(importTasks.First(), headers);
    var tableDropSql = sqlWriter.GetDropTableStatement();
    var tableCreateSql = sqlWriter.GetCreateTableStatement();

    // Assert
    // Fixed: use Assert.IsNotNull instead of IsTrue(x != null).
    Assert.IsNotNull(sqlWriter);
    Assert.IsTrue(tableCreateSql.Contains("CREATE TABLE"));
    Assert.IsTrue(tableDropSql.Contains("DROP TABLE"));
    Assert.IsTrue(tableCreateSql.Contains("head1"));
    Assert.IsTrue(tableCreateSql.Contains("head2"));
}
// Autofac module wiring the ADO pipeline: per-connection factories and schema readers,
// per-entity input readers/providers, then (when the process output is an ADO provider)
// process-level output/calculated-field/initializer registrations and per-entity
// output, master-update, and delete-handler registrations. Registration order and the
// provider switch cases are intentional; do not reorder.
protected override void Load(ContainerBuilder builder) {
    // Nothing to register without a process definition.
    if (_process == null) { return; }

    // connections
    foreach (var connection in _process.Connections.Where(c => _ado.Contains(c.Provider))) {
        // Connection Factory
        builder.Register <IConnectionFactory>(ctx => {
            switch (connection.Provider) {
            case "sqlserver": return(new SqlServerConnectionFactory(connection));
            case "mysql": return(new MySqlConnectionFactory(connection));
            case "postgresql": return(new PostgreSqlConnectionFactory(connection));
            case "sqlite": return(new SqLiteConnectionFactory(connection));
            case "sqlce": return(new SqlCeConnectionFactory(connection));
            case "access": return(new AccessConnectionFactory(connection));
            default: return(new NullConnectionFactory());
            }
        }).Named <IConnectionFactory>(connection.Key).InstancePerLifetimeScope();

        // Schema Reader
        builder.Register <ISchemaReader>(ctx => {
            var factory = ctx.ResolveNamed <IConnectionFactory>(connection.Key);
            return(new AdoSchemaReader(ctx.ResolveNamed <IConnectionContext>(connection.Key), factory));
        }).Named <ISchemaReader>(connection.Key);
    }

    //ISchemaReader
    //IOutputController
    //IRead (Process for Calculated Columns)
    //IWrite (Process for Calculated Columns)
    //IInitializer (Process)

    // Per Entity
    // IInputVersionDetector
    // IRead (Input, per Entity)
    // IOutputController
    // -- IBatchReader (for matching)
    // -- IWriteMasterUpdateQuery (for updating)
    // IUpdate
    // IWrite
    // IEntityDeleteHandler

    // entitiy input
    foreach (var entity in _process.Entities.Where(e => _ado.Contains(_process.Connections.First(c => c.Name == e.Connection).Provider))) {
        // INPUT READER
        builder.Register <IRead>(ctx => {
            var input = ctx.ResolveNamed <InputContext>(entity.Key);
            var rowFactory = ctx.ResolveNamed <IRowFactory>(entity.Key, new NamedParameter("capacity", input.RowCapacity));
            switch (input.Connection.Provider) {
            case "mysql":
            case "postgresql":
            case "sqlite":
            case "access":
            case "sqlce":
            case "sqlserver":
                return(new AdoInputReader(
                    input,
                    input.InputFields,
                    ctx.ResolveNamed <IConnectionFactory>(input.Connection.Key),
                    rowFactory
                ));

            default:
                return(new NullReader(input, false));
            }
        }).Named <IRead>(entity.Key);

        // INPUT VERSION DETECTOR
        builder.Register <IInputProvider>(ctx => {
            var input = ctx.ResolveNamed <InputContext>(entity.Key);
            switch (input.Connection.Provider) {
            case "mysql":
            case "postgresql":
            case "access":
            case "sqlite":
            case "sqlce":
            case "sqlserver":
                return(new AdoInputProvider(input, ctx.ResolveNamed <IConnectionFactory>(input.Connection.Key)));

            default:
                return(new NullInputProvider());
            }
        }).Named <IInputProvider>(entity.Key);
    }

    // entity output
    if (_ado.Contains(_process.Output().Provider)) {
        var calc = _process.ToCalculatedFieldsProcess();

        // PROCESS OUTPUT CONTROLLER
        builder.Register <IOutputController>(ctx => {
            var output = ctx.Resolve <OutputContext>();
            // Star view / flat table creation only happens in "init" mode.
            if (_process.Mode != "init") { return(new NullOutputController()); }
            switch (output.Connection.Provider) {
            case "mysql":
            case "postgresql":
            case "sqlite":
            case "access":
            case "sqlce":
            case "sqlserver":
                var actions = new List <IAction> { new AdoStarViewCreator(output, ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key)) };
                if (_process.Flatten) { actions.Add(new AdoFlatTableCreator(output, ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key))); }
                return(new AdoStarController(output, actions));

            default:
                return(new NullOutputController());
            }
        }).As <IOutputController>();

        // PROCESS CALCULATED READER
        builder.Register <IRead>(ctx => {
            var calcContext = new PipelineContext(ctx.Resolve <IPipelineLogger>(), calc, calc.Entities.First());
            var outputContext = new OutputContext(calcContext, new Incrementer(calcContext));
            var cf = ctx.ResolveNamed <IConnectionFactory>(outputContext.Connection.Key);
            var capacity = outputContext.Entity.Fields.Count + outputContext.Entity.CalculatedFields.Count;
            var rowFactory = new RowFactory(capacity, false, false);
            return(new AdoStarParametersReader(outputContext, _process, cf, rowFactory));
        }).As <IRead>();

        // PROCESS CALCULATED FIELD WRITER
        builder.Register <IWrite>(ctx => {
            var calcContext = new PipelineContext(ctx.Resolve <IPipelineLogger>(), calc, calc.Entities.First());
            var outputContext = new OutputContext(calcContext, new Incrementer(calcContext));
            var cf = ctx.ResolveNamed <IConnectionFactory>(outputContext.Connection.Key);
            return(new AdoCalculatedFieldUpdater(outputContext, _process, cf));
        }).As <IWrite>();

        // PROCESS INITIALIZER
        builder.Register <IInitializer>(ctx => {
            var output = ctx.Resolve <OutputContext>();
            var adoInit = new AdoInitializer(output, ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key));
            switch (output.Connection.Provider) {
            // Access needs an extra wrapper around the shared ADO initializer.
            case "access": return(new AccessInitializer(adoInit, output));
            default: return(adoInit);
            }
        }).As <IInitializer>();

        // ENTITIES
        foreach (var entity in _process.Entities) {
            builder.Register <IOutputProvider>(ctx => {
                IWrite writer;
                var output = ctx.ResolveNamed <OutputContext>(entity.Key);
                var cf = ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key);
                var rowFactory = ctx.ResolveNamed <IRowFactory>(entity.Key, new NamedParameter("capacity", output.GetAllEntityFields().Count()));

                // matcher determines what's an update vs. and insert
                var matcher = entity.Update ? (IBatchReader) new AdoEntityMatchingKeysReader(output, cf, rowFactory) : new NullBatchReader();

                switch (output.Connection.Provider) {
                case "sqlserver":
                    writer = new SqlServerWriter(
                        output,
                        cf,
                        matcher,
                        new AdoEntityUpdater(output, cf)
                    );
                    break;
                case "sqlce":
                    writer = new SqlCeWriter(
                        output,
                        cf,
                        matcher,
                        new AdoEntityUpdater(output, cf)
                    );
                    break;
                case "mysql":
                case "postgresql":
                case "access":
                case "sqlite":
                    writer = new AdoEntityWriter(
                        output,
                        matcher,
                        new AdoEntityInserter(output, cf),
                        entity.Update ? (IWrite) new AdoEntityUpdater(output, cf) : new NullWriter(output)
                    );
                    break;
                default:
                    writer = new NullWriter(output);
                    break;
                }
                return(new AdoOutputProvider(output, cf, writer));
            }).Named <IOutputProvider>(entity.Key);

            // ENTITY OUTPUT CONTROLLER
            builder.Register <IOutputController>(ctx => {
                var output = ctx.ResolveNamed <OutputContext>(entity.Key);
                // Per-entity initialization only happens in "init" mode.
                var initializer = _process.Mode == "init" ? (IAction) new AdoEntityInitializer(output, ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key)) : new NullInitializer();
                switch (output.Connection.Provider) {
                case "access":
                    return(new AdoOutputController(
                        output,
                        new AccessInitializer(initializer, output),
                        ctx.ResolveNamed <IInputProvider>(entity.Key),
                        ctx.ResolveNamed <IOutputProvider>(entity.Key),
                        ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key)
                    ));
                case "mysql":
                case "postgresql":
                case "sqlite":
                case "sqlce":
                case "sqlserver":
                    return(new AdoOutputController(
                        output,
                        initializer,
                        ctx.ResolveNamed <IInputProvider>(entity.Key),
                        ctx.ResolveNamed <IOutputProvider>(entity.Key),
                        ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key)
                    ));

                default:
                    return(new NullOutputController());
                }
            }).Named <IOutputController>(entity.Key);

            // MASTER UPDATE QUERY
            builder.Register <IWriteMasterUpdateQuery>(ctx => {
                var output = ctx.ResolveNamed <OutputContext>(entity.Key);
                var factory = ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key);
                switch (output.Connection.Provider) {
                case "mysql": return(new MySqlUpdateMasterKeysQueryWriter(output, factory));
                case "postgresql": return(new PostgreSqlUpdateMasterKeysQueryWriter(output, factory));
                case "access": return(new AccessUpdateMasterKeysQueryWriter(output, factory));
                default: return(new SqlServerUpdateMasterKeysQueryWriter(output, factory));
                }
            }).Named <IWriteMasterUpdateQuery>(entity.Key + "MasterKeys");

            // MASTER UPDATER
            builder.Register <IUpdate>(ctx => {
                var output = ctx.ResolveNamed <OutputContext>(entity.Key);
                switch (output.Connection.Provider) {
                case "mysql":
                case "postgresql":
                case "access":
                case "sqlserver":
                    return(new AdoMasterUpdater(
                        output,
                        ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key),
                        ctx.ResolveNamed <IWriteMasterUpdateQuery>(entity.Key + "MasterKeys")
                    ));
                case "sqlite":
                case "sqlce":
                    return(new AdoTwoPartMasterUpdater(output, ctx.ResolveNamed <IConnectionFactory>(output.Connection.Key)));

                default:
                    return(new NullMasterUpdater());
                }
            }).Named <IUpdate>(entity.Key);

            // DELETE HANDLER
            if (entity.Delete) {
                // register input keys and hashcode reader if necessary
                builder.Register(ctx => {
                    var inputContext = ctx.ResolveNamed <InputContext>(entity.Key);
                    var rowCapacity = inputContext.Entity.GetPrimaryKey().Count();
                    var rowFactory = new RowFactory(rowCapacity, false, true);
                    switch (inputContext.Connection.Provider) {
                    case "mysql":
                    case "postgresql":
                    case "sqlce":
                    case "access":
                    case "sqlite":
                    case "sqlserver":
                        return(new AdoReader(
                            inputContext,
                            entity.GetPrimaryKey(),
                            ctx.ResolveNamed <IConnectionFactory>(inputContext.Connection.Key),
                            rowFactory,
                            ReadFrom.Input
                        ));

                    default:
                        return(ctx.IsRegisteredWithName <IReadInputKeysAndHashCodes>(entity.Key) ? ctx.ResolveNamed <IReadInputKeysAndHashCodes>(entity.Key) : new NullReader(inputContext));
                    }
                }).Named <IReadInputKeysAndHashCodes>(entity.Key);

                // register output keys and hash code reader if necessary
                builder.Register((ctx => {
                    var context = ctx.ResolveNamed <IContext>(entity.Key);
                    var rowCapacity = context.Entity.GetPrimaryKey().Count();
                    var rowFactory = new RowFactory(rowCapacity, false, true);
                    var outputConnection = _process.Output();
                    switch (outputConnection.Provider) {
                    case "mysql":
                    case "postgresql":
                    case "access":
                    case "sqlce":
                    case "sqlite":
                    case "sqlserver":
                        var ocf = ctx.ResolveNamed <IConnectionFactory>(outputConnection.Key);
                        return(new AdoReader(context, entity.GetPrimaryKey(), ocf, rowFactory, ReadFrom.Output));

                    default:
                        return(ctx.IsRegisteredWithName <IReadOutputKeysAndHashCodes>(entity.Key) ? ctx.ResolveNamed <IReadOutputKeysAndHashCodes>(entity.Key) : new NullReader(context));
                    }
                })).Named <IReadOutputKeysAndHashCodes>(entity.Key);

                // deleter against the output connection
                builder.Register((ctx) => {
                    var outputConnection = _process.Output();
                    var outputContext = ctx.ResolveNamed <OutputContext>(entity.Key);
                    switch (outputConnection.Provider) {
                    case "mysql":
                    case "postgresql":
                    case "sqlce":
                    case "access":
                    case "sqlite":
                    case "sqlserver":
                        var ocf = ctx.ResolveNamed <IConnectionFactory>(outputConnection.Key);
                        return(new AdoDeleter(outputContext, ocf));

                    default:
                        return(ctx.IsRegisteredWithName <IDelete>(entity.Key) ? ctx.ResolveNamed <IDelete>(entity.Key) : new NullDeleter(outputContext));
                    }
                }).Named <IDelete>(entity.Key);

                builder.Register <IEntityDeleteHandler>(ctx => {
                    var context = ctx.ResolveNamed <IContext>(entity.Key);
                    var primaryKey = entity.GetPrimaryKey();
                    var handler = new DefaultDeleteHandler(
                        context,
                        ctx.ResolveNamed <IReadInputKeysAndHashCodes>(entity.Key),
                        ctx.ResolveNamed <IReadOutputKeysAndHashCodes>(entity.Key),
                        ctx.ResolveNamed <IDelete>(entity.Key)
                    );
                    // since the primary keys from the input may have been transformed into the output, you have to transform before comparing
                    // feels a lot like entity pipeline on just the primary keys... may look at consolidating
                    handler.Register(new DefaultTransform(context, entity.GetPrimaryKey().ToArray()));
                    handler.Register(TransformFactory.GetTransforms(ctx, context.Process, context.Entity, primaryKey));
                    handler.Register(new StringTruncateTransfom(context, primaryKey));
                    return(new ParallelDeleteHandler(handler));
                }).Named <IEntityDeleteHandler>(entity.Key);
            }
        }
    }
}