public void TwoTransactionsAndParallelWriting(IConnectionManager connection)
{
    //SQLite cannot run two parallel transactions - skip
    if (connection.ConnectionManagerType == ConnectionManagerType.SQLite) { return; }

    //Arrange: a second, cloned connection so each destination writes in its own transaction
    var secondConnection = connection.Clone();
    var sourceFixture = new TwoColumnsTableFixture(connection, "TransactionSourceParallelWrite");
    sourceFixture.InsertTestData();
    var destFixture1 = new TwoColumnsTableFixture(connection, "TransactionDest1");
    var destFixture2 = new TwoColumnsTableFixture(connection, "TransactionDest2");
    var source = new DbSource<MySimpleRow>(connection, "TransactionSourceParallelWrite");
    var dest1 = new DbDestination<MySimpleRow>(connection, "TransactionDest1", batchSize: 2);
    var dest2 = new DbDestination<MySimpleRow>(secondConnection, "TransactionDest2", batchSize: 2);
    var multicast = new Multicast<MySimpleRow>();

    //Act & Assert: open one transaction per connection, run the flow, then commit both
    connection.BeginTransaction(System.Data.IsolationLevel.ReadCommitted);
    secondConnection.BeginTransaction(System.Data.IsolationLevel.ReadCommitted);
    source.LinkTo(multicast);
    multicast.LinkTo(dest1);
    multicast.LinkTo(dest2);
    source.Execute();
    dest1.Wait();
    dest2.Wait();
    connection.CommitTransaction();
    secondConnection.CommitTransaction();

    //All three source rows must have arrived in each destination
    Assert.Equal(3, RowCountTask.Count(connection, "TransactionDest1"));
    Assert.Equal(3, RowCountTask.Count(connection, "TransactionDest2"));
}
public void DB_Multicast_DB_WithStringArray()
{
    //Arrange: one source table and two destinations with the same schema
    TableDefinition sourceDef = CreateTableForMyDataRow("test.Source");
    TableDefinition destDef1 = CreateTableForMyDataRow("test.Destination1");
    TableDefinition destDef2 = CreateTableForMyDataRow("test.Destination2");
    InsertDemoDataForMyRowTable("test.Source");

    var source = new DBSource() { SourceTableDefinition = sourceDef };
    var multicast = new Multicast();
    var dest1 = new DBDestination() { DestinationTableDefinition = destDef1 };
    var dest2 = new DBDestination() { DestinationTableDefinition = destDef2 };

    //Act: duplicate every row into both destinations
    source.LinkTo(multicast);
    multicast.LinkTo(dest1);
    multicast.LinkTo(dest2);
    source.Execute();
    dest1.Wait();
    dest2.Wait();

    //Assert: all three rows are present in the source and in each destination
    Assert.AreEqual(3, RowCountTask.Count("test.Source", "Col2 in (1,2,3)"));
    Assert.AreEqual(3, RowCountTask.Count("test.Destination1", "Col2 in (1,2,3)"));
    Assert.AreEqual(3, RowCountTask.Count("test.Destination2", "Col2 in (1,2,3)"));
}
public void DB_MulticastWPredicates_DB()
{
    //Arrange: one source table and two destinations with the same schema
    TableDefinition sourceDef = CreateTableForMyDataRow("test.Source");
    TableDefinition destDef1 = CreateTableForMyDataRow("test.Destination1");
    TableDefinition destDef2 = CreateTableForMyDataRow("test.Destination2");
    InsertDemoDataForMyRowTable("test.Source");

    var source = new DBSource<MyDataRow>() { SourceTableDefinition = sourceDef };
    var multicast = new Multicast<MyDataRow>();
    var dest1 = new DBDestination<MyDataRow>() { DestinationTableDefinition = destDef1 };
    var dest2 = new DBDestination<MyDataRow>() { DestinationTableDefinition = destDef2 };

    //Act: split rows by predicate - Value2 <= 2 goes left, Value2 > 2 goes right
    source.LinkTo(multicast);
    multicast.LinkTo(dest1, row => row.Value2 <= 2);
    multicast.LinkTo(dest2, row => row.Value2 > 2);
    source.Execute();
    dest1.Wait();
    dest2.Wait();

    //Assert: 3 source rows, split 2/1 across the destinations
    Assert.AreEqual(3, RowCountTask.Count("test.Source", "Col2 in (1,2,3)"));
    Assert.AreEqual(2, RowCountTask.Count("test.Destination1", "Col2 in (1,2)"));
    Assert.AreEqual(1, RowCountTask.Count("test.Destination2"));
}
public void DuplicateDataInto3Destinations()
{
    //Arrange
    var sourceFixture = new TwoColumnsTableFixture("Source");
    sourceFixture.InsertTestData();
    var destFixture1 = new TwoColumnsTableFixture("Destination1");
    var destFixture2 = new TwoColumnsTableFixture("Destination2");
    var destFixture3 = new TwoColumnsTableFixture("Destination3");
    var source = new DbSource<MySimpleRow>(Connection, "Source");
    var dest1 = new DbDestination<MySimpleRow>(Connection, "Destination1");
    var dest2 = new DbDestination<MySimpleRow>(Connection, "Destination2");
    var dest3 = new DbDestination<MySimpleRow>(Connection, "Destination3");

    //Act: fan the same rows out into three destinations
    var multicast = new Multicast<MySimpleRow>();
    source.LinkTo(multicast);
    multicast.LinkTo(dest1);
    multicast.LinkTo(dest2);
    multicast.LinkTo(dest3);
    source.Execute();
    dest1.Wait();
    dest2.Wait();
    dest3.Wait();

    //Assert: each destination holds a full copy of the test data
    destFixture1.AssertTestData();
    destFixture2.AssertTestData();
    destFixture3.AssertTestData();
}
//Demo flow: csv orders are parsed, enriched with a customer key, written to the
//orders table and aggregated into customer ratings; unrated orders go to a no-op sink.
public void Start()
{
    //Read the raw order data from csv
    CSVSource sourceOrderData = new CSVSource("src/DataFlow/DemoData.csv");
    sourceOrderData.Configuration.Delimiter = ";";
    RowTransformation<string[], Order> transIntoObject = new RowTransformation<string[], Order>(CSVIntoObject);

    //Lookup the matching customer key for every order
    DBSource<Customer> sourceCustomerData = new DBSource<Customer>(CustomerTableDef);
    LookupCustomerKey lookupCustKeyClass = new LookupCustomerKey();
    Lookup<Order, Order, Customer> lookupCustomerKey = new Lookup<Order, Order, Customer>(lookupCustKeyClass.FindKey, sourceCustomerData, lookupCustKeyClass.LookupData);

    Multicast<Order> multiCast = new Multicast<Order>();
    DBDestination<Order> destOrderTable = new DBDestination<Order>(OrderDataTableDef);
    BlockTransformation<Order> blockOrders = new BlockTransformation<Order>(BlockTransformOrders);
    DBDestination<Rating> destRating = new DBDestination<Rating>(CustomerRatingTableDef);
    RowTransformation<Order, Rating> transOrderIntoCust = new RowTransformation<Order, Rating>(OrderIntoRating);
    //No-op sink for orders without a rating (lambda cleaned up - was "{; }")
    CustomDestination<Order> destSink = new CustomDestination<Order>(row => { });

    sourceOrderData.LinkTo(transIntoObject);
    transIntoObject.LinkTo(lookupCustomerKey);
    lookupCustomerKey.LinkTo(multiCast);
    multiCast.LinkTo(destOrderTable);
    multiCast.LinkTo(blockOrders);
    blockOrders.LinkTo(transOrderIntoCust, ord => ord.Rating != null);
    blockOrders.LinkTo(destSink, ord => ord.Rating == null);
    transOrderIntoCust.LinkTo(destRating);

    sourceOrderData.ExecuteAsync();
    destOrderTable.Wait();
    destRating.Wait();
    //BUGFIX: destSink was linked but never awaited - without this Wait() the method
    //could return before the whole network has finished processing
    destSink.Wait();
}
public void SplitInto2Tables()
{
    //Arrange
    var sourceFixture = new TwoColumnsTableFixture("Source");
    sourceFixture.InsertTestData();
    var destFixture1 = new TwoColumnsTableFixture("Destination1");
    var destFixture2 = new TwoColumnsTableFixture("Destination2");
    var source = new DBSource(Connection, "Source");
    var dest1 = new DBDestination(Connection, "Destination1");
    var dest2 = new DBDestination(Connection, "Destination2");

    //Act: duplicate the source rows into both destination tables
    var multicast = new Multicast();
    source.LinkTo(multicast);
    multicast.LinkTo(dest1);
    multicast.LinkTo(dest2);
    source.Execute();
    dest1.Wait();
    dest2.Wait();

    //Assert: both destinations contain the complete test data
    destFixture1.AssertTestData();
    destFixture2.AssertTestData();
}
public void SplitInto2Tables()
{
    //Arrange
    var sourceFixture = new TwoColumnsTableFixture("Source");
    sourceFixture.InsertTestData();
    var destFixture1 = new TwoColumnsTableFixture("Destination1");
    var destFixture2 = new TwoColumnsTableFixture("Destination2");
    var source = new DbSource<string[]>("Source", Connection);
    var dest1 = new DbDestination<string[]>("Destination1", Connection);
    var dest2 = new DbDestination<string[]>("Destination2", Connection);

    //Act: duplicate the untyped (string[]) rows into both destinations
    var multicast = new Multicast<string[]>();
    source.LinkTo(multicast);
    multicast.LinkTo(dest1);
    multicast.LinkTo(dest2);
    source.Execute();
    dest1.Wait();
    dest2.Wait();

    //Assert: both destinations contain the complete test data
    destFixture1.AssertTestData();
    destFixture2.AssertTestData();
}
//Verifies that two destinations can write in parallel inside ONE transaction when the
//SQL Server connection has MARS (Multiple Active Result Sets) enabled.
public void OneTransactionAndParallelWritingWithMARS()
{
    //Arrange
    TwoColumnsTableFixture s2c = new TwoColumnsTableFixture(SqlConnection, "TransactionSourceParallelWrite");
    s2c.InsertTestData();
    TwoColumnsTableFixture d2c1 = new TwoColumnsTableFixture(SqlConnection, "TransactionDest1");
    TwoColumnsTableFixture d2c2 = new TwoColumnsTableFixture(SqlConnection, "TransactionDest2");
    DbSource<MySimpleRow> source = new DbSource<MySimpleRow>(SqlConnection, "TransactionSourceParallelWrite");
    //Build a MARS-enabled connection - both destinations share it and its transaction
    string constring = $"{Config.SqlConnection.RawConnectionString("DataFlow")};MultipleActiveResultSets=True;";
    var marscon = new SqlConnectionManager(constring);
    DbDestination<MySimpleRow> dest1 = new DbDestination<MySimpleRow>(marscon, "TransactionDest1", batchSize: 2);
    DbDestination<MySimpleRow> dest2 = new DbDestination<MySimpleRow>(marscon, "TransactionDest2", batchSize: 2);
    Multicast<MySimpleRow> multicast = new Multicast<MySimpleRow>();

    //Act & Assert
    marscon.BeginTransaction(System.Data.IsolationLevel.ReadCommitted);
    source.LinkTo(multicast);
    multicast.LinkTo(dest1);
    multicast.LinkTo(dest2);
    source.Execute();
    dest1.Wait();
    dest2.Wait();
    marscon.CommitTransaction();
    d2c1.AssertTestData();
    //BUGFIX: was "d2c1.AssertTestData()" twice - TransactionDest2 was never verified
    d2c2.AssertTestData();
}
public void PredicateFilteringWithInteger()
{
    //Arrange
    var sourceFixture = new TwoColumnsTableFixture("Source");
    sourceFixture.InsertTestData();
    var destFixture1 = new TwoColumnsTableFixture("Destination1");
    var destFixture2 = new TwoColumnsTableFixture("Destination2");
    var source = new DBSource<MySimpleRow>(Connection, "Source");
    var dest1 = new DBDestination<MySimpleRow>(Connection, "Destination1");
    var dest2 = new DBDestination<MySimpleRow>(Connection, "Destination2");

    //Act: route rows by Col1 - values up to 2 go left, above 2 go right
    var multicast = new Multicast<MySimpleRow>();
    source.LinkTo(multicast);
    multicast.LinkTo(dest1, row => row.Col1 <= 2);
    multicast.LinkTo(dest2, row => row.Col1 > 2);
    source.Execute();
    dest1.Wait();
    dest2.Wait();

    //Assert: rows 1 and 2 landed in Destination1, row 3 in Destination2
    Assert.Equal(1, RowCountTask.Count(Connection, "Destination1", "Col1 = 1 AND Col2='Test1'"));
    Assert.Equal(1, RowCountTask.Count(Connection, "Destination1", "Col1 = 2 AND Col2='Test2'"));
    Assert.Equal(1, RowCountTask.Count(Connection, "Destination2", "Col1 = 3 AND Col2='Test3'"));
}
public static void Run()
{
    Console.WriteLine("Running data flow");

    //Read data from csv file
    var orderCsv = new CsvSource("DemoData.csv");
    orderCsv.Configuration.Delimiter = ";";

    //Transform each csv line into an Order object
    //Header in Csv: OrderNumber;OrderItem;OrderAmount;CustomerName
    var parseOrder = new RowTransformation<ExpandoObject, Order>(csvLine =>
    {
        dynamic raw = csvLine as dynamic;
        var order = new Order()
        {
            Number = raw.OrderNumber,
            Item = raw.OrderItem,
            Amount = decimal.Parse(raw.OrderAmount.ToString().Replace("€", ""), CultureInfo.GetCultureInfo("en-US")),
            CustomerName = raw.CustomerName
        };
        return order;
    });
    orderCsv.LinkTo(parseOrder);

    //Find corresponding customer id if customer exists in Customer table
    var customerSource = new DbSource<Customer>("customer");
    var customerLookup = new LookupTransformation<Order, Customer>(customerSource);
    parseOrder.LinkTo(customerLookup);

    //Split data
    var split = new Multicast<Order>();
    customerLookup.LinkTo(split);

    //Store Order in Orders table
    var orderDest = new DbDestination<Order>("orders");
    split.LinkTo(orderDest);

    //Create rating for existing customers based total of order amount
    var ratingAggregation = new Aggregation<Order, Rating>();
    split.LinkTo(ratingAggregation);

    //Store the rating in the customer rating table
    var ratingDest = new DbDestination<Rating>("customer_rating");
    ratingAggregation.LinkTo(ratingDest);

    //Execute the data flow synchronously
    orderCsv.Execute();
    orderDest.Wait();
    ratingDest.Wait();
}
/* Data flow
 *
 * JsonSource --> RowTransformation --> Lookup --> Multicast --> DbDestination ("orders" table)
 * (Order data)                           |            |
 * CsvSource  <---------------------------             --------> TextDestination ("order_data.log")
 * ("customer.csv")
 */
static void Main(string[] args)
{
    //Preparation
    RecreateTargetTable();

    //Step 1 - creating the components
    var orderSource = new JsonSource<OrderRow>("https://www.etlbox.net/demo/api/orders", ResourceType.Http);

    //Quantity is encoded in the description after a colon, e.g. "Qty:5"
    var parseQuantity = new RowTransformation<OrderRow>();
    parseQuantity.TransformationFunc = row =>
    {
        row.Quantity = int.Parse(row.Description.Split(":").ElementAt(1));
        return row;
    };

    //Enrich each order with the customer name from the csv file
    var customerLookup = new LookupTransformation<OrderRow, ExpandoObject>();
    customerLookup.Source = new CsvSource("files/customer.csv");
    customerLookup.MatchColumns = new[] { new MatchColumn() { LookupSourcePropertyName = "Id", InputPropertyName = "CustomerId" } };
    customerLookup.RetrieveColumns = new[] { new RetrieveColumn() { LookupSourcePropertyName = "Name", InputPropertyName = "CustomerName" } };

    var split = new Multicast<OrderRow>();
    var dbDest = new DbDestination<OrderRow>(sqlConnMan, "orders");
    var textDest = new TextDestination<OrderRow>("files/order_data.log");
    textDest.WriteLineFunc = row => $"{row.OrderNumber}\t{row.CustomerName}\t{row.Quantity}";

    //Step2 - linking components
    orderSource.LinkTo(parseQuantity);
    parseQuantity.LinkTo(customerLookup);
    customerLookup.LinkTo(split);
    split.LinkTo(dbDest);
    split.LinkTo(textDest, row => row.CustomerName == "Clark Kent", row => row.CustomerName != "Clark Kent");

    //Step3 - executing the network
    Network.Execute(orderSource); //Shortcut for Network.ExecuteAsync(source).Wait();
}
//Verifies that writing into two destinations in parallel over ONE connection with an
//open transaction fails (contrast with the MARS variant, where it succeeds).
public void OneTransactionAndParallelWriting(IConnectionManager connection)
{
    //NOTE(review): SQLite and Oracle are skipped - presumably this failure scenario
    //does not apply to these providers; confirm against the supported test matrix
    if (connection.ConnectionManagerType == ConnectionManagerType.SQLite) { return; }
    if (connection.ConnectionManagerType == ConnectionManagerType.Oracle) { return; }
    //Arrange
    TwoColumnsTableFixture s2c = new TwoColumnsTableFixture(connection, "TransactionSourceParallelWrite");
    s2c.InsertTestData();
    TwoColumnsTableFixture d2c1 = new TwoColumnsTableFixture(connection, "TransactionDest1");
    TwoColumnsTableFixture d2c2 = new TwoColumnsTableFixture(connection, "TransactionDest2");
    DbSource<MySimpleRow> source = new DbSource<MySimpleRow>(connection, "TransactionSourceParallelWrite");
    //Both destinations deliberately share the SAME connection manager
    DbDestination<MySimpleRow> dest1 = new DbDestination<MySimpleRow>(connection, "TransactionDest1", batchSize: 2);
    DbDestination<MySimpleRow> dest2 = new DbDestination<MySimpleRow>(connection, "TransactionDest2", batchSize: 2);
    Multicast<MySimpleRow> multicast = new Multicast<MySimpleRow>();
    //Act & Assert: the parallel write inside one open transaction is expected to throw
    Assert.ThrowsAny<Exception>(() =>
    {
        try
        {
            connection.BeginTransaction(System.Data.IsolationLevel.ReadCommitted);
            source.LinkTo(multicast);
            multicast.LinkTo(dest1);
            multicast.LinkTo(dest2);
            source.Execute();
            dest1.Wait();
            dest2.Wait();
        }
        catch
        {
            //rethrow unchanged so Assert.ThrowsAny observes the original exception
            throw;
        }
        finally
        {
            //always clean up, even on the expected failure path, so the shared
            //connection is usable for subsequent tests
            connection.RollbackTransaction();
            connection.Close();
        }
    });
    if (connection.GetType() == typeof(MySqlConnectionManager))
    {
        Task.Delay(200).Wait(); //MySql needs a little bit longer to free resources
    }
}
public void SplitCSVSourceIn2Tables()
{
    //Arrange: two destination tables with different column sets
    var dest1Fixture = new TwoColumnsTableFixture("SplitDataDestination1");
    var dest2Fixture = new FourColumnsTableFixture("SplitDataDestination2");
    var source = new CSVSource<CSVPoco>("res/Multicast/CSVSourceToSplit.csv")
    {
        Configuration = new CsvHelper.Configuration.Configuration() { Delimiter = ";" }
    };
    var multicast = new Multicast<CSVPoco>();
    //Each branch projects the csv row onto a different entity type
    var toEntity1 = new RowTransformation<CSVPoco, Entity1>(input => new Entity1 { Col1 = input.CSVCol1, Col2 = input.CSVCol2 });
    var toEntity2 = new RowTransformation<CSVPoco, Entity2>(input => new Entity2 { Col2 = input.CSVCol2, Col3 = input.CSVCol3, Col4 = input.CSVCol4 });
    var destination1 = new DBDestination<Entity1>(Connection, "SplitDataDestination1");
    var destination2 = new DBDestination<Entity2>(Connection, "SplitDataDestination2");

    //Act
    source.LinkTo(multicast);
    multicast.LinkTo(toEntity1);
    multicast.LinkTo(toEntity2);
    toEntity1.LinkTo(destination1);
    toEntity2.LinkTo(destination2);
    source.Execute();
    destination1.Wait();
    destination2.Wait();

    //Assert
    dest1Fixture.AssertTestData();
    dest2Fixture.AssertTestData();
}
public void TestDuplicateCheckInRowTrans()
{
    CreateLogTablesTask.CreateLog();
    DataFlow.LoggingThresholdRows = 2;

    //Csv source that may contain rows with repeated IDs
    var source = new CSVSource<Poco>("src/DataFlowExamples/Duplicate.csv");
    source.Configuration.Delimiter = ";";
    source.Configuration.TrimOptions = CsvHelper.Configuration.TrimOptions.Trim;
    source.Configuration.MissingFieldFound = null;

    //at the end of the flow, this list will contain all IDs of your source
    var seenIds = new List<int>();
    //First occurrence of an ID passes through; any repeat is flagged as duplicate
    var markDuplicates = new RowTransformation<Poco, Poco>(row =>
    {
        if (seenIds.Contains(row.ID))
            row.IsDuplicate = true;
        else
            seenIds.Add(row.ID);
        return row;
    });

    var multicast = new Multicast<Poco>();
    var dest = new DBDestination<Poco>("dbo.Staging");
    //Create the staging table the flow writes into
    TableDefinition stagingTable = new TableDefinition("dbo.Staging", new List<TableColumn>()
    {
        new TableColumn("Key", "INT", allowNulls: false, isPrimaryKey: true, isIdentity: true),
        new TableColumn("ID", "INT", allowNulls: false),
        new TableColumn("Value", "NVARCHAR(100)", allowNulls: false),
        new TableColumn("Name", "NVARCHAR(100)", allowNulls: false)
    });
    stagingTable.CreateTable();
    var trash = new VoidDestination<Poco>();

    //Unique rows go to staging, duplicates are discarded
    source.LinkTo(markDuplicates);
    markDuplicates.LinkTo(multicast);
    multicast.LinkTo(dest, row => row.IsDuplicate == false);
    multicast.LinkTo(trash, row => row.IsDuplicate == true);
    source.Execute();
    dest.Wait();
    trash.Wait();
}
public void Multicast_Into2Tables()
{
    var table1 = this.CreateTable("test.Table1");
    var table2 = this.CreateTable("test.Table2");

    //Each branch maps the csv poco onto a different entity type
    var toEntity1 = new RowTransformation<TestPoco, TestEntity1>(input => new TestEntity1 { Col1 = input.Value1, Col3 = input.Value3 });
    var toEntity2 = new RowTransformation<TestPoco, TestEntity2>(input => new TestEntity2 { Col2 = input.Value2, Col4 = input.Value4 });

    var source = new CSVSource<TestPoco>("src/DataFlowExamples/Issue5.csv")
    {
        Configuration = new CsvHelper.Configuration.Configuration() { Delimiter = ";" }
    };
    var multicast = new Multicast<TestPoco>();
    var destination1 = new DBDestination<TestEntity1>("test.Table1");
    var destination2 = new DBDestination<TestEntity2>("test.Table2");

    source.LinkTo(multicast);
    multicast.LinkTo(toEntity1);
    multicast.LinkTo(toEntity2);
    toEntity1.LinkTo(destination1);
    toEntity2.LinkTo(destination2);
    source.Execute();
    destination1.Wait();
    destination2.Wait();

    //Two source rows end up split across the two tables
    Assert.AreEqual(2, RowCountTask.Count("test.Table1", "Col1 in ('one','five') and Col3 in ('three','seven')"));
    Assert.AreEqual(2, RowCountTask.Count("test.Table2", "Col2 in ('two','six') and Col4 in ('four','eight')"));
}
public static void Main(string[] args)
{
    PrepareSqlLiteDestination();

    //Scrape one year per batch, starting at StartYear and stopping after EndYear
    var yearToFetch = StartYear;
    var source = new CustomBatchSource<Accident>();
    source.ReadBatchFunc = _ =>
    {
        var batch = ParseAccidentsFromUrl($"https://aviation-safety.net/database/dblist.php?Year={yearToFetch}");
        yearToFetch++;
        return batch;
    };
    source.ReadingCompleted = _ => yearToFetch > EndYear;

    //NOTE(review): predicate filters on Year <= 1 while the multicast branch below
    //uses Year > 1 - presumably this drops rows without a valid year; confirm
    var filter = new FilterTransformation<Accident>();
    filter.FilterPredicate = accident => accident.Year <= 1;

    var multicast = new Multicast<Accident>();
    var memDest = new MemoryDestination<Accident>();
    var sqlLiteDest = new DbDestination<Accident>(SQLiteConnection, "Accidents");
    var aggregation = new Aggregation<Accident, AccidentsPerYear>();
    var csvDest = new CsvDestination<AccidentsPerYear>("aggregated.csv");

    source.LinkTo(filter);
    filter.LinkTo(multicast);
    multicast.LinkTo(memDest);
    multicast.LinkTo(sqlLiteDest);
    multicast.LinkTo(aggregation, row => row.Year > 1);
    aggregation.LinkTo(csvDest);

    Network.Execute(source);

    Console.WriteLine($"Imported {memDest.Data.Count} rows from aviation-safety.net");
    for (int year = StartYear; year <= EndYear; year++)
    {
        Console.WriteLine($"There were {memDest.Data.Where(a => a.Year == year).Count()} accidents in {year}");
    }
}
public void WriteIntoMultipleDestinations()
{
    //Arrange: a flow whose transformation always throws, so every row becomes an error
    var source = new MemorySource<string[]>();
    source.DataAsList.Add(new string[] { "Test" });
    var trans = new RowTransformation<string[]>();
    trans.TransformationFunc = r => throw new Exception();
    var dest = new MemoryDestination<string[]>();
    CreateErrorTableTask.Create(SqlConnection, "error_log");

    var errorSplit = new Multicast<ETLBoxError>();
    var errorMem = new MemoryDestination<ETLBoxError>();
    var errorDb = new DbDestination<ETLBoxError>(SqlConnection, "error_log");
    var errorCsv = new CsvDestination<ETLBoxError>("error_csv.csv");

    source.LinkTo(trans);
    trans.LinkTo(dest);

    //Act: route the errors into three destinations at once
    trans.LinkErrorTo(errorSplit);
    errorSplit.LinkTo(errorMem);
    errorSplit.LinkTo(errorDb);
    errorSplit.LinkTo(errorCsv);
    source.Execute();
    dest.Wait();
    errorMem.Wait();
    errorDb.Wait();
    errorCsv.Wait();

    //Assert: every error destination received at least one row
    Assert.True(errorMem.Data.Count > 0);
    Assert.True(RowCountTask.Count(SqlConnection, "error_log") > 0);
    Assert.True(File.ReadAllText("error_csv.csv").Length > 0);
}
public void WriteParallelWhileTransactionOpen(IConnectionManager connection, int numberOfRows)
{
    //Arrange
    BigDataCsvSource.CreateCSVFileIfNeeded(numberOfRows);
    ReCreateDestinationTable(connection, "TransactionDestination1");
    ReCreateDestinationTable(connection, "TransactionDestination2");
    var csvSource = new CsvSource(BigDataCsvSource.GetCompleteFilePath(numberOfRows));
    var destination1 = new DbDestination(connection, "TransactionDestination1");
    var destination2 = new DbDestination(connection, "TransactionDestination2");
    var split = new Multicast();

    //Act & Assert: two destinations writing in parallel on one connection
    //with an open transaction is expected to throw
    Assert.ThrowsAny<Exception>(() =>
    {
        connection.BeginTransaction();
        csvSource.LinkTo(split);
        split.LinkTo(destination1);
        split.LinkTo(destination2);
        csvSource.Execute();
        destination1.Wait();
        destination2.Wait();
    });
}
public void DuplicateCheckInRowTrans()
{
    //Arrange
    CsvSource<Poco> source = CreateDuplicateCsvSource("res/UseCases/DuplicateCheck.csv");
    //at the end of the flow, this list will contain all IDs of your source
    var seenIds = new List<int>();

    //Act: first occurrence of an ID passes, any repeat is flagged as duplicate
    var markDuplicates = new RowTransformation<Poco, Poco>(row =>
    {
        if (seenIds.Contains(row.ID))
            row.IsDuplicate = true;
        else
            seenIds.Add(row.ID);
        return row;
    });
    var multicast = new Multicast<Poco>();
    DbDestination<Poco> dest = CreateDestinationTable("dbo.DuplicateCheck");
    var trash = new VoidDestination<Poco>();

    //Unique rows reach the destination table, duplicates are discarded
    source.LinkTo(markDuplicates);
    markDuplicates.LinkTo(multicast);
    multicast.LinkTo(dest, row => row.IsDuplicate == false);
    multicast.LinkTo(trash, row => row.IsDuplicate == true);
    source.Execute();
    dest.Wait();
    trash.Wait();

    //Assert
    AssertDataWithoutDuplicates();
}
//Azure Function triggered every minute: exports orders newer than the last synced key
//from SQL Server into daily json blobs and remembers the highest exported Id.
public static async Task Run([TimerTrigger("0 */1 * * * *"
                                           //, RunOnStartup=true) //only for testing purposes
                                           )] TimerInfo myTimer, ILogger log)
{
    //NOTE(review): method is async but contains no await (Network.Execute is used) -
    //this produces compiler warning CS1998; confirm whether ExecuteAsync was intended
    Logging.LogInstance = log;
    //Connection strings come from the function app settings / environment
    string sqlConnectionString = Environment.GetEnvironmentVariable("SqlServerConnectionString", EnvironmentVariableTarget.Process);
    string storageConnString = Environment.GetEnvironmentVariable("AzureWebJobsStorage", EnvironmentVariableTarget.Process);
    SqlConnectionManager conn = new SqlConnectionManager(sqlConnectionString);
    //One-time demo setup; containerName is a member declared elsewhere in this class
    if (!DemoHelper.WasInitialized)
    {
        containerName = DemoHelper.PrepareForDemo(storageConnString, conn);
    }
    //Only read orders with an Id greater than the key stored by the previous run
    SyncData syncDataLastRun = ReadLastSyncKey();
    var parameter = new[] { new QueryParameter() { Name = "syncId", Value = syncDataLastRun.SyncId } };
    var dbSource = new DbSource<Order>()
    {
        ConnectionManager = conn,
        Sql = $"SELECT Id, Number, Details, Date FROM Orders WHERE Id > @syncId ORDER BY Date",
        SqlParameter = parameter
    };
    //Write one blob per day - rows arrive ordered by Date, so a date change starts a new file
    var jsonDest = new JsonDestination<Order>();
    jsonDest.ResourceType = ResourceType.AzureBlob;
    jsonDest.AzureBlobStorage.ConnectionString = storageConnString;
    jsonDest.AzureBlobStorage.ContainerName = containerName;
    var currentDate = new DateTime(1900, 1, 1); //sentinel earlier than any real order date
    jsonDest.HasNextUri = (_, order) =>
    {
        if (order.Date.Date > currentDate.Date)
        {
            currentDate = order.Date;
            return(true);
        }
        return(false);
    };
    jsonDest.GetNextUri = (_, order) => "OrderData_" + order.Date.ToString("yyyy-MM-dd") + ".json";
    var multicast = new Multicast<Order>();
    //Track the highest exported Id so the next run can continue from there
    var aggregation = new Aggregation<Order, SyncData>();
    aggregation.AggregateColumns = new[] { new AggregateColumn() { InputValuePropName = "Id", AggregatedValuePropName = "SyncId", AggregationMethod = AggregationMethod.Max } };
    var syncMemoryDest = new MemoryDestination<SyncData>();
    /*
     *                        |---> jsonDest ("OrderData_2020-01-01.json", "OrderData_2020-01-02.json", ..)
     *                        |
     * dbSource --> multicast
     *                        |
     *                        |---> aggregation --> syncMemoryDest (1st run: SyncId = 5, 2nd run: SyncId = 7)
     */
    dbSource.LinkTo(multicast);
    multicast.LinkTo(jsonDest);
    multicast.LinkTo(aggregation);
    aggregation.LinkTo(syncMemoryDest);
    Network.Execute(dbSource);
    //Persist the new sync key only if at least one row was exported this run
    if (syncMemoryDest.Data.Count > 0)
    {
        SyncData syncDataThisRun = syncMemoryDest.Data.First();
        StoreLastSyncKey(syncDataThisRun);
    }
}