/// <summary>
/// Initializes a datalink test run.
/// Takes a deep copy of the hub (serialize/deserialize round-trip) so that objects updated
/// during the test run do not mutate the caller's hub instance.
/// </summary>
/// <param name="transformSettings">Settings applied when resolving connections.</param>
/// <param name="logger">Logger for run diagnostics.</param>
/// <param name="datalinkTest">The datalink test definition to run.</param>
/// <param name="hub">The hub containing connections and datalinks; copied before use.</param>
/// <param name="transformWriterOptions">Options passed through to the writer result.</param>
/// <param name="alertQueue">Queue used to raise alerts (may be null).</param>
/// <param name="alertEmails">Email recipients for alerts.</param>
/// <exception cref="DatalinkRunException">
/// Thrown when the test specifies an audit connection key that does not exist in the hub.
/// </exception>
public DatalinkTestRun(
    TransformSettings transformSettings,
    ILogger logger,
    DexihDatalinkTest datalinkTest,
    DexihHub hub,
    TransformWriterOptions transformWriterOptions,
    IAlertQueue alertQueue,
    string[] alertEmails)
{
    _transformSettings = transformSettings;
    _transformWriterOptions = transformWriterOptions;
    _logger = logger;

    // Create a copy of the hub, as the test run will update objects within it.
    _hub = hub.Serialize().Deserialize<DexihHub>();

    _datalinkTest = datalinkTest;
    _alertQueue = alertQueue;
    _alertEmails = alertEmails;

    Connection auditConnection;

    if (datalinkTest.AuditConnectionKey > 0)
    {
        var dbAuditConnection =
            _hub.DexihConnections.SingleOrDefault(c => c.IsValid && c.Key == datalinkTest.AuditConnectionKey);

        if (dbAuditConnection == null)
        {
            throw new DatalinkRunException(
                $"Audit connection with key {datalinkTest.AuditConnectionKey} was not found.");
        }

        auditConnection = dbAuditConnection.GetConnection(_transformSettings);
    }
    else
    {
        // No audit connection configured; fall back to an in-memory connection.
        auditConnection = new ConnectionMemory();
    }

    TestResults = new List<TestResult>();

    WriterResult = new TransformWriterResult()
    {
        AuditConnection = auditConnection,
        AuditConnectionKey = datalinkTest.AuditConnectionKey ?? 0,
        AuditType = Constants.DatalinkTest,
        HubKey = _hub.HubKey,
        ReferenceKey = datalinkTest.Key,
        ParentAuditKey = 0,
        ReferenceName = datalinkTest.Name,
        SourceTableKey = 0,
        SourceTableName = "",
        TransformWriterOptions = _transformWriterOptions
    };
}
/// <summary>
/// Replaces the current writer result with a fresh one bound to this datajob's audit
/// settings, and re-attaches the progress and status event handlers.
/// </summary>
public void ResetWriterResult()
{
    var writerResult = new TransformWriterResult
    {
        AuditConnection = _auditConnection,
        AuditConnectionKey = Datajob.AuditConnectionKey ?? 0,
        AuditType = Constants.Datajob,
        HubKey = _hub.HubKey,
        ReferenceKey = Datajob.Key,
        ParentAuditKey = 0,
        ReferenceName = Datajob.Name,
        SourceTableKey = 0,
        SourceTableName = "",
        TransformWriterOptions = _transformWriterOptions,
    };

    // Wire up event relays before publishing the new result.
    writerResult.OnProgressUpdate += Datajob_OnProgressUpdate;
    writerResult.OnStatusUpdate += Datajob_OnStatusUpdate;

    WriterResult = writerResult;
}
/// <summary>
/// Exercises TransformDelta with the AppendUpdate strategy: the initial load creates all rows,
/// a rerun ignores unchanged rows, modified target rows produce updates, removed target rows
/// produce creates, and source deletes cause no change (delete detection is off).
/// </summary>
public async Task RunDeltaTest_update()
{
    var source = Helpers.CreateUnSortedTestData();
    source.SetCacheMethod(Transform.ECacheMethod.PreLoadCache);

    var targetTable = source.CacheTable.Copy();
    targetTable.AddAuditColumns();

    Transform target = new ReaderMemory(targetTable);

    // Run an update load with nothing in the target, which will result in 10 rows created.
    var transformDelta = new TransformDelta(source, target, TransformDelta.EUpdateStrategy.AppendUpdate, 0, false);
    transformDelta.SetCacheMethod(Transform.ECacheMethod.PreLoadCache);

    var count = 0;
    while (await transformDelta.ReadAsync())
    {
        // Assert.Equal gives actual-vs-expected detail on failure, unlike Assert.True(a == b).
        Assert.Equal('C', (char)transformDelta["Operation"]);
        Assert.Equal(count + 1L, (long)transformDelta["SurrogateKey"]);
        Assert.Equal(count + 1, (int)transformDelta["IntColumn"]);
        count++;
    }
    Assert.Equal(10, count);

    transformDelta.SetRowNumber(0);

    // Write the result to a memory table.
    var memoryConnection = new ConnectionMemory();
    var writer = new TransformWriter();
    var result = new TransformWriterResult();
    result.SetProperties(0, 10, "DataLink", 1, 2, "Test", 1, "Source", 2, "Target", null, null, TransformWriterResult.ETriggerMethod.Manual, "Test");
    var writeResult = await writer.WriteAllRecords(result, transformDelta, target.CacheTable, memoryConnection, CancellationToken.None);
    Assert.True(writeResult);

    target = memoryConnection.GetTransformReader(target.CacheTable); // new ReaderMemory(target.CacheTable, null);
    target.SetCacheMethod(Transform.ECacheMethod.PreLoadCache);

    // Set both pointers back to the start and rerun. Now all 10 rows should be ignored.
    source.SetRowNumber(0);
    target.SetRowNumber(0);

    // Run an append (the only difference from a reload is no truncate record at the start).
    transformDelta = new TransformDelta(source, target, TransformDelta.EUpdateStrategy.AppendUpdate, 0, false);

    count = 0;
    while (await transformDelta.ReadAsync())
    {
        count++;
    }
    Assert.Equal(10, transformDelta.TotalRowsIgnored);
    Assert.Equal(0, count);

    // Change 3 rows (first, middle, last) to simulate target table data changes.
    target.CacheTable.Data[0][4] = 100;
    target.CacheTable.Data[5][4] = 200;
    target.CacheTable.Data[9][4] = 300;

    // Add a duplicate in the source.
    var row = new object[target.CacheTable.Columns.Count];
    target.CacheTable.Data[9].CopyTo(row, 0);
    target.CacheTable.Data.Add(row);

    transformDelta.Reset();
    count = 0;
    while (await transformDelta.ReadAsync())
    {
        count++;
        Assert.Equal('U', (char)transformDelta["Operation"]);
    }
    Assert.Equal(3, count);

    // Delete rows from the target, which should trigger two creates.
    target.CacheTable.Data.RemoveAt(1);
    target.CacheTable.Data.RemoveAt(7);

    transformDelta.Reset();
    count = 0;
    var rowsCreated = 0;
    var rowsUpdated = 0;
    while (await transformDelta.ReadAsync())
    {
        rowsCreated += (char)transformDelta["Operation"] == 'C' ? 1 : 0;
        rowsUpdated += (char)transformDelta["Operation"] == 'U' ? 1 : 0;
        count++;
    }
    Assert.Equal(2, rowsCreated);
    Assert.Equal(3, rowsUpdated);
    Assert.Equal(5, count);

    // Delete rows from the source, which should not cause any change as delete detection is not on.
    source.CacheTable.Data.RemoveAt(9);
    source.CacheTable.Data.RemoveAt(0); // this is the row that was updated, so updates now = 2

    transformDelta.Reset();
    count = 0;
    rowsCreated = 0;
    rowsUpdated = 0;
    while (await transformDelta.ReadAsync())
    {
        rowsCreated += (char)transformDelta["Operation"] == 'C' ? 1 : 0;
        rowsUpdated += (char)transformDelta["Operation"] == 'U' ? 1 : 0;
        count++;
    }
    Assert.Equal(1, rowsCreated);
    Assert.Equal(2, rowsUpdated);
    Assert.Equal(3, count);
}
/// <summary>
/// Exercises TransformDelta with the preserve strategies: the initial load creates all rows,
/// changed rows are preserved as new versions (creates counted alongside TotalRowsPreserved),
/// and rerunning the delta after writing ignores all unchanged rows.
/// </summary>
public async Task RunDeltaTest_updatePreserve()
{
    var source = Helpers.CreateUnSortedTestData();
    source.SetCacheMethod(Transform.ECacheMethod.PreLoadCache);

    var targetTable = source.CacheTable.Copy();
    targetTable.AddAuditColumns();

    long surrogateKey = 0;

    Transform target = new ReaderMemory(targetTable);
    target.SetCacheMethod(Transform.ECacheMethod.PreLoadCache);

    // Run an update load with nothing in the target.
    var transformDelta = new TransformDelta(source, target, TransformDelta.EUpdateStrategy.AppendUpdateDeletePreserve, surrogateKey, false);
    transformDelta.SetCacheMethod(Transform.ECacheMethod.PreLoadCache);

    var count = 0;
    while (await transformDelta.ReadAsync())
    {
        // Assert.Equal gives actual-vs-expected detail on failure, unlike Assert.True(a == b).
        Assert.Equal('C', (char)transformDelta["Operation"]);
        Assert.Equal(count + 1L, (long)transformDelta["SurrogateKey"]);
        Assert.Equal(count + 1, (int)transformDelta["IntColumn"]);
        count++;
    }
    Assert.Equal(10, count);

    surrogateKey = transformDelta.SurrogateKey;
    transformDelta.SetRowNumber(0);

    // Write the result to a memory table.
    var memoryConnection = new ConnectionMemory();
    var writer = new TransformWriter();
    var result = new TransformWriterResult();
    result.SetProperties(0, 1, "DataLink", 1, 2, "Test", 1, "Source", 2, "Target", null, null, TransformWriterResult.ETriggerMethod.Manual, "Test");
    await writer.WriteAllRecords(result, transformDelta, target.CacheTable, memoryConnection, CancellationToken.None);
    target = memoryConnection.GetTransformReader(target.CacheTable);
    target.SetCacheMethod(Transform.ECacheMethod.PreLoadCache);

    // Run an append (the only difference from a reload is no truncate record at the start).
    transformDelta = new TransformDelta(source, target, TransformDelta.EUpdateStrategy.AppendUpdatePreserve, surrogateKey, false);
    count = 0;
    while (await transformDelta.ReadAsync())
    {
        count++;
    }

    // Change 3 rows (first, middle, last).
    target.CacheTable.Data[0][4] = 100;
    target.CacheTable.Data[5][4] = 200;
    target.CacheTable.Data[9][4] = 300;

    // Add a duplicate in the source.
    var row = new object[target.CacheTable.Columns.Count];
    target.CacheTable.Data[9].CopyTo(row, 0);
    target.CacheTable.Data.Add(row);

    transformDelta = new TransformDelta(source, target, TransformDelta.EUpdateStrategy.AppendUpdatePreserve, surrogateKey, false);
    transformDelta.SetCacheMethod(Transform.ECacheMethod.PreLoadCache);

    count = 0;
    var rowsCreated = 0;
    while (await transformDelta.ReadAsync())
    {
        rowsCreated += (char)transformDelta["Operation"] == 'C' ? 1 : 0;
        count++;
    }
    Assert.Equal(3, rowsCreated);
    Assert.Equal(3, transformDelta.TotalRowsPreserved);
    Assert.Equal(6, count);

    // Run the delta again; this should ignore all 10 records.
    transformDelta.SetRowNumber(0);
    result = new TransformWriterResult();
    result.SetProperties(0, 1, "DataLink", 30, 40, "Test", 1, "Source", 2, "Target", null, null, TransformWriterResult.ETriggerMethod.Manual, "Test");
    await writer.WriteAllRecords(result, transformDelta, target.CacheTable, memoryConnection, CancellationToken.None);
    target = memoryConnection.GetTransformReader(target.CacheTable);

    transformDelta = new TransformDelta(source, target, TransformDelta.EUpdateStrategy.AppendUpdatePreserve, surrogateKey, false);
    count = 0;
    while (await transformDelta.ReadAsync())
    {
        count++;
    }
    Assert.Equal(10, transformDelta.TotalRowsIgnored);
    Assert.Equal(0, count);
}
/// <summary>
/// Performance test for the transform writer; should run in around 1 minute.
/// Loads <paramref name="rows"/> rows into a source table in 5000-row batches, then runs a
/// mapping/validation/delta chain into an audited target table and verifies the audit results.
/// </summary>
/// <param name="connection">The connection to run the performance test against.</param>
/// <param name="databaseName">Name of the database to create for the test.</param>
/// <param name="rows">Number of rows to generate and load.</param>
public async Task PerformanceTransformWriter(Connection connection, string databaseName, long rows)
{
    await connection.CreateDatabase(databaseName, CancellationToken.None);

    // Create a table that utilizes every available datatype.
    var table = new Table("LargeTable" + (DataSets.counter++));
    table.Columns.Add(new TableColumn("SurrogateKey", ETypeCode.Int32, TableColumn.EDeltaType.SurrogateKey) { IsIncrementalUpdate = true });
    table.Columns.Add(new TableColumn("UpdateTest", ETypeCode.Int32));

    foreach (ETypeCode typeCode in Enum.GetValues(typeof(ETypeCode)))
    {
        // NOTE(review): these two guards combined only admit Binary columns on connections
        // where CanUseBinary is false, which looks inverted — confirm intent before changing.
        if (typeCode == ETypeCode.Binary && connection.CanUseBinary)
        {
            continue;
        }

        if (typeCode != ETypeCode.Binary)
        {
            continue;
        }

        table.Columns.Add(new TableColumn()
        {
            Name = "column" + typeCode,
            DataType = typeCode,
            MaxLength = 50,
            DeltaType = TableColumn.EDeltaType.TrackingField
        });
    }

    // Create the table.
    await connection.CreateTable(table, true, CancellationToken.None);

    // Add rows, flushing to the connection every 5000 rows (and on the final row).
    var buffer = 0;
    for (var i = 0; i < rows; i++)
    {
        var row = new object[table.Columns.Count];
        row[0] = i;
        row[1] = 0;

        // Load the tracking columns with the connection's max value on even rows and
        // min value on odd rows.
        for (var j = 2; j < table.Columns.Count; j++)
        {
            if (i % 2 == 0)
            {
                row[j] = connection.GetConnectionMaxValue(table.Columns[j].DataType, 20);
            }
            else
            {
                row[j] = connection.GetConnectionMinValue(table.Columns[j].DataType);
            }
        }

        table.Data.Add(row);
        buffer++;

        if (buffer >= 5000 || rows == i + 1)
        {
            // Start a data writer and insert the test data.
            await connection.DataWriterStart(table);
            await connection.ExecuteInsertBulk(table, new ReaderMemory(table), CancellationToken.None);
            table.Data.Clear();
            buffer = 0;
        }
    }

    var targetTable = table.Copy();
    targetTable.AddAuditColumns();
    targetTable.Name = "TargetTable";
    await connection.CreateTable(targetTable, false, CancellationToken.None);

    var targetTransform = connection.GetTransformReader(targetTable);

    // Read the source through the full mapping/validation/delta chain.
    var transform = connection.GetTransformReader(table);
    transform = new TransformMapping(transform, true, null, null);
    transform = new TransformValidation(transform, null, false);
    transform = new TransformDelta(transform, targetTransform, TransformDelta.EUpdateStrategy.Reload, 1, false);

    var writer = new TransformWriter();
    var writerResult = new TransformWriterResult();
    await connection.InitializeAudit(writerResult, 0, "Datalink", 1, 2, "Test", 1, "Source", 2, "Target", TransformWriterResult.ETriggerMethod.Manual, "Test", CancellationToken.None);
    Assert.NotNull(writerResult);

    await writer.WriteAllRecords(writerResult, transform, targetTable, connection, null, null, null, null, CancellationToken.None);
    Assert.Equal(rows, writerResult.RowsCreated);

    // Check the audit table loaded correctly.
    var auditTable = await connection.GetTransformWriterResults(0, null, "Datalink", writerResult.AuditKey, null, true, false, false, null, 1, 2, false, CancellationToken.None);
    Assert.Equal(writerResult.RowsCreated, auditTable[0].RowsCreated);
    Assert.Equal(rows - 1, Convert.ToInt64(auditTable[0].MaxIncrementalValue));
}
/// <summary>
/// Relays a writer status update to any OnStatusUpdate subscribers.
/// </summary>
public void DatalinkTest_OnStatusUpdate(TransformWriterResult writer) => OnStatusUpdate?.Invoke(writer);
/// <summary>
/// End-to-end transform test: writes test data into a new table, verifies connection-side
/// sorting and filtering (where the connection supports them), then runs an append/update
/// delta into an audited delta table and checks the audit results.
/// </summary>
public async Task Transform(Connection connection, string databaseName)
{
    var table = DataSets.CreateTable();

    await connection.CreateDatabase(databaseName, CancellationToken.None);

    // Create a new table and write some data to it.
    Transform reader = DataSets.CreateTestData();
    await connection.CreateTable(table, true, CancellationToken.None);

    var writer = new TransformWriter();
    var writerResult = new TransformWriterResult();
    await connection.InitializeAudit(writerResult, 0, "DataLink", 1, 2, "Test", 1, "Source", 2, "Target", TransformWriterResult.ETriggerMethod.Manual, "Test", CancellationToken.None);
    var writeRecords = await writer.WriteAllRecords(writerResult, reader, table, connection, null, null, null, null, CancellationToken.None);
    Assert.True(writeRecords, $"WriteAllRecords failed with message {writerResult.Message}. Details:{writerResult.ExceptionDetails}");

    // Check the database can sort.
    if (connection.CanSort)
    {
        // Use the new table to test the database is sorting.
        reader = connection.GetTransformReader(table);
        var sortQuery = new SelectQuery
        {
            Sorts = new List<Sort> { new Sort("IntColumn", Sort.EDirection.Descending) }
        };
        await reader.Open(0, sortQuery, CancellationToken.None);

        var expectedValue = 10;
        while (await reader.ReadAsync())
        {
            Assert.Equal(expectedValue, Convert.ToInt32(reader["IntColumn"]));
            expectedValue--;
        }
        Assert.Equal(0, expectedValue);
    }

    // Check the database can filter.
    if (connection.CanFilter)
    {
        // Use the new table to test the database is filtering.
        reader = connection.GetTransformReader(table);
        var filterQuery = new SelectQuery
        {
            Filters = new List<Filter> { new Filter("IntColumn", Filter.ECompare.LessThanEqual, 5) }
        };
        await reader.Open(0, filterQuery, CancellationToken.None);

        var matchedRows = 0;
        while (await reader.ReadAsync())
        {
            Assert.True(Convert.ToInt32(reader["IntColumn"]) <= 5);
            matchedRows++;
        }
        Assert.Equal(5, matchedRows);
    }

    var deltaTable = DataSets.CreateTable();
    deltaTable.AddAuditColumns();
    deltaTable.Name = "DeltaTable";
    await connection.CreateTable(deltaTable, true, CancellationToken.None);

    var targetReader = connection.GetTransformReader(deltaTable);
    reader = connection.GetTransformReader(table);
    var transformDelta = new TransformDelta(reader, targetReader, TransformDelta.EUpdateStrategy.AppendUpdate, 1, false);

    writerResult = new TransformWriterResult();
    await connection.InitializeAudit(writerResult, 0, "Datalink", 1, 2, "Test", 1, "Source", 2, "Target", TransformWriterResult.ETriggerMethod.Manual, "Test", CancellationToken.None);
    var writeAllResult = await writer.WriteAllRecords(writerResult, transformDelta, deltaTable, connection, CancellationToken.None);
    Assert.True(writeAllResult, writerResult.Message);
    Assert.Equal(10L, writerResult.RowsCreated);

    // Check the audit table loaded correctly.
    var auditTable = await connection.GetTransformWriterResults(0, null, "Datalink", writerResult.AuditKey, null, true, false, false, null, 1, null, false, CancellationToken.None);
    Assert.Equal(10L, auditTable[0].RowsCreated);
}
/// <summary>
/// Queues alert emails (when an alert queue is configured and the datajob's alert level
/// permits) based on the writer's run status, then forwards the status update to any
/// OnDatajobStatusUpdate subscribers.
/// </summary>
/// <param name="writer">The writer result carrying the run status and message.</param>
public void Datajob_OnStatusUpdate(TransformWriterResult writer)
{
    if (_alertQueue != null && Datajob.AlertLevel != EAlertLevel.None)
    {
        // Queues a single alert to the configured recipients.
        void QueueAlert(string subject, string body)
        {
            _alertQueue.Add(new Alert()
            {
                Emails = _alertEmails,
                Subject = subject,
                Body = body
            });
        }

        switch (writer.RunStatus)
        {
            case ERunStatus.Started:
                if (Datajob.AlertLevel == EAlertLevel.All)
                {
                    QueueAlert(
                        $"Datajob {Datajob.Name} has started.",
                        $"The datajob {Datajob.Name} started at {DateTime.Now}");
                }
                break;
            case ERunStatus.Finished:
                if (Datajob.AlertLevel == EAlertLevel.All)
                {
                    QueueAlert(
                        $"Datajob {Datajob.Name} finished successfully.",
                        $"The datajob {Datajob.Name} finished successfully at {DateTime.Now}");
                }
                break;
            case ERunStatus.FinishedErrors:
                if (Datajob.AlertLevel == EAlertLevel.Errors || Datajob.AlertLevel == EAlertLevel.Critical)
                {
                    QueueAlert(
                        $"Datajob {Datajob.Name} finished with some errors.",
                        $"The datajob {Datajob.Name} finished with some errors at {DateTime.Now}.\n\n{writer.Message}");
                }
                break;
            case ERunStatus.Abended:
            case ERunStatus.Cancelled:
            case ERunStatus.Failed:
                // The outer guard already ensures AlertLevel != None, so terminal
                // failure statuses always raise an alert.
                QueueAlert(
                    $"Datajob {Datajob.Name} finished with status {writer.RunStatus}.",
                    $"The datajob {Datajob.Name} finished with status {writer.RunStatus} at {DateTime.Now}.\n\n{writer.Message}");
                break;
        }
    }

    OnDatajobStatusUpdate?.Invoke(writer);
}
/// <summary>
/// Relays a writer progress update to any OnDatajobProgressUpdate subscribers.
/// </summary>
public void Datajob_OnProgressUpdate(TransformWriterResult writer) => OnDatajobProgressUpdate?.Invoke(writer);