// ReSharper disable InconsistentNaming
public void AbstractDataBaseBroker_GetServiceMethods_InvokesDbServerFetchStoredProcedures_Done()
// ReSharper restore InconsistentNaming
{
    //------------Setup for test--------------------------
    var dbSource = new DbSource();

    var procedureCommand = new SqlCommand("procedureCommand");
    procedureCommand.Parameters.Add(new SqlParameter("@p1", "proc"));

    var functionCommand = new SqlCommand("functionCommand");
    functionCommand.Parameters.Add(new SqlParameter("@f1", "func"));

    var dbServer = new Mock<TestDbServer>();
    dbServer.Setup(s => s.Connect(It.IsAny<string>())).Verifiable();
    dbServer.Setup(s => s.FetchStoredProcedures(
            It.IsAny<Func<IDbCommand, List<IDbDataParameter>, string, string, bool>>(),
            It.IsAny<Func<IDbCommand, List<IDbDataParameter>, string, string, bool>>(),
            It.IsAny<bool>()))
        .Callback((
            Func<IDbCommand, List<IDbDataParameter>, string, string, bool> procedureProcessor,
            Func<IDbCommand, List<IDbDataParameter>, string, string, bool> functionProcessor,
            bool continueOnProcessorException) =>
        {
            procedureProcessor(procedureCommand, procedureCommand.Parameters.Cast<IDbDataParameter>().ToList(), "procHelpText", "");
            functionProcessor(functionCommand, functionCommand.Parameters.Cast<IDbDataParameter>().ToList(), "funcHelpText", "");
        })
        .Verifiable();

    var broker = new TestDatabaseBroker(dbServer.Object);

    //------------Execute Test---------------------------
    var result = broker.GetServiceMethods(dbSource);

    //------------Assert Results-------------------------
    dbServer.Verify(s => s.Connect(It.IsAny<string>()));
    dbServer.Verify(s => s.FetchStoredProcedures(
        It.IsAny<Func<IDbCommand, List<IDbDataParameter>, string, string, bool>>(),
        It.IsAny<Func<IDbCommand, List<IDbDataParameter>, string, string, bool>>(),
        It.IsAny<bool>()));

    Assert.IsNotNull(result);
    Assert.AreEqual(2, result.Count);

    for (var i = 0; i < result.Count; i++)
    {
        var command = i == 0 ? procedureCommand : functionCommand;
        var serviceMethod = result[i];
        Assert.AreEqual(command.CommandText, serviceMethod.Name);

        for (var j = 0; j < serviceMethod.Parameters.Count; j++)
        {
            var methodParam = serviceMethod.Parameters[j];
            var commandParam = (IDbDataParameter)command.Parameters[j];
            Assert.AreEqual(commandParam.ParameterName, "@" + methodParam.Name);
        }
    }
}
public void ReadDifferentTypes(IConnectionManager conn)
{
    //Arrange
    CreateTableTask.Create(conn, "different_type_table", new List<TableColumn>()
    {
        new TableColumn("int_col", "INT", allowNulls: true),
        new TableColumn("long_col", "BIGINT", allowNulls: true),
        new TableColumn("decimal_col", "FLOAT", allowNulls: true),
        new TableColumn("double_col", "FLOAT", allowNulls: true),
        new TableColumn("datetime_col", "DATETIME", allowNulls: true),
        new TableColumn("date_col", "DATE", allowNulls: true),
        new TableColumn("string_col", "VARCHAR(200)", allowNulls: true),
        new TableColumn("char_col", "CHAR(1)", allowNulls: true),
        new TableColumn("decimal_string_col", "DECIMAL(12,10)", allowNulls: true),
        new TableColumn("null_col", "CHAR(1)", allowNulls: true),
        new TableColumn("enum_col", "INT", allowNulls: true),
    });

    string datetime = "'2020-01-01 10:00:00.000'";
    string date = "'2020-01-01'";
    if (conn.GetType() == typeof(OracleConnectionManager))
    {
        datetime = "TO_TIMESTAMP('01-JAN-2020 10:00:00')";
        date = "'01 JAN 2020'";
    }
    SqlTask.ExecuteNonQuery(conn, "Insert test data",
        $@"INSERT INTO {conn.QB}different_type_table{conn.QE}
            ( {conn.QB}int_col{conn.QE}, {conn.QB}long_col{conn.QE}, {conn.QB}decimal_col{conn.QE}, {conn.QB}double_col{conn.QE},
              {conn.QB}datetime_col{conn.QE}, {conn.QB}date_col{conn.QE}, {conn.QB}string_col{conn.QE}, {conn.QB}char_col{conn.QE},
              {conn.QB}decimal_string_col{conn.QE}, {conn.QB}null_col{conn.QE}, {conn.QB}enum_col{conn.QE} )
            VALUES ( 1, -1, 2.3, 5.4, {datetime}, {date}, 'Test', 'T', '13.4566', NULL, 2 )");

    //Act
    DbSource<MyDataTypeRow> source = new DbSource<MyDataTypeRow>(conn, "different_type_table");
    MemoryDestination<MyDataTypeRow> dest = new MemoryDestination<MyDataTypeRow>();
    source.LinkTo(dest);
    source.Execute();
    dest.Wait();

    //Assert
    Assert.Equal(1, dest.Data.First().IntCol);
    Assert.Equal(-1, dest.Data.First().LongCol);
    Assert.Equal(2.3M, dest.Data.First().DecimalCol);
    Assert.True(dest.Data.First().DoubleCol >= 5.4 && dest.Data.First().DoubleCol < 5.5);
    Assert.Equal("2020-01-01 10:00:00.000", dest.Data.First().DateTimeCol.ToString("yyyy-MM-dd hh:mm:ss.fff"));
    Assert.Equal("2020-01-01", dest.Data.First().DateCol.ToString("yyyy-MM-dd"));
    Assert.Equal("Test", dest.Data.First().StringCol);
    Assert.Equal('T', dest.Data.First().CharCol);
    Assert.StartsWith("13.4566", dest.Data.First().DecimalStringCol);
    Assert.Null(dest.Data.First().NullCol);
    Assert.Equal(EnumType.Value2, dest.Data.First().EnumCol);
}
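The strongly typed DbSource<MyDataTypeRow> above implies a POCO whose properties line up with the columns created in the test. The class below is only a plausible sketch: the property names and EnumType.Value2 come from the assertions, while the exact types, the Value1 member, and the use of ETLBox's ColumnMap attribute to bridge the snake_case column names are assumptions.

// Assumed shape of the row type used by ReadDifferentTypes.
public enum EnumType
{
    Value1 = 1,   // assumed; only Value2 is referenced by the test
    Value2 = 2
}

public class MyDataTypeRow
{
    [ColumnMap("int_col")]            public int IntCol { get; set; }
    [ColumnMap("long_col")]           public long LongCol { get; set; }
    [ColumnMap("decimal_col")]        public decimal DecimalCol { get; set; }
    [ColumnMap("double_col")]         public double DoubleCol { get; set; }
    [ColumnMap("datetime_col")]       public DateTime DateTimeCol { get; set; }
    [ColumnMap("date_col")]           public DateTime DateCol { get; set; }
    [ColumnMap("string_col")]         public string StringCol { get; set; }
    [ColumnMap("char_col")]           public char CharCol { get; set; }
    [ColumnMap("decimal_string_col")] public string DecimalStringCol { get; set; }
    [ColumnMap("null_col")]           public string NullCol { get; set; }
    [ColumnMap("enum_col")]           public EnumType EnumCol { get; set; }
}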
public virtual void UpdateServiceOutParameters(DbService service, DbSource dbSource) { }
void DoInsertForMySql(DbSource runtimeDatabase, SqlBulkCopyOptions currentOptions, IWarewolfListIterator parametersIteratorCollection, IWarewolfIterator batchItr, IWarewolfIterator timeoutItr, IDSFDataObject dataObject, ErrorResultTO errorResultTo, ErrorResultTO allErrors, ref bool addExceptionToErrorList)
{
    MySqlBulkLoader sqlBulkCopy = new MySqlBulkLoader(new MySqlConnection(runtimeDatabase.ConnectionString));
    TableName = TableName.Replace("[", "").Replace("]", "");
    if (TableName.Contains("."))
    {
        TableName = TableName.Substring(TableName.IndexOf(".", StringComparison.Ordinal) + 1);
    }
    if (String.IsNullOrEmpty(BatchSize) && String.IsNullOrEmpty(Timeout))
    {
        sqlBulkCopy = new MySqlBulkLoader(new MySqlConnection(runtimeDatabase.ConnectionString))
        {
            TableName = TableName,
            FieldTerminator = ",",
            LineTerminator = "\n"
        };
    }
    else
    {
        while (parametersIteratorCollection.HasMoreData())
        {
            sqlBulkCopy = SetupMySqlBulkCopy(batchItr, parametersIteratorCollection, timeoutItr, runtimeDatabase, currentOptions);
        }
    }
    if (sqlBulkCopy != null)
    {
        var dataTableToInsert = BuildDataTableToInsertMySql();
        if (InputMappings != null && InputMappings.Count > 0)
        {
            var iteratorCollection = new WarewolfListIterator();
            var listOfIterators = GetIteratorsFromInputMappings(dataObject, iteratorCollection, out errorResultTo);
            allErrors.MergeErrors(errorResultTo);

            // oh no, we have an issue, bubble it out ;)
            if (allErrors.HasErrors())
            {
                addExceptionToErrorList = false;
                throw new Exception("Problems with Iterators for SQLBulkInsert");
            }

            // emit options to debug as per acceptance test ;)
            if (dataObject.IsDebugMode())
            {
                AddBatchSizeAndTimeOutToDebug(dataObject.Environment);
                AddOptionsDebugItems();
            }

            FillDataTableWithDataFromDataList(iteratorCollection, dataTableToInsert, listOfIterators);
            foreach (var dataColumnMapping in InputMappings)
            {
                if (!String.IsNullOrEmpty(dataColumnMapping.InputColumn))
                {
                    sqlBulkCopy.Columns.Add(dataColumnMapping.OutputColumn.ColumnName);
                }
            }
        }

        // Pass in wrapper now ;)
        var wrapper = new MySqlBulkCopyWrapper(sqlBulkCopy);
        var inserted = SqlBulkInserter.Insert(wrapper, dataTableToInsert);
        var resultString = inserted ? "Success" : "Failure";
        dataObject.Environment.Assign(Result, resultString);
        if (dataObject.IsDebugMode())
        {
            AddDebugOutputItem(new DebugItemWarewolfAtomResult(resultString, Result, ""));
        }
        allErrors.MergeErrors(errorResultTo);
        if (dataTableToInsert != null)
        {
            dataTableToInsert.Dispose();
        }
    }
}
public MySqlBulkLoader SetupMySqlBulkCopy(IWarewolfIterator batchItr, IWarewolfListIterator parametersIteratorCollection, IWarewolfIterator timeoutItr, DbSource runtimeDatabase, SqlBulkCopyOptions copyOptions)
{
    var batchSize = -1;
    var timeout = -1;
    GetParameterValuesForBatchSizeAndTimeOut(batchItr, parametersIteratorCollection, timeoutItr, ref batchSize, ref timeout);

    // Note: unlike the SqlBulkCopy variant below, the resolved batchSize and
    // timeout values are not applied to the MySqlBulkLoader that is returned.
    var sqlBulkCopy = new MySqlBulkLoader(new MySqlConnection(runtimeDatabase.ConnectionString))
    {
        TableName = TableName,
        FieldTerminator = ",",
        LineTerminator = "\n"
    };
    return sqlBulkCopy;
}
protected override TestDbServer CreateDbServer(DbSource dbSource)
{
    return DbServer ?? (DbServer = base.CreateDbServer(dbSource));
}
public static async Task Run([TimerTrigger("0 */1 * * * *"
    //, RunOnStartup=true) //only for testing purposes
    )] TimerInfo myTimer, ILogger log)
{
    Logging.LogInstance = log;
    string sqlConnectionString = Environment.GetEnvironmentVariable("SqlServerConnectionString", EnvironmentVariableTarget.Process);
    string storageConnString = Environment.GetEnvironmentVariable("AzureWebJobsStorage", EnvironmentVariableTarget.Process);
    SqlConnectionManager conn = new SqlConnectionManager(sqlConnectionString);

    if (!DemoHelper.WasInitialized)
    {
        containerName = DemoHelper.PrepareForDemo(storageConnString, conn);
    }

    SyncData syncDataLastRun = ReadLastSyncKey();

    var parameter = new[] { new QueryParameter() { Name = "syncId", Value = syncDataLastRun.SyncId } };
    var dbSource = new DbSource<Order>()
    {
        ConnectionManager = conn,
        Sql = $"SELECT Id, Number, Details, Date FROM Orders WHERE Id > @syncId ORDER BY Date",
        SqlParameter = parameter
    };

    var jsonDest = new JsonDestination<Order>();
    jsonDest.ResourceType = ResourceType.AzureBlob;
    jsonDest.AzureBlobStorage.ConnectionString = storageConnString;
    jsonDest.AzureBlobStorage.ContainerName = containerName;
    var currentDate = new DateTime(1900, 1, 1);
    jsonDest.HasNextUri = (_, order) =>
    {
        if (order.Date.Date > currentDate.Date)
        {
            currentDate = order.Date;
            return true;
        }
        return false;
    };
    jsonDest.GetNextUri = (_, order) => "OrderData_" + order.Date.ToString("yyyy-MM-dd") + ".json";

    var multicast = new Multicast<Order>();

    var aggregation = new Aggregation<Order, SyncData>();
    aggregation.AggregateColumns = new[]
    {
        new AggregateColumn()
        {
            InputValuePropName = "Id",
            AggregatedValuePropName = "SyncId",
            AggregationMethod = AggregationMethod.Max
        }
    };
    var syncMemoryDest = new MemoryDestination<SyncData>();

    /*
     *                      |---> jsonDest ("OrderData_2020-01-01.json", "OrderData_2020-01-02.json", ..)
     *                      |
     * dbSource --> multicast
     *                      |
     *                      |---> aggregation --> syncMemoryDest (1st run: SyncId = 5, 2nd run: SyncId = 7)
     */
    dbSource.LinkTo(multicast);
    multicast.LinkTo(jsonDest);
    multicast.LinkTo(aggregation);
    aggregation.LinkTo(syncMemoryDest);
    Network.Execute(dbSource);

    if (syncMemoryDest.Data.Count > 0)
    {
        SyncData syncDataThisRun = syncMemoryDest.Data.First();
        StoreLastSyncKey(syncDataThisRun);
    }
}
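The incremental-sync function above reads Order rows and aggregates the highest Id into a SyncData record. A plausible sketch of those two types follows; the property names are inferred from the SQL statement and the AggregateColumn mapping, while the data types are assumptions.

// Assumed shapes of the transfer objects used by the timer function.
public class Order
{
    public long Id { get; set; }
    public string Number { get; set; }
    public string Details { get; set; }
    public DateTime Date { get; set; }
}

public class SyncData
{
    // Receives MAX(Id) of the orders processed in this run
    // (see the AggregateColumn mapping Id -> SyncId above).
    public long SyncId { get; set; }
}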
protected override PostgreServer CreateDbServer(DbSource dbSource) => new PostgreServer();
public void WithObject()
{
    //Arrange
    TwoColumnsTableFixture dest2Columns = new TwoColumnsTableFixture(SqlConnection, "LookupErrorLinkingDest");
    CreateSourceTable(SqlConnection, "LookupErrorLinkingSource");
    DbSource<MyLookupRow> lookupSource = new DbSource<MyLookupRow>(SqlConnection, "LookupErrorLinkingSource");

    MemorySource<MyInputDataRow> source = new MemorySource<MyInputDataRow>();
    source.DataAsList = new List<MyInputDataRow>()
    {
        new MyInputDataRow() { Col1 = 1 },
        new MyInputDataRow() { Col1 = 2 },
        new MyInputDataRow() { Col1 = 3 },
        new MyInputDataRow() { Col1 = 4 }
    };
    MemoryDestination<ETLBoxError> errorDest = new MemoryDestination<ETLBoxError>();

    //Act
    List<MyLookupRow> LookupTableData = new List<MyLookupRow>();
    LookupTransformation<MyInputDataRow, MyLookupRow> lookup = new LookupTransformation<MyInputDataRow, MyLookupRow>(
        lookupSource,
        row =>
        {
            row.Col2 = LookupTableData.Where(ld => ld.Key == row.Col1).Select(ld => ld.LookupValue).FirstOrDefault();
            if (row.Col1 == 4)
            {
                throw new Exception("Error record");
            }
            return row;
        },
        LookupTableData
    );
    DbDestination<MyInputDataRow> dest = new DbDestination<MyInputDataRow>(SqlConnection, "LookupErrorLinkingDest");
    source.LinkTo(lookup);
    lookup.LinkTo(dest);
    lookup.LinkLookupSourceErrorTo(errorDest);
    lookup.LinkLookupTransformationErrorTo(errorDest);
    source.Execute();
    dest.Wait();
    errorDest.Wait();

    //Assert
    dest2Columns.AssertTestData();
    Assert.Collection<ETLBoxError>(errorDest.Data,
        d => Assert.True(!string.IsNullOrEmpty(d.RecordAsJson) && !string.IsNullOrEmpty(d.ErrorText)),
        d => Assert.True(!string.IsNullOrEmpty(d.RecordAsJson) && !string.IsNullOrEmpty(d.ErrorText))
    );
}
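A minimal sketch of the row types assumed by the lookup test above; the member names (Col1, Col2, Key, LookupValue) come from the test body, while the data types are assumptions.

// Assumed input and lookup row shapes for the error-linking lookup test.
public class MyInputDataRow
{
    public int Col1 { get; set; }
    public string Col2 { get; set; }
}

public class MyLookupRow
{
    public int Key { get; set; }
    public string LookupValue { get; set; }
}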
protected override MySqlServer CreateDbServer(DbSource dbSource)
{
    return new MySqlServer();
}
/// <summary>
/// Executes the service
/// </summary>
/// <param name="values">The values.</param>
/// <param name="theWorkspace">The workspace.</param>
/// <returns></returns>
public StringBuilder Execute(Dictionary<string, StringBuilder> values, IWorkspace theWorkspace)
{
    Dev2JsonSerializer serializer = new Dev2JsonSerializer();
    if (values == null)
    {
        throw new InvalidDataContractException("No parameter values provided.");
    }
    string database = null;
    StringBuilder tmp;
    values.TryGetValue("Database", out tmp);
    if (tmp != null)
    {
        database = tmp.ToString();
    }

    if (string.IsNullOrEmpty(database))
    {
        var res = new DbTableList("No database set.");
        Dev2Logger.Log.Debug("No database set.");
        return serializer.SerializeToBuilder(res);
    }

    DbSource dbSource;
    DbSource runtimeDbSource = null;
    try
    {
        dbSource = serializer.Deserialize<DbSource>(database);
        if (dbSource.ResourceID != Guid.Empty)
        {
            runtimeDbSource = ResourceCatalog.Instance.GetResource<DbSource>(theWorkspace.ID, dbSource.ResourceID);
        }
    }
    catch (Exception e)
    {
        Dev2Logger.Log.Error(e);
        var res = new DbTableList("Invalid JSON data for Database parameter. Exception: {0}", e.Message);
        return serializer.SerializeToBuilder(res);
    }
    if (runtimeDbSource == null)
    {
        var res = new DbTableList("Invalid Database source");
        Dev2Logger.Log.Debug("Invalid Database source");
        return serializer.SerializeToBuilder(res);
    }
    if (string.IsNullOrEmpty(runtimeDbSource.DatabaseName) || string.IsNullOrEmpty(runtimeDbSource.Server))
    {
        var res = new DbTableList("Invalid database sent {0}.", database);
        Dev2Logger.Log.Debug(String.Format("Invalid database sent {0}.", database));
        return serializer.SerializeToBuilder(res);
    }

    try
    {
        Dev2Logger.Log.Info("Get Database Tables. " + dbSource.DatabaseName);

        var tables = new DbTableList();
        DataTable columnInfo;
        using (var connection = new SqlConnection(dbSource.ConnectionString))
        {
            connection.Open();
            columnInfo = connection.GetSchema("Tables");
        }
        if (columnInfo != null)
        {
            foreach (DataRow row in columnInfo.Rows)
            {
                var tableName = row["TABLE_NAME"] as string;
                var schema = row["TABLE_SCHEMA"] as string;
                tableName = '[' + tableName + ']';
                var dbTable = tables.Items.Find(table => table.TableName == tableName && table.Schema == schema);
                if (dbTable == null)
                {
                    dbTable = new DbTable { Schema = schema, TableName = tableName, Columns = new List<IDbColumn>() };
                    tables.Items.Add(dbTable);
                }
            }
        }
        if (tables.Items.Count == 0)
        {
            tables.HasErrors = true;
            const string ErrorFormat = "The login provided in the database source uses {0} and most probably does not have permissions to perform the following query: " +
                                       "\r\n\r\n{1}SELECT * FROM INFORMATION_SCHEMA.TABLES;{2}";
            if (dbSource.AuthenticationType == AuthenticationType.User)
            {
                tables.Errors = string.Format(ErrorFormat,
                    "SQL Authentication (User: '******')",
                    "EXECUTE AS USER = '******';\r\n",
                    "\r\nREVERT;");
            }
            else
            {
                tables.Errors = string.Format(ErrorFormat, "Windows Authentication", "", "");
            }
        }
        return serializer.SerializeToBuilder(tables);
    }
    catch (Exception ex)
    {
        var tables = new DbTableList(ex);
        return serializer.SerializeToBuilder(tables);
    }
}
public void DbService_ToXml_WhenRecordSetHasBlankFields_ExpectNotPartOfOutputDescription()
{
    //------------Setup for test--------------------------
    var dbService = new DbService();
    var dbSource = new DbSource { ResourceName = "Source" };
    var resourceId = Guid.NewGuid();
    dbSource.ResourceID = resourceId;
    dbService.Source = dbSource;
    var serviceMethod = new ServiceMethod { Name = "Method" };
    dbService.Method = serviceMethod;
    var recordset = new Recordset { Name = "SomeRecSet" };
    var recordsetField = new RecordsetField { Alias = "SomeAlias", Name = "" };
    recordset.Fields.Add(recordsetField);
    dbService.Recordset = recordset;

    const string expected = @"<Service ID=""00000000-0000-0000-0000-000000000000"" Name="""" ResourceType=""DbService"" IsValid=""false"">
  <Actions>
    <Action Name=""SomeRecSet"" Type=""InvokeStoredProc"" SourceID=""{0}"" SourceName=""Source"" ExecuteAction="""" SourceMethod=""Method"">
      <Inputs />
      <Outputs />
      <OutputDescription><![CDATA[<z:anyType xmlns:i=""http://www.w3.org/2001/XMLSchema-instance"" xmlns:d1p1=""http://schemas.datacontract.org/2004/07/Unlimited.Framework.Converters.Graph.Ouput"" i:type=""d1p1:OutputDescription"" xmlns:z=""http://schemas.microsoft.com/2003/10/Serialization/""><d1p1:DataSourceShapes xmlns:d2p1=""http://schemas.microsoft.com/2003/10/Serialization/Arrays""><d2p1:anyType i:type=""d1p1:DataSourceShape""><d1p1:_x003C_Paths_x003E_k__BackingField /></d2p1:anyType></d1p1:DataSourceShapes><d1p1:Format>ShapedXML</d1p1:Format></z:anyType>]]></OutputDescription>
    </Action>
  </Actions>
  <AuthorRoles />
  <Comment />
  <Tags />
  <HelpLink />
  <UnitTestTargetWorkflowService />
  <BizRule />
  <WorkflowActivityDef />
  <XamlDefinition />
  <DataList />
  <TypeOf>InvokeStoredProc</TypeOf>
  <DisplayName></DisplayName>
  <AuthorRoles></AuthorRoles>
  <ErrorMessages />
</Service>";

    //------------Execute Test---------------------------
    var xElement = dbService.ToXml();

    //------------Assert Results-------------------------
    var expectedResult = string.Format(expected, resourceId);
    var actual = xElement.ToString();
    FixBreaks(ref expectedResult, ref actual);
    Assert.AreEqual(expectedResult, actual);
}
protected override ODBCServer CreateDbServer(DbSource dbSource) => new ODBCServer();
public virtual DatabaseValidationResult DoDatabaseValidation(DbSource dbSourceDetails)
{
    var result = new DatabaseValidationResult();

    switch (dbSourceDetails.ServerType)
    {
        case enSourceType.SqlDatabase:
            var broker = CreateDatabaseBroker();
            result.DatabaseList = broker.GetDatabases(dbSourceDetails);
            break;
        case enSourceType.MySqlDatabase:
            var mybroker = CreateMySqlDatabaseBroker();
            result.DatabaseList = mybroker.GetDatabases(dbSourceDetails);
            break;
        case enSourceType.Oracle:
            var obroker = CreateOracleDatabaseBroker();
            result.DatabaseList = obroker.GetDatabases(dbSourceDetails);
            break;
        case enSourceType.ODBC:
            var odbcbroker = CreateODBCDatabaseBroker();
            result.DatabaseList = odbcbroker.GetDatabases(dbSourceDetails);
            break;
        case enSourceType.PostgreSQL:
            var postgreBroker = CreatePostgreDatabaseBroker();
            result.DatabaseList = postgreBroker.GetDatabases(dbSourceDetails);
            break;
        case enSourceType.SQLiteDatabase:
            var sqliteBroker = CreateSqliteDatabaseBroker();
            result.DatabaseList = sqliteBroker.GetDatabases(dbSourceDetails);
            break;
        case enSourceType.WebService:
        case enSourceType.DynamicService:
        case enSourceType.ManagementDynamicService:
        case enSourceType.PluginSource:
        case enSourceType.Unknown:
        case enSourceType.Dev2Server:
        case enSourceType.EmailSource:
        case enSourceType.WebSource:
        case enSourceType.OauthSource:
        case enSourceType.SharepointServerSource:
        case enSourceType.RabbitMQSource:
        case enSourceType.ExchangeSource:
        case enSourceType.WcfSource:
        case enSourceType.ComPluginSource:
            break;
        default:
            result.IsValid = false;
            break;
    }
    return result;
}
public DapperFactory(string name, DbSource source) : base(name, source) { }
public static DsfOracleDatabaseActivity GetDsfOracleDatabaseActivity(DsfDatabaseActivity dbActivity, DbService service, DbSource source)
{
    var dsfOracleDatabaseActivity = new DsfOracleDatabaseActivity
    {
        ResourceID = dbActivity.ResourceID,
        SourceId = source.ResourceID,
        ProcedureName = service.Method.ExecuteAction,
        Inputs = TranslateInputMappingToInputs(dbActivity.InputMapping),
        Outputs = TranslateOutputMappingToOutputs(dbActivity.OutputMapping),
        ToolboxFriendlyName = dbActivity.ToolboxFriendlyName,
        IconPath = dbActivity.IconPath,
        ServiceName = dbActivity.ServiceName,
        DataTags = dbActivity.DataTags,
        ResultValidationRequiredTags = dbActivity.ResultValidationRequiredTags,
        ResultValidationExpression = dbActivity.ResultValidationExpression,
        FriendlySourceName = dbActivity.FriendlySourceName,
        EnvironmentID = dbActivity.EnvironmentID,
        Type = dbActivity.Type,
        ActionName = dbActivity.ActionName,
        RunWorkflowAsync = dbActivity.RunWorkflowAsync,
        Category = dbActivity.Category,
        ServiceUri = dbActivity.ServiceUri,
        ServiceServer = dbActivity.ServiceServer,
        UniqueID = dbActivity.UniqueID,
        ParentServiceName = dbActivity.ParentServiceName,
        ParentServiceID = dbActivity.ParentServiceID,
        ParentWorkflowInstanceId = dbActivity.ParentWorkflowInstanceId,
        ParentInstanceID = dbActivity.ParentInstanceID,
    };
    return dsfOracleDatabaseActivity;
}
public DbCommand(DbSource dbSource) { _dbSource = dbSource; }
protected override OracleServer CreateDbServer(DbSource dbSource) => new OracleServer();
protected virtual TDbServer CreateDbServer(DbSource dbSource) => new TDbServer();
DbTableList GetDatabaseTables(DbSource dbSource)
{
    var tables = _server.ResourceRepository.GetDatabaseTables(dbSource);
    return tables ?? EmptyDbTables;
}
void DoInsertForSqlServer(DbSource runtimeDatabase, SqlBulkCopyOptions currentOptions, IDSFDataObject dataObject, ErrorResultTO allErrors, IWarewolfIterator batchItr, IWarewolfListIterator parametersIteratorCollection, IWarewolfIterator timeoutItr, ref ErrorResultTO errorResultTo, ref bool addExceptionToErrorList)
{
    SqlBulkCopy sqlBulkCopy;
    if (String.IsNullOrEmpty(BatchSize) && String.IsNullOrEmpty(Timeout))
    {
        sqlBulkCopy = new SqlBulkCopy(runtimeDatabase.ConnectionString, currentOptions) { DestinationTableName = TableName };
    }
    else
    {
        sqlBulkCopy = SetupSqlBulkCopy(batchItr, parametersIteratorCollection, timeoutItr, runtimeDatabase, currentOptions);
    }
    if (sqlBulkCopy != null)
    {
        var dataTableToInsert = BuildDataTableToInsert();
        var types = GETTypesFromMappingTypes();
        var columns = GetNamesFromMappings();
        if (InputMappings != null && InputMappings.Count > 0)
        {
            var iteratorCollection = new WarewolfListIterator();
            var listOfIterators = GetIteratorsFromInputMappings(dataObject, iteratorCollection, out errorResultTo);
            iteratorCollection.Types = types;
            iteratorCollection.Names = columns;
            allErrors.MergeErrors(errorResultTo);
            FillDataTableWithDataFromDataList(iteratorCollection, dataTableToInsert, listOfIterators);

            // oh no, we have an issue, bubble it out ;)
            if (allErrors.HasErrors())
            {
                addExceptionToErrorList = false;
                throw new Exception("Problems with Iterators for SQLBulkInsert");
            }

            // emit options to debug as per acceptance test ;)
            if (dataObject.IsDebugMode())
            {
                AddBatchSizeAndTimeOutToDebug(dataObject.Environment);
                AddOptionsDebugItems();
            }

            if (InputMappings != null)
            {
                foreach (var dataColumnMapping in InputMappings)
                {
                    if (!String.IsNullOrEmpty(dataColumnMapping.InputColumn))
                    {
                        sqlBulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping(dataColumnMapping.OutputColumn.ColumnName, dataColumnMapping.OutputColumn.ColumnName));
                    }
                }
            }
            var wrapper = new SqlBulkCopyWrapper(sqlBulkCopy);
            SqlBulkInserter.Insert(wrapper, dataTableToInsert);
            dataObject.Environment.Assign(Result, "Success");
            if (dataObject.IsDebugMode())
            {
                AddDebugOutputItem(new DebugItemWarewolfAtomResult("Success", Result, ""));
            }
        }
        if (dataTableToInsert != null)
        {
            dataTableToInsert.Dispose();
        }
    }
}
IDbColumnList GetDatabaseTableColumns(DbSource dbSource, DbTable dbTable)
{
    var columns = _server.ResourceRepository.GetDatabaseTableColumns(dbSource, dbTable);
    return columns ?? EmptyDbColumns;
}
public SqlBulkCopy SetupSqlBulkCopy(IWarewolfIterator batchItr, IWarewolfListIterator parametersIteratorCollection, IWarewolfIterator timeoutItr, DbSource runtimeDatabase, SqlBulkCopyOptions copyOptions)
{
    var batchSize = -1;
    var timeout = -1;
    GetParameterValuesForBatchSizeAndTimeOut(batchItr, parametersIteratorCollection, timeoutItr, ref batchSize, ref timeout);
    var sqlBulkCopy = new SqlBulkCopy(runtimeDatabase.ConnectionString, copyOptions) { DestinationTableName = TableName };
    if (batchSize != -1)
    {
        sqlBulkCopy.BatchSize = batchSize;
    }
    if (timeout != -1)
    {
        sqlBulkCopy.BulkCopyTimeout = timeout;
    }
    return sqlBulkCopy;
}
public PlayerDatabase(DbSource source)
{
    players = new List<PlayerInfo>();
    ReadDatabaseFromFile(source);
}
private void SetSourceReleaseDate()
{
    var dbSource = new DbSource(SourceConnectionString, null, SourceSchemaName);
    SourceReleaseDate = dbSource.GetSourceReleaseDate();
}
DbTableList GetDatabaseTables(DbSource dbSource)
{
    var tables = _environmentModel.ResourceRepository.GetDatabaseTables(dbSource);
    return tables ?? EmptyDbTables;
}
private Recordset FetchDbSourceRecordset(ref DbService dbService, bool addFields, DbSource source)
{
    switch (source.ServerType)
    {
        case enSourceType.SqlDatabase:
        {
            var broker = CreateDatabaseBroker();
            var outputDescription = broker.TestService(dbService);
            if (outputDescription?.DataSourceShapes == null || outputDescription.DataSourceShapes.Count == 0)
            {
                throw new Exception(ErrorResource.ErrorRetrievingShapeFromServiceOutput);
            }
            if (dbService.Recordset != null)
            {
                dbService.Recordset.Name = dbService.Method.ExecuteAction;
                if (dbService.Recordset.Name != null)
                {
                    dbService.Recordset.Name = dbService.Recordset.Name.Replace(".", "_");
                }
                dbService.Recordset.Fields.Clear();
                var smh = new ServiceMappingHelper();
                smh.MapDbOutputs(outputDescription, ref dbService, addFields);
            }
            return dbService.Recordset;
        }
        case enSourceType.MySqlDatabase:
        {
            var broker = new MySqlDatabaseBroker();
            var outputDescription = broker.TestService(dbService);
            if (outputDescription?.DataSourceShapes == null || outputDescription.DataSourceShapes.Count == 0)
            {
                throw new Exception(ErrorResource.ErrorRetrievingShapeFromServiceOutput);
            }
            dbService.Recordset.Fields.Clear();
            var smh = new ServiceMappingHelper();
            smh.MySqlMapDbOutputs(outputDescription, ref dbService, addFields);
            return dbService.Recordset;
        }
        case enSourceType.SQLiteDatabase:
        {
            var broker = new SqliteDatabaseBroker();
            var outputDescription = broker.TestService(dbService);
            if (outputDescription?.DataSourceShapes == null || outputDescription.DataSourceShapes.Count == 0)
            {
                throw new Exception(ErrorResource.ErrorRetrievingShapeFromServiceOutput);
            }
            dbService.Recordset.Fields.Clear();
            var smh = new ServiceMappingHelper();
            smh.MySqlMapDbOutputs(outputDescription, ref dbService, addFields);
            return dbService.Recordset;
        }
        case enSourceType.PostgreSQL:
        {
            var broker = new PostgreSqlDataBaseBroker();
            var outputDescription = broker.TestService(dbService);
            if (outputDescription?.DataSourceShapes == null || outputDescription.DataSourceShapes.Count == 0)
            {
                throw new Exception(ErrorResource.ErrorRetrievingShapeFromServiceOutput);
            }
            dbService.Recordset.Fields.Clear();
            var smh = new ServiceMappingHelper();
            smh.MySqlMapDbOutputs(outputDescription, ref dbService, addFields);
            return dbService.Recordset;
        }
        case enSourceType.Oracle:
        {
            var broker = new OracleDatabaseBroker();
            var outputDescription = broker.TestService(dbService);
            if (outputDescription?.DataSourceShapes == null || outputDescription.DataSourceShapes.Count == 0)
            {
                throw new Exception(ErrorResource.ErrorRetrievingShapeFromServiceOutput);
            }
            dbService.Recordset.Fields.Clear();
            var smh = new ServiceMappingHelper();
            smh.MapDbOutputs(outputDescription, ref dbService, addFields);
            return dbService.Recordset;
        }
        case enSourceType.ODBC:
        {
            var broker = new ODBCDatabaseBroker();
            var outputDescription = broker.TestService(dbService);
            if (outputDescription?.DataSourceShapes == null || outputDescription.DataSourceShapes.Count == 0)
            {
                throw new Exception(ErrorResource.ErrorRetrievingShapeFromServiceOutput);
            }
            dbService.Recordset.Fields.Clear();
            var smh = new ServiceMappingHelper();
            smh.MapDbOutputs(outputDescription, ref dbService, addFields);
            dbService.Recordset.Name = @"Unnamed";
            return dbService.Recordset;
        }
        default:
            return null;
    }
}
IDbColumnList GetDatabaseTableColumns(DbSource dbSource, DbTable dbTable)
{
    var columns = _environmentModel.ResourceRepository.GetDatabaseTableColumns(dbSource, dbTable);
    return columns ?? EmptyDbColumns;
}
protected virtual TDbServer CreateDbServer(DbSource dbSource)
{
    return new TDbServer();
}
public override StringBuilder Execute(Dictionary<string, StringBuilder> values, IWorkspace theWorkspace)
{
    var serializer = new Dev2JsonSerializer();
    if (values == null)
    {
        throw new InvalidDataContractException(ErrorResource.NoParameter);
    }
    string database = null;
    values.TryGetValue("Database", out StringBuilder tmp);
    if (tmp != null)
    {
        database = tmp.ToString();
    }
    if (string.IsNullOrEmpty(database))
    {
        var res = new DbTableList("No database set.");
        Dev2Logger.Debug("No database set.", GlobalConstants.WarewolfDebug);
        return serializer.SerializeToBuilder(res);
    }

    DbSource dbSource;
    DbSource runtimeDbSource = null;
    try
    {
        dbSource = serializer.Deserialize<DbSource>(database);
        if (dbSource.ResourceID != Guid.Empty)
        {
            runtimeDbSource = ResourceCatalog.Instance.GetResource<DbSource>(theWorkspace.ID, dbSource.ResourceID);
        }
    }
    catch (Exception e)
    {
        Dev2Logger.Error(e, GlobalConstants.WarewolfError);
        var res = new DbTableList("Invalid JSON data for Database parameter. Exception: {0}", e.Message);
        return serializer.SerializeToBuilder(res);
    }
    if (runtimeDbSource == null)
    {
        var res = new DbTableList("Invalid Database source");
        Dev2Logger.Debug("Invalid Database source", GlobalConstants.WarewolfDebug);
        return serializer.SerializeToBuilder(res);
    }
    if (string.IsNullOrEmpty(runtimeDbSource.DatabaseName) || string.IsNullOrEmpty(runtimeDbSource.Server))
    {
        var res = new DbTableList("Invalid database sent {0}.", database);
        Dev2Logger.Debug($"Invalid database sent {database}.", GlobalConstants.WarewolfDebug);
        return serializer.SerializeToBuilder(res);
    }
    try
    {
        return serializer.SerializeToBuilder(TryExecute(dbSource));
    }
    catch (Exception ex)
    {
        var tables = new DbTableList(ex);
        return serializer.SerializeToBuilder(tables);
    }
}