private static T Execute<T>(this IDbConnection conn,
    string cmdText,
    CommandType cmdType,
    int? cmdTimeout,
    IDbTransaction tran,
    DataParam[] dbParams,
    Func<IDbCommand, T> action)
{
    var openedHere = false;
    T result;
    try
    {
        using (var dbCommand = conn.CreateCommand())
        {
            DbConnectionExtension.SetupCommand(dbCommand, cmdText, cmdType, cmdTimeout, dbParams, tran);

            // Open the connection only if the caller has not already done so.
            if (conn.State != ConnectionState.Open)
            {
                openedHere = true;
                conn.Open();
            }
            result = action(dbCommand);
        }
    }
    finally
    {
        // Close the connection only if this method opened it.
        if (openedHere && conn.State == ConnectionState.Open)
        {
            conn.Close();
        }
    }
    return result;
}
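// A minimal sketch of a wrapper built on the Execute<T> helper above. It shows
// why the helper exists: callers pass only the SQL, the parameters, and the
// action to run, while command setup and connection lifetime are handled once.
// 'ExecuteNonQuery' here is a hypothetical name, not part of the original code.
private static int ExecuteNonQuery(this IDbConnection conn,
    string cmdText,
    DataParam[] dbParams = null,
    IDbTransaction tran = null)
{
    return conn.Execute(cmdText,
        CommandType.Text,
        /* cmdTimeout */ null,
        tran,
        dbParams,
        command => command.ExecuteNonQuery());
}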
public void TestSqLiteConnectionUpdateViaTableNameViaQueryGroup()
{
    using (var connection = new SqliteConnection(Database.ConnectionString))
    {
        // Setup
        var table = Database.CreateCompleteTables(1, connection).First();
        var queryFields = new[]
        {
            new QueryField("Id", table.Id),
            new QueryField("ColumnInt", table.ColumnInt)
        };
        var queryGroup = new QueryGroup(queryFields);
        Helper.UpdateCompleteTableProperties(table);

        // Act
        var result = DbConnectionExtension.Update(connection,
            ClassMappedNameCache.Get<CompleteTable>(),
            table,
            queryGroup);

        // Assert
        Assert.AreEqual(1, result);

        // Act
        var queryResult = connection.Query(ClassMappedNameCache.Get<CompleteTable>(), table.Id).First();

        // Assert
        Helper.AssertPropertiesEquality(table, queryResult);
    }
}
public void TestSqLiteConnectionUpdateViaTableNameViaDynamic()
{
    using (var connection = new SqliteConnection(Database.ConnectionString))
    {
        // Setup
        var table = Database.CreateCompleteTables(1, connection).First();
        Helper.UpdateCompleteTableProperties(table);

        // Act
        var result = DbConnectionExtension.Update(connection,
            ClassMappedNameCache.Get<CompleteTable>(),
            table,
            new { table.Id });

        // Assert
        Assert.AreEqual(1, result);

        // Act
        var queryResult = connection.Query(ClassMappedNameCache.Get<CompleteTable>(), table.Id).First();

        // Assert
        Helper.AssertPropertiesEquality(table, queryResult);
    }
}
public void TestSqLiteConnectionUpdateAsyncViaTableNameAsExpandoObjectViaDataEntity()
{
    using (var connection = new SqliteConnection(Database.ConnectionStringMDS))
    {
        // Setup
        Database.CreateMdsCompleteTables(1, connection).First();
        var table = Helper.CreateMdsCompleteTablesAsExpandoObjects(1).First();

        // Act
        var result = DbConnectionExtension.UpdateAsync(connection,
            ClassMappedNameCache.Get<MdsCompleteTable>(),
            table).Result;

        // Assert
        Assert.AreEqual(1, result);

        // Act
        var queryResult = connection.Query(ClassMappedNameCache.Get<MdsCompleteTable>(), result).First();

        // Assert
        Helper.AssertMembersEquality(queryResult, table);
    }
}
public void TestPostgreSqlConnectionUpdateViaTableNameViaQueryField()
{
    // Setup
    var table = Database.CreateCompleteTables(1).First();

    using (var connection = new NpgsqlConnection(Database.ConnectionString))
    {
        // Setup
        Helper.UpdateCompleteTableProperties(table);

        // Act
        var result = DbConnectionExtension.Update(connection,
            ClassMappedNameCache.Get<CompleteTable>(),
            table,
            new QueryField("Id", table.Id));

        // Assert
        Assert.AreEqual(1, result);

        // Act
        var queryResult = connection.Query<CompleteTable>(table.Id).First();

        // Assert
        Helper.AssertPropertiesEquality(table, queryResult);
    }
}
/// <summary>
/// Bulk insert an instance of <see cref="DataTable"/> object into the database in an asynchronous way.
/// </summary>
/// <param name="connection">The connection object to be used.</param>
/// <param name="tableName">The target table for the bulk-insert operation.</param>
/// <param name="dataTable">The <see cref="DataTable"/> object to be used in the bulk-insert operation.</param>
/// <param name="rowState">The state of the rows to be copied to the destination.</param>
/// <param name="mappings">The list of the columns to be used for mappings. If this parameter is not set, then all columns will be used for mapping.</param>
/// <param name="options">The bulk-copy options to be used.</param>
/// <param name="bulkCopyTimeout">The timeout in seconds to be used.</param>
/// <param name="batchSize">The size per batch to be used.</param>
/// <param name="transaction">The transaction to be used.</param>
/// <returns>The number of rows affected by the execution.</returns>
public async Task<int> BulkInsertAsync(IDbConnection connection,
    string tableName,
    DataTable dataTable,
    DataRowState rowState = DataRowState.Unchanged,
    IEnumerable<BulkInsertMapItem> mappings = null,
    SqlBulkCopyOptions options = SqlBulkCopyOptions.Default,
    int? bulkCopyTimeout = null,
    int? batchSize = null,
    IDbTransaction transaction = null)
{
    // Validate the objects
    ValidateConnection(connection);
    ValidateTransaction(transaction);
    DbConnectionExtension.ValidateTransactionConnectionObject(connection, transaction);

    // Variables for the operation
    var result = 0;

    // Actual execution
    using (var sqlBulkCopy = new SqlBulkCopy((SqlConnection)connection, options, (SqlTransaction)transaction))
    {
        // Set the destination table
        sqlBulkCopy.DestinationTableName = tableName;

        // Set the timeout
        if (bulkCopyTimeout.HasValue)
        {
            sqlBulkCopy.BulkCopyTimeout = bulkCopyTimeout.Value;
        }

        // Set the batch size
        if (batchSize.HasValue)
        {
            sqlBulkCopy.BatchSize = batchSize.Value;
        }

        // Add the mappings
        if (mappings == null)
        {
            // Get the actual DB fields
            var dbFields = await DbFieldCache.GetAsync(connection, tableName, transaction);
            var fields = GetDataColumns(dataTable).Select(column => column.ColumnName);
            var filteredFields = new List<Tuple<string, string>>();

            // Match the source columns to the DB fields case-insensitively
            // to fix the casing problem of the bulk inserts
            foreach (var dbField in dbFields)
            {
                var field = fields.FirstOrDefault(f =>
                    string.Equals(f, dbField.UnquotedName, StringComparison.OrdinalIgnoreCase));
                if (field != null)
                {
                    filteredFields.Add(new Tuple<string, string>(field, dbField.UnquotedName));
                }
            }

            // Iterate the filtered fields
            foreach (var field in filteredFields)
            {
                sqlBulkCopy.ColumnMappings.Add(field.Item1, field.Item2);
            }
        }
        else
        {
            // Iterate the provided mappings
            foreach (var mapItem in mappings)
            {
                sqlBulkCopy.ColumnMappings.Add(mapItem.SourceColumn, mapItem.DestinationColumn);
            }
        }

        // Open the connection and do the operation
        await connection.EnsureOpenAsync();
        await sqlBulkCopy.WriteToServerAsync(dataTable, rowState);

        // Set the return value
        result = GetDataRows(dataTable, rowState).Count();
    }

    // Result
    return result;
}
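// A minimal usage sketch for the DataTable overload above, assumed to be
// called from the same class. The table and column names are hypothetical;
// in practice the DataTable would usually come from an existing fill or load.
public async Task BulkInsertCustomersAsync(SqlConnection connection)
{
    var dataTable = new DataTable("[dbo].[Customer]");
    dataTable.Columns.Add("Name", typeof(string));
    dataTable.Columns.Add("CreatedDateUtc", typeof(DateTime));

    var row = dataTable.NewRow();
    row["Name"] = "Anna";
    row["CreatedDateUtc"] = DateTime.UtcNow;
    dataTable.Rows.Add(row);

    // Freshly added rows are in the 'Added' state, so pass that explicitly;
    // the default of 'Unchanged' would copy nothing for a just-built table.
    var inserted = await BulkInsertAsync(connection,
        "[dbo].[Customer]",
        dataTable,
        rowState: DataRowState.Added);
}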
public override DataTable LoadOverview(DbConnection conn, ObjectPath parpath)
{
    DbConnectionExtension.SafeChangeDatabase(conn, parpath);
    return DbConnectionExtension.LoadTableFromQuery(conn, SqlScripts.getsequences);
}
/// <summary>
/// Bulk insert an instance of <see cref="DbDataReader"/> object into the database in an asynchronous way.
/// </summary>
/// <param name="connection">The connection object to be used.</param>
/// <param name="tableName">The target table for the bulk-insert operation.</param>
/// <param name="reader">The <see cref="DbDataReader"/> object to be used in the bulk-insert operation.</param>
/// <param name="mappings">The list of the columns to be used for mappings. If this parameter is not set, then all columns will be used for mapping.</param>
/// <param name="options">The bulk-copy options to be used.</param>
/// <param name="bulkCopyTimeout">The timeout in seconds to be used.</param>
/// <param name="batchSize">The size per batch to be used.</param>
/// <param name="transaction">The transaction to be used.</param>
/// <returns>The number of rows affected by the execution.</returns>
public async Task<int> BulkInsertAsync(IDbConnection connection,
    string tableName,
    DbDataReader reader,
    IEnumerable<BulkInsertMapItem> mappings = null,
    SqlBulkCopyOptions options = SqlBulkCopyOptions.Default,
    int? bulkCopyTimeout = null,
    int? batchSize = null,
    IDbTransaction transaction = null)
{
    // Validate the objects
    ValidateConnection(connection);
    ValidateTransaction(transaction);
    DbConnectionExtension.ValidateTransactionConnectionObject(connection, transaction);

    // Variables for the operation
    var result = 0;

    // Actual execution
    using (var sqlBulkCopy = new SqlBulkCopy((SqlConnection)connection, options, (SqlTransaction)transaction))
    {
        // Set the destination table
        sqlBulkCopy.DestinationTableName = tableName;

        // Set the timeout
        if (bulkCopyTimeout.HasValue)
        {
            sqlBulkCopy.BulkCopyTimeout = bulkCopyTimeout.Value;
        }

        // Set the batch size
        if (batchSize.HasValue)
        {
            sqlBulkCopy.BatchSize = batchSize.Value;
        }

        // Add the mappings, if the caller provided any
        if (mappings != null)
        {
            foreach (var mapItem in mappings)
            {
                sqlBulkCopy.ColumnMappings.Add(mapItem.SourceColumn, mapItem.DestinationColumn);
            }
        }

        // Open the connection and do the operation
        await connection.EnsureOpenAsync();
        await sqlBulkCopy.WriteToServerAsync(reader);

        // Read the rows-copied counter via reflection, since 'SqlBulkCopy'
        // does not expose it publicly; fall back to the reader's count.
        var field = GetRowsCopiedField();

        // Set the return value
        result = field != null ? (int)field.GetValue(sqlBulkCopy) : reader.RecordsAffected;
    }

    // Result
    return result;
}
/// <summary>
/// Bulk insert a list of data entity objects into the database.
/// </summary>
/// <typeparam name="TEntity">The type of the data entity object.</typeparam>
/// <param name="connection">The connection object to be used.</param>
/// <param name="entities">The list of the data entities to be bulk-inserted.</param>
/// <param name="mappings">The list of the columns to be used for mappings. If this parameter is not set, then all columns will be used for mapping.</param>
/// <param name="options">The bulk-copy options to be used.</param>
/// <param name="bulkCopyTimeout">The timeout in seconds to be used.</param>
/// <param name="batchSize">The size per batch to be used.</param>
/// <param name="transaction">The transaction to be used.</param>
/// <returns>The number of rows affected by the execution.</returns>
public int BulkInsert<TEntity>(IDbConnection connection,
    IEnumerable<TEntity> entities,
    IEnumerable<BulkInsertMapItem> mappings = null,
    SqlBulkCopyOptions options = SqlBulkCopyOptions.Default,
    int? bulkCopyTimeout = null,
    int? batchSize = null,
    IDbTransaction transaction = null)
    where TEntity : class
{
    // Validate the objects
    ValidateConnection(connection);
    ValidateTransaction(transaction);
    DbConnectionExtension.ValidateTransactionConnectionObject(connection, transaction);

    // Variables for the operation
    var result = 0;

    // Actual execution
    using (var reader = new DataEntityDataReader<TEntity>(entities, connection))
    {
        using (var sqlBulkCopy = new SqlBulkCopy((SqlConnection)connection, options, (SqlTransaction)transaction))
        {
            // Set the destination table
            sqlBulkCopy.DestinationTableName = ClassMappedNameCache.Get<TEntity>();

            // Set the timeout
            if (bulkCopyTimeout.HasValue)
            {
                sqlBulkCopy.BulkCopyTimeout = bulkCopyTimeout.Value;
            }

            // Set the batch size
            if (batchSize.HasValue)
            {
                sqlBulkCopy.BatchSize = batchSize.Value;
            }

            // Add the mappings
            if (mappings == null)
            {
                // Map every property by its unquoted column name
                foreach (var property in reader.Properties)
                {
                    var columnName = property.GetUnquotedMappedName();
                    sqlBulkCopy.ColumnMappings.Add(columnName, columnName);
                }
            }
            else
            {
                // Iterate the provided mappings
                foreach (var mapItem in mappings)
                {
                    sqlBulkCopy.ColumnMappings.Add(mapItem.SourceColumn, mapItem.DestinationColumn);
                }
            }

            // Open the connection and do the operation
            connection.EnsureOpen();
            sqlBulkCopy.WriteToServer(reader);

            // Read the rows-copied counter via reflection, since 'SqlBulkCopy'
            // does not expose it publicly; fall back to the reader's count.
            var field = GetRowsCopiedField();

            // Set the return value
            result = field != null ? (int)field.GetValue(sqlBulkCopy) : reader.RecordsAffected;
        }
    }

    // Result
    return result;
}
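// A minimal usage sketch for the entity overload above, assumed to be called
// from the same class. 'Customer' is a hypothetical POCO; the (source,
// destination) argument order of 'BulkInsertMapItem' is assumed to match the
// SourceColumn/DestinationColumn properties read in the method above.
public void BulkInsertCustomers(SqlConnection connection, IEnumerable<Customer> customers)
{
    // Explicit mappings are only needed when entity property names differ
    // from the destination column names; otherwise pass no mappings and
    // every property is mapped under its own name.
    var mappings = new[]
    {
        new BulkInsertMapItem("Name", "FullName"),
        new BulkInsertMapItem("CreatedDateUtc", "CreatedDateUtc")
    };
    var inserted = BulkInsert(connection, customers, mappings);
}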
public int RepoDbTenParameters()
{
    return DbConnectionExtension.ExecuteScalar<int>(sqlConnection, TenParametersSql, TenParameters);
}
public override DataTable LoadOverview(DbConnection conn, ObjectPath parpath)
{
    conn.SafeChangeDatabase(parpath);
    return DbConnectionExtension.LoadTableFromQuery(conn, "SHOW EVENTS");
}
public override DataTable LoadOverview(DbConnection conn, ObjectPath parpath)
{
    DbConnectionExtension.SafeChangeDatabase(conn, parpath);
    return DbConnectionExtension.LoadTableFromQuery(conn,
        "SHOW FUNCTION STATUS WHERE Db='" + parpath.DbName + "'");
}
public int RepoDbOneParameter()
{
    return DbConnectionExtension.ExecuteNonQuery(sqlConnection, OneParameterSql, OneParameter);
}
/// <summary>
/// Bulk insert an instance of <see cref="DbDataReader"/> object into the database in an asynchronous way.
/// </summary>
/// <param name="connection">The connection object to be used.</param>
/// <param name="tableName">The target table for the bulk-insert operation.</param>
/// <param name="reader">The <see cref="DbDataReader"/> object to be used in the bulk-insert operation.</param>
/// <param name="mappings">The list of the columns to be used for mappings. If this parameter is not set, then all columns will be used for mapping.</param>
/// <param name="options">The bulk-copy options to be used.</param>
/// <param name="bulkCopyTimeout">The timeout in seconds to be used.</param>
/// <param name="batchSize">The size per batch to be used.</param>
/// <param name="transaction">The transaction to be used.</param>
/// <returns>The number of rows affected by the execution.</returns>
public async Task<int> BulkInsertAsync(IDbConnection connection,
    string tableName,
    DbDataReader reader,
    IEnumerable<BulkInsertMapItem> mappings = null,
    SqlBulkCopyOptions options = SqlBulkCopyOptions.Default,
    int? bulkCopyTimeout = null,
    int? batchSize = null,
    IDbTransaction transaction = null)
{
    // Validate the objects
    ValidateConnection(connection);
    ValidateTransaction(transaction);
    DbConnectionExtension.ValidateTransactionConnectionObject(connection, transaction);

    // Variables for the operation
    var result = 0;

    // Actual execution
    using (var sqlBulkCopy = new SqlBulkCopy((SqlConnection)connection, options, (SqlTransaction)transaction))
    {
        // Set the destination table
        sqlBulkCopy.DestinationTableName = tableName;

        // Set the timeout
        if (bulkCopyTimeout.HasValue)
        {
            sqlBulkCopy.BulkCopyTimeout = bulkCopyTimeout.Value;
        }

        // Set the batch size
        if (batchSize.HasValue)
        {
            sqlBulkCopy.BatchSize = batchSize.Value;
        }

        // Add the mappings
        if (mappings == null)
        {
            // Get the actual DB fields
            var dbFields = await DbFieldCache.GetAsync(connection, tableName, transaction);
            var fields = Enumerable.Range(0, reader.FieldCount).Select(index => reader.GetName(index));
            var filteredFields = new List<Tuple<string, string>>();

            // Match the reader's fields to the DB fields case-insensitively
            // to fix the casing problem of the bulk inserts
            foreach (var dbField in dbFields)
            {
                var readerField = fields.FirstOrDefault(field =>
                    string.Equals(field, dbField.UnquotedName, StringComparison.OrdinalIgnoreCase));
                if (!string.IsNullOrEmpty(readerField))
                {
                    filteredFields.Add(new Tuple<string, string>(readerField, dbField.UnquotedName));
                }
            }

            // Iterate the filtered fields
            foreach (var field in filteredFields)
            {
                sqlBulkCopy.ColumnMappings.Add(field.Item1, field.Item2);
            }
        }
        else
        {
            // Iterate the provided mappings
            foreach (var mapItem in mappings)
            {
                sqlBulkCopy.ColumnMappings.Add(mapItem.SourceColumn, mapItem.DestinationColumn);
            }
        }

        // Open the connection and do the operation
        await connection.EnsureOpenAsync();
        await sqlBulkCopy.WriteToServerAsync(reader);

        // Read the rows-copied counter via reflection, since 'SqlBulkCopy'
        // does not expose it publicly; fall back to the reader's count.
        var copiedField = GetRowsCopiedField();

        // Set the return value
        result = copiedField != null ? (int)copiedField.GetValue(sqlBulkCopy) : reader.RecordsAffected;
    }

    // Result
    return result;
}
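// A minimal sketch of streaming rows between two databases through the
// DbDataReader overload above, assumed to be called from the same class.
// The query and table name are hypothetical, and 'source' is assumed to be
// an already-open connection.
public async Task<int> CopyCustomersAsync(SqlConnection source, SqlConnection destination)
{
    using (var command = source.CreateCommand())
    {
        command.CommandText = "SELECT [Name], [CreatedDateUtc] FROM [dbo].[Customer];";
        using (var reader = await command.ExecuteReaderAsync())
        {
            // With no mappings given, the reader's column names are matched
            // case-insensitively against the destination's fields.
            return await BulkInsertAsync(destination, "[dbo].[Customer]", reader);
        }
    }
}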
public override DataTable LoadOverview(DbConnection conn, ObjectPath parpath)
{
    DbConnectionExtension.SafeChangeDatabase(conn, parpath);
    return DbConnectionExtension.LoadTableFromQuery(conn, "select * from USER_SEQUENCES");
}
/// <summary>
/// Bulk insert a list of data entity objects into the database.
/// </summary>
/// <typeparam name="TEntity">The type of the data entity object.</typeparam>
/// <param name="connection">The connection object to be used.</param>
/// <param name="entities">The list of the data entities to be bulk-inserted.</param>
/// <param name="mappings">The list of the columns to be used for mappings. If this parameter is not set, then all columns will be used for mapping.</param>
/// <param name="options">The bulk-copy options to be used.</param>
/// <param name="bulkCopyTimeout">The timeout in seconds to be used.</param>
/// <param name="batchSize">The size per batch to be used.</param>
/// <param name="transaction">The transaction to be used.</param>
/// <returns>The number of rows affected by the execution.</returns>
public int BulkInsert<TEntity>(IDbConnection connection,
    IEnumerable<TEntity> entities,
    IEnumerable<BulkInsertMapItem> mappings = null,
    SqlBulkCopyOptions options = SqlBulkCopyOptions.Default,
    int? bulkCopyTimeout = null,
    int? batchSize = null,
    IDbTransaction transaction = null)
    where TEntity : class
{
    // Validate the objects
    ValidateConnection(connection);
    ValidateTransaction(transaction);
    DbConnectionExtension.ValidateTransactionConnectionObject(connection, transaction);

    // Variables for the operation
    var result = 0;

    // Actual execution
    using (var reader = new DataEntityDataReader<TEntity>(entities, connection))
    {
        using (var sqlBulkCopy = new SqlBulkCopy((SqlConnection)connection, options, (SqlTransaction)transaction))
        {
            var dbSetting = connection.GetDbSetting();

            // Set the destination table
            sqlBulkCopy.DestinationTableName = ClassMappedNameCache.Get<TEntity>(dbSetting);

            // Set the timeout
            if (bulkCopyTimeout.HasValue)
            {
                sqlBulkCopy.BulkCopyTimeout = bulkCopyTimeout.Value;
            }

            // Set the batch size
            if (batchSize.HasValue)
            {
                sqlBulkCopy.BatchSize = batchSize.Value;
            }

            // Add the mappings
            if (mappings == null)
            {
                // Get the actual DB fields
                var dbFields = DbFieldCache.Get(connection, ClassMappedNameCache.Get<TEntity>(dbSetting), transaction);
                var fields = reader.Properties.AsFields();
                var filteredFields = new List<Tuple<string, string>>();

                // Match the entity fields to the DB fields case-insensitively
                // to fix the casing problem of the bulk inserts
                foreach (var dbField in dbFields)
                {
                    var field = fields.FirstOrDefault(f =>
                        string.Equals(f.UnquotedName, dbField.UnquotedName, StringComparison.OrdinalIgnoreCase));
                    if (field != null)
                    {
                        filteredFields.Add(new Tuple<string, string>(field.UnquotedName, dbField.UnquotedName));
                    }
                }

                // Iterate the filtered fields
                foreach (var field in filteredFields)
                {
                    sqlBulkCopy.ColumnMappings.Add(field.Item1, field.Item2);
                }
            }
            else
            {
                // Iterate the provided mappings
                foreach (var mapItem in mappings)
                {
                    sqlBulkCopy.ColumnMappings.Add(mapItem.SourceColumn, mapItem.DestinationColumn);
                }
            }

            // Open the connection and do the operation
            connection.EnsureOpen();
            sqlBulkCopy.WriteToServer(reader);

            // Read the rows-copied counter via reflection, since 'SqlBulkCopy'
            // does not expose it publicly; fall back to the reader's count.
            var copiedField = GetRowsCopiedField();

            // Set the return value
            result = copiedField != null ? (int)copiedField.GetValue(sqlBulkCopy) : reader.RecordsAffected;
        }
    }

    // Result
    return result;
}
public string RepoDbTiny()
{
    return DbConnectionExtension.ExecuteScalar<string>(connection, Sql, Parameters);
}
public static WebResponse CreateWebResponse(PtunConnection conn,
    string op,
    Dictionary<string, byte[]> extpars,
    string sqlForLogPurpose)
{
    var fp = conn.FailoverParams;
    var start = DateTime.Now;
    Exception httpError = null;
    int reqindex = 0;

    for (; reqindex < fp.MaxRequestCount; reqindex++)
    {
        try
        {
            var webrequest = (HttpWebRequest)WebRequest.Create(conn.Params.Url);
            if (!String.IsNullOrEmpty(conn.Params.HttpLogin))
            {
                webrequest.Credentials = new NetworkCredential(conn.Params.HttpLogin, conn.Params.HttpPassword);
            }
            webrequest.ContentType = "application/x-www-form-urlencoded; charset=" + conn.Params.RealEncoding.WebName;
            webrequest.Method = "POST";

            // Build the tunnel parameters for the POST body
            var pars = new Dictionary<string, byte[]>();
            pars["CHECK"] = Encoding.ASCII.GetBytes(conn.Params.Check);
            pars["HOST"] = Encoding.ASCII.GetBytes(conn.Params.Host ?? "");
            pars["USER"] = Encoding.ASCII.GetBytes(conn.Params.Login ?? "");
            pars["PORT"] = Encoding.ASCII.GetBytes(PtunDefaults.Instance.Port(conn.Params.Engine, conn.Params.Port).ToString());
            pars["PASSWORD"] = Encoding.ASCII.GetBytes(conn.Params.Password ?? "");
            pars["ENGINE"] = Encoding.ASCII.GetBytes(PtunDefaults.Instance.Engine(conn.Params.Engine));
            pars["DATABASE"] = Encoding.ASCII.GetBytes(conn.Database ?? "");
            pars["OPERATION"] = Encoding.ASCII.GetBytes(op);
            pars["VERSION"] = Encoding.ASCII.GetBytes(VERSION.ToString());
            pars["ENCODINGSTYLE"] = Encoding.ASCII.GetBytes(conn.Params.EncodingStyle.ToString());
            pars.AddAll(extpars);

            string pars_enc = StringTool.UrlEncode(pars);
            byte[] data = conn.Params.RealEncoding.GetBytes(pars_enc);
            webrequest.ContentLength = data.Length;

            // The first attempt uses the normal timeout; retries use the repeated-request timeout
            webrequest.Timeout = reqindex == 0
                ? fp.HttpRequestTimeoutMilisecs
                : fp.RepeatedHttpRequestTimeoutMilisecs;

            using (Stream requestStream = webrequest.GetRequestStream())
            {
                requestStream.Write(data, 0, data.Length);
            }

            try
            {
                WebResponse resp = webrequest.GetResponse();
                return resp;
            }
            catch (Exception e)
            {
                Logging.Warning(String.Format("PHP tunnel error (run {0}), trying again: {1}", reqindex, e.Message));
                Thread.Sleep(fp.DelayBeforeRepeatedRequestMilisecs);
                httpError = e;
            }

            // Give up once the overall retry budget is exhausted
            if ((DateTime.Now - start).TotalMilliseconds > fp.RepeatedRequestTimeoutMilisecs)
            {
                break;
            }
        }
        catch (UriFormatException err)
        {
            throw new ConnectionFailedError(err);
        }
    }

    Logging.Info(String.Format("Repeated PTUN request failed, repeated={0}", reqindex));
    if (sqlForLogPurpose != null)
    {
        DbConnectionExtension.LogExecuteQuery(sqlForLogPurpose);
    }
    else
    {
        Logging.Debug("Executing PTUN operation: " + op);
    }
    throw httpError;
}
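// A minimal sketch of consuming CreateWebResponse above. The operation name
// "QUERY" and the "SQL" parameter key are hypothetical; the real keys are
// defined by the PHP tunnel protocol elsewhere in this codebase.
public static string ExecuteTunnelOperation(PtunConnection conn, string sql)
{
    var extpars = new Dictionary<string, byte[]>
    {
        ["SQL"] = conn.Params.RealEncoding.GetBytes(sql)
    };
    using (var response = CreateWebResponse(conn, "QUERY", extpars, sql))
    using (var stream = response.GetResponseStream())
    using (var textReader = new StreamReader(stream, conn.Params.RealEncoding))
    {
        // The tunnel replies with the encoded result set as the response body.
        return textReader.ReadToEnd();
    }
}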
public override DataTable LoadOverview(DbConnection conn, ObjectPath parpath)
{
    return DbConnectionExtension.LoadTableFromQuery(conn, StdScripts.gettriggers);
}
public override DataTable LoadOverview(DbConnection conn, ObjectPath parpath)
{
    return DbConnectionExtension.LoadTableFromQuery(conn, "select * from USER_TRIGGERS");
}
public override DataTable LoadOverview(DbConnection conn, ObjectPath parpath)
{
    return DbConnectionExtension.LoadTableFromQuery(conn, "SELECT * FROM INFORMATION_SCHEMA.TRIGGERS");
}
public int RepoDbNoParameters()
{
    return DbConnectionExtension.ExecuteNonQuery(sqlConnection, NoParameterSql);
}