// Flushes buffered web events to SQL Server via the SP_LOG_EVENT stored procedure.
// On any failure, schedules a retry window (_retryDate) so we do not hammer a dead server.
// events                   - the buffered events to persist.
// eventsDiscardedByBuffer  - count of events the buffer dropped since the last flush;
//                            when non-zero an informational event is logged first.
// lastNotificationUtc      - timestamp of the previous notification (UTC), used in the drop message.
void WriteToSQL(WebBaseEventCollection events, int eventsDiscardedByBuffer, DateTime lastNotificationUtc)
{
    // We don't want to send any more events until we've waited until the _retryDate (which defaults to minValue)
    if (_retryDate > DateTime.UtcNow)
    {
        return;
    }
    try
    {
        SqlConnectionHolder sqlConnHolder = SqlConnectionHelper.GetConnection(_sqlConnectionString, true);
        SqlCommand sqlCommand = new SqlCommand(SP_LOG_EVENT);
        CheckSchemaVersion(sqlConnHolder.Connection);
        sqlCommand.CommandType = CommandType.StoredProcedure;
        sqlCommand.Connection = sqlConnHolder.Connection;
        // -1 means "use the provider default"; anything else overrides the command timeout.
        if (_commandTimeout > -1)
        {
            sqlCommand.CommandTimeout = _commandTimeout;
        }
        PrepareParams(sqlCommand);
        try
        {
            sqlConnHolder.Open(null, true);
            Interlocked.Increment(ref _connectionCount);
            // If the buffer dropped events, record a synthetic informational event first
            // so the loss is visible in the event table.
            if (eventsDiscardedByBuffer != 0)
            {
                WebBaseEvent infoEvent = new WebBaseEvent(
                    SR.GetString(SR.Sql_webevent_provider_events_dropped,
                                 eventsDiscardedByBuffer.ToString(CultureInfo.InstalledUICulture),
                                 lastNotificationUtc.ToString("r", CultureInfo.InstalledUICulture)),
                    null,
                    WebEventCodes.WebEventProviderInformation,
                    WebEventCodes.SqlProviderEventsDropped);
                FillParams(sqlCommand, infoEvent);
                sqlCommand.ExecuteNonQuery();
            }
            // One ExecuteNonQuery per event; the prepared command is reused with refreshed params.
            foreach (WebBaseEvent eventRaised in events)
            {
                FillParams(sqlCommand, eventRaised);
                sqlCommand.ExecuteNonQuery();
            }
        }
#if DBG
        catch (Exception e)
        {
            Debug.Trace("SqlWebEventProvider", "ExecuteNonQuery failed: " + e);
            throw;
        }
#endif
        finally
        {
            // NOTE(review): if Open() throws, this finally still runs Close() and decrements
            // the counter that was never incremented — _connectionCount can go negative.
            // TODO confirm whether Open() can throw here or whether the holder guards it.
            sqlConnHolder.Close();
            Interlocked.Decrement(ref _connectionCount);
        }
#if (!DBG)
        try
        {
#endif
        EventProcessingComplete(events);
#if (!DBG)
        }
        catch
        {
            // Ignore all errors.
        }
#endif
    }
    catch
    {
        // For any failure, we will wait at least 30 seconds or _commandTimeout before trying again
        double timeout = 30;
        if (_commandTimeout > -1)
        {
            timeout = (double)_commandTimeout;
        }
        _retryDate = DateTime.UtcNow.AddSeconds(timeout);
        throw;
    }
}
/// <summary>
/// Seeds the Post table, then benchmarks a hand-coded ADO.NET materializer
/// against Skeleton.ORM's synchronous and asynchronous readers.
/// </summary>
public void RunBenchmarks()
{
    DbPostSeeder.SeedPosts();
    var suite = new BenchmarkCollection();
    using (var helper = new SqlConnectionHelper())
    using (var cnn = helper.OpenConnection())
    {
        // Baseline: hand-written reader-to-object mapping.
        var selectPosts = new SqlCommand
        {
            Connection = cnn,
            CommandText = @"select postId, [Text], [CreationDate], LastChangeDate, Counter1,Counter2,Counter3,Counter4,Counter5,Counter6,Counter7,Counter8,Counter9 from Post"
        };
        suite.Add(() =>
        {
            var posts = new List<Post>();
            using (var reader = selectPosts.ExecuteReader())
            {
                while (reader.Read())
                {
                    posts.Add(new Post
                    {
                        PostId = reader.GetInt32(0),
                        Text = reader.GetNullableString(1),
                        CreationDate = reader.GetDateTime(2),
                        LastChangeDate = reader.GetDateTime(3),
                        Counter1 = reader.GetNullableValue<int>(4),
                        Counter2 = reader.GetNullableValue<int>(5),
                        Counter3 = reader.GetNullableValue<int>(6),
                        Counter4 = reader.GetNullableValue<int>(7),
                        Counter5 = reader.GetNullableValue<int>(8),
                        Counter6 = reader.GetNullableValue<int>(9),
                        Counter7 = reader.GetNullableValue<int>(10),
                        Counter8 = reader.GetNullableValue<int>(11),
                        Counter9 = reader.GetNullableValue<int>(12)
                    });
                }
            }
        }, "HandCoded");

        // Skeleton.ORM: first call pays mapping/caching cost, second measures the warm path.
        var entityReader = _resolver.Resolve<IEntityReader<Post>>();
        suite.Add(() => { entityReader.Find(); }, "Skeleton.Orm => Cold start");
        suite.Add(() => { entityReader.Find(); }, "Skeleton.Orm => Hot start");

        var asyncEntityReader = _resolver.Resolve<IAsyncEntityReader<Post>>();
        suite.Add(() =>
        {
            var fetched = asyncEntityReader.FindAsync().Result;
        }, "Skeleton.Orm.Async => Hot start");

        suite.Run();
    }
}
/////////////////////////////////////////////////////////////////////////////
// Private methods
/////////////////////////////////////////////////////////////////////////////

/// <summary>
/// Runs dbo.aspnet_Profile_GetProfiles with the supplied filter parameters and
/// materializes one page of <see cref="ProfileInfo"/> rows.
/// </summary>
/// <param name="args">Extra SqlParameters appended to the stored-procedure call.</param>
/// <param name="authenticationOption">Filter on anonymous/authenticated/all profiles.</param>
/// <param name="pageIndex">Zero-based page index; must be non-negative.</param>
/// <param name="pageSize">Rows per page; must be at least 1.</param>
/// <param name="totalRecords">Total matching rows (from the second result set when present,
/// otherwise the page row count).</param>
/// <exception cref="ArgumentException">Paging arguments are out of range.</exception>
private ProfileInfoCollection GetProfilesForQuery(SqlParameter [] args, ProfileAuthenticationOption authenticationOption, int pageIndex, int pageSize, out int totalRecords)
{
    if (pageIndex < 0)
    {
        throw new ArgumentException(SR.GetString(SR.PageIndex_bad), "pageIndex");
    }
    if (pageSize < 1)
    {
        throw new ArgumentException(SR.GetString(SR.PageSize_bad), "pageSize");
    }

    // Reject page windows whose last row index would not fit in an int
    // (computed in long precisely to avoid int overflow).
    long upperBound = (long)pageIndex * pageSize + pageSize - 1;
    if (upperBound > Int32.MaxValue)
    {
        throw new ArgumentException(SR.GetString(SR.PageIndex_PageSize_bad), "pageIndex and pageSize");
    }

    // The former outer `try { ... } catch { throw; }` wrapper was a no-op and has been removed.
    SqlConnectionHolder holder = null;
    SqlDataReader reader = null;
    try
    {
        holder = SqlConnectionHelper.GetConnection(_sqlConnectionString, true);
        CheckSchemaVersion(holder.Connection);

        SqlCommand cmd = new SqlCommand("dbo.aspnet_Profile_GetProfiles", holder.Connection);
        cmd.CommandTimeout = CommandTimeout;
        cmd.CommandType = CommandType.StoredProcedure;
        cmd.Parameters.Add(CreateInputParam("@ApplicationName", SqlDbType.NVarChar, ApplicationName));
        cmd.Parameters.Add(CreateInputParam("@ProfileAuthOptions", SqlDbType.Int, (int)authenticationOption));
        cmd.Parameters.Add(CreateInputParam("@PageIndex", SqlDbType.Int, pageIndex));
        cmd.Parameters.Add(CreateInputParam("@PageSize", SqlDbType.Int, pageSize));
        foreach (SqlParameter arg in args)
        {
            cmd.Parameters.Add(arg);
        }

        reader = cmd.ExecuteReader(CommandBehavior.SequentialAccess);
        ProfileInfoCollection profiles = new ProfileInfoCollection();
        while (reader.Read())
        {
            // Column order: UserName, IsAnonymous, LastActivityDate, LastUpdatedDate, Size.
            string username = reader.GetString(0);
            bool isAnon = reader.GetBoolean(1);
            DateTime dtLastActivity = DateTime.SpecifyKind(reader.GetDateTime(2), DateTimeKind.Utc);
            DateTime dtLastUpdated = DateTime.SpecifyKind(reader.GetDateTime(3), DateTimeKind.Utc);
            int size = reader.GetInt32(4);
            profiles.Add(new ProfileInfo(username, isAnon, dtLastActivity, dtLastUpdated, size));
        }

        // A second result set (when the SP returns one) carries the total row count.
        totalRecords = profiles.Count;
        if (reader.NextResult())
        {
            if (reader.Read())
            {
                totalRecords = reader.GetInt32(0);
            }
        }
        return profiles;
    }
    finally
    {
        if (reader != null)
        {
            reader.Close();
        }
        if (holder != null)
        {
            holder.Close();
            holder = null;
        }
    }
}
// Atomically advances every counter in the [KeyOffset] row for the given building by the
// supplied offsets and streams back the post-update values via the OUTPUT clause.
//
// NOTE(review): this is an iterator method (yield return), so NOTHING runs until the caller
// enumerates the result; the connection/transaction open lazily, and transaction.Commit()
// only executes if the caller enumerates to completion. Abandoning the enumeration early
// disposes the transaction uncommitted (rollback). Also note the live IDataReader itself is
// yielded — it is only valid until the enumerator advances/completes.
public IEnumerable <IDataReader> Update(int buildingId, long visitOffset, long periodOffset, long drugOffset, long procedureOffset, long deviceOffset, long conditionOffset, long measurementOffset, long observationOffset, long observationPeriodOffset, long visitCostOffset, long procedureCostOffset, long deviceCostOffset, long drugEraOffset, long conditionEraOffset)
{
    using (var connection = SqlConnectionHelper.OpenMSSQLConnection(connectionString))
    using (var transaction = connection.BeginTransaction())
    {
        // OUTPUT INSERTED.* returns each column's value *after* the increment, in the
        // same column order the callers read them.
        const string updateQuery = "UPDATE [KeyOffset] " +
                                   "SET [VisitOccurrence] = [VisitOccurrence] + @visitOffset " +
                                   ",[PayerPlanPeriod] = [PayerPlanPeriod] + @periodOffset " +
                                   ",[DrugExposure] = [DrugExposure] + @drugOffset " +
                                   ",[ProcedureOccurrence] = [ProcedureOccurrence] + @procedureOffset " +
                                   ",[DeviceExposure] = [DeviceExposure] + @deviceOffset " +
                                   ",[ConditionOccurrence] = [ConditionOccurrence] + @conditionOffset " +
                                   ",[Measurement] = [Measurement] + @measurementOffset " +
                                   ",[Observation] = [Observation] + @observationOffset " +
                                   ",[ObservationPeriod] = [ObservationPeriod] + @observationPeriodOffset " +
                                   ",[VisitCost] = [VisitCost] + @visitCostOffset " +
                                   ",[ProcedureCost] = [ProcedureCost] + @procedureCostOffset " +
                                   ",[DeviceCost] = [DeviceCost] + @deviceCostOffset " +
                                   ",[DrugEra] = [DrugEra] + @drugEraOffset " +
                                   ",[ConditionEra] = [ConditionEra] + @conditionEraOffset " +
                                   "OUTPUT INSERTED.[VisitOccurrence], INSERTED.[PayerPlanPeriod], INSERTED.[DrugExposure], INSERTED.[ProcedureOccurrence], INSERTED.[DeviceExposure], " +
                                   "INSERTED.[ConditionOccurrence], INSERTED.[Measurement], INSERTED.[Observation], INSERTED.[ObservationPeriod], INSERTED.[VisitCost], INSERTED.[ProcedureCost], INSERTED.[DeviceCost], INSERTED.[DrugEra], INSERTED.[ConditionEra] " +
                                   "WHERE [BuildingId] = @buildingId ";
        using (var cmd = new SqlCommand(updateQuery, connection, transaction))
        {
            // Explicitly typed parameters, one per offset column.
            cmd.Parameters.Add("@buildingId", SqlDbType.Int);
            cmd.Parameters["@buildingId"].Value = buildingId;
            cmd.Parameters.Add("@visitOffset", SqlDbType.BigInt);
            cmd.Parameters["@visitOffset"].Value = visitOffset;
            cmd.Parameters.Add("@periodOffset", SqlDbType.BigInt);
            cmd.Parameters["@periodOffset"].Value = periodOffset;
            cmd.Parameters.Add("@drugOffset", SqlDbType.BigInt);
            cmd.Parameters["@drugOffset"].Value = drugOffset;
            cmd.Parameters.Add("@procedureOffset", SqlDbType.BigInt);
            cmd.Parameters["@procedureOffset"].Value = procedureOffset;
            cmd.Parameters.Add("@deviceOffset", SqlDbType.BigInt);
            cmd.Parameters["@deviceOffset"].Value = deviceOffset;
            cmd.Parameters.Add("@conditionOffset", SqlDbType.BigInt);
            cmd.Parameters["@conditionOffset"].Value = conditionOffset;
            cmd.Parameters.Add("@measurementOffset", SqlDbType.BigInt);
            cmd.Parameters["@measurementOffset"].Value = measurementOffset;
            cmd.Parameters.Add("@observationOffset", SqlDbType.BigInt);
            cmd.Parameters["@observationOffset"].Value = observationOffset;
            cmd.Parameters.Add("@observationPeriodOffset", SqlDbType.BigInt);
            cmd.Parameters["@observationPeriodOffset"].Value = observationPeriodOffset;
            cmd.Parameters.Add("@visitCostOffset", SqlDbType.BigInt);
            cmd.Parameters["@visitCostOffset"].Value = visitCostOffset;
            cmd.Parameters.Add("@procedureCostOffset", SqlDbType.BigInt);
            cmd.Parameters["@procedureCostOffset"].Value = procedureCostOffset;
            cmd.Parameters.Add("@deviceCostOffset", SqlDbType.BigInt);
            cmd.Parameters["@deviceCostOffset"].Value = deviceCostOffset;
            cmd.Parameters.Add("@drugEraOffset", SqlDbType.BigInt);
            cmd.Parameters["@drugEraOffset"].Value = drugEraOffset;
            cmd.Parameters.Add("@conditionEraOffset", SqlDbType.BigInt);
            cmd.Parameters["@conditionEraOffset"].Value = conditionEraOffset;
            // NOTE(review): 30000 seconds (~8.3h) — presumably intended as a "never time out"
            // sentinel; confirm whether 30000 ms was meant (CommandTimeout is in seconds).
            cmd.CommandTimeout = 30000;
            using (var reader = cmd.ExecuteReader())
            {
                while (reader.Read())
                {
                    // Yields the open reader positioned on the current row.
                    yield return(reader);
                }
            }
        }
        transaction.Commit();
    }
}
/// <summary>
/// Persists one "set" of CDM tables to the destination database inside a single
/// transaction, then marks the set's chunks complete and logs the elapsed time.
/// </summary>
/// <param name="setIndex">Index of the chunk set to save.</param>
private void SaveSet(int setIndex)
{
    var dbAvailableOnS3 = new DbAvailableOnS3(Settings.Current.Building.BuilderConnectionString);

    Logger.Write(null, LogMessageTypes.Debug, "sss SaveSet BEGIN index - " + setIndex);
    var chunkIds = dbAvailableOnS3.GetChunksId(Settings.Current.Building.Id.Value, setIndex).ToArray();
    Logger.Write(null, LogMessageTypes.Debug, "sss SaveSet chunkIds: " + string.Join(",", chunkIds));

    var timer = new Stopwatch();
    timer.Start();
    using (var connection = SqlConnectionHelper.OpenOdbcConnection(Settings.Current.Building.DestinationConnectionString))
    using (var transaction = connection.BeginTransaction())
    {
        try
        {
            SaveTable(connection, transaction, setIndex, "PERSON");
            SaveTable(connection, transaction, setIndex, "OBSERVATION_PERIOD");
            SaveTable(connection, transaction, setIndex, "PAYER_PLAN_PERIOD");
            SaveTable(connection, transaction, setIndex, "CONDITION_OCCURRENCE");
            SaveTable(connection, transaction, setIndex, "DEATH");
            SaveTable(connection, transaction, setIndex, "DRUG_EXPOSURE");
            SaveTable(connection, transaction, setIndex, "OBSERVATION");
            SaveTable(connection, transaction, setIndex, "VISIT_OCCURRENCE");
            SaveTable(connection, transaction, setIndex, "PROCEDURE_OCCURRENCE");
            SaveTable(connection, transaction, setIndex, "DRUG_ERA");
            SaveTable(connection, transaction, setIndex, "CONDITION_ERA");
            SaveTable(connection, transaction, setIndex, "DEVICE_EXPOSURE");
            SaveTable(connection, transaction, setIndex, "MEASUREMENT");
            SaveTable(connection, transaction, setIndex, "COHORT");

            // Cost tables were split per-domain in CDM v5 and merged into COST in v5.0.1.
            if (Settings.Current.Building.CDM == CDMVersions.v5)
            {
                SaveTable(connection, transaction, setIndex, "DRUG_COST");
                SaveTable(connection, transaction, setIndex, "DEVICE_COST");
                SaveTable(connection, transaction, setIndex, "VISIT_COST");
                SaveTable(connection, transaction, setIndex, "PROCEDURE_COST");
            }
            else if (Settings.Current.Building.CDM == CDMVersions.v501)
            {
                SaveTable(connection, transaction, setIndex, "COST");
            }

            transaction.Commit();
        }
        catch (Exception e)
        {
            // Roll back exactly once — calling Rollback() per chunk (as before) threw
            // InvalidOperationException on the second iteration. Then log per chunk.
            transaction.Rollback();
            foreach (var chunkId in chunkIds)
            {
                Logger.WriteError(chunkId, e);
                Logger.Write(chunkId, LogMessageTypes.Debug, "Rollback - Complete");
            }
        }
    }

    // NOTE(review): chunks are marked complete even when the save failed and rolled back
    // (preserved from the original flow) — confirm whether failed sets should be retried instead.
    var dbChunk = new DbChunk(Settings.Current.Building.BuilderConnectionString);
    foreach (var chunkId in chunkIds)
    {
        dbChunk.ChunkComplete(chunkId);
    }

    timer.Stop();
    Logger.Write(null, LogMessageTypes.Info, string.Format("Save - {0} ms", timer.ElapsedMilliseconds));
}
/// <summary>
/// Initializes the data context using the entity connection string resolved by
/// <see cref="SqlConnectionHelper.GetEntityConnectionString"/>.
/// </summary>
public AnonymousBidderDataContext() : base(SqlConnectionHelper.GetEntityConnectionString())
{
}
/// <summary>
/// Unloads raw source data for every not-yet-moved chunk to S3 via Redshift UNLOAD.
/// First stores per-query metadata (once, using the first chunk's SQL), then processes
/// chunks in parallel (2 chunks x 5 queries), marking each chunk created on success.
/// </summary>
private void MoveChunkDataToS3()
{
    var chunkIds = _dbChunk.GetNotMovedToS3Chunks(Settings.Settings.Current.Building.Id.Value).ToArray();
    // Nothing pending — nothing to do.
    if (chunkIds.Length == 0)
    {
        return;
    }

    var baseFolder = $"{Settings.Settings.Current.Bucket}/{Settings.Settings.Current.Building.Vendor}/{Settings.Settings.Current.Building.Id}/raw";
    Console.WriteLine("S3 raw folder - " + baseFolder);

    // Pass 1: store metadata once per query definition (using the first chunk's SQL as a template).
    Parallel.ForEach(Settings.Settings.Current.Building.SourceQueryDefinitions, queryDefinition =>
    {
        // Provider/location/care-site queries are handled elsewhere.
        if (queryDefinition.Providers != null) { return; }
        if (queryDefinition.Locations != null) { return; }
        if (queryDefinition.CareSites != null) { return; }

        var sql = GetSqlHelper.GetSql(Settings.Settings.Current.Building.SourceEngine.Database,
                                      queryDefinition.GetSql(Settings.Settings.Current.Building.Vendor,
                                                             Settings.Settings.Current.Building.SourceSchemaName),
                                      Settings.Settings.Current.Building.SourceSchemaName);
        if (string.IsNullOrEmpty(sql)) { return; }

        sql = string.Format(sql, chunkIds[0]);
        if (queryDefinition.FieldHeaders == null)
        {
            StoreMetadataToS3(queryDefinition, sql);
        }
    });

    // Pass 2: unload every (chunk, query) pair to S3.
    Parallel.ForEach(chunkIds, new ParallelOptions { MaxDegreeOfParallelism = 2 }, cId =>
    {
        var chunkId = cId;
        Parallel.ForEach(Settings.Settings.Current.Building.SourceQueryDefinitions,
                         new ParallelOptions { MaxDegreeOfParallelism = 5 },
                         queryDefinition =>
        {
            try
            {
                if (queryDefinition.Providers != null) { return; }
                if (queryDefinition.Locations != null) { return; }
                if (queryDefinition.CareSites != null) { return; }

                var sql = GetSqlHelper.GetSql(Settings.Settings.Current.Building.SourceEngine.Database,
                                              queryDefinition.GetSql(Settings.Settings.Current.Building.Vendor,
                                                                     Settings.Settings.Current.Building.SourceSchemaName),
                                              Settings.Settings.Current.Building.SourceSchemaName);
                if (string.IsNullOrEmpty(sql)) { return; }

                // The query template has a {0} placeholder for the chunk id.
                sql = string.Format(sql, chunkId);

                var personIdField = queryDefinition.GetPersonIdFieldName();
                var tmpTableName = "#" + queryDefinition.FileName + "_" + chunkId;
                var folder = $"{baseFolder}/{chunkId}/{queryDefinition.FileName}";
                var fileName = $@"{folder}/{queryDefinition.FileName}";

                // Stage into a temp table sorted/distributed by person id, then UNLOAD it
                // gzip-compressed to S3.
                // SECURITY NOTE(review): AWS credentials are interpolated into the SQL text;
                // prefer an IAM role / credential-free UNLOAD if the cluster supports it.
                var unloadQuery = string.Format(@"create table {0} sortkey ({1}) distkey ({1}) as {2}; " +
                                                @"UNLOAD ('select * from {0} order by {1}') to 's3://{3}' " +
                                                @"DELIMITER AS '\t' " +
                                                @"credentials 'aws_access_key_id={4};aws_secret_access_key={5}' " +
                                                @"GZIP ALLOWOVERWRITE PARALLEL ON",
                                                tmpTableName, //0
                                                personIdField, //1
                                                sql, //2
                                                fileName, //3
                                                Settings.Settings.Current.S3AwsAccessKeyId, //4
                                                Settings.Settings.Current.S3AwsSecretAccessKey); //5

                using (var connection = SqlConnectionHelper.OpenOdbcConnection(Settings.Settings.Current.Building.SourceConnectionString))
                using (var c = new OdbcCommand(unloadQuery, connection))
                {
                    // Effectively "no timeout": UNLOAD of a large chunk can run for hours.
                    c.CommandTimeout = 999999999;
                    c.ExecuteNonQuery();
                }
            }
            catch (Exception e)
            {
                Logger.WriteError(chunkId, e);
                throw;
            }
        });

        _dbChunk.ChunkCreated(chunkId, Settings.Settings.Current.Building.Id.Value);
        Console.WriteLine("Raw data for chunkId=" + chunkId + " is available on S3");
    });
}
/// <summary>
/// Drops and recreates the SimplecrudDemoWebsite MySQL database, then creates and
/// seeds the demo tables (car, users, guidtest) including 100 sample users.
/// Failures during drop/create are logged and abort the seeding step.
/// </summary>
private static void SetupDB()
{
    // Bootstrap connection settings for the local MySQL server.
    // NOTE(review): credentials are placeholders; real secrets must not live in source.
    string _userName = "******";
    string _password = "******";
    string _hostName = "localhost";
    int _port = 3306;
    string _dbName = "sys";
    var mysqlConnectionString = $"Server={_hostName};Port={_port};User Id={_userName};Password={_password};Database={_dbName};SslMode=None";
    var dbrecreated = false;

    SimpleCRUD.SetDialect(SimpleCRUD.Dialect.MySQL);

    // Recreate the demo database from scratch; either step failing is logged, and a
    // failed CREATE leaves dbrecreated false so we skip seeding below.
    using (var connection = SqlConnectionHelper.GetOpenConnection(mysqlConnectionString))
    {
        try
        {
            connection.Execute(@" DROP DATABASE SimplecrudDemoWebsite; ");
        }
        catch (Exception ex)
        {
            Debug.WriteLine("database drop failed - close and reopen VS and try again:" + ex.Message);
        }
        try
        {
            connection.Execute(@" CREATE DATABASE SimplecrudDemoWebsite; ");
            dbrecreated = true;
        }
        catch (Exception ex)
        {
            Debug.WriteLine("database create failed - close and reopen VS and try again:" + ex.Message);
        }
    }

    if (!dbrecreated)
    {
        return;
    }

    // Reconnect against the freshly-created database using the configured app connection string.
    mysqlConnectionString = AppConfigurationHelper.GetAppSettings("AppSettings:MySqlConnectionString");
    using (var connection = SqlConnectionHelper.GetOpenConnection(mysqlConnectionString))
    {
        connection.Execute(@" DROP TABLE IF EXISTS `car`; CREATE TABLE `car` ( `Id` int(11) NOT NULL AUTO_INCREMENT, `Make` varchar(100) NOT NULL, `ModelName` varchar(100) NOT NULL, PRIMARY KEY (`Id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; ");
        connection.Insert(new CarViewModel() { Make = "Honda", ModelName = "Civic" });

        connection.Execute(@" DROP TABLE IF EXISTS `users`; CREATE TABLE `users` ( `UserId` int(11) NOT NULL AUTO_INCREMENT, `FirstName` varchar(100) NOT NULL, `LastName` varchar(100) NOT NULL, `intAge` int(11) NOT NULL, PRIMARY KEY (`UserId`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; ");
        connection.Insert(new UserViewModel() { Age = 42, FirstName = "Jim", LastName = "Smith" });

        connection.Execute(@" DROP TABLE IF EXISTS `guidtest`; CREATE TABLE `guidtest` ( `Id` int(11) Not NULL AUTO_INCREMENT, `guid` char(36) NULL, `name` varchar(50) NOT NULL, PRIMARY KEY (`Id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; ");
        connection.Insert(new GUIDTestViewModel { guid = Guid.NewGuid().ToString(), name = "Example" });

        // Seed 100 sample users (x = 1..100), matching the original do/while bounds.
        for (int x = 1; x < 101; x++)
        {
            connection.Insert(new User { FirstName = "Jim ", LastName = "Smith " + x, Age = x });
        }
    }
}
private void GetPropertyValuesFromDatabase(string userName, SettingsPropertyValueCollection svc) { if (HostingEnvironment.IsHosted && EtwTrace.IsTraceEnabled(4, 8)) { EtwTrace.Trace(EtwTraceType.ETW_TYPE_PROFILE_BEGIN, HttpContext.Current.WorkerRequest); } HttpContext current = HttpContext.Current; string[] names = null; string values = null; byte[] buffer = null; if (current != null) { if (!current.Request.IsAuthenticated) { string anonymousID = current.Request.AnonymousID; } else { string name = current.User.Identity.Name; } } try { SqlConnectionHolder connection = null; SqlDataReader reader = null; try { connection = SqlConnectionHelper.GetConnection(this._sqlConnectionString, true); this.CheckSchemaVersion(connection.Connection); SqlCommand command = new SqlCommand("dbo.aspnet_Profile_GetProperties", connection.Connection) { CommandTimeout = this.CommandTimeout, CommandType = CommandType.StoredProcedure }; command.Parameters.Add(this.CreateInputParam("@ApplicationName", SqlDbType.NVarChar, this.ApplicationName)); command.Parameters.Add(this.CreateInputParam("@UserName", SqlDbType.NVarChar, userName)); command.Parameters.Add(this.CreateInputParam("@CurrentTimeUtc", SqlDbType.DateTime, DateTime.UtcNow)); reader = command.ExecuteReader(CommandBehavior.SingleRow); if (reader.Read()) { names = reader.GetString(0).Split(new char[] { ':' }); values = reader.GetString(1); int length = (int)reader.GetBytes(2, 0L, null, 0, 0); buffer = new byte[length]; reader.GetBytes(2, 0L, buffer, 0, length); } } finally { if (connection != null) { connection.Close(); connection = null; } if (reader != null) { reader.Close(); } } ProfileModule.ParseDataFromDB(names, values, buffer, svc); if (HostingEnvironment.IsHosted && EtwTrace.IsTraceEnabled(4, 8)) { EtwTrace.Trace(EtwTraceType.ETW_TYPE_PROFILE_END, HttpContext.Current.WorkerRequest, userName); } } catch { throw; } }
/// <summary>
/// Truncates the transactional outbox table so each test starts from an empty queue.
/// </summary>
protected static async Task CleanupOutboxAsync()
{
    await using (var sqlConnection = await SqlConnectionHelper.CreateMicrosoftDataSqlConnectionAsync())
    {
        await sqlConnection.TruncateTransactionalOutboxTableAsync();
    }
}
/// <summary>
/// Runs dbo.aspnet_Profile_GetProfiles with the supplied filter parameters and
/// materializes one page of ProfileInfo rows.
/// </summary>
/// <param name="args">Extra SqlParameters appended to the stored-procedure call.</param>
/// <param name="authenticationOption">Filter on anonymous/authenticated/all profiles.</param>
/// <param name="pageIndex">Zero-based page index; must be non-negative.</param>
/// <param name="pageSize">Rows per page; must be at least 1.</param>
/// <param name="totalRecords">Total matching rows (second result set when present,
/// otherwise the page row count).</param>
/// <exception cref="ArgumentException">Paging arguments are out of range.</exception>
private ProfileInfoCollection GetProfilesForQuery(SqlParameter[] args, ProfileAuthenticationOption authenticationOption, int pageIndex, int pageSize, out int totalRecords)
{
    if (pageIndex < 0)
    {
        throw new ArgumentException(System.Web.SR.GetString("PageIndex_bad"), "pageIndex");
    }
    if (pageSize < 1)
    {
        throw new ArgumentException(System.Web.SR.GetString("PageSize_bad"), "pageSize");
    }

    // BUGFIX: compute in long from the start — `(pageIndex * pageSize) + pageSize` was an
    // int multiply that could overflow before being widened, letting oversized windows
    // slip past the Int32.MaxValue check.
    long num = ((long)pageIndex * pageSize) + pageSize - 1L;
    if (num > 0x7fffffffL)
    {
        throw new ArgumentException(System.Web.SR.GetString("PageIndex_PageSize_bad"), "pageIndex and pageSize");
    }

    // The former outer `try { ... } catch { throw; }` wrapper (decompiler residue) was removed.
    SqlConnectionHolder connection = null;
    SqlDataReader reader = null;
    try
    {
        connection = SqlConnectionHelper.GetConnection(this._sqlConnectionString, true);
        this.CheckSchemaVersion(connection.Connection);
        SqlCommand command = new SqlCommand("dbo.aspnet_Profile_GetProfiles", connection.Connection)
        {
            CommandTimeout = this.CommandTimeout,
            CommandType = CommandType.StoredProcedure
        };
        command.Parameters.Add(this.CreateInputParam("@ApplicationName", SqlDbType.NVarChar, this.ApplicationName));
        command.Parameters.Add(this.CreateInputParam("@ProfileAuthOptions", SqlDbType.Int, (int)authenticationOption));
        command.Parameters.Add(this.CreateInputParam("@PageIndex", SqlDbType.Int, pageIndex));
        command.Parameters.Add(this.CreateInputParam("@PageSize", SqlDbType.Int, pageSize));
        foreach (SqlParameter parameter in args)
        {
            command.Parameters.Add(parameter);
        }

        reader = command.ExecuteReader(CommandBehavior.SequentialAccess);
        ProfileInfoCollection infos = new ProfileInfoCollection();
        while (reader.Read())
        {
            // Column order: UserName, IsAnonymous, LastActivityDate, LastUpdatedDate, Size.
            string username = reader.GetString(0);
            bool boolean = reader.GetBoolean(1);
            DateTime lastActivityDate = DateTime.SpecifyKind(reader.GetDateTime(2), DateTimeKind.Utc);
            DateTime lastUpdatedDate = DateTime.SpecifyKind(reader.GetDateTime(3), DateTimeKind.Utc);
            int size = reader.GetInt32(4);
            infos.Add(new ProfileInfo(username, boolean, lastActivityDate, lastUpdatedDate, size));
        }

        // Second result set (when present) carries the total row count.
        totalRecords = infos.Count;
        if (reader.NextResult() && reader.Read())
        {
            totalRecords = reader.GetInt32(0);
        }
        return infos;
    }
    finally
    {
        if (reader != null)
        {
            reader.Close();
        }
        if (connection != null)
        {
            connection.Close();
            connection = null;
        }
    }
}
/// <summary>
/// Deletes the given profiles via dbo.aspnet_Profile_DeleteProfiles, batching user
/// names into comma-separated chunks of at most 0xfa0 (4000) characters. When more
/// than one batch is needed, the batches run inside an ad-hoc SQL transaction so the
/// delete is all-or-nothing.
/// </summary>
/// <param name="usernames">Non-empty array of non-empty user names (max 0x100 entries).</param>
/// <returns>The number of profiles actually deleted.</returns>
public override int DeleteProfiles(string[] usernames)
{
    SecUtility.CheckArrayParameter(ref usernames, true, true, true, 0x100, "usernames");
    int num = 0;
    // True once "BEGIN TRANSACTION" has been issued and not yet committed/rolled back.
    bool flag = false;
    // The former outer `try { ... } catch { throw; }` wrapper and the unreachable trailing
    // `return num;` after it (decompiler residue) were removed; behavior is unchanged.
    SqlConnectionHolder connection = null;
    try
    {
        try
        {
            connection = SqlConnectionHelper.GetConnection(this._sqlConnectionString, true);
            this.CheckSchemaVersion(connection.Connection);
            int length = usernames.Length;
            while (length > 0)
            {
                SqlCommand command;
                // Build a comma-separated batch, stopping before it would exceed 4000 chars.
                string objValue = usernames[usernames.Length - length];
                length--;
                for (int i = usernames.Length - length; i < usernames.Length; i++)
                {
                    if (((objValue.Length + usernames[i].Length) + 1) >= 0xfa0)
                    {
                        break;
                    }
                    objValue = objValue + "," + usernames[i];
                    length--;
                }
                // Open a transaction only when there is more than one batch to send.
                if (!flag && (length > 0))
                {
                    command = new SqlCommand("BEGIN TRANSACTION", connection.Connection);
                    command.ExecuteNonQuery();
                    flag = true;
                }
                command = new SqlCommand("dbo.aspnet_Profile_DeleteProfiles", connection.Connection)
                {
                    CommandTimeout = this.CommandTimeout,
                    CommandType = CommandType.StoredProcedure
                };
                command.Parameters.Add(this.CreateInputParam("@ApplicationName", SqlDbType.NVarChar, this.ApplicationName));
                command.Parameters.Add(this.CreateInputParam("@UserNames", SqlDbType.NVarChar, objValue));
                object obj2 = command.ExecuteScalar();
                if ((obj2 != null) && (obj2 is int))
                {
                    num += (int)obj2;
                }
            }
            if (flag)
            {
                new SqlCommand("COMMIT TRANSACTION", connection.Connection).ExecuteNonQuery();
                flag = false;
            }
        }
        catch
        {
            // Undo any partially-applied batches before propagating.
            if (flag)
            {
                new SqlCommand("ROLLBACK TRANSACTION", connection.Connection).ExecuteNonQuery();
                flag = false;
            }
            throw;
        }
        return num;
    }
    finally
    {
        if (connection != null)
        {
            connection.Close();
            connection = null;
        }
    }
}
/// <summary>
/// Verifies time-to-live expiry: an initial batch is inserted, allowed to age past the
/// TTL, and must come back FailedExpired, while two younger batches must publish
/// successfully. Results are validated both from the processing return value and by
/// re-reading item statuses from the database.
/// </summary>
public async Task TestTransactionalOutboxFailureByTimeToLive()
{
    var failedItemTestDataSizeByBatch = 3;
    var successfulItemTestDataSize = 3;
    var timeToLiveTimeSpan = TimeSpan.FromSeconds(5);
    var testHarnessPublisher = new TestHarnessSqlTransactionalOutboxPublisher();
    var expectedTotalSuccessCount = 0;
    var expectedTotalExpiredCount = 0;

    //*****************************************************************************************
    //* STEP 1 - Prepare/Clear the Queue Table and populate initial Set of items (expected to Fail/Expire)
    //            then wait for them to expire...
    //*****************************************************************************************
    await SystemDataSqlTestHelpers.PopulateTransactionalOutboxTestDataAsync(failedItemTestDataSizeByBatch);
    expectedTotalExpiredCount += failedItemTestDataSizeByBatch;

    // Wait one second past the TTL so the first batch is definitively expired.
    await Task.Delay(timeToLiveTimeSpan + TimeSpan.FromSeconds(1));

    //*****************************************************************************************
    //* STEP 2 - Add a Second & Third batch of items with different insertion Timestamps to
    //            ensure only the first set expire as expected...
    //*****************************************************************************************
    await SystemDataSqlTestHelpers.PopulateTransactionalOutboxTestDataAsync(successfulItemTestDataSize, false);
    expectedTotalSuccessCount += successfulItemTestDataSize;

    //Insert in a second batch to force different Creation Dates at the DB level...
    await SystemDataSqlTestHelpers.PopulateTransactionalOutboxTestDataAsync(successfulItemTestDataSize, false);
    expectedTotalSuccessCount += successfulItemTestDataSize;

    //*****************************************************************************************
    //* STEP 3 - Process Outbox and get Results
    //*****************************************************************************************
    await using var sqlConnection = await SqlConnectionHelper.CreateSystemDataSqlConnectionAsync();
    var processingResults = await sqlConnection.ProcessPendingOutboxItemsAsync(testHarnessPublisher, new OutboxProcessingOptions()
    {
        TimeSpanToLive = timeToLiveTimeSpan
    });

    //*****************************************************************************************
    //* STEP 4 - Validate Results returned!
    //*****************************************************************************************
    Assert.AreEqual(expectedTotalExpiredCount, processingResults.FailedItems.Count);
    Assert.AreEqual(expectedTotalSuccessCount, processingResults.SuccessfullyPublishedItems.Count);

    //We expect all items to be processed before any item is failed....
    //So the First Item will be repeated as the 10'th item after the next 9 are also attempted...
    processingResults.SuccessfullyPublishedItems.ForEach(i => { Assert.AreEqual(OutboxItemStatus.Successful, i.Status); });
    processingResults.FailedItems.ForEach(i => { Assert.AreEqual(OutboxItemStatus.FailedExpired, i.Status); });

    //*****************************************************************************************
    //* STEP 5 - Validate Results In the DB!
    //*****************************************************************************************
    await using var sqlTransaction2 = (SqlTransaction) await sqlConnection.BeginTransactionAsync().ConfigureAwait(false);
    var outboxProcessor = new DefaultSqlServerTransactionalOutboxProcessor<string>(sqlTransaction2, testHarnessPublisher);
    var outboxRepository = outboxProcessor.OutboxRepository;

    var successfulItems = await outboxRepository.RetrieveOutboxItemsAsync(OutboxItemStatus.Successful);
    Assert.AreEqual(expectedTotalSuccessCount, successfulItems.Count);
    successfulItems.ForEach(i => { Assert.AreEqual(OutboxItemStatus.Successful, i.Status); });

    var failedItems = await outboxRepository.RetrieveOutboxItemsAsync(OutboxItemStatus.FailedExpired);
    Assert.AreEqual(expectedTotalExpiredCount, failedItems.Count);
    // BUGFIX: validate the items read back from the DB — the original re-asserted
    // processingResults.FailedItems here (copy-paste), leaving the DB statuses unchecked.
    failedItems.ForEach(i => { Assert.AreEqual(OutboxItemStatus.FailedExpired, i.Status); });
}
/// <summary>
/// Opens an ODBC-backed connection for this engine's configured <c>Database</c>.
/// </summary>
/// <param name="odbcConnectionString">The ODBC connection string to open.</param>
/// <returns>An open <see cref="IDbConnection"/> created by the shared helper.</returns>
public virtual IDbConnection GetConnection(string odbcConnectionString)
{
    // Delegate all connection construction to the shared helper.
    IDbConnection connection = SqlConnectionHelper.OpenConnection(odbcConnectionString, Database);
    return connection;
}
/// <summary>
/// End-to-end happy path: 100 items are queued, processed, and every one must publish
/// successfully — verified against the publisher callback, the processing results, and
/// the statuses persisted in the database.
/// </summary>
public async Task TestTransactionalOutboxEndToEndSuccessfulProcessing()
{
    //Organize
    await using var sqlConnection = await SqlConnectionHelper.CreateSystemDataSqlConnectionAsync().ConfigureAwait(false);

    //*****************************************************************************************
    //* STEP 1 - Prepare/Clear the Queue Table
    //*****************************************************************************************
    await SystemDataSqlTestHelpers.PopulateTransactionalOutboxTestDataAsync(100);

    //*****************************************************************************************
    //* STEP 2 - Executing processing of the Pending Items in the Queue...
    //*****************************************************************************************
    //Execute Processing of Items just inserted!
    //NOTE: We need to re-initialize a NEW Transaction and Processor to correctly simulate this running separately!
    var publishedItemList = new List<ISqlTransactionalOutboxItem<Guid>>();
    // The harness publisher records every item it is asked to publish so we can
    // cross-check against the processing results below.
    var testPublisher = new TestHarnessSqlTransactionalOutboxPublisher((item, isFifoEnabled) =>
    {
        publishedItemList.Add(item);
        TestContext.WriteLine($"Successfully Published Item: {item.UniqueIdentifier}");
        return (Task.CompletedTask);
    }
    );

    var publishedResults = await sqlConnection
        .ProcessPendingOutboxItemsAsync(testPublisher, new OutboxProcessingOptions())
        .ConfigureAwait(false);

    //Assert results
    Assert.AreEqual(publishedItemList.Count, publishedResults.SuccessfullyPublishedItems.Count);
    Assert.AreEqual(0, publishedResults.FailedItems.Count);

    //Assert Unique Items all match
    var publishedItemLookup = publishedItemList.ToLookup(i => i.UniqueIdentifier);
    publishedResults.SuccessfullyPublishedItems.ForEach(r =>
    {
        Assert.IsTrue(publishedItemLookup.Contains(r.UniqueIdentifier));
    });

    //*****************************************************************************************
    //* STEP 3 - Retrieve and Validate Data is updated and no pending Items Remain...
    //*****************************************************************************************
    //Assert All Items in the DB are Successful!
    await using var sqlTransaction3 = (SqlTransaction) await sqlConnection.BeginTransactionAsync().ConfigureAwait(false);
    var outboxProcessor = new DefaultSqlServerTransactionalOutboxProcessor<string>(sqlTransaction3, testPublisher);

    var successfulResultsFromDb = await outboxProcessor.OutboxRepository
        .RetrieveOutboxItemsAsync(OutboxItemStatus.Successful)
        .ConfigureAwait(false);

    //Assert the results from the DB match those returned from the Processing method...
    Assert.AreEqual(publishedResults.SuccessfullyPublishedItems.Count, successfulResultsFromDb.Count);
    successfulResultsFromDb.ForEach(dbItem =>
    {
        Assert.AreEqual(OutboxItemStatus.Successful, dbItem.Status);
        Assert.AreEqual(1, dbItem.PublishAttempts);
    });
}
/// <summary>
/// Loads the table that backs the currently selected tab into its DataGridView.
/// Picks the query by tab index, fills a DataSet, and binds the matching grid.
/// The connection/command/adapter are now disposed even when Open or Fill throws
/// (the originals leaked on any exception).
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // Choose the query for the active tab.
    string Query;
    switch (tabControl1.SelectedIndex)
    {
        case 0:
            Query = @"SELECT Книги.Товар_ID, Книги.Наименование, Книги.Количество, Книги.Цена, Отделы.Отдел FROM Отделы INNER JOIN Книги ON Отделы.Отдел_ID = Книги.Отдел_ID; ";
            break;
        case 1: Query = "Select * from Авторы"; break;
        case 2: Query = "Select * from Издания"; break;
        case 3: Query = "Select * from Сотрудники"; break;
        case 4: Query = "Select * from Склад"; break;
        case 5: Query = "Select * from Заказы"; break;
        case 6: Query = "Select * from Издательства"; break;
        case 7: Query = "Select * from Классификатор"; break;
        case 8: Query = "Select * from ФИО_Авторов"; break;
        case 9: Query = "Select * from Продажа"; break;
        default: Query = ""; break;
    }

    var connect = SqlConnectionHelper.GetSqlConnection();
    try
    {
        using (SqlCommand command = new SqlCommand(Query, connect))
        using (SqlDataAdapter sotrDA = new SqlDataAdapter())
        {
            command.CommandTimeout = 30;
            sotrDA.SelectCommand = command;

            // Preserved behavior: report connectivity to the user; on failure we still
            // fall through and let Fill attempt (and surface) the real error.
            try
            {
                connect.Open();
                MessageBox.Show("Соединение прошло успешно");
            }
            catch
            {
                MessageBox.Show("Ошибка соединения");
            }

            DataSet ds = new DataSet();
            ds.Clear();
            sotrDA.Fill(ds, "Магазин");

            // Bind the filled table to the grid belonging to the active tab.
            switch (tabControl1.SelectedIndex)
            {
                case 0: dataGridView1.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 1: dataGridView2.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 2: dataGridView3.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 3: dataGridView5.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 4: dataGridView6.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 5: dataGridView7.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 6: dataGridView9.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 7: dataGridView4.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 8: dataGridView10.DataSource = ds.Tables["Магазин"].DefaultView; break;
                case 9: dataGridView11.DataSource = ds.Tables["Магазин"].DefaultView; break;
            }
        }
    }
    finally
    {
        // Always release the connection, even if Fill threw.
        connect.Close();
    }
}
////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////
// Loads all profile property values for userName via the
// dbo.aspnet_Profile_GetProperties stored procedure and hands the raw
// names/values/binary blobs to ProfileModule.ParseDataFromDB to populate svc.
//
// Fixes vs. original:
//  - the SqlDataReader is now closed BEFORE its connection holder is released
//    (the original released the connection first, then closed the reader);
//  - the outer no-op "try { ... } catch { throw; }" wrapper was removed —
//    a bare "throw;" rethrows unchanged, so the wrapper had no effect.
private void GetPropertyValuesFromDatabase(string userName, SettingsPropertyValueCollection svc)
{
    HttpContext context = HttpContext.Current;

    // ETW "begin" marker, emitted only when hosted and AppSvc tracing is enabled.
    if (context != null && HostingEnvironment.IsHosted && EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc))
        EtwTrace.Trace(EtwTraceType.ETW_TYPE_PROFILE_BEGIN, HttpContext.Current.WorkerRequest);

    string[] names = null;
    string values = null;
    byte[] buf = null;
    string sName = null;

    // NOTE(review): sName is computed but never used later in this method. Kept
    // because Request.AnonymousID may have side effects — confirm before deleting.
    if (context != null)
        sName = (context.Request.IsAuthenticated ? context.User.Identity.Name : context.Request.AnonymousID);

    SqlConnectionHolder holder = null;
    SqlDataReader reader = null;
    try
    {
        holder = SqlConnectionHelper.GetConnection(_sqlConnectionString, true);
        CheckSchemaVersion(holder.Connection);

        SqlCommand cmd = new SqlCommand("dbo.aspnet_Profile_GetProperties", holder.Connection);
        cmd.CommandTimeout = CommandTimeout;
        cmd.CommandType = CommandType.StoredProcedure;
        cmd.Parameters.Add(CreateInputParam("@ApplicationName", SqlDbType.NVarChar, ApplicationName));
        cmd.Parameters.Add(CreateInputParam("@UserName", SqlDbType.NVarChar, userName));
        cmd.Parameters.Add(CreateInputParam("@CurrentTimeUtc", SqlDbType.DateTime, DateTime.UtcNow));

        reader = cmd.ExecuteReader(CommandBehavior.SingleRow);
        if (reader.Read())
        {
            // Row layout: 0 = ':'-separated property names, 1 = string values, 2 = binary blob.
            names = reader.GetString(0).Split(':');
            values = reader.GetString(1);

            // GetBytes with a null buffer is a length probe; then read the full blob.
            int size = (int)reader.GetBytes(2, 0, null, 0, 0);
            buf = new byte[size];
            reader.GetBytes(2, 0, buf, 0, size);
        }
    }
    finally
    {
        // Dispose the reader before releasing the connection it depends on.
        if (reader != null)
        {
            reader.Close();
        }
        if (holder != null)
        {
            holder.Close();
            holder = null;
        }
    }

    ProfileModule.ParseDataFromDB(names, values, buf, svc);

    // ETW "end" marker, symmetric with the "begin" above.
    if (context != null && HostingEnvironment.IsHosted && EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc))
        EtwTrace.Trace(EtwTraceType.ETW_TYPE_PROFILE_END, HttpContext.Current.WorkerRequest, userName);
}
// Exports per-chunk raw source data to S3: for each chunk and each chunked source
// query, materializes the query into a temp table and issues a Redshift UNLOAD of
// that table (tab-delimited, gzipped) to s3://<bucket>/<vendor>/<buildingId>/raw/...
//
// NOTE(review): the AWS access key and secret are interpolated directly into the
// UNLOAD SQL text, so any logging of the statement would leak credentials — confirm
// this is acceptable for this deployment.
private static void MoveChunkDataToS3(IEnumerable<int> chunkIds)
{
    // Destination prefix: <bucket>/<vendor>/<buildingId>/raw
    var baseFolder = string.Format("{0}/{1}/{2}/raw", Settings.Current.Bucket, Settings.Current.Building.Vendor, Settings.Current.Building.Id);

    // Up to 2 chunks in flight, each running up to 5 source queries concurrently.
    Parallel.ForEach(chunkIds, new ParallelOptions { MaxDegreeOfParallelism = 2 }, cId =>
    {
        // Local copy of the loop value captured by the inner lambda.
        var chunkId = cId;
        Parallel.ForEach(Settings.Current.Building.SourceQueryDefinitions, new ParallelOptions { MaxDegreeOfParallelism = 5 }, queryDefinition =>
        {
            try
            {
                // Provider / location / care-site queries are not chunked; skip them here.
                if (queryDefinition.Providers != null)
                {
                    return;
                }
                if (queryDefinition.Locations != null)
                {
                    return;
                }
                if (queryDefinition.CareSites != null)
                {
                    return;
                }

                var sql = queryDefinition.GetSql(Settings.Current.Building.SourceEngine.Database, Settings.Current.Building.Vendor, Settings.Current.Building.SourceSchemaName);
                if (string.IsNullOrEmpty(sql))
                {
                    return;
                }

                // The query template carries a {0} placeholder for the chunk id.
                sql = string.Format(sql, chunkId);

                // A query without cached field headers also gets its metadata uploaded.
                if (queryDefinition.FieldHeaders == null)
                {
                    StoreMetadataToS3(queryDefinition, sql);
                }

                var personIdField = queryDefinition.GetPersonIdFieldName();
                // Temp table name is unique per query/chunk pair.
                var tmpTableName = "#" + queryDefinition.FileName + "_" + chunkId;
                var folder = string.Format("{0}/{1}/{2}", baseFolder, chunkId, queryDefinition.FileName);
                var fileName = string.Format(@"{0}/{1}", folder, queryDefinition.FileName);

                // CTAS sorted/distributed on the person id, then UNLOAD it in that order
                // so downstream consumers can stream person-contiguous rows.
                var unloadQuery = string.Format(@"create table {0} sortkey ({1}) distkey ({1}) as {2}; " +
                                                @"UNLOAD ('select * from {0} order by {1}') to 's3://{3}' " +
                                                @"DELIMITER AS '\t' " +
                                                @"credentials 'aws_access_key_id={4};aws_secret_access_key={5}' " +
                                                @"GZIP ALLOWOVERWRITE PARALLEL ON",
                    tmpTableName, //0
                    personIdField, //1
                    sql, //2
                    fileName, //3
                    Settings.Current.S3AwsAccessKeyId, //4
                    Settings.Current.S3AwsSecretAccessKey); //5

                using (var connection = SqlConnectionHelper.OpenOdbcConnection(Settings.Current.Building.SourceConnectionString))
                using (var c = new OdbcCommand(unloadQuery, connection))
                {
                    // Effectively "no timeout": the CTAS + UNLOAD can run for a very long time.
                    c.CommandTimeout = 999999999;
                    c.ExecuteNonQuery();
                }
            }
            catch (Exception e)
            {
                Logger.WriteError(chunkId, e);
                // Rethrow so Parallel.ForEach surfaces the failure (as an AggregateException).
                throw;
            }
        });
    });
}
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Deletes the profiles for the given user names by batching the names into
// comma-separated lists kept under 4000 characters and invoking
// dbo.aspnet_Profile_DeleteProfiles once per batch. When more than one batch
// is needed, the batches are wrapped in an explicit SQL transaction so the
// whole delete is atomic; a single batch runs without one.
// Returns the total number of profiles the stored procedure reports deleted.
public override int DeleteProfiles(string[] usernames)
{
    SecUtility.CheckArrayParameter(ref usernames, true, true, true, 256, "usernames");

    int numProfilesDeleted = 0;
    bool beginTranCalled = false;

    try
    {
        SqlConnectionHolder holder = null;
        try
        {
            holder = SqlConnectionHelper.GetConnection(_sqlConnectionString, true);
            CheckSchemaVersion(holder.Connection);
            SqlCommand cmd;

            // numUsersRemaing counts names not yet placed into any batch; the next
            // unprocessed name is always usernames[usernames.Length - numUsersRemaing].
            int numUsersRemaing = usernames.Length;
            while (numUsersRemaing > 0)
            {
                // Seed the batch with the next unprocessed name...
                string allUsers = usernames[usernames.Length - numUsersRemaing];
                numUsersRemaing--;
                // ...then append further names until adding one would reach 4000 chars
                // (the stored procedure's NVarChar parameter budget).
                for (int iter = usernames.Length - numUsersRemaing; iter < usernames.Length; iter++)
                {
                    if (allUsers.Length + usernames[iter].Length + 1 >= 4000)
                    {
                        break;
                    }
                    allUsers += "," + usernames[iter];
                    numUsersRemaing--;
                }

                // We don't need to start a transaction if we can finish this in one sql command
                if (!beginTranCalled && numUsersRemaing > 0)
                {
                    cmd = new SqlCommand("BEGIN TRANSACTION", holder.Connection);
                    cmd.ExecuteNonQuery();
                    beginTranCalled = true;
                }

                cmd = new SqlCommand("dbo.aspnet_Profile_DeleteProfiles", holder.Connection);
                cmd.CommandTimeout = CommandTimeout;
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.Add(CreateInputParam("@ApplicationName", SqlDbType.NVarChar, ApplicationName));
                cmd.Parameters.Add(CreateInputParam("@UserNames", SqlDbType.NVarChar, allUsers));
                // The procedure returns the count of profiles deleted for this batch.
                object o = cmd.ExecuteScalar();
                if (o != null && o is int)
                {
                    numProfilesDeleted += (int)o;
                }
            }

            if (beginTranCalled)
            {
                cmd = new SqlCommand("COMMIT TRANSACTION", holder.Connection);
                cmd.ExecuteNonQuery();
                beginTranCalled = false;
            }
        }
        catch
        {
            // Roll back the explicit transaction (if one was started) before rethrowing.
            if (beginTranCalled)
            {
                SqlCommand cmd = new SqlCommand("ROLLBACK TRANSACTION", holder.Connection);
                cmd.ExecuteNonQuery();
                beginTranCalled = false;
            }
            throw;
        }
        finally
        {
            if (holder != null)
            {
                holder.Close();
                holder = null;
            }
        }
    }
    catch
    {
        throw;
    }

    return(numProfilesDeleted);
}
// Integration test helper: processes an outbox whose publisher ALWAYS throws, and
// verifies that every item eventually transitions to FailedAttemptsExceeded after
// exactly maxPublishingAttempts attempts, with no item ever published successfully.
// Returns the chronological list of publish attempts for pattern-based assertions
// by the caller.
public async Task <List <ISqlTransactionalOutboxItem <Guid> > > DoTestTransactionalOutboxCrawlingOfBlockingFailureItems(
    int testDataSize,
    int maxPublishingAttempts,
    bool enforceFifoProcessing,
    bool throwExceptionOnFailedItem)
{
    //*****************************************************************************************
    //* STEP 1 - Prepare/Clear the Queue Table
    //*****************************************************************************************
    await MicrosoftDataSqlTestHelpers.PopulateTransactionalOutboxTestDataAsync(testDataSize);

    //*****************************************************************************************
    //* STEP 2 - Setup Custom Publisher & Processing Options...
    //*****************************************************************************************
    // Publisher that records every attempt and then always throws, so no item can succeed.
    var publishedAttemptsList = new List <ISqlTransactionalOutboxItem <Guid> >();
    var failingPublisher = new TestHarnessSqlTransactionalOutboxPublisher(
        (i, isFifoEnabled) =>
        {
            publishedAttemptsList.Add(i);
            TestContext.WriteLine($"Successful -- We have intentionally Failed to Publish Item: {i.UniqueIdentifier}");
            //Force an Error on Failure... this should result in ALL Publishing attempts to fail...
            throw new Exception("Failed to Publish!");
        }
    );

    var outboxProcessingOptions = new OutboxProcessingOptions()
    {
        MaxPublishingAttempts = maxPublishingAttempts,
        FifoEnforcedPublishingEnabled = enforceFifoProcessing
    };

    //*****************************************************************************************
    //* STEP 3 - Executing processing of the Pending Items in the Queue...
    //*****************************************************************************************
    //Execute Processing of Items just inserted!
    //NOTE: We need to re-initialize a NEW Transaction and Processor to correctly simulate this running separately!
    ISqlTransactionalOutboxProcessingResults <Guid> publishedResults = null;
    int loopCounter = 0;
    bool handledExceptionSoItsOkToContinue = false;
    do
    {
        // A fresh connection + transaction per iteration simulates independent processor runs.
        await using var sqlConnection = await SqlConnectionHelper.CreateMicrosoftDataSqlConnectionAsync();
        await using var sqlTransaction = (SqlTransaction) await sqlConnection.BeginTransactionAsync().ConfigureAwait(false);

        handledExceptionSoItsOkToContinue = false;
        try
        {
            publishedResults = await sqlTransaction.ProcessPendingOutboxItemsAsync(
                failingPublisher,
                outboxProcessingOptions,
                throwExceptionOnFailedItem
            ).ConfigureAwait(false);
        }
        catch (Exception exc)
        {
            if (throwExceptionOnFailedItem)
            {
                //DO Nothing, as we Expect there to be exceptions when Throw Exception on Failure is Enabled!
                TestContext.WriteLine($"Successfully handled expected Exception: {exc.Message}");
                // Empty results + flag keep the do/while crawling until the queue drains.
                publishedResults = new OutboxProcessingResults <Guid>();
                handledExceptionSoItsOkToContinue = true;
            }
            else
            {
                //IF we get an exception but ThrowExceptionOnFailure is disabled, then this is an issue!
                throw;
            }
        }

        // Commit so the attempt-count updates persist across iterations.
        await sqlTransaction.CommitAsync();

        //Provide Infinite Loop fail-safe...
        loopCounter++;
        Assert.IsTrue(loopCounter <= (testDataSize * maxPublishingAttempts * 2), $"Infinite Loop Breaker Tripped at [{loopCounter}]!");

        //Assert there are never any successfully published items...
        Assert.AreEqual(0, publishedResults.SuccessfullyPublishedItems.Count);
    } while (publishedResults.FailedItems.Count > 0 || (throwExceptionOnFailedItem && handledExceptionSoItsOkToContinue));

    //*****************************************************************************************
    //* STEP 4 - Retrieve and Validate Data in the Database is updated and all have failed out...
    //*****************************************************************************************
    //Assert All Items in the DB are Successful!
    await using var sqlConnection2 = await SqlConnectionHelper.CreateMicrosoftDataSqlConnectionAsync();
    await using var sqlTransaction2 = (SqlTransaction) await sqlConnection2.BeginTransactionAsync().ConfigureAwait(false);

    var outboxProcessor2 = new DefaultSqlServerTransactionalOutboxProcessor <string>(sqlTransaction2, failingPublisher);

    // No item may remain Pending once processing has drained the queue.
    var successfulResultsFromDb = await outboxProcessor2.OutboxRepository
        .RetrieveOutboxItemsAsync(OutboxItemStatus.Pending)
        .ConfigureAwait(false);

    //Assert the results from the DB match those returned from the Processing method...
    Assert.AreEqual(successfulResultsFromDb.Count, 0);

    var failedResultsFromDb = await outboxProcessor2.OutboxRepository
        .RetrieveOutboxItemsAsync(OutboxItemStatus.FailedAttemptsExceeded)
        .ConfigureAwait(false);

    //Assert the results from the DB match those returned from the Processing method...
    // Total recorded attempts must equal (failed items) x (attempts allowed per item).
    Assert.AreEqual(failedResultsFromDb.Count * maxPublishingAttempts, publishedAttemptsList.Count);

    foreach (var dbItem in failedResultsFromDb)
    {
        Assert.AreEqual(OutboxItemStatus.FailedAttemptsExceeded, dbItem.Status);
        Assert.AreEqual(maxPublishingAttempts, dbItem.PublishAttempts);
    }

    //RETURN the Attempted Publishing list for additional validation based on the Pattern
    // we expect for items to be processed.
    return(publishedAttemptsList);
}
// Preloads every lookup referenced by the given entity definitions into the
// in-memory _lookups cache, executing one SQL round-trip per distinct lookup name.
// Also stamps each definition's Vocabulary back-reference to this instance.
private void Load(IEnumerable <EntityDefinition> definitions)
{
    if (definitions == null)
    {
        return;
    }

    foreach (var ed in definitions)
    {
        ed.Vocabulary = this;

        if (ed.Concepts == null)
        {
            continue;
        }

        foreach (var c in ed.Concepts)
        {
            if (c.ConceptIdMappers == null)
            {
                continue;
            }

            foreach (var conceptIdMapper in c.ConceptIdMappers)
            {
                if (!string.IsNullOrEmpty(conceptIdMapper.Lookup))
                {
                    // Each distinct lookup is loaded only once, then cached.
                    if (!_lookups.ContainsKey(conceptIdMapper.Lookup))
                    {
                        string sql = string.Empty;
                        var baseSql = string.Empty;
                        // NOTE(review): sqlFileDestination is never assigned after this
                        // initialization, so the "[file]" error messages below always
                        // print an empty path — confirm whether it was meant to hold
                        // the lookup's source file path.
                        var sqlFileDestination = string.Empty;

                        // The lookup query template embeds the shared scaffold via {base}
                        // and the vocabulary schema name via {sc}.
                        baseSql = File.ReadAllText(Path.Combine(_settings.Folder, "ETL", "Common", "Lookups", "Base.sql"));
                        sql = _settings.Lookups[conceptIdMapper.Lookup];
                        sql = sql.Replace("{base}", baseSql);
                        sql = sql.Replace("{sc}", _settings.ConversionSettings.VocabularySchema);

                        try
                        {
                            Console.WriteLine(conceptIdMapper.Lookup + " - Loading...");
                            var timer = new Stopwatch();
                            timer.Start();
                            _logHub.Clients.All.SendAsync("Log", string.Format("{0}| {1}", DateTime.Now, conceptIdMapper.Lookup + " - Loading into RAM...")).Wait();

                            // CommandTimeout = 0 disables the ODBC command timeout for big lookups.
                            using (var connection = SqlConnectionHelper.OpenOdbcConnection(_settings.VocabularyConnectionString))
                            using (var command = new OdbcCommand(sql, connection) { CommandTimeout = 0 })
                            using (var reader = command.ExecuteReader())
                            {
                                Console.WriteLine(conceptIdMapper.Lookup + " - filling");
                                var lookup = new Lookup();
                                while (reader.Read())
                                {
                                    var lv = CreateLookupValue(reader);
                                    lookup.Add(lv);
                                }

                                _lookups.Add(conceptIdMapper.Lookup, lookup);
                            }

                            Console.WriteLine(conceptIdMapper.Lookup + " - Done");
                            timer.Stop();
                            _logHub.Clients.All.SendAsync("Log", string.Format("{0}| {1}", DateTime.Now, $"DONE - {timer.ElapsedMilliseconds} ms | KeysCount={_lookups[conceptIdMapper.Lookup].KeysCount}")).Wait();
                        }
                        catch (Exception e)
                        {
                            // Report the failing lookup to both the console and the log hub,
                            // then rethrow so the caller sees the original failure.
                            Console.WriteLine("Lookup error [file]: " + sqlFileDestination);
                            Console.WriteLine("Lookup error [query]: " + sql);
                            _logHub.Clients.All.SendAsync("Log", string.Format("{0}| {1}", DateTime.Now, "Lookup error [file]: " +
                                                                                            sqlFileDestination)).Wait();
                            _logHub.Clients.All.SendAsync("Log", string.Format("{0}| {1}", DateTime.Now, "Lookup error [query]: " + sql)).Wait();
                            throw;
                        }
                    }
                }
            }
        }
    }
}