/// <summary>
/// Runs a full synchronization under a distributed lock: serializes the source data,
/// indexes it, optionally runs the delete process, then logs run metrics.
/// </summary>
/// <returns>Metrics for this run (indexed/deleted counts, bulk responses, timings).</returns>
public SyncResponse Exec()
{
    var startedOn = DateTime.UtcNow;
    log.Info("process started at " + startedOn.NormalizedFormat());

    client = new ElasticsearchClient(_config.ElasticSearchConfiguration);

    // The lock prevents two sync processes from running against the same index concurrently.
    using (var _lock = new SyncLock(client, LogIndex, LockType))
    {
        DateTime? lastSyncDate = ConfigureIncrementalProcess(_config.SqlCommand, _config.ColumnsToCompareWithLastSyncDate);

        var data = GetSerializedObject();
        log.Info(String.Format("{0} objects have been serialized.", data.Count()));

        var syncResponse = new SyncResponse(startedOn);
        syncResponse = IndexProcess(data, syncResponse);

        if (_config.DeleteConfiguration != null)
        {
            Dictionary<object, Dictionary<string, object>> deleteData = null;

            _config.SqlConnection.Open();
            try
            {
                // Reuse the last sync date so the delete query is also incremental.
                if (lastSyncDate != null)
                    ConfigureIncrementalProcess(_config.DeleteConfiguration.SqlCommand, _config.DeleteConfiguration.ColumnsToCompareWithLastSyncDate, lastSyncDate);

                using (SqlDataReader rdr = _config.DeleteConfiguration.SqlCommand.ExecuteReader())
                {
                    deleteData = rdr.Serialize();
                }
            }
            finally
            {
                // BUG FIX: the original called Close() outside any try/finally, leaking the
                // connection when ExecuteReader/Serialize threw.
                _config.SqlConnection.Close();
            }

            syncResponse = DeleteProcess(deleteData, syncResponse);
        }

        syncResponse = Log(syncResponse);

        log.Info(String.Format(
            "process duration: {0}ms",
            Math.Truncate((syncResponse.EndedOn - syncResponse.StartedOn).TotalMilliseconds)));

        return syncResponse;
    }
}
/// <summary>
/// LogProcess in {logIndex}/{logType} the synchronization results and metrics
/// </summary>
/// <param name="syncResponse">Run metrics to persist; EndedOn is stamped here.</param>
/// <returns>The same response instance, with EndedOn set.</returns>
private SyncResponse LogProcess(SyncResponse syncResponse)
{
    stopwatch.Start();
    syncResponse.EndedOn = DateTime.UtcNow;

    // One summary entry per bulk response (deferred; enumerated on serialization).
    var bulkSummaries = syncResponse.BulkResponses.Select(b => new
    {
        success = b.Success,
        httpStatusCode = b.HttpStatusCode,
        affectedDocuments = b.AffectedDocuments,
        duration = b.Duration + "ms",
        exception = b.ESexception != null ? ((Exception)b.ESexception).Message : null
    });

    var bulkBody = ElasticsearchHelpers.GetPartialIndexBulk(LogIndex, LogType, new
    {
        startedOn = syncResponse.StartedOn,
        endedOn = syncResponse.EndedOn,
        success = syncResponse.Success,
        indexedDocuments = syncResponse.IndexedDocuments,
        deletedDocuments = syncResponse.DeletedDocuments,
        bulks = bulkSummaries
    });

    // Incremental runs also persist the "last sync" marker document.
    var isIncremental = _config.ColumnsToCompareWithLastSyncDate != null
        && _config.ColumnsToCompareWithLastSyncDate.Any();
    if (isIncremental)
    {
        bulkBody += ElasticsearchHelpers.GetPartialIndexBulk(LogIndex, LastLogType, LastLogID, new { date = syncResponse.StartedOn });
    }

    client.Bulk(bulkBody);

    stopwatch.Stop();
    log.Info(String.Format("log index duration: {0}ms", stopwatch.ElapsedMilliseconds));
    stopwatch.Reset();

    return syncResponse;
}
/// <summary>
/// Runs the full synchronization under a distributed lock: delete process first,
/// then the index process (optionally paged by RowNumber windows), then logs metrics.
/// </summary>
/// <param name="force">Passed to <see cref="SyncLock"/>; forces lock acquisition.</param>
/// <returns>Metrics for this run (indexed/deleted counts, bulk responses, timings).</returns>
public SyncResponse Exec(bool force = false)
{
    try
    {
        var startedOn = DateTime.UtcNow;
        log.Debug("process started at " + startedOn.NormalizedFormat());

        client = new ElasticsearchClient(_config.ElasticSearchConfiguration);

        using (var _lock = new SyncLock(client, LogIndex, LockType, force))
        {
            DateTime? lastSyncDate = ConfigureIncrementalProcess(_config.SqlCommand, _config.ColumnsToCompareWithLastSyncDate);
            log.Info(String.Format("last sync date: {0}", lastSyncDate != null ? lastSyncDate.ToString() : "null"));

            var syncResponse = new SyncResponse(startedOn);

            //DELETE PROCESS
            if (_config.DeleteConfiguration != null)
            {
                Dictionary<object, Dictionary<string, object>> deleteData = null;

                _config.SqlConnection.Open();

                if (lastSyncDate != null)
                {
                    // Make the delete query incremental as well.
                    ConfigureIncrementalProcess(_config.DeleteConfiguration.SqlCommand, _config.DeleteConfiguration.ColumnsToCompareWithLastSyncDate, lastSyncDate);
                }

                using (SqlDataReader rdr = _config.DeleteConfiguration.SqlCommand.ExecuteReader())
                {
                    deleteData = rdr.Serialize();
                }

                _config.SqlConnection.Close();

                syncResponse = DeleteProcess(deleteData, syncResponse);
            }

            //INDEX PROCESS
            if (_config.SqlCommand != null)
            {
                var dataCount = 0;
                try
                {
                    _config.SqlConnection.Open();

                    if (_config.PageSize.HasValue)
                    {
                        // Page through RowNumber windows to bound memory for large result sets.
                        var page = 0;
                        var size = _config.PageSize.Value; // HasValue checked above; avoids nullable arithmetic below
                        var commandText = _config.SqlCommand.CommandText;

                        while (true)
                        {
                            var condition = new StringBuilder("(")
                                .Append("RowNumber BETWEEN ")
                                .Append(page * size + 1)
                                .Append(" AND ")
                                .Append(page * size + size)
                                .Append(")")
                                .ToString();

                            _config.SqlCommand.CommandText = AddSqlCondition(commandText, condition);

                            var pageData = GetSerializedObject();
                            var pageDataCount = pageData.Count();
                            dataCount += pageDataCount;
                            log.Info(String.Format("{0} objects have been serialized from page {1}.", pageDataCount, page));

                            IndexProcess(pageData, syncResponse);

                            // Release the page eagerly; pages can be large.
                            pageData.Clear();
                            pageData = null;
                            // NOTE(review): explicit GC.Collect is unusual in production code;
                            // kept to preserve the original's memory behavior between pages.
                            GC.Collect(GC.MaxGeneration);

                            // A short page means the source is exhausted.
                            if (pageDataCount < size)
                            {
                                break;
                            }

                            page++;
                        }
                    }
                    else
                    {
                        var data = GetSerializedObject();
                        dataCount = data.Count();
                        IndexProcess(data, syncResponse);
                    }

                    log.Info(String.Format("{0} objects have been serialized.", dataCount));
                }
                finally
                {
                    _config.SqlConnection.Close();
                }
            }

            //LOG PROCESS
            syncResponse = LogProcess(syncResponse);
            log.Debug(String.Format("process duration: {0}ms", Math.Truncate((syncResponse.EndedOn - syncResponse.StartedOn).TotalMilliseconds)));

            return syncResponse;
        }
    }
    catch (Exception ex)
    {
        log.Error("an error has occurred: " + ex);
        // BUG FIX: 'throw ex;' resets the stack trace (CA2200); bare 'throw;' preserves it.
        throw;
    }
}
/// <summary>
/// Deletes the given documents from the index in batches of _config.BulkSize,
/// accumulating per-bulk metrics into <paramref name="syncResponse"/>.
/// </summary>
/// <param name="data">Documents to delete, keyed by document id.</param>
/// <param name="syncResponse">Run metrics accumulator; mutated and returned.</param>
private SyncResponse DeleteProcess(Dictionary<object, Dictionary<string, object>> data, SyncResponse syncResponse)
{
    // Use the O(1) Count property, hoisted out of the loop condition
    // (the original re-invoked the Enumerable.Count() extension every iteration).
    var total = data.Count;

    for (var offset = 0; offset < total; offset += _config.BulkSize)
    {
        // Materialize the next batch. NOTE(review): Skip() re-walks the dictionary from
        // the start each batch; acceptable for the bulk sizes in use here.
        var batch = data.Skip(offset).Take(_config.BulkSize).ToDictionary(x => x.Key, x => x.Value);
        var bulkResponse = BulkDeleteProcess(batch);

        syncResponse.BulkResponses.Add(bulkResponse);
        syncResponse.DeletedDocuments += bulkResponse.AffectedDocuments;
        syncResponse.Success = syncResponse.Success && bulkResponse.Success;

        log.Info(String.Format("bulk duration: {0}ms. so far {1} documents have been deleted successfully.", bulkResponse.Duration, syncResponse.DeletedDocuments));
    }

    return syncResponse;
}
/// <summary>
/// Indexes the given documents in batches of _config.BulkSize, accumulating metrics
/// into <paramref name="syncResponse"/>. Bulk responses are only recorded when
/// ConfigSection.Default.Index.LogBulk is enabled.
/// </summary>
/// <param name="data">Documents to index, keyed by document id.</param>
/// <param name="syncResponse">Run metrics accumulator; mutated and returned.</param>
private SyncResponse IndexProcess(Dictionary<object, Dictionary<string, object>> data, SyncResponse syncResponse)
{
    // Use the O(1) Count property, hoisted out of the loop condition
    // (the original re-invoked the Enumerable.Count() extension every iteration).
    var total = data.Count;

    for (var offset = 0; offset < total; offset += _config.BulkSize)
    {
        var batch = data.Skip(offset).Take(_config.BulkSize).ToDictionary(x => x.Key, x => x.Value);
        var bulkResponse = BulkIndexProcess(batch);

        // Recording every bulk response is optional to keep the log document small.
        if (ConfigSection.Default.Index.LogBulk)
        {
            syncResponse.BulkResponses.Add(bulkResponse);
        }

        syncResponse.IndexedDocuments += bulkResponse.AffectedDocuments;
        syncResponse.Success = syncResponse.Success && bulkResponse.Success;

        log.Info(String.Format("bulk duration: {0}ms. so far {1} documents have been indexed successfully.", bulkResponse.Duration, syncResponse.IndexedDocuments));
    }

    return syncResponse;
}
/// <summary>
/// Log in {logIndex}/{logType} the synchronization results and metrics
/// </summary>
/// <param name="syncResponse">Run metrics to persist; EndedOn is stamped here.</param>
/// <returns>The same response instance, with EndedOn set.</returns>
private SyncResponse Log(SyncResponse syncResponse)
{
    stopwatch.Start();
    syncResponse.EndedOn = DateTime.UtcNow;

    // Entry describing this run, with one summary per bulk response.
    var runEntry = ElasticsearchHelpers.GetPartialIndexBulk(LogIndex, LogType, new
    {
        startedOn = syncResponse.StartedOn,
        endedOn = syncResponse.EndedOn,
        success = syncResponse.Success,
        indexedDocuments = syncResponse.IndexedDocuments,
        deletedDocuments = syncResponse.DeletedDocuments,
        bulks = syncResponse.BulkResponses.Select(b => new
        {
            success = b.Success,
            httpStatusCode = b.HttpStatusCode,
            affectedDocuments = b.AffectedDocuments,
            duration = b.Duration + "ms",
            exception = b.ESexception != null ? ((Exception)b.ESexception).Message : null
        })
    });

    // Marker entry recording when this (now successful) run started.
    var lastSyncEntry = ElasticsearchHelpers.GetPartialIndexBulk(LogIndex, LastLogType, LastLogID, new { date = syncResponse.StartedOn });

    client.Bulk(runEntry + lastSyncEntry);

    stopwatch.Stop();
    log.Debug(String.Format("log index duration: {0}ms", stopwatch.ElapsedMilliseconds));
    stopwatch.Reset();

    return syncResponse;
}
/// <summary>
/// Indexes the given documents in batches of _config.BulkSize, accumulating metrics
/// and every bulk response into <paramref name="syncResponse"/>.
/// </summary>
/// <param name="data">Documents to index, keyed by document id.</param>
/// <param name="syncResponse">Run metrics accumulator; mutated and returned.</param>
private SyncResponse IndexProcess(Dictionary<object, Dictionary<string, object>> data, SyncResponse syncResponse)
{
    // Use the O(1) Count property, hoisted out of the loop condition
    // (the original re-invoked the Enumerable.Count() extension every iteration).
    var total = data.Count;

    for (var offset = 0; offset < total; offset += _config.BulkSize)
    {
        var batch = data.Skip(offset).Take(_config.BulkSize).ToDictionary(x => x.Key, x => x.Value);
        var bulkResponse = BulkIndexProcess(batch);

        syncResponse.BulkResponses.Add(bulkResponse);
        syncResponse.IndexedDocuments += bulkResponse.AffectedDocuments;
        syncResponse.Success = syncResponse.Success && bulkResponse.Success;

        log.Info(String.Format("bulk duration: {0}ms. so far {1} documents have been indexed successfully.", bulkResponse.Duration, syncResponse.IndexedDocuments));
    }

    return syncResponse;
}