/// <summary>
/// Builds a bulk request containing one index operation per source object.
/// </summary>
/// <param name="objects">The documents to index.</param>
/// <param name="getIndex">Optional per-document index selector; skipped when null.</param>
/// <param name="type">The Elasticsearch type name applied to the whole request.</param>
/// <param name="getParent">Optional per-document parent id selector; skipped when null.</param>
private static BulkRequest CreateIndexBulkRequest <T>(IEnumerable <T> objects, Func <T, string> getIndex, string type, Func <T, string> getParent) where T : class
{
    var operations = new List <IBulkOperation>();

    foreach (var item in objects)
    {
        var operation = new BulkIndexOperation <T>(item);

        if (getParent != null)
        {
            operation.Parent = getParent(item);
        }

        if (getIndex != null)
        {
            operation.Index = getIndex(item);
        }

        // Documents that carry their own version stamp it onto the operation
        // for optimistic concurrency control.
        var versioned = item as IVersioned;
        if (versioned != null)
        {
            operation.Version = versioned.Version.ToString();
        }

        operations.Add(operation);
    }

    TypeNameMarker typeMarker = type;
    return new BulkRequest
    {
        Type = typeMarker,
        Operations = operations
    };
}
/// <summary>
/// Wraps a single <see cref="EventData"/> in a bulk index operation targeting the given index.
/// </summary>
private static BulkIndexOperation <EventData> CreateOperation(EventData eventData, string currentIndexName)
{
    return new BulkIndexOperation <EventData>(eventData)
    {
        Index = currentIndexName
    };
}
/// <summary>
/// Applies a partial update (upsert) for the student in the main index and records
/// an audit log entry in the logs index, both in a single bulk request.
/// </summary>
/// <param name="updatedStudent">The student whose changes should be pushed to Elasticsearch.</param>
public async Task UpdateElastic(IStudent updatedStudent)
{
    List <IBulkOperation> bulkOps = new List <IBulkOperation>();

    var updateReq = new BulkUpdateOperation <Student, object>(updatedStudent.StudentId);
    updateReq.Index = ElasticConfig.Indices.Main.Name();
    updateReq.DocAsUpsert = true;
    updateReq.Doc = updatedStudent;
    bulkOps.Add(updateReq);

    // BUG FIX: the log message was previously wrapped in string.Format($"...").
    // The interpolated string is evaluated first, so the serialized JSON's '{' and '}'
    // characters were then parsed by string.Format as composite-format placeholders,
    // throwing FormatException at runtime. The interpolated string alone is correct.
    var updateReqLog = new BulkIndexOperation <Activity <string> >(new Activity <string>()
    {
        Id = Guid.NewGuid().ToString(),
        Timestamp = DateTime.UtcNow,
        Payload = new ActivityPayload <string>()
        {
            Data = $"SearchProvider: Edit(), Edited Student - {JsonConvert.SerializeObject(updatedStudent)}"
        },
        ActivityType = ActivityType.StudentEdit
    });
    updateReqLog.Index = ElasticConfig.Indices.Logs.Name();
    bulkOps.Add(updateReqLog);

    await _elasticProvider.BulkAsync(new BulkRequest() { Operations = bulkOps });
}
/// <summary>
/// Deletes the class from the main index and records an audit log entry in the
/// logs index, both in a single bulk request.
/// </summary>
/// <param name="removedClass">The class being removed from Elasticsearch.</param>
public async Task RemoveFromElastic(IClass removedClass)
{
    List <IBulkOperation> bulkOps = new List <IBulkOperation>();

    var updateReq = new BulkDeleteOperation <Class>(removedClass.Code);
    updateReq.Index = ElasticConfig.Indices.Main.Name();
    bulkOps.Add(updateReq);

    // BUG FIX: the log message was previously wrapped in string.Format($"...").
    // The serialized JSON contains '{' and '}' characters that string.Format then
    // interprets as composite-format placeholders, throwing FormatException.
    // The interpolated string alone is correct.
    var updateReqLog = new BulkIndexOperation <Activity <string> >(new Activity <string>()
    {
        Id = Guid.NewGuid().ToString(),
        Timestamp = DateTime.UtcNow,
        Payload = new ActivityPayload <string>()
        {
            Data = $"SearchProvider: Delete(), Deleted Class - {JsonConvert.SerializeObject(removedClass)}"
        },
        ActivityType = ActivityType.ClassDelete
    });
    updateReqLog.Index = ElasticConfig.Indices.Logs.Name();
    bulkOps.Add(updateReqLog);

    await _elasticProvider.BulkAsync(new BulkRequest() { Operations = bulkOps });
}
/// <summary>
/// Indexes a newly created student into the main index and records an audit log
/// entry in the logs index, both in a single bulk request.
/// </summary>
/// <param name="createdStudent">The student to publish to Elasticsearch.</param>
public async Task PublishToElastic(IStudent createdStudent)
{
    List <IBulkOperation> bulkOps = new List <IBulkOperation>();

    var createReq = new BulkIndexOperation <Student>(createdStudent as Student);
    createReq.Index = ElasticConfig.Indices.Main.Name();
    bulkOps.Add(createReq);

    // BUG FIX: the log message was previously wrapped in string.Format($"...").
    // The serialized JSON contains '{' and '}' characters that string.Format then
    // interprets as composite-format placeholders, throwing FormatException.
    // The interpolated string alone is correct.
    // NOTE(review): sibling operations use specific activity types (StudentEdit,
    // ClassDelete) but this one uses ActivityType.Log — confirm whether a
    // StudentCreate-style value was intended.
    var createReqLog = new BulkIndexOperation <Activity <string> >(new Activity <string>()
    {
        Id = Guid.NewGuid().ToString(),
        Timestamp = DateTime.UtcNow,
        Payload = new ActivityPayload <string>()
        {
            Data = $"SearchProvider: Create(), Created Student - {JsonConvert.SerializeObject(createdStudent)}"
        },
        ActivityType = ActivityType.Log
    });
    createReqLog.Index = ElasticConfig.Indices.Logs.Name();
    bulkOps.Add(createReqLog);

    await _elasticProvider.BulkAsync(new BulkRequest() { Operations = bulkOps });
}
/// <summary>
/// Persists newly seen service/operation pairs to Elasticsearch, using an in-memory
/// cache to avoid re-indexing pairs that were already stored. Cache entries for
/// operations that fail to index are evicted so they can be retried later.
/// </summary>
/// <param name="spanServiceOperation">The span service operations to record; null is a no-op.</param>
/// <param name="cancellationToken">Token used to cancel the bulk request.</param>
async Task IServcieOperationStorage.ServcieOperationStorage(IEnumerable <SpanServiceOperation> spanServiceOperation, CancellationToken cancellationToken)
{
    if (spanServiceOperation == null)
    {
        return;
    }

    List <ServiceOperationModel> serviceOperationModels = new List <ServiceOperationModel>();
    foreach (var serviceOperation in spanServiceOperation.Where(w => !string.IsNullOrEmpty(w.Operation) && w.Process != null && !string.IsNullOrEmpty(w.Process.ServiceName)))
    {
        var cacheKey = CreateCacheKey(serviceOperation.Process.ServiceName, serviceOperation.Operation);

        // BUG FIX: this was previously 'return', which aborted processing of ALL
        // remaining operations as soon as one cached entry was found. A cache hit
        // should only skip that single operation.
        if (MemoryCache.TryGetValue(cacheKey, out var _))
        {
            continue;
        }

        MemoryCache.Set(cacheKey, true);
        serviceOperationModels.Add(new ServiceOperationModel()
        {
            Operation = serviceOperation.Operation,
            Service = serviceOperation.Process.ServiceName,
        });
    }

    if (serviceOperationModels.Count > 0)
    {
        var bulkRequest = new BulkRequest { Operations = new List <IBulkOperation>() };
        foreach (var serviceOperationModel in serviceOperationModels)
        {
            var operation = new BulkIndexOperation <ServiceOperationModel>(serviceOperationModel)
            {
                Index = _IndexName
            };
            bulkRequest.Operations.Add(operation);
        }

        var result = await ElasticClient.BulkAsync(bulkRequest, cancellationToken);

        // Evict cache entries for failed items so they will be retried on a later call.
        foreach (var item in result.ItemsWithErrors)
        {
            ServiceOperationModel source;
            if ((source = item.GetResponse <ServiceOperationModel>()?.Source) != null)
            {
                var cacheKey = CreateCacheKey(source.Service, source.Operation);
                MemoryCache.Remove(cacheKey);
            }
        }
    }
}
/// <summary>
/// Indexes the given spans into the tracing index with a single bulk request.
/// </summary>
/// <param name="spans">The spans to store.</param>
/// <param name="cancellationToken">Token used to cancel the bulk request.</param>
private Task BulkStore(IEnumerable <Span> spans, CancellationToken cancellationToken)
{
    var operations = new List <IBulkOperation>();

    // NOTE(review): the index name is derived from local time (DateTimeOffset.Now),
    // while a sibling implementation in this codebase uses UtcNow — confirm which
    // clock the index-naming scheme expects.
    foreach (var span in spans)
    {
        operations.Add(new BulkIndexOperation <Span>(span)
        {
            Index = _indexManager.CreateTracingIndex(DateTimeOffset.Now)
        });
    }

    var request = new BulkRequest { Operations = operations };
    return _elasticClient.BulkAsync(request, cancellationToken);
}
//
// Send the messages to Elasticsearch (bulk)
//
// Returns the (possibly updated) last flush time: refreshed to UtcNow on success,
// unchanged on failure. On failure the messages are re-queued via interlockedInsert
// before being removed from the working list.
private DateTime transmitBulkData(IEnumerable <JObject> bulkItems, string bulkIndexName, string bulkTypeName, ElasticClient client, DateTime lastFlushTime, List <JObject> messages)
{
    // FIX: materialize the sequence once. It was previously enumerated twice
    // (foreach + Count()), which re-runs any deferred query and can yield an
    // inconsistent count relative to what was actually sent.
    var items = bulkItems as IList <JObject> ?? bulkItems.ToList();

    var bulkRequest = new BulkRequest() { Refresh = true };
    bulkRequest.Operations = new List <IBulkOperation>();
    foreach (var json in items)
    {
        // ES requires a timestamp, add one if not present
        if (json["@timestamp"] == null)
        {
            json["@timestamp"] = DateTime.UtcNow;
        }
        var bi = new BulkIndexOperation <JObject>(json);
        bi.Index = bulkIndexName;
        bi.Type = bulkTypeName;
        bulkRequest.Operations.Add(bi);
    }

    // The total messages processed for this operation.
    int numMessages = items.Count;

    var response = client.Bulk(bulkRequest);
    if (!response.IsValid)
    {
        LogManager.GetCurrentClassLogger().Error("Failed to send: {0}", response);
        Interlocked.Increment(ref _errorCount);
        interlockedInsert(messages); // Put the messages back into the queue
    }
    else // Success!
    {
        lastFlushTime = DateTime.UtcNow;
        LogManager.GetCurrentClassLogger()
        .Info("Successfully sent {0} messages in a single bulk request", numMessages);
        Interlocked.Add(ref _sentMessages, numMessages);
    }

    // Remove them from the working list
    messages.RemoveRange(0, numMessages);
    return(lastFlushTime);
}
/// <summary>
/// Indexes the given spans into the tracing index (named from the current UTC time)
/// with a single bulk request.
/// </summary>
/// <param name="spans">The spans to store.</param>
/// <param name="cancellationToken">Token used to cancel the bulk request.</param>
private Task BulkStore(IEnumerable <Span> spans, CancellationToken cancellationToken)
{
    // CLEANUP: removed a large block of commented-out geo-distance query/sort
    // scaffolding that was unrelated to bulk storage.
    var bulkRequest = new BulkRequest { Operations = new List <IBulkOperation>() };
    foreach (var span in spans)
    {
        var operation = new BulkIndexOperation <Span>(span)
        {
            Index = _indexManager.CreateTracingIndex(DateTimeOffset.UtcNow)
        };
        bulkRequest.Operations.Add(operation);
    }
    return(_elasticClient.BulkAsync(bulkRequest, cancellationToken));
}
/// <summary>
/// Enqueues the specified values for writing.
/// </summary>
/// <param name="tag">The tag that the values are being written for.</param>
/// <param name="values">The values to write to the archive; null is treated as empty.</param>
/// <param name="archiveCandidate">The current archive candidate value for the tag; ignored when null.</param>
internal void WriteValues(ElasticsearchTagDefinition tag, IEnumerable <TagValue> values, ArchiveCandidateValue archiveCandidate)
{
    _valuesLock.EnterReadLock();
    try
    {
        var pending = values ?? new TagValue[0];
        foreach (var value in pending)
        {
            // Each value is routed to the archive index that covers its sample time.
            var document = value.ToTagValueDocument(tag, null);
            var indexName = IndexUtility.GetIndexNameForArchiveTagValue(_historian.ArchiveIndexNamePrefix, tag, value.UtcSampleTime, _historian.ArchiveIndexSuffixGenerator);
            _nextInsert.AddOperation(new BulkIndexOperation <TagValueDocument>(document)
            {
                Index = indexName
            });
        }

        if (archiveCandidate != null)
        {
            _archiveCandidateValues[tag.IdAsGuid] = archiveCandidate;
        }
    }
    finally
    {
        _valuesLock.ExitReadLock();
    }
}
/// <summary>
/// Maps the given spans to <see cref="SpanModel"/> documents and indexes them into
/// the current tracing index with a single bulk request.
/// </summary>
/// <param name="spans">The spans to store; null or empty is a no-op.</param>
/// <param name="cancellationToken">Token used to cancel the bulk request.</param>
public async Task TracingStorage(IEnumerable <Span> spans, CancellationToken cancellationToken)
{
    // IDIOM FIX: Any() instead of Count() <= 0 — Count() walks the entire sequence
    // (and may re-run a deferred query) just to test for emptiness.
    if (spans == null || !spans.Any())
    {
        return;
    }

    var bulkRequest = new BulkRequest { Operations = new List <IBulkOperation>() };
    var indexName = GetIndexName();
    foreach (var span in spans)
    {
        var spanmodel = _Mapper.Map <SpanModel>(span);
        var operation = new BulkIndexOperation <SpanModel>(spanmodel)
        {
            Index = indexName
        };
        bulkRequest.Operations.Add(operation);
    }

    // NOTE(review): the bulk response is not inspected, so item-level indexing
    // failures are silently ignored — confirm whether that is acceptable here.
    await ElasticClient.BulkAsync(bulkRequest, cancellationToken);
}
//
// Send the messages to Elasticsearch (bulk)
//
// Returns the (possibly updated) last flush time: refreshed to UtcNow on success,
// unchanged on failure. On failure the messages are re-queued via interlockedInsert
// before being removed from the working list.
private DateTime transmitBulkData(IEnumerable<JObject> bulkItems, string bulkIndexName, string bulkTypeName, ElasticClient client, DateTime lastFlushTime, List<JObject> messages)
{
    // FIX: materialize the sequence once. It was previously enumerated twice
    // (foreach + Count()), which re-runs any deferred query and can yield an
    // inconsistent count relative to what was actually sent.
    var items = bulkItems as IList<JObject> ?? bulkItems.ToList();

    var bulkRequest = new BulkRequest() {Refresh = true};
    bulkRequest.Operations = new List<IBulkOperation>();

    foreach (var json in items)
    {
        // ES requires a timestamp, add one if not present
        if (json["@timestamp"] == null)
            json["@timestamp"] = DateTime.UtcNow;

        var bi = new BulkIndexOperation<JObject>(json);
        bi.Index = bulkIndexName;
        bi.Type = bulkTypeName;
        bulkRequest.Operations.Add(bi);
    }

    // The total messages processed for this operation.
    int numMessages = items.Count;

    var response = client.Bulk(bulkRequest);
    if (!response.IsValid)
    {
        LogManager.GetCurrentClassLogger().Error("Failed to send: {0}", response);
        Interlocked.Increment(ref _errorCount);
        interlockedInsert(messages); // Put the messages back into the queue
    }
    else // Success!
    {
        lastFlushTime = DateTime.UtcNow;
        LogManager.GetCurrentClassLogger()
            .Info("Successfully sent {0} messages in a single bulk request", numMessages);
        Interlocked.Add(ref _sentMessages, numMessages);
    }

    // Remove them from the working list
    messages.RemoveRange(0, numMessages);
    return lastFlushTime;
}
/// <summary>
/// Runs the writer operation.
/// </summary>
/// <param name="cancellationToken">The cancellation token that will signal when the writer is to stop.</param>
/// <returns>
/// A task that will complete when cancellation is requested.
/// </returns>
/// <exception cref="ObjectDisposedException">The object has already been disposed.</exception>
internal async Task Execute(CancellationToken cancellationToken)
{
    // _ctSource already cancelled means the writer was disposed; refuse to start.
    if (_ctSource.Token.IsCancellationRequested)
    {
        throw new ObjectDisposedException(GetType().FullName);
    }
    // Link the caller's token to the internal source so either one stops the loop.
    cancellationToken.Register(() => _ctSource.Cancel());
    while (!_ctSource.Token.IsCancellationRequested)
    {
        // NOTE(review): cancellation during this delay surfaces to the caller as a
        // TaskCanceledException rather than a clean return — confirm that is intended.
        await Task.Delay(_interval, _ctSource.Token).ConfigureAwait(false);
        // _taskLock is a 0/1 flag: skip this tick if the previous flush has not
        // released it yet.
        if (Interlocked.CompareExchange(ref _taskLock, 1, 0) != 0)
        {
            continue;
        }
        IDictionary <Guid, TagValue> values;
        // Snapshot and clear the pending values under the write lock so concurrent
        // writers never observe a half-drained dictionary.
        _valuesLock.EnterWriteLock();
        try
        {
            if (_values.Count == 0)
            {
                // Nothing to flush; release the flag before the next tick.
                _taskLock = 0;
                continue;
            }
            values = _values.ToDictionary(x => x.Key, x => x.Value);
            _values.Clear();
        }
        finally
        {
            _valuesLock.ExitWriteLock();
        }
        // Flush the snapshot in the background so the timer loop keeps ticking.
        _historian.TaskRunner.RunBackgroundTask(async ct =>
        {
            try
            {
                var descriptor = new BulkDescriptor();
                var send = false;
                foreach (var item in values)
                {
                    var tag = _historian.GetTagById(item.Key);
                    if (tag == null)
                    {
                        // Tag disappeared since the value was queued; drop the value.
                        continue;
                    }
                    var op = new BulkIndexOperation <TagValueDocument>(item.Value.ToTagValueDocument(tag, tag.IdAsGuid))
                    {
                        Index = _historian.SnapshotValuesIndexName
                    };
                    descriptor.AddOperation(op);
                    send = true;
                }
                // Only issue the bulk call when at least one operation was added.
                if (send)
                {
                    await _historian.Client.BulkAsync(descriptor, ct).ConfigureAwait(false);
                }
            }
            catch (Exception e)
            {
                _logger?.LogError("An error occurred while writing a bulk snapshot update.", e);
            }
            finally
            {
                // Always release the flag so the next tick can flush.
                _taskLock = 0;
            }
        }, _ctSource.Token);
    }
}
/// <summary>
/// Bulk-indexes an OpenSearch result collection into the type's index and returns
/// per-item added/updated/error counts.
/// </summary>
public BulkOperationsResponse Ingest(IOpenSearchableElasticType type, IOpenSearchResultCollection results)
{
    // Strip navigation links that should not be persisted with the documents.
    OpenSearchFactory.RemoveLinksByRel(ref results, "self");
    OpenSearchFactory.RemoveLinksByRel(ref results, "search");
    IElasticCollection docs = type.FromOpenSearchResultCollection(results);
    BulkRequest bulkRequest = new BulkRequest()
    {
        Refresh = true,
        Consistency = Consistency.One,
        Index = type.Index,
        Type = type.Type,
        Operations = new List<IBulkOperation>()
    };
    // Best-effort read of the current mapping; a missing index/type simply leaves
    // currentMapping null so the comparison below forces a remap.
    RootObjectMapping currentMapping = null;
    try
    {
        var mappingResponse = client.GetMapping<IElasticType>(g => g.Index(type.Index.Name).Type(type.Type.Name));
        currentMapping = mappingResponse.Mapping;
    }
    catch (Exception) { }
    var rootObjectMapping = type.GetRootMapping();
    if (!rootObjectMapping.Equals(currentMapping))
    {
        // NOTE(review): rootObjectMapping is compared but never passed to this Map
        // call — the call only names the index/type. Confirm the intended mapping
        // body is actually applied here.
        client.Map<IElasticType>(m => m.Index(type.Index.Name).Type(type.Type.Name));
    }
    foreach (var doc in docs.ElasticItems)
    {
        // Each document is indexed under its OpenSearch identifier.
        var bulkIndexOperation = new BulkIndexOperation<IElasticItem>(doc);
        bulkIndexOperation.Id = ((IOpenSearchResultItem)doc).Identifier;
        bulkIndexOperation.Type = type.Type.Name;
        var bulkOp = bulkIndexOperation;
        bulkRequest.Operations.Add(bulkOp);
    }
    var response = client.Bulk(bulkRequest);
    BulkOperationsResponse ingestionResponse = new BulkOperationsResponse();
    foreach (var item in response.Items)
    {
        if (!item.IsValid)
        {
            ingestionResponse.Errors++;
            continue;
        }
        // Version "1" means the document was newly created; anything higher is an update.
        if (item.Version == "1") ingestionResponse.Added++;
        else ingestionResponse.Updated++;
    }
    return ingestionResponse;
}
/// <summary>
/// Bulk-indexes an OpenSearch result collection into the type's index and returns
/// per-item added/updated/error counts.
/// </summary>
public BulkOperationsResponse Ingest(IOpenSearchableElasticType type, IOpenSearchResultCollection results)
{
    // Strip navigation links that should not be persisted with the documents.
    OpenSearchFactory.RemoveLinksByRel(ref results, "self");
    OpenSearchFactory.RemoveLinksByRel(ref results, "search");

    IElasticCollection docs = type.FromOpenSearchResultCollection(results);

    var bulkRequest = new BulkRequest()
    {
        Refresh = true,
        Consistency = Consistency.One,
        Index = type.Index,
        Type = type.Type,
        Operations = new List <IBulkOperation>()
    };

    // Best-effort read of the current mapping; on any failure it stays null so the
    // comparison below triggers a remap.
    RootObjectMapping currentMapping = null;
    try
    {
        currentMapping = client.GetMapping <IElasticType>(g => g.Index(type.Index.Name).Type(type.Type.Name)).Mapping;
    }
    catch (Exception)
    {
    }

    var rootObjectMapping = type.GetRootMapping();
    if (!rootObjectMapping.Equals(currentMapping))
    {
        client.Map <IElasticType>(m => m.Index(type.Index.Name).Type(type.Type.Name));
    }

    // One index operation per document, keyed by its OpenSearch identifier.
    foreach (var doc in docs.ElasticItems)
    {
        var indexOperation = new BulkIndexOperation <IElasticItem>(doc)
        {
            Id = ((IOpenSearchResultItem)doc).Identifier,
            Type = type.Type.Name
        };
        bulkRequest.Operations.Add(indexOperation);
    }

    var response = client.Bulk(bulkRequest);

    // Version "1" means the document was newly created; anything higher is an update.
    var ingestionResponse = new BulkOperationsResponse();
    foreach (var item in response.Items)
    {
        if (!item.IsValid)
        {
            ingestionResponse.Errors++;
        }
        else if (item.Version == "1")
        {
            ingestionResponse.Added++;
        }
        else
        {
            ingestionResponse.Updated++;
        }
    }
    return(ingestionResponse);
}
/// <summary>
/// Bulk-indexes all StackOverflow posts (questions and answers) from the given file
/// into the posts index, disabling index refresh for the duration of the load and
/// restoring it afterwards. Blocks until the bulk-all observable completes or errors.
/// </summary>
/// <param name="path">Path to the posts data file.</param>
public void IndexPosts(string path)
{
    CreatePostsIndexIfNotExists();

    // Disable refresh while loading for faster bulk indexing.
    _client.Indices.UpdateSettings(PostsIndex, u => u
        .IndexSettings(i => i
            .RefreshInterval("-1")
        )
    );

    // FIX: ManualResetEvent is IDisposable and was previously leaked.
    using var handle = new ManualResetEvent(false);
    var size = 1000;
    var posts = StackOverflowData.GetPosts(path);
    var observableBulk = _client.BulkAll(posts, f => f
        .MaxDegreeOfParallelism(Environment.ProcessorCount * 2)
        .BackOffTime(TimeSpan.FromSeconds(10))
        .BackOffRetries(2)
        .Size(size)
        .BufferToBulk((bulk, buffer) =>
        {
            // Questions and answers are distinct document types, so each needs a
            // correctly-typed index operation.
            foreach (var post in buffer)
            {
                if (post is Question question)
                {
                    var item = new BulkIndexOperation <Question>(question);
                    bulk.AddOperation(item);
                }
                else
                {
                    var answer = (Answer)post;
                    var item = new BulkIndexOperation <Answer>(answer);
                    bulk.AddOperation(item);
                }
            }
        })
        .RefreshOnCompleted()
        .Index(PostsIndex)
    );

    var seenPages = 0;
    var indexedDocs = 0;
    var totalDocs = 0;
    Exception exception = null;
    var bulkObserver = new BulkAllObserver(
        onError: e =>
        {
            // Capture the failure and unblock the waiting thread.
            exception = e;
            handle.Set();
        },
        onCompleted: () => handle.Set(),
        onNext: b =>
        {
            Interlocked.Add(ref indexedDocs, b.Items.Count(i => i.IsValid));
            Interlocked.Add(ref totalDocs, b.Items.Count);
            Interlocked.Increment(ref seenPages);
            Log.WriteLine($"indexed page {seenPages} of questions and answers, {indexedDocs} out of {totalDocs}");
        }
    );

    var stopWatch = Stopwatch.StartNew();
    observableBulk.Subscribe(bulkObserver);
    handle.WaitOne();

    // Surface any bulk failure to the caller after the wait completes.
    if (exception != null)
    {
        throw exception;
    }

    Log.WriteLine($"time taken to index posts: {stopWatch.Elapsed}");

    // Restore a normal refresh interval now that the load is done.
    _client.Indices.UpdateSettings(PostsIndex, u => u
        .IndexSettings(i => i
            .RefreshInterval("30s")
        )
    );
}