public static void Setup()
{
    var client = ElasticsearchConfiguration.Client;
    var projects = NestTestData.Data;
    var people = NestTestData.People;

    client.CreateIndex(ElasticsearchConfiguration.DefaultIndex, c => c
        .NumberOfReplicas(0)
        .NumberOfShards(1)
        .AddMapping<ElasticSearchProject>(m => m.MapFromAttributes())
        .AddMapping<Person>(m => m.MapFromAttributes())
    );
    client.CreateIndex(ElasticsearchConfiguration.DefaultIndex + "_clone", c => c
        .NumberOfReplicas(0)
        .NumberOfShards(1)
        .AddMapping<ElasticSearchProject>(m => m.MapFromAttributes())
        .AddMapping<Person>(m => m.MapFromAttributes())
    );

    var bulk = new BulkDescriptor();
    foreach (var p in projects)
        bulk.Index<ElasticSearchProject>(i => i.Object(p));
    foreach (var p in people)
        bulk.Index<Person>(i => i.Object(p));
    client.Bulk(bulk);
    client.Refresh(new[] { ElasticsearchConfiguration.DefaultIndex, ElasticsearchConfiguration.DefaultIndex + "_clone" });
}
public void Setup()
{
    _indexName = ElasticsearchConfiguration.NewUniqueIndexName();
    _repositoryName = ElasticsearchConfiguration.NewUniqueIndexName();
    _snapshotName = ElasticsearchConfiguration.NewUniqueIndexName();

    var descriptor = new BulkDescriptor();
    _indexedElements = new List<ElasticsearchProject>();
    for (int i = 0; i < 100; i++)
    {
        var elementToIndex = new ElasticsearchProject()
        {
            Id = i,
            Name = "Coboles",
            Content = "COBOL elasticsearch client"
        };
        descriptor = descriptor.Index<ElasticsearchProject>(d => d.Index(_indexName).Document(elementToIndex));
        _indexedElements.Add(elementToIndex);
    }
    var bulkResponse = this.Client.Bulk(d => descriptor);

    this.Client.CreateRepository(_repositoryName, r => r
        .FileSystem(@"local\path", o => o
            .Compress()
            .ConcurrentStreams(10)));
}
public void BulkUpdateObject()
{
    // Let's first insert some documents with id range 5000-6000
    var descriptor = new BulkDescriptor();
    foreach (var i in Enumerable.Range(5000, 1000))
        descriptor.Index<ElasticsearchProject>(op => op.Object(new ElasticsearchProject { Id = i }));
    var result = this._client.Bulk(d => descriptor);
    result.Should().NotBeNull();
    result.IsValid.Should().BeTrue();

    // Now let's update all of them, giving each a name
    descriptor = new BulkDescriptor().Refresh();
    foreach (var i in Enumerable.Range(5000, 1000))
    {
        int id = i; // capture the loop variable for the closure
        descriptor.Update<ElasticsearchProject, object>(op => op
            .Object(new ElasticsearchProject { Id = id })
            .Document(new { name = "SufixedName-" + id })
        );
    }
    result = this._client.Bulk(d => descriptor);
    result.Should().NotBeNull();
    result.IsValid.Should().BeTrue();
    result.Errors.Should().BeFalse();
    result.Items.Count().Should().Be(1000);
    result.Items.All(i => i != null).Should().BeTrue();

    var updatedObject = this._client.Source<ElasticsearchProject>(i => i.Id(5000));
    Assert.NotNull(updatedObject);
    Assert.AreEqual(updatedObject.Name, "SufixedName-5000");
}
public async Task AddTestDataInLoop()
{
    log.LogInformation("Inside AddTest data");
    try
    {
        await Task.Delay(5000);
        for (int i = 0; i < 4; i++)
        {
            IPingResponse x = await client.PingAsync();
            if (x.ConnectionStatus.Success)
            {
                log.LogInformation(DateTime.Now.ToLongTimeString() + " Connection successful to - " + x.ConnectionStatus.RequestUrl);
                break;
            }
            else
            {
                log.LogWarning(DateTime.Now.ToLongTimeString() + " Unable to connect to - " + x.ConnectionStatus.RequestUrl);
                await Task.Delay(i * 1000);
            }
        }

        var allProducts = new List<Product>();
        var descriptor = new BulkDescriptor();
        for (int i = 0; i < itemCount; i++)
        {
            Product p = new Product() { Id = i, Title = "test " + i, Price = i, CategoryId = 1 };
            descriptor.Index<Product>(op => op.Document(p));
            allProducts.Add(p);
        }
        for (int j = 0; j < 10; j++)
        {
            Category c = new Category()
            {
                Id = j,
                Name = "Category " + j,
                Products = allProducts.Where(p => p.CategoryId == j).ToList<Product>()
            };
            descriptor.Index<Category>(op => op.Document(c));
        }
        log.LogWarning("before bulk async");
        var result = await client.BulkAsync(descriptor);
        log.LogWarning("after bulk async");
    }
    catch (Exception ex)
    {
        log.LogError(DateTime.Now.ToLongTimeString() + " - Ex Caught:" + ex.Message);
    }
}
public void BulkIndexWithPercolate()
{
    // register a percolator query to test matching
    var query1 = "bulkindex-test-doc-1";
    this._client.UnregisterPercolator(query1, ur => ur.Index<ElasticsearchProject>());
    var perc = this._client.RegisterPercolator<ElasticsearchProject>(query1, p => p
        .Query(q => q
            .Term(f => f.Country, "netherlands")
        )
    );
    this._client.Refresh(r => r.Index<ElasticsearchProject>());

    var descriptor = new BulkDescriptor();
    // match against any doc
    descriptor.Index<ElasticsearchProject>(i => i
        .Object(new ElasticsearchProject { Id = 2, Country = "netherlands" })
        .Percolate("*") // match on any percolated docs
    );
    // no percolate requested this time
    descriptor.Index<ElasticsearchProject>(i => i
        .Object(new ElasticsearchProject { Id = 3, Country = "netherlands" })
    );
    var result = this._client.Bulk(d => descriptor);
    result.Should().NotBeNull();
    result.IsValid.Should().BeTrue();
    result.Errors.Should().BeFalse();
    result.Items.Should().NotBeNull().And.NotBeEmpty().And.HaveCount(2);

    var indexResponses = result.Items.OfType<BulkIndexResponseItem>().ToList();
    // tests on percolated responses
    indexResponses.Should().HaveCount(2);
    indexResponses.First().Id.Should().BeEquivalentTo("2");
    indexResponses.First().Index.Should().BeEquivalentTo(ElasticsearchConfiguration.DefaultIndex);
    indexResponses.First().Type.Should().BeEquivalentTo(this._client.Infer.TypeName<ElasticsearchProject>());
    indexResponses.First().Matches.Should().NotBeNull();
    indexResponses.ElementAt(1).Id.Should().BeEquivalentTo("3");
    indexResponses.ElementAt(1).Index.Should().BeEquivalentTo(ElasticsearchConfiguration.DefaultIndex);
    indexResponses.ElementAt(1).Type.Should().BeEquivalentTo(this._client.Infer.TypeName<ElasticsearchProject>());
    indexResponses.ElementAt(1).Matches.Should().BeNull();

    // cleanup
    this._client.UnregisterPercolator(query1, ur => ur.Index<ElasticsearchProject>());
}
public static void Indexing()
{
    using (LocationContext dbContext = new LocationContext())
    {
        var customerLocationList = dbContext.CustomerLocations.Where(s => !s.IsDeleted)
            .Include(s => s.Customer)
            .Select(s => new CustomerModel
            {
                CustomerId = s.CustomerId,
                CustomerName = s.Customer.Name,
                LocationId = s.Id,
                LocationName = s.Name,
                Location = new GeoLocation(Convert.ToDouble(s.Latitude), Convert.ToDouble(s.Longitude))
            }).ToList();

        var defaultIndex = "customerlocation";
        var client = new ElasticClient();

        if (client.IndexExists(defaultIndex).Exists)
        {
            client.DeleteIndex(defaultIndex);
        }

        if (!client.IndexExists("location_alias").Exists)
        {
            client.CreateIndex(defaultIndex, c => c
                .Mappings(m => m
                    .Map<CustomerModel>(mm => mm
                        .AutoMap()
                    )
                ).Aliases(a => a.Alias("location_alias"))
            );
        }

        // Insert Data Classic
        // for (int i = 0; i < customerLocationList.Count; i++)
        // {
        //     var item = customerLocationList[i];
        //     client.Index<CustomerModel>(item, idx => idx.Index("customerlocation").Id(item.LocationId));
        // }

        // Bulk Insert
        var bulkIndexer = new BulkDescriptor();
        foreach (var document in customerLocationList)
        {
            bulkIndexer.Index<CustomerModel>(i => i
                .Document(document)
                .Id(document.LocationId)
                .Index("customerlocation"));
        }
        client.Bulk(bulkIndexer);
    }
}
private async Task<BulkResponse> IndexMany(IEnumerable<T> models)
{
    var descriptor = new BulkDescriptor();
    descriptor.IndexMany(models, (bd, q) => bd
        .Index(_indexName)
        .Id(q.Id.ToString())
    );
    return await _client.BulkAsync(descriptor);
}
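The helper above returns the raw BulkResponse; a minimal caller-side sketch (not from the original source) for surfacing per-item failures, using the standard `Errors` and `ItemsWithErrors` members on NEST's `BulkResponse`:

// Hedged usage sketch for the IndexMany helper above.
var response = await IndexMany(models);
if (response.Errors) // true when any individual bulk operation failed
{
    foreach (var itemWithError in response.ItemsWithErrors)
        Console.WriteLine($"Failed to index document {itemWithError.Id}: {itemWithError.Error?.Reason}");
}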
protected virtual void AddElementsToIndex(BulkDescriptor descriptor, List<T> elements)
{
    foreach (T element in elements)
    {
        descriptor.Index<T>(op => op
            .Index(_indexName)
            .Version(DateTime.Now.Ticks)
            .VersionType(VersionType.External)
            .Document(element));
    }
}
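The snippet above stamps each operation with `DateTime.Now.Ticks` as an external version, which yields last-write-wins semantics: Elasticsearch rejects any write whose external version is not greater than the stored one. A minimal sketch of that conflict behavior (index name and version numbers are illustrative):

// Hedged sketch: with VersionType.External, a write carrying an older
// (lower) version number is rejected with a version-conflict error.
var first = client.Index(element, i => i.Index("my-index").Version(200).VersionType(VersionType.External));
var stale = client.Index(element, i => i.Index("my-index").Version(100).VersionType(VersionType.External));
// first.IsValid == true; stale.IsValid == false (HTTP 409 version conflict)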
public void bulkIndexCreate(string indexName, Object modelList)
{
    var descriptor = new BulkDescriptor();
    descriptor.Index<ElasticDataModel>(op => op
        .Index(indexName) // route the operation to the supplied index
        .Document(modelList as ElasticDataModel)
    );
    var result = esClient.Bulk(descriptor);
}
public ReindexApiTests(ManualReindexCluster cluster, EndpointUsage usage)
{
    this._client = cluster.Client;

    // create a couple of projects
    var projects = Project.Generator.Generate(2).ToList();
    var indexProjectsResponse = this._client.IndexMany(projects, IndexName);
    this._client.Refresh(IndexName);

    // create a few thousand commits and associate them with the projects
    var commits = CommitActivity.Generator.Generate(5000).ToList();
    var bb = new BulkDescriptor();
    for (int i = 0; i < commits.Count; i++)
    {
        var commit = commits[i];
        var project = i % 2 == 0 ? projects[0].Name : projects[1].Name;
        bb.Index<CommitActivity>(bi => bi
            .Document(commit)
            .Index(IndexName)
            .Id(commit.Id)
            .Routing(project)
        );
    }
    var bulkResult = this._client.Bulk(b => bb);
    bulkResult.ShouldBeValid();

    this._client.Refresh(IndexName);

    this._reindexManyTypesResult = this._client.Reindex<ILazyDocument>(r => r
        .BackPressureFactor(10)
        .ScrollAll("1m", 2, s => s
            .Search(ss => ss
                .Index(IndexName)
                .AllTypes()
            )
            .MaxDegreeOfParallelism(4)
        )
        .BulkAll(b => b
            .Index(NewManyTypesIndexName)
            .Size(100)
            .MaxDegreeOfParallelism(2)
            .RefreshOnCompleted()
        )
    );
    this._reindexSingleTypeResult = this._client.Reindex<Project>(IndexName, NewSingleTypeIndexName);
    this._reindexProjectionResult = this._client.Reindex<CommitActivity, CommitActivityVersion2>(
        IndexName, NewProjectionIndex, p => new CommitActivityVersion2(p));
}
public IBulkResponse BulkInsertCandidates(IEnumerable<VacancyElasticModel> candidates)
{
    var bulk = new BulkDescriptor();
    candidates.ForEach(cand => bulk.Index<VacancyElasticModel>(i => i
        .Index(IndexName)
        .Id(cand.Id)
        .Document(cand)
    ));
    return _client.Bulk(bulk);
}
public static void InsertBulkDocument()
{
    var descriptor = new BulkDescriptor();
    foreach (var employeeObj in PopulateEmployees())
    {
        Employee obj = employeeObj; // capture a per-iteration copy for the closure
        descriptor.Index<Employee>(op => op.Document(obj));
    }
    var bulkresult = EsClient.Bulk(descriptor);
}
public void InsertCards(IEnumerable<Card> cards, ElasticClient client)
{
    var descriptor = new BulkDescriptor();
    foreach (var card in cards)
    {
        descriptor.Create<Card>(op => op.Document(card));
    }
    var result = client.Bulk(descriptor);
}
protected virtual BulkDescriptor BuildQueryCore(BulkDescriptor descriptor, string index, bool refreshOnSave)
{
    descriptor = descriptor
        .IndexMany(_documents, (d, i) => d
            .Type(i.GetType())
            .Index(index)
        )
        .Refresh(refreshOnSave);
    return BuildQuery(descriptor);
}
public void InsertTransactions(IEnumerable<Transaction> transactions, ElasticClient client)
{
    var descriptor = new BulkDescriptor();
    foreach (var transaction in transactions)
    {
        descriptor.Create<Transaction>(op => op.Document(transaction));
    }
    var result = client.Bulk(descriptor);
}
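Note that `InsertCards` and `InsertTransactions` use `Create` rather than `Index`: a bulk create fails for any id that already exists instead of overwriting it. A short sketch (assuming the documents carry inferable ids) of reporting those conflicts with the `CreateMany` shorthand:

// Hedged sketch: create-only bulk insert that reports id conflicts rather than overwriting.
var result = client.Bulk(b => b.CreateMany(transactions));
foreach (var conflict in result.ItemsWithErrors)
    Console.WriteLine($"Document {conflict.Id} already exists: {conflict.Error?.Reason}");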
public void IndexBulk(IEnumerable<ElasticArchiveRecord> records)
{
    var descriptor = new BulkDescriptor();
    foreach (var r in records)
    {
        descriptor.Index<ElasticArchiveRecord>(op => op.Document(r).Id(r.ArchiveRecordId));
    }
    Client.Bulk(descriptor);
}
private async Task AddRequestInfoAsync(IEnumerable<IRequestInfo> requests, CancellationToken token)
{
    var descriptor = new BulkDescriptor(GetIndexName<RequestInfo>());
    foreach (var item in requests)
    {
        descriptor.Create<IRequestInfo>(op => op.Document(item));
    }
    await Client.BulkAsync(descriptor).ConfigureAwait(false);
}
public void RecordChanges(INestedAspect compareTo, BulkDescriptor bulk)
{
    var correlation = byLocalName.CorrelateWith((compareTo as VariableSet)?.byLocalName);
    bulk.IndexMany(
        correlation.SourceItems.Where(t => t.Item.IsChanged(t.PairedWith)).Select(t => t.Item),
        (d, v) => d.Index(Constants.ControlIndex).Document(v));
    bulk.DeleteMany(
        correlation.NonSourceItems.Select(v => v.Item),
        (d, v) => d.Index(Constants.ControlIndex).Id(v.Id));
}
public static Task<IBulkResponse> BulkInsertAsync<T>(IEnumerable<T> elasticIndexes, string parentId = null)
    where T : class, new()
{
    var descriptor = new BulkDescriptor();
    foreach (var i in elasticIndexes)
    {
        descriptor.Index<T>(op => op.Document(i).Parent(parentId));
    }
    var bulkresult = Client.BulkAsync(descriptor);
    return bulkresult;
}
public IResponse AddDocuments(List<Document> documents)
{
    var bulkDescriptor = new BulkDescriptor();
    foreach (var doc in documents)
    {
        bulkDescriptor.Index<Document>(x => x
            .Index(indexName)
            .Document(doc));
    }
    return client.Bulk(bulkDescriptor);
}
private static BulkDescriptor SetupBulk(IEnumerable<T> documents, string indexName)
{
    var bulk = new BulkDescriptor();
    foreach (var doc in documents)
    {
        bulk.Index<T>(i => i.Index(indexName).Document(doc));
    }
    return bulk;
}
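The foreach in `SetupBulk` can be collapsed with `IndexMany`, the same shorthand several other snippets here use; an equivalent sketch under the same generic `T` and `indexName` parameters:

// Hedged equivalent of SetupBulk using BulkDescriptor.IndexMany.
private static BulkDescriptor SetupBulkWithIndexMany(IEnumerable<T> documents, string indexName)
{
    var bulk = new BulkDescriptor();
    bulk.IndexMany(documents, (i, doc) => i.Index(indexName));
    return bulk;
}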
public void BulkAlternativeWayOfWriting()
{
    var descriptor = new BulkDescriptor();
    foreach (var i in Enumerable.Range(3000, 1000))
        descriptor.Index<ElasticsearchProject>(op => op.Object(new ElasticsearchProject { Id = i }));

    var result = this._client.Bulk(d => descriptor);
    result.Should().NotBeNull();
    result.IsValid.Should().BeTrue();
    result.Items.Should().NotBeNull().And.NotBeEmpty().And.HaveCount(1000).And.OnlyContain(r => r.OK);
}
public void BulkIndex<T>(IEnumerable<T> data, string indexName) where T : class
{
    var descriptor = new BulkDescriptor();
    descriptor.IndexMany(data, (desc, content) => desc.Type(content.GetType()).Index(indexName));
    var response = _client.Bulk(descriptor);
    if (response.Errors)
    {
        throw new Exception("Failed while indexing items.", response.OriginalException);
    }
}
private async Task<ReindexResult> ReindexAsync(ReindexWorkItem workItem, WorkItemContext context, int startProgress = 0, int endProgress = 100, DateTime? startTime = null)
{
    const int pageSize = 100;
    const string scroll = "10s";
    var scanResults = await _client.SearchAsync<JObject>(s => s
        .Index(workItem.OldIndex)
        .AllTypes()
        .Filter(f => startTime.HasValue
            ? f.Range(r => r.OnField(workItem.TimestampField ?? "_timestamp").Greater(startTime.Value))
            : f.MatchAll())
        .From(0).Take(pageSize)
        .SearchType(SearchType.Scan)
        .Scroll(scroll)).AnyContext();

    if (!scanResults.IsValid || scanResults.ScrollId == null)
    {
        Logger.Error().Message("Invalid search result: message={0}", scanResults.GetErrorMessage()).Write();
        return new ReindexResult();
    }

    long totalHits = scanResults.Total;
    long completed = 0;
    int page = 0;
    var results = await _client.ScrollAsync<JObject>(scroll, scanResults.ScrollId).AnyContext();
    while (results.Documents.Any())
    {
        var bulkDescriptor = new BulkDescriptor();
        foreach (var hit in results.Hits)
        {
            var h = hit;
            // TODO: Add support for doing JObject based schema migrations
            bulkDescriptor.Index<JObject>(idx => idx.Index(workItem.NewIndex).Type(h.Type).Id(h.Id).Document(h.Source));
        }

        var bulkResponse = await _client.BulkAsync(bulkDescriptor).AnyContext();
        if (!bulkResponse.IsValid)
        {
            string message = $"Reindex bulk error: old={workItem.OldIndex} new={workItem.NewIndex} page={page} message={bulkResponse.GetErrorMessage()}";
            Logger.Warn().Message(message).Write();
            // try each doc individually so we can see which doc is breaking us
            foreach (var hit in results.Hits)
            {
                var h = hit;
                var response = await _client.IndexAsync(h.Source, d => d.Index(workItem.NewIndex).Type(h.Type).Id(h.Id)).AnyContext();
                if (response.IsValid)
                    continue;

                message = $"Reindex error: old={workItem.OldIndex} new={workItem.NewIndex} id={hit.Id} page={page} message={response.GetErrorMessage()}";
                Logger.Error().Message(message).Write();
                throw new ReindexException(response.ConnectionStatus, message);
            }
        }

        completed += bulkResponse.Items.Count();
        await context.ReportProgressAsync(CalculateProgress(totalHits, completed, startProgress, endProgress), $"Total: {totalHits} Completed: {completed}").AnyContext();
        results = await _client.ScrollAsync<JObject>(scroll, results.ScrollId).AnyContext();
        page++;
    }

    return new ReindexResult { Total = totalHits, Completed = completed };
}
public bool Bulk(IEnumerable<T> documents)
{
    BulkDescriptor descriptor = new BulkDescriptor();
    foreach (var document in documents)
    {
        descriptor.Index<T>(i => i
            .Index(this.GetIndexName(document))
            .Document(document));
    }
    return this.ElasticClient.Bulk(descriptor).IsValid;
}
public static void BulkPeople(List<Person> people, string index, ElasticClient client)
{
    var bulkDescriptor = new BulkDescriptor();
    foreach (var person in people)
    {
        bulkDescriptor.Index<Person>(x => x
            .Index(index)
            .Document(person)
        );
    }
    client.Bulk(bulkDescriptor);
}
public void BulkInsert(IEnumerable<TModel> models, string indexName)
{
    CheckIndex(indexName, true);
    var bulk = new BulkDescriptor();
    foreach (var model in models)
    {
        bulk.Index<TModel>(x => x.Index(indexName).Document(model));
    }
    var response = elasticClient.Bulk(bulk);
    ElasticResponseValidator.Validate(response);
}
/// <summary>
/// Create a BulkDescriptor that adds the given items to the index.
/// </summary>
/// <param name="items">The models to index.</param>
/// <returns>A BulkDescriptor with one index operation per item.</returns>
public BulkDescriptor GetBulkDescriptor(IEnumerable<IModel> items)
{
    var bulkDescriptor = new BulkDescriptor();
    foreach (var item in items)
    {
        bulkDescriptor.Index<IModel>(x => x
            .Index(IndexName)
            .Document(item)
        );
    }
    return bulkDescriptor;
}
public async Task<BulkResponse> BatchSave(IEnumerable<T> docs)
{
    if (docs == null || !docs.Any())
    {
        return null;
    }
    var db = new BulkDescriptor();
    db.Index(this.Meta.Index);
    db.IndexMany(docs); // queue one index operation per document
    var bulkIndexResponse = await this.Conn.BulkAsync(b => db);
    return bulkIndexResponse;
}
private IBulkResponse BulkUpsert(BulkOutputRequest request)
{
    var descriptor = new BulkDescriptor();
    foreach (var doc in request.documents)
    {
        descriptor.Index<object>(op => op
            .Index(request.index)
            .Type(request.type)
            .Object(doc)
            .Id(((IDictionary<string, object>)doc)[request.idFieldName].ToString()));
    }
    return request.client.Bulk(descriptor);
}
private void InsertDocuments(List<ShopES> list)
{
    var descriptor = new BulkDescriptor();
    foreach (var item in list)
    {
        descriptor.Index<ShopES>(op => op.Document(item));
    }
    var result = _client.Bulk(descriptor);
}
public void BulkDocs<T>(List<T> documents) where T : class
{
    var bulkDescriptor = new BulkDescriptor();
    foreach (var doc in documents)
    {
        bulkDescriptor.Index<T>(x => x
            .Index(index)
            .Document(doc)
        );
    }
    ResponseValidator.handleValidation(client.Bulk(bulkDescriptor), TOPIC_OF_BULKING_DOCS_REQUEST);
}
private void InsertDocuments(List<ProductES> products)
{
    var descriptor = new BulkDescriptor();
    foreach (var product in products)
    {
        descriptor.Index<ProductES>(op => op.Document(product));
    }
    var result = _client.Bulk(descriptor);
}
private BulkDescriptor CreateBulk(IEnumerable<T> documents, string index)
{
    var bulkDescriptor = new BulkDescriptor();
    foreach (var document in documents)
    {
        bulkDescriptor.Index<T>(x => x
            .Index(index)
            .Document(document)
        );
    }
    return bulkDescriptor;
}
public async Task<IBulkResponse> SynkAsync(CancellationToken cancellationToken)
{
    var response = await _elasticConnectionClient.Client.Value.SearchAsync<SitePackChannel>(s => s
        .Index(_elasticConnectionClient.ElasticConfig.SitePackIndex)
        .Size(_elasticConnectionClient.ElasticConfig.MaxResultWindow)
        .From(0)
        .Aggregations(a => a
            .Terms("unique", te => te
                .Field(f => f.Channel_name.Suffix(ElasticConfig.ELK_KEYWORD_SUFFIX))
                .Order(o => o
                    .KeyAscending()
                    .CountDescending())
            )
        ), cancellationToken);

    var mediasRef = response.Documents
        .Select(x => new MediaRef(x.Channel_name, x.Site, x.Country, x.Xmltv_id, x.Id))
        .ToList();

    //var qc = new QueryContainerDescriptor<Picon>();
    //var query = mediasRef.SelectMany(x => x.DisplayNames.Take(2))
    //    .Select(val => qc.Fuzzy(fz => fz.Field(f => f.Name).Value(val.Replace("+", "plus")))).Aggregate((a, b) => a || b);
    //var query = mediasRef.Select(x => x.DisplayNames.FirstOrDefault())
    //    .Select(val => qc.Fuzzy(fz => fz.Field(f => f.Name).Value(val.Replace("+", "plus")))).FirstOrDefault();
    //var searchPiconDisc = new SearchDescriptor<Picon>()
    //    .Size(1)
    //    .Query(x => query);

    var tasks = mediasRef.Select(async m =>
    {
        var findLogoResponse = await _elasticConnectionClient.Client.Value.SearchAsync<Picon>(x => x
            .Query(q => q.Match(fz => fz.Field(f => f.Name).Query(m.DisplayNames.FirstOrDefault())))
            .Size(1), cancellationToken);
        if (findLogoResponse.Documents.Any())
        {
            m.Tvg.Logo = findLogoResponse.Documents.First().RawUrl;
        }
    });
    await Task.WhenAll(tasks);

    var descriptor = new BulkDescriptor();
    descriptor.IndexMany(mediasRef);
    return await _elasticConnectionClient.Client.Value.BulkAsync(descriptor, cancellationToken);
}
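`descriptor.IndexMany(mediasRef)` above names no index, so NEST falls back to its index inference (a per-type mapping or the client's default index). A minimal sketch (URL and index name are placeholders) of configuring that default:

// Hedged sketch: IndexMany without an explicit .Index(...) relies on index inference.
var settings = new ConnectionSettings(new Uri("http://localhost:9200"))
    .DefaultIndex("sitepacks"); // hypothetical default index name
var client = new ElasticClient(settings);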
public void BulkIndexWithPercolate()
{
    // register a percolator query to test matching
    var query1 = "bulkindex-test-doc-1";
    this._client.UnregisterPercolator<ElasticSearchProject>(query1);
    var perc = this._client.RegisterPercolator<ElasticSearchProject>(p => p
        .Name(query1)
        .Query(q => q
            .Term(f => f.Country, "netherlands")
        )
    );
    this._client.Refresh<ElasticSearchProject>();

    var descriptor = new BulkDescriptor();
    // match against any doc
    descriptor.Index<ElasticSearchProject>(i => i
        .Object(new ElasticSearchProject { Id = 2, Country = "netherlands" })
        .Percolate("*") // match on any percolated docs
    );
    // no percolate requested this time
    descriptor.Index<ElasticSearchProject>(i => i
        .Object(new ElasticSearchProject { Id = 3, Country = "netherlands" })
    );
    var result = this._client.Bulk(descriptor);
    result.Should().NotBeNull();
    result.IsValid.Should().BeTrue();
    result.Items.Should().NotBeNull().And.NotBeEmpty().And.HaveCount(2).And.OnlyContain(r => r.OK);

    var indexResponses = result.Items.OfType<BulkIndexResponseItem>().ToList();
    // tests on percolated responses
    indexResponses.Should().HaveCount(2);
    indexResponses.First().Id.Should().BeEquivalentTo("2");
    indexResponses.First().Index.Should().BeEquivalentTo(ElasticsearchConfiguration.DefaultIndex);
    indexResponses.First().Type.Should().BeEquivalentTo(this._client.Infer.TypeName<ElasticSearchProject>());
    indexResponses.First().Matches.Should().NotBeNull();
    indexResponses.ElementAt(1).Id.Should().BeEquivalentTo("3");
    indexResponses.ElementAt(1).Index.Should().BeEquivalentTo(ElasticsearchConfiguration.DefaultIndex);
    indexResponses.ElementAt(1).Type.Should().BeEquivalentTo(this._client.Infer.TypeName<ElasticSearchProject>());
    indexResponses.ElementAt(1).Matches.Should().BeNull();

    // cleanup
    this._client.UnregisterPercolator<ElasticSearchProject>(query1);
}
public bool UpdateStock(List<SalesInfo> salesInfo)
{
    try
    {
        var elasticClient = GetElasticClient();
        var updateDescriptor = new BulkDescriptor();
        foreach (var item in salesInfo)
        {
            // TODO: queue an Update<SalesInfo> operation per item on updateDescriptor
        }
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace
    }
    return false;
}
public bool AddSales(Admin admin, Sales sales, List<SalesInfo> salesInfo, bool stock)
{
    if (sales == null || salesInfo == null)
        throw new Exception(ErrorConstants.REQUIRED_FIELD_EMPTY);
    if (admin == null || admin.type != (int)BillingEnums.USER_TYPE.ADMIN)
        throw new Exception(ErrorConstants.NO_PREVILAGE);
    try
    {
        var elasticClient = GetElasticClient();
        var salesResponse = elasticClient.Index<Sales>(sales, i => i
            .Index(ElasticMappingConstants.INDEX_NAME)
            .Type(ElasticMappingConstants.TYPE_SALES)
        );

        var insertDescriptor = new BulkDescriptor();
        foreach (var item in salesInfo)
        {
            item.salesid = sales.salesid;
            insertDescriptor.Index<SalesInfo>(i => i
                .Index(ElasticMappingConstants.INDEX_NAME)
                .Type(ElasticMappingConstants.TYPE_SALES_INFO)
                .Document(item)
            );
        }
        var bulkResponse = elasticClient.Bulk(insertDescriptor);
        return salesResponse.RequestInformation.Success && bulkResponse.RequestInformation.Success;
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace
    }
}
public static void Setup()
{
    var client = ElasticsearchConfiguration.Client;
    var projects = NestTestData.Data;
    var people = NestTestData.People;
    var boolTerms = NestTestData.BoolTerms;

    client.CreateIndex(ElasticsearchConfiguration.DefaultIndex, c => c
        .NumberOfReplicas(0)
        .NumberOfShards(1)
        .AddMapping<ElasticSearchProject>(m => m.MapFromAttributes())
        .AddMapping<Person>(m => m.MapFromAttributes())
        .AddMapping<BoolTerm>(m => m.Properties(pp => pp
            .String(sm => sm.Name(p => p.Name1).Index(FieldIndexOption.not_analyzed))
            .String(sm => sm.Name(p => p.Name2).Index(FieldIndexOption.not_analyzed))
        ))
    );
    client.CreateIndex(ElasticsearchConfiguration.DefaultIndex + "_clone", c => c
        .NumberOfReplicas(0)
        .NumberOfShards(1)
        .AddMapping<ElasticSearchProject>(m => m.MapFromAttributes())
        .AddMapping<Person>(m => m.MapFromAttributes())
        .AddMapping<BoolTerm>(m => m.Properties(pp => pp
            .String(sm => sm.Name(p => p.Name1).Index(FieldIndexOption.not_analyzed))
            .String(sm => sm.Name(p => p.Name2).Index(FieldIndexOption.not_analyzed))
        ))
    );

    var bulk = new BulkDescriptor();
    foreach (var p in projects)
        bulk.Index<ElasticSearchProject>(i => i.Object(p));
    foreach (var p in people)
        bulk.Index<Person>(i => i.Object(p));
    foreach (var p in boolTerms)
        bulk.Index<BoolTerm>(i => i.Object(p));
    client.Bulk(bulk);
    client.Refresh(new[] { ElasticsearchConfiguration.DefaultIndex, ElasticsearchConfiguration.DefaultIndex + "_clone" });
}
public string LogData()
{
    string result = string.Empty;
    try
    {
        List<VODDetails> objList = new List<VODDetails>();
        objList = DAL.FetchData();

        #region ElasticSearch --Begin
        string indexName = "test";
        string typeName = "test01";
        var node = new Uri("http://localhost:9200");
        var settings = new ConnectionSettings(node);
        var client = new ElasticClient(settings);

        BulkDescriptor objbulk = new BulkDescriptor();
        foreach (var value in objList)
        {
            objbulk.Index<object>(i => i
                .Index(indexName)
                .Type(typeName)
                .Id(value.strAssetID)
                .Document(value));
        }
        // send all accumulated operations in a single bulk request
        client.Bulk(objbulk);
        result = "Data successfully inserted";
        #endregion ElasticSearch --End
    }
    catch (Exception ex)
    {
        result = ex.Message;
    }
    return result;
}
private async Task<ReindexResult> ReindexAsync(ReindexWorkItem workItem, WorkItemContext context, int startProgress = 0, int endProgress = 100, DateTime? startTime = null)
{
    const int pageSize = 100;
    const string scroll = "5m";
    string timestampField = workItem.TimestampField ?? "_timestamp";
    long completed = 0;

    var scanResults = await _client.SearchAsync<JObject>(s => s
        .Index(workItem.OldIndex)
        .AllTypes()
        .Filter(f => startTime.HasValue
            ? f.Range(r => r.OnField(timestampField).Greater(startTime.Value))
            : f.MatchAll())
        .From(0).Take(pageSize)
        .SearchType(SearchType.Scan)
        .Scroll(scroll)).AnyContext();

    if (!scanResults.IsValid || scanResults.ScrollId == null)
    {
        Logger.Error().Message("Invalid search result: message={0}", scanResults.GetErrorMessage()).Write();
        return new ReindexResult();
    }

    long totalHits = scanResults.Total;
    var parentMap = workItem.ParentMaps?.ToDictionary(p => p.Type, p => p.ParentPath) ?? new Dictionary<string, string>();

    var results = await _client.ScrollAsync<JObject>(scroll, scanResults.ScrollId).AnyContext();
    while (results.Documents.Any())
    {
        var bulkDescriptor = new BulkDescriptor();
        foreach (var hit in results.Hits)
        {
            var h = hit;
            // TODO: Add support for doing JObject based schema migrations
            bulkDescriptor.Index<JObject>(idx =>
            {
                idx
                    .Index(workItem.NewIndex)
                    .Type(h.Type)
                    .Id(h.Id)
                    .Document(h.Source);

                if (String.IsNullOrEmpty(h.Type))
                    Logger.Error().Message("Hit type empty. id={0}", h.Id).Write();

                if (parentMap.ContainsKey(h.Type))
                {
                    if (String.IsNullOrEmpty(parentMap[h.Type]))
                        Logger.Error().Message("Parent map has empty value. id={0} type={1}", h.Id, h.Type).Write();

                    var parentId = h.Source.SelectToken(parentMap[h.Type]);
                    if (!String.IsNullOrEmpty(parentId?.ToString()))
                        idx.Parent(parentId.ToString());
                    else
                        Logger.Error().Message("Unable to get parent id. id={0} path={1}", h.Id, parentMap[h.Type]).Write();
                }

                return idx;
            });
        }

        var bulkResponse = await _client.BulkAsync(bulkDescriptor).AnyContext();
        if (!bulkResponse.IsValid)
        {
            string message = $"Reindex bulk error: old={workItem.OldIndex} new={workItem.NewIndex} completed={completed} message={bulkResponse.GetErrorMessage()}";
            Logger.Warn().Message(message).Write();
            // try each doc individually so we can see which doc is breaking us
            foreach (var hit in results.Hits)
            {
                var h = hit;
                var response = await _client.IndexAsync<JObject>(h.Source, d =>
                {
                    d
                        .Index(workItem.NewIndex)
                        .Type(h.Type)
                        .Id(h.Id);

                    if (parentMap.ContainsKey(h.Type))
                    {
                        var parentId = h.Source.SelectToken(parentMap[h.Type]);
                        if (!String.IsNullOrEmpty(parentId?.ToString()))
                            d.Parent(parentId.ToString());
                        else
                            Logger.Error().Message("Unable to get parent id. id={0} path={1}", h.Id, parentMap[h.Type]).Write();
                    }

                    return d;
                }).AnyContext();

                if (response.IsValid)
                    continue;

                message = $"Reindex error: old={workItem.OldIndex} new={workItem.NewIndex} id={hit.Id} completed={completed} message={response.GetErrorMessage()}";
                Logger.Error().Message(message).Write();

                var errorDoc = JObject.FromObject(new { h.Type, Content = h.Source.ToString(Formatting.Indented) });
                if (parentMap.ContainsKey(h.Type))
                {
                    var parentId = h.Source.SelectToken(parentMap[h.Type]);
                    if (!String.IsNullOrEmpty(parentId?.ToString()))
                        errorDoc["ParentId"] = parentId.ToString();
                    else
                        Logger.Error().Message("Unable to get parent id. id={0} path={1}", h.Id, parentMap[h.Type]).Write();
                }

                // put the document into an error index
                response = await _client.IndexAsync<JObject>(errorDoc, d =>
                {
                    d
                        .Index(workItem.NewIndex + "-error")
                        .Id(h.Id);
                    return d;
                }).AnyContext();

                if (response.IsValid)
                    continue;

                throw new ReindexException(response.ConnectionStatus, message);
            }
        }

        completed += bulkResponse.Items.Count();
        await context.ReportProgressAsync(CalculateProgress(totalHits, completed, startProgress, endProgress), $"Total: {totalHits} Completed: {completed}").AnyContext();
        Logger.Info().Message($"Reindex Progress: {CalculateProgress(totalHits, completed, startProgress, endProgress)} Completed: {completed} Total: {totalHits}").Write();
        results = await _client.ScrollAsync<JObject>(scroll, results.ScrollId).AnyContext();
    }

    return new ReindexResult { Total = totalHits, Completed = completed };
}
public static void LoadData(IElasticClient client)
{
    var r = client.CreateIndex("entities", c => c
        .AddMapping<JsonObject>(m => m
            .IdField(i => i.SetIndex("not_analyzed"))
            .TypeName("locations")
            .Properties(p => p
                .String(s => s.Name("id"))
                .String(s => s.Name("name").Index(FieldIndexOption.analyzed).IndexAnalyzer("standard"))
                .String(s => s.Name("parentId")))
        ));

    var all = new List<JsonObject>();
    var reader = new StreamReader(File.OpenRead(@"c:\temp\countries.csv"), new UTF8Encoding());
    while (!reader.EndOfStream)
    {
        var line = reader.ReadLine();
        if (line == null)
            continue;
        var values = line.Split(',');
        values[2] = values[2].Replace("\"", "");
        var location = CreateObject.CreateMiniEntity(values[0], values[1], values[2]);
        all.Add(location);
    }

    var allObjects = all.ToDictionary(json => json.Id);
    foreach (var item in all)
    {
        var path = new List<string>();
        if (!String.IsNullOrEmpty(item["parentId"].ToString()))
        {
            RecGetParent(path, allObjects, item);
            path.Reverse();
            path.Add(item["name"].ToString());
            item.Add("fullPath", String.Join("#.#", path));
        }
        else
        {
            item.Add("fullPath", String.Empty);
        }
    }

    var insertCount = 0;
    var bulker = new BulkDescriptor();
    for (var index = 0; index < all.Count; index++)
    {
        var item = all[index];
        bulker.Index<JsonObject>(op => op
            .Index("entities")
            .Id(Convert.ToString(item["id"]))
            .Type("locations")
            .Object(item));
        insertCount++;
        if (insertCount != 1000 && index != (all.Count - 1))
            continue;

        // flush a batch of at most 1000 operations, then start a fresh descriptor
        //PushToElastic(client, bulker);
        var result = client.Bulk(bulker);
        insertCount = 0;
        bulker = new BulkDescriptor();
    }
}
public async Task AddTestData()
{
    log.LogInformation("Inside AddTest data");
    try
    {
        await Task.Delay(5000);
        for (int i = 0; i < 4; i++)
        {
            IPingResponse x = await client.PingAsync();
            if (x.ConnectionStatus.Success)
            {
                log.LogInformation(DateTime.Now.ToLongTimeString() + " Connection successful to - " + x.ConnectionStatus.RequestUrl);
                break;
            }
            else
            {
                log.LogWarning(DateTime.Now.ToLongTimeString() + " Unable to connect to - " + x.ConnectionStatus.RequestUrl);
                await Task.Delay(i * 1000);
            }
        }

        Category entertainment = new Category() { Id = 1, Name = "Entertainment" };
        Category equipment = new Category() { Id = 2, Name = "Equipment" };
        Category foodsupply = new Category() { Id = 3, Name = "Food Supply" };

        int id = 0;
        var allProducts = new List<Product>();
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Pet Rock",
            Quantity = 20,
            Price = 5.0M,
            Description = @"Why be lonely when you can have a pet? The Pet Rock is the lowest maintenance pet you'll ever own",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/Pet-Bio-Rock-with-terrarium.jpg",
            CategoryId = entertainment.Id
        });
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Robo Buddy",
            Quantity = 1,
            Price = 399.99M,
            Description = @"Robo Buddy is the ultimate Robot toy that every child and adult needs!",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/ROBO-BUDDY.jpg",
            CategoryId = entertainment.Id
        });
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Jet Pack",
            Quantity = 5,
            Price = 999.99M,
            Description = @"Be the envy of your planetary colony with this deluxe hydrogen-powered Jet Pack.",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/Jet-Pack.jpg",
            CategoryId = equipment.Id
        });
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Moon Boots",
            Price = 299.99M,
            Description = @"Hand crafted and heat moldable, these boots will keep you warm when the temperatures hit below 50 degrees!",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/Moon-Boot.jpg",
            CategoryId = equipment.Id
        });
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Indestructible Flag Pole",
            Quantity = 300,
            Price = 75.00M,
            Description = @"This indestructible, high-suction flag pole helps adventurers claim what is rightfully theirs.",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/Indestructable-High-Suction-flag-pole.jpg",
            CategoryId = equipment.Id
        });
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Emergency Beacon",
            Quantity = 7,
            Price = 125.00M,
            Description = @"This solar powered emergency beacon is a must-have for any adventurer.",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/emergency-beacon.jpg",
            CategoryId = equipment.Id
        });
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Short range Lazer blaster",
            Quantity = 40,
            Price = 800.00M,
            Description = @"The best defense is a good offense and the Lazer blaster gives adventurers peace of mind to handle whatever they may encounter.",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/LAzer.jpg",
            CategoryId = equipment.Id
        });
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Crunch Bar",
            Quantity = 36,
            Price = 2.75M,
            Description = @"Organic, gluten free, and flavor free, Crunch Bars provide the perfect ratio of protein, carbs, and fat for hungry adventurers.",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/Crunch-Bar.jpg",
            CategoryId = foodsupply.Id
        });
        allProducts.Add(new Product()
        {
            Id = id++,
            Title = "Hydro Drink",
            Quantity = 12,
            Price = 3.50M,
            Description = @"Hydro Drink packs in double the caffeine of other energy drinks but with potassium and electrolytes to quickly rehydrate.",
            ProductArtUrl = @"https://dockerbook.blob.core.windows.net/images/Hydo-Drink.jpg",
            CategoryId = foodsupply.Id
        });

        // group each category's products by CategoryId
        entertainment.Products = allProducts.Where(p => p.CategoryId == entertainment.Id).ToList<Product>();
        equipment.Products = allProducts.Where(p => p.CategoryId == equipment.Id).ToList<Product>();
        foodsupply.Products = allProducts.Where(p => p.CategoryId == foodsupply.Id).ToList<Product>();

        var descriptor = new BulkDescriptor();
        descriptor.Index<Category>(op => op.Document(entertainment));
        descriptor.Index<Category>(op => op.Document(equipment));
        descriptor.Index<Category>(op => op.Document(foodsupply));
        foreach (var p in allProducts)
        {
            descriptor.Index<Product>(op => op.Document(p));
        }

        log.LogWarning("before bulk async");
        var result = await client.BulkAsync(descriptor);
        log.LogWarning("after bulk async");
    }
    catch (Exception ex)
    {
        log.LogError(DateTime.Now.ToLongTimeString() + " - Ex Caught:" + ex.Message);
    }
}
private void ConfigureIndexItem(BulkDescriptor d, IHit<JObject> hit, string targetIndex)
{
    d.Index<JObject>(idx => ConfigureItem(idx, hit, targetIndex));
}