public void UploadSceneDocuments<T>(IEnumerable<T> sceneDocuments)
{
    if (sceneDocuments == null)
    {
        throw new ArgumentNullException(nameof(sceneDocuments));
    }

    SearchIndexClient indexClient = new SearchIndexClient(_config.Name, _config.SceneIndexName, _client.SearchCredentials);

    var actions = new List<IndexAction<T>>();
    foreach (var sceneDocument in sceneDocuments)
    {
        actions.Add(IndexAction.MergeOrUpload(sceneDocument));
    }

    var batch = IndexBatch.New(actions);

    try
    {
        indexClient.Documents.Index(batch);
    }
    catch (IndexBatchException ex)
    {
        _logger.Error(ex.Message);
        throw;
    }
    finally
    {
        indexClient.Dispose();
    }
}
private static async Task AddOrUpdateIndex(SearchServiceClient searchClient, IEnumerable<Project> projects, string indexName, ILogger log)
{
    var indexClient = searchClient.Indexes.GetClient(indexName);

    var indexActions = projects
        .Select(project =>
        {
            var projectNameMatch = Regex.Match(project.MarkdownDescription, @"^\s*#(?!#)\s*(.*?)\s*$", RegexOptions.Multiline);
            var projectName = projectNameMatch.Success ? projectNameMatch.Groups[1].Value : project.Name;
            var contributor = project.ContributorInfo;

            var document = new Document
            {
                { "id", project.Id },
                { "name", projectName },
                { "contributorName", contributor.Name },
                { "contributorUrl", contributor.Web },
                { "contributorLogo", contributor.Logo },
                { "descriptionMarkdownFilename", project.Name },
                { "descriptionMarkdown", project.MarkdownDescription }
            };

            log.LogInformation("{name}, {contributorName}", projectName, contributor.Name);

            return IndexAction.MergeOrUpload(document);
        })
        .OfType<IndexAction>();

    var result = await indexClient.Documents.IndexAsync(new IndexBatch(indexActions));
}
private static async Task UploadToAzureSearch(ISearchIndexClient indexClient, string documentId, List<string> keyPhrases, string summary, TraceWriter log)
{
    var document = new Document();
    document.Add(KeyField, documentId);
    document.Add(SummaryField, summary);
    document.Add(KeyPhrasesField, string.Join(", ", keyPhrases));

    var indexOperations = new List<IndexAction>()
    {
        IndexAction.MergeOrUpload(document)
    };

    try
    {
        await indexClient.Documents.IndexAsync(new IndexBatch(indexOperations));
    }
    catch (IndexBatchException e)
    {
        // Sometimes when your Search service is under load, indexing will fail for some of the documents in
        // the batch. Depending on your application, you can take compensating actions like delaying and
        // retrying. For this simple demo, we just log the failed document keys and continue.
        log.Info("Failed to index some of the documents: " +
            string.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
    }
}
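// A minimal sketch of the "compensating action" the comment above alludes to, not part of the original
// sample: it assumes the Microsoft.Azure.Search SDK version in use exposes
// IndexBatchException.FindFailedActionsToRetry, which rebuilds a smaller batch containing only the
// documents that failed so they can be resubmitted after a short delay. The method name, the single-retry
// behavior and the 2-second back-off are illustrative assumptions.
private static async Task IndexWithSingleRetryAsync<T>(
    ISearchIndexClient indexClient,
    IndexBatch<T> batch,
    Func<T, string> keySelector) where T : class
{
    try
    {
        await indexClient.Documents.IndexAsync(batch);
    }
    catch (IndexBatchException e)
    {
        // Build a new batch from only the actions whose keys did not succeed, wait briefly, and try once more.
        var retryBatch = e.FindFailedActionsToRetry(batch, keySelector);
        await Task.Delay(TimeSpan.FromSeconds(2));
        await indexClient.Documents.IndexAsync(retryBatch);
    }
}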
static void AddSentencesToIndex(string[] sentences)
{
    var indexClient = _searchClient.Indexes.GetClient(_indexName);

    var docActions = new List<IndexAction<AliceDocument>>();
    for (int i = 0; i < sentences.Length; i++)
    {
        var doc = new AliceDocument()
        {
            documentId = i.ToString(),
            standardText = sentences[i],
            englishText = sentences[i],
            microsoftText = sentences[i]
        };
        docActions.Add(IndexAction.MergeOrUpload(doc));
    }

    var docBatch = IndexBatch.New(docActions);

    try
    {
        var result = indexClient.Documents.Index(docBatch);
    }
    catch (IndexBatchException e)
    {
        // Sometimes when your Search service is under load, indexing will fail for some of the documents in
        // the batch. Depending on your application, you can take compensating actions like delaying and
        // retrying. For this simple demo, we just log the failed document keys and continue.
        Console.WriteLine("Failed to index some of the documents: " +
            string.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
    }
}
public static IndexAction ElitUaApiModelToIndexAction(
    ElitPriceListRecord apiProduct,
    long updatedOnUtcTimestamp)
{
    if (apiProduct == null)
    {
        return null;
    }

    var source = EkProductSourceEnum.ElitUa;
    var productKey = new EkProductKey(source, ReplaceInvalidAzureSearchKeySymbolsWithDash(apiProduct.ActiveItemNo))
        .ToKey();
    var nameRu = GetValueOrFallback(apiProduct.EcatDescription, apiProduct.ItemDescription);

    var product = new Document()
    {
        ["key"] = productKey,
        ["updatedOnUtcTimestamp"] = updatedOnUtcTimestamp,
        ["source"] = (int)source,
        ["sourceId"] = apiProduct.ActiveItemNo,
        ["partNumber"] = apiProduct.PartNumber,
        ["cleanedPartNumber"] = PartNumberCleaner.GetCleanedPartNumber(apiProduct.PartNumber),
        ["brandName"] = apiProduct.Brand,
        ["cleanedBrandPartNumber"] = PartNumberCleaner.GetCleanedBrandPartNumber(
            brandName: apiProduct.Brand,
            partNumber: apiProduct.PartNumber),
        ["name_ru"] = SearchTextHelpers.TrimNameAndAddBrandIfMissed(
            productName: nameRu,
            brandName: apiProduct.Brand),
        ["price"] = (double)apiProduct.CustomerPrice,
    };

    return IndexAction.MergeOrUpload(product);
}
public async Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
{
    try
    {
        if (_messages.Count == 0)
        {
            return;
        }

        var uploadAction = _messages.Where(w => w.Insert).Select(s => IndexAction.MergeOrUpload(s.Item));
        var deleteAction = _messages.Where(w => !w.Insert).Select(s => IndexAction.Delete(s.Item));
        var batch = IndexBatch.New(uploadAction.Union(deleteAction));

        var result = await _indexClient.Documents.IndexAsync(batch, cancellationToken: cancellationToken);
        _messages.Clear();

#if DEBUG
        foreach (var errorResult in result.Results.Where(w => !w.Succeeded))
        {
            Debug.WriteLine(
                $"Failed to process id {errorResult.Key} error {errorResult.ErrorMessage} on index {_indexClient.IndexName} ");
        }
#endif
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex);
        throw;
    }
}
/// <summary>
/// Adds all new records to index
/// </summary>
/// <param name="newRecords">The new or existing records which should be indexed.</param>
public async Task AddOrUpdateIndexDataAsync(IEnumerable<EmployerSearchModel> newRecords)
{
    if (Disabled)
    {
        throw new Exception($"{nameof(AzureEmployerSearchRepository)} is disabled");
    }

    if (newRecords == null || !newRecords.Any())
    {
        throw new ArgumentNullException(nameof(newRecords), "You must supply at least one record to index");
    }

    //Remove all test organisations
    if (!string.IsNullOrWhiteSpace(SharedOptions.TestPrefix))
    {
        newRecords = newRecords.Where(e => !e.Name.StartsWithI(SharedOptions.TestPrefix));
    }

    //Ensure the records are ordered by name
    newRecords = newRecords.OrderBy(o => o.Name);

    //Set the records to add or update
    var actions = newRecords.Select(r => IndexAction.MergeOrUpload(_autoMapper.Map<AzureEmployerSearchModel>(r)))
        .ToList();

    var batches = new ConcurrentBag<IndexBatch<AzureEmployerSearchModel>>();
    while (actions.Any())
    {
        var batchSize = actions.Count > 1000 ? 1000 : actions.Count;
        var batch = IndexBatch.New(actions.Take(batchSize).ToList());
        batches.Add(batch);
        actions.RemoveRange(0, batchSize);
    }

    var indexClient = await _indexClient.Value;

    Parallel.ForEach(
        batches,
        batch =>
        {
            var retries = 0;
retry:
            try
            {
                indexClient.Documents.Index(batch);
            }
            catch (IndexBatchException)
            {
                if (retries < 30)
                {
                    retries++;
                    Thread.Sleep(1000);
                    goto retry;
                }

                throw;
            }
        });
}
private void ImportDocuments(ISearchIndexClient indexClient)
{
    var actions = new IndexAction<RealState>[]
    {
        IndexAction.Upload(
            new RealState()
            {
                ListingId = "1",
                Name = "Madrid",
                Beds = 2,
                Baths = "1",
                Description = "Meilleur hôtel en ville",
                Status = "Available",
                Type = "House",
                City = "Bern",
                Price = 1200
            }),
        IndexAction.Upload(
            new RealState()
            {
                ListingId = "2",
                Name = "Real",
                Beds = 3,
                Baths = "2",
                Description = "Hôtel le moins cher en ville",
                Status = "Hired",
                Type = "Room",
                City = "Madrid",
                Price = 1600
            }),
        IndexAction.MergeOrUpload(
            new RealState()
            {
                ListingId = "3",
                Name = "Empire",
                Beds = 3,
                Baths = "2",
                Description = "Mansion Le Blank",
                Status = "Sold",
                Type = "Castle",
                City = "London",
                Price = 1000
            }),
        //IndexAction.Delete(new RealState() { ListingId = "6" })
    };

    var batch = IndexBatch.New(actions);

    try
    {
        indexClient.Documents.Index(batch);
    }
    catch (IndexBatchException e)
    {
        Console.WriteLine("Failed to index some of the documents: {0}",
            String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
    }

    Console.WriteLine("Waiting for documents to be indexed...\n");
    Thread.Sleep(2000);
}
//private readonly string[] lstExtensions = { "pdf", "doc", "docx" };

public void IndexDocuments(List<BlobStructure> lstBlobs)
{
    object strExtension;
    object strName;
    var blbOp = new AzureStorage.Blobs.BlobStorage();

    foreach (var blbBlob in lstBlobs)
    {
        blbBlob.dctMetadata.TryGetValue(cnsBlobMetadata.Extension, out strExtension);
        blbBlob.dctMetadata.TryGetValue(cnsBlobMetadata.AlterName, out strName);

        // If the file is not a valid document
        if (!strExtensions.Equals(strExtension))
        {
            return;
        }

        List<IndexAction> lstActions = new List<IndexAction>();

        // Create an index action for each page;
        // download the blob as a stream
        using (PdfReader reader = new PdfReader(blbOp.fntGetStreamBlob(blbBlob)))
        {
            for (int i = 1; i <= reader.NumberOfPages; i++)
            {
                var dcmMetadata = new Document();

                // Get all metadata from blob
                foreach (var meta in blbBlob.dctMetadata)
                {
                    dcmMetadata.Add(meta.Key, meta.Value);
                }

                dcmMetadata.Add("container", blbBlob.cntContainer.fntFullNameStr);
                dcmMetadata.Add("uri", blbBlob.fntUri);
                dcmMetadata.Add("fullname", blbBlob.fntNameStr + "." + strExtension);
                dcmMetadata.Add("id", MakeSafeId(strName + "_" + i + "." + strExtension));
                dcmMetadata.Add("content", PdfTextExtractor.GetTextFromPage(reader, i));
                dcmMetadata.Add("page", i);

                lstActions.Add(IndexAction.MergeOrUpload(dcmMetadata));
            }
        }

        // Initialize search client
        var client = new SearchIndexClient(ConfigurationManager.AppSettings["SearchServiceName"],
            ConfigurationManager.AppSettings["IndexName"],
            new SearchCredentials(ConfigurationManager.AppSettings["SearchApiKey"]));

        // Index batch process
        for (int i = 0; i < (int)Math.Ceiling(lstActions.Count / (double)intRegistersSkip); i++)
        {
            client.Documents.Index(new IndexBatch(lstActions.Skip(i * intRegistersSkip).Take(lstActions.Count - (i * intRegistersSkip))));
        }
    }
}
public async Task AddDocumentAsync(DocumentModel document, ILogger logger, CancellationToken token = default)
{
    await EnsureIndexCreatedAsync(logger, token);

    var actions = new[] { IndexAction.MergeOrUpload(document) };
    var searchIndexClient = _searchServiceClient.Indexes.GetClient(Constants.IndexName);
    await searchIndexClient.Documents.IndexAsync(IndexBatch.New(actions), cancellationToken: token);
}
public void Index(Player player)
{
    var actions = new List<IndexAction<PlayerSearchEntry>>
    {
        IndexAction.MergeOrUpload(player.ToSearchEntry())
    };

    var batch = IndexBatch.New(actions);
    _playersIndexClient.Documents.Index(batch);
}
public static IndexAction OmegaAutoBizApiModelToIndexAction(
    ProductSearchRecord apiProduct,
    long updatedOnUtcTimestamp)
{
    if (apiProduct == null)
    {
        return null;
    }

    var source = EkProductSourceEnum.OmegaAutoBiz;
    var productKey = new EkProductKey(source, apiProduct.ProductId.ToString())
        .ToKey();

    var price = GetValueOrFallback(apiProduct.CustomerPrice, apiProduct.Price);
    if (price == 0)
    {
        return IndexAction.Delete(new Document()
        {
            ["key"] = productKey,
        });
    }

    var partNumber = apiProduct.Number?.Trim();
    var brandName = apiProduct.BrandDescription?.Trim();
    var nameRu = apiProduct.Description?.Trim();
    var nameUk = GetValueOrFallback(apiProduct.DescriptionUkr?.Trim(), nameRu);
    var description = apiProduct.Info?.Trim();

    var product = new Document()
    {
        ["key"] = productKey,
        ["updatedOnUtcTimestamp"] = updatedOnUtcTimestamp,
        ["source"] = (int)source,
        ["sourceId"] = apiProduct.ProductId.ToString(),
        ["partNumber"] = partNumber,
        ["cleanedPartNumber"] = PartNumberCleaner.GetCleanedPartNumber(partNumber),
        ["brandName"] = brandName,
        ["cleanedBrandPartNumber"] = PartNumberCleaner.GetCleanedBrandPartNumber(
            brandName: brandName,
            partNumber: partNumber),
        ["name_ru"] = SearchTextHelpers.TrimNameAndAddBrandIfMissed(
            productName: nameRu,
            brandName: brandName),
        ["name_uk"] = SearchTextHelpers.TrimNameAndAddBrandIfMissed(
            productName: nameUk,
            brandName: brandName),
        ["description_ru"] = description,
        ["description_uk"] = description,
        ["price"] = (double)price,
    };

    return IndexAction.MergeOrUpload(product);
}
public async Task<DocumentIndexResult> MergeOrUploadDocumentsAsync<T>(string indexName, T[] documents)
{
    var indexClient = GetClient(indexName);

    var actions = new List<IndexAction<T>>();
    foreach (var d in documents)
    {
        actions.Add(IndexAction.MergeOrUpload(d));
    }

    var batch = IndexBatch.New(actions);
    return await indexClient.Documents.IndexAsync(batch);
}
private async Task LoadDataFileAsync(string file, SearchIndexClient indexClient)
{
    using (var sr = System.IO.File.OpenText(file))
    {
        var json = sr.ReadToEnd();
        var documents = JsonConvert.DeserializeObject<List<dynamic>>(json);

        var actions = new List<IndexAction<dynamic>>();
        foreach (var doc in documents)
        {
            var x = IndexAction.MergeOrUpload<dynamic>(doc);
            actions.Add(x);
        }

        var batch = new IndexBatch<dynamic>(actions);
        await indexClient.Documents.IndexAsync(batch);
    }
}
public static void PostSave(PostBase post)
{
    using (var client = CreateClient())
    {
        var indexClient = client.Indexes.GetClient("content");

        var body = new StringBuilder();
        foreach (var block in post.Blocks)
        {
            if (block is HtmlBlock htmlBlock)
            {
                body.AppendLine(htmlBlock.Body.Value);
            }
            else if (block is HtmlColumnBlock columnBlock)
            {
                body.AppendLine(columnBlock.Column1.Value);
                body.AppendLine(columnBlock.Column2.Value);
            }
        }

        var cleanHtml = new Regex("<[^>]*(>|$)");
        var cleanSpaces = new Regex("[\\s\\r\\n]+");
        var cleaned = cleanSpaces.Replace(cleanHtml.Replace(body.ToString(), " "), " ").Trim();

        var actions = new IndexAction<Content>[]
        {
            IndexAction.MergeOrUpload(
                new Content
                {
                    Slug = post.Slug,
                    ContentId = post.Id.ToString(),
                    ContentType = "post",
                    Title = post.Title,
                    Category = post.Category.Title,
                    Tags = post.Tags.Select(t => t.Title).ToList(),
                    Body = cleaned
                }
            )
        };

        var batch = IndexBatch.New(actions);
        indexClient.Documents.Index(batch);
    }
}
public static async Task UploadToAzureSearch(string pageId, string documentName, int pageNumber, string keyPhrases, string text, TraceWriter log)
{
    // Create the index if it doesn't exist
    if (!_serviceClient.Indexes.Exists(Constants.IndexName))
    {
        var definition = new Index()
        {
            Name = Constants.IndexName,
            Fields = FieldBuilder.BuildForType<DocumentPage>()
        };
        _serviceClient.Indexes.Create(definition);
    }

    ISearchIndexClient indexClient = _serviceClient.Indexes.GetClient(Constants.IndexName);

    var documentPage = new DocumentPage
    {
        pageId = pageId,
        documentName = documentName,
        pageNumber = pageNumber,
        keyPhrases = keyPhrases,
        text = text
    };

    var actions = new IndexAction<DocumentPage>[]
    {
        IndexAction.MergeOrUpload(documentPage)
    };

    // Pretty small batch! Still fine for this simple demo
    var batch = IndexBatch.New(actions);

    try
    {
        await indexClient.Documents.IndexAsync(batch);
    }
    catch (IndexBatchException e)
    {
        // Sometimes when your Search service is under load, indexing will fail for some of the documents in
        // the batch. Depending on your application, you can take compensating actions like delaying and
        // retrying. For this simple demo, we just log the failed document keys and continue.
        log.Info("Failed to index some of the documents: " +
            string.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
    }
}
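// FieldBuilder.BuildForType<DocumentPage>() above derives the index schema from attributes on the model
// class. The DocumentPage type itself is not shown in the sample; the sketch below is an assumed shape
// matching the fields used there (pageId as the document key plus searchable text fields), not the
// original class. The attributes come from Microsoft.Azure.Search and System.ComponentModel.DataAnnotations.
public class DocumentPage
{
    [System.ComponentModel.DataAnnotations.Key]
    public string pageId { get; set; }

    [IsSearchable, IsFilterable]
    public string documentName { get; set; }

    [IsFilterable, IsSortable]
    public int pageNumber { get; set; }

    [IsSearchable]
    public string keyPhrases { get; set; }

    [IsSearchable]
    public string text { get; set; }
}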
public async Task CreateOrUpdate(PlanTemplateCM planTemplate)
{
    using (var searchClient = CreateAzureSearchClient())
    {
        using (var indexClient = searchClient.Indexes.GetClient(GetPlanTemplateIndexName()))
        {
            var document = ConvertToSearchDocument(planTemplate);
            var batch = IndexBatch.New(
                new IndexAction[]
                {
                    IndexAction.MergeOrUpload(document)
                }
            );
            await indexClient.Documents.IndexAsync(batch);
        }
    }
}
private IndexAction<KeyedDocument> GetHijackIndexAction(
    Context context,
    NuGetVersion version,
    HijackDocumentChanges changes)
{
    IndexAction<KeyedDocument> indexAction;
    if (changes.Delete)
    {
        indexAction = IndexAction.Delete(_hijack.Keyed(
            context.PackageId,
            version.ToNormalizedString()));
    }
    else if (!changes.UpdateMetadata)
    {
        indexAction = IndexAction.Merge<KeyedDocument>(_hijack.LatestFromCatalog(
            context.PackageId,
            version.ToNormalizedString(),
            lastCommitTimestamp: context.LastCommitTimestamp,
            lastCommitId: context.LastCommitId,
            changes: changes));
    }
    else
    {
        var leaf = context.GetLeaf(version);
        var normalizedVersion = VerifyConsistencyAndNormalizeVersion(context, leaf);

        indexAction = IndexAction.MergeOrUpload<KeyedDocument>(_hijack.FullFromCatalog(
            normalizedVersion,
            changes,
            leaf));
    }

    _logger.LogInformation(
        "Hijack index action prepared for {PackageId} {PackageVersion}: {IndexAction} with a {DocumentType} document.",
        context.PackageId,
        version.ToNormalizedString(),
        indexAction.ActionType,
        indexAction.Document.GetType().FullName);

    return indexAction;
}
public async Task BatchUpdateAsync<T>(IEnumerable<T> uploadOrMerge, IEnumerable<T> upload = null, IEnumerable<T> delete = null) where T : WorkDocument
{
    var serviceClient = CreateSearchServiceClient();
    var indexClient = serviceClient.Indexes.GetClient(_indexName);

    var actions = new List<IndexAction<T>>();

    if (uploadOrMerge != null)
    {
        foreach (var item in uploadOrMerge)
        {
            actions.Add(IndexAction.MergeOrUpload<T>(item));
        }
    }

    if (upload != null)
    {
        foreach (var item in upload)
        {
            actions.Add(IndexAction.Upload<T>(item));
        }
    }

    if (delete != null)
    {
        foreach (var item in delete)
        {
            actions.Add(IndexAction.Delete<T>(item));
        }
    }

    var batch = IndexBatch.New(actions);

    var retryStrategy = new Incremental(3, TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(2));
    var retryPolicy = new RetryPolicy<SearchIndexErrorDetectionStrategy>(retryStrategy);
    //there is a retry policy for the client search now, we might be able to use that instead
    await retryPolicy.ExecuteAsync(async () => await indexClient.Documents.IndexAsync(batch));
}
/// <summary>
/// Creates or updates the searchable content for the
/// given post.
/// </summary>
/// <param name="post">The post</param>
public async Task SavePostAsync(PostBase post)
{
    using (var client = CreateClient())
    {
        var indexClient = client.Indexes.GetClient("content");

        var body = new StringBuilder();
        foreach (var block in post.Blocks)
        {
            if (block is ISearchable searchableBlock)
            {
                body.AppendLine(searchableBlock.GetIndexedContent());
            }
        }

        var cleanHtml = new Regex("<[^>]*(>|$)");
        var cleanSpaces = new Regex("[\\s\\r\\n]+");
        var cleaned = cleanSpaces.Replace(cleanHtml.Replace(body.ToString(), " "), " ").Trim();

        var actions = new IndexAction<Content>[]
        {
            IndexAction.MergeOrUpload(
                new Content
                {
                    Slug = post.Slug,
                    ContentId = post.Id.ToString(),
                    ContentType = "post",
                    Title = post.Title,
                    Category = post.Category.Title,
                    Tags = post.Tags.Select(t => t.Title).ToList(),
                    Body = cleaned
                }
            )
        };

        var batch = IndexBatch.New(actions);
        await indexClient.Documents.IndexAsync(batch);
    }
}
protected virtual async Task ExecuteBatch<T>(IEnumerable<T> uploadOrMerge, IEnumerable<T> upload = null, IEnumerable<T> delete = null) where T : class
{
    var actions = new List<IndexAction<T>>();

    if (uploadOrMerge != null)
    {
        foreach (var item in uploadOrMerge)
        {
            actions.Add(IndexAction.MergeOrUpload<T>(item));
        }
    }

    if (upload != null)
    {
        foreach (var item in upload)
        {
            actions.Add(IndexAction.Upload<T>(item));
        }
    }

    if (delete != null)
    {
        foreach (var item in delete)
        {
            actions.Add(IndexAction.Delete<T>(item));
        }
    }

    var batch = IndexBatch.New(actions);

    var retryStrategy = new IncrementalRetryStrategy(3, TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(2));
    var retryPolicy = new RetryPolicy<SearchIndexErrorDetectionStrategy>(retryStrategy);
    //there is a retry policy for the client search now, we might be able to use that instead
    await retryPolicy.ExecuteAsync(async () => await IndexClient.Documents.IndexAsync(batch));
}
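// The comment above notes that the Search client now ships with its own retry handling. This is a
// minimal sketch of that alternative, not part of the original sample: it assumes the Microsoft.Azure.Search
// client (which derives from Microsoft.Rest.ServiceClient) so a transient-fault retry policy can be attached
// to the client once, instead of wrapping every IndexAsync call in a local RetryPolicy. The service name,
// index name, key and retry settings below are placeholders.
private static ISearchIndexClient CreateIndexClientWithRetries()
{
    var indexClient = new SearchIndexClient("my-service", "my-index", new SearchCredentials("admin-key"));

    // Retry transient HTTP failures up to 3 times, 2 seconds apart
    // (types from Microsoft.Rest.TransientFaultHandling).
    indexClient.SetRetryPolicy(
        new RetryPolicy<HttpStatusCodeErrorDetectionStrategy>(
            new FixedIntervalRetryStrategy(3, TimeSpan.FromSeconds(2))));

    return indexClient;
}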
private void ButtonIndex_Click(object sender, EventArgs e)
{
    Article article = new Article
    {
        Title = InputTitle.Text,
        Category = InputCategory.Text,
        Text = InputText.Text
    };

    using (var serviceClient = new SearchServiceClient(SearchService, new SearchCredentials(SearchServiceKey)))
    {
        var actions = new IndexAction<Article>[]
        {
            IndexAction.MergeOrUpload(article)
        };
        var batch = IndexBatch.New(actions);

        ISearchIndexClient indexClient = serviceClient.Indexes.GetClient(SearchServiceIndex);
        indexClient.Documents.Index(batch);
    }
}
static void Main(string[] args)
{
    string searchServiceName = ConfigurationManager.AppSettings["SearchServiceName"];
    string adminApiKey = ConfigurationManager.AppSettings["SearchServiceAdminApiKey"];
    string index = ConfigurationManager.AppSettings["SearchServiceIndexName"];

    SearchServiceClient serviceClient = new SearchServiceClient(searchServiceName, new SearchCredentials(adminApiKey));
    ISearchIndexClient indexClient = serviceClient.Indexes.GetClient(index);

    var kb = ReadKB();
    SearchItem[] items = JsonConvert.DeserializeObject<SearchItem[]>(kb);

    List<IndexAction<SearchItem>> merges = new List<IndexAction<SearchItem>>();
    foreach (SearchItem item in items)
    {
        merges.Add(IndexAction.MergeOrUpload(item));
    }

    var batch = IndexBatch.New(merges.ToArray());

    try
    {
        indexClient.Documents.Index<SearchItem>(batch);
    }
    catch (IndexBatchException e)
    {
        // Sometimes when your Search service is under load, indexing will fail for some of the documents in
        // the batch. Depending on your application, you can take compensating actions like delaying and
        // retrying. For this simple demo, we just log the failed document keys and continue.
        Console.WriteLine(
            "Failed to index some of the documents: {0}",
            String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
    }

    Console.WriteLine("Waiting for documents to be indexed...\n");
    Thread.Sleep(2000);
}
private void ButtonIndex_Click(object sender, EventArgs e)
{
    Article article = new Article
    {
        Title = InputTitle.Text,
        Category = InputCategory.Text,
        Text = InputText.Text
    };

    // TODO: index new item
    using (var serviceClient = new SearchServiceClient("azure-search-test11", new SearchCredentials("A70A4FD8B31C177C6A9B86FDD4AD82C6")))
    {
        var actions = new IndexAction<Article>[]
        {
            IndexAction.MergeOrUpload(article)
        };
        var batch = IndexBatch.New(actions);

        ISearchIndexClient indexClient = serviceClient.Indexes.GetClient("article");
        indexClient.Documents.Index(batch);
    }
}
private void ButtonIndex_Click(object sender, EventArgs e)
{
    Article article = new Article
    {
        Title = InputTitle.Text,
        Category = InputCategory.Text,
        Text = InputText.Text
    };

    // index new item
    using (var serviceClient = new SearchServiceClient("clouddev", new SearchCredentials("5186266EED091A2D879713FCEF9C1F51")))
    {
        var actions = new IndexAction<Article>[]
        {
            IndexAction.MergeOrUpload(article)
        };
        var batch = IndexBatch.New(actions);

        ISearchIndexClient indexClient = serviceClient.Indexes.GetClient("articles");
        indexClient.Documents.Index(batch);
    }
}
/// <summary>
/// Updates the current dataset document details in Azure Search (without having to
/// wait for the indexer to run).
/// </summary>
/// <param name="dataset">The updated dataset contents.</param>
/// <param name="token">Cancellation token.</param>
/// <returns></returns>
public async Task<IndexingResult> UpdateDatasetDocInSearchIndex(DatasetStorageItem dataset, CancellationToken token)
{
    if (dataset is null)
    {
        throw new ArgumentNullException(nameof(dataset));
    }

    var searchDoc = new DatasetSearchItem()
    {
        Id = dataset.Id,
        Name = dataset.Name,
        Description = dataset.Description,
        Published = dataset.Published,
        Created = dataset.Created,
        Modified = dataset.Modified,
        License = dataset.License,
        LicenseId = dataset.LicenseId,
        Domain = dataset.Domain,
        DomainId = dataset.DomainId,
        Tags = dataset.Tags,
        FileCount = dataset.FileCount,
        FileTypes = dataset.FileTypes,
        Size = dataset.Size,
        IsCompressedAvailable = dataset.IsCompressedAvailable,
        IsDownloadAllowed = dataset.IsDownloadAllowed,
        IsFeatured = dataset.IsFeatured
    };

    var actions = new IndexAction<DatasetSearchItem>[]
    {
        IndexAction.MergeOrUpload(searchDoc)
    };
    var batch = IndexBatch.New(actions);
    var results = await Client.Documents.IndexAsync(batch);

    return results.Results.FirstOrDefault();
}
public Task IndexAccommodationAsync(Accommodation accommodation, Facility facility)
{
    var action = IndexAction.MergeOrUpload(CreateDocument(accommodation, facility));
    return client.Documents.IndexAsync(new IndexBatch(new[] { action }));
}
private static void UploadDocuments(ISearchIndexClient indexClient)
{
    var actions = new IndexAction<Hotel>[]
    {
        IndexAction.Upload(
            new Hotel()
            {
                HotelId = "1",
                BaseRate = 199.0,
                Description = "Best hotel in town",
                DescriptionFr = "Meilleur hôtel en ville",
                HotelName = "Fancy Stay",
                Category = "Luxury",
                Tags = new[] { "pool", "view", "wifi", "concierge" },
                ParkingIncluded = false,
                SmokingAllowed = false,
                LastRenovationDate = new DateTimeOffset(2010, 6, 27, 0, 0, 0, TimeSpan.Zero),
                Rating = 5,
                Location = GeographyPoint.Create(47.678581, -122.131577)
            }),
        IndexAction.Upload(
            new Hotel()
            {
                HotelId = "2",
                BaseRate = 79.99,
                Description = "Cheapest hotel in town",
                DescriptionFr = "Hôtel le moins cher en ville",
                HotelName = "Roach Motel",
                Category = "Budget",
                Tags = new[] { "motel", "budget" },
                ParkingIncluded = true,
                SmokingAllowed = true,
                LastRenovationDate = new DateTimeOffset(1982, 4, 28, 0, 0, 0, TimeSpan.Zero),
                Rating = 1,
                Location = GeographyPoint.Create(49.678581, -122.131577)
            }),
        IndexAction.MergeOrUpload(
            new Hotel()
            {
                HotelId = "3",
                BaseRate = 129.99,
                Description = "Close to town hall and the river"
            }),
        IndexAction.Delete(new Hotel() { HotelId = "6" })
    };

    var batch = IndexBatch.New(actions);

    try
    {
        indexClient.Documents.Index(batch);
    }
    catch (IndexBatchException e)
    {
        // Sometimes when your Search service is under load, indexing will fail for some of the documents in
        // the batch. Depending on your application, you can take compensating actions like delaying and
        // retrying. For this simple demo, we just log the failed document keys and continue.
        Console.WriteLine(
            "Failed to index some of the documents: {0}",
            String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
    }

    Console.WriteLine("Waiting for documents to be indexed...\n");
    Thread.Sleep(2000);
}
private IndexAction<KeyedDocument> GetSearchIndexAction(
    Context context,
    SearchFilters searchFilters,
    SearchIndexChangeType changeType,
    string[] owners)
{
    var latestFlags = _search.LatestFlagsOrNull(context.VersionLists, searchFilters);
    Guard.Assert(
        changeType == SearchIndexChangeType.Delete || latestFlags != null,
        "Either the search document is being deleted or there is a latest version.");

    IndexAction<KeyedDocument> indexAction;
    if (changeType == SearchIndexChangeType.Delete)
    {
        indexAction = IndexAction.Delete(_search.Keyed(
            context.PackageId,
            searchFilters));
    }
    else if (changeType == SearchIndexChangeType.UpdateVersionList)
    {
        if (owners != null)
        {
            // If we have owner information already fetched on behalf of another search document, send the
            // latest owner information as well. This provides two benefits:
            //
            //   1. This keeps all search documents for a package ID in-sync with regards to their owners
            //      fields.
            //
            //   2. This means if an admin is reflowing for the purposes of fixing up owner information, all
            //      search documents get the benefit instead of having to reflow the latest version of each
            //      search filter.
            //
            indexAction = IndexAction.Merge<KeyedDocument>(_search.UpdateVersionListAndOwnersFromCatalog(
                context.PackageId,
                searchFilters,
                lastCommitTimestamp: context.LastCommitTimestamp,
                lastCommitId: context.LastCommitId,
                versions: latestFlags.LatestVersionInfo.ListedFullVersions,
                isLatestStable: latestFlags.IsLatestStable,
                isLatest: latestFlags.IsLatest,
                owners: owners));
        }
        else
        {
            indexAction = IndexAction.Merge<KeyedDocument>(_search.UpdateVersionListFromCatalog(
                context.PackageId,
                searchFilters,
                lastCommitTimestamp: context.LastCommitTimestamp,
                lastCommitId: context.LastCommitId,
                versions: latestFlags.LatestVersionInfo.ListedFullVersions,
                isLatestStable: latestFlags.IsLatestStable,
                isLatest: latestFlags.IsLatest));
        }
    }
    else if (IsUpdateLatest(changeType))
    {
        var leaf = context.GetLeaf(latestFlags.LatestVersionInfo.ParsedVersion);
        var normalizedVersion = VerifyConsistencyAndNormalizeVersion(context, leaf);

        indexAction = IndexAction.MergeOrUpload<KeyedDocument>(_search.UpdateLatestFromCatalog(
            searchFilters,
            latestFlags.LatestVersionInfo.ListedFullVersions,
            latestFlags.IsLatestStable,
            latestFlags.IsLatest,
            normalizedVersion,
            latestFlags.LatestVersionInfo.FullVersion,
            leaf,
            owners));
    }
    else
    {
        throw new NotImplementedException($"The change type '{changeType}' is not supported.");
    }

    _logger.LogInformation(
        "Search index action prepared for {PackageId} {SearchFilters}: {IndexAction} with a {DocumentType} document.",
        context.PackageId,
        searchFilters,
        indexAction.ActionType,
        indexAction.Document.GetType().FullName);

    return indexAction;
}
public Task IndexAccommodationAsync(Contracts.Models.Accommodation accommodation)
{
    var action = IndexAction.MergeOrUpload(CreateDocument(accommodation));
    return client.Documents.IndexAsync(new IndexBatch(new[] { action }));
}