} // DeleteLargeObject

/// <summary>
/// Searches the given index for large-object documents matching the supplied id.
/// (Original doc said "Search for a csutomer" — wrong entity and a typo.)
/// </summary>
/// <param name="indexName">Index to search; lower-cased enum name is used as the index name.</param>
/// <param name="largeObjectId">Search text (the large-object id).</param>
/// <returns>All matching documents copied into plain models; empty list when nothing matches.</returns>
public List<Model.Search.SearchLargeObjectModel> LargeObjectSearch(Interface.GlobalEnum.IndexerIndexName indexName, string largeObjectId)
{
    // Normalise the index name once instead of recomputing ToString().ToLower() three times.
    string indexKey = indexName.ToString().ToLower();

    // Only check/create the index once per run.
    if (!doesIndexExistsCheck.Contains(indexKey))
    {
        CreateIndexIfNotExists(indexName, Interface.GlobalEnum.IndexerRepositoryIndexType.SystemDefined);
        doesIndexExistsCheck.Add(indexKey);
    }

    SearchIndexClient indexClient = serviceClient.Indexes.GetClient(indexKey);

    SearchParameters searchParameters = new SearchParameters();
    DocumentSearchResponse<Model.Search.SearchLargeObjectModel> response =
        indexClient.Documents.Search<Model.Search.SearchLargeObjectModel>(largeObjectId, searchParameters);

    // Copy hits into fresh models so callers are not coupled to the SDK's result wrapper.
    List<Model.Search.SearchLargeObjectModel> resultList = new List<Model.Search.SearchLargeObjectModel>();
    foreach (SearchResult<Model.Search.SearchLargeObjectModel> item in response)
    {
        resultList.Add(new Model.Search.SearchLargeObjectModel
        {
            LargeObjectId = item.Document.LargeObjectId,
            Payload = item.Document.Payload
        });
    }

    return resultList;
}
/// <summary>
/// Benchmarks LargeObjectSearch by running it the number of times entered in
/// <c>txtLoop</c> and reporting the elapsed time in <c>txtResult</c>.
/// </summary>
private void searchRead_Click(object sender, EventArgs e)
{
    // NOTE(review): throws FormatException on non-numeric input, matching the
    // original behavior; consider TryParse + user feedback if that matters here.
    int loop = Int32.Parse(txtLoop.Text);

    Interface.Repository.IIndexerRepository azureSearch = DI.Container.Resolve<Interface.Repository.IIndexerRepository>();

    // Removed the dead searchLargeObjectModel locals the original built and never used.
    System.Diagnostics.Stopwatch stopwatch = new System.Diagnostics.Stopwatch();
    stopwatch.Start();

    for (int i = 1; i <= loop; i++)
    {
        azureSearch.LargeObjectSearch(Interface.GlobalEnum.IndexerIndexName.LargeObject, i.ToString());
    }

    stopwatch.Stop();

    // BUGFIX: the labels said "Read Write" for a search-read benchmark, and
    // ElapsedMilliseconds / 1000 truncated sub-second runs to "0 seconds";
    // Elapsed.TotalSeconds keeps the fraction.
    txtResult.Text = "Search read (" + loop.ToString() + ") took: " + stopwatch.ElapsedMilliseconds.ToString() + " milliseconds." + Environment.NewLine;
    txtResult.Text += "Search read (" + loop.ToString() + ") took: " + stopwatch.Elapsed.TotalSeconds.ToString() + " seconds." + Environment.NewLine;
}
/// <summary>
/// End-to-end test: upserts a large-object document into the Azure Search index
/// and verifies it can be found again by its id.
/// </summary>
public void LargeObjectAzureSearch()
{
    Model.LargeObject.LargeObjectModel largeObjectModel = this.CreateLargeObjectModel();
    largeObjectModel.LargeObjectId = 1;

    // This would save to blob or somewhere
    //Interface.Service.ILargeObjectService largeObjectService = DI.Container.Resolve<Interface.Service.ILargeObjectService>();
    //largeObjectService.Save(largeObjectModel);

    Model.Search.SearchLargeObjectModel searchLargeObjectModel = new Model.Search.SearchLargeObjectModel();
    searchLargeObjectModel.LargeObjectId = largeObjectModel.LargeObjectId.ToString();
    searchLargeObjectModel.Payload = largeObjectModel.Payload;

    Interface.Repository.IIndexerRepository azureSearch = DI.Container.Resolve<Interface.Repository.IIndexerRepository>();
    azureSearch.UpsertLargeObject(Interface.GlobalEnum.IndexerIndexName.LargeObject, searchLargeObjectModel);

    // NOTE(review): Azure Search indexing is eventually consistent — a freshly
    // upserted document may not be searchable immediately; a retry/delay may be
    // needed if this test is flaky. Confirm against the service tier in use.
    List<Model.Search.SearchLargeObjectModel> list = azureSearch.LargeObjectSearch(
        Interface.GlobalEnum.IndexerIndexName.LargeObject,
        largeObjectModel.LargeObjectId.ToString());

    // Guard first so an empty result fails with a clear message instead of an
    // IndexOutOfRangeException on list[0].
    Assert.IsTrue(list.Count > 0, "Search returned no documents for the upserted id.");

    // BUGFIX: Assert.AreEqual takes (expected, actual) — the original had them
    // reversed, which produces misleading failure messages.
    Assert.AreEqual(largeObjectModel.LargeObjectId.ToString(), list[0].LargeObjectId);
} // LargeObjectAzureSearch
} // UpsertLargeObject

/// <summary>
/// Removes a large-object document from the given index (matched by key).
/// </summary>
/// <param name="indexName">Index the document lives in; lower-cased enum name is the index name.</param>
/// <param name="searchLargeObjectModel">Document to delete.</param>
public void DeleteLargeObject(Interface.GlobalEnum.IndexerIndexName indexName, Model.Search.SearchLargeObjectModel searchLargeObjectModel)
{
    string indexKey = indexName.ToString().ToLower();

    // Only verify/create the index once per run.
    if (!doesIndexExistsCheck.Contains(indexKey))
    {
        CreateIndexIfNotExists(indexName, Interface.GlobalEnum.IndexerRepositoryIndexType.SystemDefined);
        doesIndexExistsCheck.Add(indexKey);
    }

    SearchIndexClient indexClient = serviceClient.Indexes.GetClient(indexKey);

    // Batches are supported, but a single-document batch keeps per-document
    // retry handling simple.
    List<Model.Search.SearchLargeObjectModel> batchItems =
        new List<Model.Search.SearchLargeObjectModel> { searchLargeObjectModel };
    indexClient.Documents.Index(
        IndexBatch.Create(batchItems.Select(doc => IndexAction.Create(IndexActionType.Delete, doc))));

    // When the Search service is under load, indexing can fail for some of the
    // documents in a batch; depending on the application, compensating actions
    // such as delaying and retrying may be appropriate.
} // DeleteLargeObject
/// <summary>
/// Inserts a large-object document, or updates it if it already exists
/// (merge-or-upload semantics).
/// </summary>
/// <param name="indexName">Index to write to; lower-cased enum name is the index name.</param>
/// <param name="searchLargeObjectModel">Document to insert or update.</param>
public void UpsertLargeObject(Interface.GlobalEnum.IndexerIndexName indexName, Model.Search.SearchLargeObjectModel searchLargeObjectModel)
{
    string indexKey = indexName.ToString().ToLower();

    // Only verify/create the index once per run.
    if (!doesIndexExistsCheck.Contains(indexKey))
    {
        CreateIndexIfNotExists(indexName, Interface.GlobalEnum.IndexerRepositoryIndexType.SystemDefined);
        doesIndexExistsCheck.Add(indexKey);
    }

    SearchIndexClient indexClient = serviceClient.Indexes.GetClient(indexKey);

    // Batches are supported, but a single-document batch keeps per-document
    // retry handling simple.
    List<Model.Search.SearchLargeObjectModel> batchItems =
        new List<Model.Search.SearchLargeObjectModel> { searchLargeObjectModel };
    indexClient.Documents.Index(
        IndexBatch.Create(batchItems.Select(doc => IndexAction.Create(IndexActionType.MergeOrUpload, doc))));
} // UpsertLargeObject