Example no. 1
        public static async Task UploadDocuments(ISearchIndexClient indexClient, List <Hotel> hotels)
        {
            var batch = IndexBatch.Upload(hotels);

            try
            {
                await indexClient.Documents.IndexAsync(batch);
            }
            catch (IndexBatchException e)
            {
                // When a service is under load, indexing might fail for some documents in the batch.
                // Depending on your application, you can compensate by delaying and retrying.
                // For this simple demo, we just log the failed document keys and continue.
                Console.WriteLine("Failed to index some of the documents: {0}",
                                  String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
            }
        }
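The comment above mentions delaying and retrying as a compensating action. Below is a minimal sketch of that idea, assuming the same Microsoft.Azure.Search SDK and the Hotel model with a HotelId key; the method name, attempt count, and delay values are illustrative, not from the original source. It rebuilds the batch from only the failed actions via IndexBatchException.FindFailedActionsToRetry.

        // Hedged sketch: retry only the documents that failed, with a short growing delay.
        // After the last attempt, remaining failures are simply dropped, as in the demo above.
        public static async Task UploadDocumentsWithRetry(ISearchIndexClient indexClient, List<Hotel> hotels)
        {
            var batch = IndexBatch.Upload(hotels);

            for (var attempt = 0; attempt < 3; attempt++)
            {
                try
                {
                    await indexClient.Documents.IndexAsync(batch);
                    return;
                }
                catch (IndexBatchException e)
                {
                    // Keep only the actions whose results report failure and try them again.
                    batch = e.FindFailedActionsToRetry(batch, hotel => hotel.HotelId);
                    await Task.Delay(TimeSpan.FromSeconds(2 * (attempt + 1)));
                }
            }
        }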
        public void AddFlightData(ISearchIndexClient indexClient)
        {
            var flights = ReturnFlights();

            //batch
            var batch = IndexBatch.Upload(flights);

            try
            {
                indexClient.Documents.Index(batch);
            }
            catch (Exception)
            {
                //Sometimes indexing will fail due to load
                throw;
            }
        }
Example no. 3
        protected async Task Index(T value)
        {
            var indexClient = await _searchIndexClientProvider.Get <T>();

            var batch = IndexBatch.MergeOrUpload <T>(new List <T> {
                value
            });

            try
            {
                await indexClient.Documents.IndexAsync <T>(batch);
            }
            catch (IndexBatchException)
            {
                _logger.LogError($"Indexing failed for ${value.Id} in index {indexClient.IndexName}");
            }
        }
Example no. 4
        /// <summary>
        /// Index document.
        /// </summary>
        /// <param name="document">Document.</param>
        /// <param name="merge">If set to <c>true</c> merge otherwise just upload.</param>
        /// <typeparam name="T">The document type.</typeparam>
        public async Task IndexDocumentAsync <T>(T document, bool merge = true) where T : class
        {
            var documents = new List <ShushuIndex> {
                document.MapToIndex()
            };

            if (merge)
            {
                var batch = IndexBatch.MergeOrUpload(documents);
                await _indexClient.Documents.IndexAsync(batch).ConfigureAwait(false);
            }
            else
            {
                var batch = IndexBatch.Upload(documents);
                await _indexClient.Documents.IndexAsync(batch).ConfigureAwait(false);
            }
        }
Example no. 5
        public async Task FlushAsync(CancellationToken cancellationToken = default)
        {
            var documents = _collection.ToList();

            if (!documents.Any())
            {
                _logger.LogWarning($"{nameof(FlushAsync)}: no items to flush (IndexName: {_searchIndexClient.IndexName})");
                return;
            }

            await _searchIndexClient.Documents.IndexAsync(
                IndexBatch.Upload(documents),
                cancellationToken : cancellationToken);

            _logger.LogInformation($"{nameof(FlushAsync)}: all items uploaded (IndexName: {_searchIndexClient.IndexName})");
            Flush();
        }
        private static void IndexDocuments(string indexName, List <string> groups)
        {
            var actions = new IndexAction <SecuredFiles>[]
            {
                IndexAction.Upload(
                    new SecuredFiles()
                {
                    FileId   = "1",
                    Name     = "secured_file_a",
                    GroupIds = new[] { groups[0] }
                }),
                IndexAction.Upload(
                    new SecuredFiles()
                {
                    FileId   = "2",
                    Name     = "secured_file_b",
                    GroupIds = new[] { groups[0] }
                }),
                IndexAction.Upload(
                    new SecuredFiles()
                {
                    FileId   = "3",
                    Name     = "secured_file_c",
                    GroupIds = new[] { groups[1] }
                })
            };

            var batch = IndexBatch.New(actions);

            try
            {
                _indexClient.Documents.Index(batch);
            }
            catch (IndexBatchException e)
            {
                // Sometimes when your Search service is under load, indexing will fail for some of the documents in
                // the batch. Depending on your application, you can take compensating actions like delaying and
                // retrying. For this simple demo, we just log the failed document keys and continue.
                Console.WriteLine(
                    "Failed to index some of the documents: {0}",
                    String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
            }

            Console.WriteLine("Waiting for documents to be indexed...\n");
            Thread.Sleep(2000);
        }
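The three SecuredFiles documents above only become useful once queries are trimmed by group. As a hedged sketch of the query side (the method name, the filterGroupId parameter, and reuse of the same static _indexClient are assumptions), an OData filter over the GroupIds collection restricts results to the caller's group:

        // Hedged sketch: search with a filter over the GroupIds collection field.
        private static void SearchWithGroupFilter(string filterGroupId)
        {
            var parameters = new SearchParameters
            {
                Filter = $"GroupIds/any(g: g eq '{filterGroupId}')",
                Select = new[] { "Name" }
            };

            DocumentSearchResult<SecuredFiles> results =
                _indexClient.Documents.Search<SecuredFiles>("*", parameters);

            Console.WriteLine("Visible files: {0}",
                              String.Join(", ", results.Results.Select(r => r.Document.Name)));
        }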
Example no. 7
        public void IndexDoesNotThrowWhenAllActionsSucceed()
        {
            Run(() =>
            {
                SearchIndexClient client = Data.GetSearchIndexClient();

                var batch = IndexBatch.Upload(new[] { new Hotel()
                                                      {
                                                          HotelId = "1"
                                                      } });

                DocumentIndexResult documentIndexResult = client.Documents.Index(batch);

                Assert.Equal(1, documentIndexResult.Results.Count);
                AssertIndexActionSucceeded("1", documentIndexResult.Results[0], 201);
            });
        }
        private void _timer_Elapsed(object sender, ElapsedEventArgs e)
        {
            _timer.Stop();
            //search active directory

            _logger.LogEvent("Fetching Mail enabled users from AD...");
            var client = Services.AzureSearchIndexService.CreateSearchServiceClient();

            Frameworks.AzureSearch.AzureSearchHelper.CreateIndex <Frameworks.ActiveDirectory.ADUserDetail>(client, _indexName, "ObjectGUID",
                                                                                                           new List <string>()
            {
                "ManagerName", "ThumbnailPhoto"
            });
            ISearchIndexClient indexClient = client.Indexes.GetClient(_indexName);

            var adhelper = new Frameworks.ActiveDirectory.ActiveDirectoryHelper();
            var users    = adhelper.GetAllMailUsers();

            _logger.LogEvent("Beginning index upload...");

            for (int i = 0; i < users.Count; i += 999)
            {
                var batch = IndexBatch.Upload(users.Skip(i).Take(999));
                try
                {
                    indexClient.Documents.Index(batch);
                }
                catch (IndexBatchException exc)
                {
                    // Sometimes when your Search service is under load, indexing will fail for some of the documents in
                    // the batch. Depending on your application, you can take compensating actions like delaying and
                    // retrying. For this simple demo, we just log the failed document keys and continue.
                    _logger.LogEvent(string.Format(
                                         "Failed to index some of the documents: {0}",
                                         String.Join(", ", exc.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key))));
                }
            }

            _logger.LogEvent("Job Complete...!");
            _logger.LogEvent("Will resume in 24 hours!");


            _timer.Interval = Double.Parse(System.Configuration.ConfigurationManager.AppSettings["RefreshInterval"]);
            _timer.Start();
        }
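The loop above pages through the user list 999 entries at a time because the service caps an indexing batch at 1,000 actions. A generic version of the same pattern, as a hedged sketch (the helper name and chunkSize parameter are illustrative):

        // Hedged sketch: upload any collection in chunks that respect the batch size limit.
        private static void UploadInChunks<T>(ISearchIndexClient indexClient, IReadOnlyList<T> documents, int chunkSize = 1000)
        {
            for (int i = 0; i < documents.Count; i += chunkSize)
            {
                var batch = IndexBatch.Upload(documents.Skip(i).Take(chunkSize));

                try
                {
                    indexClient.Documents.Index(batch);
                }
                catch (IndexBatchException e)
                {
                    // Log the failed keys and continue; a production job would retry them.
                    Console.WriteLine("Failed to index some of the documents: {0}",
                                      String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
                }
            }
        }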
Example no. 9
        public async Task Remove(List <string> produktionsresultatIdentitetList)
        {
            if (produktionsresultatIdentitetList != null && produktionsresultatIdentitetList.Any())
            {
                try
                {
                    var batch = IndexBatch.Delete("ProduktionsresultatIdentitet", produktionsresultatIdentitetList);

                    var indexClient = _searchServiceClient.Indexes.GetClient(INDEX_EVENTS);
                    await indexClient.Documents.IndexAsync(batch);
                }
                catch (Exception e)
                {
                    throw new ApplicationException(e.Message, e);
                }
            }
        }
Example no. 10
        private void UploadDocuments(ISearchIndexClient indexClient)
        {
            BankAppDataContext context = new BankAppDataContext();
            var searchCustomer         = context.Customers.Select(c => new SearchCustomer
            {
                CustomerId = c.CustomerId,
                Id         = c.CustomerId.ToString(),
                NationalId = c.NationalId,
                Name       = c.Givenname + " " + c.Surname,
                Address    = c.Streetaddress,
                City       = c.City
            }).ToArray();

            var batch = IndexBatch.Upload(searchCustomer);

            indexClient.Documents.Index(batch);
        }
        public static void DeleteDocById(string id)
        {
            // Get the doc first
            var indexClient = GetSearchIndexClient();

            SearchDocument document = indexClient.Documents.Get <SearchDocument>(id);

            if (document != null)
            {
                // Remove file from azure
                AzureStorageHelper.DeleteBlob(document.DocFileName);

                // Now remove from index
                var batch = IndexBatch.Delete(new[] { document });
                PerformIndexOperation(batch);
            }
        }
Example no. 12
 public void Delete(T[] words)
 {
     try
     {
         var batch = IndexBatch.Delete(words);
         _Client.Documents.Index(batch);
     }
     catch (IndexBatchException e)
     {
         // Sometimes when your Search service is under load, indexing will fail for some of the documents in
         // the batch. Depending on your application, you can take compensating actions like delaying and
         // retrying. For this simple demo, we just log the failed document keys and continue.
         Console.WriteLine(
             "Failed to index some of the documents: {0}",
             String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
     }
 }
Example no. 13
        public async Task AddOrUpdateIndexDataAsync(IEnumerable <SicCodeSearchModel> newRecords)
        {
            if (newRecords == null || !newRecords.Any())
            {
                throw new ArgumentNullException(nameof(newRecords), "You must supply at least one record to index");
            }

            //Set the records to add or update
            var actions = newRecords.Cast <AzureSicCodeSearchModel>().Select(IndexAction.MergeOrUpload).ToList();

            var batches = new ConcurrentBag <IndexBatch <AzureSicCodeSearchModel> >();

            while (actions.Any())
            {
                var batchSize = actions.Count > 1000 ? 1000 : actions.Count;
                var batch     = IndexBatch.New(actions.Take(batchSize).ToList());
                batches.Add(batch);
                actions.RemoveRange(0, batchSize);
            }

            var searchIndexClient = await _searchIndexClient.Value;

            Parallel.ForEach(
                batches,
                batch =>
            {
                var retries = 0;
                retry:
                try
                {
                    searchIndexClient.Documents.Index(batch);
                }
                catch (IndexBatchException)
                {
                    if (retries < 30)
                    {
                        retries++;
                        Thread.Sleep(1000);
                        goto retry;
                    }

                    throw;
                }
            });
        }
        static void Main(string[] args)
        {
            String searchServiceName          = "ngtsearch1";
            string accesskey                  = "E560A03E5044BB6F9A63338E4511F8E6";
            SearchServiceClient serviceClient =
                new SearchServiceClient(searchServiceName, new SearchCredentials(accesskey));
            var definition = new Index()
            {
                Name   = "homes",
                Fields = FieldBuilder.BuildForType <Home>()
            };

            serviceClient.Indexes.DeleteAsync("homes").Wait();
            serviceClient.Indexes.CreateOrUpdate(definition);

            var homes = new Home[]
            {
                new Home()
                {
                    HomeId        = "133",
                    RetailPrice   = Convert.ToDouble("459999.00"),
                    SquareFootage = 3200,
                    Description   =
                        "Single floor, ranch style on 1 acre of property. 4 bedroom,large living room with open kitchen, dining area.",
                    Location = GeographyPoint.Create(47.678581, -122.131577)
                }
            };
            ISearchIndexClient indexClient = serviceClient.Indexes.GetClient("homes");
            var batch = IndexBatch.Upload(homes);

            indexClient.Documents.Index(batch);

            /*SearchParameters parameters =
             *      new SearchParameters()
             *      {
             *          Select = new[] { "SquareFootage" }
             *      };
             * DocumentSearchResult<Home> searchResults = indexClient.Documents.Search<Home>("3200", parameters);
             *
             * foreach (SearchResult<Home> result in searchResults.Results)
             * {
             *  Console.WriteLine(result.Document);
             * }*/
        }
Example no. 15
        private static void ImportData(SearchServiceClient serviceClient)
        {
            var hotelsText = File.ReadAllLines(hotelFileName);
            var hotels     = new List <Hotel>();

            for (int i = 1; i < hotelsText.Length; i++)
            {
                var hotelText        = hotelsText[i];
                var hotelTextColumns = hotelText.Split("\t");
                hotels.Add(
                    new Hotel()
                {
                    HotelId            = hotelTextColumns[0],
                    HotelName          = hotelTextColumns[1],
                    Description        = hotelTextColumns[2],
                    DescriptionFr      = hotelTextColumns[3],
                    Category           = hotelTextColumns[4],
                    Tags               = hotelTextColumns[5].Split(","),
                    ParkingIncluded    = hotelTextColumns[6] == "0" ? false : true,
                    SmokingAllowed     = hotelTextColumns[7] == "0" ? false : true,
                    LastRenovationDate = Convert.ToDateTime(hotelTextColumns[8]),
                    BaseRate           = Convert.ToDouble(hotelTextColumns[9]),
                    Rating             = (int)Convert.ToDouble(hotelTextColumns[10])
                });
            } // no error checking because demo code

            var actions = new List <IndexAction <Hotel> >();

            foreach (var hotel in hotels)
            {
                actions.Add(IndexAction.Upload(hotel));
            }

            var batch = IndexBatch.New(actions);

            try
            {
                ISearchIndexClient indexClient = serviceClient.Indexes.GetClient("hotels");
                indexClient.Documents.Index(batch);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
            }
        }
Example no. 16
        protected void TestCanSearchWithDateTimeInStaticModel()
        {
            SearchServiceClient serviceClient = Data.GetSearchServiceClient();

            Index index =
                new Index()
            {
                Name   = TestUtilities.GenerateName(),
                Fields = new[]
                {
                    new Field("ISBN", DataType.String)
                    {
                        IsKey = true
                    },
                    new Field("Title", DataType.String)
                    {
                        IsSearchable = true
                    },
                    new Field("Author", DataType.String),
                    new Field("PublishDate", DataType.DateTimeOffset)
                }
            };

            IndexDefinitionResponse createIndexResponse = serviceClient.Indexes.Create(index);
            SearchIndexClient       indexClient         = Data.GetSearchIndexClient(createIndexResponse.Index.Name);

            var doc1 = new Book()
            {
                ISBN = "123", Title = "Lord of the Rings", Author = "J.R.R. Tolkien"
            };
            var doc2 = new Book()
            {
                ISBN = "456", Title = "War and Peace", PublishDate = new DateTime(2015, 8, 18)
            };
            var batch = IndexBatch.Create(IndexAction.Create(doc1), IndexAction.Create(doc2));

            indexClient.Documents.Index(batch);
            SearchTestUtilities.WaitForIndexing();

            DocumentSearchResponse <Book> response = indexClient.Documents.Search <Book>("War and Peace");

            Assert.Equal(HttpStatusCode.OK, response.StatusCode);
            Assert.Equal(1, response.Results.Count);
            Assert.Equal(doc2, response.Results[0].Document);
        }
Example no. 17
        public static void PostSave(PostBase post)
        {
            using (var client = CreateClient())
            {
                var indexClient = client.Indexes.GetClient("content");
                var body        = new StringBuilder();

                foreach (var block in post.Blocks)
                {
                    if (block is HtmlBlock htmlBlock)
                    {
                        body.AppendLine(htmlBlock.Body.Value);
                    }
                    else if (block is HtmlColumnBlock columnBlock)
                    {
                        body.AppendLine(columnBlock.Column1.Value);
                        body.AppendLine(columnBlock.Column2.Value);
                    }
                }

                var cleanHtml   = new Regex("<[^>]*(>|$)");
                var cleanSpaces = new Regex("[\\s\\r\\n]+");

                var cleaned = cleanSpaces.Replace(cleanHtml.Replace(body.ToString(), " "), " ").Trim();

                var actions = new IndexAction <Content>[]
                {
                    IndexAction.MergeOrUpload(
                        new Content
                    {
                        Slug        = post.Slug,
                        ContentId   = post.Id.ToString(),
                        ContentType = "post",
                        Title       = post.Title,
                        Category    = post.Category.Title,
                        Tags        = post.Tags.Select(t => t.Title).ToList(),
                        Body        = cleaned
                    }
                        )
                };
                var batch = IndexBatch.New(actions);

                indexClient.Documents.Index(batch);
            }
        }
        /// <summary>
        /// Inserts/Updates a customer
        /// </summary>
        public void UpsertCustomer(Interface.GlobalEnum.IndexerIndexName indexName, Model.Search.SearchCustomerModel searchCustomerModel)
        {
            // only check once per run
            if (!doesIndexExistsCheck.Contains(indexName.ToString().ToLower()))
            {
                CreateIndexIfNotExists(indexName, Interface.GlobalEnum.IndexerRepositoryIndexType.SystemDefined);
                doesIndexExistsCheck.Add(indexName.ToString().ToLower());
            }

            SearchIndexClient indexClient = serviceClient.Indexes.GetClient(indexName.ToString().ToLower());

            // This could be batched, but we index one document at a time to keep retries simple
            List <Model.Search.SearchCustomerModel> itemsToIndex = new List <Model.Search.SearchCustomerModel>();

            itemsToIndex.Add(searchCustomerModel);

            indexClient.Documents.Index(IndexBatch.Create(itemsToIndex.Select(doc => IndexAction.Create(IndexActionType.MergeOrUpload, doc))));
        } // UpsertCustomer
        public void AddOrUpdateLocation(Location location)
        {
            // The lat and long of the position Class need to be reversed to get them stored correctly
            var position    = new Position(location.Longitude, location.Latitude);
            var newLocation = new SearchLocation()
            {
                id    = location.Id,
                Name  = location.Name,
                Point = new Point(position),
                Physical_Addresses = location.Physical_Addresses
            };

            var batch = IndexBatch.Upload(new List <SearchLocation> {
                newLocation
            });

            _searchIndexClient.Documents.Index(batch);
        }
Example no. 20
        public async Task <bool> DeleteDataAsync(IEnumerable <string> ids, CancellationToken token)
        {
            if (ids == null)
            {
                throw new ArgumentNullException(nameof(ids));
            }
            var batch = IndexBatch.Delete(ids.Select(s => new T
            {
                Id = s
            }));
            var result = await IndexClient.Documents.IndexAsync(batch, cancellationToken : token);

            foreach (var errorResult in result.Results.Where(w => !w.Succeeded))
            {
                _logger.Error($"Failed to process id {errorResult.Key} error {errorResult.ErrorMessage} on index {IndexClient.IndexName} ");
            }
            return(result.Results.Count > 0);
        }
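Building stub documents just to carry the key works, but IndexBatch.Delete also has an overload that takes the key field name and the key values directly, as Example no. 9 above does. A hedged sketch against the same IndexClient, assuming the index key field is named "Id" (the method name is illustrative):

        // Hedged sketch: delete by key values without materializing document instances.
        public async Task DeleteByKeysAsync(IEnumerable<string> ids, CancellationToken token)
        {
            var batch = IndexBatch.Delete("Id", ids);
            await IndexClient.Documents.IndexAsync(batch, cancellationToken: token);
        }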
Example no. 21
 public void UploadDocuments <T>(string indexName, T[] documents)
 {
     try
     {
         ISearchIndexClient indexClient = client.Indexes.GetClient(indexName);
         var actions = new List <IndexAction <T> >();
         for (int i = 0; i < documents.Length; i++)
         {
             actions.Add(IndexAction.Upload(documents[i]));
         }
         var batch = IndexBatch.New(actions);
         indexClient.Documents.Index(batch);
     }
     catch (IndexBatchException e)
     {
         Debug.WriteLine("Failed to index some of the documents: {0}", String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
     }
 }
Example no. 22
        public void IndexDoesNotThrowWhenAllActionsSucceed()
        {
            Run(() =>
            {
                SearchIndexClient client = Data.GetSearchIndexClient();

                var batch = IndexBatch.Create(new[] { IndexAction.Create(new Hotel()
                    {
                        HotelId = "1"
                    }) });

                DocumentIndexResponse indexResponse = client.Documents.Index(batch);
                Assert.Equal(HttpStatusCode.OK, indexResponse.StatusCode);

                Assert.Equal(1, indexResponse.Results.Count);
                AssertIndexActionSucceeded("1", indexResponse.Results[0]);
            });
        }
        private static void UploadDocuments(ISearchIndexClient indexClient)
        {
            String value = File.ReadAllText(@"KnowledgeBaseRaw.json");

            RawDataFormat[]       DatabaseObjects = JsonConvert.DeserializeObject <RawDataFormat[]>(value);
            List <DatabaseObject> AzureObjects    = new List <DatabaseObject>();


            foreach (RawDataFormat kbobject in DatabaseObjects)
            {
                DatabaseObject newAzureObject = new DatabaseObject();
                newAzureObject.CreatedBy       = string.Format("{0}:Text==", kbobject.CreatedBy);
                newAzureObject.CreatedDateTime = string.Format("{0}:Text==", kbobject.CreatedDateTime);
                newAzureObject.Description     = string.Format("{0}:Text==", kbobject.Description);
                newAzureObject.FoundedYear     = string.Format("{0}:Text==", kbobject.FoundedYear);
                newAzureObject.HelpDesk        = string.Format("{0}:Link==", kbobject.HelpDesk);
                newAzureObject.HomepageUrl     = string.Format("{0}:Link==", kbobject.HomepageUrl);
                newAzureObject.Id  = string.Format("{0}==", kbobject.Id);
                newAzureObject.Lob = string.Format("{0}==", kbobject.Lob);
                newAzureObject.NumberOfEmployees = string.Format("{0}:Text==", kbobject.NumberOfEmployees);
                newAzureObject.PhoneNumber       = string.Format("{0}:Text==", kbobject.PhoneNumber);
                newAzureObject.TwitterHandle     = string.Format("{0}:Text==", kbobject.TwitterHandle);
                newAzureObject.CombinedBase      = new List <string>();

                foreach (CombinedBase combinedBaseObject in kbobject.CombinedBaseSystem)
                {
                    newAzureObject.CombinedBase.Add(combinedBaseObject.Name + ":" + combinedBaseObject.Value + ":" + combinedBaseObject.ValueType + "==");
                }
                AzureObjects.Add(newAzureObject);
            }

            var batch = IndexBatch.MergeOrUpload(AzureObjects);

            try
            {
                indexClient.Documents.Index(batch);
                Console.WriteLine("Indexed");
            }

            catch (IndexBatchException)
            {
                // Partial indexing failures are ignored in this simple demo.
            }
            Thread.Sleep(2000);
        }
Example no. 24
        private static void UploadDocuments(ISearchIndexClient indexClient, string filename, bool isFirstLineHeaders)
        {
            using (var streamReader = File.OpenText(filename))
            {
                var count      = 0;
                var lines      = streamReader.ReadToEnd().Split("\r\n".ToCharArray(), StringSplitOptions.RemoveEmptyEntries);
                var documentos = new List <Documento>();
                foreach (var line in lines)
                {
                    Documento documento = null;
                    if (!isFirstLineHeaders || isFirstLineHeaders && count > 0)
                    {
                        documento = ProcessLine(line);
                    }

                    if (documento != null)
                    {
                        documentos.Add(documento);
                    }
                    count++;
                }

                var batch = IndexBatch.Upload(documentos);

                try
                {
                    indexClient.Documents.Index(batch);
                }
                catch (IndexBatchException e)
                {
                    // Sometimes when your Search service is under load, indexing will fail for some of the documents in
                    // the batch. Depending on your application, you can take compensating actions like delaying and
                    // retrying. For this simple demo, we just log the failed document keys and continue.

                    Console.WriteLine(
                        "Failed to index some of the documents: {0}",
                        String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key))
                        );
                }

                Console.WriteLine("Waiting for documents to be indexed...\n");
                Thread.Sleep(2000);
            }
        }
Example no. 25
        public static async Task UploadToAzureSearch(string pageId, string documentName, int pageNumber, string keyPhrases, string text, TraceWriter log)
        {
            // Create the index if it doesn't exist
            if (!_serviceClient.Indexes.Exists(Constants.IndexName))
            {
                var definition = new Index()
                {
                    Name   = Constants.IndexName,
                    Fields = FieldBuilder.BuildForType <DocumentPage>()
                };
                _serviceClient.Indexes.Create(definition);
            }

            ISearchIndexClient indexClient = _serviceClient.Indexes.GetClient(Constants.IndexName);

            var documentPage = new DocumentPage
            {
                pageId       = pageId,
                documentName = documentName,
                pageNumber   = pageNumber,
                keyPhrases   = keyPhrases,
                text         = text
            };

            var actions = new IndexAction <DocumentPage>[]
            {
                IndexAction.MergeOrUpload(documentPage)
            };

            // Pretty small batch! Still fine for this simple demo
            var batch = IndexBatch.New(actions);

            try
            {
                await indexClient.Documents.IndexAsync(batch);
            }
            catch (IndexBatchException e)
            {
                // Sometimes when your Search service is under load, indexing will fail for some of the documents in
                // the batch. Depending on your application, you can take compensating actions like delaying and
                // retrying. For this simple demo, we just log the failed document keys and continue.
                log.Info("Failed to index some of the documents: " + string.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
            }
        }
Example no. 26
        public void CanIndexWithPascalCaseFields()
        {
            Run(() =>
            {
                SearchServiceClient serviceClient = Data.GetSearchServiceClient();

                Index index =
                    new Index()
                {
                    Name   = TestUtilities.GenerateName(),
                    Fields = new[]
                    {
                        new Field("ISBN", DataType.String)
                        {
                            IsKey = true
                        },
                        new Field("Title", DataType.String),
                        new Field("Author", DataType.String)
                    }
                };

                IndexDefinitionResponse createIndexResponse = serviceClient.Indexes.Create(index);
                Assert.Equal(HttpStatusCode.Created, createIndexResponse.StatusCode);

                SearchIndexClient indexClient = Data.GetSearchIndexClient(createIndexResponse.Index.Name);

                var batch =
                    IndexBatch.Create(
                        new[]
                {
                    IndexAction.Create(
                        new Book()
                    {
                        ISBN = "123", Title = "Lord of the Rings", Author = "J.R.R. Tolkien"
                    })
                });

                DocumentIndexResponse indexResponse = indexClient.Documents.Index(batch);
                Assert.Equal(HttpStatusCode.OK, indexResponse.StatusCode);

                Assert.Equal(1, indexResponse.Results.Count);
                AssertIndexActionSucceeded("123", indexResponse.Results[0]);
            });
        }
        private async Task <DocumentIndexResult> IndexBatchWithRetryAsync <T>(Func <T, IndexAction <T> > indexActionFunc, IEnumerable <T> documents, string indexName) where T : SearchDocument
        {
            const int retryCount      = 2;
            var       documentsArray  = documents.ToArray();
            var       documentsChunks = documentsArray.Chunk(BatchMaximumSize);
            var       indexBatches    = documentsChunks.Select(chunk => IndexBatch.New(chunk.Select(indexActionFunc)));
            var       indexingResults = new List <IndexingResult>();

            foreach (var indexBatch in indexBatches)
            {
                var policy = Policy.Handle <IndexBatchException>().WaitAndRetryAsync(retryCount,
                                                                                     retryAttempt => TimeSpan.FromSeconds(retryAttempt),
                                                                                     async(exception, span) =>
                {
                    var indexBatchException = (IndexBatchException)exception;
                    var itemsToRetry        = indexBatchException.FindFailedActionsToRetry(indexBatch, d => d.Id);

                    var now = DateTimeOffset.UtcNow;
                    Array.ForEach(documentsArray, d => d.IndexedAt = now);

                    await ExecuteIndexBatchAsync(itemsToRetry, indexName);
                });

                try
                {
                    var indexBatchResult = await policy.ExecuteAsync(async() =>
                    {
                        var now = DateTimeOffset.UtcNow;

                        Array.ForEach(documentsArray, d => d.IndexedAt = now);

                        return(await ExecuteIndexBatchAsync(indexBatch, indexName));
                    });

                    indexingResults.AddRange(indexBatchResult.Results);
                }
                catch (Exception e)
                {
                    _logger.Log(Level.Error, "Exception when indexing content", e);
                }
            }

            return(new DocumentIndexResult(indexingResults));
        }
Example no. 28
        protected void TestNullCannotBeConvertedToValueType()
        {
            SearchServiceClient serviceClient = Data.GetSearchServiceClient();

            var index = new Index()
            {
                Name   = SearchTestUtilities.GenerateName(),
                Fields = FieldBuilder.BuildForType <ModelWithNullableValueTypes>()
            };

            serviceClient.Indexes.Create(index);
            SearchIndexClient indexClient = Data.GetSearchIndexClient(index.Name);

            var batch =
                IndexBatch.Upload(new[]
            {
                new ModelWithNullableValueTypes()
                {
                    Key      = "123",
                    IntValue = null,
                    Bucket   = new Bucket()
                    {
                        BucketName = "Z", Count = 1
                    }
                },
                new ModelWithNullableValueTypes()
                {
                    Key      = "456",
                    IntValue = 5,
                    Bucket   = null
                }
            });

            indexClient.Documents.Index(batch);
            SearchTestUtilities.WaitForIndexing();

            SerializationException e = Assert.Throws <SerializationException>(() => indexClient.Documents.Search <ModelWithValueTypes>("123"));

            Assert.Contains("Error converting value {null} to type 'System.Int32'. Path 'IntValue'.", e.ToString());

            e = Assert.Throws <SerializationException>(() => indexClient.Documents.Search <ModelWithValueTypes>("456"));
            Assert.Contains("Error converting value {null} to type 'Microsoft.Azure.Search.Tests.SearchTests+Bucket'. Path 'Bucket'.", e.ToString());
        }
Example no. 29
        /// <summary>
        /// Creates the asynchronous.
        /// </summary>
        /// <typeparam name="T">The type T</typeparam>
        /// <param name="indexName">Name of the index.</param>
        /// <param name="documents">The documents.</param>
        /// <returns>
        /// Returns Task of bool
        /// </returns>
        /// <exception cref="Exception">Failed to create some documents</exception>
        public async Task <bool> CreateAsync <T>(string indexName, IEnumerable <T> documents)
            where T : class
        {
            try
            {
                using (var serviceClient = this.GetAdminSearchServiceClient())
                {
                    var client = serviceClient.Indexes.GetClient(indexName);
                    var btch   = IndexBatch.Upload <T>(documents);
                    await client.Documents.IndexAsync(btch).ConfigureAwait(false);

                    return(true);
                }
            }
            catch (IndexBatchException)
            {
                return(false);
            }
        }
Example no. 30
        private static void UploadtoIndex(string p_indexName, SearchServiceClient p_serviceClient)
        {
            var l_customer = new customer[]
            {
                new customer()
                {
                    Id       = "1",
                    Name     = "userA",
                    Progress = "20",
                    Comment  = "The couse is good",
                    Course   = "AZ-203 Developing Solutions for Microsoft Azure"
                },
                new customer()
                {
                    Id       = "2",
                    Name     = "userB",
                    Progress = "40",
                    Comment  = "The couse really has a lot of good aspects",
                    Course   = "AZ-103 Microsoft Azure Administrator"
                },
                new customer()
                {
                    Id       = "3",
                    Name     = "userB",
                    Progress = "15",
                    Comment  = "The couse needs improvement",
                    Course   = "AZ-203 Developing Solutions for Microsoft Azure"
                }
            };
            var l_batch = IndexBatch.Upload(l_customer);
            ISearchIndexClient p_indexClient = p_serviceClient.Indexes.GetClient(p_indexName);

            try
            {
                Console.WriteLine("Uploading documents");
                p_indexClient.Documents.Index(l_batch);
                Console.WriteLine("All documents uploaded");
            }
            catch (IndexBatchException e)
            {
                Console.WriteLine(e.Message);
            }
        }
Example no. 31
        static void Main(string[] args)
        {
            string searchServiceName = args[0];
            var credentials = new SearchCredentials(args[1]);
            var searchClient = new SearchServiceClient(searchServiceName, credentials);
            try
            {
                IndexDefinitionResponse getResponse = searchClient.Indexes.Get(IndexName);
                if (getResponse?.Index != null)
                {
                    Console.WriteLine("Deleting and recreating index " + IndexName);
                    searchClient.Indexes.Delete(IndexName);
                }
            }
            catch (CloudException)
            {
                // We expect this if the index does not yet exist.
            }

            IndexDefinitionResponse createIndexResponse = searchClient.Indexes.Create(new Index(
                IndexName,
                new[]
                {
                    new Field("ItemId", DataType.String) { IsKey = true },
                    new Field("Title", DataType.String) { IsSearchable = true },
                    new Field("Content", DataType.String) { IsSearchable = true },
                    new Field("CommentThreadId", DataType.Int32),
                    new Field("TimelineEntryId", DataType.Int32),
                    new Field("MediaAlbumId", DataType.Int32),
                    new Field("UserMediaId", DataType.Int32)
                }));

            Index index = createIndexResponse.Index;
            var indexClient = new SearchIndexClient(searchServiceName, IndexName, credentials);
            using (var dbContext = new ApplicationDbContext(args[2]))
            {
                IEnumerable<TimelineEntry> timelineEntries = dbContext.TimelineEntries
                    .Include(te => te.Message)
                    .Include(te => te.CommentThread.Comments.Select(c => c.Text));

                foreach (TimelineEntry entry in timelineEntries)
                {
                    var batchActions = new List<IndexAction<MessageIndexEntry>>();

                    batchActions.Add(new IndexAction<MessageIndexEntry>(
                        IndexActionType.Upload,
                        new MessageIndexEntry
                        {
                            ItemId = "timeline-" + entry.TimelineEntryId,
                            Content = entry.Message.Content,
                            TimelineEntryId = entry.TimelineEntryId
                        }));

                    if (entry.CommentThread != null)
                    {
                        foreach (Comment comment in entry.CommentThread.Comments)
                        {
                            batchActions.Add(new IndexAction<MessageIndexEntry>(
                                IndexActionType.Upload,
                                new MessageIndexEntry
                                {
                                    ItemId = "comment-" + comment.CommentId,
                                    Content = comment.Text.Content,
                                    TimelineEntryId = entry.TimelineEntryId,
                                    CommentThreadId = comment.CommentThreadId
                                }));
                        }
                    }
                    var batch = new IndexBatch<MessageIndexEntry>(batchActions);
                    DocumentIndexResponse indexDocumentsResponse = indexClient.Documents.Index(batch);
                }

                IEnumerable<MediaAlbum> albums = dbContext.MediaAlbums
                    .Include(a => a.CommentThread.Comments.Select(c => c.Text));

                foreach (MediaAlbum album in albums)
                {
                    var batchActions = new List<IndexAction<MessageIndexEntry>>();

                    batchActions.Add(new IndexAction<MessageIndexEntry>(
                        IndexActionType.Upload,
                        new MessageIndexEntry
                        {
                            ItemId = "album-" + album.MediaAlbumId,
                            Title = album.Title,
                            Content = album.Description,
                            MediaAlbumId = album.MediaAlbumId
                        }));

                    if (album.CommentThread != null)
                    {
                        foreach (Comment comment in album.CommentThread.Comments)
                        {
                            batchActions.Add(new IndexAction<MessageIndexEntry>(
                                IndexActionType.Upload,
                                new MessageIndexEntry
                                {
                                    ItemId = "comment-" + comment.CommentId,
                                    Content = comment.Text.Content,
                                    MediaAlbumId = album.MediaAlbumId,
                                    CommentThreadId = comment.CommentThreadId
                                }));
                        }
                    }
                    var batch = new IndexBatch<MessageIndexEntry>(batchActions);
                    DocumentIndexResponse indexDocumentsResponse = indexClient.Documents.Index(batch);
                }


                IEnumerable<UserMedia> medias = dbContext.UserMedias
                    .Include(m => m.Description)
                    .Include(m => m.CommentThread.Comments.Select(c => c.Text));

                foreach (UserMedia media in medias)
                {
                    var batchActions = new List<IndexAction<MessageIndexEntry>>();

                    batchActions.Add(new IndexAction<MessageIndexEntry>(
                        IndexActionType.Upload,
                        new MessageIndexEntry
                        {
                            ItemId = "media-" + media.UserMediaId,
                            Title = media.Title,
                            Content = media.Description?.Content,
                            UserMediaId = media.UserMediaId,
                            MediaAlbumId = media.MediaAlbumId
                        }));

                    if (media.CommentThread != null)
                    {
                        foreach (Comment comment in media.CommentThread.Comments)
                        {
                            batchActions.Add(new IndexAction<MessageIndexEntry>(
                                IndexActionType.Upload,
                                new MessageIndexEntry
                                {
                                    ItemId = "comment-" + comment.CommentId,
                                    Content = comment.Text.Content,
                                    UserMediaId = media.UserMediaId,
                                    MediaAlbumId = media.MediaAlbumId,
                                    CommentThreadId = comment.CommentThreadId
                                }));
                        }
                    }
                    var batch = new IndexBatch<MessageIndexEntry>(batchActions);
                    DocumentIndexResponse indexDocumentsResponse = indexClient.Documents.Index(batch);
                }
            }
        }