/// <summary>
/// Builds the full cognitive search pipeline — data source, skills, skillset,
/// index, and indexer — then reports the indexer's overall status.
/// </summary>
/// <returns>The indexer's overall status string from the service.</returns>
public async Task<string> CreateIndexAndIndexerAsync()
{
    // Create or update the data source the indexer reads from.
    SearchIndexerDataSourceConnection dataSource = CreateOrUpdateDataSource(_indexerClient);

    // Build each cognitive skill.
    OcrSkill ocrSkill = CreateOcrSkill();
    MergeSkill mergeSkill = CreateMergeSkill();
    EntityRecognitionSkill entityRecognitionSkill = CreateEntityRecognitionSkill();
    LanguageDetectionSkill languageDetectionSkill = CreateLanguageDetectionSkill();
    SplitSkill splitSkill = CreateSplitSkill();
    KeyPhraseExtractionSkill keyPhraseExtractionSkill = CreateKeyPhraseExtractionSkill();

    // Assemble the skillset in the order the pipeline should run the skills.
    List<SearchIndexerSkill> skills = new List<SearchIndexerSkill>
    {
        ocrSkill,
        mergeSkill,
        languageDetectionSkill,
        splitSkill,
        entityRecognitionSkill,
        keyPhraseExtractionSkill,
    };
    SearchIndexerSkillset skillset = CreateOrUpdateDemoSkillSet(_indexerClient, skills, _cognitiveServicesKey);

    // Create the index, then the indexer that maps source fields into it.
    SearchIndex demoIndex = await CreateDemoIndexAsync(_indexClient);
    SearchIndexer demoIndexer = await CreateDemoIndexerAsync(_indexerClient, dataSource, skillset, demoIndex);

    // Report the indexer's overall status.
    return await CheckIndexerOverallStatusAsync(_indexerClient, demoIndexer.Name);
}
/// <summary>
/// Creates a new data source connection or updates an existing one.
/// </summary>
/// <param name="dataSourceConnection">Required. The <see cref="SearchIndexerDataSourceConnection"/> to create or update.</param>
/// <param name="onlyIfUnchanged">
/// True to throw a <see cref="RequestFailedException"/> if the <see cref="SearchIndexerDataSourceConnection.ETag"/> does not match the current service version;
/// otherwise, the current service version will be overwritten.
/// </param>
/// <param name="ignoreCacheResetRequirements"><c>True</c> if the cache reset requirements should be ignored.</param>
/// <param name="cancellationToken">Optional <see cref="CancellationToken"/> to propagate notifications that the operation should be canceled.</param>
/// <returns>
/// The <see cref="Response{T}"/> from the server containing the <see cref="SearchIndexerDataSourceConnection"/> that was created.
/// This may differ slightly from what was passed in since the service may return back properties set to their default values.
/// </returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSourceConnection"/> is null.</exception>
/// <exception cref="RequestFailedException">Thrown when a failure is returned by the Search service.</exception>
public virtual async Task<Response<SearchIndexerDataSourceConnection>> CreateOrUpdateDataSourceConnectionAsync(
    SearchIndexerDataSourceConnection dataSourceConnection,
    bool onlyIfUnchanged = false,
    bool? ignoreCacheResetRequirements = null,
    CancellationToken cancellationToken = default)
{
    // The REST client uses a different parameter name that would be confusing to reference.
    Argument.AssertNotNull(dataSourceConnection, nameof(dataSourceConnection));

    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(SearchIndexerClient)}.{nameof(CreateOrUpdateDataSourceConnection)}");
    scope.Start();
    try
    {
        // Only send an If-Match header when the caller asked for optimistic concurrency.
        string ifMatch = onlyIfUnchanged ? dataSourceConnection?.ETag?.ToString() : null;

        Response<SearchIndexerDataSourceConnection> response = await DataSourcesClient.CreateOrUpdateAsync(
            dataSourceConnection?.Name,
            dataSourceConnection,
            ifMatch,
            null,
            ignoreCacheResetRequirements,
            cancellationToken).ConfigureAwait(false);
        return response;
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates or updates the blob data source and its indexer, resets the indexer
/// if it already exists (so previously indexed blobs are re-processed), and runs it.
/// </summary>
/// <param name="cancellationToken">Propagates notification that the operation should be canceled.</param>
private async Task CreateAndRunBlobIndexerAsync(CancellationToken cancellationToken)
{
    SearchIndexerDataSourceConnection blobDataSource = new SearchIndexerDataSourceConnection(
        name: _azureBlobOptions.Value.Name,
        type: SearchIndexerDataSourceType.AzureBlob,
        connectionString: _azureBlobOptions.Value.ConnectionString,
        container: _searchIndexerDataContainer);

    await _searchIndexerClient.CreateOrUpdateDataSourceConnectionAsync(blobDataSource, false, cancellationToken);

    try
    {
        // Indexers track how far they have indexed; reset an existing indexer so it
        // re-processes all blobs instead of skipping previously indexed content.
        await _searchIndexerClient.GetIndexerAsync(_azureBlobIndexer.Name, cancellationToken);
        await _searchIndexerClient.ResetIndexerAsync(_azureBlobIndexer.Name, cancellationToken);
    }
    catch (RequestFailedException ex) when (ex.Status == 404)
    {
        // The indexer does not exist yet; it is created below.
    }

    await _searchIndexerClient.CreateOrUpdateIndexerAsync(_azureBlobIndexer, cancellationToken: cancellationToken);

    try
    {
        // Run the indexer.
        await _searchIndexerClient.RunIndexerAsync(_azureBlobIndexer.Name, cancellationToken);
    }
    catch (RequestFailedException ex) when (ex.Status == 429)
    {
        // BUG FIX: SearchIndexerClient throws RequestFailedException (Azure.Core),
        // not the legacy CloudException, so the old handler never caught service
        // throttling and a 429 crashed the pipeline. This matches the 404 handler
        // above and the other indexer-running methods in this file.
        Console.WriteLine("Failed to run indexer: {0}", ex.Message);
    }
}
/// <summary>
/// Creates or updates the Azure Blob data source used by the demo pipeline.
/// Exits the program if the data source cannot be created or updated.
/// </summary>
/// <param name="indexerClient">Client used to manage data sources.</param>
/// <param name="blobConnectionString">Connection string of the storage account.</param>
/// <param name="indexName">Index name used to derive the data source name.</param>
/// <param name="blobContainerName">Name of the blob container holding the files.</param>
/// <returns>The data source definition that was sent to the service.</returns>
public static async Task<SearchIndexerDataSourceConnection> CreateOrUpdateAzureBlobDataSourceAsync(SearchIndexerClient indexerClient, string blobConnectionString, string indexName, string blobContainerName)
{
    var container = new SearchIndexerDataContainer(blobContainerName);
    var dataSource = new SearchIndexerDataSourceConnection(
        name: indexName + "-azureblobdatasource",
        type: SearchIndexerDataSourceType.AzureBlob,
        connectionString: blobConnectionString,
        container: container)
    {
        Description = "Files to demonstrate cognitive search capabilities."
    };

    // CreateOrUpdate is idempotent, so an existing data source does not need to
    // be deleted first.
    try
    {
        await indexerClient.CreateOrUpdateDataSourceConnectionAsync(dataSource);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Failed to create or update the data source\n Exception message: {0}\n", ex.Message);
        ExitProgram("Cannot continue without a data source");
    }

    return dataSource;
}
// Builds the PUT /datasources('{name}') request used to create or update a data
// source connection. The caller owns the returned message and sends it through
// the pipeline.
internal HttpMessage CreateCreateOrUpdateRequest(string dataSourceName, SearchIndexerDataSourceConnection dataSource, string ifMatch, string ifNoneMatch, bool? skipIndexerResetRequirementForCache)
{
    var message = _pipeline.CreateMessage();
    var request = message.Request;
    request.Method = RequestMethod.Put;
    var uri = new RawRequestUriBuilder();
    uri.AppendRaw(_endpoint, false);
    // The data source name is embedded in the OData-style path and must be escaped.
    uri.AppendPath("/datasources('", false);
    uri.AppendPath(dataSourceName, true);
    uri.AppendPath("')", false);
    uri.AppendQuery("api-version", _apiVersion, true);
    if (skipIndexerResetRequirementForCache != null)
    {
        uri.AppendQuery("ignoreResetRequirements", skipIndexerResetRequirementForCache.Value, true);
    }
    request.Uri = uri;
    // Optional concurrency headers: If-Match for "only if unchanged",
    // If-None-Match for "only if changed".
    if (ifMatch != null)
    {
        request.Headers.Add("If-Match", ifMatch);
    }
    if (ifNoneMatch != null)
    {
        request.Headers.Add("If-None-Match", ifNoneMatch);
    }
    // Ask the service to echo back the resulting resource representation.
    request.Headers.Add("Prefer", "return=representation");
    request.Headers.Add("Accept", "application/json; odata.metadata=minimal");
    request.Headers.Add("Content-Type", "application/json");
    // Serialize the data source definition as the JSON request body.
    var content = new Utf8JsonRequestContent();
    content.JsonWriter.WriteObjectValue(dataSource);
    request.Content = content;
    return (message);
}
/// <summary>
/// Creates a new data source or updates an existing one by sending the PUT
/// request built by <c>CreateCreateOrUpdateRequest</c> through the pipeline.
/// </summary>
/// <param name="dataSourceName">The name of the data source to create or update.</param>
/// <param name="dataSource">The definition of the data source.</param>
/// <param name="ifMatch">Optional ETag; when set, the operation only succeeds if the service's version matches.</param>
/// <param name="ifNoneMatch">Optional ETag; when set, the operation only succeeds if the service's version differs.</param>
/// <param name="skipIndexerResetRequirementForCache">Whether cache reset requirements should be ignored.</param>
/// <param name="cancellationToken">Token to cancel the request.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSourceName"/> or <paramref name="dataSource"/> is null.</exception>
public async Task<Response<SearchIndexerDataSourceConnection>> CreateOrUpdateAsync(string dataSourceName, SearchIndexerDataSourceConnection dataSource, string ifMatch = null, string ifNoneMatch = null, bool? skipIndexerResetRequirementForCache = null, CancellationToken cancellationToken = default)
{
    if (dataSourceName == null)
    {
        throw new ArgumentNullException(nameof(dataSourceName));
    }
    if (dataSource == null)
    {
        throw new ArgumentNullException(nameof(dataSource));
    }

    using var message = CreateCreateOrUpdateRequest(dataSourceName, dataSource, ifMatch, ifNoneMatch, skipIndexerResetRequirementForCache);
    await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
    switch (message.Response.Status)
    {
        // 200 is returned for updates and 201 for creations; both carry the
        // resulting data source definition in the response body.
        case 200:
        case 201:
        {
            SearchIndexerDataSourceConnection value = default;
            using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, cancellationToken).ConfigureAwait(false);
            value = SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement);
            return (Response.FromValue(value, message.Response));
        }
        // NOTE(review): the remainder of this switch (non-success status handling
        // and the closing braces) lies outside the visible chunk of this file.
/// <summary>
/// Creates or updates the demo blob data source, exiting the program on failure
/// since the pipeline cannot continue without one.
/// </summary>
/// <param name="indexerClient">Client used to manage data sources.</param>
/// <param name="configuration">Configuration providing the blob connection string.</param>
/// <returns>The data source definition that was sent to the service.</returns>
private static SearchIndexerDataSourceConnection CreateOrUpdateDataSource(SearchIndexerClient indexerClient, IConfigurationRoot configuration)
{
    var dataSource = new SearchIndexerDataSourceConnection(
        name: "demodata",
        type: SearchIndexerDataSourceType.AzureBlob,
        connectionString: configuration["AzureBlobConnectionString"],
        container: new SearchIndexerDataContainer("cog-search-demo"))
    {
        Description = "Demo files to demonstrate cognitive search capabilities."
    };

    // CreateOrUpdate is idempotent; an existing data source is simply updated,
    // so it never needs to be deleted first.
    try
    {
        indexerClient.CreateOrUpdateDataSourceConnection(dataSource);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Failed to create or update the data source\n Exception message: {0}\n", ex.Message);
        ExitProgram("Cannot continue without a data source");
    }

    return dataSource;
}
/// <summary>
/// Creates or updates a Cosmos DB data source and an indexer over it, resets the
/// indexer if it already exists, and runs it against the target index.
/// </summary>
/// <param name="indexName">Name of the search index the indexer writes to.</param>
/// <param name="indexerClient">Client used to manage data sources and indexers.</param>
private static async Task CreateAndRunCosmosDbIndexerAsync(string indexName, SearchIndexerClient indexerClient)
{
    // Append the database name to the connection string
    string cosmosConnectString = configuration["CosmosDBConnectionString"] + ";Database=" + configuration["CosmosDBDatabaseName"];
    SearchIndexerDataSourceConnection cosmosDbDataSource = new SearchIndexerDataSourceConnection(
        name: configuration["CosmosDBDatabaseName"],
        type: SearchIndexerDataSourceType.CosmosDb,
        connectionString: cosmosConnectString,
        container: new SearchIndexerDataContainer("hotels"));

    // The Cosmos DB data source does not need to be deleted if it already exists,
    // but the connection string might need to be updated if it has changed.
    await indexerClient.CreateOrUpdateDataSourceConnectionAsync(cosmosDbDataSource);

    Console.WriteLine("Creating Cosmos DB indexer...\n");
    SearchIndexer cosmosDbIndexer = new SearchIndexer(
        name: "hotel-rooms-cosmos-indexer",
        dataSourceName: cosmosDbDataSource.Name,
        targetIndexName: indexName)
    {
        // Run on a daily schedule.
        Schedule = new IndexingSchedule(TimeSpan.FromDays(1))
    };

    // Indexers keep metadata about how much they have already indexed.
    // If we already ran this sample, the indexer will remember that it already
    // indexed the sample data and not run again.
    // To avoid this, reset the indexer if it exists.
    try
    {
        await indexerClient.GetIndexerAsync(cosmosDbIndexer.Name);
        // Reset the indexer if it exists.
        await indexerClient.ResetIndexerAsync(cosmosDbIndexer.Name);
    }
    catch (RequestFailedException ex) when (ex.Status == 404)
    {
        // If the specified indexer does not exist, 404 is thrown; nothing to reset.
    }

    await indexerClient.CreateOrUpdateIndexerAsync(cosmosDbIndexer);

    Console.WriteLine("Running Cosmos DB indexer...\n");
    try
    {
        await indexerClient.RunIndexerAsync(cosmosDbIndexer.Name);
    }
    catch (RequestFailedException ex) when (ex.Status == 429)
    {
        // 429 means the service throttled the request; log and continue.
        Console.WriteLine("Failed to run indexer: {0}", ex.Message);
    }
}
/// <summary>
/// Creates a new data source connection or updates an existing one. This overload
/// forwards to the full overload without ignoring cache reset requirements.
/// </summary>
/// <param name="dataSourceConnection">Required. The <see cref="SearchIndexerDataSourceConnection"/> to create or update.</param>
/// <param name="onlyIfUnchanged">
/// True to throw a <see cref="RequestFailedException"/> if the <see cref="SearchIndexerDataSourceConnection.ETag"/> does not match the current service version;
/// otherwise, the current service version will be overwritten.
/// </param>
/// <param name="cancellationToken"><see cref="CancellationToken"/> to propagate notifications that the operation should be canceled.</param>
#pragma warning disable AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
public virtual Response<SearchIndexerDataSourceConnection> CreateOrUpdateDataSourceConnection(
#pragma warning restore AZC0002 // DO ensure all service methods, both asynchronous and synchronous, take an optional CancellationToken parameter called cancellationToken.
    SearchIndexerDataSourceConnection dataSourceConnection,
    bool onlyIfUnchanged,
    CancellationToken cancellationToken)
{
    return CreateOrUpdateDataSourceConnection(
        dataSourceConnection,
        onlyIfUnchanged,
        ignoreCacheResetRequirements: null,
        cancellationToken);
}
/// <summary>
/// Creates or updates an Azure SQL data source and an indexer over it, resets the
/// indexer if it already exists, and runs it against the target index.
/// </summary>
/// <param name="indexName">Name of the search index the indexer writes to.</param>
/// <param name="indexerClient">Client used to manage data sources and indexers.</param>
private static async Task CreateAndRunSQLIndexerAsync(string indexName, SearchIndexerClient indexerClient)
{
    SearchIndexerDataSourceConnection sqlDataSource = new SearchIndexerDataSourceConnection(
        name: configuration["SQLDatabaseName"],
        type: SearchIndexerDataSourceType.AzureSql,
        // NOTE(review): "SQLConnectSctring" looks misspelled — confirm it matches the
        // key actually used in the configuration file before renaming either side.
        connectionString: configuration["SQLConnectSctring"],
        container: new SearchIndexerDataContainer("books"));

    // The data source does not need to be deleted if it already exists,
    // but the connection string might need to be updated if it has changed.
    await indexerClient.CreateOrUpdateDataSourceConnectionAsync(sqlDataSource);

    Console.WriteLine("Creating SQL indexer...\n");
    SearchIndexer sqlIndexer = new SearchIndexer(
        name: "books-indexer",
        dataSourceName: sqlDataSource.Name,
        targetIndexName: indexName)
    {
        // Here you can set the desired schedule for indexing repetitions.
        Schedule = new IndexingSchedule(TimeSpan.FromDays(1))
    };

    // Indexers keep metadata about how much they have already indexed.
    // If we already ran this sample, the indexer will remember that it already
    // indexed the sample data and not run again.
    // To avoid this, reset the indexer if it exists.
    try
    {
        await indexerClient.GetIndexerAsync(sqlIndexer.Name);
        // Reset the indexer if it exists.
        await indexerClient.ResetIndexerAsync(sqlIndexer.Name);
    }
    catch (RequestFailedException ex) when (ex.Status == 404)
    {
        // If the specified indexer does not exist, 404 is thrown; nothing to reset.
    }

    await indexerClient.CreateOrUpdateIndexerAsync(sqlIndexer);

    Console.WriteLine("Running SQL indexer...\n");
    try
    {
        await indexerClient.RunIndexerAsync(sqlIndexer.Name);
    }
    catch (RequestFailedException ex) when (ex.Status == 429)
    {
        // 429 means the service throttled the request; log and continue.
        Console.WriteLine("Failed to run sql indexer: {0}", ex.Message);
    }
}
/// <summary>
/// Entry point: builds the full cognitive search demo pipeline — data source,
/// skills, skillset, index, and indexer — then reports the indexer's status.
/// </summary>
public static void Main(string[] args)
{
    // Load settings and build the service clients.
    IConfigurationBuilder builder = new ConfigurationBuilder().AddJsonFile("appsettings.json");
    IConfigurationRoot configuration = builder.Build();

    string searchServiceUri = configuration["SearchServiceUri"];
    string adminApiKey = configuration["SearchServiceAdminApiKey"];
    string cognitiveServicesKey = configuration["CognitiveServicesKey"];

    var serviceEndpoint = new Uri(searchServiceUri);
    var adminCredential = new AzureKeyCredential(adminApiKey);
    SearchIndexClient indexClient = new SearchIndexClient(serviceEndpoint, adminCredential);
    SearchIndexerClient indexerClient = new SearchIndexerClient(serviceEndpoint, adminCredential);

    // Create or Update the data source
    Console.WriteLine("Creating or updating the data source...");
    SearchIndexerDataSourceConnection dataSource = CreateOrUpdateDataSource(indexerClient, configuration);

    // Create the skills
    Console.WriteLine("Creating the skills...");
    OcrSkill ocrSkill = CreateOcrSkill();
    MergeSkill mergeSkill = CreateMergeSkill();
    EntityRecognitionSkill entityRecognitionSkill = CreateEntityRecognitionSkill();
    LanguageDetectionSkill languageDetectionSkill = CreateLanguageDetectionSkill();
    SplitSkill splitSkill = CreateSplitSkill();
    KeyPhraseExtractionSkill keyPhraseExtractionSkill = CreateKeyPhraseExtractionSkill();

    // Create the skillset, listing the skills in pipeline order.
    Console.WriteLine("Creating or updating the skillset...");
    List<SearchIndexerSkill> skills = new List<SearchIndexerSkill>
    {
        ocrSkill,
        mergeSkill,
        languageDetectionSkill,
        splitSkill,
        entityRecognitionSkill,
        keyPhraseExtractionSkill,
    };
    SearchIndexerSkillset skillset = CreateOrUpdateDemoSkillSet(indexerClient, skills, cognitiveServicesKey);

    // Create the index
    Console.WriteLine("Creating the index...");
    SearchIndex demoIndex = CreateDemoIndex(indexClient);

    // Create the indexer, map fields, and execute transformations
    Console.WriteLine("Creating the indexer and executing the pipeline...");
    SearchIndexer demoIndexer = CreateDemoIndexer(indexerClient, dataSource, skillset, demoIndex);

    // Check indexer overall status
    Console.WriteLine("Check the indexer overall status...");
    CheckIndexerOverallStatus(indexerClient, demoIndexer);
}
/// <summary>
/// Deletes a data source connection.
/// </summary>
/// <param name="dataSourceConnection">The <see cref="SearchIndexerDataSourceConnection"/> to delete.</param>
/// <param name="onlyIfUnchanged">
/// True to throw a <see cref="RequestFailedException"/> if the <see cref="SearchIndexerDataSourceConnection.ETag"/> does not match the current service version;
/// otherwise, the current service version will be overwritten.
/// </param>
/// <param name="cancellationToken">Optional <see cref="CancellationToken"/> to propagate notifications that the operation should be canceled.</param>
/// <returns>The <see cref="Response"/> from the server.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSourceConnection"/> is null.</exception>
/// <exception cref="RequestFailedException">Thrown when a failure is returned by the Search service.</exception>
public virtual Response DeleteDataSourceConnection(
    SearchIndexerDataSourceConnection dataSourceConnection,
    bool onlyIfUnchanged = false,
    CancellationToken cancellationToken = default)
{
    // The REST client uses a different parameter name that would be confusing to reference.
    Argument.AssertNotNull(dataSourceConnection, nameof(dataSourceConnection));

    // Delegate to the name/ETag overload; the ETag is only honored when
    // onlyIfUnchanged is true.
    var name = dataSourceConnection?.Name;
    var eTag = dataSourceConnection?.ETag;
    return DeleteDataSourceConnection(name, eTag, onlyIfUnchanged, cancellationToken);
}
/// <summary>
/// Creates or updates a blob data source connection for the given container.
/// </summary>
/// <param name="dataSourceName">Name of the data source connection to create.</param>
/// <param name="containerName">Name of the blob container to index.</param>
/// <param name="connectionString">Connection string for the storage account.</param>
/// <exception cref="Exception">
/// Thrown when the data source connection cannot be created; the original failure
/// is preserved as the inner exception.
/// </exception>
public async Task CreateDataSourceConnectionAsync(string dataSourceName, string containerName, string connectionString)
{
    try
    {
        SearchIndexerDataContainer searchIndexerDataContainer = new SearchIndexerDataContainer(containerName);
        SearchIndexerDataSourceConnection searchIndexerDataSourceConnection = new SearchIndexerDataSourceConnection(
            dataSourceName,
            SearchIndexerDataSourceType.AzureBlob,
            connectionString,
            searchIndexerDataContainer);

        await _searchIndexerClient.CreateOrUpdateDataSourceConnectionAsync(searchIndexerDataSourceConnection);
    }
    catch (Exception ex)
    {
        // BUG FIX: pass the caught exception as InnerException instead of
        // discarding it, so the root cause stays diagnosable from the wrapper.
        throw new Exception("Cognitive Search APIs -> Failed To Create Data Source Connection!", ex);
    }
}
/// <summary>
/// Creates the test data source connection.
/// </summary>
/// <returns>
/// True when the data source was created; false on any failure (the error is
/// printed only when <c>DebugMode</c> is enabled).
/// </returns>
private static async Task<bool> CreateDataSource()
{
    Console.WriteLine("Creating Data Source...");
    try
    {
        SearchIndexerDataSourceConnection dataSource = SearchResources.GetDataSource(DataSourceName);
        await _searchIndexerClient.CreateDataSourceConnectionAsync(dataSource);
        return true;
    }
    catch (Exception ex)
    {
        if (DebugMode)
        {
            Console.WriteLine("Error creating data source: {0}", ex.Message);
        }
        return false;
    }
}
/// <summary>
/// Creates a new data source connection.
/// </summary>
/// <param name="dataSourceConnection">Required. The <see cref="SearchIndexerDataSourceConnection"/> to create.</param>
/// <param name="cancellationToken">Optional <see cref="CancellationToken"/> to propagate notifications that the operation should be canceled.</param>
/// <returns>
/// The <see cref="Response{T}"/> from the server containing the <see cref="SearchIndexerDataSourceConnection"/> that was created.
/// This may differ slightly from what was passed in since the service may return back properties set to their default values.
/// </returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSourceConnection"/> is null.</exception>
/// <exception cref="RequestFailedException">Thrown when a failure is returned by the Search service.</exception>
public virtual Response<SearchIndexerDataSourceConnection> CreateDataSourceConnection(
    SearchIndexerDataSourceConnection dataSourceConnection,
    CancellationToken cancellationToken = default)
{
    // The REST client uses a different parameter name that would be confusing to reference.
    Argument.AssertNotNull(dataSourceConnection, nameof(dataSourceConnection));

    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(SearchIndexerClient)}.{nameof(CreateDataSourceConnection)}");
    scope.Start();
    try
    {
        Response<SearchIndexerDataSourceConnection> response = DataSourcesClient.Create(dataSourceConnection, cancellationToken);
        return response;
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates or updates the demo blob data source; wraps any failure in an
/// exception since the pipeline cannot continue without a data source.
/// </summary>
/// <param name="indexerClient">Client used to manage data sources.</param>
/// <returns>The data source definition that was sent to the service.</returns>
private SearchIndexerDataSourceConnection CreateOrUpdateDataSource(SearchIndexerClient indexerClient)
{
    var dataSource = new SearchIndexerDataSourceConnection(
        name: "demodata",
        type: SearchIndexerDataSourceType.AzureBlob,
        connectionString: _storageConnectionString,
        container: new SearchIndexerDataContainer(_containerName))
    {
        Description = "Demo files to demonstrate cognitive search capabilities."
    };

    // CreateOrUpdate is idempotent; an existing data source is simply updated,
    // so it never needs to be deleted first.
    try
    {
        indexerClient.CreateOrUpdateDataSourceConnection(dataSource);
    }
    catch (Exception ex)
    {
        throw new Exception("Failed to create or update the data source", ex);
    }

    return dataSource;
}
/// <summary>
/// Creates or updates the demo blob data source using the container named in
/// configuration, exiting the program on failure.
/// </summary>
/// <param name="indexerClient">Client used to manage data sources.</param>
/// <param name="configuration">Configuration providing the connection string and container name.</param>
/// <returns>The data source definition that was sent to the service.</returns>
private static SearchIndexerDataSourceConnection CreateOrUpdateDataSource(SearchIndexerClient indexerClient, IConfigurationRoot configuration)
{
    var dataSource = new SearchIndexerDataSourceConnection(
        name: "demodata",
        type: SearchIndexerDataSourceType.AzureBlob,
        connectionString: configuration["AzureBlobConnectionString"],
        container: new SearchIndexerDataContainer(configuration["ContainerName"]))
    {
        Description = "Demo files to demonstrate cognitive search capabilities."
    };

    try
    {
        indexerClient.CreateOrUpdateDataSourceConnection(dataSource);
        Console.WriteLine("Data source successfully created");
    }
    catch (Exception ex)
    {
        Console.WriteLine("Failed to create or update the data source\n Exception message: {0}\n", ex.Message);
        ExitProgram("Cannot continue without a data source");
    }

    return dataSource;
}
/// <summary>
/// Recorded test: creates a data source and indexer protected with a
/// customer-managed Key Vault key, indexes blob documents into an encrypted
/// index, then queries it to confirm documents are searchable. The #region
/// blocks are extracted as documentation snippets; #if !SNIPPET sections swap in
/// recorded test resources that the published snippet omits.
/// </summary>
public async Task CreateDoubleEncryptedIndex()
{
    string keyVaultUrl = TestEnvironment.KeyVaultUrl;
    if (string.IsNullOrEmpty(keyVaultUrl))
    {
        Assert.Ignore("A Key Vault was not deployed");
    }

    // Create the key and persist the name and version.
    KeyVaultKey key = await CreateEncryptionKey(keyVaultUrl);
    Environment.SetEnvironmentVariable("KEYVAULT_URL", keyVaultUrl);
    Environment.SetEnvironmentVariable("KEYVAULT_KEY_NAME", key.Name);
    Environment.SetEnvironmentVariable("KEYVAULT_KEY_VERSION", key.Properties.Version);

    // Persist the service principal.
    Environment.SetEnvironmentVariable("APPLICATION_ID", TestEnvironment.ClientId);
    Environment.SetEnvironmentVariable("APPLICATION_SECRET", TestEnvironment.RecordedClientSecret);

    // Create the blob container and persist connection information.
    await using SearchResources resources = await SearchResources.CreateWithBlobStorageAndIndexAsync(this, populate: true);
    Environment.SetEnvironmentVariable("STORAGE_CONNECTION_STRING", resources.StorageAccountConnectionString);
    Environment.SetEnvironmentVariable("STORAGE_CONTAINER_NAME", resources.BlobContainerName);
    Environment.SetEnvironmentVariable("SEARCH_ENDPOINT", resources.Endpoint.ToString());
    Environment.SetEnvironmentVariable("SEARCH_API_KEY", resources.PrimaryApiKey);

    // Define clean up tasks to be invoked in reverse order added.
    Stack<Func<Task>> cleanUpTasks = new Stack<Func<Task>>();
    try
    {
        #region Snippet:Azure_Search_Tests_Sample06_EncryptedIndex_CreateDoubleEncryptedIndex_Index
        // Create a credential to connect to Key Vault and use a specific key version created previously.
        SearchResourceEncryptionKey encryptionKey = new SearchResourceEncryptionKey(
            new Uri(Environment.GetEnvironmentVariable("KEYVAULT_URL")),
            Environment.GetEnvironmentVariable("KEYVAULT_KEY_NAME"),
            Environment.GetEnvironmentVariable("KEYVAULT_KEY_VERSION"))
        {
            ApplicationId = Environment.GetEnvironmentVariable("APPLICATION_ID"),
            ApplicationSecret = Environment.GetEnvironmentVariable("APPLICATION_SECRET"),
        };

        // Create a connection to our storage blob container using the credential.
        string dataSourceConnectionName = "hotels-data-source";
#if !SNIPPET
        dataSourceConnectionName = Recording.Random.GetName();
#endif
        SearchIndexerDataSourceConnection dataSourceConnection = new SearchIndexerDataSourceConnection(
            dataSourceConnectionName,
            SearchIndexerDataSourceType.AzureBlob,
            Environment.GetEnvironmentVariable("STORAGE_CONNECTION_STRING"),
            new SearchIndexerDataContainer(
                Environment.GetEnvironmentVariable("STORAGE_CONTAINER_NAME")
            )
        )
        {
            EncryptionKey = encryptionKey
        };

        // Create an indexer to process documents from the blob container into the index.
        // You can optionally configure a skillset to use cognitive services when processing documents.
        // Set the SearchIndexerSkillset.EncryptionKey to the same credential if you use a skillset.
        string indexName = "hotels";
        string indexerName = "hotels-indexer";
#if !SNIPPET
        indexName = resources.IndexName;
        indexerName = Recording.Random.GetName();
#endif
        SearchIndexer indexer = new SearchIndexer(
            indexerName,
            dataSourceConnectionName,
            indexName)
        {
            EncryptionKey = encryptionKey,

            // Map the fields in our documents we want to index.
            FieldMappings =
            {
                new FieldMapping("hotelId"),
                new FieldMapping("hotelName"),
                new FieldMapping("description"),
                new FieldMapping("tags"),
                new FieldMapping("address")
            },
            Parameters = new IndexingParameters
            {
                // Tell the indexer to parse each blob as a separate JSON document.
                IndexingParametersConfiguration = new IndexingParametersConfiguration
                {
                    ParsingMode = BlobIndexerParsingMode.Json
                }
            }
        };

        // Now connect to our Search service and set up the data source and indexer.
        // Documents already in the storage blob will begin indexing immediately.
        Uri endpoint = new Uri(Environment.GetEnvironmentVariable("SEARCH_ENDPOINT"));
        AzureKeyCredential credential = new AzureKeyCredential(
            Environment.GetEnvironmentVariable("SEARCH_API_KEY"));
        SearchIndexerClient indexerClient = new SearchIndexerClient(endpoint, credential);
#if !SNIPPET
        indexerClient = resources.GetIndexerClient();
#endif
        indexerClient.CreateDataSourceConnection(dataSourceConnection);
#if !SNIPPET
        cleanUpTasks.Push(() => indexerClient.DeleteDataSourceConnectionAsync(dataSourceConnectionName));
#endif
        indexerClient.CreateIndexer(indexer);
#if !SNIPPET
        cleanUpTasks.Push(() => indexerClient.DeleteIndexerAsync(indexerName));
#endif
        #endregion Snippet:Azure_Search_Tests_Sample06_EncryptedIndex_CreateDoubleEncryptedIndex_Index

        await WaitForIndexingAsync(indexerClient, indexerName);

        #region Snippet:Azure_Search_Tests_Sample06_EncryptedIndex_CreateDoubleEncryptedIndex_Query
        // Create a SearchClient and search for luxury hotels. In production, be sure to use the query key.
        SearchClient searchClient = new SearchClient(endpoint, "hotels", credential);
#if !SNIPPET
        searchClient = resources.GetSearchClient();
        bool found = false;
#endif
        Response<SearchResults<Hotel>> results = searchClient.Search<Hotel>("luxury hotels");
        foreach (SearchResult<Hotel> result in results.Value.GetResults())
        {
            Hotel hotel = result.Document;
#if !SNIPPET
            found = true;
#endif
            Console.WriteLine($"{hotel.HotelName} ({hotel.HotelId})");
            Console.WriteLine($" Description: {hotel.Description}");
        }
        #endregion Snippet:Azure_Search_Tests_Sample06_EncryptedIndex_CreateDoubleEncryptedIndex_Query

        Assert.IsTrue(found, "No luxury hotels were found in index");
    }
    finally
    {
        // We want to await these individual to create a deterministic order for playing back tests.
        foreach (Func<Task> cleanUpTask in cleanUpTasks)
        {
            await cleanUpTask();
        }
    }
}
public async Task CreateIndexerAsync() { await using SearchResources resources = await SearchResources.CreateWithBlobStorageAsync(this, populate : true); Environment.SetEnvironmentVariable("SEARCH_ENDPOINT", resources.Endpoint.ToString()); Environment.SetEnvironmentVariable("SEARCH_API_KEY", resources.PrimaryApiKey); Environment.SetEnvironmentVariable("STORAGE_CONNECTION_STRING", resources.StorageAccountConnectionString); Environment.SetEnvironmentVariable("STORAGE_CONTAINER", resources.BlobContainerName); Environment.SetEnvironmentVariable("COGNITIVE_SERVICES_KEY", resources.CognitiveServicesKey); // Define clean up tasks to be invoked in reverse order added. Stack <Func <Task> > cleanUpTasks = new Stack <Func <Task> >(); try { #region Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_CreateSynonymMap // Create a new SearchIndexClient Uri endpoint = new Uri(Environment.GetEnvironmentVariable("SEARCH_ENDPOINT")); AzureKeyCredential credential = new AzureKeyCredential( Environment.GetEnvironmentVariable("SEARCH_API_KEY")); SearchIndexClient indexClient = new SearchIndexClient(endpoint, credential); #if !SNIPPET indexClient = resources.GetIndexClient(new SearchClientOptions()); #endif // Create a synonym map from a file containing country names and abbreviations // using the Solr format with entry on a new line using \n, for example: // United States of America,US,USA\n string synonymMapName = "countries"; #if !SNIPPET synonymMapName = Recording.Random.GetName(); #endif string synonymMapPath = "countries.txt"; #if !SNIPPET synonymMapPath = Path.Combine(TestContext.CurrentContext.TestDirectory, "Samples", "countries.txt"); #endif SynonymMap synonyms; #if SNIPPET using (StreamReader file = File.OpenText(synonymMapPath)) { synonyms = new SynonymMap(synonymMapName, file); } #else synonyms = new SynonymMap(synonymMapName, CountriesSolrSynonymMap); #endif await indexClient.CreateSynonymMapAsync(synonyms); #endregion 
Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_CreateSynonymMap // Make sure our synonym map gets deleted, which is not deleted when our // index is deleted when our SearchResources goes out of scope. cleanUpTasks.Push(() => indexClient.DeleteSynonymMapAsync(synonymMapName)); #region Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_CreateIndex // Create the index string indexName = "hotels"; #if !SNIPPET indexName = Recording.Random.GetName(); #endif SearchIndex index = new SearchIndex(indexName) { Fields = { new SimpleField("hotelId", SearchFieldDataType.String) { IsKey = true, IsFilterable = true, IsSortable = true }, new SearchableField("hotelName") { IsFilterable = true, IsSortable = true }, new SearchableField("description") { AnalyzerName = LexicalAnalyzerName.EnLucene }, new SearchableField("descriptionFr") { AnalyzerName = LexicalAnalyzerName.FrLucene }, new SearchableField("tags", collection: true) { IsFilterable = true, IsFacetable = true }, new ComplexField("address") { Fields = { new SearchableField("streetAddress"), new SearchableField("city") { IsFilterable = true, IsSortable = true, IsFacetable = true }, new SearchableField("stateProvince") { IsFilterable = true, IsSortable = true, IsFacetable = true }, new SearchableField("country") { SynonymMapNames = new[] { synonymMapName },IsFilterable = true, IsSortable = true,IsFacetable = true }, new SearchableField("postalCode") { IsFilterable = true, IsSortable = true, IsFacetable = true } } } } }; await indexClient.CreateIndexAsync(index); #endregion Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_CreateIndex // Make sure our synonym map gets deleted, which is not deleted when our // index is deleted when our SearchResources goes out of scope. 
cleanUpTasks.Push(() => indexClient.DeleteIndexAsync(indexName)); #region Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_CreateDataSourceConnection // Create a new SearchIndexerClient SearchIndexerClient indexerClient = new SearchIndexerClient(endpoint, credential); #if !SNIPPET indexerClient = resources.GetIndexerClient(); #endif string dataSourceConnectionName = "hotels"; #if !SNIPPET dataSourceConnectionName = Recording.Random.GetName(); #endif SearchIndexerDataSourceConnection dataSourceConnection = new SearchIndexerDataSourceConnection( dataSourceConnectionName, SearchIndexerDataSourceType.AzureBlob, Environment.GetEnvironmentVariable("STORAGE_CONNECTION_STRING"), new SearchIndexerDataContainer(Environment.GetEnvironmentVariable("STORAGE_CONTAINER"))); await indexerClient.CreateDataSourceConnectionAsync(dataSourceConnection); #endregion Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_CreateDataSourceConnection // Make sure our data source gets deleted, which is not deleted when our // index is deleted when our SearchResources goes out of scope. cleanUpTasks.Push(() => indexerClient.DeleteDataSourceConnectionAsync(dataSourceConnectionName)); #region Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_Skillset // Translate English descriptions to French. // See https://docs.microsoft.com/azure/search/cognitive-search-skill-text-translation for details of the Text Translation skill. TextTranslationSkill translationSkill = new TextTranslationSkill( inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/description" } }, outputs: new[] { new OutputFieldMappingEntry("translatedText") { TargetName = "descriptionFrTranslated" } }, TextTranslationSkillLanguage.Fr) { Name = "descriptionFrTranslation", Context = "/document", DefaultFromLanguageCode = TextTranslationSkillLanguage.En }; // Use the human-translated French description if available; otherwise, use the translated description. 
// See https://docs.microsoft.com/azure/search/cognitive-search-skill-conditional for details of the Conditional skill. ConditionalSkill conditionalSkill = new ConditionalSkill( inputs: new[] { new InputFieldMappingEntry("condition") { Source = "= $(/document/descriptionFr) == null" }, new InputFieldMappingEntry("whenTrue") { Source = "/document/descriptionFrTranslated" }, new InputFieldMappingEntry("whenFalse") { Source = "/document/descriptionFr" } }, outputs: new[] { new OutputFieldMappingEntry("output") { TargetName = "descriptionFrFinal" } }) { Name = "descriptionFrConditional", Context = "/document", }; // Create a SearchIndexerSkillset that processes those skills in the order given below. string skillsetName = "translations"; #if !SNIPPET skillsetName = Recording.Random.GetName(); #endif SearchIndexerSkillset skillset = new SearchIndexerSkillset( skillsetName, new SearchIndexerSkill[] { translationSkill, conditionalSkill }) { CognitiveServicesAccount = new CognitiveServicesAccountKey( Environment.GetEnvironmentVariable("COGNITIVE_SERVICES_KEY")), KnowledgeStore = new SearchIndexerKnowledgeStore( Environment.GetEnvironmentVariable("STORAGE_CONNECTION_STRING"), new List <SearchIndexerKnowledgeStoreProjection>()), }; await indexerClient.CreateSkillsetAsync(skillset); #endregion Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_Skillset // Make sure our skillset gets deleted, which is not deleted when our // index is deleted when our SearchResources goes out of scope. cleanUpTasks.Push(() => indexerClient.DeleteSkillsetAsync(skillsetName)); #region Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_CreateIndexer string indexerName = "hotels"; #if !SNIPPET indexerName = Recording.Random.GetName(); #endif SearchIndexer indexer = new SearchIndexer( indexerName, dataSourceConnectionName, indexName) { // We only want to index fields defined in our index, excluding descriptionFr if defined. 
FieldMappings = { new FieldMapping("hotelId"), new FieldMapping("hotelName"), new FieldMapping("description"), new FieldMapping("tags"), new FieldMapping("address") }, OutputFieldMappings = { new FieldMapping("/document/descriptionFrFinal") { TargetFieldName = "descriptionFr" } }, Parameters = new IndexingParameters { // Tell the indexer to parse each blob as a separate JSON document. IndexingParametersConfiguration = new IndexingParametersConfiguration { ParsingMode = BlobIndexerParsingMode.Json } }, SkillsetName = skillsetName }; // Create the indexer which, upon successful creation, also runs the indexer. await indexerClient.CreateIndexerAsync(indexer); #endregion Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_CreateIndexer // Make sure our indexer gets deleted, which is not deleted when our // index is deleted when our SearchResources goes out of scope. cleanUpTasks.Push(() => indexerClient.DeleteIndexerAsync(indexerName)); // Wait till the indexer is done. await WaitForIndexingAsync(indexerClient, indexerName); #region Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_Query // Get a SearchClient from the SearchIndexClient to share its pipeline. SearchClient searchClient = indexClient.GetSearchClient(indexName); #if !SNIPPET searchClient = InstrumentClient(new SearchClient(endpoint, indexName, credential, GetSearchClientOptions())); #endif // Query for hotels with an ocean view. 
SearchResults <Hotel> results = await searchClient.SearchAsync <Hotel>("ocean view"); #if !SNIPPET bool found = false; #endif await foreach (SearchResult <Hotel> result in results.GetResultsAsync()) { Hotel hotel = result.Document; #if !SNIPPET if (hotel.HotelId == "6") { Assert.IsNotNull(hotel.DescriptionFr); found = true; } #endif Console.WriteLine($"{hotel.HotelName} ({hotel.HotelId})"); Console.WriteLine($" Description (English): {hotel.Description}"); Console.WriteLine($" Description (French): {hotel.DescriptionFr}"); } #endregion Snippet:Azure_Search_Tests_Samples_CreateIndexerAsync_Query Assert.IsTrue(found, "Expected hotel #6 not found in search results"); } finally { // We want to await these individual to create a deterministic order for playing back tests. foreach (Func <Task> cleanUpTask in cleanUpTasks) { await cleanUpTask(); } } }
/// <summary>
/// Creates (or recreates) the Orion customer search index for model type
/// <typeparamref name="T"/>, its Azure SQL data source, and a daily-scheduled
/// indexer, then runs the indexer once.
/// </summary>
/// <typeparam name="T">Model type whose attributed properties define the index schema.</typeparam>
public void CreateIndex <T>()
{
    // Read service settings from app configuration.
    string azureSQLConnectionStr = ConfigurationManager.AppSettings["AzureSqlConnectionString"].ToString();
    // Renamed from "Uri" — the original local shadowed the System.Uri type name.
    Uri endpoint = new Uri(ConfigurationManager.AppSettings["SearchServiceEndPoint"]);
    string searchServiceKey = ConfigurationManager.AppSettings["SearchServiceAdminApiKey"];
    AzureKeyCredential keyCredential = new AzureKeyCredential(searchServiceKey);
    SearchIndexClient indexClient = new SearchIndexClient(endpoint, keyCredential);
    SearchIndexerClient indexerClient = new SearchIndexerClient(endpoint, keyCredential);

    Console.WriteLine("Creating index...");
    _logger.LogInformation("Creating index for Orion Customer");

    // Build the index schema from the attributes on T.
    FieldBuilder fieldBuilder = new FieldBuilder();
    var searchFields = fieldBuilder.Build(typeof(T));
    var searchIndex = new SearchIndex(GetIndexName(HdsConstants.Orion, typeof(T).ToString()), searchFields);

    // Delete any stale copy of the index so it is recreated from scratch.
    CleanupSearchIndexClientResources(indexClient, searchIndex);
    indexClient.CreateOrUpdateIndex(searchIndex);

    Console.WriteLine("Creating data source...");
    _logger.LogInformation("Creating data source for Orion Customer");
    var dataSource = new SearchIndexerDataSourceConnection(
        GetDataSourceName(HdsConstants.Orion, typeof(T).ToString()),
        SearchIndexerDataSourceType.AzureSql,
        azureSQLConnectionStr,
        new SearchIndexerDataContainer($"[{GetTableName(typeof(T).ToString())}]"));
    indexerClient.CreateOrUpdateDataSourceConnection(dataSource);

    // Create the indexer on a daily schedule.
    Console.WriteLine("Creating Azure SQL indexer...");
    _logger.LogInformation("Creating Azure SQL indexer for Orion Customer");
    var schedule = new IndexingSchedule(TimeSpan.FromDays(1))
    {
        StartTime = DateTimeOffset.Now
    };
    var parameters = new IndexingParameters()
    {
        BatchSize = 100,
        MaxFailedItems = 0,
        MaxFailedItemsPerBatch = 0
    };
    var indexer = new SearchIndexer(GetIndexerName(HdsConstants.Orion, typeof(T).ToString()), dataSource.Name, searchIndex.Name)
    {
        Description = "Data indexer",
        Schedule = schedule,
        Parameters = parameters,
    };

    // BUG FIX: the async overloads were previously called without being awaited
    // from this synchronous method, so creation/run could still be in flight (and
    // any failure unobserved) when the method returned. Use the synchronous overloads.
    indexerClient.CreateOrUpdateIndexer(indexer);

    Console.WriteLine("Running Azure SQL indexer...");
    try
    {
        indexerClient.RunIndexer(indexer.Name);
    }
    // BUG FIX: Azure.Search.Documents throws RequestFailedException, not the legacy
    // CloudException, so the previous catch could never match (and the exception of a
    // fire-and-forget task would not have surfaced here anyway). 429 = throttled.
    catch (RequestFailedException e) when (e.Status == 429)
    {
        Console.WriteLine("Failed to run indexer: {0}", e.Message);
        _logger.LogError("Failed to run indexer: {0}", e.Message);
    }
}
/// <summary>
/// Deletes any existing indexer named "{index}-indexer" and creates a new one that
/// runs <paramref name="skillSet"/> over the blobs in <paramref name="dataSource"/>
/// and writes the enriched output into <paramref name="index"/>.
/// </summary>
/// <param name="indexerClient">Client used to manage indexers on the search service.</param>
/// <param name="dataSource">The blob data source the indexer reads from.</param>
/// <param name="skillSet">The skillset the indexer executes.</param>
/// <param name="index">The target search index.</param>
/// <returns>The created <see cref="SearchIndexer"/>.</returns>
public static async Task <SearchIndexer> CreateIndexerAsync(SearchIndexerClient indexerClient, SearchIndexerDataSourceConnection dataSource, SearchIndexerSkillset skillSet, SearchIndex index)
{
    // Tolerate any number of failed documents so a single bad blob does not stop the run.
    IndexingParameters indexingParameters = new IndexingParameters()
    {
        MaxFailedItems = -1,
        MaxFailedItemsPerBatch = -1,
    };
    indexingParameters.IndexingParametersConfiguration = new IndexingParametersConfiguration();
    indexingParameters.IndexingParametersConfiguration.DataToExtract = BlobIndexerDataToExtract.ContentAndMetadata;
    indexingParameters.IndexingParametersConfiguration.ParsingMode = BlobIndexerParsingMode.Text;

    string indexerName = index.Name + "-indexer";
    SearchIndexer indexer = new SearchIndexer(indexerName, dataSource.Name, index.Name)
    {
        Description = index.Name + " Indexer",
        SkillsetName = skillSet.Name,
        Parameters = indexingParameters
    };

    // Blob storage paths can contain characters that are invalid in document keys,
    // so base64-encode the storage path before mapping it into the index.
    FieldMappingFunction mappingFunction = new FieldMappingFunction("base64Encode");
    mappingFunction.Parameters.Add("useHttpServerUtilityUrlTokenEncode", true);
    indexer.FieldMappings.Add(new FieldMapping("metadata_storage_path")
    {
        TargetFieldName = "metadata_storage_path",
        MappingFunction = mappingFunction
    });

    // (Removed: several large blocks of commented-out field mappings that were dead code.)

    // Delete any existing indexer of the same name so it is recreated from scratch.
    try
    {
        await indexerClient.GetIndexerAsync(indexer.Name);
        await indexerClient.DeleteIndexerAsync(indexer.Name);
    }
    catch (RequestFailedException ex) when (ex.Status == 404)
    {
        // The indexer does not exist yet; nothing to delete.
    }

    try
    {
        await indexerClient.CreateIndexerAsync(indexer);
    }
    catch (RequestFailedException ex)
    {
        Console.WriteLine("Failed to create the indexer\n Exception message: {0}\n", ex.Message);
        ExitProgram("Cannot continue without creating an indexer");
    }

    return indexer;
}
// Verifies that the ETag value handed to the SearchIndexerDataSourceConnection
// constructor round-trips through the ETag property.
public void ParsesETag(string value, string expected)
{
    var connection = new SearchIndexerDataSourceConnection(
        null, null, SearchIndexerDataSourceType.AzureBlob, null, null, null, null, value, null);

    Assert.AreEqual(expected, connection.ETag?.ToString());
}
/// <summary>
/// Creates or updates the concerts search index, the Azure SQL data source it is
/// fed from, and the indexer that keeps the index in sync with the Concerts table.
/// </summary>
/// <param name="serviceClient">Client used to manage indexes on the search service.</param>
private void InitializeConcertsIndex(SearchIndexClient serviceClient)
{
    // Field definitions for the searchable concert data.
    SearchField[] concertFields =
    {
        new SearchField(nameof(Concert.Id), SearchFieldDataType.String)
        {
            IsKey = true,
            IsSearchable = false
        },
        new SearchField(nameof(Concert.Artist), SearchFieldDataType.String)
        {
            AnalyzerName = LexicalAnalyzerName.EnMicrosoft,
            IsSearchable = true,
        },
        new SearchField(nameof(Concert.Genre), SearchFieldDataType.String)
        {
            AnalyzerName = LexicalAnalyzerName.EnMicrosoft,
            IsSearchable = true,
            IsFilterable = true,
            IsFacetable = true
        },
        new SearchField(nameof(Concert.Location), SearchFieldDataType.String)
        {
            AnalyzerName = LexicalAnalyzerName.EnMicrosoft,
            IsSearchable = true,
            IsFilterable = true,
            IsFacetable = true
        },
        new SearchField(nameof(Concert.Title), SearchFieldDataType.String)
        {
            AnalyzerName = LexicalAnalyzerName.EnMicrosoft,
            IsSearchable = true,
        },
        new SearchField(nameof(Concert.Description), SearchFieldDataType.String)
        {
            AnalyzerName = LexicalAnalyzerName.EnMicrosoft,
            IsSearchable = true,
        },
        new SearchField(nameof(Concert.Price), SearchFieldDataType.Double)
        {
            IsSearchable = false,
            IsFilterable = true,
            IsFacetable = true,
            IsSortable = true,
        },
        new SearchField(nameof(Concert.StartTime), SearchFieldDataType.DateTimeOffset)
        {
            IsSearchable = false,
            IsSortable = true,
            IsFilterable = true
        },
    };

    var index = new SearchIndex(IndexNameConcerts)
    {
        Fields = concertFields,
        DefaultScoringProfile = "default-scoring",
    };

    // Suggest-as-you-type over the fields users most often search by.
    index.Suggesters.Add(new SearchSuggester(
        "default-suggester",
        new[] { nameof(Concert.Artist), nameof(Concert.Location), nameof(Concert.Title) }));

    // Weight artist matches heavily and title matches above average.
    index.ScoringProfiles.Add(new ScoringProfile("default-scoring")
    {
        TextWeights = new TextWeights(new Dictionary <string, double>
        {
            { nameof(Concert.Artist), 2.0 },
            { nameof(Concert.Title), 1.5 }
        })
    });

    serviceClient.CreateOrUpdateIndex(index);

    var indexerClient = new SearchIndexerClient(this.searchServiceUri, this.azureKeyCredential);

    // Data source pointing at the Concerts SQL table; SQL integrated change
    // tracking lets the indexer pick up only rows that changed since last run.
    var sqlDataSource = new SearchIndexerDataSourceConnection(
        IndexNameConcerts,
        SearchIndexerDataSourceType.AzureSql,
        this.concertsSqlDatabaseConnectionString,
        new SearchIndexerDataContainer("Concerts"))
    {
        DataChangeDetectionPolicy = new SqlIntegratedChangeTrackingPolicy()
    };
    indexerClient.CreateOrUpdateDataSourceConnection(sqlDataSource);

    // Indexer that pulls database rows into the index every five minutes.
    var concertsIndexer = new SearchIndexer(
        name: IndexNameConcerts,
        dataSourceName: IndexNameConcerts,
        targetIndexName: IndexNameConcerts)
    {
        Schedule = new IndexingSchedule(TimeSpan.FromMinutes(5))
    };
    indexerClient.CreateOrUpdateIndexer(concertsIndexer);
}
/// <summary>
/// Console walkthrough that builds a complete cognitive search pipeline:
/// data source, skillset, index, and indexer, then checks the indexer status.
/// </summary>
static void Main(string[] args)
{
    Console.BackgroundColor = ConsoleColor.Black;
    Console.ForegroundColor = ConsoleColor.Cyan;

    // Step 1 - Create datasource
    Console.WriteLine("\nStep 1 - Creating the data source...");

    IConfigurationRoot configuration = new ConfigurationBuilder()
        .AddJsonFile("appsettings.json")
        .Build();

    string searchServiceUri = configuration["SearchServiceUri"];
    string cognitiveServicesKey = configuration["CognitiveServicesKey"];
    string adminApiKey = configuration["SearchServiceAdminApiKey"];

    var credential = new AzureKeyCredential(adminApiKey);
    var indexClient = new SearchIndexClient(new Uri(searchServiceUri), credential);
    var indexerClient = new SearchIndexerClient(new Uri(searchServiceUri), credential);

    SearchIndexerDataSourceConnection dataSource = CreateOrUpdateDataSource(indexerClient, configuration);

    // Step 2 - Create the skillset
    Console.WriteLine("\nStep 2 - Creating the skillset...");
    Console.WriteLine("\tStep 2.1 - Adding the skills...");

    Console.WriteLine("\t\t OCR skill");
    OcrSkill ocrSkill = CreateOcrSkill();
    Console.WriteLine("\t\t Merge skill");
    MergeSkill mergeSkill = CreateMergeSkill();
    Console.WriteLine("\t\t Entity recognition skill");
    EntityRecognitionSkill entityRecognitionSkill = CreateEntityRecognitionSkill();
    Console.WriteLine("\t\t Language detection skill");
    LanguageDetectionSkill languageDetectionSkill = CreateLanguageDetectionSkill();
    Console.WriteLine("\t\t Split skill");
    SplitSkill splitSkill = CreateSplitSkill();
    Console.WriteLine("\t\t Key phrase skill");
    KeyPhraseExtractionSkill keyPhraseExtractionSkill = CreateKeyPhraseExtractionSkill();

    // The skillset executes the skills in the order listed here.
    var skills = new List <SearchIndexerSkill>
    {
        ocrSkill,
        mergeSkill,
        languageDetectionSkill,
        splitSkill,
        entityRecognitionSkill,
        keyPhraseExtractionSkill
    };

    Console.WriteLine("\tStep 2.2 - Building the skillset...");
    SearchIndexerSkillset skillset = CreateOrUpdateDemoSkillSet(indexerClient, skills, cognitiveServicesKey);

    // Step 3 - Create the index
    Console.WriteLine("\nStep 3 - Creating the index...");
    SearchIndex demoIndex = CreateDemoIndex(indexClient);

    // Step 4 - Create the indexer, map fields, and execute transformations
    Console.WriteLine("\nStep 4 - Creating the indexer and executing the pipeline...");
    SearchIndexer demoIndexer = CreateDemoIndexer(indexerClient, dataSource, skillset, demoIndex);

    // Step 5 - Monitor the indexing process
    Console.WriteLine("\nStep 5 - Check the indexer overall status...");
    CheckIndexerOverallStatus(indexerClient, demoIndexer);
}
/// <summary>
/// Deletes any existing "demoindexer" and creates a new one that runs
/// <paramref name="skillSet"/> over <paramref name="dataSource"/> and writes
/// the enriched output into <paramref name="index"/>.
/// </summary>
private static SearchIndexer CreateDemoIndexer(SearchIndexerClient indexerClient, SearchIndexerDataSourceConnection dataSource, SearchIndexerSkillset skillSet, SearchIndex index)
{
    // Keep indexing even when individual documents fail.
    var parameters = new IndexingParameters()
    {
        MaxFailedItems = -1,
        MaxFailedItemsPerBatch = -1,
    };
    parameters.Configuration.Add("dataToExtract", "contentAndMetadata");
    parameters.Configuration.Add("imageAction", "generateNormalizedImages");

    // Blob storage paths can contain characters that are invalid in document
    // keys, so base64-encode the path before using it as the "id" key field.
    var keyMappingFunction = new FieldMappingFunction("base64Encode");
    keyMappingFunction.Parameters.Add("useHttpServerUtilityUrlTokenEncode", true);

    var indexer = new SearchIndexer("demoindexer", dataSource.Name, index.Name)
    {
        Description = "Demo Indexer",
        SkillsetName = skillSet.Name,
        Parameters = parameters,
        FieldMappings =
        {
            new FieldMapping("metadata_storage_path")
            {
                TargetFieldName = "id",
                MappingFunction = keyMappingFunction
            },
            new FieldMapping("content") { TargetFieldName = "content" }
        },
        OutputFieldMappings =
        {
            new FieldMapping("/document/pages/*/organizations/*") { TargetFieldName = "organizations" },
            new FieldMapping("/document/pages/*/keyPhrases/*") { TargetFieldName = "keyPhrases" },
            new FieldMapping("/document/languageCode") { TargetFieldName = "languageCode" }
        }
    };

    // Recreate the indexer from scratch if it already exists.
    try
    {
        indexerClient.GetIndexer(indexer.Name);
        indexerClient.DeleteIndexer(indexer.Name);
    }
    catch (RequestFailedException ex) when (ex.Status == 404)
    {
        // The indexer does not exist yet; nothing to delete.
    }

    try
    {
        indexerClient.CreateIndexer(indexer);
    }
    catch (RequestFailedException ex)
    {
        Console.WriteLine("Failed to create the indexer\n Exception message: {0}\n", ex.Message);
        ExitProgram("Cannot continue without creating an indexer");
    }

    return indexer;
}
/// <summary>
/// Creates or updates the blob data source and the indexer over it, resets the
/// indexer if it already exists, and then runs it once against <paramref name="indexName"/>.
/// </summary>
/// <param name="indexName">Name of the target search index.</param>
/// <param name="indexerClient">Client used to manage indexers on the search service.</param>
private static async Task CreateAndRunBlobIndexerAsync(string indexName, SearchIndexerClient indexerClient)
{
    SearchIndexerDataSourceConnection blobDataSource = new SearchIndexerDataSourceConnection(
        name: configuration["BlobStorageAccountName"],
        type: SearchIndexerDataSourceType.AzureBlob,
        connectionString: configuration["BlobStorageConnectionString"],
        container: new SearchIndexerDataContainer("gapzap-pdf-docs"));

    // The blob data source does not need to be deleted if it already exists,
    // but the connection string might need to be updated if it has changed.
    await indexerClient.CreateOrUpdateDataSourceConnectionAsync(blobDataSource);

    Console.WriteLine("Creating Blob Storage indexer...\n");

    // Tell the indexer to parse each blob as a JSON document.
    IndexingParameters parameters = new IndexingParameters();
    parameters.Configuration.Add("parsingMode", "json");

    SearchIndexer blobIndexer = new SearchIndexer(
        name: "hotel-rooms-blob-indexer",
        dataSourceName: blobDataSource.Name,
        targetIndexName: indexName)
    {
        Parameters = parameters,
        Schedule = new IndexingSchedule(TimeSpan.FromDays(1))
    };

    // Map the Id field in the documents to the HotelId key field in the index.
    // (BUG FIX: an identical mapping was previously also built into an unused
    // local List<FieldMapping>; that dead code has been removed.)
    blobIndexer.FieldMappings.Add(new FieldMapping("Id") { TargetFieldName = "HotelId" });

    // Reset the indexer if it already exists so it re-indexes from scratch.
    try
    {
        await indexerClient.GetIndexerAsync(blobIndexer.Name);
        await indexerClient.ResetIndexerAsync(blobIndexer.Name);
    }
    catch (RequestFailedException ex) when (ex.Status == 404)
    {
        // The indexer does not exist yet; nothing to reset.
    }

    await indexerClient.CreateOrUpdateIndexerAsync(blobIndexer);

    Console.WriteLine("Running Blob Storage indexer...\n");

    try
    {
        await indexerClient.RunIndexerAsync(blobIndexer.Name);
    }
    catch (RequestFailedException ex) when (ex.Status == 429)
    {
        // 429 = the service throttled the run; report and continue.
        Console.WriteLine("Failed to run indexer: {0}", ex.Message);
    }
}
/// <summary>
/// Creates the ServiceOrder search index, its Azure SQL data source, and an
/// indexer on a five-minute schedule, then runs the indexer once and waits
/// briefly for indexing to start.
/// </summary>
public void CreateCustomerIndex()
{
    // Build the index schema from the attributes on the ServiceOrder model.
    SearchIndexClient indexClient = new SearchIndexClient(Uri, keyCredential);
    SearchIndexerClient indexerClient = new SearchIndexerClient(Uri, keyCredential);

    Console.WriteLine("Creating index...");
    FieldBuilder fieldBuilder = new FieldBuilder();
    var searchFields = fieldBuilder.Build(typeof(ServiceOrder));
    var searchIndex = new SearchIndex("serviceorder-sql-idx", searchFields);

    // If we have run the sample before, this index will be populated.
    // Clear it by deleting it if it exists and creating it again.
    CleanupSearchIndexClientResources(indexClient, searchIndex);
    indexClient.CreateOrUpdateIndex(searchIndex);

    Console.WriteLine("Creating data source...");
    var dataSource = new SearchIndexerDataSourceConnection(
        "serviceorder-sql-ds",
        SearchIndexerDataSourceType.AzureSql,
        azureSQLConnectionStr,
        new SearchIndexerDataContainer("[ServiceOrder]"));
    indexerClient.CreateOrUpdateDataSourceConnection(dataSource);

    Console.WriteLine("Creating Azure SQL indexer...");
    var parameters = new IndexingParameters()
    {
        BatchSize = 100,
        MaxFailedItems = 0,
        MaxFailedItemsPerBatch = 0
    };

    // Indexer declarations require a data source and search index;
    // the schedule and parameters are optional.
    var indexer = new SearchIndexer("serviceorder-sql-idxr", dataSource.Name, searchIndex.Name)
    {
        Description = "Service Order indexer",
        Schedule = new IndexingSchedule(TimeSpan.FromMinutes(5)),
        Parameters = parameters,
    };

    // BUG FIX: CreateOrUpdateIndexerAsync/RunIndexerAsync were previously called
    // without being awaited from this synchronous method, so failures went
    // unobserved and the method could return before the calls completed. Use the
    // synchronous overloads instead.
    indexerClient.CreateOrUpdateIndexer(indexer);

    Console.WriteLine("Running Azure SQL indexer...");
    try
    {
        indexerClient.RunIndexer(indexer.Name);
    }
    // BUG FIX: Azure.Search.Documents throws RequestFailedException, not the legacy
    // CloudException; the previous catch could never match. 429 = throttled.
    catch (RequestFailedException e) when (e.Status == 429)
    {
        Console.WriteLine("Failed to run indexer: {0}", e.Message);
    }

    // Wait 5 seconds for indexing to complete before checking status
    Console.WriteLine("Waiting for indexing...\n");
    System.Threading.Thread.Sleep(5000);
}
/// <summary>
/// End-to-end sample: validates configuration, creates the hotels index from the
/// Hotel model, creates the Azure SQL data source and a daily-scheduled indexer,
/// runs the indexer immediately, and reports its status.
/// </summary>
public static async Task Main(string[] args)
{
    IConfigurationBuilder builder = new ConfigurationBuilder().AddJsonFile("appsettings.json");
    IConfigurationRoot configuration = builder.Build();

    // Fail fast when the placeholder settings have not been replaced.
    if (configuration["SearchServiceEndPoint"] == "Put your search service endpoint here")
    {
        Console.Error.WriteLine("Specify SearchServiceEndPoint in appsettings.json");
        Environment.Exit(-1);
    }
    if (configuration["SearchServiceAdminApiKey"] == "Put your search service admin API key here")
    {
        Console.Error.WriteLine("Specify SearchServiceAdminApiKey in appsettings.json");
        Environment.Exit(-1);
    }
    if (configuration["AzureSQLConnectionString"] == "Put your Azure SQL database connection string here")
    {
        Console.Error.WriteLine("Specify AzureSQLConnectionString in appsettings.json");
        Environment.Exit(-1);
    }

    SearchIndexClient indexClient = new SearchIndexClient(new Uri(configuration["SearchServiceEndPoint"]), new AzureKeyCredential(configuration["SearchServiceAdminApiKey"]));
    SearchIndexerClient indexerClient = new SearchIndexerClient(new Uri(configuration["SearchServiceEndPoint"]), new AzureKeyCredential(configuration["SearchServiceAdminApiKey"]));

    Console.WriteLine("Creating index...");
    FieldBuilder fieldBuilder = new FieldBuilder();
    var searchFields = fieldBuilder.Build(typeof(Hotel));
    var searchIndex = new SearchIndex("hotels-sql-idx", searchFields);

    // If we have run the sample before, this index will be populated.
    // We can clear the index by deleting it if it exists and creating it again.
    CleanupSearchIndexClientResources(indexClient, searchIndex);
    indexClient.CreateOrUpdateIndex(searchIndex);

    Console.WriteLine("Creating data source...");

    // The sample data set has a table named "hotels" with a "soft delete" column
    // named IsDeleted: when the indexer sees that column set to true, it removes
    // the corresponding document from the search service. See
    // https://docs.microsoft.com/en-us/dotnet/api/microsoft.azure.search.models.softdeletecolumndeletiondetectionpolicy
    // The data set also uses SQL integrated change tracking, so the indexer can
    // detect which data changed since its last run. See
    // https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-tracking-sql-server
    var dataSource = new SearchIndexerDataSourceConnection(
        "hotels-sql-ds",
        SearchIndexerDataSourceType.AzureSql,
        configuration["AzureSQLConnectionString"],
        new SearchIndexerDataContainer("hotels"));

    // The data source does not need to be deleted if it was already created,
    // but the connection string may need to be updated if it was changed.
    indexerClient.CreateOrUpdateDataSourceConnection(dataSource);

    Console.WriteLine("Creating Azure SQL indexer...");
    var schedule = new IndexingSchedule(TimeSpan.FromDays(1))
    {
        StartTime = DateTimeOffset.Now
    };
    var parameters = new IndexingParameters()
    {
        BatchSize = 100,
        MaxFailedItems = 0,
        MaxFailedItemsPerBatch = 0
    };

    // Indexer declarations require a data source and search index. Common optional
    // properties include a schedule, parameters, and field mappings. The field
    // mappings below are redundant due to how the Hotel class is defined, but we
    // included them anyway to show the syntax.
    var indexer = new SearchIndexer("hotels-sql-idxr", dataSource.Name, searchIndex.Name)
    {
        Description = "Data indexer",
        Schedule = schedule,
        Parameters = parameters,
        FieldMappings =
        {
            new FieldMapping("_id") { TargetFieldName = "HotelId" },
            new FieldMapping("Amenities") { TargetFieldName = "Tags" }
        }
    };

    // Indexers remember how much they have already indexed; reset the indexer
    // if it exists so the sample data is indexed again on this run.
    CleanupSearchIndexerClientResources(indexerClient, indexer);
    await indexerClient.CreateOrUpdateIndexerAsync(indexer);

    // We created the indexer with a schedule, but we also want to run it immediately.
    Console.WriteLine("Running Azure SQL indexer...");
    try
    {
        await indexerClient.RunIndexerAsync(indexer.Name);
    }
    // BUG FIX: Azure.Search.Documents throws RequestFailedException, not the legacy
    // CloudException, so the previous catch could never match. 429 = throttled.
    catch (RequestFailedException e) when (e.Status == 429)
    {
        Console.WriteLine("Failed to run indexer: {0}", e.Message);
    }

    // Wait 5 seconds for indexing to complete before checking status.
    // BUG FIX: use Task.Delay instead of Thread.Sleep — blocking the thread
    // inside an async method defeats the purpose of async.
    Console.WriteLine("Waiting for indexing...\n");
    await Task.Delay(TimeSpan.FromSeconds(5));

    // After an indexer run, you can retrieve status.
    CheckIndexerStatus(indexerClient, indexer);

    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
    Environment.Exit(0);
}