/// <summary>
/// Runs the user's search text through LUIS intent detection, then issues an Azure
/// Search query for each entity detected with sufficient confidence, aggregating the
/// returned documents onto the view model.
/// </summary>
/// <param name="model">Incoming search model; <c>SearchText</c> is read, <c>DetectedEntities</c> and <c>SearchResults</c> are populated.</param>
/// <returns>The search view rendered with the (possibly partially) populated model.</returns>
public async Task<ActionResult> Search(SearchModel model)
{
    List<AzureSearchResultModel> searchResults = new List<AzureSearchResultModel>();
    try
    {
        var luisResult = await LuisServiceHelper.SearchIntent(model.SearchText);

        // Only act on entities LUIS scored above the confidence threshold.
        // (The original `.Select(t => t)` was an identity projection and has been removed.)
        var entities = luisResult.Entities.Where(e => e.Score > 0.6).ToArray();

        foreach (var entity in entities)
        {
            var searchResult = await SearchServiceHelper.SearchAsync(entity.Entity);
            searchResults.AddRange(searchResult.Results.Select(s => s.Document));
            model.DetectedEntities.Add(entity.Entity);
        }

        // Assign once after the loop (the original reassigned the same list on every iteration).
        model.SearchResults = searchResults;
        return View(model);
    }
    catch (Exception ex)
    {
        // Best-effort degradation: surface whatever was gathered before the failure.
        // NOTE(review): Console.WriteLine is a poor error sink for a web app — prefer an ILogger.
        Console.WriteLine(ex.Message);
        model.SearchResults = searchResults;
        return View(model);
    }
}
/// <summary>
/// Initializes the visitor with the keyword query to populate and the value bindings
/// used during CAML translation; resolves the site's managed property name map once
/// at construction time.
/// </summary>
/// <param name="query">Target keyword query; must not be null.</param>
/// <param name="bindings">Name/value bindings applied while visiting the CAML tree; must not be null.</param>
public KeywordQueryCamlVisitor(KeywordQuery query, Hashtable bindings)
{
    CommonHelper.ConfirmNotNull(query, "query");
    CommonHelper.ConfirmNotNull(bindings, "bindings");
    this.query = query;
    this.bindings = bindings;
    // NOTE(review): presumably cached up front so later visits avoid repeated lookups — confirm
    // GetManagedPropertyNames has no side effects beyond the query's site.
    this.managedPropertyDictionary = SearchServiceHelper.GetManagedPropertyNames(query.Site);
}
/// <summary>
/// Cmdlet body: drops the target search index and recreates it from the
/// index-definition JSON file, terminating the pipeline on any failure.
/// </summary>
protected override void ProcessRecord()
{
    try
    {
        var helper = new SearchServiceHelper(new RestClient(SearchServiceUri), SearchServiceApiKey);

        helper.DeleteSearchIndex(SearchIndexName, this);

        var definitionJson = JsonFileHelper.ReadJsonFileToString(IndexCreationJsonFile);
        helper.CreateIndex(SearchIndexName, definitionJson, this);
    }
    catch (Exception e)
    {
        // Any failure is fatal for this cmdlet invocation.
        ThrowTerminatingError(new ErrorRecord(e, "101", ErrorCategory.CloseError, null));
    }
}
/// <summary>
/// Cmdlet body: reads person records from the JSON data file and uploads them to
/// the target search index, terminating the pipeline on any failure.
/// </summary>
protected override void ProcessRecord()
{
    try
    {
        WriteVerbose($"Seeding data to search index: {SearchIndexName}");

        var restClient = new RestClient(SearchServiceUri);
        var indexHelper = new SearchServiceHelper(restClient, SearchServiceApiKey);

        var personJson = JsonFileHelper.ReadJsonFileToString(PersonDataJsonFile);
        var people = JsonConvert.DeserializeObject<List<Person>>(personJson);

        indexHelper.SeedData(SearchIndexName, people, this);
    }
    catch (Exception e)
    {
        // Any failure is fatal for this cmdlet invocation.
        ThrowTerminatingError(new ErrorRecord(e, "102", ErrorCategory.CloseError, null));
    }
}
/// <summary>
/// Entry point for the SharePoint Online crawler. Loads configuration, wires up blob/table
/// storage and the search helper, parses crawl-mode switches from the command line, then
/// walks every (non-excluded) document library of the target site — either a full fetch of
/// all drive children or an incremental fetch driven by a persisted Graph delta token —
/// and finally (full mode only) rebuilds the search index from the fetched documents.
/// Command-line switches: -incrementalcrawl, -fullcrawl, -includeacls.
/// </summary>
static async Task Main(string[] args)
{
    // Load appsettings.json; bail out early with guidance when configuration is missing/invalid.
    var config = LoadAppSettings();
    if (null == config)
    {
        Console.WriteLine("Missing or invalid appsettings.json file. Please see README.md for configuration instructions.");
        return;
    }
    SetGlobalConfig(config);
    searchServiceHelper = new SearchServiceHelper(SearchServiceName, SearchServiceAdminKey);
    System.Diagnostics.Trace.TraceWarning("Slow response - database01");
    TimeSpan elapsedTime;

    // Start stopwatch for timing telemetry reported at the end of the crawl.
    Stopwatch sw = new Stopwatch();
    var timeStart = DateTime.Now;
    sw.Start();

    // Storage: the blob container receives fetched documents; table storage tracks crawl state.
    var storageAccount = CloudStorageAccount.Parse(StorageConnectionString);
    var storageClient = storageAccount.CreateCloudBlobClient();
    AzureTableStorage azTableStorage = new AzureTableStorage(StorageConnectionString, StorageTableName);
    AzureTableStorage azTableStorageSpoItems = new AzureTableStorage(StorageConnectionString, SpoItemStorageTableName);
    CloudBlobContainer container = await AzureBLOBStorage.CreateAzureBLOBContainer(storageClient, BlobContainerName);

    // Search
    AzureSearchServiceHelper searchClient = new AzureSearchServiceHelper(SearchServiceName, SearchServiceAdminKey);
    IDriveItemChildrenCollectionPage docLibItems;
    IDriveItemDeltaCollectionPage docLibDeltaItems;

    // Parse crawl-mode switches; when several are supplied, the later flag wins for IncrementalCrawl.
    for (int i = 0; i < args.Length; i++)
    {
        if (args[i].ToLower() == "-incrementalcrawl")
        {
            IncrementalCrawl = true;
            Console.WriteLine("Search Crawl mode set to Incremental");
            container = await AzureBLOBStorage.CreateAzureBLOBContainer(storageClient, BlobContainerName);
        }
        if (args[i].ToLower() == "-fullcrawl")
        {
            IncrementalCrawl = false;
            Console.WriteLine("Search Crawl mode set to Full");
            // Full crawl starts from a clean container: delete, then recreate.
            await AzureBLOBStorage.DeleteContainerFromAzureBLOB(container);
            container = await AzureBLOBStorage.CreateAzureBLOBContainer(storageClient, BlobContainerName);
        }
        if (args[i].ToLower() == "-includeacls")
        {
            IncludeAcls = true;
            // NOTE(review): message says "Full" but this switch toggles ACL inclusion — looks like
            // a copy/paste of the message above; confirm intended wording.
            Console.WriteLine("Search Crawl mode set to Full");
        }
    }

    // Push crawl settings into the SharePoint helper's static state.
    SharePointOnlineHelper.metadataFieldsToIgnore = MetadataFieldsToIgnore;
    SharePointOnlineHelper.metadataJSONStore = MetadataJSONStore;
    SharePointOnlineHelper.acls = IncludeAcls;
    SharePointOnlineHelper.azTableStorage = azTableStorageSpoItems;
    foreach (var metadataFieldToIgnore in MetadataFieldsToIgnore)
    {
        Console.WriteLine("Removing key [{0}] from metadata fields to extract", metadataFieldToIgnore);
    }

    // Query using Graph SDK (preferred when possible)
    GraphServiceClient graphClient = SharePointOnlineHelper.GetAuthenticatedGraphClient(config);
    Site targetSite = await graphClient.Sites.GetByPath(SiteUrl, SPOHostName).Request().GetAsync();
    // NOTE(review): .Result blocks synchronously inside an async method — deadlock/thread-pool
    // starvation risk; this should be awaited like the call above.
    ISiteDrivesCollectionPage drives = graphClient.Sites[targetSite.Id].Drives.Request().GetAsync().Result;

    //Graph BETA supports site pages
    //var sitePages = graphClient.Sites[targetSite.Id].Pages.Request().GetAsync().GetAwaiter().GetResult();
    //var sitePages = graphClient.Sites[targetSite.Id].Pages.Request().GetAsync().Result;
    //var a = 1;

    foreach (var drive in drives)
    {
        var driveName = drive.Name;
        var driveUrl = drive.WebUrl;
        bool excludedDocLIb = Array.Exists(DocLibsToIgnore, element => element == driveName);
        if (excludedDocLIb)
        {
            // NOTE(review): logs the entire ignore array rather than the matched driveName — confirm intended.
            Console.WriteLine("Skipping [{0}] as its an excluded docLib", DocLibsToIgnore);
            continue;
        }
        Console.WriteLine("Fetching items from drive [{0}]", driveName);
        var driveId = drive.Id;
        var driveContents = new List <DriveItem>();

        //Full Crawl Logic
        if (!IncrementalCrawl)
        {
            // First page of the drive's root children.
            docLibItems = await graphClient
                          .Drives[driveId]
                          .Root
                          .Children
                          .Request()
                          .GetAsync();
            driveContents.AddRange(docLibItems.CurrentPage);
            if (docLibItems.NextPageRequest != null)
            {
                // Page through the remainder of the drive, processing as we go.
                // NOTE(review): driveContents accumulates across pages, so items from earlier pages
                // are passed to GetSpoDocumentItems again on every iteration — confirm that helper
                // de-duplicates (otherwise work is repeated per page).
                while (docLibItems.NextPageRequest != null)
                {
                    docLibItems = await docLibItems.NextPageRequest.GetAsync();
                    driveContents.AddRange(docLibItems.CurrentPage);
                    await SharePointOnlineHelper.GetSpoDocumentItems(graphClient, driveContents, driveId, container, IncludeAcls);
                }
            }
            else
            {
                // Single-page drive: process the lot in one call.
                await SharePointOnlineHelper.GetSpoDocumentItems(graphClient, driveContents, driveId, container, IncludeAcls);
            }
        }

        //Incremental Crawl Logic
        if (IncrementalCrawl)
        {
            //Retrieve the last known deltaToken from Table storage, if the value is null it will fetch all items for that drive
            //Base64 encode the string to remove special characters
            byte[] byt = System.Text.Encoding.UTF8.GetBytes(driveUrl);
            var driveUrlEscpaed = Convert.ToBase64String(byt);
            var lastDeltaToken = await azTableStorage.GetEntitiesInPartion(driveUrlEscpaed);
            docLibDeltaItems = await graphClient
                               .Drives[driveId]
                               .Root
                               .Delta(lastDeltaToken)
                               .Request()
                               .GetAsync();
            // NOTE(review): ToString() runs before the null check below, so a missing
            // "@odata.deltaLink" key throws here and the `deltaLink != null` guard can never fire.
            var deltaLink = docLibDeltaItems.AdditionalData["@odata.deltaLink"].ToString();
            if (deltaLink != null)
            {
                // Extract the raw token value from the deltaLink query string
                // (offsets skip "token='" and the trailing "')" — TODO confirm against a real link).
                var tokenindex = deltaLink.IndexOf("token=");
                var token = deltaLink.Substring(tokenindex + 7, deltaLink.ToString().Length - tokenindex - 9);
                driveContents.AddRange(docLibDeltaItems.CurrentPage);
                if (docLibDeltaItems.NextPageRequest != null)
                {
                    // NOTE(review): docLibDeltaItems is never reassigned inside this loop (results go
                    // to docLibItems2), so NextPageRequest never changes — this looks like an
                    // infinite loop on multi-page delta results; confirm and fix.
                    while (docLibDeltaItems.NextPageRequest != null)
                    {
                        var docLibItems2 = await docLibDeltaItems.NextPageRequest.GetAsync();
                        driveContents.AddRange(docLibItems2.CurrentPage);
                        await SharePointOnlineHelper.GetSpoDocumentItems(graphClient, driveContents, driveId, container, IncludeAcls);
                    }
                }
                else
                {
                    await SharePointOnlineHelper.GetSpoDocumentItems(graphClient, driveContents, driveId, container, IncludeAcls);
                    //Lets persist the changeToken to storage so we can continue the next incrmental crawl from this point.
                    IndexCrawlEntity indexCrawlEntity = new IndexCrawlEntity(driveUrlEscpaed, token);
                    // NOTE(review): not awaited — if InsertEntity returns a Task, failures are unobserved
                    // and the process may exit before the write completes. Also note the token is only
                    // persisted on the single-page path, never after the paging loop above.
                    azTableStorage.InsertEntity(indexCrawlEntity);
                }
                //Console.WriteLine("Fetched total of {0} documents from [{1}] data source", DownloadFileCount, driveName);
            }
        }
    }

    if (!IncrementalCrawl)
    {
        //Now lets do a full crawl of all the fetched SPO documents from the BLOB store as the fetching of all documents into storage would have completed by now
        //Warning this will perform an entire search index rebuild - so while this phase is running search resultset will be impacted
        await IndexDocumentsAsync();
    }

    // Emit timing telemetry for the whole crawl.
    sw.Stop();
    elapsedTime = sw.Elapsed;
    var timeEnd = DateTime.Now;
    Console.WriteLine("Fetched total of {0} documents during crawl", AzureBLOBStorage.DownloadFileCount);
    Console.WriteLine("Crawl Start time: {0}", timeStart);
    Console.WriteLine("Crawl Completed time: {0}", timeEnd);
    Console.WriteLine("Total crawl duration time: {0}", elapsedTime);
}