/// <summary>
/// Kicks off a crawl/index run rooted at <paramref name="pageName"/>: reads the crawl
/// depth from config, registers a new site-index group, saves the seed page (with its
/// links and keywords) so the crawler has starting links, then runs the crawler loop.
/// </summary>
/// <param name="pageName">Path/URL of the first page to index; its directory becomes the site folder.</param>
/// <returns>The <see cref="JsonResult"/> produced by <c>StartCrawler</c>, or <c>null</c> if any step failed.</returns>
public JsonResult StartIndexProcess(string pageName)
{
    try
    {
        // Crawl depth comes from app config. Validate explicitly so a missing or
        // malformed "DepthLevels" setting produces a descriptive error instead of
        // an opaque FormatException/ArgumentNullException from Int16.Parse.
        string depthSetting = ConfigurationManager.AppSettings["DepthLevels"];
        if (!short.TryParse(depthSetting, out short depth))
        {
            throw new InvalidOperationException(
                "AppSetting 'DepthLevels' is missing or not a valid Int16: '" + depthSetting + "'");
        }
        CRAWLER_DEPTH = depth;

        string folder = SearchUtils.GetDirectoryForFile(pageName, -1);
        string actualPage = System.IO.Path.GetFileName(pageName);

        // Create a record to serve as a group ID for the site (or group of pages) to index.
        int siteIndexID = SearchServices.GetNewSiteIndex(folder, actualPage);

        // Save the first page so the parallel crawl functions have links to start from.
        SearchResult csr = SearchUtils.LoadPageContent(pageName, -1, siteIndexID);
        SearchUtils.GetLinksAndKeywords(csr);
        csr.PageID = SearchServices.SaveSearchResults(csr);

        // Everything is ready: run the crawler loop until all pages have been indexed.
        return StartCrawler(-1, siteIndexID);
    }
    catch (Exception ex)
    {
        // Log the full exception (message + stack trace + inner exceptions), not just
        // ex.Message, so failures are diagnosable.
        // NOTE(review): Console output is typically invisible in a web host — consider
        // routing this through the application's logger instead.
        Console.WriteLine(ex);
        return null;
    }
}