public static void getMSWSearchResults_Test()
{
    try
    {
        // "using" guarantees the writer/reader are closed even if an exception is thrown mid-loop.
        using (StreamWriter sw = new StreamWriter(Constants.LocalPath_ParsedFields_MSW_Test))
        using (StreamReader sr = new StreamReader(Constants.LocalPath_Query_Test))
        {
            string query = sr.ReadLine();
            while (query != null)
            {
                // Download the raw response for this query and normalize it into parseable JSON.
                string strWebData = HtmlUtilities.downloadHtmlWithRequestHeaderAndBody(query, Constants.LocalPath_HttpResponse_MSW);
                strWebData = Utilities.NormalizeJson(strWebData);
                //sw.WriteLine(strWebData);

                // The query echoed back in the response is used as the key column for the output rows.
                query = Utilities.GetQuery(strWebData);
                List<MSWDoc> docList = Utilities.JsonSerializerMSWDoc(strWebData);
                foreach (MSWDoc doc in docList)
                {
                    // Strip line breaks so each result stays on a single TSV row.
                    string snippet = doc.source.HitHighlightedSummary;
                    if (snippet != null)
                    {
                        snippet = snippet.Replace("\n", "").Replace("\r", "");
                    }
                    sw.WriteLine("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}",
                        query, doc.Id, doc.ContentSource, doc.source.FileType, doc.source.Author,
                        doc.source.Created, doc.source.LastModifiedTime, doc.source.Path,
                        doc.source.Title, snippet);
                }
                query = sr.ReadLine();
            }
        }
    }
    catch (IOException e)
    {
        // Don't swallow I/O failures silently; a missing or locked file should be visible.
        Console.Error.WriteLine("getMSWSearchResults_Test failed: " + e.Message);
    }
}
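// Optional helper (a sketch, not called by the methods in this file): writes a header row matching the
// ten tab-separated columns emitted by getMSWSearchResults_Test, which can make the parsed-fields file
// easier to load downstream. Whether a header is wanted depends on the consumer, so this is illustrative only.
public static void WriteMSWHeader(StreamWriter sw)
{
    // Column names mirror the fields written above, in the same order.
    sw.WriteLine(string.Join("\t", new string[]
    {
        "Query", "Id", "ContentSource", "FileType", "Author",
        "Created", "LastModifiedTime", "Path", "Title", "Snippet"
    }));
}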
public static void getTermSearchResults_Personal()
{
    try
    {
        using (StreamWriter sw = new StreamWriter(Constants.LocalPath_ParsedFields_TermSearch))
        using (StreamReader sr = new StreamReader(Constants.LocalPath_Query_Personal))
        {
            string query = sr.ReadLine();
            while (query != null)
            {
                // Build the term-search request URL; spaces in the query are encoded as '+'.
                string url = Constants.HttpRequest_TermSearch_Prefix + query.Replace(" ", "+") + Constants.HttpRequest_TermSearch_Suffix;
                string strWebData = HtmlUtilities.downloadHtml(url, Constants.LocalPath_HttpResponse_TermSearch);
                List<Doc> docList = Utilities.JsonSerializer(strWebData);
                foreach (Doc doc in docList)
                {
                    // Strip line breaks so each result stays on a single TSV row.
                    string snippet = doc.Summary;
                    if (snippet != null)
                    {
                        snippet = snippet.Replace("\n", "").Replace("\r", "");
                    }
                    sw.WriteLine("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}\t{11}\t{12}\t{13}\t{14}\t{15}\t{16}\t{17}\t{18}\t{19}\t{20}\t{21}\t{22}\t{23}\t{24}\t{25}\t{26}\t{27}\t{28}\t{29}",
                        query, doc.Author, doc.FileType, doc.Id, doc.CreatedTime,
                        doc.LastModifiedTime, doc.LastModifiedByMeTime, doc.LastViewedByMeTime, doc.Path, doc.RedirectUrl,
                        doc.HashedPath, doc.Title, doc.Views, doc.Size, doc.ViewLast7days,
                        doc.PiSearchResultId, doc.GraphRankingScore, doc.SearchRankingScore, doc.MergedRankingScore, doc.ReUseScore,
                        doc.TrendingAroundMeScore, doc.ModifiedByMeScore, doc.ViewedByMeScore, doc.IsExternalContent, doc.ObjectEmbeddings,
                        doc.SPWebUrl, doc.RankInSearchResult, doc.StatusKey, doc.FastRankRankingScore, snippet);
                }
                query = sr.ReadLine();
            }
        }
    }
    catch (IOException e)
    {
        // Don't swallow I/O failures silently; a missing or locked file should be visible.
        Console.Error.WriteLine("getTermSearchResults_Personal failed: " + e.Message);
    }
}
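// The term-search URL above only replaces spaces with '+'. If queries can contain other reserved
// characters (&, #, ?, non-ASCII), a more robust approach is to percent-encode the whole term. This is a
// sketch of such a helper; it reuses the existing prefix/suffix constants and assumes the endpoint
// accepts standard percent-encoding (e.g. %20 for spaces).
public static string BuildTermSearchUrl(string query)
{
    // Uri.EscapeDataString escapes spaces and reserved characters in one step.
    return Constants.HttpRequest_TermSearch_Prefix
        + Uri.EscapeDataString(query)
        + Constants.HttpRequest_TermSearch_Suffix;
}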
public static List<Doc> getOLSSearchResults()
{
    using (StreamWriter sw = new StreamWriter(Constants.LocalPath_ParsedFields_OLS))
    {
        // Single OLS request under the configured flight; the parsed docs are written out and also returned.
        string strWebData = HtmlUtilities.downloadHtmlWithHeader(Constants.HttpRequest_OLS, Constants.Flight_OLS, Constants.LocalPath_HttpResponse_OLS);
        List<Doc> docList = Utilities.JsonSerializer(strWebData);
        foreach (Doc doc in docList)
        {
            sw.WriteLine("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}\t{11}\t{12}\t{13}\t{14}\t{15}\t{16}\t{17}\t{18}\t{19}\t{20}\t{21}\t{22}\t{23}\t{24}\t{25}\t{26}\t{27}",
                doc.Author, doc.FileType, doc.Id, doc.CreatedTime, doc.LastModifiedTime,
                doc.LastModifiedByMeTime, doc.LastViewedByMeTime, doc.Path, doc.RedirectUrl, doc.HashedPath,
                //doc.Summary,
                doc.Title, doc.Views, doc.Size, doc.ViewLast7days, doc.PiSearchResultId,
                doc.GraphRankingScore, doc.SearchRankingScore, doc.MergedRankingScore, doc.ReUseScore, doc.TrendingAroundMeScore,
                doc.ModifiedByMeScore, doc.ViewedByMeScore, doc.IsExternalContent, doc.ObjectEmbeddings, doc.SPWebUrl,
                doc.RankInSearchResult, doc.StatusKey, doc.FastRankRankingScore);
        }
        return docList;
    }
}
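// Example driver (hypothetical; not part of the original flow) showing how these exporters might be run
// together: the MSW test parse and the personal term-search parse write their TSV files as a side effect,
// while the OLS call also hands the parsed documents back for any further processing.
public static void RunAllExports()
{
    getMSWSearchResults_Test();
    getTermSearchResults_Personal();
    List<Doc> olsDocs = getOLSSearchResults();
    Console.WriteLine("OLS returned {0} documents.", olsDocs.Count);
}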