public async Task CanLoadRequestWithPostData(string url)
{
    const string data = "Testing123";

    // LoadRequest can only be used after an initial page has been loaded,
    // so start from a trivial in-memory HTML document.
    using (var browser = new ChromiumWebBrowser(new HtmlString("Testing")))
    {
        await browser.LoadPageAsync();

        // Build a POST request carrying the test payload as raw bytes.
        var request = new Request
        {
            Url = "http://httpbin.org/post",
            Method = "POST"
        };

        var body = new PostData();
        body.AddElement(new PostDataElement { Bytes = Encoding.UTF8.GetBytes(data) });
        request.PostData = body;

        await browser.LoadRequestAsync(request);

        // The browser should now be on the expected URL with a POST entry.
        var mainFrame = browser.GetMainFrame();
        Assert.Equal(url, mainFrame.Url);

        var navEntry = await browser.GetVisibleNavigationEntryAsync();
        Assert.Equal((int)HttpStatusCode.OK, navEntry.HttpStatusCode);
        Assert.True(navEntry.HasPostData);

        // httpbin echoes the posted body, so the payload must appear in the page text.
        var pageText = await browser.GetTextAsync();
        Assert.Contains(data, pageText);
    }
}
public async Task CanLoadRequestWithPostData(string url)
{
    const string data = "Testing123";

    //When Chromium Site Isolation is enabled we must first navigate to
    //a web page of the same origin to use LoadRequest
    //When Site Isolation is disabled we can navigate to any web page
    //https://magpcss.org/ceforum/viewtopic.php?f=10&t=18672&p=50266#p50249
    using (var browser = new ChromiumWebBrowser("http://httpbin.org/"))
    {
        var loadResponse = await browser.LoadUrlAsync();
        Assert.True(loadResponse.Success);

        // Build a POST request carrying the test payload as raw bytes.
        var request = new Request();
        request.Url = "http://httpbin.org/post";
        request.Method = "POST";

        var body = new PostData();
        body.AddElement(new PostDataElement { Bytes = Encoding.UTF8.GetBytes(data) });
        request.PostData = body;

        await browser.LoadRequestAsync(request);

        // The browser should now be on the expected URL with a POST entry.
        var mainFrame = browser.GetMainFrame();
        Assert.Equal(url, mainFrame.Url);

        var navEntry = await browser.GetVisibleNavigationEntryAsync();
        Assert.Equal((int)HttpStatusCode.OK, navEntry.HttpStatusCode);
        Assert.True(navEntry.HasPostData);

        // httpbin echoes the posted body, so the payload must appear in the page text.
        var pageText = await browser.GetTextAsync();
        Assert.Contains(data, pageText);
    }
}
/// <summary>
/// Runs a topic search against the European Parliament news archive and
/// returns the absolute document URLs of every unique article found across
/// all result pages.
/// </summary>
/// <param name="topics">Topic strings posted as "topicBeanCount.topicString" form fields.</param>
/// <param name="searchURL">URL of the search endpoint the initial POST is sent to.</param>
/// <returns>Array of unique absolute article URLs.</returns>
public string[] ExtractSearchResults(string[] topics, string searchURL)
{
    // Build the POST payload: one form field per requested topic.
    PostData pd = new PostData();
    foreach (string topic in topics)
    {
        pd.AddElement("topicBeanCount.topicString", topic);
    }

    string response = wb.Navigate(pd, searchURL);

    // Collect the paging offsets from the first result page. Matches look like
    // startValue=NNN" (the regex captures up to the closing quote), so take the
    // text after '=' and drop the trailing quote before parsing.
    Regex pageRegex = new Regex(@"startValue=(.*?)\""");
    string[] pageMatches = ExtractDataUsingRegex(pageRegex, response);

    // HashSet gives O(1) duplicate detection (the original List.Contains scan was O(n²)).
    HashSet<int> seenPages = new HashSet<int>();
    List<string> searchPageUrls = new List<string>();
    foreach (string match in pageMatches)
    {
        string ps = match.Substring(match.LastIndexOf('=') + 1);
        ps = ps.Substring(0, ps.Length - 1);
        int page = int.Parse(ps);
        if (seenPages.Add(page))
        {
            searchPageUrls.Add("http://www.europarl.europa.eu/news/archive/search/topicSearch.do?language=RO&startValue=" + page);
        }
    }

    // Extract unique article references ("pubRef=...&") from the first page
    // and from every additional search page. The regex is created once and
    // reused (the original rebuilt it on every loop iteration).
    Regex articleRegex = new Regex(@"pubRef=(.*?)&", RegexOptions.IgnoreCase);
    List<string> articles = new List<string>();
    HashSet<string> seenArticles = new HashSet<string>();

    foreach (string s in ExtractDataUsingRegex(articleRegex, response))
    {
        if (seenArticles.Add(s))
        {
            articles.Add(s);
        }
    }

    foreach (string pageUrl in searchPageUrls)
    {
        response = wb.Navigate(pd, pageUrl);
        foreach (string s in ExtractDataUsingRegex(articleRegex, response))
        {
            if (seenArticles.Add(s))
            {
                articles.Add(s);
            }
        }
    }

    // Post-process each reference into an absolute document URL: drop the
    // first 7 characters (the "pubRef=" prefix) and the last 7 characters
    // (presumably an encoded suffix plus the trailing '&' — TODO confirm
    // against a sample match) before prepending the document base URL.
    for (int idx = 0; idx < articles.Count; idx++)
    {
        articles[idx] = "http://www.europarl.europa.eu/sides/getDoc.do?pubRef="
            + articles[idx].Substring(7, articles[idx].Length - 14);
    }

    return articles.ToArray();
}