/// <summary>
/// Downloads the page at <paramref name="url"/> and returns its body as text.
/// </summary>
/// <param name="url">Absolute URL of the page to fetch.</param>
/// <returns>The decoded page content.</returns>
private string DownloadHtml(string url)
{
    // 120000 ms timeout passed through to the downloader.
    var webClient = new WebDownloader(120000);
    byte[] pageData = webClient.DownloadData(url);

    // BUG FIX: Encoding.ASCII silently replaces every non-ASCII byte with '?',
    // corrupting any page that is not pure 7-bit ASCII. HTML today is
    // overwhelmingly UTF-8, so decode as UTF-8 (a superset of ASCII).
    return Encoding.UTF8.GetString(pageData);
}
/// <summary>
/// Downloads and parses the band's page.
/// </summary>
/// <returns>Parsed band's page.</returns>
public BandResult GetFullBand()
{
    var downloader = new WebDownloader(BandUrl);
    var html = downloader.DownloadData();
    return WebContentParser.Parse<BandResult>(html);
}
/// <summary>
/// Executes the open-URI command: loopback URIs are opened from disk,
/// remote URIs are downloaded into memory and opened as a stream.
/// Errors are logged and surfaced to the user, never rethrown.
/// </summary>
private void CommandOpenUri_OnExecute(object objUri)
{
    // Re-validate; commands may be invoked without the CanExecute gate.
    if (!CommandOpenUri_OnCanExecute(objUri))
    {
        return;
    }

    try
    {
        if (m_vm == null)
        {
            return;
        }

        var uri = (Uri)objUri;

        if (uri.IsLoopback)
        {
            // Local resource: open straight from the file system.
            OpenFile(uri.LocalPath, true);
        }
        else
        {
            // Remote resource: fetch it, then open the in-memory content.
            var downloader = new WebDownloader();
            var webFile = downloader.Download(uri);
            using (var ms = new MemoryStream(webFile.Content))
            {
                OpenStream(webFile.FileName, ms);
            }
        }
    }
    catch (Exception ex)
    {
        Logger.Error(ex);
        m_userMessage.Error(ex);
    }
}
/// <summary>
/// Downloads and parses the band's page asynchronously.
/// </summary>
/// <returns>Parsed band's page.</returns>
public async Task<BandResult> GetFullBandAsync()
{
    var downloader = new WebDownloader(BandUrl);
    var html = await downloader.DownloadDataAsync();
    return WebContentParser.Parse<BandResult>(html);
}
/// <summary>
/// Downloads and parses the band's album list asynchronously.
/// </summary>
/// <param name="type">Which discography tab to fetch.</param>
public async Task<IEnumerable<AlbumBandResult>> GetAlbumsAsync(AlbumListType type)
{
    // The discography endpoint selects the tab via the lowercased enum name.
    var tab = type.ToString().ToLower();
    var wd = new WebDownloader($@"https://www.metal-archives.com/band/discography/id/{Id}/tab/" + tab);
    var html = await wd.DownloadDataAsync();
    return WebContentParser.ParseList<AlbumBandResult>(html);
}
/// <summary>
/// Downloads and parses the album's page asynchronously.
/// </summary>
/// <returns>Parsed album's page.</returns>
public async Task<AlbumResult> GetFullAlbumAsync()
{
    var downloader = new WebDownloader(AlbumUrl);
    var html = await downloader.DownloadDataAsync();
    return WebContentParser.Parse<AlbumResult>(html);
}
/// <summary>
/// Verifies that DownLoad writes the expected progress messages to the console
/// as the underlying client raises progress/completed events.
/// </summary>
public void WebDownloader_DownLoad()
{
    // ARRANGE: redirect console output so it can be inspected.
    using var consoleCapture = new StringWriter();
    Console.SetOut(consoleCapture);
    var webClientFactory = new TestableWebClientFactory();

    // ACT: kick off the download, then simulate 10% and 100% progress
    // followed by completion.
    var downloader = new WebDownloader(webClientFactory);
    downloader.DownLoad("some web file", "myfile.csv");
    webClientFactory.TestClient
        .InvokeDownloadProgressChanged(10, 1024)
        .InvokeDownloadProgressChanged(100, 1024 * 100)
        .InvokeDownloadCompleted();
    consoleCapture.Flush(); // Ensure writer is flushed

    // ASSERT: the captured lines match the expected progress log exactly.
    var expectedOutput = new[]
    {
        "Downloading some web file...",
        "Downloading...10% complete (1,024 bytes)",
        "Downloading...100% complete (102,400 bytes)",
        "Downloaded to myfile.csv ",
        string.Empty
    };
    var actualConsoleLines = consoleCapture.ToString().Split(Environment.NewLine);
    CollectionAssert.AreEqual(expectedOutput, actualConsoleLines);
}
/// <summary>
/// Downloads and parses the album's page.
/// </summary>
/// <returns>Parsed album's page.</returns>
public AlbumResult GetFullAlbum()
{
    var downloader = new WebDownloader(AlbumUrl);
    var html = downloader.DownloadData();
    return WebContentParser.Parse<AlbumResult>(html);
}
/// <summary>
/// Starts a chunked download of <paramref name="remoteFile"/> and blocks
/// until the downloader reports completion or an error.
/// </summary>
/// <param name="remoteFile">Remote file name (without the ".txt" suffix).</param>
/// <param name="destFile">Local destination path.</param>
/// <param name="chunkCount">Number of parallel chunks to use.</param>
/// <returns>true once the download loop finishes.</returns>
/// <exception cref="ApplicationException">Wraps any failure during setup/start.</exception>
private bool InvokeDownload(string remoteFile, string destFile, short chunkCount)
{
    string downloadUrl = DownloadDir + remoteFile + ".txt";
    WebDownloader downloader = DownloaderFactory.GetDownloader(new TransferParameter()
    {
        ChunkCount = chunkCount,
        Environment = _environment,
        TransferUrl = downloadUrl,
        LocalFile = destFile,
        RemoteFile = remoteFile,
        SupportBrokenResume = _supportBorkenResume,
        SupportDebug = _supportDebug
    });

    try
    {
        bool hasFinished = false;
        downloader.Completed += new CompletedEventHandler(OnCompleted);
        // Either completion or an error ends the wait loop below.
        downloader.Completed += new CompletedEventHandler(
            delegate(object sender, CompletedEventArgs args) { hasFinished = true; });
        downloader.ExceptionError += new ExceptionEventHandle(
            delegate(object sender, ExceptionEventArgs args) { hasFinished = true; });
        downloader.ExceptionError += new ExceptionEventHandle(OnException);
        downloader.Progress += new ProgressEventHandle(OnProgress);
        downloader.Start();

        // Poll until an event handler flips the flag.
        while (!hasFinished)
        {
            Thread.Sleep(100);
        }

        return true;
    }
    catch (Exception ex)
    {
        _errmsg = ex.Message;
        // BUG FIX: the original threw a new ApplicationException with only the
        // message, discarding the original exception's type and stack trace.
        // Preserve it as the InnerException.
        throw new ApplicationException(ex.Message, ex);
    }
}
/// <summary> /// 初始化url信息 /// </summary> private void GatherInitUrls() { //域名 string strPagePre = this.txtDomainUrl.Text.Trim(); if (strPagePre.EndsWith("/")) { strPagePre = strPagePre.Remove(strPagePre.Length - 1); } //相对路径,分页参数需用占位符替换 string strPagePost = this.txtRelativeUrl.Text.Trim(); if (!strPagePost.StartsWith("/")) { strPagePost = "/" + strPagePost; } //拼接成完整的请求url string strPage = string.Concat(strPagePre, strPagePost); //请求开始的页码 int startPageIndex = 1; //请求结束的页码 int endPageIndex = 1; if (!string.IsNullOrEmpty(this.txtStartPageIndex.Text.Trim())) { int.TryParse(this.txtStartPageIndex.Text.Trim(), out startPageIndex); } if (!string.IsNullOrEmpty(this.txtEndPageIndex.Text.Trim())) { int.TryParse(this.txtEndPageIndex.Text.Trim(), out endPageIndex); } int articleCategoryId = Convert.ToInt32(this.cobArticleCategory.SelectedValue); //初始化下载器 downloader = new WebDownloader(AppHelper.RequestTimeSpan, articleCategoryId); string preUrl = ""; for (int i = startPageIndex; i <= endPageIndex; i++) { //添加到下载队列 string strUrl = string.Format(strPage, i); if (string.IsNullOrEmpty(preUrl)) { downloader.AddUrlQueue(strUrl, strUrl); } else { downloader.AddUrlQueue(strUrl, preUrl); } preUrl = strUrl; } }
/// <summary>
/// Pipeline entry point: read the standard pages CSV, download each page,
/// scrape the link URIs out of the HTML, and persist them.
/// </summary>
public void Run()
{
    var pages = new StandardCsvRepository().Read("StandardPages.csv");
    var pageHtml = new WebDownloader().GetAll(pages);
    var links = new ScreenScraper().GetLinkUris(pageHtml);
    repository.Save(links);
}
/// <summary>
/// Issues a search request against the service and deserializes the XML response.
/// </summary>
/// <param name="searchType">API search-type path segment.</param>
/// <param name="release">Release identifier to search for.</param>
/// <param name="limits">Result limits appended as a query string.</param>
private Response Search(string searchType, string release, S4ULimits limits)
{
    string limitsString = _limitsBuilder.BuildString(limits);
    var requestUri = new Uri(_baseUrl + searchType + "/" + release + limitsString);

    using (var xmlStream = new WebDownloader().OpenRead(requestUri))
    {
        return (Response)_xmlSerializer.Deserialize(xmlStream);
    }
}
/// <summary>
/// GetPages should fetch every URL; with an identity fetcher the returned
/// content equals the input URLs themselves.
/// </summary>
public void GetPages_CalledWithMultipleUrls_ShouldReturnContentFetchFromEachUrl()
{
    // Arrange: the stub fetcher echoes the URL back as the page content.
    string[] urls = new string[] { "http://myserver.com/", "http:anotherserver.com/" };
    string[] expectedContent = new string[] { urls[0], urls[1] };
    var sut = new WebDownloader(u => TestingAsyncUtil.FromResult<string>(u));

    // Act
    string[] content = sut.GetPages(urls);

    // Assert: order-insensitive comparison of fetched content.
    CollectionAssert.AreEquivalent(expectedContent, content);
}
/// <summary>
/// Window startup: records the singleton instance, primes the downloader
/// configuration/data, then refreshes every UI list.
/// </summary>
private void Window_Initialized(object sender, EventArgs e)
{
    Instance = this;

    // Prime downloader state before any UI refresh needs it.
    WebDownloader.IfBlankSet();
    WebDownloader.GetAllData();

    SetupRepo();
    RefreshModList();
    RefreshDownloadsList();
    RefreshAchievements();
}
/// <summary>
/// DownloadSync should delegate to WebClient.DownloadFile with the URL
/// converted to a Uri and the file path passed through unchanged.
/// </summary>
public void DownloadSyncTest()
{
    // Arrange
    string filePath = String.Empty;
    string url = "http://somepage.com";
    var webClientStub = MockRepository.GenerateStub<WebClient>();
    var wd = new WebDownloader(webClientStub);

    // Act
    wd.DownloadSync(filePath, url);

    // Assert: exactly this overload, with these arguments, was invoked.
    webClientStub.AssertWasCalled(wc => wc.DownloadFile(
        Arg<Uri>.Is.Equal(new Uri(url)),
        Arg<String>.Is.Equal(filePath)));
}
/// <summary>
/// ExistsFile should report true for a reachable URL and false for a
/// nonexistent one. NOTE(review): this hits a live website, so it is an
/// integration test and will fail offline.
/// </summary>
public void ExistsFileTest()
{
    var target = new WebDownloader();

    // Existing page → true.
    var existingUri = "http://www.pilotvision.co.jp/index.php";
    Assert.AreEqual(true, target.ExistsFile(existingUri));

    // Missing page → false.
    var missingUri = "http://www.pilotvision.co.jp/foo.bar";
    Assert.AreEqual(false, target.ExistsFile(missingUri));
}
/// <summary>
/// Downloads the raw bundle bytes.
/// </summary>
/// <param name="bundleUri">Uri from where to retrieve the bundle.</param>
/// <returns>Byte array of the bundle content.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bundleUri"/> is null.</exception>
public byte[] DownloadRaw(Uri bundleUri)
{
    if (bundleUri == null)
    {
        throw new ArgumentNullException("bundleUri");
    }

    using (WebDownloader client = new WebDownloader())
    {
        // Propagate this instance's timeout/retry policy to the client.
        client.TimeoutMS = this.TimeoutMS;
        client.MaxRetries = m_maxRetries;
        return client.DownloadDataWithRetry(bundleUri);
    }
}
/// <summary>
/// Download should return a non-empty stream for a date-stamped zip file.
/// NOTE(review): depends on a live URL, so this is an integration test.
/// </summary>
public void DownloadTest()
{
    // Arrange: build the dated file name, e.g. aaa20130305.zip.
    string url = @"https://foobar.com/";
    string downloadFileNameFormat = "aaa{0:yyyyMMdd}.zip";
    DateTime transactionDate = new DateTime(2013, 3, 5);
    string fileName = string.Format(downloadFileNameFormat, transactionDate);
    var target = new WebDownloader();

    // Act
    MemoryStream actual = target.Download(string.Format("{0}/{1}", url, fileName));

    // Assert: something was downloaded.
    Assert.IsNotNull(actual);
    Assert.IsTrue(actual.Length > 0);
}
/// <summary>
/// Downloads a swidtag to <paramref name="filename"/>, using the built-in
/// downloader during initialization (before pluggable services exist) and
/// the provider services afterwards.
/// </summary>
/// <returns>The path the file was actually written to.</returns>
private string DownloadSwidtagToFile(string filename, Uri location)
{
    var services = _request.ProviderServices;

    if (services == null)
    {
        // During initialization the pluggable downloader isn't available;
        // fall back to the one built into this assembly.
        filename = new WebDownloader().DownloadFile(location, filename, SwidDownloadTimeout, false, DownloadRequest.As<Request>());
    }
    else
    {
        // Otherwise delegate to the pluggable implementation.
        filename = services.DownloadFile(location, filename, SwidDownloadTimeout, false, DownloadRequest);
    }

    return filename;
}
/// <summary>
/// Sequence step: starts downloading the application/version list,
/// unless automatic updates are disabled in the configuration.
/// </summary>
private static void Sequence_AppListDownloadStart()
{
    // Ignore when automatic updates are turned off.
    if (!ConfigManager.System.ApplicationCore.NewVersionAutoUpdate.Value)
    {
        return;
    }

    // Kick off the version-list download.
    downloader_ = new WebDownloader();
    downloader_.DownloadString(ConfigManager.Fixed.ApplicationListUrl.Value);

    // Advance to the next sequence step.
    seq_++;
}
/// <summary>
/// Fetches events from the ATND API for the given month and keyword.
/// </summary>
/// <param name="ym">Target month encoded as yyyyMM.</param>
/// <param name="keyword">Search keyword.</param>
/// <returns>Parsed events, or an empty list on network failure.</returns>
public override IList<CommonEvent> GetEvents(int ym, string keyword)
{
    var apiUrl = string.Format(BaseUrl + "&count={0}&keyword={1}&ym={2}", ReadCount, keyword, ym);
    var downloader = new WebDownloader { Encoding = Encoding.UTF8 };

    try
    {
        var json = downloader.DownloadString(apiUrl);
        return EventAtndJsonParser.Parse(json);
    }
    catch (WebException)
    {
        // FIX: the exception was bound to an unused variable 'e' (compiler
        // warning CS0168). Network failure is deliberately treated as
        // "no events": callers receive an empty list.
        return new List<CommonEvent>();
    }
}
/// <summary>
/// Downloads the raw bundle bytes, advertising the bundle content types
/// this client accepts.
/// </summary>
/// <param name="bundleUri">Uri from where to retrieve the bundle.</param>
/// <returns>Byte array of the bundle content.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bundleUri"/> is null.</exception>
public byte[] DownloadRaw(Uri bundleUri)
{
    if (bundleUri == null)
    {
        throw new ArgumentNullException("bundleUri");
    }

    using (WebDownloader client = new WebDownloader())
    {
        // Propagate this instance's timeout/retry policy to the client.
        client.TimeoutMS = this.TimeoutMS;
        client.MaxRetries = m_maxRetries;
        // Restrict the response to the bundle formats we can handle.
        client.Headers.Add(HttpRequestHeader.Accept, "application/octet-stream,application/x-pkcs7-certificates,application/pkcs7-mime");
        return client.DownloadDataWithRetry(bundleUri);
    }
}
/// <summary>
/// Gets the song's lyrics asynchronously.
/// </summary>
/// <returns>Lyrics, or string.Empty when the song has none.</returns>
public async Task<string> GetLyricsAsync()
{
    // Guard clause instead of wrapping the whole body in the condition.
    if (!HasLyrics)
    {
        return string.Empty;
    }

    var wd = new WebDownloader($@"https://www.metal-archives.com/release/ajax-view-lyrics/id/{Id}");
    var document = new HtmlDocument();
    document.LoadHtml(await wd.DownloadDataAsync());

    // The endpoint returns an HTML fragment; its text content is the lyrics.
    return document.DocumentNode.InnerText.Trim();
}
/// <summary>
/// If <see cref="NotesFullUrl"/> is set, downloads and returns the band's
/// full notes asynchronously; otherwise returns string.Empty.
/// </summary>
/// <returns>Band's notes as plain text.</returns>
public async Task<string> GetFullNotesAsync()
{
    if (string.IsNullOrEmpty(NotesFullUrl))
    {
        return string.Empty;
    }

    var downloader = new WebDownloader(NotesFullUrl);
    var html = await downloader.DownloadDataAsync();

    // Strip the markup: the notes are the text content of the page.
    var doc = new HtmlDocument();
    doc.LoadHtml(html);
    return doc.DocumentNode.InnerText;
}
/// <summary>
/// Downloads the page at args[2]. With "-save" as args[3], writes the page
/// to the file named by args[4]; otherwise returns the page content.
/// </summary>
/// <param name="args">Command-line arguments; args[2] is the URL.</param>
/// <returns>The page content, or string.Empty when it was saved to a file.</returns>
public string actionGet(string[] args)
{
    string result = string.Empty;
    IWebDownloader downloader = new WebDownloader();
    string webpage = downloader.download(args[2]);

    // BUG FIX: the original guard checked args.Length >= 4 but then read
    // args[4], throwing IndexOutOfRangeException when exactly four arguments
    // were supplied. The save path requires five arguments.
    if (args.Length >= 5 && args[3] == "-save")
    {
        saveInFile(webpage, args[4]);
    }
    else
    {
        result = webpage;
    }

    return result;
}
/// <summary>
/// Fetches events from the Doorkeeper API for the given month and keyword.
/// </summary>
/// <param name="ym">Target month encoded as yyyyMM.</param>
/// <param name="keyword">Search keyword.</param>
/// <returns>Parsed events, or an empty list on network failure.</returns>
public override IList<CommonEvent> GetEvents(int ym, string keyword)
{
    // First and last day of the month encoded in 'ym' (yyyyMM).
    var since = new DateTime((int)Math.Floor(ym / (decimal)100), ym % 100, 1);
    var until = since.AddMonths(1).AddDays(-1);
    var apiUrl = string.Format(BaseUrl + "&since={0}&until={1}&q={2}", since.ToString("O"), until.ToString("O"), keyword);
    var downloader = new WebDownloader { Encoding = Encoding.UTF8 };

    try
    {
        var str = downloader.DownloadString(apiUrl);
        return DoorkeeperJsonParser.Parse(str);
    }
    catch (WebException)
    {
        // FIX: the exception was bound to an unused variable 'e' (compiler
        // warning CS0168). Network failure intentionally yields an empty list.
        return new List<CommonEvent>();
    }
}
/// <summary>
/// Sequence step: once the application list has downloaded, parse it, pick
/// the newest available versions, and start downloading the update file.
/// </summary>
private static void Sequence_AppListCheck()
{
    // Wait until the application-list download finishes.
    if (!downloader_.IsComplete)
    {
        return;
    }

    var app_infos = SystemInfo.ParseFromXml(downloader_.ResultString);

    // End the sequence when the version list could not be parsed.
    if (app_infos == null)
    {
        seq_ = Sequence.Complete;
        return;
    }

    // Download URLs of versions newer than the current one, newest first.
    // FIX: materialize the query once — the original deferred query was
    // enumerated twice (Count() and then DownloadFile), re-running the
    // filtering and sorting.
    var newVersionUrls = (from info in app_infos
                          where info.Name == ConfigManager.Fixed.ApplicationName.Value
                          from ver in info.Versions
                          where ver.IsNewVersion(Program.Version)
                          orderby ver.ToVersionCode() descending
                          select ver.DownloadUrl).ToList();

    // Nothing newer than the current version: we are done.
    if (newVersionUrls.Count == 0)
    {
        seq_ = Sequence.Complete;
        return;
    }

    // Prepare the workspace directory.
    Shell.mkdir(WorkspacePath);

    // Start downloading the update file.
    downloader_ = new WebDownloader();
    downloader_.DownloadFile(newVersionUrls, DownloadSavePath);

    // Advance to the next sequence step.
    seq_++;
}
/// <summary>
/// Searches item by name async.
/// </summary>
/// <param name="name">Item's name</param>
/// <returns>List of items result - without pagination, all rows at once</returns>
public async Task<IEnumerable<T>> ByNameAsync(string name)
{
    List<T> items = new List<T>();
    _configurator.Parameters["query"] = name;
    var wd = new WebDownloader(_configurator.Url, _configurator.Parameters);

    int page = 0;
    List<T> pageItems;
    do
    {
        // The endpoint pages results via the iDisplayStart parameter.
        _configurator.Parameters["iDisplayStart"] = page++.ToString();
        var response = await wd.DownloadDataAsync();

        // FIX: materialize the parse result once. The original kept it as
        // IEnumerable and enumerated it twice (AddRange + Count()), which
        // re-runs any deferred parsing work.
        pageItems = ProcessParse(response).ToList();
        items.AddRange(pageItems);
    } while (pageItems.Count != 0);

    return items;
}
/// <summary>
/// Downloads <paramref name="url"/> repeatedly and returns one elapsed-time
/// line per download.
/// </summary>
/// <param name="url">URL to download.</param>
/// <param name="multiple">Number of downloads, as a decimal string.</param>
/// <returns>One TimeSpan per line, one line per download.</returns>
public string getURLMultipleTime(string url, string multiple)
{
    Stopwatch timer = new Stopwatch();
    IWebDownloader downloader = new WebDownloader();

    // FIX: parse the count once instead of on every loop iteration, and
    // build the result with StringBuilder instead of O(n^2) string +=.
    // The original also accumulated a total TimeSpan ('compteur') that was
    // never read; that dead code is removed.
    int iterations = Int32.Parse(multiple);
    var result = new StringBuilder();

    for (int i = 0; i < iterations; i++)
    {
        timer.Start();
        downloader.download(url);
        timer.Stop();

        result.Append(timer.Elapsed).Append(Environment.NewLine);
        timer.Reset();
    }

    return result.ToString();
}
/// <summary>
/// Downloads a zipped subtitle from <paramref name="downloadLink"/> and
/// unzips it into the temp directory under <paramref name="fileName"/>.
/// </summary>
/// <param name="downloadLink">URL of the zipped subtitle.</param>
/// <param name="fileName">Name of the extracted subtitle file.</param>
/// <returns>FileInfo for the extracted subtitle.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="downloadLink"/> is null.</exception>
public FileInfo DownloadSubtitle(string downloadLink, string fileName)
{
    if (null == downloadLink)
        throw new ArgumentNullException(nameof(downloadLink));

    string subtitleFile = Path.Combine(Path.GetTempPath(), fileName);

    // FIX: the original called Path.GetTempFileName() — which creates an
    // empty file on disk — and then only ever deleted it in a finally block.
    // The download went straight from memory to UnzipSubtitleToFile, so the
    // temp file was pure churn and has been removed.
    var webClient = new WebDownloader();
    var data = webClient.DownloadData(downloadLink);
    using (var fileStream = new MemoryStream(data))
    {
        UnzipSubtitleToFile(fileStream, subtitleFile);
    }

    return new FileInfo(subtitleFile);
}
// CEF before-download hook: consults the Listener for a destination and,
// if one is provided, continues the download and tracks it.
void IDownloadHandler.OnBeforeDownload(IWebBrowser chromiumWebBrowser, IBrowser browser, DownloadItem downloadItem, IBeforeDownloadCallback callback)
{
    // No listener registered: nothing can decide where to save, so bail out.
    if (Listener == null) { return; }
    try
    {
        if (!callback.IsDisposed)
        {
            // Dispose the CEF callback exactly once, whatever path we take.
            using (callback)
            {
                var loader = new WebDownloader();
                // Let the listener inspect the download and (optionally)
                // populate loader.Data with a destination.
                Listener.OnDownload(downloadItem.Url, downloadItem.SuggestedFileName, downloadItem.TotalBytes, loader);
                // loader.Data set => the listener accepted the download.
                if (loader.Data != null)
                {
                    // NOTE(review): second arg false presumably suppresses the
                    // "Save As" dialog — confirm against CefSharp docs.
                    callback.Continue(loader.Data.Destination, false);
                    // Track the active download by CEF's download id.
                    _downloads[downloadItem.Id] = loader.Data;
                    UpdateIsAnyDownloadActive();
                }
            }
        }
    }
    catch (Exception e)
    {
        // Never let an exception escape into the CEF callback chain.
        Logging.Error(e);
    }
}
// Registers a downloader for key 'k' and appends 'callback' to the list of
// callbacks waiting on that key. Thread-safe via the PendingImageDownloaders
// lock, which guards both dictionaries.
void AddToPendingImageDownloaders(string k, WebDownloader downloader, Action<byte[]> callback)
{
    lock (PendingImageDownloaders)
    {
        // Latest downloader wins for this key.
        PendingImageDownloaders[k] = downloader;

        // FIX: single TryGetValue lookup instead of ContainsKey + indexer
        // (the original performed two hash lookups for the same key).
        if (PendingImageCallbacks.TryGetValue(k, out var callbacks))
        {
            callbacks.Add(callback);
        }
        else
        {
            PendingImageCallbacks[k] = new List<Action<byte[]>> { callback };
        }
    }
}
/// <summary>
/// GetContent should report UrlNotFound for a URL that does not exist.
/// NOTE(review): hits a live host, so this is an integration test.
/// </summary>
public void GetContent_UrlIsNotFound_ResponseStatusIsUrlNotFound()
{
    // Act
    WebDownloaderResult result = WebDownloader.GetContent("http://laboru.co/hello.tx");

    // Assert
    Assert.AreEqual(true, result.Status == DownloadResultStatus.UrlNotFound);
}
/// <summary>
/// Console entry point: asks for a Craigslist city, lists its categories,
/// lets the user pick one, scrapes the listing links for that category and
/// prints them.
/// </summary>
static void Main(string[] args)
{
    try
    {
        // Ask which city to scrape.
        Console.WriteLine("Please enter the city you would like to scrape information from:");
        var craigsListCity = Console.ReadLine() ?? String.Empty;

        WebData webData = new WebDataBuilder()
            .WithCity(craigsListCity)
            .Build();

        WebDownloader downloadContent = new WebDownloader();
        Content = downloadContent.DownloadContentFrom(webData);

        // Extract the category list from the downloaded page.
        CategoryScraper scrapeCategory = new CategoryScraper();
        Categories = scrapeCategory.GetCategoryFrom(Content);

        var userCategory = "sss";
        if (Categories.Any())
        {
            // Entries come in pairs (value at i, label at i + 1).
            int categoryCount = Categories.Count;
            for (int i = 0; i < categoryCount; i += 2)
            {
                Console.WriteLine("Category: {0}, Value: {1}", Categories[i + 1], Categories[i]);
                Console.WriteLine();
            }
            Console.Write("Please enter the \"Value\" of the category you'd like to scrape elements from:");
            userCategory = Console.ReadLine() ?? String.Empty;
        }
        else
        {
            Console.WriteLine("There were no elements found in the category list.");
            Console.Write("A default category will be chosen for you.");
        }

        // Re-download, now scoped to the chosen category.
        webData = new WebDataBuilder()
            .WithCity(craigsListCity)
            .WithCategory(userCategory)
            .Build();
        Content = downloadContent.DownloadContentFrom(webData);

        //Need to check for errors on userCategory
        // https://boston.craigslist.org/search //link example for city only
        // https://boston.craigslist.org/search/cta //link example w/ category
        ScrapeCriteria scrapeCriteria = new ScrapeCriteriaBuilder()
            .WithData(Content)
            .WithRegex(@"<a href=""(.*?)"" data-id=""(.*?)"" class=""(.*?)"">(.*?)</a>") //this regex pattern works
            .WithRegexOption(RegexOptions.ExplicitCapture)
            .WithParts(new ScrapeCriteriaPartBuilder()
                .WithRegex(@">(.*?)<")
                .WithRegexOption(RegexOptions.Singleline)
                .Build())
            .WithParts(new ScrapeCriteriaPartBuilder()
                .WithRegex(@"href=""(.*?)""")
                .WithRegexOption(RegexOptions.Singleline)
                .Build())
            .Build();

        Scraper scraper = new Scraper();
        var scrapedElements = scraper.Scrape(scrapeCriteria);

        if (scrapedElements.Any())
        {
            // Blank line after every second printed element for readability.
            int printed = 1;
            foreach (var scrapedElement in scrapedElements)
            {
                Console.WriteLine(scrapedElement);
                if (printed % 2 == 0)
                {
                    Console.WriteLine();
                }
                printed++;
            }
        }
        else
        {
            Console.WriteLine("There were no matches found for the specified scrape Criteria.");
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("There was an error found: {0}", ex.Message);
    }

    Console.WriteLine();
    Console.WriteLine("The program will close shortly, please acknowledge by pressing any key.");
    Console.ReadKey();
}
/// <summary>
/// Downloads a zipped subtitle from SiteUrl + <paramref name="downloadLink"/>
/// and unzips it into the temp directory under <paramref name="fileName"/>.
/// </summary>
/// <returns>A single-element sequence containing the extracted file.</returns>
private static IEnumerable<FileInfo> DownloadSubtitle(string downloadLink, string fileName)
{
    string address = SiteUrl + downloadLink;
    string subtitleFilePath = Path.Combine(Path.GetTempPath(), fileName);

    // Download into memory, then extract straight to the target path.
    var data = new WebDownloader().DownloadData(address);
    using (var fileStream = new MemoryStream(data))
    {
        UnzipSubtitleToFile(fileStream, subtitleFilePath);
    }

    return new[] { new FileInfo(subtitleFilePath) };
}
/// <summary>
/// Creates a Channel 9 category scraper backed by a fresh web downloader.
/// </summary>
/// <returns>A new <see cref="CategoryScraper"/>.</returns>
private static CategoryScraper CreateChannel9CategoryBrowser()
{
    var webDownloader = new WebDownloader();
    return new CategoryScraper(webDownloader);
}
/// <summary>
/// Gets an image with the given path and given parameters
/// when done the given callback is called
/// </summary>
public void GetImage(string path, Action<byte[]> callback, IDictionary<string, object> parameters = null)
{
    // TODO: this must be more intelligent to avoid multiple download thread of the same image
    // Cache key = path plus every parameter key/value appended.
    var cacheKey = path;
    if (parameters == null) parameters = new Dictionary<string, object>();
    foreach (var par in parameters)
    {
        cacheKey += "_{0}_{1}".FormatWith(par.Key, par.Value);
    }

    // Serve from cache when present; cached entries are base64-encoded bytes.
    var cached = GetCache(cacheKey);
    if (cached != null)
    {
        callback.Invoke(Convert.FromBase64String(cached));
        return;
    }

    var client = CreateClient();
    // NOTE(review): this mutates the caller-supplied dictionary by adding
    // "access_token" — confirm callers never reuse the same dictionary.
    parameters.Add("access_token", client.AccessToken);
    var url = client.ResolveUrl(path, parameters);

    var downloader = new WebDownloader { Url = url, ReadBinary = true };
    downloader.Completed += (a, b) =>
    {
        // Cache the bytes, then fan the result out to all pending callbacks
        // registered for this cache key (not just this call's callback).
        var bytes = downloader.DataBytes;
        SetCache(cacheKey, Convert.ToBase64String(bytes));
        //callback.Invoke(bytes);
        CompletePendingImageDownloader(cacheKey, bytes);
    };
    downloader.Failed += (a, b) =>
    {
        // nothing — failures are silently dropped; break only under a debugger.
        if (System.Diagnostics.Debugger.IsAttached)
        {
            System.Diagnostics.Debugger.Break();
        }
    };
    downloader.Download();

    // add to pending list so the Completed handler can find this callback.
    AddToPendingImageDownloaders(cacheKey, downloader, callback);
}
/// <summary>
/// Downloads a swidtag to <paramref name="filename"/>. Uses the built-in
/// downloader while provider services are unavailable (during
/// initialization), the pluggable one otherwise.
/// </summary>
/// <returns>The path the file was actually written to.</returns>
private string DownloadSwidtagToFile(string filename, Uri location)
{
    var services = _request.ProviderServices;

    if (services == null)
    {
        // During initialization the pluggable downloader isn't available;
        // fall back to the implementation built into this assembly.
        filename = new WebDownloader().DownloadFile(location, filename, SwidDownloadTimeout, false, DownloadRequest.As<Request>());
    }
    else
    {
        // Otherwise delegate to the pluggable implementation.
        filename = services.DownloadFile(location, filename, SwidDownloadTimeout, false, DownloadRequest);
    }

    return filename;
}
/// <summary>
/// Smoke test: verifies a WebDownloader can be constructed without throwing.
/// No further behavior is asserted.
/// </summary>
public void TestDownload()
{
    var downloader = new WebDownloader();
}