public FormDownloadTags()
{
    InitializeComponent();
    client = new ExtendedWebClient();
    client.DownloadProgressChanged += new System.Net.DownloadProgressChangedEventHandler(client_DownloadProgressChanged);
    client.DownloadFileCompleted += new AsyncCompletedEventHandler(client_DownloadFileCompleted);
}
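// ExtendedWebClient itself is not part of this listing. Based on the members used across
// these snippets (an instance Timeout, the static EnableCookie/EnableCompression/AcceptLanguage
// switches, and the standard WebClient async events), a minimal sketch could look like the
// following. This is an illustrative assumption, not the project's actual implementation.
public class ExtendedWebClient : System.Net.WebClient
{
    // Per-request timeout in milliseconds, applied in GetWebRequest(). Default is an assumption.
    public int Timeout { get; set; } = 60000;

    // Global switches referenced from FormMain; defaults are assumptions.
    public static bool EnableCookie { get; set; }
    public static bool EnableCompression { get; set; }
    public static string AcceptLanguage { get; set; }

    private static readonly System.Net.CookieContainer _cookies = new System.Net.CookieContainer();

    protected override System.Net.WebRequest GetWebRequest(System.Uri address)
    {
        var request = base.GetWebRequest(address);
        request.Timeout = Timeout;

        if (request is System.Net.HttpWebRequest http)
        {
            if (EnableCookie) http.CookieContainer = _cookies;
            if (EnableCompression) http.AutomaticDecompression =
                System.Net.DecompressionMethods.GZip | System.Net.DecompressionMethods.Deflate;
            if (!string.IsNullOrEmpty(AcceptLanguage)) http.Headers["Accept-Language"] = AcceptLanguage;
        }
        return request;
    }
}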
public void TestDownloadTagsXml()
{
    string url = @"https://yande.re/tag/index.xml?limit=0";
    string filename = @"test-tag.xml";
    ExtendedWebClient client = new ExtendedWebClient();
    client.DownloadFile(url, filename);
    Assert.IsTrue(File.Exists(filename));
}
public void DoBatchJob(BindingList<DanbooruBatchJob> batchJob)
{
    ToggleBatchJobButtonDelegate bjd = new ToggleBatchJobButtonDelegate(ToggleBatchJobButton);
    UpdateUiDelegate del = new UpdateUiDelegate(UpdateUi);
    UpdateUiDelegate2 del2 = new UpdateUiDelegate2(UpdateUi);
    ExtendedWebClient _clientPost = new ExtendedWebClient();

    if (batchJob != null)
    {
        UpdateStatus2("Starting Batch Job");
        for (int i = 0; i < batchJob.Count; i++)
        {
            batchJob[i].CurrentPage = 0;
            if (!batchJob[i].isCompleted)
            {
                UpdateLog("DoBatchJob", "Processing Batch Job#" + i);
                DanbooruPostDao prevDao = null;
                bool flag = true;
                int currPage = 0;
                int postCount = 0;

                do
                {
                    // stop/pause event handling outside
                    _pauseEvent.WaitOne(Timeout.Infinite);
                    if (_shutdownEvent.WaitOne(0))
                    {
                        batchJob[i].Status = " ==> Stopped.";
                        // toggle button
                        BeginInvoke(bjd, new object[] { true });
                        UpdateLog("DoBatchJob", "Batch Job Stopped.");
                        UpdateStatus2("Batch Job Stopped.");
                        return;
                    }

                    DanbooruPostDao d = null;
                    int imgCount = 0;
                    int skipCount = 0;
                    string url;
                    string query = "";

                    #region Construct the searchParam
                    if (batchJob[i].Provider.BoardType == BoardType.Danbooru || batchJob[i].Provider.BoardType == BoardType.Shimmie2)
                    {
                        currPage = batchJob[i].CurrentPage;
                    }
                    else if (batchJob[i].Provider.BoardType == BoardType.Gelbooru)
                    {
                        if (batchJob[i].Provider.Preferred == PreferredMethod.Html)
                        {
                            currPage = batchJob[i].CurrentPage * postCount;
                        }
                        else
                        {
                            currPage = batchJob[i].CurrentPage;
                        }
                    }

                    DanbooruSearchParam searchParam = GetSearchParamsFromJob(batchJob[i], currPage);
                    url = batchJob[i].Provider.GetQueryUrl(searchParam);
                    #endregion Construct the searchParam

                    try
                    {
                        #region Get and load the image list
                        batchJob[i].Status = "Getting list for page: " + searchParam.Page;
                        BeginInvoke(del);
                        UpdateLog("DoBatchJob", "Downloading list: " + url);
                        d = GetBatchImageList(url, query, batchJob[i]);

                        if (d == null)
                        {
                            // Cannot get list.
                            UpdateLog("DoBatchJob", "Cannot load list");
                            batchJob[i].Status = "Cannot load list.";
                            batchJob[i].isCompleted = false;
                            batchJob[i].isError = true;
                            flag = false;
                        }
                        else if (d.Posts == null || d.Posts.Count == 0)
                        {
                            // No more image
                            UpdateLog("DoBatchJob", "No more image.");
                            batchJob[i].Status = "No more image.";
                            flag = false;
                            //break;
                        }
                        else
                        {
                            if (prevDao != null)
                            {
                                // identical data returned, probably no more new image.
                                if (prevDao.RawData != null && prevDao.RawData.Equals(d.RawData))
                                {
                                    UpdateLog("DoBatchJob", "Identical list, probably last page.");
                                    batchJob[i].Status = "Identical list, probably last page.";
                                    flag = false;
                                    //break;
                                }
                            }
                            prevDao = d;

                            batchJob[i].Total = d.PostCount;
                            batchJob[i].CurrentPageTotal = d.Posts.Count;
                            batchJob[i].CurrentPageOffset = d.Offset;
                            #endregion Get and load the image list

                            postCount = d.Posts.Count;
                            foreach (DanbooruPost post in d.Posts)
                            {
                                // Update progress bar
                                object[] myArray = new object[2];
                                myArray[0] = batchJob[i].ProcessedTotal;
                                myArray[1] = d.PostCount < batchJob[i].Limit ? d.PostCount : batchJob[i].Limit;
                                BeginInvoke(del2, myArray);

                                // thread handling
                                _pauseEvent.WaitOne(Timeout.Infinite);
                                if (_shutdownEvent.WaitOne(0))
                                {
                                    batchJob[i].Status = " ==> Stopped.";
                                    // toggle button
                                    BeginInvoke(bjd, new object[] { true });
                                    UpdateLog("DoBatchJob", "Batch Job Stopped.");
                                    UpdateStatus2("Batch Job Stopped.");
                                    return;
                                }

                                // check if have url and post is not deleted
                                if (string.IsNullOrWhiteSpace(post.FileUrl) && (post.Status != "deleted" || chkProcessDeletedPost.Checked))
                                {
                                    ResolveFileUrlBatch(_clientPost, post);
                                }

                                // Choose the correct urls
                                var targetUrl = post.FileUrl;
                                if (_ImageSize == "Thumb" && !String.IsNullOrWhiteSpace(post.PreviewUrl))
                                {
                                    targetUrl = post.PreviewUrl;
                                }
                                else if (_ImageSize == "Jpeg" && !String.IsNullOrWhiteSpace(post.JpegUrl))
                                {
                                    targetUrl = post.JpegUrl;
                                }
                                else if (_ImageSize == "Sample" && !String.IsNullOrWhiteSpace(post.SampleUrl))
                                {
                                    targetUrl = post.SampleUrl;
                                }

                                batchJob[i].Status = "Downloading: " + targetUrl;
                                BeginInvoke(del);

                                //if (post.Provider == null) post.Provider = cbxProvider.Text;
                                //if (post.Query == null) post.Query = txtQuery.Text;
                                //if (post.SearchTags == null) post.SearchTags = txtTags.Text;

                                bool download = true;

                                // check if blacklisted
                                if (download && post.Hidden)
                                {
                                    ++skipCount;
                                    ++batchJob[i].Skipped;
                                    download = false;
                                    UpdateLog("DoBatchJob", "Download skipped, contains blacklisted tag: " + post.Tags + " Url: " + targetUrl);
                                }

                                string filename = "";
                                if (download && !string.IsNullOrWhiteSpace(targetUrl))
                                {
                                    var format = new DanbooruFilenameFormat()
                                    {
                                        FilenameFormat = batchJob[i].SaveFolder,
                                        Limit = Convert.ToInt32(txtFilenameLength.Text),
                                        BaseFolder = txtSaveFolder.Text,
                                        MissingTagReplacement = txtTagReplacement.Text,
                                        ArtistGroupLimit = Convert.ToInt32(txtArtistTagGrouping.Text),
                                        CharacterGroupLimit = Convert.ToInt32(txtCharaTagGrouping.Text),
                                        CopyrightGroupLimit = Convert.ToInt32(txtCopyTagGrouping.Text),
                                        CircleGroupLimit = Convert.ToInt32(txtCircleTagGrouping.Text),
                                        FaultsGroupLimit = Convert.ToInt32(txtFaultsTagGrouping.Text),
                                        IgnoredTags = DanbooruTagsDao.Instance.ParseTagsString(txtIgnoredTags.Text.Replace(Environment.NewLine, " ")),
                                        IgnoredTagsRegex = txtIgnoredTags.Text.Trim().Replace(Environment.NewLine, "|"),
                                        IgnoreTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
                                        IsReplaceMode = chkReplaceMode.Checked,
                                        IgnoredTagsOnlyForGeneral = chkIgnoreForGeneralTag.Checked,
                                        TagReplaceUnderscoreToSpace = chkIsReplaceUnderscoreTag.Checked
                                    };
                                    string extension = Helper.getFileExtensions(targetUrl);
                                    filename = Helper.MakeFilename(format, post) + extension;
                                }

                                // check if exist
                                if (download && !chkOverwrite.Checked)
                                {
                                    if (File.Exists(filename))
                                    {
                                        ++skipCount;
                                        ++batchJob[i].Skipped;
                                        download = false;
                                        UpdateLog("DoBatchJob", "Download skipped, file exists: " + filename);
                                    }
                                }

                                if (download && String.IsNullOrWhiteSpace(targetUrl))
                                {
                                    ++skipCount;
                                    ++batchJob[i].Skipped;
                                    download = false;
                                    UpdateLog("DoBatchJob", "Download skipped, ID: " + post.Id + " No file_url, probably deleted");
                                }

                                Uri uri = null;
                                if (download && !Uri.TryCreate(targetUrl, UriKind.RelativeOrAbsolute, out uri))
                                {
                                    ++skipCount;
                                    ++batchJob[i].Skipped;
                                    download = false;
                                    UpdateLog("DoBatchJob", "Download skipped, ID: " + post.Id + " Invalid URL: " + targetUrl);
                                }

                                #region download
                                if (download)
                                {
                                    imgCount = DoDownloadBatch(targetUrl, batchJob[i], post, filename);
                                }
                                #endregion download

                                // check if more than available post
                                if (batchJob[i].ProcessedTotal >= d.PostCount && d.PostCount != 0)
                                {
                                    UpdateLog("DoBatchJob", "No more post.");
                                    flag = false;
                                    break;
                                }

                                // check if over given limit
                                if (batchJob[i].ProcessedTotal >= batchJob[i].Limit)
                                {
                                    UpdateLog("DoBatchJob", "Limit Reached.");
                                    flag = false;
                                    break;
                                }

                                // check batch job delay
                                int delay = 0;
                                Int32.TryParse(Properties.Settings.Default.BatchJobDelay, out delay);
                                if ((Properties.Settings.Default.DelayIncludeSkipped || download) && delay > 0)
                                {
                                    UpdateLog("DoBatchJob", String.Format("Waiting for {0}ms for the next post.", delay));
                                    Thread.Sleep(delay);
                                }
                            }
                        }
                        batchJob[i].Status = " ==> Done.";
                    }
                    catch (Exception ex)
                    {
                        string message = ex.Message;
                        string responseMessage = "";
                        if (ex.InnerException != null)
                        {
                            message += Environment.NewLine + "Inner: " + ex.InnerException.Message;
                        }
                        message += Environment.NewLine + "Stack Trace: " + Environment.NewLine + ex.StackTrace;
                        message += Environment.NewLine + "Query: " + batchJob[i].TagQuery;

                        batchJob[i].isError = true;
                        batchJob[i].isCompleted = false;

                        if (ex.GetType() == typeof(System.Net.WebException))
                        {
                            System.Net.WebException wex = (System.Net.WebException)ex;
                            if (wex.Status == WebExceptionStatus.ProtocolError &&
                                wex.Response.Headers.AllKeys.Contains("Status") &&
                                wex.Response.Headers["Status"].ToString() == "500")
                            {
                                using (var response = wex.Response.GetResponseStream())
                                {
                                    if (response != null)
                                    {
                                        var option = new DanbooruPostDaoOption()
                                        {
                                            Provider = _currProvider,
                                            Query = query,
                                            SearchTags = batchJob[i].TagQuery,
                                            Url = url,
                                            Referer = "",
                                            BlacklistedTags = TagBlacklist,
                                            BlacklistedTagsRegex = TagBlacklistRegex,
                                            BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
                                            IgnoredTags = TagIgnore,
                                            IgnoredTagsRegex = TagIgnoreRegex,
                                            IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
                                            IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
                                        };
                                        var resp = new DanbooruPostDao(response, option);
                                        responseMessage = resp.ResponseMessage;
                                        flag = false;
                                    }
                                }
                            }
                        }

                        if (ex.Message.Contains("(400)") || ex.Message.Contains("(403)") || ex.Message.Contains("(500)") || ex.Message.Contains("resolved"))
                        {
                            flag = false;
                        }

                        batchJob[i].Status = " ==> Error: " + (string.IsNullOrWhiteSpace(responseMessage) ? ex.Message : responseMessage) + Environment.NewLine;

                        if (!string.IsNullOrWhiteSpace(responseMessage))
                            UpdateLog("DoBatchJob", "Server Message: " + responseMessage, ex);
                        else
                            UpdateLog("DoBatchJob", "Error: " + message, ex);

                        if (cbxAbortOnError.Checked)
                        {
                            MessageBox.Show(message, "Batch Download");
                            break;
                        }
                    }
                    finally
                    {
                        BeginInvoke(del);
                        {
                            // Update progress bar
                            object[] myArray = new object[2];
                            myArray[0] = batchJob[i].ProcessedTotal;
                            if (d != null)
                            {
                                myArray[1] = d.PostCount < batchJob[i].Limit ? d.PostCount : batchJob[i].Limit;
                            }
                            else
                            {
                                myArray[1] = -1;
                            }
                            BeginInvoke(del2, myArray);
                        }
                    }
                    ++batchJob[i].CurrentPage;
                } while (flag);

                UpdateLog("DoBatchJob", "Batch Job #" + i + ": Done");
                if (batchJob[i].isError)
                {
                    batchJob[i].isCompleted = false;
                }
                else
                {
                    batchJob[i].isCompleted = true;
                }
                BeginInvoke(del);
            }
        }
    }

    BeginInvoke(bjd, new object[] { true });
    UpdateStatus2("Batch Job Completed!", true);
    {
        // hide progress bar
        object[] myArray = new object[2];
        myArray[0] = -1;
        myArray[1] = -1;
        BeginInvoke(del2, myArray);
    }
}
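// DoBatchJob blocks on _pauseEvent/_shutdownEvent and reports back to the UI through
// BeginInvoke, so it is intended to run on a worker thread. A minimal sketch of how it
// might be started and stopped is shown below. The _pauseEvent/_shutdownEvent fields
// already exist on the form and are restated here only to keep the sketch self-contained;
// the StartBatchJobSketch/StopBatchJobSketch names and the _batchThread field are
// hypothetical, not the project's actual members.
private ManualResetEvent _pauseEvent = new ManualResetEvent(true);     // signaled = running, reset = paused
private ManualResetEvent _shutdownEvent = new ManualResetEvent(false); // signaled = stop requested
private Thread _batchThread;                                           // hypothetical field holding the worker thread

private void StartBatchJobSketch(BindingList<DanbooruBatchJob> jobs)
{
    _shutdownEvent.Reset();
    _pauseEvent.Set();
    _batchThread = new Thread(() => DoBatchJob(jobs)) { IsBackground = true };
    _batchThread.Start();
}

private void StopBatchJobSketch()
{
    // DoBatchJob polls _shutdownEvent between pages and between posts and returns once it is set.
    _shutdownEvent.Set();
    _pauseEvent.Set(); // make sure a paused job can observe the shutdown request
}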
public FormMain()
{
    if (Properties.Settings.Default.UpdateRequired)
    {
        Program.Logger.Info("Upgrading configuration");
        Properties.Settings.Default.Upgrade();
        Properties.Settings.Default.UpdateRequired = false;
        Properties.Settings.Default.Save();
    }

    InitializeComponent();

    // Get assembly version
    Assembly assembly = Assembly.GetExecutingAssembly();
    FileVersionInfo fvi = FileVersionInfo.GetVersionInfo(assembly.Location);
    this.Text += fvi.ProductVersion;
#if DEBUG
    this.Text += " DEBUG";
#endif
    Program.Logger.Info("Starting up " + this.Text);

    #region init webclients
    _clientList = new ExtendedWebClient();
    _clientList.DownloadProgressChanged += new DownloadProgressChangedEventHandler(clientList_DownloadProgressChanged);
    _clientList.DownloadFileCompleted += new AsyncCompletedEventHandler(clientList_DownloadFileCompleted);
    _clientList.DownloadDataCompleted += new DownloadDataCompletedEventHandler(clientList_DownloadDataCompleted);

    _clientThumb = new ExtendedWebClient();
    _clientThumb.DownloadDataCompleted += new DownloadDataCompletedEventHandler(clientThumb_DownloadDataCompleted);
    _clientThumb.DownloadProgressChanged += new DownloadProgressChangedEventHandler(clientThumb_DownloadProgressChanged);

    _clientFile = new ExtendedWebClient();
    _clientFile.DownloadFileCompleted += new AsyncCompletedEventHandler(clientFile_DownloadFileCompleted);
    _clientFile.DownloadProgressChanged += new DownloadProgressChangedEventHandler(clientFile_DownloadProgressChanged);

    _clientBatch = new ExtendedWebClient();
    //_clientBatch.DownloadProgressChanged += new DownloadProgressChangedEventHandler(_clientBatch_DownloadProgressChanged);
    //_clientBatch.DownloadFileCompleted += new AsyncCompletedEventHandler(_clientBatch_DownloadFileCompleted);
    #endregion init webclients

    LoadProviderList();

    // Auto populate Order and Rating
    cbxOrder.DataSource = new BindingSource(Constants.OrderBy, null);
    cbxOrder.DisplayMember = "Key";
    cbxOrder.ValueMember = "Value";
    cbxRating.DataSource = new BindingSource(Constants.Rating, null);
    cbxRating.DisplayMember = "Key";
    cbxRating.ValueMember = "Value";

    txtFilenameHelp.Text = "%provider% = provider Name" + Environment.NewLine +
                           "%id% = Image ID" + Environment.NewLine +
                           "%tags% = Image Tags" + Environment.NewLine +
                           "%rating% = Image Rating" + Environment.NewLine +
                           "%md5% = MD5 Hash" + Environment.NewLine +
                           "%artist% = Artist Tag" + Environment.NewLine +
                           "%copyright% = Copyright Tag" + Environment.NewLine +
                           "%character% = Character Tag" + Environment.NewLine +
                           "%circle% = Circle Tag" + Environment.NewLine +
                           "%faults% = Faults Tag" + Environment.NewLine +
                           "%originalFilename% = Original Filename" + Environment.NewLine +
                           "%searchtag% = Search tag";

    pbLoading.Image = DanbooruDownloader3.Properties.Resources.AJAX_LOADING;

    _retry = Convert.ToInt32(txtRetry.Text);

    CheckProxyLoginInput();
    SetProxy(chkUseProxy.Checked, txtProxyAddress.Text, Convert.ToInt32(txtProxyPort.Text), txtProxyUsername.Text, txtProxyPassword.Text);

    SetTagColors();
    //SetTagAutoComplete();
    ParseTagBlacklist();

    Program.Logger.Debug(this.Text + " loaded.");

    dgvList.AutoGenerateColumns = false;
    dgvDownload.AutoGenerateColumns = false;

    _ImageSize = cbxImageSize.Text;

    ExtendedWebClient.EnableCookie = Properties.Settings.Default.enableCookie;
    ExtendedWebClient.EnableCompression = Properties.Settings.Default.EnableCompression;
    ExtendedWebClient.AcceptLanguage = Properties.Settings.Default.AcceptLanguage;

    UpdateImageSizeOption();
    ToggleTagsColor();
}
private void ResolveFileUrl()
{
    if (resolveQueue.Count > 0)
    {
        isResolverRunning = true;
        var post = resolveQueue.Dequeue();
        UpdateLog("SankakuComplexParser", "Trying to resolve: " + post.Referer);

        ExtendedWebClient _clientPost = new ExtendedWebClient();
        // Subscribe before starting the async request so the completion event cannot be missed.
        _clientPost.DownloadStringCompleted += new DownloadStringCompletedEventHandler(_clientPost_DownloadStringCompleted);
        _clientPost.DownloadStringAsync(new Uri(post.Referer), post);
    }
    else
    {
        isResolverRunning = false;
    }
}
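// The completion handler _clientPost_DownloadStringCompleted is not part of this listing.
// One plausible shape, assuming it parses the returned HTML and then re-enters
// ResolveFileUrl() to drain the next queued post, is sketched below. The parser call,
// the cast of e.UserState, and the error handling are illustrative assumptions, not the
// project's actual implementation.
private void ResolveCompletedSketch(object sender, DownloadStringCompletedEventArgs e)
{
    var post = (DanbooruPost)e.UserState; // the post passed as the user token in ResolveFileUrl()
    if (e.Error == null)
    {
        // Hypothetical: fill in the urls from the downloaded HTML, mirroring ResolveFileUrlBatch().
        var parsed = SankakuComplexParser.ParsePost(post, e.Result, true);
        post.FileUrl = parsed.FileUrl;
        post.PreviewUrl = parsed.PreviewUrl;
    }
    else
    {
        UpdateLog("SankakuComplexParser", "Failed to resolve: " + post.Referer, e.Error);
    }

    // Process the next queued post, or clear isResolverRunning when the queue is empty.
    ResolveFileUrl();
}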
/// <summary>
/// Get File Url for batch job
/// </summary>
/// <param name="_clientPost"></param>
/// <param name="post"></param>
private void ResolveFileUrlBatch(ExtendedWebClient _clientPost, DanbooruPost post)
{
    if (!string.IsNullOrWhiteSpace(post.Referer))
    {
        UpdateLog("DoBatchJob", "Getting file_url from " + post.Referer);
        int currRetry = 0;
        int maxRetry = Convert.ToInt32(txtRetry.Text);
        int delay = Convert.ToInt32(txtDelay.Text);

        while (currRetry < maxRetry)
        {
            try
            {
                // Apply the timeout before issuing the request so it affects this download.
                _clientPost.Timeout = Convert.ToInt32(txtTimeout.Text);
                string html = _clientPost.DownloadString(post.Referer);

                if (post.Provider.BoardType == BoardType.Danbooru)
                {
                    post = SankakuComplexParser.ParsePost(post, html, !chkUseGlobalProviderTags.Checked);
                    //post.FileUrl = tempPost.FileUrl;
                    //post.PreviewUrl = tempPost.PreviewUrl;
                }
                else if (post.Provider.BoardType == BoardType.Gelbooru)
                {
                    var tempPost = GelbooruHtmlParser.ParsePost(post, html);
                    post.FileUrl = tempPost.FileUrl;
                    post.PreviewUrl = tempPost.PreviewUrl;
                }
                else
                {
                    UpdateLog("DoBatchJob", "No HTML Parser available for: " + post.Provider.Name + " (" + post.Provider.BoardType.ToString() + ")");
                }
                break;
            }
            catch (Exception ex)
            {
                // Increment before the check so the give-up branch is reachable on the final attempt.
                ++currRetry;
                if (currRetry >= maxRetry)
                {
                    UpdateLog("DoBatchJob", "Giving Up Resolving FileUrl: " + ex.StackTrace, ex);
                    post.FileUrl = "";
                    post.JpegUrl = "";
                    post.SampleUrl = "";
                    break;
                }
                UpdateLog("DoBatchJob", "Error Resolving FileUrl (" + currRetry + " of " + maxRetry + "): " + ex.Message + " Wait for " + delay + "s.", ex);
                for (int wait = 0; wait < delay; ++wait)
                {
                    //UpdateLog("DoBatchJob", "Wait for " + wait + " of " + delay);
                    Thread.Sleep(1000);
                }
                UpdateLog("DoBatchJob", "Retrying...");
            }
        }
    }
}
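// The loop above follows a common "retry with fixed delay" pattern. As a side note, the same
// behaviour can be factored into a small generic helper; this sketch is purely illustrative
// and is not part of the project.
private static T RetryWithDelay<T>(Func<T> action, int maxRetry, int delaySeconds)
{
    for (int attempt = 1; ; attempt++)
    {
        try
        {
            return action();
        }
        catch (Exception)
        {
            if (attempt >= maxRetry) throw;    // give up after the last attempt
            Thread.Sleep(delaySeconds * 1000); // wait before retrying
        }
    }
}

// Hypothetical usage, mirroring the DownloadString call in ResolveFileUrlBatch():
// string html = RetryWithDelay(() => _clientPost.DownloadString(post.Referer), maxRetry, delay);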