/// <summary>
/// Build a <see cref="DanbooruSearchParam"/> from a batch job for the given page offset.
/// </summary>
/// <param name="job">Batch job supplying the provider, tag query, limit, start page and rating.</param>
/// <param name="currPage">Page offset added to the job's start page.</param>
/// <returns>Search parameters ready to be issued against the job's provider.</returns>
public DanbooruSearchParam GetSearchParamsFromJob(DanbooruBatchJob job, int currPage)
{
    var option = new DanbooruPostDaoOption()
    {
        BlacklistedTags = TagBlacklist,
        BlacklistedTagsRegex = TagBlacklistRegex,
        BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
        IgnoredTags = TagIgnore,
        IgnoredTagsRegex = TagIgnoreRegex,
        IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
        // Use the job's own provider (was _currProvider): the hard-limit clamp below
        // and GetBatchImageList both key off job.Provider, so keep them consistent.
        Provider = job.Provider,
        SearchTags = !String.IsNullOrWhiteSpace(job.TagQuery) ? job.TagQuery : "",
        IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
    };

    DanbooruSearchParam searchParam = new DanbooruSearchParam();
    searchParam.Provider = option.Provider;
    searchParam.Tag = option.SearchTags;
    searchParam.Source = "";

    // Clamp the requested limit to the provider's hard limit.
    if (job.Limit > job.Provider.HardLimit)
        searchParam.Limit = job.Provider.HardLimit;
    else
        searchParam.Limit = job.Limit;

    // Reflect the current page offset into the absolute page number.
    searchParam.Page = job.StartPage + currPage;
    searchParam.IsNotRating = false;
    searchParam.Rating = job.Rating;
    searchParam.OrderBy = "";
    searchParam.Option = option;
    return searchParam;
}
/// <summary>
/// Download and parse the image list for a batch job; retries on WebException.
/// </summary>
/// <param name="url">Fully-resolved list URL to download.</param>
/// <param name="query">Query string stored on the DAO option (for logging/reference).</param>
/// <param name="job">Batch job supplying the provider and tag query.</param>
/// <returns>The parsed post DAO, or null if all retries failed.</returns>
private DanbooruPostDao GetBatchImageList(String url, String query, DanbooruBatchJob job)
{
    DanbooruPostDao d = null;
    int currRetry = 0;
    int maxRetry = Convert.ToInt32(txtRetry.Text);
    int delay = Convert.ToInt32(txtDelay.Text);
    while (currRetry < maxRetry)
    {
        try
        {
            // Download the list once. (The original also called DownloadString(url)
            // into an unused local, fetching the same URL twice per attempt.)
            using (MemoryStream ms = new MemoryStream(_clientBatch.DownloadData(url)))
            {
                var option = new DanbooruPostDaoOption()
                {
                    Provider = job.Provider,
                    Query = query,
                    SearchTags = job.TagQuery,
                    Referer = url,
                    BlacklistedTags = TagBlacklist,
                    BlacklistedTagsRegex = TagBlacklistRegex,
                    BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
                    IgnoredTags = TagIgnore,
                    IgnoredTagsRegex = TagIgnoreRegex,
                    IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
                    IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
                };
                d = new DanbooruPostDao(ms, option);
            }
            break;
        }
        catch (System.Net.WebException ex)
        {
            ++currRetry;
            UpdateLog("DoBatchJob", "Error Getting List (" + currRetry + " of " + maxRetry + "): " + ex.Message + " Wait for " + delay + "s.", ex);
            // Sleep in 1 s slices so the wait roughly matches the configured delay.
            for (int wait = 0; wait < delay; ++wait)
            {
                Thread.Sleep(1000);
            }
            UpdateLog("DoBatchJob", "Retrying...");
        }
    }
    return d;
}
/// <summary>
/// Validate the form inputs and build one DanbooruBatchJob per checked provider.
/// On any validation failure the dialog result is reset to None so the dialog
/// stays open (the original left it at OK, closing the dialog despite the error).
/// </summary>
private void btnOK_Click(object sender, EventArgs e)
{
    bool providerFlag = false;
    this.DialogResult = DialogResult.OK;
    Jobs = new List<DanbooruBatchJob>();
    foreach (CheckBox c in chkList)
    {
        if (c.Checked)
        {
            var p = providerList.FirstOrDefault(x => x.Name == c.Text);
            if (p != null)
            {
                providerFlag = true;
                DanbooruBatchJob Job = new DanbooruBatchJob();
                Job.Provider = p;
                try
                {
                    if (!string.IsNullOrWhiteSpace(txtLimit.Text))
                        Job.Limit = Convert.ToInt32(txtLimit.Text);
                }
                catch (Exception ex)
                {
                    MessageBox.Show("Error at Limit." + Environment.NewLine + ex.Message);
                    txtLimit.Focus();
                    txtLimit.SelectAll();
                    // Keep the dialog open on invalid input.
                    this.DialogResult = DialogResult.None;
                    return;
                }
                try
                {
                    if (!string.IsNullOrWhiteSpace(txtPage.Text))
                        Job.StartPage = Convert.ToInt32(txtPage.Text);
                    else
                        Job.StartPage = -1;
                }
                catch (Exception ex)
                {
                    MessageBox.Show("Error at StartPage." + Environment.NewLine + ex.Message);
                    txtPage.Focus();
                    txtPage.SelectAll();
                    this.DialogResult = DialogResult.None;
                    return;
                }

                // A checked "not rating" prefixes the rating with '-' (exclusion).
                if (cbxRating.SelectedValue != null && chkNotRating.Checked)
                    Job.Rating = "-" + cbxRating.SelectedValue;
                else
                    Job.Rating = (string)cbxRating.SelectedValue;

                // do encoding later on main form.
                Job.TagQuery = txtTagQuery.Text;

                if (string.IsNullOrWhiteSpace(txtFilenameFormat.Text))
                {
                    MessageBox.Show("Filename Format is empty!");
                    txtFilenameFormat.Focus();
                    this.DialogResult = DialogResult.None;
                    return;
                }
                Job.SaveFolder = txtFilenameFormat.Text;

                Jobs.Add(Job);
            }
        }
    }
    if (!providerFlag)
    {
        MessageBox.Show("Please select at least 1 provider.");
        pnlProvider.Focus();
        this.DialogResult = DialogResult.None;
        this.Jobs = null;
    }
}
/// <summary>
/// Download a single image to <paramref name="filename"/> via a ".!tmp" temporary
/// file, retrying on WebException up to the configured retry count.
/// </summary>
/// <param name="targetUrl">Image URL to download.</param>
/// <param name="job">Batch job whose Downloaded/Error counters are updated.</param>
/// <param name="post">Post supplying the HTTP referer.</param>
/// <param name="filename">Final path for the downloaded file.</param>
/// <returns>1 on success, 0 if all retries failed.</returns>
private int DoDownloadBatch(string targetUrl, DanbooruBatchJob job, DanbooruPost post, string filename)
{
    UpdateLog("DoBatchJob", "Download: " + targetUrl);
    _clientBatch.Referer = post.Referer;
    int currRetry = 0;
    int maxRetry = Convert.ToInt32(txtRetry.Text);
    int delay = Convert.ToInt32(txtDelay.Text);
    while (currRetry <= maxRetry)
    {
        try
        {
            // Download to a temp file first so a partial download never
            // overwrites / masquerades as a completed file.
            var filename2 = filename + ".!tmp";
#if DEBUG
            UpdateLog("DoBatchJob", "DEBUG Saved To: " + filename);
            Thread.Sleep(100);
#else
            if (File.Exists(filename2))
            {
                UpdateLog("DoBatchJob", "Deleting temporary file: " + filename2);
                File.Delete(filename2);
            }
            _clientBatch.DownloadFile(targetUrl, filename2);
            File.Move(filename2, filename);
            UpdateLog("DoBatchJob", "Saved To: " + filename);
#endif
            ++job.Downloaded;
            if (Properties.Settings.Default.WriteDownloadedFile)
            {
                // write to text file for downloaded file.
                Helper.WriteTextFile(filename + Environment.NewLine);
            }
            return 1;
        }
        catch (System.Net.WebException ex)
        {
            // NOTE(review): abort-on-error only rethrows while currRetry < maxRetry,
            // so the final attempt falls through to the retry branch — looks odd,
            // but preserved as-is; confirm intended semantics.
            if (currRetry < maxRetry && cbxAbortOnError.Checked)
                throw;
            else
            {
                // Prefer the inner exception's message when present (more specific).
                var message = ex.Message;
                if (ex.InnerException != null)
                    message = ex.InnerException.Message;

                UpdateLog("DoBatchJob", "Error Download Batch Image (" + currRetry + " of " + maxRetry + "): " + message + " Wait for " + delay + "s.", ex);
                // Sleep in 1 s slices so the wait roughly matches the configured delay.
                for (int wait = 0; wait < delay; wait++)
                {
                    Thread.Sleep(1000);
                }
                UpdateLog("DoBatchJob", "Retrying...");
            }
            ++currRetry;
            // Use the already-parsed maxRetry (original re-parsed txtRetry.Text here).
            if (currRetry > maxRetry)
                ++job.Error;
        }
    }
    return 0; // failed
}