public void TestShimmie2Parser()
{
    var shimmie2Provider = DanbooruProviderDao.GetInstance()
                                              .Read(sourceProvider)
                                              .Where<DanbooruProvider>(x => x.BoardType == BoardType.Shimmie2)
                                              .First<DanbooruProvider>();
    var xml = "";
    using (StreamReader reader = new StreamReader(@"../../../DanbooruDownloader3.test/TestXml/shimmie2.xml"))
    {
        xml = reader.ReadToEnd();
    }

    DanbooruPostDaoOption option = new DanbooruPostDaoOption()
    {
        Provider = shimmie2Provider,
        Query = "",
        SearchTags = "",
        BlacklistedTags = null,
        BlacklistedTagsRegex = new Regex("$^"),
        BlacklistedTagsUseRegex = false,
        IsBlacklistOnlyForGeneral = false,
    };

    var list = ShimmieEngine.ParseRSS(xml, option);
    Assert.IsNotNull(list);
    Assert.IsTrue(list.Count == 9);
}
public static bool CheckBlacklistedTag(DanbooruPost post, DanbooruPostDaoOption option)
{
    if (option.BlacklistedTagsUseRegex)
    {
        if (option.IsBlacklistOnlyForGeneral)
        {
            return post.TagsEntity.Any(x => x.Type == DanbooruTagType.General && option.BlacklistedTagsRegex.IsMatch(x.Name));
        }
        else
            return post.TagsEntity.Any(x => option.BlacklistedTagsRegex.IsMatch(x.Name));
    }
    else
    {
        if (option.IsBlacklistOnlyForGeneral)
        {
            foreach (var tag in option.BlacklistedTags)
            {
                if (post.TagsEntity.Any(x => x.Type == DanbooruTagType.General && x.Name.Equals(tag.Name, StringComparison.InvariantCultureIgnoreCase)))
                    return true;
            }
        }
        else
        {
            foreach (var tag in option.BlacklistedTags)
            {
                if (post.TagsEntity.Any(x => x.Name.Equals(tag.Name, StringComparison.InvariantCultureIgnoreCase)))
                    return true;
            }
        }
    }
    return false;
}
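// Illustrative only: a minimal sketch of how CheckBlacklistedTag could be applied when a
// freshly parsed post is post-processed. The HidePost helper name is hypothetical and not
// part of the original code; post.Hidden is the flag the downloader checks before saving.
private static void HidePost(DanbooruPost post, DanbooruPostDaoOption option)
{
    // Mark the post as hidden so later download passes can skip it.
    post.Hidden = CheckBlacklistedTag(post, option);
}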
/// <summary>
/// parse xml/json list file
/// </summary>
/// <param name="option"></param>
public DanbooruPostDao(DanbooruPostDaoOption option)
{
    this.Option = option;
    if (option.Url.ToLower().EndsWith(".xml"))
        ReadXML(option.Url);
    else
        ReadJSON(option.Url);
}
/// <summary>
/// Get search parameter from Main Tab and Option Panels
/// </summary>
/// <returns></returns>
public DanbooruSearchParam GetSearchParams()
{
    var option = new DanbooruPostDaoOption()
    {
        BlacklistedTags = TagBlacklist,
        BlacklistedTagsRegex = TagBlacklistRegex,
        BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
        IgnoredTags = TagIgnore,
        IgnoredTagsRegex = TagIgnoreRegex,
        IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
        Provider = _currProvider,
        SearchTags = !String.IsNullOrWhiteSpace(txtTags.Text) ? txtTags.Text : "",
        IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
    };

    DanbooruSearchParam searchParam = new DanbooruSearchParam();
    searchParam.Provider = option.Provider;
    searchParam.Tag = option.SearchTags;
    searchParam.Source = txtSource.Text.Trim();

    int limit = 0;
    if (Int32.TryParse(txtLimit.Text, out limit) && limit > 0)
        searchParam.Limit = limit;
    else
        searchParam.Limit = null;

    int page = _currProvider.BoardType == BoardType.Gelbooru ? 0 : 1;
    if (Int32.TryParse(txtPage.Text, out page) && page > 0)
        searchParam.Page = page;
    else
        searchParam.Page = null;

    searchParam.IsNotRating = chkNotRating.Checked;

    if (cbxRating.SelectedValue != null)
    {
        if (cbxRating.SelectedValue.GetType() == typeof(string))
            searchParam.Rating = cbxRating.SelectedValue.ToString();
        else
        {
            var rating = (KeyValuePair<string, string>)cbxRating.SelectedValue;
            searchParam.Rating = rating.Value;
        }
    }

    if (cbxOrder.SelectedValue != null)
    {
        if (cbxOrder.SelectedValue.GetType() == typeof(string))
            searchParam.OrderBy = cbxOrder.SelectedValue.ToString();
        else
        {
            var order = (KeyValuePair<string, string>)cbxOrder.SelectedValue;
            searchParam.OrderBy = order.Value;
        }
    }

    searchParam.Option = option;
    return searchParam;
}
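// Illustrative only: how the search parameters built above are typically consumed.
// Provider.GetQueryUrl is the call used elsewhere in this code base (see DoBatchJob);
// the btnSearch_Click handler name here is a hypothetical example, not the original handler.
private void btnSearch_Click(object sender, EventArgs e)
{
    DanbooruSearchParam searchParam = GetSearchParams();
    string url = _currProvider.GetQueryUrl(searchParam);
    Program.Logger.Debug("Query url: " + url);
}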
/// <summary>
/// parse xml/json list stream and close it.
/// </summary>
/// <param name="input"></param>
/// <param name="option"></param>
public DanbooruPostDao(Stream input, DanbooruPostDaoOption option)
{
    string rawData = "";
    this.Option = option;
    try
    {
        using (StreamReader reader = new StreamReader(input))
        {
            rawData = reader.ReadToEnd();
        }

        switch (option.Provider.Preferred)
        {
            case PreferredMethod.Xml:
                ReadXML(rawData, option);
                break;
            case PreferredMethod.Json:
                ReadJSON(rawData, option);
                break;
            case PreferredMethod.Html:
                DanbooruSearchParam param = new DanbooruSearchParam()
                {
                    Provider = option.Provider,
                    Tag = option.SearchTags,
                    Option = option
                };
                if (option.Provider.BoardType == BoardType.Danbooru)
                {
                    SankakuComplexParser parser = new SankakuComplexParser();
                    posts = parser.Parse(rawData, param);
                }
                else if (option.Provider.BoardType == BoardType.Gelbooru)
                {
                    GelbooruHtmlParser parser = new GelbooruHtmlParser();
                    posts = parser.Parse(rawData, param);
                }
                else
                {
                    throw new NotImplementedException("No HTML Parser for: " + option.Provider.Name);
                }
                break;
        }
    }
    catch (Exception)
    {
        // Keep the raw response for troubleshooting before rethrowing.
        Helper.DumpRawData(rawData, Option.Provider, option.Query);
        throw;
    }
}
public static BindingList<DanbooruPost> ParseRSS(string xmldoc, DanbooruPostDaoOption option)
{
    BindingList<DanbooruPost> posts = new BindingList<DanbooruPost>();
    try
    {
        ReadRssMethod1(option, posts, xmldoc);
    }
    catch (Exception ex)
    {
        // Fall back to the description/enclosure-based parser when the media-namespace layout fails.
        Program.Logger.Error("Using method2", ex);
        ReadRssMethod2(option, posts, xmldoc);
    }
    return posts;
}
private static void PostProcess(DanbooruPostDaoOption option, DanbooruPost post)
{
    post.TagsEntity = Helper.ParseTags(post.Tags, option.Provider);
    if (option.BlacklistedTagsUseRegex)
    {
        post.Hidden = IsBlacklisted(post, option.BlacklistedTagsRegex);
    }
    else
    {
        post.Hidden = IsBlacklisted(post, option.BlacklistedTags);
    }
    post.Query = option.Query;
    post.SearchTags = option.SearchTags;
    post.Provider = option.Provider;
}
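// Illustrative only: the IsBlacklisted overloads referenced above are not shown in this
// section. This is a minimal sketch of what they might look like, modeled on
// CheckBlacklistedTag (the general-tags-only option is omitted for brevity, and the
// List<DanbooruTag> parameter type is an assumption); it is not the original implementation.
private static bool IsBlacklisted(DanbooruPost post, Regex blacklistRegex)
{
    // Hide the post when any tag name matches the blacklist pattern.
    return post.TagsEntity.Any(x => blacklistRegex.IsMatch(x.Name));
}

private static bool IsBlacklisted(DanbooruPost post, List<DanbooruTag> blacklistedTags)
{
    // Hide the post when any tag name matches a blacklisted tag (case-insensitive).
    return blacklistedTags != null
        && blacklistedTags.Any(tag => post.TagsEntity.Any(
               x => x.Name.Equals(tag.Name, StringComparison.InvariantCultureIgnoreCase)));
}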
private static void ReadRssMethod2(DanbooruPostDaoOption option, BindingList<DanbooruPost> posts, string xmldoc)
{
    using (StringReader strReader = new StringReader(xmldoc))
    {
        using (XmlReader reader = new XmlTextReader(strReader))
        {
            XDocument doc = XDocument.Load(reader);
            var feeds = doc.Descendants("item");
            XNamespace dc = "http://purl.org/dc/elements/1.1/";
            foreach (var item in feeds)
            {
                DanbooruPost post = new DanbooruPost();
                var titleData = item.Element("title").Value.Split(new char[] { '-' }, 2);
                post.Id = titleData[0].Trim();
                post.Tags = titleData[1].Trim();
                post.Referer = AppendHttp(item.Element("link").Value, option.Provider);
                post.CreatedAt = item.Element("pubDate").Value;
                post.CreatorId = item.Element(dc + "creator").Value;
                post.FileUrl = item.Element("enclosure").Attribute("url").Value;
                try
                {
                    HtmlAgilityPack.HtmlDocument description = new HtmlAgilityPack.HtmlDocument();
                    description.LoadHtml(HttpUtility.HtmlDecode(item.Element("description").Value));
                    //post.Width = Convert.ToInt32(matches.Groups[1].Value);
                    //post.Height = Convert.ToInt32(matches.Groups[2].Value);
                    var img = description.DocumentNode.SelectSingleNode("//img");
                    post.PreviewWidth = Convert.ToInt32(img.Attributes["width"].Value);
                    post.PreviewHeight = Convert.ToInt32(img.Attributes["height"].Value);
                    post.PreviewUrl = img.Attributes["src"].Value;
                }
                catch (Exception)
                {
                    // Preview information is optional; ignore malformed descriptions.
                }
                PostProcess(option, post);
                posts.Add(post);
            }
        }
    }
}
private void ReadXML(string rawData, DanbooruPostDaoOption option)
{
    posts = new BindingList<DanbooruPost>();
    actualCount = 0;

    // Issue #60
    // modify xml to insert html entity
    rawData = Regex.Replace(rawData, @"(<\?xml.*\?>)", "$1<!DOCTYPE document SYSTEM \"xhtml.ent\">");

    ProcessXML(rawData);
}
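// Illustrative only: the injected DOCTYPE above lets named HTML entities (e.g. &eacute;)
// resolve against a local xhtml.ent file, but only if the reader that later parses the data
// allows DTD processing. ProcessXML is not shown in this section, so this is an assumption
// about how such a reader could be wired, not the original code.
private static XmlReader CreateEntityAwareReader(string rawData)
{
    var settings = new XmlReaderSettings
    {
        DtdProcessing = DtdProcessing.Parse,   // honor the injected DOCTYPE
        XmlResolver = new XmlUrlResolver()     // resolve xhtml.ent relative to the document
    };
    return XmlReader.Create(new StringReader(rawData), settings);
}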
/// <summary>
/// Get Search Param from Batch Job
/// </summary>
/// <param name="job"></param>
/// <returns></returns>
public DanbooruSearchParam GetSearchParamsFromJob(DanbooruBatchJob job, int currPage)
{
    var option = new DanbooruPostDaoOption()
    {
        BlacklistedTags = TagBlacklist,
        BlacklistedTagsRegex = TagBlacklistRegex,
        BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
        IgnoredTags = TagIgnore,
        IgnoredTagsRegex = TagIgnoreRegex,
        IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
        Provider = _currProvider,
        SearchTags = !String.IsNullOrWhiteSpace(job.TagQuery) ? job.TagQuery : "",
        IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
    };

    DanbooruSearchParam searchParam = new DanbooruSearchParam();
    searchParam.Provider = option.Provider;
    searchParam.Tag = option.SearchTags;
    searchParam.Source = "";

    // check if given limit is more than the hard limit
    if (job.Limit > job.Provider.HardLimit)
        searchParam.Limit = job.Provider.HardLimit;
    else
        searchParam.Limit = job.Limit;

    // reflect to current page
    searchParam.Page = job.StartPage + currPage;

    searchParam.IsNotRating = false;
    searchParam.Rating = job.Rating;
    searchParam.OrderBy = "";
    searchParam.Option = option;

    return searchParam;
}
public void ReadJSON(string rawData, DanbooruPostDaoOption option)
{
    posts = new BindingList<DanbooruPost>();
    actualCount = 0;

    ProcessJson(rawData);
    //RawData = json;
    RawData = rawData;
}
private static void ReadRssMethod1(DanbooruPostDaoOption option, BindingList<DanbooruPost> posts, string xmldoc)
{
    using (StringReader strReader = new StringReader(xmldoc))
    {
        using (XmlReader reader = new XmlTextReader(strReader))
        {
            XDocument doc = XDocument.Load(reader);
            string media = doc.Root.Attribute("{http://www.w3.org/2000/xmlns/}media").Value;
            foreach (var item in doc.Descendants("item"))
            {
                DanbooruPost post = new DanbooruPost();
                var titleData = item.Element("title").Value.Split(new char[] { '-' }, 2);
                post.Id = titleData[0].Trim();
                post.Tags = titleData[1].Trim();
                post.Referer = AppendHttp(item.Element("link").Value, option.Provider);
                post.CreatedAt = item.Element("pubDate").Value;

                var data = item.Element("{" + media + "}thumbnail");
                post.PreviewUrl = AppendHttp(data.Attribute("url").Value, option.Provider);
                data = item.Element("{" + media + "}content");
                post.FileUrl = AppendHttp(data.Attribute("url").Value, option.Provider);

                try
                {
                    string description = HttpUtility.HtmlDecode(item.Element("description").Value);
                    Match matches = imageResolutionRegex.Match(description);
                    post.Width = Convert.ToInt32(matches.Groups[1].Value);
                    post.Height = Convert.ToInt32(matches.Groups[2].Value);
                    post.PreviewWidth = Convert.ToInt32(matches.Groups[3].Value);
                    post.PreviewHeight = Convert.ToInt32(matches.Groups[4].Value);
                    post.CreatorId = matches.Groups[5].Value;
                }
                catch (Exception)
                {
                    // Resolution/creator details are optional; ignore descriptions that do not match.
                }

                PostProcess(option, post);
                posts.Add(post);
            }
        }
    }
}
private void ReadXML(string rawData, DanbooruPostDaoOption option)
{
    posts = new BindingList<DanbooruPost>();
    actualCount = 0;
    ProcessXML(rawData);
}
private void clientList_DownloadDataCompleted(object sender, DownloadDataCompletedEventArgs e)
{
    try
    {
        Program.Logger.Debug("Download list completed");
        tsProgressBar.Visible = false;
        MemoryStream ms = new MemoryStream(e.Result);

        var option = new DanbooruPostDaoOption()
        {
            Provider = _currProvider,
            //Url = txtListFile.Text,
            Referer = _clientList.Referer,
            Query = txtQuery.Text,
            SearchTags = txtTags.Text,
            BlacklistedTags = TagBlacklist,
            BlacklistedTagsRegex = TagBlacklistRegex,
            BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
            IgnoredTags = TagIgnore,
            IgnoredTagsRegex = TagIgnoreRegex,
            IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
            IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
        };

        tsStatus.Text = "Loading downloaded list...";
        tsProgressBar.Style = ProgressBarStyle.Marquee;
        tsProgressBar.Visible = true;
        _isLoadingList = true;

        backgroundWorker1 = new BackgroundWorker();
        backgroundWorker1.DoWork += new DoWorkEventHandler(backgroundWorker1_DoWork);
        backgroundWorker1.RunWorkerCompleted += new RunWorkerCompletedEventHandler(backgroundWorker1_RunWorkerCompleted);
        backgroundWorker1.RunWorkerAsync(new object[] { ms, option });
    }
    catch (Exception ex)
    {
        var message = ex.Message;
        if (ex.InnerException != null)
        {
            message = ex.InnerException.Message;
            var wex = ex.InnerException as System.Net.WebException;
            if (wex != null && wex.Status == WebExceptionStatus.ProtocolError)
            {
                var status = wex.Response != null ? wex.Response.Headers["Status"] : null;
                var response = wex.Response as HttpWebResponse;
                if (status == "403" || (response != null && response.StatusCode == HttpStatusCode.Forbidden))
                {
                    message += Environment.NewLine + "Please check your login information.";
                }
                else
                {
                    using (var responseStream = wex.Response.GetResponseStream())
                    {
                        if (responseStream != null)
                        {
                            var option = new DanbooruPostDaoOption()
                            {
                                Provider = _currProvider,
                                //Url = "",
                                Referer = "",
                                Query = "",
                                SearchTags = "",
                                BlacklistedTags = TagBlacklist,
                                BlacklistedTagsRegex = TagBlacklistRegex,
                                BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
                                IgnoredTags = TagIgnore,
                                IgnoredTagsRegex = TagIgnoreRegex,
                                IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
                                IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
                            };
                            var resp = new DanbooruPostDao(responseStream, option);
                            message = "Server Message: " + resp.ResponseMessage;
                            if (status != "200")
                            {
                                message += "\nStatus Code: " + wex.Status.ToString() + " (" + status + ")";
                            }
                        }
                    }
                }
            }
        }

        MessageBox.Show(message, "Download List");
        Program.Logger.Error(message, ex);
        UpdateLog("clientList_DownloadDataCompleted", "Error: " + (ex.InnerException == null ? ex.Message : ex.InnerException.Message), ex);
        UpdateLog("clientList_DownloadDataCompleted", "Referer: " + _clientList.Referer);
        chkAutoLoadNext.Checked = false;
        btnGet.Enabled = true;
        btnListCancel.Enabled = false;
        _isLoadingList = false;
    }
}
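// Illustrative only: backgroundWorker1_DoWork is referenced above but not shown in this
// section. A minimal sketch of what it might do with the arguments passed to RunWorkerAsync,
// using the stream-based DanbooruPostDao constructor shown earlier; treat this as an
// assumption, not the original handler.
private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
    var args = (object[])e.Argument;
    var ms = (MemoryStream)args[0];
    var option = (DanbooruPostDaoOption)args[1];

    // Parse the downloaded list on the worker thread and hand the result back to the UI thread.
    e.Result = new DanbooruPostDao(ms, option);
}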
private void clientList_DownloadFileCompleted(object sender, AsyncCompletedEventArgs e)
{
    tsProgressBar.Visible = false;
    UpdateLog("clientList_DownloadFileCompleted", "Download Complete: " + e.UserState);
    txtListFile.Text = saveFileDialog1.FileName;
    gbxSearch.Enabled = true;
    gbxList.Enabled = true;
    btnGet.Enabled = true;
    btnListCancel.Enabled = false;

    if (chkAutoLoadList.Checked)
    {
        var option = new DanbooruPostDaoOption()
        {
            Provider = _currProvider,
            Url = txtListFile.Text,
            Referer = _clientList.Referer,
            Query = txtListFile.Text.Split('\\').Last(),
            SearchTags = "",
            BlacklistedTags = TagBlacklist,
            BlacklistedTagsRegex = TagBlacklistRegex,
            BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
            IgnoredTags = TagIgnore,
            IgnoredTagsRegex = TagIgnoreRegex,
            IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
            IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
        };
        DanbooruPostDao newPosts = new DanbooruPostDao(option);
        LoadList(newPosts);
    }
    _isLoadingList = false;
}
public void DoBatchJob(BindingList<DanbooruBatchJob> batchJob)
{
    ToggleBatchJobButtonDelegate bjd = new ToggleBatchJobButtonDelegate(ToggleBatchJobButton);
    UpdateUiDelegate del = new UpdateUiDelegate(UpdateUi);
    UpdateUiDelegate2 del2 = new UpdateUiDelegate2(UpdateUi);
    ExtendedWebClient _clientPost = new ExtendedWebClient();

    if (batchJob != null)
    {
        UpdateStatus2("Starting Batch Job");
        for (int i = 0; i < batchJob.Count; i++)
        {
            batchJob[i].CurrentPage = 0;
            if (!batchJob[i].isCompleted)
            {
                UpdateLog("DoBatchJob", "Processing Batch Job#" + i);

                DanbooruPostDao prevDao = null;
                bool flag = true;
                int currPage = 0;
                int postCount = 0;
                do
                {
                    // stop/pause event handling outside
                    _pauseEvent.WaitOne(Timeout.Infinite);
                    if (_shutdownEvent.WaitOne(0))
                    {
                        batchJob[i].Status = " ==> Stopped.";
                        // toggle button
                        BeginInvoke(bjd, new object[] { true });
                        UpdateLog("DoBatchJob", "Batch Job Stopped.");
                        UpdateStatus2("Batch Job Stopped.");
                        return;
                    }

                    DanbooruPostDao d = null;
                    int imgCount = 0;
                    int skipCount = 0;
                    string url;
                    string query = "";

                    #region Construct the searchParam
                    if (batchJob[i].Provider.BoardType == BoardType.Danbooru || batchJob[i].Provider.BoardType == BoardType.Shimmie2)
                    {
                        currPage = batchJob[i].CurrentPage;
                    }
                    else if (batchJob[i].Provider.BoardType == BoardType.Gelbooru)
                    {
                        if (batchJob[i].Provider.Preferred == PreferredMethod.Html)
                        {
                            currPage = batchJob[i].CurrentPage * postCount;
                        }
                        else
                        {
                            currPage = batchJob[i].CurrentPage;
                        }
                    }

                    DanbooruSearchParam searchParam = GetSearchParamsFromJob(batchJob[i], currPage);
                    url = batchJob[i].Provider.GetQueryUrl(searchParam);
                    #endregion Construct the searchParam

                    try
                    {
                        #region Get and load the image list
                        batchJob[i].Status = "Getting list for page: " + searchParam.Page;
                        BeginInvoke(del);
                        UpdateLog("DoBatchJob", "Downloading list: " + url);

                        d = GetBatchImageList(url, query, batchJob[i]);

                        if (d == null)
                        {
                            // Cannot get list.
                            UpdateLog("DoBatchJob", "Cannot load list");
                            batchJob[i].Status = "Cannot load list.";
                            batchJob[i].isCompleted = false;
                            batchJob[i].isError = true;
                            flag = false;
                        }
                        else if (d.Posts == null || d.Posts.Count == 0)
                        {
                            // No more image
                            UpdateLog("DoBatchJob", "No more image.");
                            batchJob[i].Status = "No more image.";
                            flag = false;
                            //break;
                        }
                        else
                        {
                            if (prevDao != null)
                            {
                                // identical data returned, probably no more new image.
                                if (prevDao.RawData != null && prevDao.RawData.Equals(d.RawData))
                                {
                                    UpdateLog("DoBatchJob", "Identical list, probably last page.");
                                    batchJob[i].Status = "Identical list, probably last page.";
                                    flag = false;
                                    //break;
                                }
                            }
                            prevDao = d;

                            batchJob[i].Total = d.PostCount;
                            batchJob[i].CurrentPageTotal = d.Posts.Count;
                            batchJob[i].CurrentPageOffset = d.Offset;
                            #endregion Get and load the image list

                            postCount = d.Posts.Count;

                            foreach (DanbooruPost post in d.Posts)
                            {
                                // Update progress bar
                                object[] myArray = new object[2];
                                myArray[0] = batchJob[i].ProcessedTotal;
                                myArray[1] = d.PostCount < batchJob[i].Limit ? d.PostCount : batchJob[i].Limit;
                                BeginInvoke(del2, myArray);

                                // thread handling
                                _pauseEvent.WaitOne(Timeout.Infinite);
                                if (_shutdownEvent.WaitOne(0))
                                {
                                    batchJob[i].Status = " ==> Stopped.";
                                    // toggle button
                                    BeginInvoke(bjd, new object[] { true });
                                    UpdateLog("DoBatchJob", "Batch Job Stopped.");
                                    UpdateStatus2("Batch Job Stopped.");
                                    return;
                                }

                                // check if have url and post is not deleted
                                if (string.IsNullOrWhiteSpace(post.FileUrl) && (post.Status != "deleted" || chkProcessDeletedPost.Checked))
                                {
                                    ResolveFileUrlBatch(_clientPost, post);
                                }

                                // Choose the correct urls
                                var targetUrl = post.FileUrl;
                                if (_ImageSize == "Thumb" && !String.IsNullOrWhiteSpace(post.PreviewUrl))
                                {
                                    targetUrl = post.PreviewUrl;
                                }
                                else if (_ImageSize == "Jpeg" && !String.IsNullOrWhiteSpace(post.JpegUrl))
                                {
                                    targetUrl = post.JpegUrl;
                                }
                                else if (_ImageSize == "Sample" && !String.IsNullOrWhiteSpace(post.SampleUrl))
                                {
                                    targetUrl = post.SampleUrl;
                                }

                                batchJob[i].Status = "Downloading: " + targetUrl;
                                BeginInvoke(del);

                                //if (post.Provider == null) post.Provider = cbxProvider.Text;
                                //if (post.Query == null) post.Query = txtQuery.Text;
                                //if (post.SearchTags == null) post.SearchTags = txtTags.Text;

                                bool download = true;

                                // check if blacklisted
                                if (download && post.Hidden)
                                {
                                    ++skipCount;
                                    ++batchJob[i].Skipped;
                                    download = false;
                                    UpdateLog("DoBatchJob", "Download skipped, contains blacklisted tag: " + post.Tags + " Url: " + targetUrl);
                                }

                                string filename = "";
                                if (download && !string.IsNullOrWhiteSpace(targetUrl))
                                {
                                    var format = new DanbooruFilenameFormat()
                                    {
                                        FilenameFormat = batchJob[i].SaveFolder,
                                        Limit = Convert.ToInt32(txtFilenameLength.Text),
                                        BaseFolder = txtSaveFolder.Text,
                                        MissingTagReplacement = txtTagReplacement.Text,
                                        ArtistGroupLimit = Convert.ToInt32(txtArtistTagGrouping.Text),
                                        CharacterGroupLimit = Convert.ToInt32(txtCharaTagGrouping.Text),
                                        CopyrightGroupLimit = Convert.ToInt32(txtCopyTagGrouping.Text),
                                        CircleGroupLimit = Convert.ToInt32(txtCircleTagGrouping.Text),
                                        FaultsGroupLimit = Convert.ToInt32(txtFaultsTagGrouping.Text),
                                        IgnoredTags = DanbooruTagsDao.Instance.ParseTagsString(txtIgnoredTags.Text.Replace(Environment.NewLine, " ")),
                                        IgnoredTagsRegex = txtIgnoredTags.Text.Trim().Replace(Environment.NewLine, "|"),
                                        IgnoreTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
                                        IsReplaceMode = chkReplaceMode.Checked,
                                        IgnoredTagsOnlyForGeneral = chkIgnoreForGeneralTag.Checked,
                                        TagReplaceUnderscoreToSpace = chkIsReplaceUnderscoreTag.Checked
                                    };
                                    string extension = Helper.getFileExtensions(targetUrl);
                                    filename = Helper.MakeFilename(format, post) + extension;
                                }

                                // check if exist
                                if (download && !chkOverwrite.Checked)
                                {
                                    if (File.Exists(filename))
                                    {
                                        ++skipCount;
                                        ++batchJob[i].Skipped;
                                        download = false;
                                        UpdateLog("DoBatchJob", "Download skipped, file exists: " + filename);
                                    }
                                }

                                if (download && String.IsNullOrWhiteSpace(targetUrl))
                                {
                                    ++skipCount;
                                    ++batchJob[i].Skipped;
                                    download = false;
                                    UpdateLog("DoBatchJob", "Download skipped, ID: " + post.Id + " No file_url, probably deleted");
                                }

                                Uri uri = null;
                                if (download && !Uri.TryCreate(targetUrl, UriKind.RelativeOrAbsolute, out uri))
                                {
                                    ++skipCount;
                                    ++batchJob[i].Skipped;
                                    download = false;
                                    UpdateLog("DoBatchJob", "Download skipped, ID: " + post.Id + " Invalid URL: " + targetUrl);
                                }

                                #region download
                                if (download)
                                {
                                    imgCount = DoDownloadBatch(targetUrl, batchJob[i], post, filename);
                                }
                                #endregion download

                                // check if more than available post
                                if (batchJob[i].ProcessedTotal >= d.PostCount && d.PostCount != 0)
                                {
                                    UpdateLog("DoBatchJob", "No more post.");
                                    flag = false;
                                    break;
                                }

                                // check if over given limit
                                if (batchJob[i].ProcessedTotal >= batchJob[i].Limit)
                                {
                                    UpdateLog("DoBatchJob", "Limit Reached.");
                                    flag = false;
                                    break;
                                }

                                // check batch job delay
                                int delay = 0;
                                Int32.TryParse(Properties.Settings.Default.BatchJobDelay, out delay);
                                if ((Properties.Settings.Default.DelayIncludeSkipped || download) && delay > 0)
                                {
                                    UpdateLog("DoBatchJob", String.Format("Waiting for {0}ms for the next post.", delay));
                                    Thread.Sleep(delay);
                                }
                            }
                        }
                        batchJob[i].Status = " ==> Done.";
                    }
                    catch (Exception ex)
                    {
                        string message = ex.Message;
                        string responseMessage = "";
                        if (ex.InnerException != null)
                        {
                            message += Environment.NewLine + "Inner: " + ex.InnerException.Message;
                        }
                        message += Environment.NewLine + "Stack Trace: " + Environment.NewLine + ex.StackTrace;
                        message += Environment.NewLine + "Query: " + batchJob[i].TagQuery;

                        batchJob[i].isError = true;
                        batchJob[i].isCompleted = false;

                        if (ex.GetType() == typeof(System.Net.WebException))
                        {
                            System.Net.WebException wex = (System.Net.WebException)ex;
                            if (wex.Status == WebExceptionStatus.ProtocolError &&
                                wex.Response.Headers.AllKeys.Contains("Status") &&
                                wex.Response.Headers["Status"].ToString() == "500")
                            {
                                using (var response = wex.Response.GetResponseStream())
                                {
                                    if (response != null)
                                    {
                                        var option = new DanbooruPostDaoOption()
                                        {
                                            Provider = _currProvider,
                                            Query = query,
                                            SearchTags = batchJob[i].TagQuery,
                                            Url = url,
                                            Referer = "",
                                            BlacklistedTags = TagBlacklist,
                                            BlacklistedTagsRegex = TagBlacklistRegex,
                                            BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
                                            IgnoredTags = TagIgnore,
                                            IgnoredTagsRegex = TagIgnoreRegex,
                                            IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
                                            IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
                                        };
                                        var resp = new DanbooruPostDao(response, option);
                                        responseMessage = resp.ResponseMessage;
                                        flag = false;
                                    }
                                }
                            }
                        }

                        if (ex.Message.Contains("(400)") || ex.Message.Contains("(403)") || ex.Message.Contains("(500)") || ex.Message.Contains("resolved"))
                        {
                            flag = false;
                        }

                        batchJob[i].Status = " ==> Error: " + (string.IsNullOrWhiteSpace(responseMessage) ? ex.Message : responseMessage) + Environment.NewLine;

                        if (!string.IsNullOrWhiteSpace(responseMessage))
                            UpdateLog("DoBatchJob", "Server Message: " + responseMessage, ex);
                        else
                            UpdateLog("DoBatchJob", "Error: " + message, ex);

                        if (cbxAbortOnError.Checked)
                        {
                            MessageBox.Show(message, "Batch Download");
                            break;
                        }
                    }
                    finally
                    {
                        BeginInvoke(del);
                        {
                            // Update progress bar
                            object[] myArray = new object[2];
                            myArray[0] = batchJob[i].ProcessedTotal;
                            if (d != null)
                            {
                                myArray[1] = d.PostCount < batchJob[i].Limit ? d.PostCount : batchJob[i].Limit;
                            }
                            else
                            {
                                myArray[1] = -1;
                            }
                            BeginInvoke(del2, myArray);
                        }
                    }

                    ++batchJob[i].CurrentPage;
                } while (flag);

                UpdateLog("DoBatchJob", "Batch Job #" + i + ": Done");
                if (batchJob[i].isError)
                {
                    batchJob[i].isCompleted = false;
                }
                else
                {
                    batchJob[i].isCompleted = true;
                }
                BeginInvoke(del);
            }
        }
    }

    BeginInvoke(bjd, new object[] { true });
    UpdateStatus2("Batch Job Completed!", true);
    {
        // hide progress bar
        object[] myArray = new object[2];
        myArray[0] = -1;
        myArray[1] = -1;
        BeginInvoke(del2, myArray);
    }
}
private void btnList_Click(object sender, EventArgs e)
{
    if (txtListFile.Text.Length > 0)
    {
        var option = new DanbooruPostDaoOption()
        {
            Provider = _currProvider,
            Url = txtListFile.Text,
            Referer = _currProvider.Url,
            Query = txtListFile.Text.Split('\\').Last(),
            SearchTags = "",
            BlacklistedTags = TagBlacklist,
            BlacklistedTagsRegex = TagBlacklistRegex,
            BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
            IgnoredTags = TagIgnore,
            IgnoredTagsRegex = TagIgnoreRegex,
            IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked
        };
        DanbooruPostDao newPosts = new DanbooruPostDao(option);
        LoadList(newPosts);
    }
}
/// <summary>
/// Get image list, return null if failed
/// </summary>
/// <param name="url"></param>
/// <param name="query"></param>
/// <param name="job"></param>
/// <returns></returns>
private DanbooruPostDao GetBatchImageList(String url, String query, DanbooruBatchJob job)
{
    DanbooruPostDao d = null;
    int currRetry = 0;
    int maxRetry = Convert.ToInt32(txtRetry.Text);
    int delay = Convert.ToInt32(txtDelay.Text);

    while (currRetry < maxRetry)
    {
        try
        {
            using (MemoryStream ms = new MemoryStream(_clientBatch.DownloadData(url)))
            {
                var option = new DanbooruPostDaoOption()
                {
                    Provider = job.Provider,
                    Query = query,
                    SearchTags = job.TagQuery,
                    Referer = url,
                    BlacklistedTags = TagBlacklist,
                    BlacklistedTagsRegex = TagBlacklistRegex,
                    BlacklistedTagsUseRegex = chkBlacklistTagsUseRegex.Checked,
                    IgnoredTags = TagIgnore,
                    IgnoredTagsRegex = TagIgnoreRegex,
                    IgnoredTagsUseRegex = chkIgnoreTagsUseRegex.Checked,
                    IsBlacklistOnlyForGeneral = chkBlacklistOnlyGeneral.Checked
                };
                d = new DanbooruPostDao(ms, option);
            }
            break;
        }
        catch (System.Net.WebException ex)
        {
            ++currRetry;
            UpdateLog("DoBatchJob", "Error Getting List (" + currRetry + " of " + maxRetry + "): " + ex.Message + " Wait for " + delay + "s.", ex);
            for (int wait = 0; wait < delay; ++wait)
            {
                //UpdateLog("DoBatchJob", "Wait for " + wait + " of " + delay);
                Thread.Sleep(1000);
            }
            UpdateLog("DoBatchJob", "Retrying...");
        }
    }
    return d;
}
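// Illustrative only: the retry-with-delay pattern used above, extracted into a reusable
// helper. This helper does not exist in the original code; it is a minimal sketch assuming
// the same retry-count and delay-in-seconds semantics as GetBatchImageList.
private static T RetryOnWebException<T>(Func<T> action, int maxRetry, int delaySeconds) where T : class
{
    for (int attempt = 1; attempt <= maxRetry; attempt++)
    {
        try
        {
            return action();
        }
        catch (System.Net.WebException)
        {
            if (attempt == maxRetry) break;       // give up after the last attempt
            Thread.Sleep(delaySeconds * 1000);    // wait before the next attempt
        }
    }
    return null;                                  // caller treats null as "failed"
}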