/// <summary>
/// Builds a human-readable report: total processed sample count, one line per
/// (frequency list size, time list size) pair, and any collected "interesting" samples.
/// </summary>
/// <returns>Multi-line report of the current processing state.</returns>
public override string ToString()
{
    var builder = new StringBuilder();
    builder.AppendLine("Processed " + ProcessedSamplesCount + " samples");

    // Snapshot both collections so concurrent writers can't change them mid-report.
    var listsSizes = ProcessingResults.ToArray();
    var interestingSamples = InterestingSamples.ToDictionary(x => x.Key, x => x.Value.ToArray());

    foreach (var listSize in listsSizes)
    {
        builder.AppendLine(
            "Frequency decoding list size " + listSize.Key.Item1
            + ", time decoding list size " + listSize.Key.Item2
            + ", " + listSize.Value + " samples");

        Tuple<FieldElement, FieldElement>[][] collectedSamples;
        if (interestingSamples.TryGetValue(listSize.Key, out collectedSamples))
        {
            builder.AppendLine("\tInteresting samples were collected:");
            foreach (var collectedSample in collectedSamples)
            {
                // Tuple<,>.ToString() already renders "(a, b)"; the old code re-wrapped
                // each element in a new identical Tuple (flagged "//???"), which produced
                // exactly the same text while allocating needlessly.
                builder.AppendLine("\t\t[" + string.Join(",", collectedSample.Select(x => x.ToString())) + "]");
            }
        }
    }
    return builder.ToString();
}
/// <summary>
/// Builds a human-readable report: total processed sample count, one line per
/// (frequency list size, time list size) pair, and any collected "interesting" samples.
/// The trailing line terminator is stripped from the result.
/// </summary>
/// <returns>Multi-line report of the current processing state, without a trailing newline.</returns>
public override string ToString()
{
    var builder = new StringBuilder();
    builder.AppendLine($"Processed {ProcessedSamplesCount} samples");

    // Snapshot both collections so concurrent writers can't change them mid-report.
    var listsSizes = ProcessingResults.ToArray();
    var interestingSamples = InterestingSamples.ToDictionary(x => x.Key, x => x.Value.ToArray());

    foreach (var listSize in listsSizes)
    {
        builder.AppendLine($"Frequency decoding list size {listSize.Key.Item1}, time decoding list size {listSize.Key.Item2}, {listSize.Value} samples");

        Tuple<FieldElement, FieldElement>[][] collectedSamples;
        if (interestingSamples.TryGetValue(listSize.Key, out collectedSamples))
        {
            builder.AppendLine("\tInteresting samples were collected:");
            foreach (var collectedSample in collectedSamples)
            {
                builder.AppendLine("\t\t[" + string.Join(",", collectedSample.Select(x => $"({x.Item1},{x.Item2})")) + "]");
            }
        }
    }

    // BUG FIX: AppendLine terminates lines with Environment.NewLine ("\r\n" on Windows);
    // TrimEnd('\n') alone left a dangling '\r' there. Strip both characters.
    return builder.ToString().TrimEnd('\r', '\n');
}
/// <summary>
/// Creates and persists a new <see cref="Order"/> for a winning bid, upserts the user's
/// stored payment token, decrements the supplier's remaining auto-win quota and notifies
/// the user by SMS.
/// </summary>
/// <param name="results">Payment processing results (card token, expiry, auth number, ...).</param>
/// <param name="userId">Identifier of the purchasing user.</param>
/// <param name="bidId">Identifier of the bid this order fulfils.</param>
/// <param name="gifts">Gift description string attached to the order.</param>
/// <param name="supplierId">Identifier of the supplier being paid.</param>
/// <param name="totalPrice">Total order price.</param>
/// <param name="source">Channel the order originated from.</param>
/// <returns>The saved <see cref="Order"/>.</returns>
public static Order GenerateNewOrder(ProcessingResults results, long userId, long bidId, string gifts, long supplierId, decimal totalPrice, Source source)
{
    // Called for its side effect only; the returned message id was never used.
    BIdMessageController.AddNewMessage(bidId, supplierId);

    var order = new Order
    {
        AppUserId = userId,
        BidId = bidId,
        CreateDate = DateTime.UtcNow,
        Last4Digits = results.Last4Digits,
        UserPaySupplierStatus = UserPaymentStatus.NotPayed,
        TotalPrice = totalPrice,
        Transaction = results.CardToken,
        ExpiryDate = results.CardExpiration,
        AuthNumber = results.AuthNumber,
        Gifts = gifts,
        SpecialInstructions = results.SpecialInstructions ?? "",
        NumOfPayments = results.NumOfPayments,
        Source = (int)source,
    };
    order.Save();

    // Upsert the stored payment token for this user.
    AppUserCard paymentToken = AppUserCard.FetchByAppUserId(userId) ?? new AppUserCard();
    paymentToken.AppUserId = userId;
    paymentToken.CardToken = results.CardToken;
    paymentToken.ExpiryDate = results.CardExpiration;
    paymentToken.Last4Digit = results.Last4Digits;
    if (!string.IsNullOrEmpty(results.PersonalId))
    {
        paymentToken.IdNumber = results.PersonalId;
    }
    paymentToken.Save();

    AppSupplier supplier = AppSupplier.FetchByID(supplierId);
    if (supplier != null)
    {
        // Consume one auto-win; never let the counter go below zero.
        supplier.MaxWinningsNum = (supplier.MaxWinningsNum > 0 ? supplier.MaxWinningsNum - 1 : 0);
        if (supplier.MaxWinningsNum == 0)
        {
            // NOTE(review): notification intentionally disabled in the original code:
            // SupplierNotification.SendNotificationMaxAutoModeMessage(supplier.SupplierId);
        }
        supplier.Save();
    }

    // BUG FIX: the original dereferenced AppUser.FetchByID(userId).Phone unconditionally
    // and threw NullReferenceException when the user record was missing.
    var appUser = AppUser.FetchByID(userId);
    if (appUser != null)
    {
        SMSController.sendNewBidSMS(appUser.Phone);
    }

    return order;
}
/// <summary>
/// Entry point for a single DocEngine run: discovers pending jobs under a fixed
/// directory, processes them, and logs success/failure counts plus total elapsed time.
/// </summary>
public async Task Run()
{
    // UtcNow is immune to DST/clock-zone jumps while measuring elapsed time (Now is not).
    DateTime startTime = DateTime.UtcNow;

    // Hope it works, what are we gonna do, log it?
    await this._logger.WriteLog(new LogEvent("DocEngine has started!", LogLevels.Info));

    // TODO: Get path from Config
    string path = @"F:\Work\Test";
    List<JobInfo> jobs = GetJobList(path);
    // FIX: message previously read "There are {n} in the directory" — missing the noun.
    await this._logger.WriteLog(new LogEvent(
        $"There are {jobs.Count} jobs in the directory: \"{path}\" that are ready to be processed.",
        LogLevels.Info));

    if (jobs.Count > 0)
    {
        await this._logger.WriteLog(new LogEvent($"Processing of {jobs.Count} files initiated.", LogLevels.Info));
        ProcessingResults results = await ProcessFiles(jobs);

        if (results.SuccessCount > 0)
        {
            await this._logger.WriteLog(new LogEvent(
                $"{results.SuccessCount} documents successfully processed.\n\nList of jobs:\n" +
                $"{results.SuccessJobs}", LogLevels.Info));
        }
        if (results.FailCount > 0)
        {
            // FIX: "Listof" typo in the warning message.
            await this._logger.WriteLog(new LogEvent(
                $"{results.FailCount} documents failed to process.\n\nList of jobs:\n" +
                $"{results.FailJobs}", LogLevels.Warning));
        }
    }

    int runTimeSeconds = (int)(DateTime.UtcNow - startTime).TotalSeconds;
    await this._logger.WriteLog(new LogEvent(
        $"DocEngine has finished! Total elapsed time: {runTimeSeconds} seconds", LogLevels.Info));
}
/// <summary>
/// Records the outcome of a single decoding attempt: bumps the counter for the
/// (frequency, time) list-size pair and, for time-domain list sizes above one,
/// stores a copy of the decoded codeword for later inspection.
/// </summary>
/// <param name="decodedCodeword">The codeword that was decoded.</param>
/// <param name="frequencyDecodingListSize">List size produced by frequency-domain decoding.</param>
/// <param name="timeDecodingListSize">List size produced by time-domain decoding.</param>
public void ReportDecodingListsSizes(Tuple <FieldElement, FieldElement>[] decodedCodeword, int frequencyDecodingListSize, int timeDecodingListSize)
{
    var sizesKey = Tuple.Create(frequencyDecodingListSize, timeDecodingListSize);
    ProcessingResults.AddOrUpdate(sizesKey, 1, (key, count) => count + 1);

    if (timeDecodingListSize > 1)
    {
        // NOTE(review): only Item2 is deep-copied, matching the original behavior —
        // presumably Item1 is treated as immutable here; confirm before relying on it.
        var clonedCodeword = decodedCodeword
            .Select(pair => new Tuple <FieldElement, FieldElement>(pair.Item1, new FieldElement(pair.Item2)))
            .ToArray();

        InterestingSamples.AddOrUpdate(
            sizesKey,
            new List <Tuple <FieldElement, FieldElement>[]> { clonedCodeword },
            (key, samples) =>
            {
                samples.Add(clonedCodeword);
                return samples;
            });
    }

    Interlocked.Increment(ref _processedSamplesCount);
}
/// <summary>
/// Processes a list of submission IDs (last to first): fetches each submission page,
/// resolves the direct download URL, extracts title/date/description, optionally saves
/// the description HTML, and downloads the file to a template-derived path.
/// Page fetches and downloads are retried up to 3 times, 2 s apart.
/// </summary>
/// <param name="subs">Submission IDs as numeric strings; empty entries are skipped.</param>
/// <param name="needDescription">When true, also saves the submission description HTML.</param>
/// <returns>Counts and ID lists of successfully / unsuccessfully processed submissions.</returns>
public async Task <ProcessingResults> ProcessSubmissionsList(List <string> subs, bool needDescription)
{
    Console.WriteLine("Processing submissions list...");
    ProcessingResults res = new ProcessingResults();

    // iterate over all the submissions in list (reverse order)
    for (int i = subs.Count - 1; i >= 0; i--)
    {
        string subId = subs[i];
        if (string.IsNullOrEmpty(subId))
        {
            continue; // don't care about empty strings
        }
        Console.WriteLine("> Processing submission #" + subId);

        // check if in DB already
        try
        {
            if (SubmissionsDB.DB.Exists(uint.Parse(subId)) && GlobalSettings.Settings.downloadOnlyOnce)
            {
                Console.WriteLine("Skipped (present in DB)");
                continue;
            }
        }
        catch
        {
            Console.WriteLine("Unexpected error (DB presence check failed)!");
            continue;
        }

        string subUrl = "https://www.furaffinity.net/view/" + subId;

        // get submission page, retrying on network errors
        int attempts = 3;
        string cpage = "";
beforeawait:
        try
        {
            Console.WriteLine("Getting page: " + subUrl);
            cpage = await http.GetStringAsync(subUrl);
        }
        catch (Exception E)
        {
            Console.WriteLine("GET request error (" + subUrl + "): " + E.Message);
            attempts--;
            // FIX: was Thread.Sleep — never block the thread inside an async method.
            await Task.Delay(2000);
            if (attempts > 0)
            {
                goto beforeawait;
            }
            else
            {
                Console.WriteLine("Giving up on #" + subId);
                res.failedToGetPage.Add(subId);
                continue;
            }
        }

        // process submission page: locate the download button link
        string downbtnkey = "<a href=\"//d.facdn.net/";
        SubmissionProps sp = new SubmissionProps();
        sp.SUBMID = subId;
        int keypos = cpage.IndexOf(downbtnkey, StringComparison.Ordinal);
        if (keypos < 0)
        {
            Console.WriteLine("[Warning] got page, but it doesn't contain any download links.");
            res.failedToGetPage.Add(subId);
            continue;
        }
        cpage = cpage.Substring(keypos);
        cpage = cpage.Substring(cpage.IndexOf("/", StringComparison.Ordinal));
        sp.URL = "https:" + cpage.Substring(0, cpage.IndexOf("\"", StringComparison.Ordinal));

        // processing submission description; also extracts submission date and title
        {
            Utils.FillPropertiesFromDateTime(DateTime.Now, sp); // set Now as a fallback date
            sp.TITLE = "Unknown";                               // fallback title

            // title
            const string key_title = @"<div class=""submission-title"">";
            const string key_enddiv = "</div>";
            cpage = cpage.Substring(cpage.IndexOf(key_title, StringComparison.Ordinal));
            // take everything up to the second closing </div> (title div is nested)
            string sub_title_div = cpage.Substring(0, cpage.IndexOf(key_enddiv, cpage.IndexOf(key_enddiv, StringComparison.Ordinal) + 1, StringComparison.Ordinal) + key_enddiv.Length);
            var titleMatch = Regex.Match(sub_title_div, "<h2><p>(.+?)</p></h2>", RegexOptions.CultureInvariant);
            if (titleMatch.Success)
            {
                sp.TITLE = Utils.StripIllegalFilenameChars(titleMatch.Groups[1].Value);
                Console.WriteLine("Title: " + sp.TITLE);
            }
            else
            {
                Console.WriteLine("Warning :: no submission title found!");
            }

            // replace relative date with the absolute one
            string sub_date_strong = "";
            var dateMatch = Regex.Match(cpage, "<strong.+?title=\"(.+?)\" class=\"popup_date\">(.+?)<.+?</strong>", RegexOptions.CultureInvariant);
            if (dateMatch.Success)
            {
                string dateMatchVal = dateMatch.Value;
                string dateTimeStr = dateMatch.Groups[1].Value;      // fixed format date
                string dateTimeStrFuzzy = dateMatch.Groups[2].Value;
                // depending on user settings, fuzzy and fixed times may be swapped
                if (dateTimeStrFuzzy.Contains(" PM") || dateTimeStrFuzzy.Contains(" AM"))
                {
                    var temporary = dateTimeStr;
                    dateTimeStr = dateTimeStrFuzzy;
                    dateTimeStrFuzzy = temporary;
                }
                // replace relative date with a fixed format one
                sub_date_strong = dateMatchVal.Replace(dateTimeStrFuzzy, dateTimeStr);
                // parse date
                dateTimeStr = dateTimeStr.Replace(",", "");
                {
                    const string dateFormat = "MMM d yyyy hh:mm tt";
                    try
                    {
                        DateTime dateTime = DateTime.ParseExact(dateTimeStr, dateFormat, CultureInfo.InvariantCulture);
                        Utils.FillPropertiesFromDateTime(dateTime, sp);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine("Warning :: cannot parse date :: " + e.Message);
                        Console.WriteLine("Info :: date string :: " + dateTimeStr);
                    }
                }
            }
            else
            {
                // FIX: "extact" typo in the original warning message.
                Console.WriteLine("Warning :: unable to extract submission date");
            }

            // extract description
            const string key_desc = @"<div class=""submission-description user-submitted-links"">";
            cpage = cpage.Substring(cpage.IndexOf(key_desc, StringComparison.Ordinal));
            cpage = cpage.Substring(0, cpage.IndexOf(key_enddiv, cpage.IndexOf(key_enddiv, StringComparison.Ordinal) + 1, StringComparison.Ordinal) + key_enddiv.Length);
            // absolutize relative links so the saved HTML works offline
            cpage = cpage.Replace("href=\"/", "href=\"https://furaffinity.net/");
            cpage = cpage.Replace("src=\"//", "src=\"https://");
            cpage = @"<div class=""submission-description-container link-override""> <div class=""submission-title""> <h2 class=""submission-title-header"">{{{title}}}</h2> Posted {{{date}}} </div><hr>".Replace("{{{title}}}", sp.TITLE).Replace("{{{date}}}", sub_date_strong) + cpage;
        }

        // derive artist / filename parts from the download URL
        sp.ARTIST = sp.URL.Substring(sp.URL.LastIndexOf(@"/art/", StringComparison.Ordinal) + 5);
        sp.ARTIST = sp.ARTIST.Substring(0, sp.ARTIST.IndexOf('/'));
        sp.FILEFULL = sp.URL.Substring(sp.URL.LastIndexOf('/') + 1);
        sp.FILEFULL = Utils.StripIllegalFilenameChars(sp.FILEFULL);
        sp.FILEID = sp.FILEFULL.Substring(0, sp.FILEFULL.IndexOf('.'));
        if (sp.FILEFULL.IndexOf('_') >= 0) // valid filename (some names on FA are corrupted and contain nothing but '.' after ID)
        {
            sp.FILEPART = sp.FILEFULL.Substring(sp.FILEFULL.IndexOf('_') + 1);
            if (sp.FILEFULL.LastIndexOf('.') >= 0) // has extension
            {
                sp.EXT = (sp.FILEFULL + " ").Substring(sp.FILEFULL.LastIndexOf('.') + 1).TrimEnd();
                if (sp.EXT.Length == 0)
                {
                    sp.EXT = @"jpg";
                }
            }
            else
            {
                sp.EXT = @"jpg";
            }
        }
        else
        {
            sp.FILEPART = @"unknown.jpg";
            sp.EXT = @"jpg";
        }

        // apply template(s): %FieldName% placeholders map to SubmissionProps string fields
        string fname = GlobalSettings.Settings.filenameTemplate;
        string dfname = GlobalSettings.Settings.descrFilenameTemplate;
        foreach (FieldInfo fi in sp.GetType().GetFields(BindingFlags.Instance | BindingFlags.Public))
        {
            if (fi.FieldType == typeof(string))
            {
                fname = fname.Replace("%" + fi.Name + "%", (string)fi.GetValue(sp));
                dfname = dfname.Replace("%" + fi.Name + "%", (string)fi.GetValue(sp));
            }
        }

        // make sure directories exist
        string fnamefull = Path.Combine(GlobalSettings.Settings.downloadPath, fname);
        string dfnamefull = Path.Combine(GlobalSettings.Settings.downloadPath, dfname);
        try
        {
            Directory.CreateDirectory(Path.GetDirectoryName(fnamefull));
            Directory.CreateDirectory(Path.GetDirectoryName(dfnamefull));
        }
        catch
        {
            Console.WriteLine("Failed to make sure target directories do exist.");
            break; // nothing below can succeed without target directories
        }

        // save description
        if (needDescription)
        {
            try
            {
                File.WriteAllText(dfnamefull, cpage);
                Console.WriteLine("description saved to filename:" + dfname);
            }
            catch (Exception E)
            {
                Console.WriteLine("Error saving description:" + E.Message);
            }
        }

        // download file
        Console.WriteLine("target filename: " + fname);
        if (File.Exists(fnamefull))
        {
            SubmissionsDB.DB.AddSubmission(uint.Parse(subId));
            Console.WriteLine("Already exists, continuing~");
            continue;
        }
        int fattempts = 3;
fbeforeawait:
        try
        {
            Console.WriteLine("Downloading: " + sp.URL);
            using (
                Stream contentStream = await (
                    await http.GetAsync(sp.URL, HttpCompletionOption.ResponseHeadersRead)
                ).Content.ReadAsStreamAsync(),
                stream = new FileStream(
                    fnamefull, FileMode.Create, FileAccess.Write, FileShare.None,
                    1024 * 1024 /*Mb*/, true)
            )
            {
                await ReadNetworkStream(contentStream, stream, 5000);
                // await contentStream.CopyToAsync(stream); // this works, but may hang forever in case of network errors
                SubmissionsDB.DB.AddSubmission(uint.Parse(subId));
            }
        }
        catch (Exception E)
        {
            // write error message
            if (E is ObjectDisposedException)
            {
                Console.WriteLine("Network error (data receive timeout)");
            }
            else
            {
                Console.WriteLine("GET request error (file " + sp.FILEID + "): " + E.Message);
            }
            // remove incomplete download
            if (File.Exists(fnamefull))
            {
                File.Delete(fnamefull);
            }
            // try again or abort operation
            fattempts--;
            // FIX: was Thread.Sleep — never block the thread inside an async method.
            await Task.Delay(2000);
            if (fattempts > 0)
            {
                goto fbeforeawait;
            }
            else
            {
                Console.WriteLine("Giving up on downloading {0}", subId);
                res.failedToDownload.Add(subId);
                continue;
            }
        }

        Console.WriteLine("Done: #" + subId);
        TaskbarProgress.SetValue(currentConsoleHandle, subs.Count - i, subs.Count);
        res.processedPerfectly++;
    }

    // writing results
    try
    {
        if (res.failedToGetPage.Count > 0 || res.failedToDownload.Count > 0)
        {
            File.WriteAllLines(Path.Combine(GlobalSettings.Settings.systemPath, "get_sub_page_failed.log"), res.failedToGetPage);
            // BUG FIX: this log was previously written from res.failedToGetPage,
            // so the failed-download list was never persisted.
            File.WriteAllLines(Path.Combine(GlobalSettings.Settings.systemPath, "download_failed.log"), res.failedToDownload);
        }
    }
    catch (Exception E)
    {
        Console.WriteLine("Failed to save list of subs with issues: " + E.Message);
    }

    // save DB
    SubmissionsDB.Save();
    // stop progress indicating
    TaskbarProgress.SetState(currentConsoleHandle, TaskbarProgress.TaskbarStates.NoProgress);
    // return result, actually
    return(res);
}
/// <summary>
/// Processes a list of submission IDs (last to first): fetches each submission page,
/// resolves the direct download URL, optionally extracts and saves the description HTML,
/// and downloads the file to a template-derived path. Page fetches and downloads are
/// retried up to 3 times, 2 s apart.
/// </summary>
/// <param name="subs">Submission IDs as numeric strings; empty entries are skipped.</param>
/// <param name="needDescription">When true, also saves the submission description HTML.</param>
/// <returns>Counts and ID lists of successfully / unsuccessfully processed submissions.</returns>
public async Task <ProcessingResults> ProcessSubmissionsList(List <string> subs, bool needDescription)
{
    Console.WriteLine("Processing submissions list...");
    ProcessingResults res = new ProcessingResults();

    // iterate over all the submissions in list (reverse order)
    for (int i = subs.Count - 1; i >= 0; i--)
    {
        string subId = subs[i];
        // idiom: was `subId == null || subId.CompareTo("") == 0`
        if (string.IsNullOrEmpty(subId))
        {
            continue; // don't care about empty strings
        }
        Console.WriteLine("> Processing submission #" + subId);

        // check if in DB already
        try
        {
            if (SubmissionsDB.DB.Exists(uint.Parse(subId)) && GlobalSettings.Settings.downloadOnlyOnce)
            {
                Console.WriteLine("Skipped (present in DB)");
                continue;
            }
        }
        catch
        {
            Console.WriteLine("Unexpected error (DB presence check failed)!");
            continue;
        }

        string subUrl = "https://www.furaffinity.net/view/" + subId;

        // get submission page, retrying on network errors
        int attempts = 3;
        string cpage = "";
beforeawait:
        try
        {
            Console.WriteLine("Getting page: " + subUrl);
            cpage = await http.GetStringAsync(subUrl);
        }
        catch (Exception E)
        {
            Console.WriteLine("GET request error (" + subUrl + "): " + E.Message);
            attempts--;
            // FIX: was Thread.Sleep — never block the thread inside an async method.
            await Task.Delay(2000);
            if (attempts > 0)
            {
                goto beforeawait;
            }
            else
            {
                Console.WriteLine("Giving up on #" + subId);
                res.failedToGetPage.Add(subId);
                continue;
            }
        }

        // process submission page: locate the download button link
        string downbtnkey = "<a href=\"//d.facdn.net/";
        string desckey = "<div class=\"submission-description-container";
        SubmissionProps sp = new SubmissionProps();
        sp.SUBMID = subId;
        int keypos = cpage.IndexOf(downbtnkey, StringComparison.Ordinal);
        if (keypos < 0)
        {
            Console.WriteLine("[Warning] got page, but it doesn't contain any download links.");
            res.failedToGetPage.Add(subId);
            continue;
        }
        cpage = cpage.Substring(keypos);
        cpage = cpage.Substring(cpage.IndexOf("/", StringComparison.Ordinal));
        sp.URL = "https:" + cpage.Substring(0, cpage.IndexOf("\"", StringComparison.Ordinal));

        if (needDescription)
        {
            // cut out the description container (up to its second closing </div>)
            cpage = cpage.Substring(cpage.IndexOf(desckey, StringComparison.Ordinal));
            string desckeyend = "</div>";
            cpage = cpage.Substring(0, cpage.IndexOf(desckeyend, cpage.IndexOf(desckeyend, StringComparison.Ordinal) + 1, StringComparison.Ordinal) + desckeyend.Length);
            // absolutize relative links so the saved HTML works offline
            cpage = cpage.Replace("href=\"/", "href=\"https://furaffinity.net/");
            cpage = cpage.Replace("src=\"//", "src=\"https://");
        }

        // derive artist / filename parts from the download URL
        sp.ARTIST = sp.URL.Substring(sp.URL.LastIndexOf(@"/art/", StringComparison.Ordinal) + 5);
        sp.ARTIST = sp.ARTIST.Substring(0, sp.ARTIST.IndexOf('/'));
        sp.FILEFULL = sp.URL.Substring(sp.URL.LastIndexOf('/') + 1);
        sp.FILEFULL = string.Concat(sp.FILEFULL.Split(Path.GetInvalidFileNameChars(), StringSplitOptions.RemoveEmptyEntries));
        sp.FILEID = sp.FILEFULL.Substring(0, sp.FILEFULL.IndexOf('.'));
        if (sp.FILEFULL.IndexOf('_') >= 0) // valid filename (some names on FA are corrupted and contain nothing but '.' after ID)
        {
            sp.FILEPART = sp.FILEFULL.Substring(sp.FILEFULL.IndexOf('_') + 1);
            if (sp.FILEFULL.LastIndexOf('.') >= 0) // has extension
            {
                sp.EXT = (sp.FILEFULL + " ").Substring(sp.FILEFULL.LastIndexOf('.') + 1).TrimEnd();
                if (sp.EXT.Length == 0)
                {
                    sp.EXT = @"jpg";
                }
            }
            else
            {
                sp.EXT = @"jpg";
            }
        }
        else
        {
            sp.FILEPART = @"unknown.jpg";
            sp.EXT = @"jpg";
        }

        // apply template(s): %FieldName% placeholders map to SubmissionProps string fields
        string fname = GlobalSettings.Settings.filenameTemplate;
        string dfname = GlobalSettings.Settings.descrFilenameTemplate;
        foreach (FieldInfo fi in sp.GetType().GetFields(BindingFlags.Instance | BindingFlags.Public))
        {
            if (fi.FieldType == typeof(string))
            {
                fname = fname.Replace("%" + fi.Name + "%", (string)fi.GetValue(sp));
                dfname = dfname.Replace("%" + fi.Name + "%", (string)fi.GetValue(sp));
            }
        }

        // make sure directories exist
        string fnamefull = Path.Combine(GlobalSettings.Settings.downloadPath, fname);
        string dfnamefull = Path.Combine(GlobalSettings.Settings.downloadPath, dfname);
        try
        {
            Directory.CreateDirectory(Path.GetDirectoryName(fnamefull));
            Directory.CreateDirectory(Path.GetDirectoryName(dfnamefull));
        }
        catch
        {
            Console.WriteLine("Failed to make sure target directories do exist.");
            break; // nothing below can succeed without target directories
        }

        // save description
        if (needDescription)
        {
            try
            {
                File.WriteAllText(dfnamefull, cpage);
                Console.WriteLine("description saved to filename:" + dfname);
            }
            catch (Exception E)
            {
                Console.WriteLine("Error saving description:" + E.Message);
            }
        }

        // download file
        Console.WriteLine("target filename: " + fname);
        if (File.Exists(fnamefull))
        {
            SubmissionsDB.DB.AddSubmission(uint.Parse(subId));
            Console.WriteLine("Already exists, continuing~");
            continue;
        }
        int fattempts = 3;
fbeforeawait:
        try
        {
            Console.WriteLine("Downloading: " + sp.URL);
            using (
                Stream contentStream = await (
                    await http.GetAsync(sp.URL, HttpCompletionOption.ResponseHeadersRead)
                ).Content.ReadAsStreamAsync(),
                stream = new FileStream(
                    fnamefull, FileMode.Create, FileAccess.Write, FileShare.None,
                    1024 * 1024 /*Mb*/, true)
            )
            {
                await ReadNetworkStream(contentStream, stream, 5000);
                // await contentStream.CopyToAsync(stream); // this works, but may hang forever in case of network errors
                SubmissionsDB.DB.AddSubmission(uint.Parse(subId));
            }
        }
        catch (Exception E)
        {
            // write error message
            if (E is ObjectDisposedException)
            {
                Console.WriteLine("Network error (data receive timeout)");
            }
            else
            {
                Console.WriteLine("GET request error (file " + sp.FILEID + "): " + E.Message);
            }
            // remove incomplete download
            if (File.Exists(fnamefull))
            {
                File.Delete(fnamefull);
            }
            // try again or abort operation
            fattempts--;
            // FIX: was Thread.Sleep — never block the thread inside an async method.
            await Task.Delay(2000);
            if (fattempts > 0)
            {
                goto fbeforeawait;
            }
            else
            {
                Console.WriteLine("Giving up on downloading {0}", subId);
                res.failedToDownload.Add(subId);
                continue;
            }
        }

        Console.WriteLine("Done: #" + subId);
        TaskbarProgress.SetValue(currentConsoleHandle, subs.Count - i, subs.Count);
        res.processedPerfectly++;
    }

    // writing results
    try
    {
        if (res.failedToGetPage.Count > 0 || res.failedToDownload.Count > 0)
        {
            File.WriteAllLines(Path.Combine(GlobalSettings.Settings.systemPath, "get_sub_page_failed.log"), res.failedToGetPage);
            // BUG FIX: this log was previously written from res.failedToGetPage,
            // so the failed-download list was never persisted.
            File.WriteAllLines(Path.Combine(GlobalSettings.Settings.systemPath, "download_failed.log"), res.failedToDownload);
        }
    }
    catch (Exception E)
    {
        Console.WriteLine("Failed to save list of subs with issues: " + E.Message);
    }

    // save DB
    SubmissionsDB.Save();
    // stop progress indicating
    TaskbarProgress.SetState(currentConsoleHandle, TaskbarProgress.TaskbarStates.NoProgress);
    // return result, actually
    return(res);
}
/// <summary>
/// Processes a list of submission entries (last to first). Each entry is "ID#FileID@attributes"
/// where everything except ID is optional. Fetches the submission page, resolves the direct
/// download URL, detects scraps, extracts title/date/description, optionally saves the
/// description HTML, and downloads the file. In update mode, re-downloads submissions whose
/// remote file ID differs from the one stored in the DB. Network operations are retried up
/// to 3 times, 2 s apart.
/// </summary>
/// <param name="subs">Submission entries; empty entries are skipped.</param>
/// <param name="needDescription">When true, also saves the submission description HTML.</param>
/// <param name="updateMode">When true, re-checks already-known submissions for updated files.</param>
/// <returns>Counts and ID lists of successfully / unsuccessfully processed submissions.</returns>
public async Task <ProcessingResults> ProcessSubmissionsList(List <string> subs, bool needDescription, bool updateMode = false)
{
    Console.WriteLine("Processing submissions list...");
    ProcessingResults res = new ProcessingResults();

    // iterate over all the submissions in list (reverse order)
    for (int i = subs.Count - 1; i >= 0; i--)
    {
        // expected format: ID#FileID@attributes — everything except ID is optional
        string subStr = subs[i];
        if (string.IsNullOrEmpty(subStr))
        {
            continue; // don't care about empty strings
        }

        string subId;
        uint subIdInt = 0;
        uint subFid = 0;
        uint subInitFid = 0;
        bool aScraps = false;
        const string subIdRegex = @"^(?<id>[0-9]+?)(#(?<fid>[0-9]+?)){0,1}(@(?<attr>.+?)){0,1}$";
        var subIdMatch = Regex.Match(subStr, subIdRegex);
        if (subIdMatch.Success)
        {
            subId = subIdMatch.Groups["id"].Value;
            uint.TryParse(subId, out subIdInt);
            if (subIdMatch.Groups["fid"].Success)
            {
                uint.TryParse(subIdMatch.Groups["fid"].Value, out subFid);
            }
            /// Attributes section has only been used for (terrible) scraps detection;
            /// a better method is now implemented, making the section useless
            //if (subIdMatch.Groups["attr"].Success)
            //{
            //    string attributes = subIdMatch.Groups["attr"].Value;
            //    if (attributes.Contains("s")) aScraps = true;
            //}
        }
        else
        {
            Console.WriteLine("Error :: Malformed submission ID: " + subStr);
            continue;
        }

        uint dbSubFid = SubmissionsDB.DB.GetFileId(subIdInt);
        bool dbSubExists = SubmissionsDB.DB.Exists(subIdInt);
        Console.WriteLine(string.Format("> Processing submission {0} {1}", subId,
            subFid > 0 ? string.Format("(file id {0})", subFid) : ""));

        // Skip submissions that can be skipped without making any network requests
        try
        {
            if (dbSubExists && GlobalSettings.Settings.downloadOnlyOnce)
            {
                // can skip at lowest cost if either:
                // * not in update mode
                // * file ID is known and matches the one stored in the DB
                if ((!updateMode) || (updateMode && dbSubFid == subFid && dbSubFid != 0))
                {
                    Console.WriteLine("Skipped (present in DB)");
                    continue;
                }
                else
                {
                    Console.WriteLine("Submission is present in the DB, but may have been updated; re-checking~");
                }
            }
        }
        catch
        {
            Console.WriteLine("Unexpected error (DB presence check failed)!");
            continue;
        }

        // get submission page, retrying on network errors
        string subUrl = "https://www.furaffinity.net/view/" + subId;
        int attempts = 3;
        string cpage = "";
beforeawait:
        try
        {
            Console.WriteLine("Getting page: " + subUrl);
            cpage = await http.GetStringAsync(subUrl);
        }
        catch (Exception E)
        {
            Console.WriteLine("GET request error (" + subUrl + "): " + E.Message);
            attempts--;
            // FIX: was Thread.Sleep — never block the thread inside an async method.
            await Task.Delay(2000);
            if (attempts > 0)
            {
                goto beforeawait;
            }
            else
            {
                Console.WriteLine("Giving up on #" + subId);
                res.failedToGetPage.Add(subId);
                continue;
            }
        }

        // process submission page: locate the download button link (several CDN hosts exist)
        var downbtnkeys = new string[]
        {
            "<a href=\"//d.facdn.net/",
            "<a href=\"//d2.facdn.net/",
            "<a href=\"//d.furaffinity.net/"
        };
        SubmissionProps sp = new SubmissionProps();
        sp.SUBMID = subId;
        int keypos = -1;
        foreach (var downbtnkey in downbtnkeys)
        {
            keypos = cpage.IndexOf(downbtnkey, StringComparison.Ordinal);
            if (keypos >= 0)
            {
                break;
            }
        }
        if (keypos < 0)
        {
            Console.WriteLine("[Error] got page, but it doesn't contain any download links.");
            res.failedToGetPage.Add(subId);
            continue;
        }
        cpage = cpage.Substring(keypos);
        cpage = cpage.Substring(cpage.IndexOf("/", StringComparison.Ordinal));
        sp.URL = "https:" + cpage.Substring(0, cpage.IndexOf("\"", StringComparison.Ordinal));

        #region download URL parsing
        bool extensionInvalid = false; // future use, possibly come up with an extension that makes sense on a case by case basis
        const string urlComponentsRegex = @"\/art\/(?<artist>.+?)\/.*?(?<curfid>\d+?)\/(?<fid>.+?)\.(?<fname>.*)$";
        var urlCompMatch = Regex.Match(sp.URL, urlComponentsRegex);
        if (urlCompMatch.Success)
        {
            sp.ARTIST = urlCompMatch.Groups["artist"].Value;
            sp.CURFILEID = urlCompMatch.Groups["curfid"].Value;
            uint.TryParse(sp.CURFILEID, out subFid);
            sp.FILEID = urlCompMatch.Groups["fid"].Value;
            uint.TryParse(sp.FILEID, out subInitFid);
            string filename = urlCompMatch.Groups["fname"].Value;
            /// original filename usually follows this pattern:
            ///   $file_id.$artist_originalFileName.ext
            ///            [^ "fname" group value  ]
            /// however, some old (~2006) submissions use this pattern instead:
            ///   $file_id.$artist.originalFileName.ext
            /// it is also quite common for the fname to be blank, i.e.
            ///   $file_id.
            /// in this case we have no choice but to come up with our own name
            var fnameCheckMatch = Regex.Match(filename, string.Format(@"^{0}[_.](.+)", Regex.Escape(sp.ARTIST)));
            if (fnameCheckMatch.Success)
            {
                var filepart = fnameCheckMatch.Groups[1].Value;
                if (filepart.EndsWith(".") || !filepart.Contains(".")) // no extension or an empty one
                {
                    extensionInvalid = true;
                    Console.WriteLine("Info :: missing filename extension, assuming .jpg");
                    if (filepart.EndsWith("."))
                    {
                        filepart = filepart.Substring(0, filepart.Length - 1) + ".jpg";
                    }
                    else
                    {
                        filepart = filepart + ".jpg";
                    }
                }
                var filepartDotSplit = filepart.Split(new char[] { '.' });
                sp.FILEPART = filepart;
                sp.FILEPARTNE = string.Join(".", filepartDotSplit.Take(filepartDotSplit.Length - 1));
                sp.EXT = Utils.StripIllegalFilenameChars(filepartDotSplit.Last());
            }
            else // completely broken filenames get replaced with "unknown.jpg"
            {
                Console.WriteLine("Info :: broken filename detected, replacing with \"unknown.jpg\"");
                sp.FILEPART = "unknown.jpg";
                sp.FILEPARTNE = "unknown";
                sp.EXT = "jpg";
                extensionInvalid = true;
            }
            sp.FILEFULL = sp.FILEID + "." + sp.ARTIST + "_" + sp.FILEPART;
            sp.FILEFULL = Utils.StripIllegalFilenameChars(sp.FILEFULL);
            sp.FILEPART = Utils.StripIllegalFilenameChars(sp.FILEPART);
        }
        else
        {
            Console.WriteLine("Error: could not make sense of the URL for submission " + subId);
            res.failedToDownload.Add(subId);
            continue;
        }
        #endregion

        #region scraps detection, submission date, title and description
        {
            const string key_title = @"<div class=""submission-title"">";
            const string key_enddiv = "</div>";
            var submTitlePos = cpage.IndexOf(key_title, StringComparison.Ordinal);

            // scraps check: if there is a link to /$user/scraps before the submission title, it's in scraps
            var scrapsCheckMatch = Regex.Match(cpage, string.Format(@"href=""/scraps/{0}/""", Regex.Escape(sp.ARTIST)));
            if (scrapsCheckMatch.Success && scrapsCheckMatch.Index < submTitlePos)
            {
                Console.WriteLine("Location: scraps");
                aScraps = true;
            }
            else
            {
                Console.WriteLine("Location: main gallery");
                aScraps = false;
            }

            Utils.FillPropertiesFromDateTime(DateTime.Now, sp); // set Now as a fallback date
            sp.TITLE = "Unknown";                               // fallback title

            // title
            cpage = cpage.Substring(submTitlePos);
            string sub_title_div = cpage.Substring(0, cpage.IndexOf(key_enddiv, cpage.IndexOf(key_enddiv, StringComparison.Ordinal) + 1, StringComparison.Ordinal) + key_enddiv.Length);
            var titleMatch = Regex.Match(sub_title_div, "<h2><p>(.+?)</p></h2>", RegexOptions.CultureInvariant);
            if (titleMatch.Success)
            {
                sp.TITLE = Utils.StripIllegalFilenameChars(System.Net.WebUtility.HtmlDecode(titleMatch.Groups[1].Value));
                Console.WriteLine("Title: " + sp.TITLE);
            }
            else
            {
                Console.WriteLine("Warning :: no submission title found!");
            }

            // replace relative date with the absolute one
            string sub_date_strong = "";
            var dateMatch = Regex.Match(cpage, "<strong.+?title=\"(.+?)\" class=\"popup_date\">(.+?)<.+?</strong>", RegexOptions.CultureInvariant);
            if (dateMatch.Success)
            {
                string dateMatchVal = dateMatch.Value;
                string dateTimeStr = dateMatch.Groups[1].Value;      // fixed format date
                string dateTimeStrFuzzy = dateMatch.Groups[2].Value;
                // depending on user settings, fuzzy and fixed times may be swapped
                if (dateTimeStrFuzzy.Contains(" PM") || dateTimeStrFuzzy.Contains(" AM"))
                {
                    var temporary = dateTimeStr;
                    dateTimeStr = dateTimeStrFuzzy;
                    dateTimeStrFuzzy = temporary;
                }
                // replace relative date with a fixed format one
                sub_date_strong = dateMatchVal.Replace(dateTimeStrFuzzy, dateTimeStr);
                // parse date
                dateTimeStr = dateTimeStr.Replace(",", "");
                {
                    const string dateFormat = "MMM d yyyy hh:mm tt";
                    try
                    {
                        DateTime dateTime = DateTime.ParseExact(dateTimeStr, dateFormat, CultureInfo.InvariantCulture);
                        Utils.FillPropertiesFromDateTime(dateTime, sp);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine("Warning :: cannot parse date :: " + e.Message);
                        Console.WriteLine("Info :: date string :: " + dateTimeStr);
                    }
                }
            }
            else
            {
                // FIX: "extact" typo in the original warning message.
                Console.WriteLine("Warning :: unable to extract submission date");
            }

            // extract description
            const string key_desc = @"<div class=""submission-description user-submitted-links"">";
            cpage = cpage.Substring(cpage.IndexOf(key_desc, StringComparison.Ordinal));
            cpage = cpage.Substring(0, cpage.IndexOf(key_enddiv, cpage.IndexOf(key_enddiv, StringComparison.Ordinal) + 1, StringComparison.Ordinal) + key_enddiv.Length);
            // absolutize relative links so the saved HTML works offline
            cpage = cpage.Replace("href=\"/", "href=\"https://furaffinity.net/");
            cpage = cpage.Replace("src=\"//", "src=\"https://");
            cpage = @"<div class=""submission-description-container link-override""> <div class=""submission-title""> <h2 class=""submission-title-header"">{{{title}}}</h2> Posted {{{date}}} </div><hr>".Replace("{{{title}}}", sp.TITLE).Replace("{{{date}}}", sub_date_strong) + cpage;
        }
        #endregion

        // apply template(s): %FieldName% placeholders map to SubmissionProps string fields
        string fname = GlobalSettings.Settings.filenameTemplate;
        string dfname = GlobalSettings.Settings.descrFilenameTemplate;
        var scrapsTemplate = aScraps ? GlobalSettings.Settings.scrapsTemplateActive : GlobalSettings.Settings.scrapsTemplatePassive;
        fname = fname.Replace("%SCRAPS%", scrapsTemplate);
        dfname = dfname.Replace("%SCRAPS%", scrapsTemplate);
        foreach (FieldInfo fi in sp.GetType().GetFields(BindingFlags.Instance | BindingFlags.Public))
        {
            if (fi.FieldType == typeof(string))
            {
                fname = fname.Replace("%" + fi.Name + "%", (string)fi.GetValue(sp));
                dfname = dfname.Replace("%" + fi.Name + "%", (string)fi.GetValue(sp));
#if DEBUG_PRINT_ALL_TEMPLATE_VALS
                // debug only: output all template values:
                Console.WriteLine(string.Format("+++ {0} = {1}", "%" + fi.Name + "%", (string)fi.GetValue(sp)));
#endif
            }
        }

        // make sure directories exist
        string fnamefull = Path.Combine(GlobalSettings.Settings.downloadPath, fname);
        string dfnamefull = Path.Combine(GlobalSettings.Settings.downloadPath, dfname);
        try
        {
            Directory.CreateDirectory(Path.GetDirectoryName(fnamefull));
            Directory.CreateDirectory(Path.GetDirectoryName(dfnamefull));
        }
        catch
        {
            Console.WriteLine("Failed to make sure target directories do exist.");
            break; // nothing below can succeed without target directories
        }

        // save description
        if (needDescription)
        {
            try
            {
                File.WriteAllText(dfnamefull, cpage);
                Console.WriteLine("description saved to filename:" + dfname);
            }
            catch (Exception E)
            {
                Console.WriteLine("Error saving description:" + E.Message);
            }
        }

        var fileExists = File.Exists(fnamefull);
        Console.WriteLine("target filename: " + fname + (fileExists ? " (exists)" : ""));

        // at this point we have the actual file ID, and can skip downloading based on that
        if (GlobalSettings.Settings.downloadOnlyOnce)
        {
            if ((!updateMode) && fileExists) // checked earlier: && !dbSubExists
            {
                if (subInitFid != subFid)
                {
                    Console.WriteLine(string.Format(
                        "Note :: submission {0} exists locally, but could've been updated\n" +
                        "consider running this task in update mode", subId));
                    SubmissionsDB.DB.AddSubmission(subIdInt);
                }
                else
                {
                    Console.WriteLine("Already exists, continuing~");
                    SubmissionsDB.DB.AddSubmissionWithFileId(subIdInt, subFid);
                }
                continue;
            }
            // this exact check can also be found before, it is repeated here for cases
            // when subFid was not known before the submission page request
            if (updateMode && dbSubFid == subFid && dbSubFid != 0)
            {
                Console.WriteLine("Already downloaded, continuing~");
                continue;
            }
        }
        else // not `download only once`
        {
            if ((!updateMode) && fileExists)
            {
                if (subInitFid != subFid)
                {
                    SubmissionsDB.DB.AddSubmission(subIdInt);
                }
                else
                {
                    SubmissionsDB.DB.AddSubmissionWithFileId(subIdInt, subFid);
                }
                Console.WriteLine("Already exists, continuing~");
                continue;
            }
            if (updateMode && fileExists && dbSubFid == subFid && dbSubFid != 0)
            {
                Console.WriteLine("Already exists, continuing~");
                continue;
            }
        }

        // if we got here, there was no reason to skip the download
        bool mayBeUselessDownload = false;
        string oldFileHash = "";
        if (fileExists)
        {
            Console.WriteLine(string.Format("subfid {0} dbsf {1}", subFid, dbSubFid));
            oldFileHash = Utils.FileHash(fnamefull);
            // download the possibly-updated file next to the old one under a versioned name
            fnamefull = Path.Combine(GlobalSettings.Settings.downloadPath, string.Format("{1} [v.{0}].{2}", subFid, fname, sp.EXT));
            if (!(subFid != dbSubFid && dbSubFid != 0))
            {
                // FIX: "aganst" typo in the original message.
                Console.WriteLine("Info :: stored metadata is insufficient; downloading a remote file to compare against local");
                mayBeUselessDownload = true;
            }
        }

        // download file
        int fattempts = 3;
fbeforeawait:
        try
        {
            Console.WriteLine("Downloading: " + sp.URL);
            // "?" can only be in the URL if the user named their submission this way
            // it WILL be mistreated as an URL parameter, but this dirty hack with explicit replacement fixes it
            using (var response = await http.GetAsync(sp.URL.Replace("?", "%3F"), HttpCompletionOption.ResponseHeadersRead))
            {
                if (!response.IsSuccessStatusCode)
                {
                    // HttpRequestException is the idiomatic type for HTTP failures
                    // (still caught by the generic handler below).
                    throw new HttpRequestException(string.Format("HTTP error: {0}", response.StatusCode));
                }
                using (
                    Stream contentStream = await response.Content.ReadAsStreamAsync(),
                    stream = new FileStream(
                        fnamefull, FileMode.Create, FileAccess.Write, FileShare.None,
                        1024 * 1024 /*Mb*/, true)
                )
                {
                    await ReadNetworkStream(contentStream, stream, 5000);
                    // await contentStream.CopyToAsync(stream); // this works, but may hang forever in case of network errors
                }
            }
        }
        catch (Exception E)
        {
            // write error message
            if (E is ObjectDisposedException)
            {
                Console.WriteLine("Network error (data receive timeout)");
            }
            else
            {
                Console.WriteLine("GET request error (file " + sp.FILEID + "): " + E.Message);
            }
            // remove incomplete download
            if (File.Exists(fnamefull))
            {
                File.Delete(fnamefull);
            }
            // try again or abort operation
            fattempts--;
            // FIX: was Thread.Sleep — never block the thread inside an async method.
            await Task.Delay(2000);
            if (fattempts > 0)
            {
                goto fbeforeawait;
            }
            else
            {
                Console.WriteLine("Giving up on downloading {0}", subId);
                res.failedToDownload.Add(subId);
                continue;
            }
        }

        SubmissionsDB.DB.AddSubmissionWithFileId(subIdInt, subFid);
        if (mayBeUselessDownload)
        {
            var newFileHash = Utils.FileHash(fnamefull);
            if (newFileHash == oldFileHash)
            {
                Console.WriteLine("Note :: existing version matches the one on the server, removing a duplicate");
                File.Delete(fnamefull);
            }
        }

        Console.WriteLine("Done: #" + subId);
        TaskbarProgress.SetValue(currentConsoleHandle, subs.Count - i, subs.Count);
        res.processedPerfectly++;
    }

    // writing results
    try
    {
        if (res.failedToGetPage.Count > 0 || res.failedToDownload.Count > 0)
        {
            File.WriteAllLines(Path.Combine(GlobalSettings.Settings.systemPath, "get_sub_page_failed.log"), res.failedToGetPage);
            File.WriteAllLines(Path.Combine(GlobalSettings.Settings.systemPath, "download_failed.log"), res.failedToDownload);
        }
    }
    catch (Exception E)
    {
        Console.WriteLine("Failed to save list of subs with issues: " + E.Message);
    }

    // save DB
    SubmissionsDB.Save();
    // stop progress indicating
    TaskbarProgress.SetState(currentConsoleHandle, TaskbarProgress.TaskbarStates.NoProgress);
    // return result, actually
    return(res);
}
/// <summary>
/// Handles a product-order POST: parses the JSON payload, validates the offer price,
/// creates a bid + order for the authenticated user, and writes the resulting
/// order/products back as JSON.
/// </summary>
/// <param name="Request">Incoming HTTP request; body must be a JSON object.</param>
/// <param name="Response">HTTP response; receives JSON on success or an error status.</param>
/// <param name="PathParams">Unused path parameters (required by the base signature).</param>
public override void Post(HttpRequest Request, HttpResponse Response, params string[] PathParams)
{
    // Never cache payment/order responses.
    Response.Cache.SetCacheability(HttpCacheability.NoCache);
    Response.Cache.SetMaxAge(TimeSpan.Zero);

    JObject inputData = null;
    try
    {
        using (StreamReader reader = new StreamReader(Request.InputStream))
        {
            using (JsonTextReader jsonReader = new JsonTextReader(reader))
            {
                inputData = JObject.Load(jsonReader);
            }
        }
    }
    catch
    {
        RespondBadRequest(Response);
        // FIX: the original fell through here with inputData == null and crashed
        // with a NullReferenceException on the first TryGetValue below.
        return;
    }

    Int64 AppUserId;
    if (IsAuthorizedRequest(Request, Response, true, out AppUserId))
    {
        Response.ContentType = @"application/json";
        try
        {
            JToken jt;
            string card_tk = null, expire_date = null, authNumber = null, last4_digits = null, id_number = null, special_instructions = null;
            JArray products = null;
            Int64 supplierId = 0;
            int numOfPayments = 1;
            decimal totalPrice = 0;
            var lstProduct = new Dictionary<Int64, int>();

            // Pull the optional/required fields out of the payload. Missing keys
            // leave the defaults above in place.
            if (inputData.TryGetValue(@"card_tk", out jt)) { card_tk = jt.Value<string>(); }
            if (inputData.TryGetValue(@"expire_date", out jt)) { expire_date = jt.Value<string>(); }
            if (inputData.TryGetValue(@"last4_digits", out jt)) { last4_digits = jt.Value<string>(); }
            if (inputData.TryGetValue(@"id_number", out jt)) { id_number = jt.Value<string>(); }
            if (inputData.TryGetValue(@"products", out jt)) { products = jt.Value<JArray>(); }
            if (inputData.TryGetValue(@"supplier_id", out jt)) { supplierId = jt.Value<Int64>(); }
            if (inputData.TryGetValue(@"total_price", out jt) && jt != null) { totalPrice = jt.Value<decimal>(); }
            if (inputData.TryGetValue(@"auth_num", out jt) && jt != null) { authNumber = jt.Value<string>(); }
            if (inputData.TryGetValue(@"special_instructions", out jt) && jt != null) { special_instructions = jt.Value<string>(); }
            if (inputData.TryGetValue(@"num_of_payments", out jt) && jt != null) { numOfPayments = jt.Value<int>(); }

            // FIX: the original dereferenced `products` unconditionally and threw a
            // NullReferenceException (surfacing as "db-error") when the key was absent.
            if (products == null)
            {
                RespondBadRequest(Response);
                return;
            }

            foreach (JObject obj in products.Children<JObject>())
            {
                Int64 product_id = 0;
                int amount = 1;
                if (obj.TryGetValue(@"product_id", out jt)) { product_id = jt.Value<Int64>(); }
                if (obj.TryGetValue(@"amount", out jt)) { amount = jt.Value<int>(); }
                // NOTE(review): Add() throws on a duplicate product_id, which the outer
                // catch turns into "db-error". Whether duplicates should accumulate or
                // replace is a business decision — confirm with product owner.
                lstProduct.Add(product_id, amount);
            }

            // The client-supplied total must still match a currently-valid offer.
            bool isPriceValid = false;
            if (supplierId > 0 && totalPrice > 0)
            {
                isPriceValid = OfferController.IsOfferStillValid(lstProduct, supplierId, totalPrice);
            }
            if (!isPriceValid)
            {
                RespondError(Response, HttpStatusCode.ExpectationFailed, @"price-not-valid");
                // FIX: the original kept going after rejecting the price and created
                // the bid, order, and payment token anyway.
                return;
            }

            var results = new ProcessingResults
            {
                AuthNumber = authNumber,
                CardExpiration = expire_date,
                CardToken = card_tk,
                NumOfPayments = numOfPayments,
                SpecialInstructions = special_instructions,
                Last4Digits = last4_digits,
                // FIX: id_number was parsed but never forwarded, even though
                // GenerateNewOrder persists results.PersonalId onto the user's card
                // record when non-empty.
                PersonalId = id_number,
            };

            using (StreamWriter streamWriter = new StreamWriter(Response.OutputStream))
            {
                using (JsonTextWriter jsonWriter = new JsonTextWriter(streamWriter))
                {
                    // Create the bid, then the order tied to it, then echo the order
                    // contents back to the client.
                    string gifts;
                    var bidId = BidController.CreateBidProduct(AppUserId, supplierId, lstProduct, true, out gifts);
                    var order = OrderController.GenerateNewOrder(results, AppUserId, bidId, gifts, supplierId, totalPrice, Source.Application);
                    var offerProducts = ProductController.GetProductsByBid(order.BidId);

                    jsonWriter.WriteStartObject();
                    jsonWriter.WritePropertyName(@"products");
                    jsonWriter.WriteStartArray();
                    foreach (var product in offerProducts)
                    {
                        jsonWriter.WriteStartObject();
                        jsonWriter.WritePropertyName(@"product_id");
                        jsonWriter.WriteValue(product.ProductId);
                        jsonWriter.WritePropertyName(@"product_name");
                        jsonWriter.WriteValue(product.ProductName);
                        jsonWriter.WritePropertyName(@"product_category");
                        jsonWriter.WriteValue(product.CategoryName);
                        jsonWriter.WritePropertyName(@"product_sub_category");
                        jsonWriter.WriteValue(product.SubCategoryName);
                        jsonWriter.WritePropertyName(@"product_animal_name");
                        jsonWriter.WriteValue(product.AnimalName);
                        jsonWriter.WriteEndObject();
                    }
                    jsonWriter.WriteEndArray();
                    jsonWriter.WritePropertyName(@"total_price");
                    jsonWriter.WriteValue(order.TotalPrice);
                    jsonWriter.WritePropertyName(@"order_id");
                    jsonWriter.WriteValue(order.OrderId);
                    jsonWriter.WritePropertyName(@"bid_id");
                    jsonWriter.WriteValue(order.BidId);
                    jsonWriter.WriteEndObject();
                }
            }
        }
        catch (Exception ex)
        {
            Helpers.LogProcessing("ProductOrderHandler - ex -", "\n exception: " + ex.ToString(), true);
            RespondError(Response, HttpStatusCode.InternalServerError, @"db-error");
        }
    }
}