// Appends a timestamped, colour-coded line to richTextBox1, marshalling the
// call onto the UI thread when invoked from a worker thread.
private void Report(string data, Color colour)
{
    // Lazily cache a delegate to this method so Invoke() below can re-enter it
    // on the UI thread without allocating a new delegate on every call.
    if (_reportDelegate == null) { _reportDelegate = Report; }
    if (richTextBox1.InvokeRequired)
    {
        // Not on the UI thread: re-dispatch this exact call via Control.Invoke.
        Invoke(_reportDelegate, data, colour);
    }
    else
    {
        RichTextBox rtb = richTextBox1;
        // Cap the log at the most recent 4000 characters.
        // NOTE(review): assigning rtb.Text discards the colour formatting of
        // the retained tail and may cut a line mid-way — confirm acceptable.
        if (rtb.Text.Length > 4000)
        {
            rtb.Text = rtb.Text.Substring(rtb.Text.Length - 4000);
        }
        // Append "yyMMdd HH:mm:ss <data>\r\n" in the requested colour at the end.
        rtb.SelectionStart = rtb.Text.Length;
        rtb.SelectionColor = colour;
        rtb.SelectedText = DateTime.Now.ToString("yyMMdd HH:mm:ss") + " " + data + "\r\n";
        // Move the caret to the end and scroll so the newest line stays visible.
        rtb.SelectionStart = rtb.Text.Length;
        rtb.ScrollToCaret();
    }
}
/// <summary>Raises ErrorEvent with the given message and no associated exception.</summary>
/// <param name="errorMessage">Message describing the error</param>
internal static void Error(string errorMessage)
{
    // ?. evaluates ErrorEvent exactly once, preserving the original
    // copy-then-null-check pattern against concurrent unsubscription.
    ErrorEvent?.Invoke(errorMessage, null);
}
/// <summary>Raises ErrorEvent with the given message and the causing exception.</summary>
/// <param name="errorMessage">Message describing the error</param>
/// <param name="exception">Exception that triggered the error report</param>
internal static void Error(string errorMessage, Exception exception)
{
    // Single evaluation of ErrorEvent — equivalent to the snapshot-local pattern.
    ErrorEvent?.Invoke(errorMessage, exception);
}
/// <summary>Raises WarningEvent with the given message and no associated exception.</summary>
/// <param name="warningMessage">Message describing the warning</param>
internal static void Warning(string warningMessage)
{
    // Single evaluation of WarningEvent — equivalent to the snapshot-local pattern.
    WarningEvent?.Invoke(warningMessage, null);
}
/// <summary>Raises OperationErrorEvent with the given details and causing exception.</summary>
/// <param name="errorDetails">Details of the failed operation</param>
/// <param name="exception">Exception that triggered the report</param>
internal static void OperationError(string errorDetails, Exception exception)
{
    // Single evaluation of OperationErrorEvent — equivalent to the snapshot-local pattern.
    OperationErrorEvent?.Invoke(errorDetails, exception);
}
// Demo entry point: points one ReportDelegate variable at two different
// players in turn and asks MediaStorage to report each media type's status.
static void Main(string[] args)
{
    var storage = new MediaStorage();
    var audio = new AudioPlayer();
    var video = new VideoPlayer();

    // Implicit method-group conversion replaces the explicit delegate ctor.
    ReportDelegate check = audio.AudioPlayCheck;
    storage.ReportStatus(check, "mp3");

    check = video.VideoPlayCheck;
    storage.ReportStatus(check, "mkv");
}
/// <summary>Called when the progress of the observed transaction changes</summary>
/// <param name="sender">Transaction whose progress has changed</param>
/// <param name="arguments">Contains the updated progress</param>
private void asyncProgressChanged(object sender, ProgressReportEventArgs arguments)
{
    // Record the new progress, then notify the owner if a callback is set.
    // ?. reads the field once, matching the original snapshot-local pattern.
    this.progress = arguments.Progress;
    this.progressUpdateCallback?.Invoke();
}
/// <summary>
/// Binds the report's static ReportMethod to a ReportDelegate and invokes it
/// against this instance, returning the generated report text.
/// </summary>
/// <param name="report">Report whose ReportMethod will be executed</param>
/// <returns>The string produced by the report's method</returns>
public string RunReport(WirelessReport report)
{
    // FIX: the original wrapped this in `try { } catch (Exception ex) { throw; }`,
    // which does nothing except raise an unused-variable warning; exceptions
    // propagate to the caller identically without it.
    ReportDelegate d = (ReportDelegate)Delegate.CreateDelegate(typeof(ReportDelegate), report.ReportMethod);
    return d(this);
}
// Invokes the supplied play-check delegate for the given file extension and
// writes the outcome to the console (return value 0 = success).
public void ReportStatus(ReportDelegate d, string extension)
{
    int r = d(extension);
    // FIX: corrected the misspelled "SuccessFully" in the console output.
    if (r == 0)
    {
        Console.WriteLine($"{extension} Media Played Successfully");
    }
    else
    {
        Console.WriteLine($"{extension} Media Did Not Play Successfully");
    }
}
/// <summary>Initializes a new observed transaction</summary>
/// <param name="weightedTransaction">Weighted transaction being observed</param>
/// <param name="progressUpdateCallback">
///   Callback to invoke when the transaction's progress changes
/// </param>
/// <param name="endedCallback">
///   Callback to invoke when the transaction has ended
/// </param>
internal ObservedWeightedTransaction(
    WeightedTransaction<TransactionType> weightedTransaction,
    ReportDelegate progressUpdateCallback,
    ReportDelegate endedCallback
) {
    this.weightedTransaction = weightedTransaction;

    // See if this transaction has already ended (initial check for performance)
    if (weightedTransaction.Transaction.Ended) {

        // Since we don't subscribe to the .Ended event (which would be fired immediately on
        // subscription if the transaction was already finished), we will emulate this
        // behavior here. There is no race condition here: The transition to .Ended occurs
        // only once and will never happen in reverse. This is just a minor optimization to
        // prevent object coupling where none is necessary and to save some processing time.
        this.progress = 1.0f;
        progressUpdateCallback();

        // Do not call the ended callback here. This constructor is called when the
        // TransactionGroup constructs its list of transactions. If this is called and
        // the first transaction to be added to the group happens to be in the ended
        // state, the transactionGroup will immediately think it has ended!
        //!DONT!endedCallback();
        return;
    }

    // Store the callbacks BEFORE subscribing below: the asyncEnded handler
    // reads these fields, and the event can fire immediately on subscription.
    this.endedCallback = endedCallback;
    this.progressUpdateCallback = progressUpdateCallback;

    // This might trigger the event handler to be invoked right here if the transaction
    // ended between our initial optimization attempt and this line. It's unlikely,
    // however, so we'll not waste time with another optimization attempt.
    this.weightedTransaction.Transaction.AsyncEnded += new EventHandler(asyncEnded);

    // See whether this transaction implements the IProgressReporter interface and if
    // so, connect to its progress report event in order to pass these reports on
    // to whomever created ourselves.
    this.progressReporter = this.weightedTransaction.Transaction as IProgressReporter;
    if (this.progressReporter != null) {
        // Keep a reference to the handler so it can be unsubscribed later.
        this.asyncProgressChangedEventHandler = new EventHandler<ProgressReportEventArgs>(
            asyncProgressChanged
        );
        this.progressReporter.AsyncProgressChanged += this.asyncProgressChangedEventHandler;
    }
}
/// <summary>Called when the observed transaction has ended</summary>
/// <param name="sender">Transaction that has ended</param>
/// <param name="e">Not used</param>
private void asyncEnded(object sender, EventArgs e) {
    // Copy both callbacks into locals before disconnecting the events —
    // presumably so nothing triggered by the disconnect can clear the fields
    // between here and the invocations below (TODO confirm intent).
    ReportDelegate savedEndedCallback = this.endedCallback;
    ReportDelegate savedProgressUpdateCallback = this.progressUpdateCallback;

    asyncDisconnectEvents(); // We don't need those anymore!

    // If the progress hasn't reached 1.0 yet, make a fake report so that even
    // when a transaction doesn't report any progress at all, the set or queue
    // owning us will have a percentage of transactions completed.
    //
    // There is the possibility of a race condition here, as a final progress
    // report could have been generated by a thread running the transaction
    // that was preempted by this thread. This would cause the progress to
    // jump to 1.0 and then back to whatever the waiting thread will report.
    if (this.progress != 1.0f) {
        this.progress = 1.0f;
        savedProgressUpdateCallback();
    }

    // Finally, tell the owner this transaction is done.
    savedEndedCallback();
}
//////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// METHOD: PROCESS KEYWORD FILE
//
// Rebuilds the in-memory Referrals list from the previously written keyword
// log. Each data line (the first line is a run counter and is skipped) has
// the shape "<url>$<count>#<keyword>$<count>#<keyword>...".
// NOTE(review): _update_Delegate is accepted but never invoked here.
static void ProcessKeywordFile(ReportDelegate _update_Delegate) {
    //INIT VARS
    int Occurrences = 0;
    string URL = "";
    string Keyword = "";
    int counter = 1;    // line counter; line 1 holds the run count, not data
    //GET OLD DATA
    using (StreamReader r = new StreamReader(KeywordsLog)) {
        while ((line = r.ReadLine()) != null) {
            if (counter > 1) {
                // GET URL — everything from "/site/" up to the first '$'
                URL = Regex.Match(line, "/site/[^$]*").Value;
                foreach (string Pair in Regex.Split(line, "\\$")) {
                    // GET KEYWORDS && OCCURRENCES — each pair is "count#keyword"
                    Match m = Regex.Match(Pair, "^([0-9]+)#(.+)");
                    if (Regex.IsMatch(m.Groups[1].Value, "[0-9]+")) {
                        Occurrences = Convert.ToInt32(m.Groups[1].Value);
                        Keyword = m.Groups[2].Value;
                        var NewReferral = new Referral(URL, Keyword, Occurrences);
                        // NOTE(review): this gate appears to rely on the
                        // Referral constructor registering (or merging) the
                        // keyword as a side effect — confirm against Referral.
                        if (NewReferral.Keywords.ContainsKey(Keyword)) {
                            Referrals.Add(NewReferral);
                        }
                    }
                }
            }
            counter++;
        }
    }
}
//////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// METHOD: Update XML
//
// Rewrites the page-titles XML, appending the top keywords to each page's
// title, and builds a parallel "hot searches" XML from the same data.
// FIX: both StreamWriters are now wrapped in using blocks so the output files
// are flushed and closed even if a write throws (the original leaked the
// handles on failure and could leave a truncated file locked).
static void UpdateXML(ReportDelegate _update_Delegate) {
    string HotSearches = "";
    // OPEN PAGE TITLES XML
    using (StreamReader r = new StreamReader(PageTitleXML)) {
        while ((line = r.ReadLine()) != null) {
            _update_Delegate(70, "Getting XML information");
            // GET NAME && BEGINNING OF LINE
            Match m = Regex.Match(line, "([^n]+name=\")([^\"]+)");
            string beginning = m.Groups[1].Value;
            string name = m.Groups[2].Value;
            // GET TITLE && END OF LINE
            m = Regex.Match(line, ">([^<(&#)]+)[^<]*(.+)");
            string title = m.Groups[1].Value.Trim();
            string end = m.Groups[2].Value;
            // GATHER TOP KEYWORDS FOR INDIVIDUAL PAGES
            if (Regex.IsMatch(title, "[a-zA-Z]") && Regex.IsMatch(name, "[a-zA-Z]")) {
                newTitleXML += beginning + name + "\">" + title;
                newHotSearchXML += beginning + name + "\">";
                List<string> MyKeywordList = new List<string>(Referral.GetKeywords(name + "$", 5));
                // Drop keywords already present in the title (and "tempositions").
                for (int counter = MyKeywordList.Count - 1; counter >= 0; counter--) {
                    if (Regex.IsMatch(title, MyKeywordList[counter], RegexOptions.IgnoreCase) |
                        Regex.IsMatch(MyKeywordList[counter], "tempositions", RegexOptions.IgnoreCase)
                        ) {
                        MyKeywordList.Remove(MyKeywordList[counter]);
                    }
                }
                // Join the surviving keywords: " | " before the first one,
                // bullet separators between, comma-separated for hot searches.
                int i = 0;
                HotSearches = "";
                foreach (string keyword in MyKeywordList) {
                    if (i == 0 && i != MyKeywordList.Count - 1) {
                        newTitleXML += " | " + keyword + " • ";
                        HotSearches += keyword + ", ";
                    } else if (i == 0) {
                        newTitleXML += " | " + keyword;
                        HotSearches += keyword;
                    } else if (i != MyKeywordList.Count - 1) {
                        newTitleXML += keyword + " • ";
                        HotSearches += keyword + ", ";
                    } else {
                        newTitleXML += keyword;
                        HotSearches += keyword;
                    }
                    i++;
                }
                newTitleXML += end + "\r\n";
                if (Regex.IsMatch(HotSearches, "[a-zA-Z]")) {
                    newHotSearchXML += HTMLText + HotSearches + ".";
                }
                newHotSearchXML += "</page>\r\n";
            } else if (Regex.IsMatch(line, "[a-zA-Z]")) {
                // Line had no usable name/title: pass it through unchanged.
                newTitleXML += line + "\r\n";
                newHotSearchXML += line + "\r\n";
            }
        }
    }
    _update_Delegate(70, "Overwriting results files");
    // OVERWRITE PAGE TITLES XML (disposed even on exception)
    using (StreamWriter TitleXML = new StreamWriter(PageTitleXML)) {
        TitleXML.Write(newTitleXML);
    }
    // OVERWRITE HOT SEARCHES XML (disposed even on exception)
    using (StreamWriter HotSearchXML = new StreamWriter(HotSearchesXML)) {
        HotSearchXML.Write(newHotSearchXML);
    }
}
/// <summary>
/// Attempts to prove the theorem by resolution refutation: the theorem is
/// negated, converted to CNF, joined with the premisses, and resolution is
/// applied to clause pairs until the empty (null) clause is derived or no
/// new resolvents can be produced. Progress is streamed through
/// <paramref name="report"/>.
/// </summary>
/// <param name="report">Sink for the human-readable proof trace</param>
/// <returns>true if the null clause was derived (theorem proved)</returns>
public bool ProveIt(ReportDelegate report) {
    // Negates the theorem.
    AST negated = CNF.Convert(Theorem.Ast.Negation());
    m_theoremNegation = new CNFProposition(negated);
    report(string.Format(
        "Teorema: {0}\nTeorema negado: {1}\nTeorema negado em FNC: {2}\n",
        CNF.Convert(Theorem.Ast).ToString(),
        negated.ToString(),
        m_theoremNegation.ToString()
    ));
    // Join all premisses and sort them.
    List<MultipleDisjunction> clauses = new List<MultipleDisjunction>();
    foreach (CNFProposition premisse in Premisses.Values) {
        clauses.AddRange(premisse.Props);
    }
    clauses.AddRange(m_theoremNegation.Props);
    clauses.Sort();
    // Number and report the starting clause set (1-based indices are reused
    // in the trace messages below).
    string premissesStr = "Cláusulas (premissas + negação do teorema):";
    int premisseCount = 0;
    foreach (MultipleDisjunction premisse in clauses) {
        premissesStr += string.Format("\n({0}) {1}", ++premisseCount, premisse);
    }
    report(premissesStr + "\n");
    // Evaluate Resolutions.
    report("\n=====================\nTentando provar...\n------------------\n\n");
    List<TestedPair> tested = new List<TestedPair>();
    bool solved = false;
    for (int i = 0; i < clauses.Count; i++) {
        for (int j = 0; j < clauses.Count; j++) {
            if (i == j) { continue; }
            // Skip (i, j) pairs that were already attempted.
            TestedPair pair = new TestedPair(i, j);
            if (tested.Contains(pair)) { continue; } else { tested.Add(pair); }
            MultipleDisjunction resolvent = clauses[i].ApplyResolution(clauses[j]);
            if (resolvent == null) {
                continue; // complementaries not found.
            }
            solved = resolvent.ISNullClause;
            if (solved) {
                // Empty clause derived: contradiction found, theorem proved.
                report(string.Format(
                    "Cláusula nula encontrada da aplicação de {0} em {1}.\n",
                    j + 1, i + 1
                ));
                break;
            }
            if (clauses.Contains(resolvent)) { continue; }
            clauses.Add(resolvent);
            report(string.Format(
                "({0}) {1} :: De {2} em {3}.\n",
                clauses.Count, resolvent.ToString(), j + 1, i + 1
            ));
            // A new clause was added: restart the pair scan from the top so
            // the resolvent is tried against every earlier clause.
            i = j = 0;
        }
        if (solved) { break; }
    }
    report("\n\n==================\n");
    report(solved ? "TEOREMA PROVADO!" : "NÃO FOI POSSÍVEL PROVAR A TEORIA!");
    report("\n==================\n\n");
    return(solved);
}
// Fire-and-forget status report to the master controller at
// http://<master>:5050/status. No-op when the master is local or this node IS
// the master. Failures are swallowed by design: status reporting must never
// take down the render node.
internal static void ReportStatusOld(ClientNodeStatus status, string statusText, string error)
{
    try
    {
        if (NetControl.MasterAddress == "127.0.0.1" || Settings.MasterController)
        {
            return;
        }
        if (Earth3d.Logging)
        {
            Earth3d.WriteLogMessage("Reporting Status:" + statusText);
        }
        currentStatus = status;
        CurrentStatusText = statusText;
        // FIX: free-text values are now percent-encoded. The original pasted
        // them into the query string raw, so spaces, '&' or '#' in statusText,
        // error or the display name produced a malformed URL.
        string url = string.Format(
            "http://{0}:5050/status?NodeID={1}&NodeName={2}&FPS={3}&Error={4}&Status={5}&StatusText={6}",
            NetControl.MasterAddress,
            Earth3d.MainWindow.Config.NodeID,
            Uri.EscapeDataString(Earth3d.MainWindow.Config.NodeDiplayName ?? ""),
            Earth3d.LastFPS,
            Uri.EscapeDataString(error ?? ""),
            status,
            Uri.EscapeDataString(statusText ?? ""));
        ReportDelegate report = new ReportDelegate(Report);
        // FIX: every BeginInvoke must be paired with EndInvoke; the callback
        // completes the async call so its resources and any exception are
        // observed instead of leaking (exceptions still best-effort-swallowed).
        report.BeginInvoke(url, ar => { try { report.EndInvoke(ar); } catch { } }, null);
        //WebClient client = new WebClient();
        //client.DownloadStringAsync(new Uri(url));
    }
    catch
    {
        // Deliberate best-effort: never let status reporting throw.
    }
}
//////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// METHOD: CLEAN ANOMALIES FROM KEYWORD LOG
//
// Re-reads the keyword log, then rewrites it keeping only referrals that have
// at least one keyword with MinimumOccurrencesToKeep occurrences; resets the
// run counter to 0. Also refreshes the human-readable companion log.
// FIX: both StreamWriters are now in using blocks so the files are flushed
// and closed even if a write throws (the original leaked both handles).
static void CleanAnomalies(ReportDelegate _update_Delegate) {
    bool pass = false;
    if (PrintToConsole == true) { Console.Write("\n\tCleaning logs. . . \n\n"); }
    Referral.Clean();
    ProcessKeywordFile(_update_Delegate);
    // OPEN STREAM TO KEYWORDS.LOG (disposed even on exception)
    using (StreamWriter fileWrite = new StreamWriter(KeywordsLog))
    using (StreamWriter fileWriteForHumans = new StreamWriter(KeywordsLogForHumans)) {
        // RESET RUN COUNT
        fileWrite.WriteLine("0");
        fileWriteForHumans.WriteLine("READOO!\r\n\r\nKEYWORDS CAUGHT TO DATE: {0}\r\n RUNS BEFORE NEXT CLEANUP: {1}", Today, RunsBeforeCleaning);
        if (PrintToConsole == true) {
            Console.WriteLine("\n\nREADOO!\n\nKEYWORDS CAUGHT TO DATE: {0}\n RUNS BEFORE NEXT CLEANUP: {1}", Today, RunsBeforeCleaning);
        }
        for (int i = 0; i < Referrals.Count(); i++) {
            pass = false;
            // FIRST THE URL
            if (PrintToConsole == true) { Console.Write("\n\n{0}\n-", Referrals[i].URL); }
            fileWriteForHumans.WriteLine("");
            fileWriteForHumans.WriteLine("{0}", Referrals[i].URL);
            // IF THERES AN ALLOWED KEYWORD — only write the URL to the machine
            // log when at least one keyword meets the keep threshold.
            foreach (string Phrase in Referrals[i].Keywords.Keys) {
                if (Referrals[i].Keywords[Phrase] >= MinimumOccurrencesToKeep) { pass = true; }
            }
            if (pass == true) { fileWrite.Write("{0}", Referrals[i].URL); }
            // Keywords in descending order of occurrence count.
            var SortedKeys = from k in Referrals[i].Keywords.Keys
                             orderby Referrals[i].Keywords[k] descending
                             select k;
            foreach (string Phrase in SortedKeys) {
                if (Referrals[i].Keywords[Phrase] >= MinimumOccurrencesToKeep) {
                    if (PrintToConsole == true) { Console.Write("\t({0}) {1}\n", Referrals[i].Keywords[Phrase], Phrase); }
                    fileWriteForHumans.WriteLine("\t({0}) {1}", Referrals[i].Keywords[Phrase], Phrase);
                    fileWrite.Write("${0}#{1}", Referrals[i].Keywords[Phrase], Phrase);
                }
            }
            // LINE BREAK FOR FILE!
            fileWrite.WriteLine("");
            fileWriteForHumans.WriteLine("");
        }
        fileWriteForHumans.WriteLine("");
        fileWriteForHumans.WriteLine("");
    }
    if (PrintToConsole == true) { Console.Write("\n\nDone!"); }
    Referral.Clean();
}
/// <summary>Creates a report description with explicit prolog and epilog handlers.</summary>
/// <param name="builder">Delegate that builds the report body</param>
/// <param name="prolog">Delegate run before the report is built</param>
/// <param name="epilog">Delegate run after the report is built</param>
public ReportDescription(ReportDelegate builder, PrologEpilogDelegate prolog, PrologEpilogDelegate epilog)
{
    // Independent property assignments; order is irrelevant.
    this.Prolog = prolog;
    this.Epilog = epilog;
    this.Builder = builder;
}
/// <summary>
/// Creates a report description whose prolog and epilog are no-ops, so callers
/// that only need a builder never have to null-check the hooks.
/// </summary>
/// <param name="builder">Delegate that builds the report body</param>
public ReportDescription(ReportDelegate builder)
{
    this.Builder = builder;
    // Empty lambdas replace the original anonymous-delegate no-ops.
    this.Prolog = (ListDictionary parameters) => { };
    this.Epilog = (ListDictionary parameters) => { };
}
/// <summary>
/// Check a directory for duplicates (No GIF at the moment due to animation issues)
/// </summary>
/// <param name="parth">path to check for dups in</param>
/// <param name="report">progress callback, invoked as report(percent, "Running")</param>
/// <param name="ecp">event args used to signal cancellation back to the worker</param>
/// <param name="worker">background worker polled for CancellationPending</param>
/// <returns>map of duplicate file path to its duplicate-group id</returns>
public Dictionary<string, int> CheckDirDups(string parth, ReportDelegate report, DoWorkEventArgs ecp, BackgroundWorker worker)
{
    Dictionary<string, int> files = new Dictionary<string, int>();
    FileArray = new ArrayList();
    foreach (string szType in FileTypes)
    {
        string[] szFiles = Directory.GetFiles(parth, szType);
        if (szFiles.Length > 0)
        {
            FileArray.AddRange(szFiles);
        }
    }
    // FIX: the original computed progressmax = 100 / FileArray.Count with
    // integer division — that is 0 whenever there are more than 100 files
    // (progress stuck at 0) and throws DivideByZeroException when the
    // directory matches no files at all.
    if (FileArray.Count == 0)
    {
        return files;
    }
    progressmax = FileArray.Count;
    int cur = 0;
    string checksum = "";
    int dictionaryKey = 0;
    foreach (string file in FileArray)
    {
        // Percent complete in long arithmetic, clamped to 100.
        progress = (int)(100L * cur / FileArray.Count);
        if (progress > 100) { progress = 100; }
        report(progress, "Running");
        if (worker.CancellationPending)
        {
            ecp.Cancel = true;
            break;
        }
        cur++;
        checksum = IsUnique(file);
        if (checksum != "error")
        {
            // New fingerprint: remember it alongside its file name.
            System.Diagnostics.Debug.WriteLine(file + ": In not in list, adding " + checksum);
            fingerprint_feild.Add(checksum);
            filename_feild.Add(file);
        }
        else
        {
            System.Diagnostics.Debug.WriteLine(file + ": Is a duplicate");
            try
            {
                if (files.ContainsKey(lastmatch))
                {
                    // The matched file already belongs to a group: join it.
                    int v = files[lastmatch];
                    files.Add(file, v);
                }
                else
                {
                    // Start a new duplicate group containing both files.
                    files.Add(file, dictionaryKey);
                    files.Add(lastmatch, dictionaryKey);
                    dictionaryKey++;
                }
            }
            catch (Exception e)
            {
                // Duplicate key (file seen twice) — log and keep scanning.
                System.Diagnostics.Debug.WriteLine(e);
            }
        }
    }
    return(files);
}
//////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// METHOD: UPDATE PROGRAM LOGS
//
// Rewrites the keyword log (run tally, then one "<url>$<count>#<keyword>..."
// record per referral) and the human-readable companion log.
// NOTE(review): _update_Delegate is accepted but never invoked here.
// FIX: both StreamWriters are now in using blocks so the files are flushed
// and closed even if a write throws (the original leaked both handles).
static void UpdateProgramLogs(ReportDelegate _update_Delegate) {
    // OPEN STREAM TO KEYWORDS.LOG (disposed even on exception)
    using (StreamWriter fileWrite = new StreamWriter(KeywordsLog))
    using (StreamWriter fileWriteForHumans = new StreamWriter(KeywordsLogForHumans)) {
        // UPDATE RUN TALLY
        fileWrite.WriteLine(RunCount);
        fileWriteForHumans.WriteLine("READOO! Written by Dylan Lloyd.\r\n\r\nKEYWORDS CAUGHT TO DATE: {0}\r\n RUNS BEFORE NEXT CLEANUP: {1}", Today, RunsBeforeCleaning - RunCount);
        for (int i = 0; i < Referrals.Count(); i++) {
            fileWriteForHumans.WriteLine("");
            fileWriteForHumans.WriteLine("{0}", Referrals[i].URL);
            // NOW TO THE FILE!
            fileWrite.Write("{0}", Referrals[i].URL);
            // Keywords in descending order of occurrence count.
            var SortedKeys = from k in Referrals[i].Keywords.Keys
                             orderby Referrals[i].Keywords[k] descending
                             select k;
            foreach (string Phrase in SortedKeys) {
                fileWriteForHumans.WriteLine("\t({0}) {1}", Referrals[i].Keywords[Phrase], Phrase);
                // NOW TO THE FILE!
                fileWrite.Write("${0}#{1}", Referrals[i].Keywords[Phrase], Phrase);
            }
            // LINE BREAK FOR FILE!
            fileWrite.WriteLine("");
            fileWriteForHumans.WriteLine("");
        }
    }
}
//////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// METHOD: PROCESS LOG
//
// Scans one web-server log file for search-engine referrals: extracts the
// query terms (p= or q= parameter) and the landing URL, filters out noise,
// and records each hit as a Referral.
// NOTE(review): _update_Delegate is accepted but never invoked here.
static void ProcessLog(string fileName, ReportDelegate _update_Delegate) {
    using (StreamReader r = new StreamReader(fileName)) {
        while ((line = r.ReadLine()) != null) {
            // FIND QUERIES — value of a "p=" or "q=" parameter, stopping at
            // whitespace or a %-escape.
            Match m = Regex.Match(line, @"(p|q)=([^\s%]+)");
            string referringWords = m.Groups[2].Value;
            if (m.Success) {
                // GET URL — first "/..." path token on the line
                Match m2 = Regex.Match(line, @"\s/[^\s]+");
                string referringURL = m2.Value;
                // GET KEYWORDS — strip trailing query params, then reduce to
                // letters and spaces only.
                referringWords = Regex.Replace(referringWords, "&(.*)", " ");
                referringWords = Regex.Replace(referringWords, "[^a-zA-Z]", " ");
                // IF EVERYTHING MAKES SENSE?? — heuristic noise filters.
                if (
                    Regex.IsMatch(referringWords, "[a-zA-Z]{3,50}") &&
                    Regex.IsMatch(referringURL, "\\.(aspx|asp|html|php|xhtml|)") && //ALLOW USER TO SELECT APPROPRIATE EXTENSIONS
                    Regex.IsMatch(referringURL, "/site/default\\.aspx$") == false &&
                    Regex.IsMatch(referringWords, "cache|http|www|iceurl") == false &&
                    Regex.IsMatch(referringWords, UnwantedKeywords) == false
                    ) {
                    var NewReferral = new Referral(referringURL.Trim(), referringWords.Trim().ToLower(), 1);
                    // NOTE(review): this check appears to rely on the Referral
                    // constructor normalizing/merging the URL as a side effect
                    // — confirm against Referral; as written it looks always
                    // true when the ctor stores the URL verbatim.
                    if (NewReferral.URL == referringURL.Trim()) {
                        Referrals.Add(NewReferral);
                    }
                }
            }
            // GET CLIENT IP
            // 165.155.110.2
            //Match i = Regex.Match(line, @"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}");
            //string ClientIP = i.Groups[0].Value + "\n";
            //Console.Write(line + "##" + ClientIP + "##" + );
        }
    }
}
/// <summary>
/// Check a directory for duplicates (No GIF at the moment due to animation issues)
/// </summary>
/// <param name="parth">path to check for dups in</param>
/// <param name="report">progress callback, invoked as report(percent, "Running")</param>
/// <param name="ecp">event args used to signal cancellation back to the worker</param>
/// <param name="worker">background worker polled for CancellationPending</param>
/// <returns>map of duplicate file path to its duplicate-group id</returns>
public Dictionary<string, int> CheckDirDups(string parth, ReportDelegate report, DoWorkEventArgs ecp, BackgroundWorker worker)
{
    Dictionary<string, int> files = new Dictionary<string, int>();
    FileArray = new ArrayList();
    foreach (string szType in FileTypes)
    {
        string[] szFiles = Directory.GetFiles(parth, szType);
        if (szFiles.Length > 0)
        {
            FileArray.AddRange(szFiles);
        }
    }
    // FIX: the original computed progressmax = 100 / FileArray.Count with
    // integer division — 0 whenever there are more than 100 files (progress
    // stuck at 0) and a DivideByZeroException for an empty directory.
    if (FileArray.Count == 0)
    {
        return files;
    }
    progressmax = FileArray.Count;
    int cur = 0;
    string checksum = "";
    int dictionaryKey = 0; // renamed from "magic": next duplicate-group id
    foreach (string file in FileArray)
    {
        // Percent complete in long arithmetic, clamped to 100.
        progress = (int)(100L * cur / FileArray.Count);
        if (progress > 100) { progress = 100; }
        report(progress, "Running");
        if (worker.CancellationPending)
        {
            ecp.Cancel = true;
            break;
        }
        cur++;
        checksum = IsUnique(file);
        if (checksum != "error")
        {
            // New fingerprint: remember it alongside its file name.
            Console.WriteLine(file + ": In not in list, adding " + checksum);
            fingerprint_feild.Add(checksum);
            filename_feild.Add(file);
        }
        else
        {
            Console.WriteLine(file + ": Is a duplicate");
            try
            {
                if (files.ContainsKey(lastmatch))
                {
                    // The matched file already belongs to a group: join it.
                    int v = files[lastmatch];
                    files.Add(file, v);
                }
                else
                {
                    // Start a new duplicate group containing both files.
                    files.Add(file, dictionaryKey);
                    files.Add(lastmatch, dictionaryKey);
                    dictionaryKey++;
                }
            }
            catch (Exception e)
            {
                // Duplicate key (file seen twice) — log and keep scanning.
                Console.Out.WriteLine(e);
            }
        }
    }
    return files;
}
//////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// METHOD: MAIN
//
// Orchestrates a full READOO run: (re)builds the keyword data from logs,
// rewrites the program logs and optional XML, and cleans up when the run
// cycle expires. Progress is reported through _update_Delegate(percent, msg).
public static void Run(ReportDelegate _update_Delegate) {
    _update_Delegate(0, "Starting READOO!");
    _update_Delegate(0, "Calculating approximate completion time");
    long TotalLogDirectorySize = 0;
    int HowManyLogs = 0;
    int progress = 0;
    string[] fileEntries;
    // Tally log count and total size. The size is currently unused — the
    // original "FIX ME" note asked for a size-weighted progress estimate,
    // which would use TotalLogDirectorySize per file instead of an even split.
    fileEntries = Directory.GetFiles(LogsDirectory, SystemLogFormat);
    foreach (string fileName in fileEntries) {
        FileInfo FileInfo = new FileInfo(fileName);
        TotalLogDirectorySize += FileInfo.Length;
        HowManyLogs++;
    }
    // FIX: the original computed Increment = 70 / (average log size in bytes),
    // which is 0 for any log over 70 bytes (progress never advanced — the
    // "THIS DOES NOTHING" block) and threw DivideByZeroException when the log
    // directory was empty. Spread the first 70% evenly across the logs instead.
    int Increment = (HowManyLogs > 0) ? 70 / HowManyLogs : 0;
    // FIRST TIME? JUST USE ALL THE OLD LOGS! THEN START RUNCOUNT.
    if (System.IO.File.Exists(KeywordsLog) == false) {
        _update_Delegate(progress, "First time running!");
        foreach (string fileName in fileEntries) {
            _update_Delegate(progress, "Processing log " + fileName);
            ProcessLog(fileName, _update_Delegate);
            RunCount = 1;
            progress += Increment;
        }
    }
    // NOT FIRST TIME? COLLECT YOUR OLD KEYWORDS, THEN GET TODAYS. THEN ADD TO RUN TALLY.
    else {
        _update_Delegate(0, "Not first time running");
        _update_Delegate(0, "Processing READOO generated keyword file");
        ProcessKeywordFile(_update_Delegate);
        _update_Delegate(70, "Processing last completed log");
        ProcessLog(CurrentLog, _update_Delegate);
        _update_Delegate(70, "Updating run cycle");
        using (StreamReader r = new StreamReader(KeywordsLog)) {
            // First line of the keyword log holds the run tally.
            RunCount = Convert.ToInt32(r.ReadLine());
            RunCount++;
        }
    }
    _update_Delegate(70, "Updating READOO generated XML files");
    // PUT ALL THE DATA TO USE
    UpdateProgramLogs(_update_Delegate);
    if (Settings.Default.XML) {
        UpdateXML(_update_Delegate);
    }
    // CLEAN UP IF NECESSARY
    _update_Delegate(95, "Checking run cycle");
    if (RunCount > RunsBeforeCleaning) {
        _update_Delegate(98, "Cleaning old logs. . .");
        CleanAnomalies(_update_Delegate);
    }
    _update_Delegate(100, "DONE!");
}