/// <summary>
/// Writes the item's exception (plus every Exception.Data entry) to the console in
/// yellow, raising OnItemAdded first. No-ops when logging is disabled, the item is
/// null, or there is no exception text to display.
/// </summary>
public void AddItem(LogItem item)
{
    // Fix: also guard against item.Exception being null — the original called
    // item.Exception.ToString() unconditionally and could throw NullReferenceException.
    if (LogLevel == LogLevel.None || item == null || item.Exception == null ||
        string.IsNullOrWhiteSpace(item.Exception.ToString()))
    {
        return;
    }
    try
    {
        OnItemAdded.RaiseEvent(item, new EventArgs());
        var text = item.Exception.ToString();
        // Append every key/value pair stored in Exception.Data, one per line.
        text = item.Exception.Data.Cast<DictionaryEntry>()
            .Aggregate(
                text,
                (current, entry) => current + string.Format("{0}{1}: {2}", Environment.NewLine, entry.Key, entry.Value));
        Console.ForegroundColor = ConsoleColor.Yellow;
        Console.WriteLine(text);
        Console.ResetColor();
    }
    catch (Exception ex)
    {
        // Logging must never bring the application down; report the failure instead.
        Console.WriteLine(ex);
    }
}
/// <summary>Lets every registered line constructor append its segment of the log line to <paramref name="sb"/>.</summary>
public void ConstructLine(StringBuilder sb, LogItem item)
{
    foreach (var segmentBuilder in constructors)
    {
        segmentBuilder.ConstructLine(sb, item);
    }
}
/// <summary>Returns true only when every filter in the chain accepts the item; stops at the first rejection.</summary>
public bool Filter(LogItem logItem)
{
    foreach (var filter in filtersArray)
    {
        if (!filter.Filter(logItem))
        {
            return false;
        }
    }
    return true;
}
/// <summary>Forwards the message to the writer when it is at least Error severity or its ids pass the id filter.</summary>
void Receive(LogItem message)
{
    var accepted = message.Level <= LogLevel.Error || CheckIds(message.Ids);
    if (accepted)
    {
        writer.Write(message);
    }
}
// Verifies LogCollector's id filtering: a message whose ids are not registered is
// dropped, and registering a matching id lets the same message through exactly once.
public void FilterAddIDTest()
{
    // Arrange: a message plus strict mocks for the queue thread and the writer.
    LogItem message = new LogItem(category, level, msg, ids, GetException());
    var qThread = new Mock<IQueued<LogItem>>(MockBehavior.Strict);
    var writer = new Mock<ILogWriter>(MockBehavior.Strict);
    writer
        .Setup(s => s.Write(It.IsAny<LogItem>()));
    writer
        .Setup(s => s.GetTimeout())
        .Returns(timeout);
    qThread
        .Setup(s => s.SetTimeout(It.IsAny<int>()));
    LogCollector target = new LogCollector(qThread.Object, writer.Object);
    // Act/Assert 1: id 1 does not match the message's ids, so nothing is written.
    target.FilterAddID(1);
    qThread.Raise(s => s.OnReceive += null, message);
    writer
        .Verify(s => s.Write(It.IsAny<LogItem>()), Times.Never());
    // Act/Assert 2: after adding id 3 (presumably matching — see `ids` fixture) the
    // raised message must be written exactly once.
    target.FilterAddID(3);
    target.FilterAddID(1);
    qThread.Raise(s => s.OnReceive += null, message);
    writer
        .Verify(s => s.Write(It.Is<LogItem>(a => a.Equals(message))), Times.Once());
}
/// <summary>Marshals the add onto the WPF dispatcher so <c>logList</c> is only mutated on the UI thread.</summary>
private void AddItem(LogItem li)
{
    Application.Current.Dispatcher.Invoke(() => logList.Add(li));
}
// Verifies that both the service API and its WCF contract surface return the same
// LogItem fields when looking an item up by id, using a stubbed mapper.
public void FindLogItem()
{
    // Arrange: a fully-populated item returned by a stubbed ILogMapper for this id.
    long id = 1;
    LogItem item = new LogItem();
    item.Id = id;
    item.Category = Guid.NewGuid();
    item.Event = Guid.NewGuid();
    item.Message = "message";
    item.Severity = Guid.NewGuid();
    item.Title = "title";
    ILogMapper stubbedLogMapper = MockRepository.GenerateStub<ILogMapper>();
    stubbedLogMapper.Stub(x => x.FindLogItem(id)).Return(item).Repeat.Any();
    m_service.SetMapper(stubbedLogMapper);
    // Act: query through the plain service interface and through the DTO contract.
    ILogItem result = m_service.FindLogItem(new LogItem() { Id = id });
    LogItemDTO serviceResult = ((ILogServiceContract)m_service).FindLogItem(new LogIdDTO() { Id = id });
    // Assert: every field round-trips unchanged on both paths.
    Assert.That(result.Id, Is.EqualTo(item.Id));
    Assert.That(result.Category, Is.EqualTo(item.Category));
    Assert.That(result.Event, Is.EqualTo(item.Event));
    Assert.That(result.Message, Is.EqualTo(item.Message));
    Assert.That(result.Severity, Is.EqualTo(item.Severity));
    Assert.That(result.Title, Is.EqualTo(item.Title));
    Assert.That(serviceResult.Id, Is.EqualTo(item.Id));
    Assert.That(serviceResult.Category, Is.EqualTo(item.Category));
    Assert.That(serviceResult.Event, Is.EqualTo(item.Event));
    Assert.That(serviceResult.Message, Is.EqualTo(item.Message));
    Assert.That(serviceResult.Severity, Is.EqualTo(item.Severity));
    Assert.That(serviceResult.Title, Is.EqualTo(item.Title));
}
// Writes every log entry to the debug output; errors additionally raise a two-line
// toast notification (title = message, body = exception message, if any).
void Logger_Logged(LogItem log)
{
    // Build a single debug line: timestamp [priority] message[: exception + stack].
    var s = log.Timestamp.ToString() + " [" + log.PriorityLabel + "] " + log.Message;
    if (log.Exception != null)
    {
        s += ": " + log.Exception.Message + " Stack trace:\n" + log.Exception.StackTrace;
    }
    Debug.WriteLine(s);
    if (log.Priority == LogPriority.Error)
    {
        // ToastText02 = one bold title line plus one wrapped body line.
        var toastTemplate = ToastTemplateType.ToastText02;
        var toastXml = ToastNotificationManager.GetTemplateContent(toastTemplate);
        toastXml.GetElementsByTagName("text")[0].AppendChild(toastXml.CreateTextNode(log.Message));
        toastXml.GetElementsByTagName("text")[1].AppendChild(toastXml.CreateTextNode(log.Exception != null ? log.Exception.Message : ""));
        var toast = new ToastNotification(toastXml);
        //var toastNode = toastXml.SelectSingleNode("/toast");
        //((XmlElement)toastNode).SetAttribute("launch", "{\"type\":\"toast\",\"param1\":\"12345\",\"param2\":\"67890\"}");
        ToastNotificationManager.CreateToastNotifier().Show(toast);
        /*Window.Current.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { new MessageDialog(s, "Error").ShowAsync(); });*/
    }
}
/// <summary>Writes the item through a logger obtained from each configured log factory.</summary>
public override void Log(LogItem logItem)
{
    foreach (var factory in _logFactories)
    {
        LogWithLogger(logItem, factory.GetLogger(_loggerName));
    }
}
/// <summary>Appends the formatted message to the buffer and flushes when the buffer is full or the flush interval has elapsed.</summary>
public void Write(LogItem message)
{
    // Measure elapsed time before appending so the decision reflects the previous flush.
    var sinceLastFlush = DateTime.Now - lastFlush;
    constructor.ConstructLine(buffer, message);
    var mustFlush = buffer.Length >= BufferSize || sinceLastFlush > forceFlush;
    if (mustFlush)
    {
        Flush();
    }
}
// GET: LogItems/Create
/// <summary>Shows the creation form for a new log item, optionally pre-bound to an asset.</summary>
public ActionResult Create(int? assetId)
{
    var logItem = new LogItem { DateCreated = DateTime.Now };
    if (assetId.HasValue)
    {
        logItem.AssetId = assetId.Value;
    }
    // Remember where the user came from so the form can navigate back after saving.
    TempData["referringController"] = GetReferringControllerName(Request.UrlReferrer);
    SetCreateAndEditViewbag(logItem);
    return View(logItem);
}
/// <summary>
/// Spawns a background loop that publishes a LogItem through the Messenger every
/// two seconds with an incrementing counter.
/// </summary>
private void StartExecute()
{
    // Fix: this task never completes, so mark it LongRunning to give it a dedicated
    // thread instead of permanently occupying a thread-pool worker.
    Task.Factory.StartNew(() =>
    {
        int i = 0;
        while (true)
        {
            LogItem info = new LogItem(DateTime.UtcNow, i, "Application started.");
            Messenger.Default.Send(info);
            Thread.Sleep(2000);
            i += 1;
        }
    }, TaskCreationOptions.LongRunning);
}
/// <summary>Writes the item to the debug listeners, including the logger name when one is set, then any exception on its own line.</summary>
public override void Log(LogItem logItem)
{
    if (LoggerName == null)
    {
        System.Diagnostics.Debug.WriteLine("{0} [{1}] {2}", logItem.Timestamp, logItem.LogLevel, logItem.Message);
    }
    else
    {
        System.Diagnostics.Debug.WriteLine("{0} [{1}] [{2}] {3}", logItem.Timestamp, logItem.LogLevel, logItem.LoggerName, logItem.Message);
    }
    if (logItem.Exception != null)
    {
        System.Diagnostics.Debug.WriteLine(logItem.Exception);
    }
}
/// <summary>Increments the counter matching the added item's log type; other types leave all counters untouched.</summary>
public void OnItemAdded (LogItem logItem)
{
    if (logItem.type == LogType.LOG)
    {
        Logs++;
    }
    else if (logItem.type == LogType.WARNING)
    {
        Warnings++;
    }
    else if (logItem.type == LogType.ERROR)
    {
        Errors++;
    }
}
/// <summary>Instantiates a row for the log item, parents it under this transform, applies filtering, and lays it out.</summary>
public RectTransform AddItem (LogItem logItem)
{
    var row = Instantiate (this.item);
    row.SetParent (transform, false);
    row.GetComponent<BLogItem> ().LogItem = logItem;
    ApplyIncludeAndExclude (row);
    UpdateChildPosition (row);
    UpdateHeight ();
    return row;
}
/// <summary>Builds a log item from the supplied attributes, stores it in LogItemz, and returns it.</summary>
public LogItem CreateLogItem(Guid categoryGuid, Guid eventGuid, Guid severityGuid, Guid incident, string title, string message)
{
    var created = new LogItem
    {
        Id = FindLastLogId(),
        Category = categoryGuid,
        Event = eventGuid,
        Severity = severityGuid,
        Incident = incident,
        Title = title,
        Message = message
    };
    LogItemz.Add(created);
    return created;
}
// Writes one log item to the output, emitting a timestamp marker ("TS") whenever
// more than 100 ms have passed since the last displayed time, or a warp marker
// ("TSW") when time appears to have gone backwards.
private void DumpItem(TextWriter output, bool asCsv, LogItem item)
{
    if (!LastDisplayedTime.HasValue || item.Time.Subtract(LastDisplayedTime.GetValueOrDefault()) > TimeSpan.FromMilliseconds(100))
    {
        LastDisplayedTime = item.Time;
        output.WriteLine(asCsv ? "TS, {0}, {1}" : "[TS] {0}, {1}", item.Time.TotalMilliseconds, item.Time);
    }
    else if (item.Time.Subtract(LastDisplayedTime.GetValueOrDefault()) < TimeSpan.Zero)
    {
        LastDisplayedTime = item.Time;
        // racing with the analysis...
        output.WriteLine(asCsv ? "TSW" : "[TSW]");
    }
    try
    {
        if (asCsv)
        {
            output.WriteLine("{0}, {1}", item.Event, string.Join(", ", AsCsvStrings(item.Args)));
        }
        else
        {
            output.WriteLine("[{0}] {1}", item.Event, string.Join(", ", item.Args));
        }
    }
    catch
    {
        // Deliberate best-effort: a bad Args entry must not abort the whole dump.
        // NOTE(review): consider logging the failure instead of dropping it silently.
    }
}
// Collects a log entry for the in-game console, newest first, suppressing
// immediate duplicates of the most recent message.
void Log(string message, string stackTrace, LogType type)
{
    LogItem tmp = new LogItem();
    tmp.message = message;
    tmp.stackTrace = stackTrace;
    tmp.type = type;
    // Two options really:
    //logs.Add(tmp); // new messages at the end
    if(logs.Count >= 1)
    {
        // Skip the message if it is identical to the most recent one.
        // Added because some errors (e.g. from sounds) fire many times per second
        // and drown out everything else in the console.
        if(logs[0].message != message)
        {
            logs.Insert (0, tmp); // new messages at the front
        }
    }
    else
    {
        logs.Insert (0, tmp); // new messages at the front
    }
}
/// <summary>Dispatches the item to the logger method matching its level; levels outside the known set are ignored.</summary>
private static void LogWithLogger(LogItem logItem, ILogger logger)
{
    var level = logItem.LogLevel;
    if (level == LogLevel.Trace)
    {
        logger.Trace(() => logItem.Message, logItem.Exception);
    }
    else if (level == LogLevel.Info)
    {
        logger.Info(() => logItem.Message, logItem.Exception);
    }
    else if (level == LogLevel.Warn)
    {
        logger.Warn(() => logItem.Message, logItem.Exception);
    }
    else if (level == LogLevel.Error)
    {
        logger.Error(() => logItem.Message, logItem.Exception);
    }
    else if (level == LogLevel.Fatal)
    {
        logger.Fatal(() => logItem.Message, logItem.Exception);
    }
}
/// <summary>Records a map point for the given source, creating the per-source list on first use.</summary>
private void LogMapPoint(object aSrc, string aSrcType, object aItem)
{
    // Dictionary keys cannot be null; attribute unknown sources to the Reader type.
    var source = aSrc ?? (object)typeof(Reader);
    List<LogItem> entries;
    if (!mLogMap.TryGetValue(source, out entries))
    {
        entries = new List<LogItem>();
        mLogMap.Add(source, entries);
    }
    entries.Add(new LogItem() { SrcType = aSrcType, Item = aItem });
}
// Verifies that a constructed log line for an item carrying a real (thrown) exception
// satisfies the expected string criteria.
public void ConstructLineTest()
{
    ILogConstructor target = CreateLogConstructor(); // TODO: Initialize to an appropriate value
    // Throw and catch so the exception carries a genuine stack trace.
    Exception ex = null;
    try
    {
        throw new Exception(exception);
    }
    catch (Exception ex1)
    {
        ex = ex1;
    }
    LogItem item = new LogItem(category, level, message, ids, ex);
    LogLevel.SetLevel(LogLevel.Error, "Error");
    string actual;
    StringBuilder sb = new StringBuilder();
    target.ConstructLine(sb, item);
    actual = sb.ToString();
    // stringCriteria is the fixture-supplied predicate for the expected line format.
    Assert.IsTrue(stringCriteria(actual));
}
// Builds a LogItem describing an MVC controller event (action, area, form/query
// parameters, user, timing) and routes it to log4net at a level derived from the
// event name: "error" → Warn, "start" → Info, everything else → Debug.
public static void Write(IController controller, string anevent, string result, string customData){
    var c = controller as Controller;
    var descriptor = MvcContext.Create(((Controller) controller).Context, ((Controller) controller).ControllerContext, controller);
    descriptor.Category = anevent;
    // var allowLog = ControllerExpert.Run("log", true, descriptor).ToBoolean();
    // if (allowLog){
    var newlog = new LogItem();
    newlog.Event = anevent;
    newlog.CustomData = customData;
    newlog.Result = result;
    newlog.Area = c.AreaName;
    newlog.Action = c.Action;
    newlog.Controller = c.Name;
    newlog.Time = DateTime.Now;
    newlog.Usr = myapp.usrName;
    // Join form and query strings; strip the leading "&" left when the form part is empty.
    var p = string.Join("&", new[]{c.Form.ToString(), c.Query.ToString()});
    if (p.StartsWith("&")){
        p = p.Remove(0, 1);
    }
    newlog.Params = p;
    newlog.RequestTime = HttpContext.Current.Timestamp;
    if (anevent.Contains("error")){
        log.Warn(newlog.ToString());
    }
    else if (anevent.Contains("start")){
        log.Info(newlog.ToString());
    }
    else{
        log.Debug(newlog.ToString());
    }
    // }
}
/// <summary>Delegates the update of an existing log item to the data-access layer.</summary>
public void AlterLogItems(LogItem logItem) => dataAccess.AlterLogItems(logItem);
/// <summary>Delegates the deletion of a log item to the data-access layer.</summary>
public void DeleteLogItems(LogItem deleteLogItem) => dataAccess.DeleteLogItems(deleteLogItem);
/// <summary>Delegates the insertion of a new log item to the data-access layer.</summary>
public void AddLogItems(LogItem addLogItem) => dataAccess.AddLogItems(addLogItem);
/// <summary>Appends this constructor's fixed text segment; the item itself is not used.</summary>
public void ConstructLine(StringBuilder sb, LogItem item) => sb.Append(staticString);
/// <summary>
/// opens a dgindex script
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="path">dgindex script</param>
/// <param name="AR">aspect ratio selection to be used; null means auto-detect from the d2v</param>
/// <param name="horizontalResolution">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not ar signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="log">log item to record progress and errors into</param>
/// <param name="avsSettings">AviSynth script settings (template, mod16 method, denoise, resize)</param>
/// <param name="autoDeint">whether automatic deinterlacing analysis should run</param>
/// <param name="settings">the codec settings (used only for x264)</param>
/// <param name="dar">detected/derived display aspect ratio of the output</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string openVideo(string path, Dar?AR, int horizontalResolution, bool signalAR, LogItem log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar?dar)
{
    dar = null;
    IMediaFile d2v = new d2vFile(path);
    IVideoReader reader = d2v.GetVideoReader();
    if (reader.FrameCount < 1)
    {
        // NOTE(review): this string literal contains a raw line break in the original source.
        log.Error("DGDecode reported 0 frames in this file. This is a fatal error. 
Please recreate the DGIndex project");
        return("");
    }
    //Autocrop
    CropValues final = Autocrop.autocrop(reader);
    if (signalAR)
    {
        // Adjust crop to a mod16-friendly size using the configured strategy.
        if (avsSettings.Mod16Method == mod16Method.overcrop)
        {
            ScriptServer.overcrop(ref final);
        }
        else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
        {
            ScriptServer.cropMod4Horizontal(ref final);
        }
        else if (avsSettings.Mod16Method == mod16Method.undercrop)
        {
            ScriptServer.undercrop(ref final);
        }
    }
    // left == -1 is the autocrop failure sentinel.
    bool error = (final.left == -1);
    if (!error)
    {
        log.LogValue("Autocrop values", final);
    }
    else
    {
        log.Error("Autocrop failed, aborting now");
        return("");
    }
    decimal customDAR;
    log.LogValue("Auto-detect aspect ratio now", AR == null);
    //Check if AR needs to be autodetected now
    if (AR == null) // it does
    {
        customDAR = d2v.Info.DAR.ar;
        if (customDAR > 0)
        {
            log.LogValue("Aspect ratio", customDAR);
        }
        else
        {
            // No usable DAR in the d2v; fall back to ITU 16:9 PAL.
            customDAR = Dar.ITU16x9PAL.ar;
            log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
    {
        customDAR = AR.Value.ar;
    }
    // Minimise upsizing
    int sourceHorizontalResolution = (int)d2v.Info.Width - final.right - final.left;
    if (horizontalResolution > sourceHorizontalResolution)
    {
        if (avsSettings.Mod16Method == mod16Method.resize)
        {
            // Step down in mod16 increments until close to the source width.
            while (horizontalResolution > sourceHorizontalResolution + 16)
            {
                horizontalResolution -= 16;
            }
        }
        else
        {
            horizontalResolution = sourceHorizontalResolution;
        }
    }
    //Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
    int scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
    log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);
    if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
    {
        x264Settings xs = (x264Settings)settings;
        if (xs.Level != 15)
        {
            AVCLevels al = new AVCLevels();
            log.LogValue("AVC level", al.getLevels()[xs.Level]);
            int compliantLevel = 15;
            // NOTE(review): the loop validates via the field `this.al`, not the local
            // `al` declared just above — confirm this shadowing is intentional.
            while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
            {
                // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                string levelName = al.getLevels()[xs.Level]; // NOTE(review): unused local
                horizontalResolution -= 16;
                scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            }
            log.LogValue("Resolution adjusted for AVC Level", horizontalResolution + "x" + scriptVerticalResolution);
        }
    }
    //Generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";
    inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v, false, false, false, 0);
    log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        // Run the source detector synchronously (wait for the finished callback)
        // to pick an appropriate deinterlacing filter chain.
        string d2vPath = path;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        deinterlaceLines = filters[0].Script;
        log.LogValue("Deinterlacing used", deinterlaceLines);
    }
    inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0);
    cropLine = ScriptServer.GetCropLine(true, final);
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
    resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);
    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        // Prepend DAR globals so downstream tools can pick up the signalled AR.
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    log.LogValue("Generated Avisynth script", newScript);
    try
    {
        StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"));
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return("");
    }
    return(Path.ChangeExtension(path, ".avs"));
}
/// <summary>Recursively expands the given log item and all of its descendants.</summary>
private void expandAll(LogItem i) => expandOrCollapseAll(i, true);
/// <summary>Matches only when this filter is selected and the item's severity equals the filter's severity.</summary>
public bool IsMatch(LogItem item)
{
    return this.IsSelected && item.Severity == this.Severity;
}
/// <summary>Initializes the logger with an empty initial log item.</summary>
public Logging()
{
    this.item = new LogItem(string.Empty);
}
/**
 * Parses a log file: converts the raw string into entity beans,
 * one LogItem per log line.
 * On a full (non-appending) load, the first two lines are the page URL and
 * creation date header and are consumed separately.
 */
private void DeserializeLog(string logStr, bool isAddText)
{
    if (logStr == null || "".Equals(logStr))
    {
        return;
    }
    string[] logStrArr = logStr.Split(new string[] { "\r\n" }, StringSplitOptions.None);
    if (logStrArr == null || logStrArr.Length == 0)
    {
        return;
    }
    Debug.Log("logStrArr.Length: " + logStrArr.Length);
    List <string> logStrList = new List <string>(logStrArr);
    // Only performed on a full load (not when appending content).
    if (!isAddText)
    {
        try
        {
            this.logFileItem.pageUrl = logStrList[0].ToString();
            this.logFileItem.createDate = logStrList[1].ToString();
        }catch (ArgumentException err)
        {
            Debug.Log("DeserializeLog error!");
        }
        // Remove the URL and creation date header from the top of the log.
        logStrList.RemoveAt(0);
        logStrList.RemoveAt(0);
    }
    // Regex-match each log line, capturing the selected text via named groups.
    // The pattern is not very precise (e.g. the time matching),
    // but it is good enough for this purpose.
    string pattern = @"^(?<time>\[\d{4}-\d{2}-\d{2}\s(\d{2}:){2}\d{2}\])\s?(?<level>\[\w+\])(?<content>.*)";
    Regex regex = new Regex(pattern);
    foreach (string aLogStr in logStrList)
    {
        if (aLogStr == null || "".Equals(aLogStr) || Environment.NewLine.Equals(aLogStr))
        {
            continue;
        }
        //this.Log("aLogStr in loop=" + aLogStr);
        LogItem aLogJson = new LogItem();
        Match match = regex.Match(aLogStr);
        string time = match.Groups["time"].ToString();
        string level = match.Groups["level"].ToString();
        string content = match.Groups["content"].ToString();
        if (time != null && !"".Equals(time))
        {
            // Strip the surrounding square brackets.
            time = time.Substring(1, time.Length - 2);
            aLogJson.time = time;
        }
        if (level != null && !"".Equals(level))
        {
            // Strip the surrounding square brackets.
            level = level.Substring(1, level.Length - 2);
            aLogJson.level = level;
        }
        if (!String.IsNullOrEmpty(content))
        {
            aLogJson.content = content;
        }
        //this.Log("aLogJson.ToString: " + aLogJson.ToString());
        this.logFileItem.logList.Add(aLogJson);
    }
}
/// <summary>Forwards the log entry to the underlying log grid form.</summary>
public void AddLog(LogItem log)
{
    var logForm = (FDgvLog)form;
    logForm.AddLog(log);
}
/// <summary>Enters one nesting level and records the message on the item at the new depth.</summary>
public void StartWrite(string message, LogItem item)
{
    depth += 1;
    item.Add(message, depth);
}
// Inserts a log item at the top of the list box, collapsing consecutive duplicate
// messages into a single entry with a repeat count, and capping the list at 250 rows.
// Safe to call from any thread (re-invokes onto the UI thread when required).
private void AddToLog(LogItem logitem)
{
    if (lbLog == null)
        return;
    if (lbLog.InvokeRequired)
    {
        // Marshal onto the UI thread and bail out of this (worker-thread) call.
        lbLog.Invoke((Action<LogItem>)AddToLog, logitem);
        return;
    }
    // Track consecutive repeats of the same message text.
    if (_LastLogMessage != logitem.Message)
    {
        _LastLogMessage = logitem.Message;
        _LastLogMessageCount = 1;
    }
    else
    {
        _LastLogMessageCount++;
    }
    if (_LastLogMessageCount > 1)
    {
        // Replace the previous entry with one carrying the updated repeat count.
        logitem.Message += " (message repeated " + _LastLogMessageCount + " times)";
        lbLog.Items.RemoveAt(0);
    }
    lbLog.Items.Insert(0, logitem);
    // Trim oldest entries beyond the 250-row cap.
    while (lbLog.Items.Count > 250)
    {
        lbLog.Items.RemoveAt(lbLog.Items.Count - 1);
    }
}
/// <summary>Wraps the message in a LogItem and enqueues it for the log worker.</summary>
private static void finalLog(l level, string caller, string text)
{
    var entry = new LogItem(level, caller, text);
    LogQueue.Add(entry);
}
/// <summary>Prints the item to the console, prefixed with the remote test name and a RED status.</summary>
public override void Write(LogItem item)
{
    var payload = item.LogLevel + " " + item.Message;
    Console.WriteLine("{0} - {1} {2}", this.RemoteTest, Status.RED.ToString(), payload);
}
/// <summary>Appends the item's exception details to the line being built.</summary>
public void ConstructLine(StringBuilder sb, LogItem item) => AppendException(sb, item.Ex);
/// <summary>Appends the log entry to the message text box asynchronously on the UI thread.</summary>
private void ReceiveMsg(LogItem log)
{
    Action append = () =>
    {
        txtMessage.Text += log.ToString() + System.Environment.NewLine;
    };
    this.BeginInvoke(append);
}
/// <summary>Writes the entry's content to the console in the entry's color.</summary>
private void OnLogAdded(LogItem obj)
{
    Console.ForegroundColor = obj.Color;
    Console.WriteLine(obj.Content);
    // Fix: restore the console color so subsequent, unrelated output is not
    // rendered in this entry's color (matches the other console writers here).
    Console.ResetColor();
}
// Opens a source file in the file indexer: probes it (DVD/Blu-ray aware), fills the
// codec/container/scan-type labels and the audio track list, then enables whichever
// indexer buttons the recommended index type supports. If no indexer applies, all
// indexer controls are disabled and a warning dialog is shown.
private void openVideo(string fileName)
{
    setControlState(true);
    // Reuse the shared FileIndexer log, creating it on first use.
    this._oLog = MainForm.Instance.FileIndexerLog;
    if (_oLog == null)
    {
        _oLog = MainForm.Instance.Log.Info("FileIndexer");
        MainForm.Instance.FileIndexerLog = _oLog;
    }
    gbFileInformation.Text = " File Information ";
    iFile = null;
    if (GetDVDorBluraySource(fileName, ref iFile))
    {
        if (iFile != null)
        {
            fileName = iFile.FileName;
            // Decorate the group box title with PGC/angle info for disc sources.
            string strText = (iFile.VideoInfo.PGCNumber > 1 ? " - PGC " + iFile.VideoInfo.PGCNumber.ToString("D2") : string.Empty) + (iFile.VideoInfo.AngleNumber > 0 ? " - Angle " + iFile.VideoInfo.AngleNumber + " " : string.Empty);
            if (strText.Trim().Length > 0)
            {
                gbFileInformation.Text += strText.Trim() + " ";
            }
        }
    }
    else
    {
        // Plain file: probe it with MediaInfo.
        iFile = new MediaInfoFile(fileName, ref _oLog);
    }
    if (iFile != null && iFile.HasVideo)
    {
        strVideoCodec = iFile.VideoInfo.Track.Codec;
        strVideoScanType = iFile.VideoInfo.ScanType;
        strContainerFormat = iFile.ContainerFileTypeString;
    }
    else
    {
        strVideoCodec = strVideoScanType = strContainerFormat = string.Empty;
    }
    // Populate the information labels, substituting "unknown" for missing values.
    if (String.IsNullOrEmpty(strVideoCodec))
    {
        txtCodecInformation.Text = " unknown";
    }
    else
    {
        txtCodecInformation.Text = " " + strVideoCodec;
    }
    if (String.IsNullOrEmpty(strContainerFormat))
    {
        txtContainerInformation.Text = " unknown";
    }
    else
    {
        txtContainerInformation.Text = " " + strContainerFormat;
    }
    if (String.IsNullOrEmpty(strVideoScanType))
    {
        txtScanTypeInformation.Text = " unknown";
    }
    else
    {
        txtScanTypeInformation.Text = " " + strVideoScanType;
    }
    if (iFile != null && iFile.HasAudio)
    {
        audioTracks = iFile.AudioInfo.Tracks;
    }
    else
    {
        audioTracks = new List <AudioTrackInfo>();
    }
    if (input.Filename != fileName)
    {
        input.Filename = fileName;
    }
    generateAudioList();
    if (iFile != null)
    {
        // Enable the indexer buttons only when the recommended type is supported.
        IndexType newType = IndexType.NONE;
        iFile.recommendIndexer(out newType);
        if (newType == IndexType.D2V || newType == IndexType.DGM || newType == IndexType.DGI || newType == IndexType.FFMS || newType == IndexType.LSMASH)
        {
            btnD2V.Enabled = iFile.isD2VIndexable();
            btnDGM.Enabled = iFile.isDGMIndexable();
            btnDGI.Enabled = iFile.isDGIIndexable();
            btnFFMS.Enabled = iFile.isFFMSIndexable();
            btnLSMASH.Enabled = iFile.isLSMASHIndexable();
            gbIndexer.Enabled = gbAudio.Enabled = gbOutput.Enabled = true;
            changeIndexer(newType);
            setControlState(false);
            return;
        }
    }
    // source file not supported
    btnD2V.Enabled = btnDGM.Enabled = btnDGI.Enabled = btnFFMS.Enabled = btnLSMASH.Enabled = false;
    gbIndexer.Enabled = gbAudio.Enabled = gbOutput.Enabled = false;
    btnFFMS.Checked = btnD2V.Checked = btnDGM.Checked = btnDGI.Checked = btnLSMASH.Checked = false;
    output.Filename = "";
    demuxNoAudiotracks.Checked = true;
    setControlState(false);
    MessageBox.Show("No indexer for this file found!", "Warning", MessageBoxButtons.OK, MessageBoxIcon.Warning);
}
/// <summary>Recursively collapses the given log item and all of its descendants.</summary>
private void collapseAll(LogItem i) => expandOrCollapseAll(i, false);
/// <summary>Builds a log entry from the supplied data and places it on the queue.</summary>
public void addMessage(DateTime date, string mess, TimeSpan time)
{
    var entry = new LogItem(date, mess, time);
    queue.Add(entry);
}
// Builds a detailed information panel for the currently queried or selected unit,
// including only the sections enabled by the show* option flags, and displays it
// through the log panel.
private void InformationOnPropertyChanged(object sender, PropertyChangedEventArgs e)
{
    // Prefer the explicitly queried unit; fall back to the first selected unit.
    var unit = (this.player.QueryUnit ?? this.player.Selection.FirstOrDefault()) as Unit;
    if (unit?.IsValid != true)
    {
        return;
    }
    this.lastUnitInfo = unit.Handle;
    var item = new LogItem(LogType.Unit, Color.PaleGreen, "Unit information");
    item.AddLine("Unit name: " + unit.Name, unit.Name);
    item.AddLine("Unit network name: " + unit.NetworkName, unit.NetworkName);
    item.AddLine("Unit classID: " + unit.ClassId, unit.ClassId);
    item.AddLine("Unit position: " + unit.Position, unit.Position);
    if (this.showLevel)
    {
        item.AddLine("Unit level: " + unit.Level, unit.Level);
    }
    if (this.showTeam)
    {
        item.AddLine("Unit team: " + unit.Team, unit.Team);
    }
    if (this.showsHpMp)
    {
        item.AddLine("Unit health: " + unit.Health + "/" + unit.MaximumHealth);
        item.AddLine("Unit mana: " + (int)unit.Mana + "/" + (int)unit.MaximumMana);
    }
    item.AddLine("Unit attack capability: " + unit.AttackCapability, unit.AttackCapability);
    if (this.showVision)
    {
        item.AddLine("Unit vision: " + unit.DayVision + "/" + unit.NightVision);
    }
    if (this.showState)
    {
        item.AddLine("Unit state: " + unit.UnitState, unit.UnitState);
    }
    if (this.showsAbilityInfo)
    {
        // "special_" prefix marks talents; remaining spells are split by passive vs active.
        item.AddLine("Abilities =>");
        item.AddLine("   Talents count: " + unit.Spellbook.Spells.Count(x => x.Name.StartsWith("special_")));
        item.AddLine(
            "   Active spells count: "
            + unit.Spellbook.Spells.Count(
                x => !x.Name.StartsWith("special_") && x.AbilityBehavior != AbilityBehavior.Passive));
        item.AddLine(
            "   Passive spells count: "
            + unit.Spellbook.Spells.Count(
                x => !x.Name.StartsWith("special_") && x.AbilityBehavior == AbilityBehavior.Passive));
    }
    if (this.showItemInfo && unit.HasInventory)
    {
        item.AddLine("Items =>");
        item.AddLine("   Inventory Items count: " + unit.Inventory.Items.Count());
        item.AddLine("   Backpack Items count: " + unit.Inventory.Backpack.Count());
        item.AddLine("   Stash Items count: " + unit.Inventory.Stash.Count());
    }
    if (this.showModifierInfo)
    {
        item.AddLine("Modifiers =>");
        item.AddLine("   Active modifiers count: " + unit.Modifiers.Count(x => !x.IsHidden));
        item.AddLine("   Hidden modifiers count: " + unit.Modifiers.Count(x => x.IsHidden));
    }
    this.log.Display(item);
}
// Answers an SVN log-report request against TFS history: fetches the changesets for
// the requested revision range and path, then streams them out as an SVN
// <S:log-report> XML document, mapping each TFS change type to the corresponding
// SVN path element.
private static void LogReport(TFSSourceControlProvider sourceControlProvider, LogReportData logreport, string path, TextWriter output)
{
    // Strip the leading "/!svn/..." prefix (first 9+ chars) to get the server path.
    string serverPath = "/";
    if (path.IndexOf('/', 9) > -1)
    {
        serverPath = path.Substring(path.IndexOf('/', 9));
    }
    int end = int.Parse(logreport.EndRevision);
    int start = int.Parse(logreport.StartRevision);
    LogItem logItem = sourceControlProvider.GetLog(serverPath, Math.Min(start, end), Math.Max(start, end), Recursion.Full, int.Parse(logreport.Limit ?? "1000000"));
    // SVN clients may request ascending order; TFS history comes back descending.
    if (start < end)
    {
        Array.Reverse(logItem.History);
    }
    output.Write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n");
    output.Write("<S:log-report xmlns:S=\"svn:\" xmlns:D=\"DAV:\">\n");
    foreach (SourceItemHistory history in logItem.History)
    {
        output.Write("<S:log-item>\n");
        output.Write("<D:version-name>" + history.ChangeSetID + "</D:version-name>\n");
        output.Write("<D:creator-displayname>" + history.Username + "</D:creator-displayname>\n");
        output.Write("<S:date>" + Helper.FormatDate(history.CommitDateTime) + "</S:date>\n");
        output.Write("<D:comment>" + Helper.EncodeB(history.Comment) + "</D:comment>\n");
        foreach (SourceItemChange change in history.Changes)
        {
            // Flag-test each change type; a change may carry multiple flags.
            if ((change.ChangeType & ChangeType.Add) == ChangeType.Add || (change.ChangeType & ChangeType.Undelete) == ChangeType.Undelete)
            {
                output.Write("<S:added-path>/" + Helper.EncodeB(change.Item.RemoteName) + "</S:added-path>\n");
            }
            else if ((change.ChangeType & ChangeType.Edit) == ChangeType.Edit)
            {
                output.Write("<S:modified-path>/" + Helper.EncodeB(change.Item.RemoteName) + "</S:modified-path>\n");
            }
            else if ((change.ChangeType & ChangeType.Delete) == ChangeType.Delete)
            {
                output.Write("<S:deleted-path>/" + Helper.EncodeB(change.Item.RemoteName) + "</S:deleted-path>\n");
            }
            else if ((change.ChangeType & ChangeType.Rename) == ChangeType.Rename)
            {
                // SVN models a rename as a copy-from plus a delete of the old path.
                var renamedItem = (RenamedSourceItem)change.Item;
                output.Write("<S:added-path copyfrom-path=\"/" + Helper.EncodeB(renamedItem.OriginalRemoteName) + "\" copyfrom-rev=\"" + renamedItem.OriginalRevision + "\">/" + Helper.EncodeB(change.Item.RemoteName) + "</S:added-path>\n");
                output.Write("<S:deleted-path>/" + Helper.EncodeB(renamedItem.OriginalRemoteName) + "</S:deleted-path>\n");
            }
            else if ((change.ChangeType & ChangeType.Branch) == ChangeType.Branch)
            {
                // A branch is a copy-from without deleting the original.
                var renamedItem = (RenamedSourceItem)change.Item;
                output.Write("<S:added-path copyfrom-path=\"/" + Helper.EncodeB(renamedItem.OriginalRemoteName) + "\" copyfrom-rev=\"" + renamedItem.OriginalRevision + "\">/" + Helper.EncodeB(change.Item.RemoteName) + "</S:added-path>\n");
            }
            else if (change.ChangeType == ChangeType.Merge)
            {
                // Ignore merge entries that are not an add, edit, delete, or rename
            }
            else
            {
                throw new InvalidOperationException("Unrecognized change type " + change.ChangeType);
            }
        }
        output.Write("</S:log-item>\n");
    }
    output.Write("</S:log-report>\n");
}
/// <summary>Captures the anchor item and its grouping values; Length caches the value count.</summary>
public GroupKey(LogItem anchor, object[] values)
{
    this.Anchor = anchor;
    this.Values = values;
    this.Length = values.Length;
}
/// <summary>Routes the item to AddItem through the main window's GUI-thread marshaller.</summary>
public void Add(LogItem Item) => MainWindow.UpdateGui(this.AddItem, Item);
// Executes a GROUP BY over the item stream: visits the inner node first, then runs a
// parallel aggregation (Update per item, Join to merge partitions, Complete to emit
// one result item per group) while preserving ordering.
public void Visit(GroupByNode node)
{
    node.Inner[0].Accept(this);
    Items = Items.Aggregate(() => new Dictionary <GroupKey, object[]>(), Update, Join, Complete).AsParallel().AsOrdered();
    // Per-partition accumulator: folds one item into its group's aggregate states.
    Dictionary <GroupKey, object[]> Update(Dictionary <GroupKey, object[]> state, LogItem item)
    {
        // calculate the key for the item
        var key = new GroupKey(item, node.GroupFunctions.Select(k => k(item)).ToArray());
        // find the group for the item
        object[] group;
        if (!state.TryGetValue(key, out group))
        {
            group = node.Aggregates.Select(a => a.Initialize()).ToArray();
            state[key] = group;
        }
        // update the group with the item
        for (var n = 0; n < node.Aggregates.Length; n++)
        {
            group[n] = node.Aggregates[n].Update(group[n], item);
        }
        return(state);
    }
    // Merges partition b into partition a, joining aggregate states for shared keys.
    Dictionary <GroupKey, object[]> Join(Dictionary <GroupKey, object[]> a, Dictionary <GroupKey, object[]> b)
    {
        foreach (var group in b)
        {
            if (a.ContainsKey(group.Key))
            {
                a[group.Key] = node.Aggregates.Select((x, i) => x.Join(a[group.Key][i], group.Value[i])).ToArray();
            }
            else
            {
                a[group.Key] = group.Value;
            }
        }
        return(a);
    }
    // Finalizes each group into an output LogItem carrying group columns and
    // completed aggregate values (multi-valued aggregates become newline-joined).
    IEnumerable <LogItem> Complete(Dictionary <GroupKey, object[]> state)
    {
        foreach (var key in state.Keys)
        {
            // create a new item and populate it using the group key
            var item = new LogItem(string.Empty, key.Anchor.File, key.Anchor.Member, key.Anchor.Position, key.Anchor.Line);
            for (var n = 0; n < node.GroupNames.Length; n++)
            {
                item.Fields[node.GroupNames[n]] = key.Values[n];
            }
            // complete all aggregates for the group and add them to the group
            var aggregates = node.Aggregates.Select((a, i) => a.Complete(state[key][i]).ToList()).ToList();
            for (var n = 0; n < node.Aggregates.Length; n++)
            {
                if (aggregates[n].Count > 1)
                {
                    item.Fields[node.AggregateNames[n]] = string.Join("\n", aggregates[n]);
                }
                else
                {
                    item.Fields[node.AggregateNames[n]] = aggregates[n].FirstOrDefault();
                }
            }
            yield return(item);
        }
    }
}
/// <summary>
/// saves all the profiles
/// this is called when the program exits and ensures that all
/// currently defined profiles are saved, overwriting currently existing ones
/// </summary>
/// <param name="savePath">target directory; when null/empty the default profile path is used</param>
/// <param name="profiles">the profiles to serialize</param>
/// <returns>true when every profile was written successfully; false otherwise</returns>
public static bool WriteProfiles(string savePath, IEnumerable <Profile> profiles)
{
    if (string.IsNullOrEmpty(savePath))
    {
        savePath = GetDefaultProfilPath();
    }
    // remove old backup files if available
    if (!deleteFiles(savePath, "*.backup"))
    {
        return(false);
    }
    // backup profiles: copy every existing .xml to a .backup sibling so a failed
    // write can be rolled back.
    try
    {
        DirectoryInfo fi = new DirectoryInfo(savePath);
        FileInfo[] files = fi.GetFiles("*.xml", SearchOption.AllDirectories);
        foreach (FileInfo f in files)
        {
            f.CopyTo(Path.Combine(f.Directory.FullName, Path.ChangeExtension(f.Name, ".backup")));
        }
    }
    catch (Exception ex)
    {
        LogItem _oLog = MainForm.Instance.Log.Info("Error");
        _oLog.LogValue("Backup profile files could not be created", ex, ImageType.Error);
        // remove backup files
        deleteFiles(savePath, "*.backup");
        return(false);
    }
    // remove profile files
    if (!deleteFiles(savePath, "*.xml"))
    {
        // restore backup: deletion failed part-way, so copy the .backup files
        // back to .xml to return to the previous state.
        try
        {
            DirectoryInfo fi = new DirectoryInfo(savePath);
            FileInfo[] files = fi.GetFiles("*.backup", SearchOption.AllDirectories);
            foreach (FileInfo f in files)
            {
                f.CopyTo(Path.Combine(f.Directory.FullName, Path.ChangeExtension(f.Name, ".xml")));
            }
        }
        catch (Exception e)
        {
            LogItem _oLog = MainForm.Instance.Log.Info("Error");
            _oLog.LogValue("Profile files could not be restored", e, ImageType.Error);
        }
        return(false);
    }
    bool bSuccess = true;
    try
    {
        foreach (Profile p in profiles)
        {
            if (!Util.XmlSerialize(p, profilePath(savePath, p)))
            {
                // Serialization failed for this profile: restore its backup, if any.
                string backupFile = Path.ChangeExtension(profilePath(savePath, p), ".backup");
                if (File.Exists(backupFile))
                {
                    File.Copy(backupFile, profilePath(savePath, p), true);
                }
                bSuccess = false;
            }
        }
        deleteFiles(savePath, "*.backup");
    }
    catch (Exception ex)
    {
        LogItem _oLog = MainForm.Instance.Log.Info("Error");
        _oLog.LogValue("Profile files could not be created", ex, ImageType.Error);
        bSuccess = false;
    }
    return(bSuccess);
}
/// <summary>
/// Lazily parses a log4j-XML log file and yields one LogItem per
/// &lt;log4j:event&gt; fragment that passes the given filter.
/// The file is read as an XML fragment stream (no single root element).
/// </summary>
/// <param name="dataSource">path of the log file to read</param>
/// <param name="filter">filter applied to each parsed entry before it is yielded</param>
public override IEnumerable<LogItem> GetEntries(string dataSource, FilterParams filter) {
    // Fragment conformance: log4j files contain a sequence of events with no root.
    var settings = new XmlReaderSettings { ConformanceLevel = ConformanceLevel.Fragment };
    var nt = new NameTable();
    var mgr = new XmlNamespaceManager(nt);
    // Pre-bind the log4j prefix so the undeclared namespace in the file parses.
    mgr.AddNamespace("log4j", Constants.LAYOUT_LOG4J);
    var pc = new XmlParserContext(nt, mgr, string.Empty, XmlSpace.Default);
    // Unix epoch; event timestamps are milliseconds since this instant.
    var date = new DateTime(1970, 1, 1, 0, 0, 0, 0);
    // NOTE(review): FileMode.OpenOrCreate on a read-only path creates an empty
    // file when dataSource does not exist — confirm that side effect is intended.
    using (var stream = new FileStream(dataSource, FileMode.OpenOrCreate, FileAccess.Read, FileShare.ReadWrite)) {
        using (var reader = new StreamReader(stream, System.Text.Encoding.Default, true)) {
            using (var xmlTextReader = XmlReader.Create(reader, settings, pc)) {
                var entryId = 1;
                DateTime? prevTimeStamp = null;
                while (xmlTextReader.Read()) {
                    // Skip everything until the next event start element.
                    if ((xmlTextReader.NodeType != XmlNodeType.Element) || (xmlTextReader.Name != "log4j:event")) {
                        continue;
                    }
                    // NOTE(review): entryId is only incremented on yield, so
                    // filtered-out entries make later ids repeat — confirm intended.
                    var entry = new LogItem { Id = entryId, Path = dataSource };
                    entry.Logger = xmlTextReader.GetAttribute("logger");
                    // Epoch-milliseconds attribute converted to local time.
                    entry.TimeStamp = date.AddMilliseconds(Convert.ToDouble(xmlTextReader.GetAttribute("timestamp"))).ToLocalTime();
                    // Delta = seconds elapsed since the previous event in the file.
                    if (prevTimeStamp.HasValue) {
                        entry.Delta = (entry.TimeStamp - prevTimeStamp.Value).TotalSeconds;
                    }
                    prevTimeStamp = entry.TimeStamp;
                    entry.Level = xmlTextReader.GetAttribute("level");
                    entry.Thread = xmlTextReader.GetAttribute("thread");
                    // Consume the event's child elements until the closing
                    // log4j:event tag (its Name matches the case below).
                    while (xmlTextReader.Read()) {
                        var breakLoop = false;
                        switch (xmlTextReader.Name) {
                            case "log4j:event":
                                breakLoop = true;
                                break;
                            default:
                                switch (xmlTextReader.Name) {
                                    case ("log4j:message"):
                                        entry.Message = xmlTextReader.ReadString();
                                        break;
                                    case ("log4j:data"):
                                        // Well-known MDC/property keys emitted by log4net/log4j.
                                        switch (xmlTextReader.GetAttribute("name")) {
                                            case ("log4net:UserName"):
                                                entry.UserName = xmlTextReader.GetAttribute("value");
                                                break;
                                            case ("log4japp"):
                                                entry.App = xmlTextReader.GetAttribute("value");
                                                break;
                                            case ("log4jmachinename"):
                                                entry.MachineName = xmlTextReader.GetAttribute("value");
                                                break;
                                            case ("log4net:HostName"):
                                                entry.HostName = xmlTextReader.GetAttribute("value");
                                                break;
                                        }
                                        break;
                                    case ("log4j:throwable"):
                                        entry.Throwable = xmlTextReader.ReadString();
                                        break;
                                    case ("log4j:locationInfo"):
                                        entry.Class = xmlTextReader.GetAttribute("class");
                                        entry.Method = xmlTextReader.GetAttribute("method");
                                        entry.File = xmlTextReader.GetAttribute("file");
                                        entry.Line = xmlTextReader.GetAttribute("line");
                                        break;
                                }
                                break;
                        }
                        if (breakLoop) {
                            break;
                        }
                    }
                    // Only entries passing the filter are yielded (and numbered).
                    if (filterByParameters(entry, filter)) {
                        yield return (entry);
                        entryId++;
                    }
                }
            }
        }
    }
}
/// <summary>Logs the given item by forwarding it to the underlying writer.</summary>
/// <param name="item">the item to write</param>
public void Log(LogItem item) => Write(item);
/// <summary>
/// Performs automated video bitrate calculation for a job that carries
/// BitrateCalculationInfo: it collects the linked audio files, reads the
/// input's frame count and framerate, derives the video bitrate needed to
/// hit the desired total output size for the given container, and applies
/// that bitrate to every linked video job.
/// </summary>
/// <param name="mainForm">the main form (not used directly in this method)</param>
/// <param name="ajob">the job to process; anything other than a VideoJob with
/// calculation info is ignored</param>
/// <returns>a log of the calculation, or null when no calculation applies</returns>
public static LogItem calculateBitrate(MainForm mainForm, Job ajob) {
    // Only video jobs explicitly flagged for bitrate calculation are handled.
    if (!(ajob is VideoJob))
        return null;
    VideoJob job = (VideoJob)ajob;
    if (job.BitrateCalculationInfo == null)
        return null;
    BitrateCalculationInfo b = job.BitrateCalculationInfo;
    LogItem log = new LogItem("Bitrate calculation for video");

    // Wrap each linked audio file so its size can enter the calculation.
    List<AudioBitrateCalculationStream> audioStreams = new List<AudioBitrateCalculationStream>();
    foreach (string s in b.AudioFiles)
        audioStreams.Add(new AudioBitrateCalculationStream(s));

    // Frame count and framerate of the video input drive the size model.
    double framerate;
    ulong framecount;
    JobUtil.getInputProperties(out framecount, out framerate, job.Input);
    CalcData data = new CalcData((long)framecount, (decimal)framerate, b.Container, job.Settings.Codec, job.Settings.NbBframes > 0, audioStreams.ToArray());
    data.TotalSize = b.DesiredSize;
    try {
        data.CalcByTotalSize();
    }
    catch (Exception e) {
        // Calculation failure is logged and returned; no jobs are modified.
        log.LogValue("Calculation failed", e, ImageType.Error);
        return log;
    }
    log.LogValue("Desired size after subtracting audio", data.VideoSize.KBExact + "KBs");
    log.LogValue("Calculated desired bitrate", data.VideoBitrate + "kbit/s");

    // Apply the computed bitrate to all linked video jobs.
    // NOTE(review): BitrateQuantizer apparently doubles as the bitrate field
    // in bitrate mode — confirm against VideoSettings.
    foreach (TaggedJob t in b.VideoJobs)
        ((VideoJob)t.Job).Settings.BitrateQuantizer = (int)data.VideoBitrate;
    return log;
}
/// <summary>
/// Decides whether a parsed log entry passes the user's filter settings:
/// level code, minimum date, exact thread name, and case-insensitive
/// substring match on message and logger.
/// </summary>
/// <param name="entry">the entry to test; must not be null</param>
/// <param name="parameters">the active filter; must not be null</param>
/// <returns>true when the entry satisfies every active criterion</returns>
/// <exception cref="ArgumentNullException">entry or parameters is null</exception>
private static bool filterByParameters(LogItem entry, FilterParams parameters) {
    if (entry == null) {
        throw new ArgumentNullException("entry");
    }
    if (parameters == null) {
        throw new ArgumentNullException("parameters");
    }

    // Level codes 1..5 select a single log4j level; any other code means
    // "all levels" (the original default branch accepted everything).
    string levelName;
    switch (parameters.Level) {
        case 1: levelName = "ERROR"; break;
        case 2: levelName = "INFO"; break;
        case 3: levelName = "DEBUG"; break;
        case 4: levelName = "WARN"; break;
        case 5: levelName = "FATAL"; break;
        default: levelName = null; break;
    }
    if (levelName != null && !String.Equals(entry.Level, levelName, StringComparison.InvariantCultureIgnoreCase)) {
        return false;
    }

    // Date filter: only entries at or after the given timestamp pass.
    if (parameters.Date.HasValue && entry.TimeStamp < parameters.Date) {
        return false;
    }

    // Thread filter: exact, case-insensitive match.
    if (!String.IsNullOrEmpty(parameters.Thread) && !String.Equals(entry.Thread, parameters.Thread, StringComparison.InvariantCultureIgnoreCase)) {
        return false;
    }

    // BUGFIX: the original dereferenced entry.Message / entry.Logger without a
    // null check, so entries missing those attributes threw
    // NullReferenceException; such entries are now simply rejected by the filter.
    if (!String.IsNullOrEmpty(parameters.Message) && (entry.Message == null || !entry.Message.ToUpper().Contains(parameters.Message.ToUpper()))) {
        return false;
    }
    if (!String.IsNullOrEmpty(parameters.Logger) && (entry.Logger == null || !entry.Logger.ToUpper().Contains(parameters.Logger.ToUpper()))) {
        return false;
    }
    return true;
}
/// <summary>
/// Records an exception log item once per unique (exception + additional
/// data) value: raises OnItemAdded, optionally echoes to the console, and
/// writes the exception text (plus its Data entries) to a per-day,
/// per-level, per-hash file, optionally gzip-compressed.
/// All failures are swallowed after being written to the console.
/// </summary>
public void AddItem(LogItem item) {
    // NOTE(review): item.Exception is dereferenced here without a null check
    // (only item itself is checked) — a LogItem with a null Exception throws
    // before the guard completes; confirm callers always set Exception.
    if (LogLevel == LogLevel.None || item == null || string.IsNullOrWhiteSpace(item.Exception.ToString())) {
        return;
    }
    try {
        // In-memory dedup key: exception text plus the serialized extra data.
        var uniqueValue = (item.Exception + AdditionalData.ToDebugString()).Trim();
        if (!_unique.Contains(uniqueValue)) {
            OnItemAdded.RaiseEvent(item, new EventArgs());
            _unique.Add(uniqueValue);
            // File name pattern: date, lowercased level, MD5 of the dedup value —
            // so the same exception never produces two files on one day.
            var file = Path.Combine(
                LogDir,
                string.Format(
                    _fileName,
                    DateTime.Now.ToString("yyyy_MM_dd"),
                    LogLevel.ToString().ToLower(),
                    (item.Exception + AdditionalData.ToDebugString()).ToMd5Hash()));
            // On-disk dedup: an existing file means this exception was already logged.
            if (File.Exists(file)) {
                return;
            }
            AddData(item.Exception);
            if (OutputConsole) {
                Console.ForegroundColor = ConsoleColor.Yellow;
                Console.WriteLine(item.Exception);
                Console.ResetColor();
            }
            // CreateNew: fails if the file appeared between the Exists check and here.
            using (
                var fileStream = new FileStream(
                    file,
                    FileMode.CreateNew,
                    FileAccess.Write,
                    FileShare.None,
                    4096,
                    true)) {
                // NOTE(review): the GZipStream wraps fileStream even when
                // Compression is false (the uncompressed branch writes to
                // fileStream directly) — confirm disposing the unused wrapper
                // does not append gzip framing to the plain-text file.
                using (Stream gzStream = new GZipStream(fileStream, CompressionMode.Compress, false)) {
                    // Body = exception text followed by one "key: value" line
                    // per entry in Exception.Data.
                    var text = item.Exception.ToString();
                    text = item.Exception.Data.Cast<DictionaryEntry>()
                        .Aggregate(
                            text,
                            (current, entry) => current + string.Format("{0}{1}: {2}", Environment.NewLine, entry.Key, entry.Value));
                    // NOTE(review): returning here leaves the just-created file empty.
                    if (string.IsNullOrWhiteSpace(text.Trim())) {
                        return;
                    }
                    var logByte = new UTF8Encoding(true).GetBytes(text);
                    if (Compression) {
                        gzStream.Write(logByte, 0, logByte.Length);
                    }
                    else {
                        fileStream.Write(logByte, 0, logByte.Length);
                    }
                }
            }
        }
    }
    catch (Exception ex) {
        // Logging must never crash the caller; report the failure and move on.
        Console.WriteLine(ex);
    }
}
/// <summary>
/// Appends a log entry to the per-entity-type Mongo log document: creates
/// the document on the entity's first log, otherwise replaces the existing
/// document with the entry added to its history.
/// </summary>
/// <typeparam name="T">logged entity type; its lowercased name becomes the collection name</typeparam>
/// <param name="log">the entry to append; Url, Method and File are stamped here</param>
/// <param name="id">identifier of the logged entity, stored as ObjectId string</param>
/// <param name="memberName">calling member, captured automatically</param>
/// <param name="memberFile">calling source file, captured automatically</param>
/// <returns>true when the document was inserted or the replacement modified it; false on error</returns>
public async Task<bool> RegisterOrUpdate<T>(LogItem<T> log, object id, [CallerMemberName] string memberName = "", [CallerFilePath] string memberFile = "") where T : class, ICloneable
{
    try
    {
        // Collection name defaults to the lowercased entity type name.
        if (string.IsNullOrEmpty(collectionName))
        {
            collectionName = typeof(T).Name.ToLower();
        }

        // Stamp the entry with the current request path and the caller's identity.
        log.Url = httpContext.HttpContext.Request.Path.Value;
        log.Method = memberName;
        log.File = memberFile;

        // Fetch the existing document for this entity, or start a new one.
        var logModel = await GetById<T>(id);
        var exist = logModel != null;
        if (!exist)
        {
            logModel = new LogModel<T>(log.User, log.OldData);
        }
        logModel.History.Add(log);
        logModel.ObjectId = id.ToString();

        var collection = mongoDatabase.GetCollection<LogModel<T>>(collectionName);
        if (exist)
        {
            // Replace the whole document keyed by its ObjectId string.
            var builder = Builders<LogModel<T>>.Filter;
            var filter = builder.Eq("ObjectId", $"{id}");
            var result = await collection.ReplaceOneAsync(filter, logModel);
            return result.ModifiedCount > 0;
        }

        await collection.InsertOneAsync(logModel);
        return true;
    }
    catch (Exception ex)
    {
        // FIX: the original emitted a dozen duplicated separator lines through
        // both Debug.Print and Debug.WriteLine, and used `ex?.` even though the
        // catch variable can never be null; the diagnostics are consolidated.
        Debug.WriteLine("----------------------Exception----------------------------------");
        Debug.WriteLine(ex);
        Debug.WriteLine("-----------------------Exception message---------------------------------");
        Debug.WriteLine(ex.Message);
        Debug.WriteLine("--------------------------Stack trace------------------------------");
        Debug.WriteLine(ex.StackTrace);
        Debug.WriteLine("-------------------------Inner exception-------------------------------");
        Debug.WriteLine(ex.InnerException);
        Debug.WriteLine("-------------------------Inner Exception message-------------------------------");
        Debug.WriteLine(ex.InnerException?.Message);
        // Swallow and report failure; logging must not break the request.
        return false;
    }
}
/// <summary>
/// Intentional no-op: this writer discards the message and item.
/// Presumably a null-object / disabled-output implementation — confirm
/// against the interface's other implementations.
/// </summary>
public void Write(string message, LogItem item) { }
/// <summary>
/// Demo walkthrough of the Aliyun Log Service client: lists logstores,
/// pushes a batch of log items, queries them back, fetches histograms,
/// lists shards, and batch-reads each shard from its beginning cursor.
/// The credential/project constants are placeholders and must be filled in.
/// </summary>
static void Main(string[] args) {
    // select your endpoint https://help.aliyun.com/document_detail/29008.html
    String endpoint = "", accesskeyId = "xxxx", accessKey = "xxxxxxx", project = "sssss", logstore = "xxxxxxxxxxxxxxxx";
    LogClient client = new LogClient(endpoint, accesskeyId, accessKey);
    // init http connection timeout (same value for connect and read/write, in ms)
    client.ConnectionTimeout = client.ReadWriteTimeout = 10000;

    // list logstores of the project
    foreach (String l in client.ListLogstores(new ListLogstoresRequest(project)).Logstores) {
        Console.WriteLine(l);
    }

    // put logs: 10 items, each carrying 10 identical key/value pairs
    PutLogsRequest putLogsReqError = new PutLogsRequest();
    putLogsReqError.Project = project;
    putLogsReqError.Topic = "dotnet_topic";
    putLogsReqError.Logstore = logstore;
    putLogsReqError.LogItems = new List<LogItem>();
    for (int i = 1; i <= 10; ++i) {
        LogItem logItem = new LogItem();
        logItem.Time = DateUtils.TimeSpan();
        for (int k = 0; k < 10; ++k) {
            logItem.PushBack("error_" + i.ToString(), "invalid operation");
        }
        putLogsReqError.LogItems.Add(logItem);
    }
    PutLogsResponse putLogRespError = client.PutLogs(putLogsReqError);
    // give the service time to index the freshly written logs
    Thread.Sleep(5000);

    // query logs, if query string is "", it means query all data
    GetLogsRequest getLogReq = new GetLogsRequest(project, logstore, DateUtils.TimeSpan() - 100, DateUtils.TimeSpan(), "dotnet_topic", "", 100, 0, false);
    GetLogsResponse getLogResp = client.GetLogs(getLogReq);
    Console.WriteLine("Log count : " + getLogResp.Logs.Count.ToString());
    for (int i = 0; i < getLogResp.Logs.Count; ++i) {
        var log = getLogResp.Logs[i];
        Console.WriteLine("Log time : " + DateUtils.GetDateTime(log.Time));
        for (int j = 0; j < log.Contents.Count; ++j) {
            Console.WriteLine("\t" + log.Contents[j].Key + " : " + log.Contents[j].Value);
        }
        Console.WriteLine("");
    }

    // query histogram over the same time window and topic
    GetHistogramsResponse getHisResp = client.GetHistograms(new GetHistogramsRequest(project, logstore, DateUtils.TimeSpan() - 100, DateUtils.TimeSpan(), "dotnet_topic", ""));
    Console.WriteLine("Histograms total count : " + getHisResp.TotalCount.ToString());

    // list shards of the logstore
    ListShardsResponse listResp = client.ListShards(new ListShardsRequest(project, logstore));
    Console.WriteLine("Shards count : " + listResp.Shards.Count.ToString());

    // batch get logs: read up to 10 log groups per shard from its BEGIN cursor
    for (int i = 0; i < listResp.Shards.Count; ++i) {
        // get cursor pointing at the oldest data in the shard
        String cursor = client.GetCursor(new GetCursorRequest(project, logstore, listResp.Shards[i], ShardCursorMode.BEGIN)).Cursor;
        Console.WriteLine("Cursor : " + cursor);
        BatchGetLogsResponse batchGetResp = client.BatchGetLogs(new BatchGetLogsRequest(project, logstore, listResp.Shards[i], cursor, 10));
        Console.WriteLine("Batch get log, shard id : " + listResp.Shards[i].ToString() + ", log count : " + batchGetResp.LogGroupList.LogGroupList_Count.ToString());
    }
}
/// <summary>Appends the message to the log item at the writer's current depth.</summary>
/// <param name="message">text to append</param>
/// <param name="item">target log item</param>
public void Write(string message, LogItem item) => item.Append(message, depth);
/// <summary>
/// Parses a plain-text log file into LogItems: scans for head lines matching
/// PARTIAL_HEAD_PATTERN / FULL_HEAD_PATTERN, collects each entry's (possibly
/// multi-line) description, extracts date/level/module from the head via
/// regex, and appends each item to LogDataMgt.Log.LogItemList.
/// Returns false when the file is missing or an unrecoverable error occurs.
/// </summary>
private bool LoadFile(string filePath) {
    try {
        if (!File.Exists(filePath)) {
            MessageBox.Show("Can't load file!");
            return (false);
        }
        // In Jimmy's code, the last line in the log file is always missed, so
        // append an empty line to the log file before calling Jimmy's code.
        // Appending is best-effort: a locked/read-only file is tolerated.
        try {
            using (StreamWriter sw = File.AppendText(filePath)) {
                sw.WriteLine("");
            }
        }
        catch { }
        using (StreamReader sr = File.OpenText(filePath)) {
            string logHead = "";
            string start = "";
            // NOTE(review): isFull is assigned but never read in this method —
            // confirm it is not needed before removing.
            bool isFull = false;
            while (!sr.EndOfStream) {
                #region Process the end at the beginning
                // Skip forward until `start` holds a line that looks like an
                // entry head; EOF mid-skip aborts the whole load.
                if (!Regex.Match(start, PARTIAL_HEAD_PATTERN).Success) {
                    while (!Regex.Match(start, PARTIAL_HEAD_PATTERN).Success) {
                        if ((start = sr.ReadLine()) == null) {
                            return (false);
                        }
                    }
                    continue;
                }
                #endregion
                #region Get text of log item
                StringBuilder logDescription = new StringBuilder();
                // A full head carries every field; a partial head is the fallback.
                if (Regex.Match(start, FULL_HEAD_PATTERN).Success) {
                    logHead = Regex.Match(start, FULL_HEAD_PATTERN).Value;
                    isFull = true;
                    // Everything after the head on the same line starts the description.
                    string temp = start.Substring(logHead.Length).TrimStart(' ');
                    logDescription.Append(temp);
                }
                else {
                    logHead = Regex.Match(start, PARTIAL_HEAD_PATTERN).Value;
                    isFull = false;
                    string temp = start.Substring(logHead.Length).TrimStart(' ');
                    logDescription.Append(temp);
                }
                start = "";
                #region Process following code when log info is multiline
                // Accumulate continuation lines until the next entry head;
                // separator lines made only of '=' are dropped.
                while (!Regex.Match(start, PARTIAL_HEAD_PATTERN).Success) {
                    if ((start = sr.ReadLine()) != null) {
                        if (!Regex.Match(start, PARTIAL_HEAD_PATTERN).Success && start.Replace("=", "").Trim() != "") {
                            logDescription.Append(start + "\r\n");
                        }
                    }
                    else {
                        break;
                    }
                }
                #endregion
                #endregion
                #region Transform string-format to xml-format
                try //When occuring exception, ignore this log item
                {
                    LogItem item = new LogItem();
                    item.DateTime = Regex.Match(logHead, DATEtIME_PATTERN).Value;
                    // Level appears bracketed in the head, e.g. "[Error]".
                    item.LogType = (LogType)Enum.Parse(typeof(LogType), Regex.Match(logHead, LEVEL_PATTERN).Value.Replace("[", "").Replace("]", ""));
                    // Assembly name is carved out of the file name between the
                    // adapter-name prefix and a fixed-length suffix.
                    item.AssemblyName = Path.GetFileName(filePath).Substring(adapterName.Length + 1, Path.GetFileName(filePath).Length - restLenth);
                    // Module appears braced in the head, e.g. "{ModuleName}".
                    item.Module = Regex.Match(logHead, MODULE_PATTERN).Value.Replace("{", "").Replace("}", "");
                    item.Description = logDescription.ToString().Trim();
                    LogDataMgt.Log.LogItemList.Add(item);
                }
                catch (Exception err) {
                    Program.Log.Write(err);
                }
                #endregion
            }
            return (true);
        }
    }
    catch (FileNotFoundException err) {
        MessageBox.Show(err.Message + "\r\nMaybe it has been deleted or has not been created!", "Load File Failed", MessageBoxButtons.OK, MessageBoxIcon.Information);
        return (false);
    }
    catch (Exception err) {
        Program.Log.Write(err.Message);
        return (false);
    }
}
/// <summary>Creates a logging wrapper around the given item.</summary>
/// <param name="item">the log item this instance operates on</param>
Logging(LogItem item) => this.item = item;
/// <summary>
/// Creates event args carrying a defensive shallow clone of the given log
/// item, so later mutation of the caller's instance cannot affect the event.
/// </summary>
/// <param name="logItem">the record to snapshot; must not be null</param>
/// <exception cref="ArgumentNullException">logItem is null</exception>
public LogEventArgs(LogItem logItem)
{
    // BUGFIX: `null with { }` throws a bare NullReferenceException; fail
    // with a proper argument exception instead.
    if (logItem == null)
    {
        throw new ArgumentNullException(nameof(logItem));
    }
    // `with { }` on a record produces a shallow copy.
    this.LogItem = logItem with { };
}