/// <summary>
/// Handles a file-system change notification for the player log: when the log
/// has grown by more than 5 KB since the last read, re-reads it and refreshes
/// the text box with the entries in reverse (newest-first) order.
/// </summary>
/// <param name="e">Change-event details (unused; the path comes from PlayerLogFile).</param>
private void ProcessFile(FileSystemEventArgs e)
{
    // FileShare.ReadWrite lets the writer keep appending while we read.
    var fs = new FileStream(PlayerLogFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    using (var sr = new StreamReader(fs)) // disposing the reader also disposes fs
    {
        // Only refresh when the file grew by more than 5 KB since the last pass.
        if (sr.BaseStream.Length <= FileLength + 1024 * 5)
        {
            return; // was an empty else branch — nothing to do
        }

        this.Dispatcher.Invoke(() => { boxPrimary.Text = ""; });

        List<string> logData = LogParser.ReadLogData();
        logData.Reverse(); // newest entries first

        // PERF: join once instead of O(n^2) string concatenation in a loop.
        string textboxText = string.Join("\r\n", logData);
        if (logData.Count > 0)
        {
            textboxText += "\r\n"; // preserve the trailing newline the old loop produced
        }

        this.Dispatcher.Invoke(() => { boxPrimary.Text = textboxText; });
        FileLength = (int)sr.BaseStream.Length;
    }
}
/// <summary>Parse must return null when the parser has no registered pattern.</summary>
public void ShouldReturnNullWhenNoPatternAppliesToLog()
{
    // Arrange: four named columns, but deliberately no pattern added.
    var columns = new[]
    {
        new Column() { Name = "C1" },
        new Column() { Name = "C2" },
        new Column() { Name = "C3" },
        new Column() { Name = "C4" }
    };
    var parser = new LogParser(columns);

    var log = new Log();
    log.Lines.Add(new Line() { Value = log1 });

    // Act
    var ev = parser.Parse(log);

    // Assert: no pattern applies, so no event is produced.
    Assert.IsNull(ev);
}
/// <summary>
/// Three samples for the same endpoint must aggregate under one normalized key
/// with the correct min, max, frequency and rounded mean latency.
/// </summary>
public void Should_Process_And_Output_Correct_Min_Max_Time()
{
    // Arrange
    IDictionary<string, LogData> map = new Dictionary<string, LogData>();
    var rows = new[]
    {
        new[] { "1581589721", "/person/1/details", "GET", "35", "200" },
        new[] { "1581589721", "/person/1/details", "GET", "78", "200" },
        new[] { "1581589721", "/person/1/details", "GET", "96", "200" }
    };
    var parser = new LogParser();

    // Act: feed the rows in reverse order, exactly as the original loop did.
    for (int row = rows.Length - 1; row >= 0; row--)
    {
        parser.ProcessRows(rows[row], map);
    }

    // Assert
    Assert.NotEmpty(map);
    string expectedKey = "/person/{id}/details_GET";
    Assert.True(map.ContainsKey(expectedKey));

    map.TryGetValue(expectedKey, out LogData output);
    Assert.Equal(35, output.Min);
    Assert.Equal(96, output.Max);
    Assert.Equal(3, output.Frequency);
    // (35 + 78 + 96) / 3 = 69.666... -> 69.67 after rounding.
    Assert.Equal(69.67, Math.Round(output.Total / (1.0 * output.Frequency), 2));
}
/// <summary>
/// A row with an empty URL field must be rejected with an ArgumentException.
/// </summary>
public void Should_Throw_Exception_For_Invalid_Data()
{
    // Arrange: second field (the URL) is empty, which is invalid input.
    string[] mockedInput = { "1581589721", "", "GET", "35", "200" };
    var obj = new LogParser();

    // BUGFIX: the old code passed It.IsAny<IDictionary<...>>() as a real call
    // argument; outside a Moq Setup/Verify expression that evaluates to
    // default (null). Pass an actual map so the test exercises input
    // validation rather than an accidental null-dictionary path.
    IDictionary<string, LogData> map = new Dictionary<string, LogData>();

    Assert.Throws<ArgumentException>(() => obj.ProcessRows(mockedInput, map));
}
/// <summary>
/// Parses the desktop dataLog.csv for events belonging to the given device and
/// updates that device's fault count in the Devices list.
/// </summary>
/// <param name="deviceId">Identifier of the device whose log events are counted.</param>
private void AnalyzeLog(string deviceId)
{
    string desktopPath = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
    string fileName = @"dataLog.csv";
    string filePath = Path.Combine(desktopPath, fileName);

    if (!File.Exists(filePath))
    {
        return; // nothing logged yet
    }

    LogParser parser = new LogParser();
    int faultCount;

    // BUGFIX: the reader was never disposed, keeping the CSV file locked.
    using (StreamReader sr = new StreamReader(filePath))
    {
        //Build log for device Id
        parser.ParseEvents(deviceId, sr);

        //Get pattern count
        faultCount = parser.GetEventCount(deviceId);
    }

    //Update count in list view
    var item = Devices.FirstOrDefault(x => x.DeviceId == deviceId);
    if (item != null)
    {
        item.FaultCount = faultCount;
    }
}
/// <summary>
/// Parse must reject log levels outside the valid range (too low: 0, too high: 6).
/// </summary>
public void LogParser_CannotParseWith_NoIncurrectLevel()
{
    var jsonLevelTooLow = new JObject
    {
        ["component"] = "TestService.",
        ["message"] = "Test message.",
        ["level"] = 0
    };
    var jsonLevelTooHigh = new JObject
    {
        ["component"] = "TestService.",
        ["message"] = "Test message.",
        ["level"] = 6
    };

    // BUGFIX: the old "try { Parse(); Assert.Fail(); } catch (Exception) { }"
    // pattern swallowed the assertion-failure exception thrown by Assert.Fail,
    // so the test passed even when Parse accepted the invalid level.
    // Track whether Parse threw and assert on the flag instead.
    bool lowThrew = false;
    try { LogParser.Parse(jsonLevelTooLow); } catch (Exception) { lowThrew = true; }
    Assert.IsTrue(lowThrew, "Parse accepted a level below the valid range.");

    bool highThrew = false;
    try { LogParser.Parse(jsonLevelTooHigh); } catch (Exception) { highThrew = true; }
    Assert.IsTrue(highThrew, "Parse accepted a level above the valid range.");
}
/// <summary>
/// Shows or hides the optional thread/type/class columns so the list view
/// matches what the given parser's log pattern actually contains.
/// </summary>
/// <param name="logParser">Parser whose LogPattern drives column visibility.</param>
public void SetColumnsVisibility(LogParser logParser)
{
    log.Debug("Setting columns visibility");

    // Thread column: remove when absent from the pattern; re-insert at index 1
    // only if currently detached (ListView == null appears to mean "not shown").
    if (!logParser.LogPattern.ContainsThread)
    {
        this.Columns.Remove(threadColumn);
    }
    else if (threadColumn.ListView == null)
    {
        this.Columns.Insert(1, threadColumn);
    }

    // Type column: its insert index depends on whether the thread column is shown.
    if (!logParser.LogPattern.ContainsType)
    {
        this.Columns.Remove(typeColumn);
    }
    else if (typeColumn.ListView == null)
    {
        this.Columns.Insert(threadColumn.ListView == null ? 1 : 2, typeColumn);
    }

    // Class column: inserted just before the last column when shown.
    if (!logParser.LogPattern.ContainsClass)
    {
        this.Columns.Remove(classColumn);
    }
    else if (classColumn.ListView == null)
    {
        this.Columns.Insert(this.Columns.Count - 1, classColumn);
    }
}
/// <summary>
/// Parses the log file named in the path text box and fills the unique-IP
/// count, the top-addresses list view and the top-URLs list view.
/// </summary>
private void parseButton_Click(object sender, EventArgs e)
{
    string logPath = filepathTextBox.Text;
    LogParser lp = new LogParser();
    lp.ParseLogFile(logPath);

    uniqueTextBox.Text = lp.TotalUniqueIpAddresses().ToString();

    // Both result lists render identically, so share one helper.
    PopulateTotals(addressListView, lp.GetTopIpAddresses(5));
    PopulateTotals(urlListView, lp.GetTopUrls(3));
}

/// <summary>Adds one row (group name + total) per item to the given list view.</summary>
private static void PopulateTotals(ListView listView, List<LogItemTotal> totals)
{
    foreach (LogItemTotal item in totals)
    {
        ListViewItem lvi = new ListViewItem();
        lvi.Text = item.LogItemGroup;
        lvi.SubItems.Add(item.TotalGroup.ToString());
        listView.Items.Add(lvi);
    }
}
/// <summary>
/// Loads a delimited log file, parses it into a DataTable via ParseWithSplit,
/// and dumps every cell to the debug output (decoding the "File Name" column
/// from its hex encoding first).
/// </summary>
public void LoadLogFileAndParseTest()
{
    string logFile = @"C:\Temp\KingPro\TestLog.log";
    //string pattern = @"";
    // Fields are separated by "||-" or "||" markers.
    string[] splitStrings = new string[] { "||-", "||" };
    string[] colNames =
    {
        "Date Time", "User", "Http State", "File Type", "File Name",
        "File Size(Downloaded)", "File Size (Original)", "Visitor IP",
        "Domain Name", "Port Number", "Network Address", "Original Address",
        "Visitor IP or ID", "IE and System Info",
    };

    LogParser parser = new LogParser();
    DataTable result = parser.ParseWithSplit(logFile, colNames, splitStrings);

    foreach (DataRow row in result.Rows)
    {
        foreach (string colName in colNames)
        {
            string content = row[colName].ToString();
            if (colName == "File Name")
            {
                // File names are stored hex-encoded; decode for readability.
                content = ParserHelper.ConvertHexStringToNormalChineseString(content);
            }
            Debug.Write(content);
            Debug.Write(",");
            // NOTE(review): the newline is written after every column, not per
            // row, so each value ends up on its own line — confirm intended.
            Debug.Write(Environment.NewLine);
        }
    }
}
/// <summary>Parse must throw ArgumentNullException for a null log.</summary>
public void ShouldThrowExceptionWhenLogIsNull()
{
    // Arrange: pattern group C5 intentionally mismatches column C4, but that
    // is irrelevant here — the null log must be rejected before matching.
    var columns = new[]
    {
        new Column() { Name = "C1" },
        new Column() { Name = "C2" },
        new Column() { Name = "C3" },
        new Column() { Name = "C4" }
    };
    var parser = new LogParser(columns);
    parser.Add(@"(?<C1>\d)(\|)(?<C2>\d)(\|)(?<C3>\d)(\|)(?<C5>\d$)");

    Log log = null;

    // Act + Assert
    Assert.ThrowsException<ArgumentNullException>(() => parser.Parse(log));
}
/// <summary>
/// Runs the given log files through a GenericLogParser, forwarding packets
/// that pass the opcode filter to the supplied handler.
/// </summary>
public static void ParseFiles(LogParser parser, FileInfo[] files, OpCodeValidator filter, Action<PacketParser> packetHandler)
{
    var logExtractor = new GenericLogParser(parser, filter, packetHandler);
    logExtractor.Parse(files);
}
/// <summary>
/// Parses the Gene6 FTP logs once, then returns per-item daily bandwidth
/// statistics (since the given date) for every FTP account in the list.
/// </summary>
/// <param name="items">Service items to report on; non-FTP items keep an empty Days array.</param>
/// <param name="since">Earliest date to include in the statistics.</param>
/// <returns>One bandwidth record per input item, in the same order.</returns>
public override ServiceProviderItemBandwidth[] GetServiceItemsBandwidth(ServiceProviderItem[] items, DateTime since)
{
    ServiceProviderItemBandwidth[] itemsBandwidth = new ServiceProviderItemBandwidth[items.Length];

    // create parser object
    // and update statistics
    LogParser parser = new LogParser("Gene6Ftp", SiteId, GetLogsPath(), "cs-username");
    parser.ParseLogs();

    // update items with diskspace
    for (int i = 0; i < items.Length; i++)
    {
        ServiceProviderItem item = items[i];

        // create new bandwidth object
        itemsBandwidth[i] = new ServiceProviderItemBandwidth();
        itemsBandwidth[i].ItemId = item.Id;
        itemsBandwidth[i].Days = new DailyStatistics[0];

        if (item is FtpAccount)
        {
            try
            {
                // get daily statistics for this account name
                itemsBandwidth[i].Days = parser.GetDailyStatistics(since, new string[] { item.Name });
            }
            catch (Exception ex)
            {
                // best-effort per item: log the error and keep the empty Days array
                Log.WriteError(ex);
            }
        }
    }

    return (itemsBandwidth);
}
/// <summary>
/// Rebuilds all cached event-log reference collections (users, computers,
/// applications, events, metadata, work servers, ports) from the log source
/// and records when the refresh started.
/// </summary>
protected override void ReadEventLogReferences()
{
    // Drop all previously cached reference data before re-reading.
    _users.Clear();
    _computers.Clear();
    _events.Clear();
    _metadata.Clear();
    _applications.Clear();
    _workServers.Clear();
    _primaryPorts.Clear();
    _secondaryPorts.Clear();

    // Timestamp taken before the read starts — presumably so anything written
    // during the read is not considered already covered; confirm with callers
    // of _referencesReadDate.
    DateTime beginReadReferences = DateTime.Now;

    LogParser.ReadEventLogReferences(
        _users,
        _computers,
        _applications,
        _events,
        _metadata,
        _workServers,
        _primaryPorts,
        _secondaryPorts);

    _referencesReadDate = beginReadReferences;
}
/// <summary>
/// Round-trips four log entries through a file and verifies Parse reproduces
/// each entry's string form, in file order.
/// </summary>
public void ReturnCorrectlyFourParsedEntry()
{
    // Arrange
    var logFile = new FileInfo("test.txt");
    var logEntries = new List<LogEntry>
    {
        new LogEntry(new DateTime(2019, 10, 10, 6, 0, 0), "Comment"),
        new LogEntry(new DateTime(2019, 12, 1, 17, 0, 0), ""),
        new LogEntry(new DateTime(2019, 9, 10, 17, 0, 1), "Comment With Space"),
        new LogEntry(new DateTime(2019, 6, 10, 9, 45, 0), "Comment Terminating With Special Caracatere %")
    };

    // BUGFIX: wrap the writer in a using block so the file handle is released
    // even if a WriteLine call throws (the old code only called Close at the end).
    using (var writer = logFile.CreateText())
    {
        foreach (var logEntry in logEntries)
        {
            writer.WriteLine(logEntry.ToString());
        }
    }

    // Act
    var log = LogParser.Parse(logFile);

    // Assert: entries come back in file order with identical string forms.
    Assert.AreEqual(logEntries[0].ToString(), log[0].ToString());
    Assert.AreEqual(logEntries[1].ToString(), log[1].ToString());
    Assert.AreEqual(logEntries[2].ToString(), log[2].ToString());
    Assert.AreEqual(logEntries[3].ToString(), log[3].ToString());
}
/// <summary>
/// Parses a single-event log file and verifies the raw event durations, the
/// typed SqlEvent conversion, and the extracted query's command/timing fields.
/// </summary>
public void TestParseSingle()
{
    string file = TestHelper.TestLogPath + "single.log";
    var log = new LogFile { FileName = file };
    var parser = new LogParser { Log = log };
    parser.Parse();

    // Exactly one raw event, with known durations at body indices 4 and 6.
    Assert.AreEqual(1, parser.RawEvents.Count);
    Assert.AreEqual("0.000", parser.RawEvents[0].Body[4].Values[ValueKeys.Duration]);
    Assert.AreEqual("3.765", parser.RawEvents[0].Body[6].Values[ValueKeys.Duration]);

    // The raw event is materialized as a single SqlEvent...
    Assert.AreEqual(1, parser.Events.Count);
    Assert.IsInstanceOfType(parser.Events[0], typeof(SqlEvent));
    var ev = (SqlEvent)parser.Events[0];

    // ...containing one query whose command text and two duration fields
    // must match the fixture exactly.
    Assert.AreEqual(1, ev.Queries.Count);
    var expectedCommand = "SELECT A0.MAIL_ID,A0.ATTACH_NUM FROM DOCSADM.VERSIONS A0 WHERE ((A0.MAIL_ID = '350AD92BCBF6984BA5DF2B5EB61688A0') AND (A0.ATTACH_NUM = '-1'))";
    var expectedRead = "0.000";
    var expectedIssue = "3.765";
    Assert.AreEqual(expectedCommand, ev.Queries[0].Command);
    Assert.AreEqual(expectedRead, ev.Queries[0].DurationReadItem);
    Assert.AreEqual(expectedIssue, ev.Queries[0].DurationIssueCommand);
}
/// <summary>
/// Populates this death event from the raw log argument fields and creates the
/// paired kill event referencing it.
/// </summary>
/// <param name="args">Exactly 11 fields; see the format comment below.</param>
/// <returns>This instance, fully populated.</returns>
/// <exception cref="ArgumentException">When args is null or not 11 elements long.</exception>
public override ILogEvent ParseFromArguments(string[] args)
{
    // Format: DeadPlayer, KillerPlayer, Damage, DeadPlayerX, DeadPlayerY, KillerPlayerX, KillerPlayerY, KillerItem, KillerProj, KillerNPC, KillerOther
    if (args == null || args.Length != 11)
    {
        throw new ArgumentException("args length must be 11.");
    }

    // Victim side; invariant culture because log values are machine-written.
    Player = LogParser.GetPlayer(args[0]);
    EventPosX = float.Parse(args[3], NumberStyles.Float, CultureInfo.InvariantCulture);
    EventPosY = float.Parse(args[4], NumberStyles.Float, CultureInfo.InvariantCulture);
    Damage = int.Parse(args[2], NumberStyles.Integer, CultureInfo.InvariantCulture);

    // Killer side. NOTE(review): args[9] (KillerNPC) and args[10] (KillerOther)
    // are never read here — confirm that is intentional.
    KillerPlayer = LogParser.GetPlayer(args[1]);
    KillerPositionX = float.Parse(args[5], NumberStyles.Float, CultureInfo.InvariantCulture);
    KillerPositionY = float.Parse(args[6], NumberStyles.Float, CultureInfo.InvariantCulture);
    KillerItem = args[7];
    KillerProjectile = args[8] ?? string.Empty;
    HasKillerProjectile = !string.IsNullOrEmpty(KillerProjectile);

    // Companion event for the killer, built from this death event.
    PairEvent = new KillPlayerEvent(Time, this);

    return (this);
}
/// <summary>
/// Parses a single log file, forwarding packets that pass the opcode filter
/// to the supplied handler.
/// </summary>
public static void ParseFile(LogParser parser, string inputFile, OpCodeValidator filter, Action<PacketParser> packetHandler)
{
    var fileExtractor = new GenericLogParser(parser, filter, packetHandler);
    fileExtractor.Parse(new FileInfo(inputFile));
}
/// <summary>
/// Form-load handler: parses the log file, enables the default level filters,
/// captures layout offsets used by the resize handler, and initializes the
/// date-range pickers from the parsed items.
/// </summary>
private void LogViewer_Load(object sender, EventArgs e)
{
    Items = LogParser.Parse(FileName);

    // Check the first four filter entries by default (was four repeated calls).
    for (int index = 0; index < 4; index++)
    {
        checkedListBox1.SetItemChecked(index, true);
    }

    DisplayList(Items);
    UpdateCheckbox();

    // Record form-edge distances so LogViewer_SizeChanged can keep the layout
    // proportional when the window resizes.
    panelWidth = Width - panel1.Left;
    textBoxWidth = Width - fastColoredTextBox1.Width - fastColoredTextBox1.Left + 10;
    textBoxHeight = Height - fastColoredTextBox1.Height - fastColoredTextBox1.Top + 5;
    checkedListBoxWidth = Width - checkedListBox1.Left;
    checkedListBoxHeight = Height - checkedListBox1.Top;
    buttonWidth = Width - button1.Left;
    buttonHeight = Height - button1.Top;
    LogViewer_SizeChanged(null, null);

    // PERF: Min/Max replace two full OrderBy sorts; same values, and the same
    // InvalidOperationException on an empty item list.
    dateTimePicker_from.Value = Items.Min(p => p.DateTime);
    dateTimePicker_to.Value = Items.Max(p => p.DateTime);

    fastColoredTextBox1.GoEnd();
    Inited = true;
}
/// <summary>
/// RemoveEndOfLineText must strip the end-of-line markers and their digits.
/// </summary>
public void RemoveEndOfLineText()
{
    // Arrange
    var parser = new LogParser();
    string input = "[INF] end-of-line23033 Network Falure end-of-line27";
    string expected = "[INF] Network Falure ";

    // Act + Assert
    Assert.Equal(expected, parser.RemoveEndOfLineText(input));
}
/// <summary>
/// Parsing a log whose records share one date must produce exactly one summary
/// file containing the expected per-module access counts.
/// </summary>
public void AllRecords_SameDate_DiffTimes()
{
    string fileName = "access3.log";
    LogParser parserObj = new LogParser(string.Empty, string.Empty, false);

    IList<string> result;
    // BUGFIX: the file stream was never disposed, leaving access3.log locked
    // for subsequent tests.
    using (var streamObj = File.Open(fileName, FileMode.Open))
    {
        result = parserObj.Parse("[email protected]_2019-09-03T00:40:26 00:00", streamObj);
    }

    Assert.AreNotEqual(result, null);
    Assert.AreEqual(result.Count, 1);

    // The single summary file must contain three module entries with the
    // expected main/sub counts.
    var content = File.ReadAllText(result[0]);
    AccessDataDetail addObj = JsonConvert.DeserializeObject<AccessDataDetail>(content);
    Assert.AreEqual(addObj.AccessDetails.Count, 3);
    Assert.AreEqual(addObj.AccessDetails[0].ModuleName, "en-teachertraining");
    Assert.AreEqual(addObj.AccessDetails[0].MainModuleCount, 1);
    Assert.AreEqual(addObj.AccessDetails[0].SubModuleCount, 0);
    Assert.AreEqual(addObj.AccessDetails[1].ModuleName, "en-kolibri-index");
    Assert.AreEqual(addObj.AccessDetails[1].MainModuleCount, 0);
    Assert.AreEqual(addObj.AccessDetails[1].SubModuleCount, 1);
    Assert.AreEqual(addObj.AccessDetails[2].ModuleName, "en-wikipedia");
    Assert.AreEqual(addObj.AccessDetails[2].MainModuleCount, 0);
    Assert.AreEqual(addObj.AccessDetails[2].SubModuleCount, 1);
}
/// <summary>
/// Searching the parsed log must yield 43 unique message templates, with the
/// first (most frequent) template and its count matching known values.
/// </summary>
public void Logs_Contains_Correct_Message_Templates()
{
    // Arrange: the file must be read into memory before Search can be called.
    var logParser = new LogParser();
    logParser.ReadLogs(_logfilePath);

    // Act
    var searchResults = logParser.Search();
    var messageTemplates = searchResults.MessageTemplates;

    // Assert: template count, uniqueness, and element type.
    Assert.AreEqual(43, messageTemplates.Count());
    CollectionAssert.AllItemsAreUnique(messageTemplates);
    CollectionAssert.AllItemsAreInstancesOfType(messageTemplates, typeof(LogTemplate));

    // Pin the leading template's text and occurrence count.
    var topTemplate = messageTemplates.FirstOrDefault();
    Assert.IsNotNull(topTemplate);
    Assert.AreEqual("{LogPrefix} Task added {TaskType}", topTemplate.MessageTemplate);
    Assert.AreEqual(689, topTemplate.Count);
}
/// <summary>
/// Normalizes the raw log line into the compatible output format: replaces
/// the PC name, strips the message-type wording, tooltip symbols and world
/// names, then reformats the line for the detected type into ParsedLog.
/// </summary>
public void Parse()
{
    // Replace the PC name in the raw line first.
    this.ReplacePCName();

    var log = this.LogReplacedPCName;

    // The log type field is hexadecimal; if parsing fails, detectedType is 0.
    int.TryParse(
        this.LogType,
        NumberStyles.HexNumber,
        CultureInfo.InvariantCulture,
        out int detectedType);

    // Remove the log-message-type wording.
    log = LogMessageTypeExtensions.RemoveLogMessageType(
        detectedType,
        log,
        true);

    // Remove tooltip symbols and world names.
    log = LogParser.RemoveTooltipSynbols(log);
    log = LogParser.RemoveWorldName(log);

    // Convert the log line into the compatible format.
    log = LogParser.FormatLogLine(
        detectedType,
        log);

    this.ParsedLog = log;
}
/// <summary>
/// Parses every log file under the given directory, forwarding packets that
/// pass the opcode filter to the supplied handler.
/// </summary>
public static void ParseDir(LogParser parser, string inputDir, OpCodeValidator filter, Action<PacketParser> packetHandler)
{
    var dirExtractor = new GenericLogParser(parser, filter, packetHandler);
    dirExtractor.Parse(new DirectoryInfo(inputDir));
}
/// <summary>
/// Verifies that after merging continuation lines (lines without a yyyy-MM-dd
/// date) into their predecessor, every remaining line contains a date.
/// </summary>
public void REadFile()
{
    Regex dateLine = new Regex(@"\d{4}-\d{2}-\d{2}");

    // IDIOM: build the list straight from the file lines instead of copying
    // element by element (the old per-line foreach).
    List<string> items = new List<string>(File.ReadAllLines(@"c:\LCP\Logs\LCP Trading Monitor\log.2.txt"));

    // Merge any line lacking a date into the previous line; only advance the
    // index when no merge happened, since merging shifts the list.
    for (int i = 0; i < items.Count;)
    {
        if (i + 1 < items.Count && !dateLine.IsMatch(items[i + 1]))
        {
            items[i] = items[i] + items[i + 1];
            items.RemoveAt(i + 1);
        }
        else
        {
            i++;
        }
    }

    // BUGFIX: Assert.Fail states the intent directly instead of the opaque
    // Assert.IsFalse(true), and reports the offending line.
    foreach (string line in items)
    {
        if (!dateLine.IsMatch(line))
        {
            Assert.Fail("Line without a date survived the merge: " + line);
        }
    }
}
/// <summary>
/// Parse must skip empty lines in the thermometer reading log and still
/// classify temp-1 as "precise".
/// </summary>
public void IgnoreEmptyLines()
{
    var expected = new Dictionary<string, string>
    {
        { "temp-1", "precise" }
    };
    // NOTE(review): the verbatim log literal below is reproduced exactly —
    // its internal whitespace is part of the test input.
    Assert.Equal(JsonConvert.SerializeObject(expected), LogParser.Parse(
        @"reference thermometer 70.0 thermometer temp-1 2007-04-05T22:00 72.4 2007-04-05T22:01 76.0 2007-04-05T22:02 79.1 2007-04-05T22:03 75.6 2007-04-05T22:04 71.2 2007-04-05T22:05 71.4 2007-04-05T22:06 69.2 2007-04-05T22:07 65.2 2007-04-05T22:08 62.8 2007-04-05T22:09 61.4 2007-04-05T22:10 64.0 2007-04-05T22:11 67.5 2007-04-05T22:12 69.4 "
    )
    );
}
/// <summary>
/// Not yet implemented: intended to dump a single log file's packets. The
/// commented-out lines sketch the planned GenericLogParser-based approach.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public static void DumpLogFile(LogParser parser, string inputFile, OpCodeValidator filter, Action<PacketParser> packetHandler)
{
    //var extractor = new GenericLogParser(parser, filter, packetHandler, updatePacketHandler);
    //extractor.Parse(new FileInfo(inputFile));
    throw new NotImplementedException();
}
/// <summary>
/// Writes one CSV section for the given challenge: a title line, a header,
/// then one row per attack with average time spent, failure rate, distance
/// and area.
/// </summary>
/// <param name="chData">Aggregated per-attack totals for the challenge.</param>
/// <param name="sw">Open writer that receives the CSV lines.</param>
void WriteJoystickResult(ChallengeData chData, StreamWriter sw)
{
    float[] averageTimeSpent = new float[4];
    float[] averageFailures = new float[4];
    double[] averageDistance = new double[4];
    double[] averageArea = new double[4];

    string header = "Attack,Average Time Spent,Average Failures,Average Distance,Average Area";
    sw.WriteLine("--------" + chData.name + "--------");
    sw.WriteLine(header);

    for (int i = 0; i < 4; i++)
    {
        // Map loop index to the output ordering of attack IDs.
        int attackID = LogParser.ReorderAttackID(i);

        // Time is averaged over successful attempts only (attempts - failures).
        // NOTE(review): divides by zero if every attempt failed — confirm the
        // inputs guarantee at least one success per attack.
        averageTimeSpent[attackID] = chData.totalTimeSpent[attackID] / (chData.attempts[attackID] - chData.failures[attackID]);
        averageFailures[attackID] = (float)chData.failures[attackID] / chData.attempts[attackID];
        averageDistance[attackID] = chData.totalDistance[attackID] / chData.attempts[attackID];
        averageArea[attackID] = chData.totalArea[attackID] / chData.attempts[attackID];

        // usCulture (presumably en-US) keeps numeric formatting stable
        // regardless of the OS locale — verify where usCulture is declared.
        string line = LogParser.AttackIDToName(attackID) + ",";
        line += averageTimeSpent[attackID].ToString(usCulture) + ",";
        line += averageFailures[attackID].ToString(usCulture) + ",";
        line += averageDistance[attackID].ToString(usCulture) + ",";
        line += averageArea[attackID].ToString(usCulture);
        sw.WriteLine(line);
    }
}
/// <summary>
/// Blob-trigger entry point: parses an uploaded web-server access log into
/// per-day JSON summaries, then uploads the summaries back to blob storage.
/// </summary>
/// <param name="accessLogBlob">Stream over the uploaded access-log blob.</param>
/// <param name="name">Blob name from the trigger path.</param>
/// <param name="log">Function logger.</param>
public static void Run([BlobTrigger("webserverlog/{name}", Connection = "AzureWebJobsStorage")] Stream accessLogBlob, string name, ILogger log)
{
    LogParser parserObj = new LogParser(connectionString, containerName, true);
    // NOTE(review): logging the raw connection string may leak credentials —
    // consider removing or masking this line.
    log.LogInformation("Connection String: " + connectionString);
    log.LogInformation("Container Name: " + containerName);

    //Parse the Access Log and Store summary data in JSON - One per day.
    IList<string> resultList = parserObj.Parse(name, accessLogBlob);
    if (resultList == null)
    {
        log.LogError("Error Parsing the Access Log");
        return;
    }
    log.LogInformation("Successfully Parsed the Access Log");

    //Upload the JSON Files to azure blob container
    bool uploadStatus = parserObj.Upload(resultList);
    if (uploadStatus == false)
    {
        // BUGFIX: this branch previously logged "Error Parsing the Access Log",
        // masking upload failures as parse failures.
        log.LogError("Error Uploading the Access Log Summary");
        return;
    }
    log.LogInformation("Successfully Uploaded");
}
/// <summary>
/// Copies report identity/settings from the parser, then fills the report
/// data in parallel via PopulateParalel.
/// </summary>
/// <param name="parser">Source of the report id and the use-existing-data flag.</param>
public void Populate(LogParser parser)
{
    _guidReportId = parser.LogParserId;
    _useExistingData = parser.UseExistData;
    PopulateParalel(parser);
}
/// <summary>
/// Raises a log line to the LogParser pipeline, suppressing an identical line
/// raised again within 0.1 seconds of the previous one.
/// </summary>
/// <param name="log">Raw log line; ignored when null or empty.</param>
public static void RaiseLog(
    string log)
{
    if (string.IsNullOrEmpty(log))
    {
        return;
    }

    // De-duplication state is shared, so guard it with the notice lock.
    lock (NoticeLocker)
    {
        if (lastRaisedLog == log)
        {
            // Same text within 100 ms: treat as a duplicate and drop it.
            if ((DateTime.Now - lastRaisedLogTimestamp).TotalSeconds <= 0.1)
            {
                return;
            }
        }

        lastRaisedLog = log;
        lastRaisedLogTimestamp = DateTime.Now;
    }

    // Flatten multi-line messages into a single logical line.
    log = log.Replace(Environment.NewLine, "\\n");

    LogParser.RaiseLog(DateTime.Now, log);
}
/// <summary>
/// Creates a serializer for game-object query responses read from a packet
/// log. Requires the XML packet definition for
/// SMSG_GAMEOBJECT_QUERY_RESPONSE to be loaded before construction.
/// </summary>
/// <param name="parser">Underlying log parser to read packets from.</param>
/// <param name="writer">Destination writer for the serialized output.</param>
/// <exception cref="InvalidOperationException">
/// When the SMSG_GAMEOBJECT_QUERY_RESPONSE packet structure is not defined.
/// </exception>
public GOSerializer(LogParser parser, IndentTextWriter writer)
    : base(parser, new LogHandler(ValidateOpCode, HandlePacket))
{
    // just make sure to have the XML-definition of the packet before using this Class
    if (!PacketAnalyzer.IsDefined(RealmServerOpCode.SMSG_GAMEOBJECT_QUERY_RESPONSE, PacketSender.Server))
    {
        throw new InvalidOperationException("SMSG_GAMEOBJECT_QUERY_RESPONSE is not defined.");
    }

    // NOTE(review): the writer is stored in a static field, so only one
    // GOSerializer output target can be active at a time — confirm intended.
    s_Writer = writer;
}
/// <summary>
/// Entry point: parses the command line, then runs the log parser over the
/// given input/output files, printing any error message to the console.
/// </summary>
static void Main(string[] args)
{
    var program = new Program();
    if (!program.ParseCmdLine(args))
    {
        program.PrintHelp();
        return;
    }

    try
    {
        // Mode "0" (or no mode at all) selects the simple processing path.
        bool isSimpleMode = string.IsNullOrEmpty(program.Mode) || program.Mode == "0";
        var logParser = new LogParser(program.InputFile, program.OutputFile, isSimpleMode);
        logParser.Process();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Not yet implemented: intended to dump a single log file's packets. The
/// commented-out lines sketch the planned GenericLogParser-based approach.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public static void DumpLogFile(LogParser parser, string inputFile, OpCodeValidator filter, Action<PacketParser> packetHandler)
{
    //var extractor = new GenericLogParser(parser, filter, packetHandler, updatePacketHandler);
    //extractor.Parse(new FileInfo(inputFile));
    throw new NotImplementedException();
}
/// <summary>
/// Runs each of the given log files through a GenericLogParser, dispatching
/// packets that satisfy the opcode filter to the handler.
/// </summary>
public static void ParseFiles(LogParser parser, FileInfo[] files, OpCodeValidator filter, Action<PacketParser> packetHandler)
{
    var batchExtractor = new GenericLogParser(parser, filter, packetHandler);
    batchExtractor.Parse(files);
}
/// <summary>
/// Parses every log file in the given directory, dispatching parsed content
/// to the supplied handlers.
/// </summary>
public static void ParseDir(LogParser parser, string inputDir, params LogHandler[] handlers)
{
    var dirParser = new GenericLogParser(parser, handlers);
    dirParser.Parse(new DirectoryInfo(inputDir));
}
/// <summary>
/// Parses every log file in the given directory, forwarding each parsed
/// update packet to the supplied callback.
/// </summary>
public static void ParseDir(LogParser parser, string inputDir, Action<ParsedUpdatePacket> packetHandler)
{
    var updateExtractor = new GenericLogParser(parser, new LogHandler(packetHandler));
    updateExtractor.Parse(new DirectoryInfo(inputDir));
}
/// <summary>
/// Convenience overload: serializes game objects to the given output file by
/// wrapping it in an IndentTextWriter over a StreamWriter.
/// </summary>
/// <param name="parser">Underlying log parser to read packets from.</param>
/// <param name="outputFile">Path of the file that receives the serialized output.</param>
public GOSerializer(LogParser parser, string outputFile)
    : this(parser, new IndentTextWriter(new StreamWriter(outputFile)))
{
}
/// <summary>
/// Creates a parser wrapper that forwards parsed log content to the given handlers.
/// </summary>
/// <param name="parser">The underlying log parser being wrapped.</param>
/// <param name="handlers">Handlers invoked for parsed log content; may be empty.</param>
protected AdvancedLogParser(LogParser parser, params LogHandler[] handlers)
{
    m_Parser = parser;
    Handlers = handlers;
}
/// <summary>
/// Serializes all game objects found in the logs under inputDir into outputFile.
/// </summary>
public static void ExtractGOs(LogParser parser, string inputDir, string outputFile)
{
    var serializer = new GOSerializer(parser, outputFile);
    serializer.Parse(new DirectoryInfo(inputDir));
}
/// <summary>
/// Returns daily bandwidth statistics since the given date for each FTP
/// account in the item list; other item kinds keep an empty statistics array.
/// </summary>
/// <param name="items">Service items to report on.</param>
/// <param name="since">Earliest date to include in the statistics.</param>
/// <returns>One bandwidth record per input item, in the same order.</returns>
public override ServiceProviderItemBandwidth[] GetServiceItemsBandwidth(ServiceProviderItem[] items, DateTime since)
{
    // Parse the Gene6 FTP logs once up front; per-item queries read the results.
    var parser = new LogParser("Gene6Ftp", SiteId, GetLogsPath(), "cs-username");
    parser.ParseLogs();

    var result = new ServiceProviderItemBandwidth[items.Length];
    for (int index = 0; index < items.Length; index++)
    {
        ServiceProviderItem currentItem = items[index];

        var bandwidth = new ServiceProviderItemBandwidth();
        bandwidth.ItemId = currentItem.Id;
        bandwidth.Days = new DailyStatistics[0];
        result[index] = bandwidth;

        if (currentItem is FtpAccount)
        {
            try
            {
                // Daily statistics are requested per account name.
                bandwidth.Days = parser.GetDailyStatistics(since, new string[] { currentItem.Name });
            }
            catch (Exception ex)
            {
                // Best effort: log the failure and keep the empty Days array.
                Log.WriteError(ex);
            }
        }
    }

    return result;
}
/// <summary>
/// Not yet implemented: intended to dump packets from multiple log files. The
/// commented-out lines sketch the planned GenericLogParser-based approach.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public static void DumpLogFiles(LogParser parser, FileInfo[] files, OpCodeValidator filter)
{
    //var extractor = new GenericLogParser(parser, filter, packetHandler, updatePacketHandler);
    //extractor.Parse(files);
    throw new NotImplementedException();
}
/// <summary>
/// Creates a parser wrapper with no handlers attached.
/// NOTE(review): null is passed for the params array, so Handlers ends up
/// null rather than an empty array — confirm callers handle that.
/// </summary>
/// <param name="parser">The underlying log parser being wrapped.</param>
protected AdvancedLogParser(LogParser parser)
    : this(parser, null)
{
}
/// <summary>
/// Creates a generic log parser that forwards parsed content to the given handlers.
/// </summary>
/// <param name="parser">The underlying log parser being wrapped.</param>
/// <param name="handlers">Handlers invoked for parsed log content.</param>
public GenericLogParser(LogParser parser, params LogHandler[] handlers)
    : base(parser, handlers)
{
}
/// <summary>
/// Returns daily bandwidth statistics since the given date for each IIS 7 web
/// site in the item list; each site's logs are parsed from its own log folder.
/// </summary>
/// <param name="items">Service items to report on; non-website items keep an empty Days array.</param>
/// <param name="since">Earliest date to include in the statistics.</param>
/// <returns>One bandwidth record per input item, in the same order.</returns>
public override ServiceProviderItemBandwidth[] GetServiceItemsBandwidth(ServiceProviderItem[] items, DateTime since)
{
    ServiceProviderItemBandwidth[] itemsBandwidth = new ServiceProviderItemBandwidth[items.Length];

    // update items with diskspace
    for (int i = 0; i < items.Length; i++)
    {
        ServiceProviderItem item = items[i];

        // create new bandwidth object
        itemsBandwidth[i] = new ServiceProviderItemBandwidth();
        itemsBandwidth[i].ItemId = item.Id;
        itemsBandwidth[i].Days = new DailyStatistics[0];

        if (item is WebSite)
        {
            try
            {
                WebSite site = GetSite(item.Name);
                string siteId = site[WebSite.IIS7_SITE_ID];
                // Each site's logs live in a subfolder named by its site id.
                string logsPath = Path.Combine(site.LogsPath, siteId);
                if (!Directory.Exists(logsPath))
                    continue; // no logs yet — keep the empty Days array

                // create parser object
                // and update statistics
                LogParser parser = new LogParser("Web", siteId, logsPath, "s-sitename");
                parser.ParseLogs();

                // get daily statistics filtered by the site id field
                itemsBandwidth[i].Days = parser.GetDailyStatistics(since, new string[] { siteId });
            }
            catch (Exception ex)
            {
                // best-effort per item: log the error and continue
                Log.WriteError(ex);
            }
        }
    }

    return itemsBandwidth;
}
/// <summary>
/// Parses one log file, dispatching packets that satisfy the opcode filter
/// to the handler.
/// </summary>
public static void ParseFile(LogParser parser, string inputFile, OpCodeValidator filter, Action<PacketParser> packetHandler)
{
    var singleFileParser = new GenericLogParser(parser, filter, packetHandler);
    singleFileParser.Parse(new FileInfo(inputFile));
}
/// <summary>
/// Parses every log file under the given directory, dispatching packets that
/// satisfy the opcode filter to the handler.
/// </summary>
public static void ParseDir(LogParser parser, string inputDir, OpCodeValidator filter, Action<PacketParser> packetHandler)
{
    var directoryParser = new GenericLogParser(parser, filter, packetHandler);
    directoryParser.Parse(new DirectoryInfo(inputDir));
}
/// <summary>
/// Entry point: parses the hard-coded performance log file.
/// </summary>
private static void Main(string[] args)
{
    var logParser = new LogParser();
    logParser.Parse(@"E:\perf.log");
}
/// <summary>
/// Creates a generic log parser with no handlers attached.
/// </summary>
/// <param name="parser">The underlying log parser being wrapped.</param>
public GenericLogParser(LogParser parser)
    : base(parser)
{
}
/// <summary>
/// Creates a generic log parser with a single handler built from an opcode
/// validator and a packet callback.
/// </summary>
/// <param name="parser">The underlying log parser being wrapped.</param>
/// <param name="validator">Filter deciding which opcodes are handled.</param>
/// <param name="packetHandler">Callback invoked for each accepted packet.</param>
public GenericLogParser(LogParser parser, OpCodeValidator validator, Action<PacketParser> packetHandler)
    : base(parser, new LogHandler(validator, packetHandler))
{
}
/// <summary>
/// Returns daily bandwidth statistics since the given date for each FTP
/// account on the default FTP site (logs parsed once for the whole site).
/// </summary>
/// <param name="items">Service items to report on; non-FTP items keep an empty Days array.</param>
/// <param name="since">Earliest date to include in the statistics.</param>
/// <returns>One bandwidth record per input item, in the same order.</returns>
public override ServiceProviderItemBandwidth[] GetServiceItemsBandwidth(ServiceProviderItem[] items, DateTime since)
{
    ServiceProviderItemBandwidth[] itemsBandwidth = new ServiceProviderItemBandwidth[items.Length];

    // calculate bandwidth for Default FTP Site
    FtpSite fptSite = GetSite(SiteId);
    // Log folder name is the site id with slashes stripped.
    string siteId = SiteId.Replace("/", "");
    string logsPath = Path.Combine(fptSite.LogFileDirectory, siteId);

    // create parser object
    // and update statistics
    LogParser parser = new LogParser("Ftp", siteId, logsPath, "s-sitename", "cs-username");
    parser.ParseLogs();

    // update items with diskspace
    for (int i = 0; i < items.Length; i++)
    {
        ServiceProviderItem item = items[i];

        // create new bandwidth object
        itemsBandwidth[i] = new ServiceProviderItemBandwidth();
        itemsBandwidth[i].ItemId = item.Id;
        itemsBandwidth[i].Days = new DailyStatistics[0];

        if (item is FtpAccount)
        {
            try
            {
                // get daily statistics filtered by site id and account name
                itemsBandwidth[i].Days = parser.GetDailyStatistics(since, new string[] { siteId, item.Name });
            }
            catch (Exception ex)
            {
                // best-effort per item: log the error and continue
                Log.WriteError(ex);
            }
        }
    }

    return itemsBandwidth;
}
/// <summary>
/// Returns daily bandwidth statistics since the given date for each FTP
/// account on the default FTP 7 site. FTP 7 log lines differ from older
/// versions, so the parser's key-field and statistics events are customized.
/// </summary>
/// <param name="items">Service items to report on; non-FTP items keep an empty Days array.</param>
/// <param name="since">Earliest date to include in the statistics.</param>
/// <returns>One bandwidth record per input item, in the same order.</returns>
public override ServiceProviderItemBandwidth[] GetServiceItemsBandwidth(ServiceProviderItem[] items, DateTime since)
{
    ServiceProviderItemBandwidth[] itemsBandwidth = new ServiceProviderItemBandwidth[items.Length];

    // calculate bandwidth for Default FTP Site
    FtpSite ftpSite = GetSite(SiteId);
    // FTP 7 log folders are named "FTPSVC<id>".
    string siteId = String.Concat("FTPSVC", ftpSite[FtpSite.MSFTP7_SITE_ID]);
    string logsPath = Path.Combine(ftpSite.LogFileDirectory, siteId);

    // create parser object
    // and update statistics
    LogParser parser = new LogParser("Ftp", siteId, logsPath, "s-sitename", "cs-username");
    // Subscribe to the events because FTP 7.0 has several differences that should be taken into account
    // and processed in a specific way
    parser.ProcessKeyFields += new ProcessKeyFieldsEventHandler(LogParser_ProcessKeyFields);
    parser.CalculateStatisticsLine += new CalculateStatsLineEventHandler(LogParser_CalculateStatisticsLine);
    //
    parser.ParseLogs();

    // update items with diskspace
    for (int i = 0; i < items.Length; i++)
    {
        ServiceProviderItem item = items[i];

        // create new bandwidth object
        itemsBandwidth[i] = new ServiceProviderItemBandwidth();
        itemsBandwidth[i].ItemId = item.Id;
        itemsBandwidth[i].Days = new DailyStatistics[0];

        if (item is FtpAccount)
        {
            try
            {
                // get daily statistics filtered by site id and account name
                itemsBandwidth[i].Days = parser.GetDailyStatistics(since, new string[] { siteId, item.Name });
            }
            catch (Exception ex)
            {
                // best-effort per item: log the error and continue
                Log.WriteError(ex);
            }
        }
    }

    return itemsBandwidth;
}