protected override void on_new_lines(string new_lines) {
    // Split the incoming chunk into lines; nothing to do for an empty chunk.
    int count = 0;
    last_lines_string_.set_lines(new_lines, ref count);
    if (count < 1)
        return;

    int first_row = 0;
    if (has_header_line_)
        lock (this)
            // the header can only be consumed before any entry exists -
            // once entries were parsed, the column names are already fixed
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                this.column_names = split.to_list(last_lines_string_.line_at(0), separator_);
                first_row = 1;
            }

    var parsed = new List<log_entry_line>();
    var cols = this.column_names;
    for (int row = first_row; row < count; ++row) {
        var cells = split.to_list(last_lines_string_.line_at(row), separator_);
        var entry = new log_entry_line();
        // missing trailing cells become empty strings; extra cells are ignored
        for (int col = 0; col < cols.Count; ++col)
            entry.add(cols[col], col < cells.Count ? cells[col] : "");
        parsed.Add(entry);
    }

    lock (this) {
        foreach (var entry in parsed)
            string_.add_preparsed_line(entry.ToString());
        entries_.AddRange(parsed);
    }
}
// forces the WHOLE FILE to be reloaded
//
// be VERY careful calling this - I should call this only when the syntax has changed
public override void force_reload() {
    base.force_reload();
    lock (this) {
        // drop the partially-parsed trailing entry and reset the per-line
        // statistics, so the next on_new_lines() starts from scratch
        last_entry_ = new log_entry_line();
        valid_line_count_ = 0;
    }
}
// Converts a Windows Event Log record into our generic column/value entry.
// Best-effort: individual fields that cannot be read fall back to "" instead
// of aborting the conversion of the remaining fields.
private log_entry_line to_log_entry(EventRecord rec, string log_name) {
    log_entry_line entry = new log_entry_line();
    try {
        entry.add("Log", log_name);
        entry.add("EventID", "" + rec.Id);
        entry.add("level", event_level((StandardEventLevel)rec.Level));
        // TimeCreated is nullable - guard it so a missing timestamp does not
        // throw and skip all the remaining fields via the outer catch
        if (rec.TimeCreated != null)
            entry.analyze_and_add("timestamp", rec.TimeCreated.Value);
        try {
            var task = rec.Task != 0 ? rec.TaskDisplayName : "";
            entry.add("Category", task ?? "");
        } catch {
            entry.add("Category", "");
        }
        entry.add("Machine Name", rec.MachineName);
        entry.add("Source", "" + rec.ProviderName);
        string user_id = rec.UserId != null ? rec.UserId.Value : "";
        if (user_id != "") {
            // translating a SID to an account name can throw
            // (IdentityNotMappedException for unknown/orphaned SIDs) -
            // fall back to showing the raw SID string in that case
            try {
                user_id = new SecurityIdentifier(user_id).Translate(typeof(NTAccount)).ToString();
            } catch {
                // keep the raw SID
            }
        }
        entry.add("User Name", user_id);
        /* 1.5.14+ this generates waaaay too many errors - just ignore for now
         * try {
         *     var keywords = rec.KeywordsDisplayNames;
         *     entry.add("Keywords", keywords != null ? util.concatenate(keywords, ",") : "");
         * } catch {
         *     entry.add("Keywords", "");
         * }*/
        // note: this throws a lot of exceptions; however, we don't have much of a choice here - just showing the raw properties is rather useless
        try {
            var desc = rec.FormatDescription();
            if (desc == null)
                desc = util.concatenate(rec.Properties.Select(x => x.Value.ToString()), "\r\n");
            entry.add("msg", desc ?? "");
        } catch {
            try {
                string desc = util.concatenate(rec.Properties.Select(x => x.Value.ToString()), "\r\n");
                entry.add("msg", desc);
            } catch {
                entry.add("msg", "");
            }
        }
    } catch (Exception e) {
        // (typo fixed: message used to say "EventRectord")
        logger.Fatal("can't convert EventRecord to entry " + e.Message);
    }
    return entry;
}
protected override void on_new_lines(string new_lines) {
    int count = 0;
    last_lines_string_.set_lines(new_lines, ref count);
    if (count < 1)
        return;

    int first_row = 0;
    if (has_header_line_) {
        lock (this)
            // if at least one entry - can't read column names
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                this.column_names = parse_csv(last_lines_string_.line_at(0));
                first_row = 1;
            }
    }

    var parsed = new List<log_entry_line>();
    var cols = this.column_names;
    // holds lines that did not yet contain enough cells (a quoted cell may
    // span several physical lines), glued in front of the next line
    string pending = "";
    for (int row = first_row; row < count; ++row) {
        var line_text = last_lines_string_.line_at(row);
        var cells = parse_csv(pending + line_text);
        if (cells.Count < cols.Count) {
            // incomplete record - keep accumulating
            pending += line_text + "\r\n";
            continue;
        }
        pending = "";
        if (cells.Count > cols.Count)
            logger.Warn("invalid csv line, too many cells: " + cells.Count + " , instead of " + cols.Count);
        var entry = new log_entry_line();
        for (int col = 0; col < cols.Count; ++col)
            entry.add(cols[col], cells[col]);
        parsed.Add(entry);
    }

    lock (this) {
        foreach (var entry in parsed)
            string_.add_preparsed_line(entry.ToString());
        entries_.AddRange(parsed);
    }
}
// Reads the current row from reader_ into a log_entry_line, following the
// column -> (name, type) mappings_ computed beforehand. On any read error,
// logs it, records it in errors_, and stops further reading.
private log_entry_line line_from_reader() {
    log_entry_line row = new log_entry_line();
    int i = 0;
    try {
        for (; i < mappings_.Count; ++i) {
            bool is_time_ = mappings_[i].Item2 == info_type.time;
            if (is_time_) {
                if (read_time_as_string_) {
                    row.add_time(reader_.GetString(i));
                } else {
                    // read time column as datetime
                    try {
                        // if this throws, read it as string
                        row.add_time(reader_.GetDateTime(i));
                    } catch (Exception e) {
                        logger.Error("can't read time column as datetime (will try reading as string) :" + e.Message);
                        read_time_as_string_ = true;
                        // retry this same column, as a string this time;
                        // without the continue, the code below used to call
                        // GetString(i) with the already-decremented index
                        // (wrong column, or -1 for column 0)
                        --i;
                        continue;
                    }
                }
                if (settings.db_id_field == "") {
                    // sorting, for when we'll need to do tailing
                    last_time_str_ = reader_.GetString(i);
                }
            } else {
                // non-time column
                // FIXME perhaps I could speed this up - perhaps I might even be able to create a 'line' object directly. But for now, lets leave it like this
                row.add(mappings_[i].Item1, reader_.GetString(i));
            }
        }
        if (settings.db_id_field != "") {
            // the id column is appended after all the mapped columns
            last_id_ = reader_.GetInt64(mappings_.Count);
        }
    } catch (Exception ee) {
        logger.Error("can't read db row " + ee.Message);
        // when the failure happened while reading the trailing id column,
        // i == mappings_.Count - indexing mappings_[i] here used to throw
        // ArgumentOutOfRangeException from inside the error handler
        string field = i < mappings_.Count ? mappings_[i].Item1 : settings.db_id_field;
        errors_.add("Cannot read db field " + field + " : " + ee.Message);
        continue_reading_ = false;
    }
    // update last_id and time
    return row;
}
// Maps a captured debug event onto our generic entry columns
// (date/time split, process id and name, and the message text).
private log_entry_line debug_entry(capture_all_debug_events.debug_event evt) {
    var entry = new log_entry_line();
    entry.add("date", evt.date.ToString("dd-MM-yyyy"));
    entry.add("time", evt.date.ToString("HH:mm:ss.fff"));
    entry.add("Pid", "" + evt.process_id);
    entry.add("Process Name", evt.lo_process_name);
    entry.add("msg", evt.msg);
    return entry;
}
protected override void on_new_lines(string new_lines) {
    int n = 0;
    last_lines_string_.set_lines(new_lines, ref n);
    if (n < 1)
        return;

    int idx = 0;
    if (has_header_line_) {
        lock (this) {
            // if at least one entry - can't read column names
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                this.column_names = split.to_list(last_lines_string_.line_at(0), separator_);
                idx = 1;
            }
        }
    }

    var result = new List<log_entry_line>();
    var names = this.column_names;
    while (idx < n) {
        var cells = split.to_list(last_lines_string_.line_at(idx), separator_);
        var entry = new log_entry_line();
        int c = 0;
        foreach (var name in names) {
            // pad missing trailing cells with an empty value
            entry.add(name, c < cells.Count ? cells[c] : "");
            ++c;
        }
        result.Add(entry);
        ++idx;
    }

    lock (this) {
        foreach (var e in result)
            string_.add_preparsed_line(e.ToString());
        entries_.AddRange(result);
    }
}
protected override void on_new_lines(string new_lines) {
    // Accumulates characters in sb_ until a full top-level JSON object is
    // buffered (balanced braces), then deserializes it into a log_entry_line.
    //
    // Fix: brace counting must ignore '{' / '}' that appear INSIDE JSON string
    // literals (e.g. {"msg":"a { b"}); the old code corrupted open_count_ on
    // such input. The in-string state cannot be kept as a field from this
    // block, so recompute it from the already-buffered prefix.
    bool in_string = false;
    bool escaped = false;
    var buffered = sb_.ToString();
    for (int i = 0; i < buffered.Length; ++i) {
        char bc = buffered[i];
        if (escaped)
            escaped = false;
        else if (bc == '\\')
            escaped = in_string;
        else if (bc == '"')
            in_string = !in_string;
    }

    foreach (var c in new_lines.ToCharArray()) {
        sb_.Append(c);
        if (escaped) {
            escaped = false;
            continue;
        }
        if (c == '\\') {
            // a backslash only escapes within a string literal
            escaped = in_string;
            continue;
        }
        if (c == '"') {
            in_string = !in_string;
            continue;
        }
        if (in_string)
            continue;
        if (c == '{') {
            open_count_++;
        } else if (c == '}') {
            open_count_--;
            if (open_count_ == 0) {
                // Full object in buffer
                var obj = JsonConvert.DeserializeObject <Dictionary <string, dynamic> >(sb_.ToString());
                var line = new log_entry_line();
                foreach (var entry in obj) {
                    var value = entry.Value.ToString();
                    if (entry.Value.GetType() == typeof(DateTime)) {
                        // keep timestamps round-trippable (ISO 8601)
                        value = ((DateTime)entry.Value).ToString("o");
                    }
                    line.analyze_and_add(entry.Key, value);
                }
                lock (this) {
                    entries_.Add(line);
                    string_.add_preparsed_line(line.ToString());
                }
                sb_.Clear();
            }
        }
    }
}
protected override void on_new_lines(string new_lines) {
    // assume text is written line by line (thus, we read full lines)
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    List<log_entry_line> entries_now = new List<log_entry_line>(line_count);
    log_entry_line last_entry;
    lock (this)
        last_entry = last_entry_;

    for (int i = 0; i < line_count; ++i) {
        string cur = last_lines_string_.line_at(i);
        int separator = cur.IndexOf(separator_char_);
        if (separator >= 0) {
            // "name<separator>value" line
            string name = cur.Substring(0, separator).Trim();
            string value = cur.Substring(separator + 1).Trim();
            last_entry.add(name, value);
            ++valid_line_count_;
        } else if (cur.Trim() != "")
            // a non-empty line without separator continues the previous value
            last_entry.append_to_last(cur);
        else {
            // empty line signals end of entry
            entries_now.Add(last_entry);
            last_entry = new log_entry_line();
        }
    }

    int entry_count;
    lock (this)
        entry_count = entries_.Count + entries_now.Count;
    if (entries_now.Count > 0)
        --entry_count; // ...ignore last entry from computing avg - it may not be full
    // Heuristic: consider the trailing (unterminated) entry complete when it
    // has at least as many lines as the average completed entry. With no
    // completed entry yet there is no average - keep the trailing entry
    // pending instead of dividing by zero (this used to crash on the very
    // first chunk, when entries_ was empty and entries_now held one entry).
    int avg_entry_count = entry_count > 0 ? valid_line_count_ / entry_count : int.MaxValue;
    if (last_entry.ToString() != "" && last_entry.entry_count >= avg_entry_count) {
        // in this case, we guess the last entry was full
        entries_now.Add(last_entry);
        last_entry = new log_entry_line();
    }

    lock (this) {
        if (entries_now.Count > 0 && column_names.Count == 0)
            column_names = entries_now[0].names;
        last_entry_ = last_entry;
        foreach (var entry in entries_now)
            string_.add_preparsed_line(entry.ToString());
        entries_.AddRange(entries_now);
    }
}
// forces the WHOLE FILE to be reloaded
//
// be VERY careful calling this - I should call this only when the syntax has changed
public override void force_reload() {
    base.force_reload();
    lock (this) {
        // drop the partially-parsed trailing entry and reset the per-line
        // statistics, so the next on_new_lines() starts from scratch
        last_entry_ = new log_entry_line();
        valid_line_count_ = 0;
    }
}
protected override void on_new_lines(string new_lines) {
    // assume text is written line by line (thus, we read full lines)
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    List <log_entry_line> entries_now = new List <log_entry_line>(line_count);
    log_entry_line last_entry;
    lock (this)
        last_entry = last_entry_;
    for (int i = 0; i < line_count; ++i) {
        string cur = last_lines_string_.line_at(i);
        int separator = cur.IndexOf(separator_char_);
        if (separator >= 0) {
            // "name<separator>value" pair
            string name = cur.Substring(0, separator).Trim();
            string value = cur.Substring(separator + separator_char_.Length).Trim();
            last_entry.add(name, value);
            ++valid_line_count_;
        } else if (cur.Trim() != "") {
            // a non-empty line without separator is treated as message text
            last_entry.add("msg", cur);
        } else {
            // empty line signals end of entry
            entries_now.Add(last_entry);
            last_entry = new log_entry_line();
        }
    }
    int entry_count;
    lock (this)
        entry_count = entries_.Count + entries_now.Count;
    if (entries_now.Count > 0) {
        --entry_count; // ...ignore last entry from computing avg - it may not be full
    }
    // Heuristic: treat the trailing (unterminated) entry as complete when it
    // has at least as many lines as the average completed entry. With no
    // completed entry yet there is no average - keep the trailing entry
    // pending instead of dividing by zero (this used to crash on the very
    // first chunk, when entries_ was empty and entries_now held one entry).
    int avg_entry_count = entry_count > 0 ? valid_line_count_ / entry_count : int.MaxValue;
    if (last_entry.ToString() != "" && last_entry.entry_count >= avg_entry_count) {
        // in this case, we guess the last entry was full
        entries_now.Add(last_entry);
        last_entry = new log_entry_line();
    }
    lock (this) {
        if (entries_now.Count > 0 && column_names.Count == 0) {
            column_names = entries_now[0].names;
        }
        last_entry_ = last_entry;
        foreach (var entry in entries_now) {
            string_.add_preparsed_line(entry.ToString());
        }
        entries_.AddRange(entries_now);
    }
}
protected override void on_new_lines(string next) {
    // Accumulates raw text in last_ until at least one complete XML log entry
    // (delimited by the first element name seen, e.g. <event>...</event>) is
    // available, then parses the complete prefix into log entries.
    string now = "";
    string delimeter;
    bool needs_set_column_names;
    lock (this) {
        last_ += next;
        // wait for a minimum amount of text before trying to detect the delimiter
        if (last_.Length < MIN_LEN && delimeter_name_ == "")
            return;
        last_ = last_.TrimStart();
        if (delimeter_name_ == "") {
            if (last_.StartsWith("<?xml ")) {
                // we need to ignore xml prefix when searching for delimeter
                int ignore = last_.IndexOf(">");
                last_ = last_.Substring(ignore + 1).TrimStart();
            }
            // the delimiter is the name of the first element, up to '>' or whitespace
            // NOTE(review): assumes last_ starts with '<' and contains one of the
            // terminator chars - otherwise Substring would throw; confirm MIN_LEN
            // makes this safe for all inputs
            int delimeter_idx = last_.IndexOfAny( new []{'>',' ', '\n', '\r', '\t'});
            delimeter_name_ = last_.Substring(1, delimeter_idx - 1);
            logger.Debug("[parse] parsing xml by " + delimeter_name_);
        }
        // everything up to (and including) the last complete closing tag is parseable now
        string end = "/" + delimeter_name_;
        int last_idx = last_.LastIndexOf(end);
        if (last_idx >= 0) {
            // we can fully parse at least one entry
            int xml_end = last_.IndexOf('>', last_idx);
            if (xml_end > 0) {
                now = last_.Substring(0, xml_end + 1);
                last_ = last_.Substring(xml_end + 1);
            }
        }
        if ( now == "")
            // there's not enought text to parse a single log entry
            return;
        delimeter = delimeter_name_;
        needs_set_column_names = column_names.Count < 1;
    }
    XmlTextReader reader = new XmlTextReader(now, XmlNodeType.Element, xml_parse_context_) { Namespaces = false };
    // FIXME read all attributes , and save them as name.attr_name ; if name contains "xxx:", ignore that
    // timestamp -> date + time
    log_entry_line entry = new log_entry_line();
    string last_element = "";
    List<string> column_names_now = null;
    try {
        while (reader.Read()) {
            if (reader.NodeType == XmlNodeType.Element) {
                string element_name = reader.Name;
                last_element = simple_element_name(element_name);
                // read all its attributes
                for (int i = 0; i < reader.AttributeCount; ++i) {
                    reader.MoveToAttribute(i);
                    // attributes are stored under "<element>.<attribute>"
                    string name = last_element + "." + reader.Name;
                    string text = (reader.Value ?? "").Trim();
                    if (ignore_delimeter_name_on_log_entry_)
                        // attributes of the top-level (delimiter) element keep just the attribute name
                        if (element_name == delimeter)
                            name = reader.Name;
                    entry.analyze_and_add(name, text);
                }
            } else if (reader.NodeType == XmlNodeType.Text) {
                Debug.Assert(last_element != "");
                string text = (reader.Value ?? "").Trim();
                entry.analyze_and_add(last_element, text);
            } else if (reader.NodeType == XmlNodeType.EndElement) {
                if (reader.Name == delimeter) {
                    // we read a full object
                    if (needs_set_column_names && column_names_now == null)
                        column_names_now = entry.names;
                    lock (this) {
                        entries_.Add(entry);
                        string_.add_preparsed_line(entry.ToString());
                    }
                    entry = new log_entry_line();
                    last_element = "";
                }
            }
        }
        // publish the column names discovered from the first full entry
        // NOTE(review): this write is outside the lock - confirm readers tolerate it
        if ( column_names_now != null)
            column_names = column_names_now;
    } catch (Exception e) {
        logger.Fatal("[parse] could not parse xml: " + e);
    }
}
protected override void on_new_lines(string new_lines) {
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    if (line_count < 1)
        return;

    int start = 0;
    if (has_header_line_)
        lock (this)
            // once any entry exists, the column names can no longer be (re)read
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                this.column_names = parse_csv(last_lines_string_.line_at(0));
                start = 1;
            }

    var new_entries = new List<log_entry_line>();
    var names = this.column_names;
    // text carried over from lines that did not yet form a complete record
    // (a quoted CSV cell may contain newlines)
    var carry = "";
    for (int line = start; line < line_count; ++line) {
        string text = last_lines_string_.line_at(line);
        var fields = parse_csv(carry + text);
        if (fields.Count < names.Count) {
            carry += text + "\r\n";
            continue;
        }
        carry = "";
        if (fields.Count > names.Count)
            logger.Warn("invalid csv line, too many cells: " + fields.Count + " , instead of " + names.Count);
        var e = new log_entry_line();
        for (int f = 0; f < names.Count; ++f)
            e.add(names[f], fields[f]);
        new_entries.Add(e);
    }

    lock (this) {
        foreach (var e in new_entries)
            string_.add_preparsed_line(e.ToString());
        entries_.AddRange(new_entries);
    }
}
protected override void on_new_lines(string new_lines) {
    // Parses newly appended CSV text: consumes the header (once, before any
    // entry exists), glues partial records (quoted multi-line cells) across
    // calls via before_unprocessed_, and appends the parsed entries.
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    if (line_count < 1) {
        return;
    }
    int start_idx = 0;
    if (has_header_line_) {
        lock (this)
            // if at least one entry - can't read column names
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                // try_parse_header returns false when line 0 is not a header -
                // in that case it gets parsed as a regular data line below
                start_idx = try_parse_header(parse_csv(last_lines_string_.line_at(0))) ? 1 : 0;
            }
    }
    List <log_entry_line> entries_now = new List <log_entry_line>();
    var column_names = this.column_names;
    // text left over from the previous call that did not form a full record yet
    string before = before_unprocessed_;
    for (int i = start_idx; i < line_count; ++i) {
        var cur_line = last_lines_string_.line_at(i);
        var list = parse_csv(before + cur_line);
        if (list.Count < column_names.Count) {
            // incomplete record (a quoted cell may span lines) - accumulate
            before += cur_line + "\r\n";
            continue;
        }
        if (list.Count > column_names.Count) {
            // exactly one extra cell, once per file: assume a trailing free-form
            // message column and extend the schema with "msg"
            if (list.Count == column_names.Count + 1 && !has_appended_message_column_) {
                has_appended_message_column_ = true;
                var new_column_names = column_names.ToList();
                new_column_names.Add("msg");
                // NOTE(review): this.column_names is written outside the lock -
                // confirm concurrent readers tolerate that
                column_names = this.column_names = new_column_names;
            }
        }
        if (list.Count > column_names.Count) {
            // still too many cells: report, extra cells are dropped below
            logger.Warn("invalid csv line" + (i + line_offset_) + " too many cells: " + list.Count + " , instead of " + column_names.Count);
            reader.add_error("Bad CSV Line at " + (i + line_offset_ + 1) + ". Expected " + column_names.Count + " cells, got " + list.Count, error_list_keeper.level_type.warning);
        }
        log_entry_line entry = new log_entry_line();
        for (int j = 0; j < column_names.Count; ++j) {
            entry.analyze_and_add(column_names[j], list[j]);
        }
        entries_now.Add(entry);
        before = "";
    }
    // remember where we are (for error messages) and what is still unparsed
    line_offset_ += line_count;
    before_unprocessed_ = before;
    lock (this) {
        foreach (var entry in entries_now) {
            string_.add_preparsed_line(entry.ToString());
        }
        entries_.AddRange(entries_now);
    }
}
protected override void on_new_lines(string new_lines) {
    // Parses newly appended CSV text: consumes the header (once, before any
    // entry exists), glues partial records (quoted multi-line cells) across
    // calls via before_unprocessed_, and appends the parsed entries.
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    if (line_count < 1)
        return;
    int start_idx = 0;
    if (has_header_line_)
        lock (this)
            // if at least one entry - can't read column names
            if (this.column_names.Count < 1 && entries_.Count == 0)
                // try_parse_header returns false when line 0 is not a header -
                // in that case it gets parsed as a regular data line below
                start_idx = try_parse_header( parse_csv( last_lines_string_.line_at(0))) ? 1 : 0;
    List<log_entry_line> entries_now = new List<log_entry_line>();
    var column_names = this.column_names;
    // text left over from the previous call that did not form a full record yet
    string before = before_unprocessed_;
    for (int i = start_idx; i < line_count; ++i) {
        var cur_line = last_lines_string_.line_at(i);
        var list = parse_csv(before + cur_line);
        if (list.Count < column_names.Count) {
            // incomplete record (a quoted cell may span lines) - accumulate
            before += cur_line + "\r\n";
            continue;
        }
        // exactly one extra cell, once per file: assume a trailing free-form
        // message column and extend the schema with "msg"
        if ( list.Count > column_names.Count)
            if (list.Count == column_names.Count + 1 && !has_appended_message_column_) {
                has_appended_message_column_ = true;
                var new_column_names = column_names.ToList();
                new_column_names.Add("msg");
                // NOTE(review): this.column_names is written outside the lock -
                // confirm concurrent readers tolerate that
                column_names = this.column_names = new_column_names;
            }
        if (list.Count > column_names.Count) {
            // still too many cells: report, extra cells are dropped below
            logger.Warn("invalid csv line" + (i+line_offset_) + " too many cells: " + list.Count + " , instead of " + column_names.Count);
            reader.add_error("Bad CSV Line at " + (i+line_offset_+1) + ". Expected " + column_names.Count + " cells, got " + list.Count, error_list_keeper.level_type.warning);
        }
        log_entry_line entry = new log_entry_line();
        for ( int j = 0; j < column_names.Count; ++j)
            entry.analyze_and_add( column_names[j], list[j]);
        entries_now.Add(entry);
        before = "";
    }
    // remember where we are (for error messages) and what is still unparsed
    line_offset_ += line_count;
    before_unprocessed_ = before;
    lock (this) {
        foreach ( var entry in entries_now)
            string_.add_preparsed_line(entry.ToString());
        entries_.AddRange(entries_now);
    }
}
protected override void on_new_lines(string next) {
    // Buffers incoming text in last_ until at least one complete XML entry
    // (delimited by the first element name encountered) is available, then
    // parses the complete prefix into log entries.
    string now = "";
    string delimeter;
    bool needs_set_column_names;
    lock (this) {
        last_ += next;
        // wait for a minimum amount of text before trying to detect the delimiter
        if (last_.Length < MIN_LEN && delimeter_name_ == "") {
            return;
        }
        last_ = last_.TrimStart();
        if (delimeter_name_ == "") {
            if (last_.StartsWith("<?xml ")) {
                // we need to ignore xml prefix when searching for delimeter
                int ignore = last_.IndexOf(">");
                last_ = last_.Substring(ignore + 1).TrimStart();
            }
            // the delimiter is the name of the first element, up to '>' or whitespace
            // NOTE(review): assumes last_ starts with '<' and contains one of the
            // terminator chars - otherwise Substring would throw; confirm MIN_LEN
            // makes this safe for all inputs
            int delimeter_idx = last_.IndexOfAny(new [] { '>', ' ', '\n', '\r', '\t' });
            delimeter_name_ = last_.Substring(1, delimeter_idx - 1);
            logger.Debug("[parse] parsing xml by " + delimeter_name_);
        }
        // everything up to (and including) the last complete closing tag is parseable now
        string end = "/" + delimeter_name_;
        int last_idx = last_.LastIndexOf(end);
        if (last_idx >= 0) {
            // we can fully parse at least one entry
            int xml_end = last_.IndexOf('>', last_idx);
            if (xml_end > 0) {
                now = last_.Substring(0, xml_end + 1);
                last_ = last_.Substring(xml_end + 1);
            }
        }
        if (now == "") {
            // there's not enought text to parse a single log entry
            return;
        }
        delimeter = delimeter_name_;
        needs_set_column_names = column_names.Count < 1;
    }
    XmlTextReader reader = new XmlTextReader(now, XmlNodeType.Element, xml_parse_context_) { Namespaces = false };
    // FIXME read all attributes , and save them as name.attr_name ; if name contains "xxx:", ignore that
    // timestamp -> date + time
    log_entry_line entry = new log_entry_line();
    string last_element = "";
    List <string> column_names_now = null;
    try {
        while (reader.Read()) {
            if (reader.NodeType == XmlNodeType.Element) {
                string element_name = reader.Name;
                last_element = simple_element_name(element_name);
                // read all its attributes
                for (int i = 0; i < reader.AttributeCount; ++i) {
                    reader.MoveToAttribute(i);
                    // attributes are stored under "<element>.<attribute>"
                    string name = last_element + "." + reader.Name;
                    string text = (reader.Value ?? "").Trim();
                    if (ignore_delimeter_name_on_log_entry_) {
                        // attributes of the top-level (delimiter) element keep just the attribute name
                        if (element_name == delimeter) {
                            name = reader.Name;
                        }
                    }
                    entry.analyze_and_add(name, text);
                }
            } else if (reader.NodeType == XmlNodeType.Text) {
                Debug.Assert(last_element != "");
                string text = (reader.Value ?? "").Trim();
                entry.analyze_and_add(last_element, text);
            } else if (reader.NodeType == XmlNodeType.EndElement) {
                if (reader.Name == delimeter) {
                    // we read a full object
                    if (needs_set_column_names && column_names_now == null) {
                        column_names_now = entry.names;
                    }
                    lock (this) {
                        entries_.Add(entry);
                        string_.add_preparsed_line(entry.ToString());
                    }
                    entry = new log_entry_line();
                    last_element = "";
                }
            }
        }
        // publish the column names discovered from the first full entry
        // NOTE(review): this write is outside the lock - confirm readers tolerate it
        if (column_names_now != null) {
            column_names = column_names_now;
        }
    } catch (Exception e) {
        logger.Fatal("[parse] could not parse xml: " + e);
    }
}