// Converts a captured debug event into a displayable log entry
// (date/time split into separate columns, formatted like the rest of the app).
private log_entry_line debug_entry(capture_all_debug_events.debug_event evt) {
    var entry = new log_entry_line();
    entry.add("date", evt.date.ToString("dd-MM-yyyy"));
    entry.add("time", evt.date.ToString("HH:mm:ss.fff"));
    entry.add("Pid", "" + evt.process_id);
    entry.add("Process Name", evt.lo_process_name);
    entry.add("msg", evt.msg);
    return entry;
}
// Splits each incoming line on separator_ and turns it into a column->value entry.
// The first batch may start with a header line naming the columns; rows with fewer
// cells than columns are padded with empty strings.
protected override void on_new_lines(string new_lines) {
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    if (line_count < 1)
        return;

    int first_data_line = 0;
    if (has_header_line_)
        lock (this)
            // if at least one entry - can't read column names
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                this.column_names = split.to_list(last_lines_string_.line_at(0), separator_);
                first_data_line = 1;
            }

    var parsed = new List<log_entry_line>();
    var cols = this.column_names;
    for (int line = first_data_line; line < line_count; ++line) {
        var cells = split.to_list(last_lines_string_.line_at(line), separator_);
        var entry = new log_entry_line();
        for (int col = 0; col < cols.Count; ++col)
            entry.add(cols[col], col < cells.Count ? cells[col] : "");
        parsed.Add(entry);
    }

    lock (this) {
        foreach (var entry in parsed)
            string_.add_preparsed_line(entry.ToString());
        entries_.AddRange(parsed);
    }
}
// Parses incoming CSV lines into column->value entries. A logical CSV record may
// span several physical lines (e.g. quoted embedded newlines), so a line yielding
// fewer cells than there are columns is buffered in 'before' and re-parsed together
// with the following line(s) until the record is complete.
protected override void on_new_lines(string new_lines) {
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    if (line_count < 1) {
        return;
    }

    int start_idx = 0;
    if (has_header_line_) {
        lock (this)
            // if at least one entry - can't read column names
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                this.column_names = parse_csv(last_lines_string_.line_at(0));
                start_idx = 1;
            }
    }

    List<log_entry_line> entries_now = new List<log_entry_line>();
    var column_names = this.column_names;
    string before = "";
    for (int i = start_idx; i < line_count; ++i) {
        var cur_line = last_lines_string_.line_at(i);
        var list = parse_csv(before + cur_line);
        if (list.Count < column_names.Count) {
            // incomplete record - keep accumulating and retry with the next line
            before += cur_line + "\r\n";
            continue;
        }
        before = "";
        if (list.Count > column_names.Count) {
            logger.Warn("invalid csv line, too many cells: " + list.Count + " , instead of " + column_names.Count);
        }
        log_entry_line entry = new log_entry_line();
        for (int j = 0; j < column_names.Count; ++j) {
            entry.add(column_names[j], list[j]);
        }
        entries_now.Add(entry);
    }
    // FIX: a trailing partial record used to be silently discarded at the end of the
    // batch ('before' is a local, so it did not survive into the next call) - at
    // least surface the data loss in the log
    if (before != "") {
        logger.Warn("incomplete csv record at end of batch, dropped: " + before);
    }

    lock (this) {
        foreach (var entry in entries_now) {
            string_.add_preparsed_line(entry.ToString());
        }
        entries_.AddRange(entries_now);
    }
}
// Reads the current db row (reader_) into a log_entry_line, following the
// column mappings in mappings_ (Item1 = column name, Item2 = info_type).
// On the first failure to read the time column as a DateTime, it permanently
// falls back to reading it as a string. On any other read error, reading stops
// (continue_reading_ = false) and the error is recorded in errors_.
private log_entry_line line_from_reader() {
    log_entry_line row = new log_entry_line();
    int i = 0;
    try {
        for (; i < mappings_.Count; ++i) {
            bool is_time_ = mappings_[i].Item2 == info_type.time;
            if (is_time_) {
                if (read_time_as_string_) {
                    row.add_time(reader_.GetString(i));
                } else {
                    // read time column as datetime
                    try {
                        // if this throws, read it as string
                        row.add_time(reader_.GetDateTime(i));
                    } catch (Exception e) {
                        logger.Error("can't read time column as datetime (will try reading as string) :" + e.Message);
                        read_time_as_string_ = true;
                        // FIX: retry this same column on the next iteration. Previously
                        // execution fell through with the decremented index, so the
                        // GetString(i) below read the wrong column (and threw for i == -1
                        // when the time column was the first one).
                        --i;
                        continue;
                    }
                }
                if (settings.db_id_field == "") {
                    // sorting, for when we'll need to do tailing
                    last_time_str_ = reader_.GetString(i);
                }
            } else {
                // non-time column
                // FIXME perhaps I could speed this up - perhaps I might even be able to create a 'line' object directly. But for now, lets leave it like this
                row.add(mappings_[i].Item1, reader_.GetString(i));
            }
        }
        if (settings.db_id_field != "") {
            last_id_ = reader_.GetInt64(mappings_.Count);
        }
    } catch (Exception ee) {
        logger.Error("can't read db row " + ee.Message);
        // FIX: if GetInt64 (after the loop) threw, i == mappings_.Count here, and
        // indexing mappings_[i] used to throw IndexOutOfRangeException inside the
        // error handler itself - guard it and report the id field instead
        string field = i < mappings_.Count ? mappings_[i].Item1 : settings.db_id_field;
        errors_.add("Cannot read db field " + field + " : " + ee.Message);
        continue_reading_ = false;
    }
    // update last_id and time
    return (row);
}
// Parses the new batch of lines: each line is split on separator_ and mapped onto
// the known column names (an optional header line supplies them on the first batch).
// Rows that are short a few cells are padded with empty strings.
protected override void on_new_lines(string new_lines) {
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    if (line_count < 1) {
        return;
    }

    int data_start = 0;
    if (has_header_line_) {
        lock (this) {
            // if at least one entry - can't read column names
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                this.column_names = split.to_list(last_lines_string_.line_at(0), separator_);
                data_start = 1;
            }
        }
    }

    var new_entries = new List<log_entry_line>();
    var cols = this.column_names;
    for (int row = data_start; row < line_count; ++row) {
        var cells = split.to_list(last_lines_string_.line_at(row), separator_);
        var entry = new log_entry_line();
        for (int col = 0; col < cols.Count; ++col) {
            var cell = col < cells.Count ? cells[col] : "";
            entry.add(cols[col], cell);
        }
        new_entries.Add(entry);
    }

    lock (this) {
        foreach (var entry in new_entries) {
            string_.add_preparsed_line(entry.ToString());
        }
        entries_.AddRange(new_entries);
    }
}
// Converts a Windows Event Log record into our internal log_entry_line.
// Several EventRecord accessors can throw (provider metadata may be missing),
// so each fragile column falls back to an empty value instead of failing the row.
private log_entry_line to_log_entry(EventRecord rec, string log_name) {
    var entry = new log_entry_line();
    try {
        entry.add("Log", log_name);
        entry.add("EventID", "" + rec.Id);
        entry.add("level", event_level((StandardEventLevel)rec.Level));
        entry.add("date", rec.TimeCreated.Value.ToString("dd-MM-yyyy"));
        entry.add("time", rec.TimeCreated.Value.ToString("HH:mm:ss.fff"));

        // TaskDisplayName can throw for some providers - fall back to empty
        try {
            var task = rec.Task != 0 ? rec.TaskDisplayName : "";
            entry.add("Category", task ?? "");
        } catch {
            entry.add("Category", "");
        }

        entry.add("Machine Name", rec.MachineName);
        entry.add("Source", "" + rec.ProviderName);
        entry.add("User Name", rec.UserId != null ? rec.UserId.Value : "");

        /* 1.5.14+ this generates waaaay too many errors - just ignore for now
         * try {
         *     var keywords = rec.KeywordsDisplayNames;
         *     entry.add("Keywords", keywords != null ? util.concatenate(keywords, ",") : "");
         * } catch {
         *     entry.add("Keywords", "");
         * }*/

        // note: FormatDescription throws a lot of exceptions; however, we don't have
        // much of a choice here - just showing the raw properties is rather useless,
        // so fall back to concatenating them, and finally to an empty message
        try {
            var desc = rec.FormatDescription();
            entry.add("msg", desc ?? "");
        } catch {
            try {
                string desc = util.concatenate(rec.Properties.Select(x => x.Value.ToString()), "\r\n");
                entry.add("msg", desc);
            } catch {
                entry.add("msg", "");
            }
        }
    } catch (Exception e) {
        logger.Fatal("can't convert EventRectord to entry " + e.Message);
    }
    return entry;
}
// Parses incoming CSV lines into column->value entries. A logical CSV record may
// span several physical lines (e.g. quoted embedded newlines), so a line yielding
// fewer cells than there are columns is buffered in 'before' and re-parsed together
// with the following line(s) until the record is complete.
protected override void on_new_lines(string new_lines) {
    int line_count = 0;
    last_lines_string_.set_lines(new_lines, ref line_count);
    if (line_count < 1)
        return;

    int start_idx = 0;
    if (has_header_line_)
        lock (this)
            // if at least one entry - can't read column names
            if (this.column_names.Count < 1 && entries_.Count == 0) {
                this.column_names = parse_csv(last_lines_string_.line_at(0));
                start_idx = 1;
            }

    List<log_entry_line> entries_now = new List<log_entry_line>();
    var column_names = this.column_names;
    string before = "";
    for (int i = start_idx; i < line_count; ++i) {
        var cur_line = last_lines_string_.line_at(i);
        var list = parse_csv(before + cur_line);
        if (list.Count < column_names.Count) {
            // incomplete record - keep accumulating and retry with the next line
            before += cur_line + "\r\n";
            continue;
        }
        before = "";
        if (list.Count > column_names.Count)
            logger.Warn("invalid csv line, too many cells: " + list.Count + " , instead of " + column_names.Count);
        log_entry_line entry = new log_entry_line();
        for (int j = 0; j < column_names.Count; ++j)
            entry.add(column_names[j], list[j]);
        entries_now.Add(entry);
    }
    // FIX: a trailing partial record used to be silently discarded at the end of the
    // batch ('before' is a local, so it did not survive into the next call) - at
    // least surface the data loss in the log
    if (before != "")
        logger.Warn("incomplete csv record at end of batch, dropped: " + before);

    lock (this) {
        foreach (var entry in entries_now)
            string_.add_preparsed_line(entry.ToString());
        entries_.AddRange(entries_now);
    }
}