// Computes which lines of the log match this filter row - it does not care about colors or
// the "addition" (context) lines; it only records which line indexes actually match.
//
// Incremental: only lines added since the previous call are examined; switching to a
// different log resets the cached results.
public void compute_line_matches(log_line_reader log) {
    log.refresh();
    if (old_line_matches_log_ != log) {
        // different log - everything we cached is invalid
        old_line_matches_log_ = log;
        line_matches_.Clear();
        old_line_count_ = 0;
    }

    // note: in order for a line to match, all filter lines must match it
    int new_line_count = log.line_count;
    for (int i = old_line_count_; i < new_line_count; ++i) {
        bool matches = true;
        foreach (filter_line fi in items_) {
            if (fi.part != filter_line.part_type.font) {
                if (!fi.matches(log.line_at(i))) {
                    matches = false;
                    break;
                }
            }
        }

        // BUGFIX: the boundary line (last line of the previous pass) is deliberately
        // re-checked on every call (see the comment at the bottom). Drop any previously
        // stored result for it first, so we neither accumulate duplicate entries (when it
        // still matches) nor keep a stale entry (when, now fully read, it no longer matches).
        if (i == old_line_count_)
            line_matches_.Remove(i);
        if (matches) {
            line_matches_.Add(i);
        }
    }

    // if we have at least one line - we'll recheck this last line next time - just in case
    // we did not fully read it last time
    old_line_count_ = new_line_count > 0 ? new_line_count - 1 : new_line_count;
}
// Once we've been forced to reload, this returns true exactly once per reader - each reader
// sees the "you must reload" signal a single time.
public bool forced_reload(log_line_reader reader) {
    lock (this) {
        bool first_time = !forced_reload_.Contains(reader);
        if (first_time)
            forced_reload_.Add(reader);
        return first_time;
    }
}
// Registers the log we should compute matches against and kicks off the background match
// computation; switching to a different log marks the current results as out of date.
public void compute_matches(log_line_reader log) {
    Debug.Assert(log != null);
    lock (this) {
        bool same_log = (new_log_ == log);
        if (!same_log)
            is_up_to_date_ = false;
        new_log_ = log;
    }
    start_compute_matches_thread();
}
// Inserts an "addition" (context) line surrounding a real match into the match list,
// unless that line is already present.
private void add_addition_line(int line_idx, Color fg, log_line_reader log) {
    // if insert_idx >= 0, that means we already have it (BinarySearch returns the bitwise
    // complement of the insertion point only when the item is NOT found)
    int insert_idx = match_indexes_.BinarySearch(line_idx);
    if (insert_idx < 0) {
        match_indexes_.Insert(~insert_idx, line_idx);
        // addition lines match no filter (empty_match) and are drawn with the given
        // (grayed) foreground on a white background
        matches_.Add(line_idx, new match {
            matches = empty_match,
            line = log.line_at(line_idx),
            line_idx = line_idx,
            font = new filter_line.font_info { bg = Color.White, fg = fg }
        });
    }
}
// Applies each filter row's "additions": for every newly matched line, the surrounding
// context lines (by line count, or by time distance in milliseconds) are inserted as
// grayed-out matches.
//
// FIXME note: we should normally care about the last match before old_match_count as well,
// to see maybe it still matches some "addition" lines, but we ignore that for now.
// When implementing the above, make sure to find the last matched line, not an existing addition.
private void apply_additions(int old_match_count, log_line_reader log, List<filter_row> rows) {
    // optimize for when no additions
    bool has_additions = false;
    foreach (filter_row row in rows)
        if (row.additions.Count > 0) {
            has_additions = true;
            break;
        }
    if (!has_additions)
        return;

    // line index -> grayed foreground color for that addition line
    Dictionary<int, Color> additions = new Dictionary<int, Color>();
    int new_match_count;
    lock (this)
        new_match_count = match_indexes_.Count;

    for (int match_idx = old_match_count; match_idx < new_match_count; ++match_idx) {
        int line_idx;
        lock (this)
            line_idx = match_indexes_[match_idx];
        var match = match_at(match_idx);

        // find the first filter row this line matched; addition lines themselves match
        // nothing and are skipped
        int matched_filter = -1;
        for (int filter_idx = 0; filter_idx < match.matches.Length && matched_filter < 0; ++filter_idx)
            if (match.matches[filter_idx])
                matched_filter = filter_idx;
        if (matched_filter < 0)
            continue;

        Color gray_fg = util.grayer_color(rows[matched_filter].get_match(line_idx).font.fg);
        foreach (var addition in rows[matched_filter].additions) {
            switch (addition.type) {
            case addition.number_type.lines:
                // take the N lines before/after the matched line
                for (int i = 0; i < addition.number; ++i) {
                    int add_line_idx = line_idx + (addition.add == addition.add_type.after ? i : -i);
                    // BUGFIX: guard against duplicate keys - the addition ranges of nearby
                    // matches can overlap, and Dictionary.Add throws ArgumentException on an
                    // existing key; keep the first color, same as the millisecs case below
                    if (add_line_idx >= 0 && add_line_idx < log.line_count && !additions.ContainsKey(add_line_idx))
                        additions.Add(add_line_idx, gray_fg);
                }
                break;

            case addition.number_type.millisecs:
                // take all lines within the given time distance of the matched line
                DateTime start = util.str_to_time(log.line_at(line_idx).part(info_type.time));
                for (int i = line_idx; i >= 0 && i < log.line_count;) {
                    i = i + (addition.add == addition.add_type.after ? 1 : -1);
                    if (i >= 0 && i < log.line_count) {
                        DateTime now = util.str_to_time(log.line_at(i).part(info_type.time));
                        int diff = (int) ((now - start).TotalMilliseconds);
                        bool ok = (addition.add == addition.add_type.after && diff <= addition.number) ||
                                  (addition.add == addition.add_type.before && -diff <= addition.number);
                        if (ok && !additions.ContainsKey(i))
                            additions.Add(i, gray_fg);
                        else
                            break;
                    }
                }
                break;

            default:
                Debug.Assert(false);
                break;
            }
        }
    }

    lock (this)
        foreach (var add_idx in additions)
            add_addition_line(add_idx.Key, add_idx.Value, log);
}
// Recomputes the matches against new_log: refreshes each filter row's raw line matches,
// then, for every new line, records which rows matched it and the font that the first
// enabled (or, failing that, first dimmed) matching row assigns to it. Finally applies the
// rows' "addition" (context) lines and updates the up-to-date flag.
private void compute_matches_impl(log_line_reader new_log, log_line_reader old_log) {
    Debug.Assert(new_log != null);
    int old_line_count = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        logger.Info("[filter] new log " + new_log.name);
        old_line_count = 0;
        force_recompute_matches();
    }
    bool has_new_lines = (old_line_count != new_log.line_count);

    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this)
        rows = rows_;
    if (old_line_count == 0)
        foreach (filter_row row in rows)
            row.refresh();
    foreach (filter_row row in rows)
        row.compute_line_matches(new_log);

    if (has_new_lines) {
        // the filter matches
        Dictionary<int, match> new_matches = new Dictionary<int, match>();
        // ... the indexes, in sorted order
        List<int> new_indexes = new List<int>();
        // from old_lines to log.line_count -> these need recomputing
        int old_match_count;
        lock (this)
            old_match_count = match_indexes_.Count;

        // loop-invariant - hoisted out of the per-line loop (was recomputed each iteration)
        bool any_filter = (rows.Count > 0);
        bool[] matches = new bool[rows.Count];
        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx) {
                // only enabled/dimmed rows participate in matching
                if (rows[filter_idx].enabled || rows[filter_idx].dimmed)
                    matches[filter_idx] = rows[filter_idx].line_matches.Contains(line_idx);
                else
                    matches[filter_idx] = false;
                if (matches[filter_idx])
                    any_match = true;
            }

            if (any_match) {
                // in this case, prefer the first "enabled" filter
                // (note: use .Length instead of the LINQ .Count() extension on the array)
                int enabled_idx = -1;
                for (int filter_idx = 0; filter_idx < matches.Length && enabled_idx < 0; ++filter_idx)
                    if (matches[filter_idx] && rows[filter_idx].enabled)
                        enabled_idx = filter_idx;
                // no enabled row matched - fall back to the first "dimmed" matching row
                int used_idx = -1;
                if (enabled_idx < 0)
                    for (int filter_idx = 0; filter_idx < matches.Length && used_idx < 0; ++filter_idx)
                        if (matches[filter_idx] && rows[filter_idx].dimmed)
                            used_idx = filter_idx;
                Debug.Assert(enabled_idx >= 0 || used_idx >= 0);
                int idx = enabled_idx >= 0 ? enabled_idx : used_idx;
                var cur_match = rows[idx].get_match(line_idx);
                new_matches.Add(line_idx, new match {
                    font = cur_match.font,
                    line = new_log.line_at(line_idx),
                    line_idx = line_idx,
                    matches = matches.ToArray()
                });
                new_indexes.Add(line_idx);
            }

            if (!any_filter) {
                // no filters at all - every line is a (black-on-white) match
                new_matches.Add(line_idx, new match {
                    matches = new bool[0],
                    line = new_log.line_at(line_idx),
                    line_idx = line_idx,
                    font = new filter_line.font_info { bg = Color.White, fg = Color.Black }
                });
                new_indexes.Add(line_idx);
            }
        }

        lock (this) {
            foreach (var kv in new_matches)
                matches_.Add(kv.Key, kv.Value);
            match_indexes_.AddRange(new_indexes);
        }
        apply_additions(old_match_count, new_log, rows);
    }

    bool is_up_to_date = new_log.up_to_date;
    lock (this)
        is_up_to_date_ = is_up_to_date;
}
// Remembers which log the matches should be computed against, then starts the background
// computation thread; a log change invalidates the "up to date" state.
public void compute_matches(log_line_reader log) {
    Debug.Assert(log != null);
    lock (this) {
        if (!ReferenceEquals(new_log_, log))
            is_up_to_date_ = false;
        new_log_ = log;
    }
    start_compute_matches_thread();
}
// Recomputes the matches against new_log: refreshes each filter row's raw line matches,
// then, for every new line, records which rows matched it and the font that the first
// enabled (or, failing that, first dimmed) matching row assigns to it. Finally applies the
// rows' "addition" (context) lines and updates the up-to-date flag.
private void compute_matches_impl(log_line_reader new_log, log_line_reader old_log) {
    Debug.Assert(new_log != null);
    int old_line_count = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        logger.Info("[filter] new log " + new_log.name);
        old_line_count = 0;
        force_recompute_matches();
    }
    bool has_new_lines = (old_line_count != new_log.line_count);

    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this)
        rows = rows_;
    if (old_line_count == 0)
        foreach (filter_row row in rows)
            row.refresh();
    foreach (filter_row row in rows)
        row.compute_line_matches(new_log);

    if (has_new_lines) {
        // the filter matches
        Dictionary<int, match> new_matches = new Dictionary<int, match>();
        // ... the indexes, in sorted order
        List<int> new_indexes = new List<int>();
        // from old_lines to log.line_count -> these need recomputing
        int old_match_count;
        lock (this)
            old_match_count = match_indexes_.Count;

        // loop-invariant - hoisted out of the per-line loop (was recomputed each iteration)
        bool any_filter = (rows.Count > 0);
        bool[] matches = new bool[rows.Count];
        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx) {
                // only enabled/dimmed rows participate in matching
                if (rows[filter_idx].enabled || rows[filter_idx].dimmed)
                    matches[filter_idx] = rows[filter_idx].line_matches.Contains(line_idx);
                else
                    matches[filter_idx] = false;
                if (matches[filter_idx])
                    any_match = true;
            }

            if (any_match) {
                // in this case, prefer the first "enabled" filter
                // (note: use .Length instead of the LINQ .Count() extension on the array)
                int enabled_idx = -1;
                for (int filter_idx = 0; filter_idx < matches.Length && enabled_idx < 0; ++filter_idx)
                    if (matches[filter_idx] && rows[filter_idx].enabled)
                        enabled_idx = filter_idx;
                // no enabled row matched - fall back to the first "dimmed" matching row
                int used_idx = -1;
                if (enabled_idx < 0)
                    for (int filter_idx = 0; filter_idx < matches.Length && used_idx < 0; ++filter_idx)
                        if (matches[filter_idx] && rows[filter_idx].dimmed)
                            used_idx = filter_idx;
                Debug.Assert(enabled_idx >= 0 || used_idx >= 0);
                int idx = enabled_idx >= 0 ? enabled_idx : used_idx;
                var cur_match = rows[idx].get_match(line_idx);
                new_matches.Add(line_idx, new match {
                    font = cur_match.font,
                    line = new_log.line_at(line_idx),
                    line_idx = line_idx,
                    matches = matches.ToArray()
                });
                new_indexes.Add(line_idx);
            }

            if (!any_filter) {
                // no filters at all - every line is a (black-on-white) match
                new_matches.Add(line_idx, new match {
                    matches = new bool[0],
                    line = new_log.line_at(line_idx),
                    line_idx = line_idx,
                    font = new filter_line.font_info { bg = Color.White, fg = Color.Black }
                });
                new_indexes.Add(line_idx);
            }
        }

        lock (this) {
            foreach (var kv in new_matches)
                matches_.Add(kv.Key, kv.Value);
            match_indexes_.AddRange(new_indexes);
        }
        apply_additions(old_match_count, new_log, rows);
    }

    bool is_up_to_date = new_log.up_to_date;
    lock (this)
        is_up_to_date_ = is_up_to_date;
}
// Applies each filter row's "additions": for every newly matched line, the surrounding
// context lines (by line count, or by time distance in milliseconds) are inserted as
// grayed-out matches.
//
// FIXME note: we should normally care about the last match before old_match_count as well,
// to see maybe it still matches some "addition" lines, but we ignore that for now.
// When implementing the above, make sure to find the last matched line, not an existing addition.
private void apply_additions(int old_match_count, log_line_reader log, List<filter_row> rows) {
    // optimize for when no additions
    bool has_additions = false;
    foreach (filter_row row in rows)
        if (row.additions.Count > 0) {
            has_additions = true;
            break;
        }
    if (!has_additions)
        return;

    // line index -> grayed foreground color for that addition line
    Dictionary<int, Color> additions = new Dictionary<int, Color>();
    int new_match_count;
    lock (this)
        new_match_count = match_indexes_.Count;

    for (int match_idx = old_match_count; match_idx < new_match_count; ++match_idx) {
        int line_idx;
        lock (this)
            line_idx = match_indexes_[match_idx];
        var match = match_at(match_idx);

        // find the first filter row this line matched; addition lines themselves match
        // nothing and are skipped
        int matched_filter = -1;
        for (int filter_idx = 0; filter_idx < match.matches.Length && matched_filter < 0; ++filter_idx)
            if (match.matches[filter_idx])
                matched_filter = filter_idx;
        if (matched_filter < 0)
            continue;

        Color gray_fg = util.grayer_color(rows[matched_filter].get_match(line_idx).font.fg);
        foreach (var addition in rows[matched_filter].additions) {
            switch (addition.type) {
            case addition.number_type.lines:
                // take the N lines before/after the matched line
                for (int i = 0; i < addition.number; ++i) {
                    int add_line_idx = line_idx + (addition.add == addition.add_type.after ? i : -i);
                    // BUGFIX: guard against duplicate keys - the addition ranges of nearby
                    // matches can overlap, and Dictionary.Add throws ArgumentException on an
                    // existing key; keep the first color, same as the millisecs case below
                    if (add_line_idx >= 0 && add_line_idx < log.line_count && !additions.ContainsKey(add_line_idx))
                        additions.Add(add_line_idx, gray_fg);
                }
                break;

            case addition.number_type.millisecs:
                // take all lines within the given time distance of the matched line
                DateTime start = util.str_to_time(log.line_at(line_idx).part(info_type.time));
                for (int i = line_idx; i >= 0 && i < log.line_count;) {
                    i = i + (addition.add == addition.add_type.after ? 1 : -1);
                    if (i >= 0 && i < log.line_count) {
                        DateTime now = util.str_to_time(log.line_at(i).part(info_type.time));
                        int diff = (int) ((now - start).TotalMilliseconds);
                        bool ok = (addition.add == addition.add_type.after && diff <= addition.number) ||
                                  (addition.add == addition.add_type.before && -diff <= addition.number);
                        if (ok && !additions.ContainsKey(i))
                            additions.Add(i, gray_fg);
                        else
                            break;
                    }
                }
                break;

            default:
                Debug.Assert(false);
                break;
            }
        }
    }

    lock (this)
        foreach (var add_idx in additions)
            add_addition_line(add_idx.Key, add_idx.Value, log);
}
// Inserts an "addition" (context) line surrounding a real match into the match list,
// unless that line is already present.
private void add_addition_line(int line_idx, Color fg, log_line_reader log) {
    // if insert_idx >= 0, that means we already have it (BinarySearch returns the bitwise
    // complement of the insertion point only when the item is NOT found)
    int insert_idx = match_indexes_.BinarySearch(line_idx);
    if (insert_idx < 0) {
        match_indexes_.Insert(~insert_idx, line_idx);
        // addition lines match no filter (empty_match) and are drawn with the given
        // (grayed) foreground on a white background
        matches_.Add(line_idx, new match {
            matches = empty_match,
            line = log.line_at(line_idx),
            line_idx = line_idx,
            font = new filter_line.font_info { bg = Color.White, fg = fg }
        });
    }
}
// Computes which lines of the log match this filter row - it does not care about colors or
// the "addition" (context) lines; it only records which line indexes actually match.
//
// Incremental: only lines added since the previous call are examined; switching to a
// different log resets the cached results.
public void compute_line_matches(log_line_reader log) {
    log.refresh();
    if (old_line_matches_log_ != log) {
        // different log - everything we cached is invalid
        old_line_matches_log_ = log;
        line_matches_.Clear();
        old_line_count_ = 0;
    }

    // note: in order for a line to match, all filter lines must match it
    int new_line_count = log.line_count;
    for (int i = old_line_count_; i < new_line_count; ++i) {
        bool matches = true;
        foreach (filter_line fi in items_) {
            if (fi.part != filter_line.part_type.font) {
                if (!fi.matches(log.line_at(i))) {
                    matches = false;
                    break;
                }
            }
        }

        // BUGFIX: the boundary line (last line of the previous pass) is deliberately
        // re-checked on every call (see the comment at the bottom). Drop any previously
        // stored result for it first, so we neither accumulate duplicate entries (when it
        // still matches) nor keep a stale entry (when, now fully read, it no longer matches).
        if (i == old_line_count_)
            line_matches_.Remove(i);
        if (matches) {
            line_matches_.Add(i);
        }
    }

    // if we have at least one line - we'll recheck this last line next time - just in case
    // we did not fully read it last time
    old_line_count_ = new_line_count > 0 ? new_line_count - 1 : new_line_count;
}
// Attaches this view to a (possibly different) log; does nothing when the log is unchanged.
// On a change, the per-view state is reset and the shown matches are cleared, giving the
// filter a grace period to read from the new log.
internal void set_log(log_line_reader log) {
    if (log_ == log)
        return;

    log_ = log;
    filter_changed_ = true;
    last_item_count_while_current_view_ = 0;
    visible_columns_refreshed_ = false;
    last_view_column_index_ = 0;
    // allow the filter some time to pick up the new log before we consider it settled
    wait_for_filter_to_read_from_new_log_ = DateTime.Now.AddMilliseconds(WAIT_FOR_NEW_LOG_MAX_MS);
    logger.Debug("[view] new log for " + name + " - " + log.name);
    model_.set_matches(new List<filter.match>(), this);
    update_x_of_y();
}
// Once we've been forced to reload, this returns true exactly once per reader - each reader
// sees the "you must reload" signal a single time.
public bool forced_reload(log_line_reader reader) {
    lock (this) {
        bool already_notified = forced_reload_.Contains(reader);
        if (!already_notified)
            forced_reload_.Add(reader);
        return !already_notified;
    }
}