// computes the line matches - does not care about colors or the additions - just to know which lines actually match
public void compute_line_matches(log_line_reader log) {
    log.refresh();
    if (old_line_matches_log_ != log) {
        old_line_matches_log_ = log;
        line_matches_.Clear();
        old_line_count_ = 0;
    }

    // note: for a log line to match, all filter lines (except font-only parts) must match
    int new_line_count = log.line_count;
    for (int i = old_line_count_; i < new_line_count; ++i) {
        bool matches = true;
        foreach (filter_line fi in items_) {
            if (fi.part != filter_line.part_type.font) {
                if (!fi.matches(log.line_at(i))) {
                    matches = false;
                    break;
                }
            }
        }
        if (matches)
            line_matches_.Add(i);
    }

    // if we have at least one line - we'll recheck this last line next time - just in case we did not fully read it last time
    old_line_count_ = new_line_count > 0 ? new_line_count - 1 : new_line_count;
}
private void compute_matches_impl(log_line_reader new_log, log_line_reader old_log) {
    Debug.Assert(new_log != null);
    int old_line_count = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        logger.Info("[filter] new log " + new_log.name);
        old_line_count = 0;
        force_recompute_matches();
    }
    bool has_new_lines = (old_line_count != new_log.line_count);

    // get a reference to rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this)
        rows = rows_;

    if (old_line_count == 0)
        foreach (filter_row row in rows)
            row.refresh();
    foreach (filter_row row in rows)
        row.compute_line_matches(new_log);

    if (has_new_lines) {
        // the filter matches
        Dictionary<int, match> new_matches = new Dictionary<int, match>();
        // ... the indexes, in sorted order
        List<int> new_indexes = new List<int>();

        // from old_lines to log.line_count -> these need recomputing
        int old_match_count;
        lock (this)
            old_match_count = match_indexes_.Count;

        bool[] matches = new bool[rows.Count];
        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx) {
                if (rows[filter_idx].enabled || rows[filter_idx].dimmed)
                    matches[filter_idx] = rows[filter_idx].line_matches.Contains(line_idx);
                else
                    matches[filter_idx] = false;
                if (matches[filter_idx])
                    any_match = true;
            }

            if (any_match) {
                // in this case, prefer the first "enabled" filter
                int enabled_idx = -1;
                for (int filter_idx = 0; filter_idx < matches.Length && enabled_idx < 0; ++filter_idx)
                    if (matches[filter_idx] && rows[filter_idx].enabled)
                        enabled_idx = filter_idx;

                // ... only if no enabled row matched, fall back to the first "dimmed" row
                int used_idx = -1;
                if (enabled_idx < 0)
                    for (int filter_idx = 0; filter_idx < matches.Length && used_idx < 0; ++filter_idx)
                        if (matches[filter_idx] && rows[filter_idx].dimmed)
                            used_idx = filter_idx;

                Debug.Assert(enabled_idx >= 0 || used_idx >= 0);
                int idx = enabled_idx >= 0 ? enabled_idx : used_idx;
                var cur_match = rows[idx].get_match(line_idx);
                new_matches.Add(line_idx, new match {
                    font = cur_match.font,
                    line = new_log.line_at(line_idx),
                    line_idx = line_idx,
                    matches = matches.ToArray()
                });
                new_indexes.Add(line_idx);
            }

            // no filter rows at all -> every line is shown, with default colors
            bool any_filter = (rows.Count > 0);
            if (!any_filter) {
                new_matches.Add(line_idx, new match {
                    matches = new bool[0],
                    line = new_log.line_at(line_idx),
                    line_idx = line_idx,
                    font = new filter_line.font_info { bg = Color.White, fg = Color.Black }
                });
                new_indexes.Add(line_idx);
            }
        }

        lock (this) {
            foreach (var kv in new_matches)
                matches_.Add(kv.Key, kv.Value);
            match_indexes_.AddRange(new_indexes);
        }
        apply_additions(old_match_count, new_log, rows);
    }

    bool is_up_to_date = new_log.up_to_date;
    lock (this)
        is_up_to_date_ = is_up_to_date;
}
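// Illustration only - not part of the original source: the row-preference rule used in
// compute_matches_impl above, pulled into a hypothetical helper to make the intent explicit.
// Given the per-row match flags for one log line, prefer the first matching row that is
// "enabled"; only if no enabled row matched, fall back to the first matching "dimmed" row.
// Returns -1 when no usable row matched (the caller asserts this never happens).
private static int preferred_row_index(bool[] matches, List<filter_row> rows) {
    Debug.Assert(matches.Length == rows.Count);
    // first pass: enabled rows win
    for (int i = 0; i < matches.Length; ++i)
        if (matches[i] && rows[i].enabled)
            return i;
    // second pass: dimmed rows are only a fallback
    for (int i = 0; i < matches.Length; ++i)
        if (matches[i] && rows[i].dimmed)
            return i;
    return -1;
}
// Usage sketch: in compute_matches_impl, the two enabled_idx/used_idx loops could be
// replaced by a single call:
//   int idx = preferred_row_index(matches, rows);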