// Recomputes which lines of new_log match this filter's rows and appends (or, after a forced
// recompute, replaces) entries in matches_. old_log is the reader we filtered last time; when it
// differs from new_log (or a forced reload happened) all matches are recomputed from line 0.
// Runs on a background thread; shared state (rows_, matches_ count, flags) is read under lock(this).
private void compute_matches_impl(log_reader new_log, log_reader old_log) {
    Debug.Assert(new_log != null);

    if (app.inst.no_ui.read_full_log_first) {
        // 1.0.76d+ - wait until log has fully loaded - in the hopes of using less memory
        if (new_log == old_log) {
            bool fully_read_once;
            lock (this)
                fully_read_once = new_log_fully_read_at_least_once_;
            if (!fully_read_once) {
                if (!new_log.parser_up_to_date)
                    return;
                lock (this)
                    new_log_fully_read_at_least_once_ = true;
            }
        }
    }

    int line_count_before = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        bool first_time_log = new_log != old_log && old_log_ == null;
        if (first_time_log || old_log == new_log)
            logger.Info((new_log != old_log ? "[filter] new log " : "[filter] forced refresh of ") + new_log.tab_name + " / " + new_log.log_name);
        lock (this)
            if (matches_.count > 0)
                force_recompute_matches_ = true;
    }
    lock (this)
        if (force_recompute_matches_)
            line_count_before = 0;   // recompute everything from scratch
    bool got_new_lines = line_count_before != new_log.line_count;

    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> cur_rows;
    lock (this)
        cur_rows = rows_;

    if (line_count_before == 0)
        foreach (filter_row r in cur_rows)
            r.refresh();
    foreach (filter_row r in cur_rows)
        r.compute_line_matches(new_log);

    if (got_new_lines) {
        bool full_log = row_count < 1;
        // full log keeps every line; otherwise guess ~20% of new lines will match
        int expected_capacity = full_log
            ? (new_log.line_count - line_count_before)
            : (new_log.line_count - line_count_before) / 5;
        // the filter matches
        var fresh_matches = new memory_optimized_list<match> {
            min_capacity = expected_capacity,
            name = "temp_m " + name,
            increase_percentage = .7
        };

        // from old_lines to log.line_count -> these need recomputing
        int match_count_before = matches_.count;
        var per_row_match = new BitArray(cur_rows.Count);
        // handle the case where all the filters are disabled (thus, show all lines)
        int running_filters = cur_rows.Count(x => x.enabled || x.dimmed);

        for (int line_idx = line_count_before; line_idx < new_log.line_count; ++line_idx) {
            bool line_matched = false;
            bool saw_non_apply_to_existing = false;   // 1.0.69 added "apply to existing filters"
            for (int row_idx = 0; row_idx < per_row_match.Length; ++row_idx) {
                var r = cur_rows[row_idx];
                if ((r.enabled || r.dimmed) && !r.apply_to_existing_lines) {
                    per_row_match[row_idx] = r.line_matches.Contains(line_idx);
                    saw_non_apply_to_existing = true;
                } else
                    per_row_match[row_idx] = false;
                if (per_row_match[row_idx])
                    line_matched = true;
            }
            if (!saw_non_apply_to_existing)
                // in this case - all filters apply to existing lines - thus, by default, we show all the lines
                line_matched = true;

            // 1.0.69 "apply to existing filters" is applied afterwards
            font_info existing_filter_font = null;
            if (line_matched)
                for (int row_idx = 0; row_idx < per_row_match.Length && line_matched; ++row_idx) {
                    var r = cur_rows[row_idx];
                    if (!(r.enabled || r.dimmed) || !r.apply_to_existing_lines)
                        continue;
                    bool font_only = r.has_font_info;
                    if (r.line_matches.Contains(line_idx)) {
                        if (existing_filter_font == null && font_only) {
                            // in this case, use the font from "apply to existing filters" - only if the user has specifically set it
                            existing_filter_font = r.get_match(line_idx).font;
                            per_row_match[row_idx] = true;
                        }
                    } else if (!font_only)
                        // we're filtering this line out
                        line_matched = false;
                }

            if (line_matched) {
                font_info merged_font = (existing_filter_font ?? font_info.default_font).copy();
                // first enabled row that matched; fall back to first dimmed one
                int first_enabled = -1;
                for (int row_idx = 0; row_idx < per_row_match.Length && first_enabled < 0; ++row_idx)
                    if (per_row_match[row_idx] && cur_rows[row_idx].enabled)
                        first_enabled = row_idx;
                int first_dimmed = -1;
                if (first_enabled < 0)
                    for (int row_idx = 0; row_idx < per_row_match.Length && first_dimmed < 0; ++row_idx)
                        if (per_row_match[row_idx] && cur_rows[row_idx].dimmed)
                            first_dimmed = row_idx;
                if (first_enabled >= 0 || first_dimmed >= 0) {
                    if (first_enabled >= 0) {
                        // 1.3.29g+ apply and merge all enabled filters
                        for (int row_idx = 0; row_idx < per_row_match.Length; ++row_idx)
                            if (per_row_match[row_idx] && cur_rows[row_idx].enabled)
                                merged_font.merge(cur_rows[row_idx].get_match(line_idx).font);
                    } else
                        merged_font.merge(cur_rows[first_dimmed].get_match(line_idx).font);
                }
                fresh_matches.Add(new_match(new BitArray(per_row_match), new_log.line_at(line_idx), line_idx, merged_font));
                continue;
            }

            if (running_filters == 0)
                fresh_matches.Add(new_match(new BitArray(0), new_log.line_at(line_idx), line_idx, font_info.default_font));
        }

        bool replace_all = false;
        lock (this)
            if (force_recompute_matches_) {
                replace_all = true;
                force_recompute_matches_ = false;
            }
        if (fresh_matches.Count > 0) {
            if (replace_all)
                matches_.set_range(fresh_matches);
            else
                matches_.add_range(fresh_matches);
            lock (this)
                last_change_ = DateTime.Now;
        }

        apply_additions(match_count_before, new_log, cur_rows);
        if (fresh_matches.Count > app.inst.no_ui.min_filter_capacity) {
            logger.Debug("[memory] GC.collect - from filter " + name);
            GC.Collect();
        }
    }

    bool up_to_date_now = new_log.up_to_date;
    lock (this)
        is_up_to_date_ = up_to_date_now;
}
// Recomputes which lines of new_log match this filter's rows and appends (or, after a forced
// recompute, replaces) entries in matches_. old_log is the reader we filtered last time; when it
// differs from new_log (or a forced reload happened) all matches are recomputed from line 0.
// Runs on a background thread; shared state (rows_, matches_ count, flags) is read under lock(this).
private void compute_matches_impl(log_reader new_log, log_reader old_log) {
    Debug.Assert(new_log != null);

    if (app.inst.no_ui.read_full_log_first) {
        // 1.0.76d+ - wait until log has fully loaded - in the hopes of using less memory
        if (new_log == old_log) {
            bool at_least_once;
            lock (this)
                at_least_once = new_log_fully_read_at_least_once_;
            if (!at_least_once) {
                if (!new_log.parser_up_to_date)
                    return;
                lock (this)
                    new_log_fully_read_at_least_once_ = true;
            }
        }
    }

    int old_line_count = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        bool changed_log = new_log != old_log && old_log_ == null;
        if (changed_log || old_log == new_log)
            logger.Info((new_log != old_log ? "[filter] new log " : "[filter] forced refresh of ") + new_log.tab_name + " / " + new_log.log_name);
        lock (this)
            if (matches_.count > 0)
                force_recompute_matches_ = true;
    }
    lock (this)
        if (force_recompute_matches_)
            old_line_count = 0;   // recompute everything from scratch
    bool has_new_lines = (old_line_count != new_log.line_count);

    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this)
        rows = rows_;

    if (old_line_count == 0)
        foreach (filter_row row in rows)
            row.refresh();
    foreach (filter_row row in rows)
        row.compute_line_matches(new_log);

    if (has_new_lines) {
        status?.set_status("Computing filters... This might take a moment", status_ctrl.status_type.msg, 10000);
        bool is_full_log = row_count < 1;
        // full log keeps every line; otherwise guess ~20% of new lines will match
        int expected_capacity = is_full_log
            ? (new_log.line_count - old_line_count)
            : (new_log.line_count - old_line_count) / 5;
        // the filter matches
        memory_optimized_list<match> new_matches = new memory_optimized_list<match> {
            min_capacity = expected_capacity,
            name = "temp_m " + name,
            increase_percentage = .7
        };

        // from old_lines to log.line_count -> these need recomputing
        int old_match_count = matches_.count;
        bool[] row_matches_filter = new bool[rows.Count];
        // handle the case where all the filters are disabled (thus, show all lines)
        int run_filter_count = rows.Count(x => x.enabled);
        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            // Go through all filters
            for (int filter_idx = 0; filter_idx < row_matches_filter.Length; ++filter_idx) {
                var row = rows[filter_idx];
                if (row.enabled && row.line_matches.Contains(line_idx)) {
                    row_matches_filter[filter_idx] = true;
                    any_match = true;
                } else
                    // BUGFIX: the array is reused across lines, so entries must be cleared here;
                    // otherwise a filter that matched any earlier line stays flagged for all later
                    // lines, polluting the stored BitArray and merging fonts from non-matching rows
                    row_matches_filter[filter_idx] = false;
            }
            if (any_match) {
                font_info font = font_info.default_font.copy();
                // 1.3.29g+ apply and merge all enabled filters
                for (int filter_idx = 0; filter_idx < row_matches_filter.Length; ++filter_idx)
                    if (row_matches_filter[filter_idx])
                        font.merge(rows[filter_idx].get_match(line_idx).font);
                new_matches.Add(new_match(new BitArray(row_matches_filter), new_log.line_at(line_idx), line_idx, font));
                continue;
            }
            if (run_filter_count == 0)
                // no filter is running -> show every line with the default font
                new_matches.Add(new_match(new BitArray(0), new_log.line_at(line_idx), line_idx, font_info.default_font));
        }

        bool replace = false;
        lock (this)
            if (force_recompute_matches_) {
                replace = true;
                force_recompute_matches_ = false;
            }
        if (new_matches.Count > 0) {
            if (replace)
                matches_.set_range(new_matches);
            else
                matches_.add_range(new_matches);
            lock (this)
                last_change_ = DateTime.Now;
        }

        apply_additions(old_match_count, new_log, rows);
        if (new_matches.Count > app.inst.no_ui.min_filter_capacity) {
            logger.Debug("[memory] GC.collect - from filter " + name);
            GC.Collect();
        }
    }

    bool is_up_to_date = new_log.up_to_date;
    lock (this)
        is_up_to_date_ = is_up_to_date;
}