// computes the line matches - does not care about colors or the additions - just to know which lines actually match
public void compute_line_matches(log_reader log) {
    log.refresh();

    if (old_line_matches_log_ != log) {
        // a different log than last time - throw away everything and match from scratch
        old_line_matches_log_ = log;
        line_matches_.Clear();
        old_line_count_ = 0;
    }

    // note: in order to match, all lines must match
    int line_count_now = log.line_count;
    try {
        for (int idx = old_line_count_; idx < line_count_now; ++idx) {
            // font-only entries never filter a line out - only non-font parts take part in matching
            bool is_match = items_.Where(fi => fi.part != part_type.font)
                                  .All(fi => fi.matches(log.line_at(idx)));
            if (is_match)
                line_matches_.Add(idx);
        }
        // if we have at least one line - we'll recheck this last line next time - just in case we did not fully read it last time
        old_line_count_ = line_count_now > 0 ? line_count_now - 1 : line_count_now;
    } catch (Exception e) {
        logger.Error("[filter] error computing line matches for filter row : " + e.Message);
        // restart everything - probably the log got re-written
        old_line_count_ = 0;
    }
}
// once we've been forced to reload - we should return true once per each reader
public bool forced_reload(log_reader reader) {
    lock (this) {
        // return true exactly once per reader: first sighting records it, later calls see it recorded
        bool seen_before = forced_reload_.Contains(reader);
        if (!seen_before)
            forced_reload_.Add(reader);
        return !seen_before;
    }
}
// Inserts a single "addition" line (context line shown around a real match) into matches_,
// keeping matches_ sorted by line index.
private void add_addition_line(int line_idx, Color fg, log_reader log) {
    // IMPORTANT: I did NOT test the binary_search_insert!
    int where = matches_.insert_line_idx(line_idx);
    if (where < 0)
        return; // negative index -> do not insert (presumably the line is already present - TODO confirm)

    var addition_font = new font_info { bg = util.transparent, fg = fg };
    matches_.insert(where, new_match(empty_match, log.line_at(line_idx), line_idx, addition_font));
}
// Schedules a (re)computation of this filter's matches against the given log.
// The actual work happens on the compute-matches thread.
public void compute_matches(log_reader log) {
    Debug.Assert(log != null);
    lock (this) {
        bool same_log = new_log_ == log;
        // a different log than the one we last computed for -> our matches are stale
        if (!same_log)
            is_up_to_date_ = false;
        new_log_ = log;
    }
    start_compute_matches_thread();
}
// Schedules a (re)computation of this filter's matches against the given log,
// propagating the log's display order onto the matches collection first.
public void compute_matches(log_reader log) {
    Debug.Assert(log != null);
    lock (this) {
        // switching logs invalidates whatever we computed so far
        if (new_log_ != log)
            is_up_to_date_ = false;
        new_log_ = log;
    }
    // keep the matches' display order in sync with the log's order
    matches_.show_elements_in_reverse_order = log.reverse_order;
    start_compute_matches_thread();
}
/// <summary>
/// Computes the line matches -
/// sets this.line_matches to a valid value
/// - does not care about colors or the additions - just to know which lines actually match.
/// Incremental: only lines added since the previous call are examined, unless the log changed,
/// in which case everything is recomputed from scratch.
/// </summary>
/// <param name="log">The log to match against; refreshed before matching.</param>
public void compute_line_matches(log_reader log) {
    log.refresh();
    if (old_line_matches_log_ != log) {
        // a different log than last time - restart matching from line zero
        old_line_matches_log_ = log;
        line_matches_.Clear();
        old_line_count_ = 0;
    }
    // note: in order to match, all lines must match
    int new_line_count = log.line_count;
    try {
        for (int i = old_line_count_; i < new_line_count; ++i) {
            bool matches = true;
            foreach (filter_line fi in items_) {
                // font-only entries never filter a line out
                if (fi.part != part_type.font) {
                    if (!fi.matches(log.line_at(i))) {
                        matches = false;
                        break;
                    }
                }
            }
            if (matches) {
                // NOTE(review): the last matched line can be re-added on the next call (see below);
                // presumably line_matches_ tolerates/ignores duplicates - TODO confirm
                line_matches_.Add(i);
            }
        }
        // if we have at least one line - we'll recheck this last line next time - just in case we did not fully read it last time
        old_line_count_ = new_line_count > 0 ? new_line_count - 1 : new_line_count;
    } catch (Exception e) {
        logger.Error("[filter] error computing line matches for filter row : " + e.Message);
        // restart everything - probably the log got re-written
        old_line_count_ = 0;
    }
}
// Inserts one "addition" (context) line into matches_ at its sorted position.
private void add_addition_line(int line_idx, Color fg, log_reader log) {
    // IMPORTANT: I did NOT test the binary_search_insert!
    int insert_at = matches_.insert_line_idx(line_idx);
    if (insert_at >= 0) {
        var font = new font_info { bg = util.transparent, fg = fg };
        var addition = new_match(empty_match, log.line_at(line_idx), line_idx, font);
        matches_.insert(insert_at, addition);
    }
}
// Applies the filter rows' "additions" (extra context lines shown around matched lines,
// in a grayed-out color) for all matches appended since old_match_count.
//
// FIXME note: we should normally care about the last match before old_match_count as well, to see maybe it still matches some "addition" lines
// but we ignore that for now
//
// when implementing the above, make sure to find the last matched line, not an existing addition
private void apply_additions(int old_match_count, log_reader log, List<filter_row> rows) {
    // optimize for when no additions
    bool has_additions = false;
    foreach (filter_row row in rows)
        if (row.additions.Count > 0) {
            has_additions = true;
            break;
        }
    if (!has_additions)
        return;

    // line index -> foreground color for the addition line at that index
    Dictionary<int, Color> additions = new Dictionary<int, Color>();
    int new_match_count = matches_.count;
    for (int match_idx = old_match_count; match_idx < new_match_count; ++match_idx) {
        int line_idx = matches_.match_at(match_idx).line_idx;
        var match = match_at(match_idx);
        // use the first filter that matched this line - its additions are the ones we apply
        int matched_filter = -1;
        for (int filter_idx = 0; filter_idx < match.matches.Length && matched_filter < 0; ++filter_idx)
            if (match.matches[filter_idx])
                matched_filter = filter_idx;
        if (matched_filter < 0)
            continue;

        Color gray_fg = util.grayer_color(rows[matched_filter].get_match(line_idx).font.fg);
        foreach (var addition in rows[matched_filter].additions) {
            switch (addition.type) {
            case addition.number_type.lines:
                for (int i = 0; i < addition.number; ++i) {
                    int add_line_idx = line_idx + (addition.add == addition.add_type.after ? i : -i);
                    // FIX: guard against duplicate keys - additions from nearby matched lines can
                    // overlap, and Dictionary.Add throws ArgumentException on an existing key
                    // (the millisecs branch below already guards the same way)
                    if (add_line_idx >= 0 && add_line_idx < log.line_count && !additions.ContainsKey(add_line_idx))
                        additions.Add(add_line_idx, gray_fg);
                }
                break;
            case addition.number_type.millisecs:
                // walk away from the matched line while its timestamp stays within the window
                DateTime start = util.str_to_time(log.line_at(line_idx).part(info_type.time));
                for (int i = line_idx; i >= 0 && i < log.line_count;) {
                    i = i + (addition.add == addition.add_type.after ? 1 : -1);
                    if (i >= 0 && i < log.line_count) {
                        DateTime now = util.str_to_time(log.line_at(i).part(info_type.time));
                        int diff = (int)((now - start).TotalMilliseconds);
                        bool ok = (addition.add == addition.add_type.after && diff <= addition.number)
                               || (addition.add == addition.add_type.before && -diff <= addition.number);
                        if (ok && !additions.ContainsKey(i))
                            additions.Add(i, gray_fg);
                        else
                            break;
                    }
                }
                break;
            default:
                Debug.Assert(false);
                break;
            }
        }
    }

    matches_.prepare_add(additions.Count);
    foreach (var add_idx in additions)
        add_addition_line(add_idx.Key, add_idx.Value, log);
}
// Runs on the compute-matches thread: recomputes this filter's matches against new_log,
// appending matches for newly read lines, or recomputing everything on a new/force-reloaded log.
// NOTE(review): the lock/flag ordering here is intricate - code left byte-identical, comments only.
private void compute_matches_impl(log_reader new_log, log_reader old_log) {
    Debug.Assert(new_log != null);
    if ( app.inst.no_ui.read_full_log_first) // 1.0.76d+ - wait until log has fully loaded - in the hopes of using less memory
        if (new_log == old_log) {
            bool at_least_once;
            lock (this) at_least_once = new_log_fully_read_at_least_once_;
            if (!at_least_once) {
                // parser hasn't finished the first full read yet - postpone computing matches
                if (!new_log.parser_up_to_date)
                    return;
                lock (this) new_log_fully_read_at_least_once_ = true;
            }
        }

    int old_line_count = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        bool changed_log = new_log != old_log && old_log_ == null;
        if ( changed_log || old_log == new_log)
            logger.Info((new_log != old_log ? "[filter] new log " : "[filter] forced refresh of " ) + new_log.tab_name + " / " + new_log.log_name);
        // new or force-reloaded log -> everything computed so far is invalid
        lock (this) force_recompute_matches_ = true;
    }
    lock(this) if (force_recompute_matches_) old_line_count = 0;
    bool has_new_lines = (old_line_count != new_log.line_count);

    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this) rows = rows_;
    if ( old_line_count == 0)
        foreach ( filter_row row in rows)
            row.refresh();
    foreach ( filter_row row in rows)
        row.compute_line_matches(new_log);

    if (has_new_lines) {
        bool is_full_log = row_count < 1;
        int expected_capacity = is_full_log ? (new_log.line_count - old_line_count) : (new_log.line_count - old_line_count) / 5;
        // the filter matches
        memory_optimized_list<match> new_matches = new memory_optimized_list<match>() { min_capacity = expected_capacity, name = "temp_m " + name, increase_percentage = .7 };
        // from old_lines to log.line_count -> these need recomputing
        int old_match_count = matches_.count;
        // reused per line: which filter rows match the current line
        BitArray matches = new BitArray(rows.Count);
        // handle the case where all the filters are disabled (thus, show all lines)
        int run_filter_count = rows.Count(x => x.enabled || x.dimmed) ;
        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            bool any_non_apply_to_existing_lines_filters = false; // 1.0.69 added "apply to existing filters"
            // pass 1: filters that do NOT "apply to existing lines" decide whether the line shows at all
            for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx) {
                var row = rows[filter_idx];
                if ((row.enabled || row.dimmed) && !row.apply_to_existing_lines) {
                    matches[filter_idx] = row.line_matches.Contains(line_idx);
                    any_non_apply_to_existing_lines_filters = true;
                } else
                    matches[filter_idx] = false;
                if (matches[filter_idx])
                    any_match = true;
            }
            if (!any_non_apply_to_existing_lines_filters)
                // in this case - all filters apply to existing lines - thus, by default, we show all the lines
                any_match = true;

            // 1.0.69 "apply to existing filters" is applied afterwards
            // pass 2: "apply to existing lines" filters can re-color the line (font-only rows)
            // or filter it out entirely (non-font-only rows that don't match)
            font_info existing_filter_font = null;
            if ( any_match)
                for (int filter_idx = 0; filter_idx < matches.Length && any_match; ++filter_idx) {
                    var row = rows[filter_idx];
                    if ((row.enabled || row.dimmed) && row.apply_to_existing_lines) {
                        bool is_font_only = row.has_font_info;
                        if (row.line_matches.Contains(line_idx)) {
                            if (existing_filter_font == null && is_font_only) {
                                // in this case, use the font from "apply to existing filters" - only if the user has specifically set it
                                existing_filter_font = row.get_match(line_idx).font;
                                matches[filter_idx] = true;
                            }
                        } else if (!is_font_only)
                            // we're filtering this line out
                            any_match = false;
                    }
                }

            if (any_match) {
                font_info font = existing_filter_font ?? font_info.default_font_copy;
                // prefer an enabled filter's font; fall back to a dimmed one
                int enabled_idx = -1;
                for (int filter_idx = 0; filter_idx < matches.Length && enabled_idx < 0; ++filter_idx)
                    if (matches[filter_idx] && rows[filter_idx].enabled)
                        enabled_idx = filter_idx;
                int used_idx = -1;
                if (enabled_idx < 0)
                    for (int filter_idx = 0; filter_idx < matches.Length && used_idx < 0; ++filter_idx)
                        if (matches[filter_idx] && rows[filter_idx].dimmed)
                            used_idx = filter_idx;
                if (enabled_idx >= 0 || used_idx >= 0) {
                    if (enabled_idx >= 0) {
                        // 1.3.29g+ apply and merge all enabled filters
                        for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx)
                            if (matches[filter_idx] && rows[filter_idx].enabled)
                                font.merge(rows[filter_idx].get_match(line_idx).font);
                    } else
                        font.merge( rows[used_idx].get_match(line_idx).font);
                }
                // snapshot the BitArray - 'matches' is reused for the next line
                new_matches.Add( new_match(new BitArray(matches), new_log.line_at(line_idx), line_idx, font ));
                continue;
            }

            if (run_filter_count == 0)
                // no filter is running at all -> show every line with the default font
                new_matches.Add( new_match(new BitArray(0), new_log.line_at(line_idx), line_idx, font_info.default_font ));
        }

        bool replace = false;
        lock(this) if (force_recompute_matches_) { replace = true; force_recompute_matches_ = false; }
        if (new_matches.Count > 0) {
            if (replace)
                matches_.set_range(new_matches);
            else
                matches_.add_range(new_matches);
            lock (this) last_change_ = DateTime.Now;
        }
        apply_additions(old_match_count, new_log, rows);

        if (new_matches.Count > app.inst.no_ui.min_filter_capacity) {
            // NOTE(review): explicit GC.Collect in production code - presumably deliberate to
            // reclaim memory after a large recompute; TODO confirm it's still needed
            logger.Debug("[memory] GC.collect - from filter " + name );
            GC.Collect();
        }
    }

    bool is_up_to_date = new_log.up_to_date;
    lock (this) is_up_to_date_ = is_up_to_date;
}
// Schedules a (re)computation of this filter's matches against the given log
// on the compute-matches thread.
public void compute_matches(log_reader log) {
    Debug.Assert(log != null);
    lock (this) {
        // stay up-to-date only if we're still computing for the same log
        is_up_to_date_ = is_up_to_date_ && new_log_ == log;
        new_log_ = log;
    }
    start_compute_matches_thread();
}
// Attaches a new log reader to this view, disposing the previous one (if any)
// and resetting the view's per-log state.
public void set_log(log_reader log) {
    Debug.Assert(log != null);
    if (log_ == log)
        return; // same log - nothing to do

    bool had_log = log_ != null;
    if (log_ != null)
        log_.Dispose();

    log_ = log;
    log_.tab_name = name;
    log_.on_new_lines += filter_.on_new_reader_lines;
    last_item_count_while_current_view_ = 0;
    visible_columns_refreshed_ = -1;
    // only clear the view when we're replacing an existing log (not on first assignment)
    if (had_log)
        clear();
    logger.Debug("[view] new log for " + name + " - " + log.log_name);
    update_x_of_y();
}
// called when this log view is not used anymore (like, when it's removed from its tab page)
// NOTE: teardown order matters - detach UI callbacks and stop the timer first,
// so nothing fires against already-disposed state.
// NOTE(review): 'new' hides the base Dispose rather than overriding it - callers going through
// the base type won't reach this method; presumably intentional - TODO confirm
public new void Dispose() {
    // stop listening to parent-pane resizes
    lv_parent.description_pane().on_internal_resize -= on_description_pane_resized;
    // stop the UI refresh timer
    refreshUI.Enabled = false;
    snooper_.Dispose();
    if (log_ != null) {
        log_.Dispose();
        log_ = null;
    }
    model_.Dispose();
    filter_.Dispose();
}
// Attaches a new log reader to this view: disposes the previous log, resets the
// snooper/render caches and per-log view state, then reloads the column formatter.
public void set_log(log_reader log) {
    Debug.Assert(log != null);
    if (log_ == log)
        return; // already showing this log

    bool had_previous = log_ != null;
    if (log_ != null)
        log_.Dispose();
    snooper_.on_new_log();
    render_.clear_format_cache("new log");

    log_ = log;
    log_.tab_name = name;
    log_.on_new_lines += filter_.on_new_reader_lines;
    last_item_count_while_current_view_ = 0;
    available_columns_refreshed_ = -1;
    use_previous_available_columns_ = false;
    // only clear the view when replacing an existing log (not on first assignment)
    if (had_previous)
        clear();
    logger.Debug("[view] new log for " + name + " - " + log.log_name);
    update_x_of_y();
    reload_column_formatter();
}
// Applies the filter rows' "additions" (grayed-out context lines shown around matched lines)
// for all matches appended since old_match_count.
//
// FIXME note: we should normally care about the last match before old_match_count as well, to see maybe it still matches some "addition" lines
// but we ignore that for now
//
// when implementing the above, make sure to find the last matched line, not an existing addition
private void apply_additions(int old_match_count, log_reader log, List<filter_row> rows) {
    // optimize for when no additions
    bool has_additions = false;
    foreach (filter_row row in rows) {
        if (row.additions.Count > 0) {
            has_additions = true;
            break;
        }
    }
    if (!has_additions) {
        return;
    }

    // line index -> foreground color for the addition line at that index
    Dictionary<int, Color> additions = new Dictionary<int, Color>();
    int new_match_count = matches_.count;
    for (int match_idx = old_match_count; match_idx < new_match_count; ++match_idx) {
        int line_idx = matches_.match_at(match_idx).line_idx;
        var match = match_at(match_idx);
        // use the first filter that matched this line - its additions are the ones we apply
        int matched_filter = -1;
        for (int filter_idx = 0; filter_idx < match.matches.Length && matched_filter < 0; ++filter_idx) {
            if (match.matches[filter_idx]) {
                matched_filter = filter_idx;
            }
        }
        if (matched_filter < 0) {
            continue;
        }

        Color gray_fg = util.grayer_color(rows[matched_filter].get_match(line_idx).font.fg);
        foreach (var addition in rows[matched_filter].additions) {
            switch (addition.type) {
            case addition.number_type.lines:
                for (int i = 0; i < addition.number; ++i) {
                    int add_line_idx = line_idx + (addition.add == addition.add_type.after ? i : -i);
                    // FIX: guard against duplicate keys - additions from nearby matched lines can
                    // overlap, and Dictionary.Add throws ArgumentException on an existing key
                    // (the millisecs branch below already guards the same way)
                    if (add_line_idx >= 0 && add_line_idx < log.line_count && !additions.ContainsKey(add_line_idx)) {
                        additions.Add(add_line_idx, gray_fg);
                    }
                }
                break;
            case addition.number_type.millisecs:
                // walk away from the matched line while its timestamp stays within the window
                DateTime start = util.str_to_time(log.line_at(line_idx).part(info_type.time));
                for (int i = line_idx; i >= 0 && i < log.line_count;) {
                    i = i + (addition.add == addition.add_type.after ? 1 : -1);
                    if (i >= 0 && i < log.line_count) {
                        DateTime now = util.str_to_time(log.line_at(i).part(info_type.time));
                        int diff = (int)((now - start).TotalMilliseconds);
                        bool ok = (addition.add == addition.add_type.after && diff <= addition.number)
                               || (addition.add == addition.add_type.before && -diff <= addition.number);
                        if (ok && !additions.ContainsKey(i)) {
                            additions.Add(i, gray_fg);
                        } else {
                            break;
                        }
                    }
                }
                break;
            default:
                Debug.Assert(false);
                break;
            }
        }
    }

    matches_.prepare_add(additions.Count);
    foreach (var add_idx in additions) {
        add_addition_line(add_idx.Key, add_idx.Value, log);
    }
}
// Runs on the compute-matches thread: recomputes this filter's matches against new_log,
// appending matches for newly read lines, or recomputing everything on a new/force-reloaded log.
// NOTE(review): the lock/flag ordering here is intricate - code left byte-identical, comments only.
private void compute_matches_impl(log_reader new_log, log_reader old_log) {
    Debug.Assert(new_log != null);
    if (app.inst.no_ui.read_full_log_first) {
        // 1.0.76d+ - wait until log has fully loaded - in the hopes of using less memory
        if (new_log == old_log) {
            bool at_least_once;
            lock (this) at_least_once = new_log_fully_read_at_least_once_;
            if (!at_least_once) {
                // parser hasn't finished the first full read yet - postpone computing matches
                if (!new_log.parser_up_to_date) {
                    return;
                }
                lock (this) new_log_fully_read_at_least_once_ = true;
            }
        }
    }

    int old_line_count = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        bool changed_log = new_log != old_log && old_log_ == null;
        if (changed_log || old_log == new_log) {
            logger.Info((new_log != old_log ? "[filter] new log " : "[filter] forced refresh of ") + new_log.tab_name + " / " + new_log.log_name);
        }
        // new or force-reloaded log -> only force a recompute if we actually have old matches
        lock (this)
            if (matches_.count > 0) {
                force_recompute_matches_ = true;
            }
    }
    lock (this)
        if (force_recompute_matches_) {
            old_line_count = 0;
        }
    bool has_new_lines = (old_line_count != new_log.line_count);

    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this) rows = rows_;
    if (old_line_count == 0) {
        foreach (filter_row row in rows) {
            row.refresh();
        }
    }
    foreach (filter_row row in rows) {
        row.compute_line_matches(new_log);
    }

    if (has_new_lines) {
        bool is_full_log = row_count < 1;
        int expected_capacity = is_full_log ? (new_log.line_count - old_line_count) : (new_log.line_count - old_line_count) / 5;
        // the filter matches
        memory_optimized_list<match> new_matches = new memory_optimized_list<match>() { min_capacity = expected_capacity, name = "temp_m " + name, increase_percentage = .7 };
        // from old_lines to log.line_count -> these need recomputing
        int old_match_count = matches_.count;
        // reused per line: which filter rows match the current line
        BitArray matches = new BitArray(rows.Count);
        // handle the case where all the filters are disabled (thus, show all lines)
        int run_filter_count = rows.Count(x => x.enabled || x.dimmed);
        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            bool any_non_apply_to_existing_lines_filters = false; // 1.0.69 added "apply to existing filters"
            // pass 1: filters that do NOT "apply to existing lines" decide whether the line shows at all
            for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx) {
                var row = rows[filter_idx];
                if ((row.enabled || row.dimmed) && !row.apply_to_existing_lines) {
                    matches[filter_idx] = row.line_matches.Contains(line_idx);
                    any_non_apply_to_existing_lines_filters = true;
                } else {
                    matches[filter_idx] = false;
                }
                if (matches[filter_idx]) {
                    any_match = true;
                }
            }
            if (!any_non_apply_to_existing_lines_filters) {
                // in this case - all filters apply to existing lines - thus, by default, we show all the lines
                any_match = true;
            }

            // 1.0.69 "apply to existing filters" is applied afterwards
            // pass 2: "apply to existing lines" filters can re-color the line (font-only rows)
            // or filter it out entirely (non-font-only rows that don't match)
            font_info existing_filter_font = null;
            if (any_match) {
                for (int filter_idx = 0; filter_idx < matches.Length && any_match; ++filter_idx) {
                    var row = rows[filter_idx];
                    if ((row.enabled || row.dimmed) && row.apply_to_existing_lines) {
                        bool is_font_only = row.has_font_info;
                        if (row.line_matches.Contains(line_idx)) {
                            if (existing_filter_font == null && is_font_only) {
                                // in this case, use the font from "apply to existing filters" - only if the user has specifically set it
                                existing_filter_font = row.get_match(line_idx).font;
                                matches[filter_idx] = true;
                            }
                        } else if (!is_font_only) {
                            // we're filtering this line out
                            any_match = false;
                        }
                    }
                }
            }

            if (any_match) {
                font_info font = (existing_filter_font ?? font_info.default_font).copy();
                // prefer an enabled filter's font; fall back to a dimmed one
                int enabled_idx = -1;
                for (int filter_idx = 0; filter_idx < matches.Length && enabled_idx < 0; ++filter_idx) {
                    if (matches[filter_idx] && rows[filter_idx].enabled) {
                        enabled_idx = filter_idx;
                    }
                }
                int used_idx = -1;
                if (enabled_idx < 0) {
                    for (int filter_idx = 0; filter_idx < matches.Length && used_idx < 0; ++filter_idx) {
                        if (matches[filter_idx] && rows[filter_idx].dimmed) {
                            used_idx = filter_idx;
                        }
                    }
                }
                if (enabled_idx >= 0 || used_idx >= 0) {
                    if (enabled_idx >= 0) {
                        // 1.3.29g+ apply and merge all enabled filters
                        for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx) {
                            if (matches[filter_idx] && rows[filter_idx].enabled) {
                                font.merge(rows[filter_idx].get_match(line_idx).font);
                            }
                        }
                    } else {
                        font.merge(rows[used_idx].get_match(line_idx).font);
                    }
                }
                // snapshot the BitArray - 'matches' is reused for the next line
                new_matches.Add(new_match(new BitArray(matches), new_log.line_at(line_idx), line_idx, font));
                continue;
            }

            if (run_filter_count == 0) {
                // no filter is running at all -> show every line with the default font
                new_matches.Add(new_match(new BitArray(0), new_log.line_at(line_idx), line_idx, font_info.default_font));
            }
        }

        bool replace = false;
        lock (this)
            if (force_recompute_matches_) {
                replace = true;
                force_recompute_matches_ = false;
            }
        if (new_matches.Count > 0) {
            if (replace) {
                matches_.set_range(new_matches);
            } else {
                matches_.add_range(new_matches);
            }
            lock (this) last_change_ = DateTime.Now;
        }
        apply_additions(old_match_count, new_log, rows);

        if (new_matches.Count > app.inst.no_ui.min_filter_capacity) {
            // NOTE(review): explicit GC.Collect in production code - presumably deliberate to
            // reclaim memory after a large recompute; TODO confirm it's still needed
            logger.Debug("[memory] GC.collect - from filter " + name);
            GC.Collect();
        }
    }

    bool is_up_to_date = new_log.up_to_date;
    lock (this) is_up_to_date_ = is_up_to_date;
}
// called when this log view is not used anymore (like, when it's removed from its tab page)
// NOTE(review): 'new' hides the base Dispose rather than overriding it - callers going through
// the base type won't reach this method; presumably intentional - TODO confirm
public new void Dispose() {
    // release the log reader first, then the model and filter that consume it
    if (log_ != null) {
        log_.Dispose();
        log_ = null;
    }
    model_.Dispose();
    filter_.Dispose();
}
// Runs on the compute-matches thread: recomputes this filter's matches against new_log,
// appending matches for newly read lines, or recomputing everything on a new/force-reloaded log.
private void compute_matches_impl(log_reader new_log, log_reader old_log) {
    Debug.Assert(new_log != null);
    if (app.inst.no_ui.read_full_log_first) {
        // 1.0.76d+ - wait until log has fully loaded - in the hopes of using less memory
        if (new_log == old_log) {
            bool at_least_once;
            lock (this) at_least_once = new_log_fully_read_at_least_once_;
            if (!at_least_once) {
                // parser hasn't finished the first full read yet - postpone computing matches
                if (!new_log.parser_up_to_date) {
                    return;
                }
                lock (this) new_log_fully_read_at_least_once_ = true;
            }
        }
    }

    int old_line_count = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        bool changed_log = new_log != old_log && old_log_ == null;
        if (changed_log || old_log == new_log) {
            logger.Info((new_log != old_log ? "[filter] new log " : "[filter] forced refresh of ") + new_log.tab_name + " / " + new_log.log_name);
        }
        // new or force-reloaded log -> only force a recompute if we actually have old matches
        lock (this)
            if (matches_.count > 0) {
                force_recompute_matches_ = true;
            }
    }
    lock (this)
        if (force_recompute_matches_) {
            old_line_count = 0;
        }
    bool has_new_lines = (old_line_count != new_log.line_count);

    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this) rows = rows_;
    if (old_line_count == 0) {
        foreach (filter_row row in rows) {
            row.refresh();
        }
    }
    foreach (filter_row row in rows) {
        row.compute_line_matches(new_log);
    }

    if (has_new_lines) {
        status?.set_status("Computing filters... This might take a moment", status_ctrl.status_type.msg, 10000);
        bool is_full_log = row_count < 1;
        int expected_capacity = is_full_log ? (new_log.line_count - old_line_count) : (new_log.line_count - old_line_count) / 5;
        // the filter matches
        memory_optimized_list<match> new_matches = new memory_optimized_list<match> { min_capacity = expected_capacity, name = "temp_m " + name, increase_percentage = .7 };
        // from old_lines to log.line_count -> these need recomputing
        int old_match_count = matches_.count;
        // reused per line: which filter rows match the current line
        bool[] row_matches_filter = new bool[rows.Count];
        // handle the case where all the filters are disabled (thus, show all lines)
        int run_filter_count = rows.Count(x => x.enabled);
        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            // Go through all filters
            for (int filter_idx = 0; filter_idx < row_matches_filter.Length; ++filter_idx) {
                var row = rows[filter_idx];
                // FIX: always (re)assign the entry - previously it was only ever set to true and
                // never cleared between lines, so a filter that matched one line was reported as
                // matching every subsequent line too (wrong match bits and wrong merged fonts)
                row_matches_filter[filter_idx] = row.enabled && row.line_matches.Contains(line_idx);
                if (row_matches_filter[filter_idx]) {
                    any_match = true;
                }
            }
            if (any_match) {
                font_info font = font_info.default_font.copy();
                // 1.3.29g+ apply and merge all enabled filters
                for (int filter_idx = 0; filter_idx < row_matches_filter.Length; ++filter_idx) {
                    if (row_matches_filter[filter_idx]) {
                        font.merge(rows[filter_idx].get_match(line_idx).font);
                    }
                }
                // snapshot the bool array - it's reused for the next line
                new_matches.Add(new_match(new BitArray(row_matches_filter), new_log.line_at(line_idx), line_idx, font));
                continue;
            }
            if (run_filter_count == 0) {
                // no filter is running at all -> show every line with the default font
                new_matches.Add(new_match(new BitArray(0), new_log.line_at(line_idx), line_idx, font_info.default_font));
            }
        }

        bool replace = false;
        lock (this)
            if (force_recompute_matches_) {
                replace = true;
                force_recompute_matches_ = false;
            }
        if (new_matches.Count > 0) {
            if (replace) {
                matches_.set_range(new_matches);
            } else {
                matches_.add_range(new_matches);
            }
            lock (this) last_change_ = DateTime.Now;
        }
        apply_additions(old_match_count, new_log, rows);

        if (new_matches.Count > app.inst.no_ui.min_filter_capacity) {
            // NOTE(review): explicit GC.Collect in production code - presumably deliberate to
            // reclaim memory after a large recompute; TODO confirm it's still needed
            logger.Debug("[memory] GC.collect - from filter " + name);
            GC.Collect();
        }
    }

    bool is_up_to_date = new_log.up_to_date;
    lock (this) is_up_to_date_ = is_up_to_date;
}
// once we've been forced to reload - we should return true once per each reader
public bool forced_reload(log_reader reader) {
    lock (this) {
        // already answered 'true' for this reader once - every later call gets 'false'
        if (forced_reload_.Contains(reader))
            return false;

        // first time this reader asks since the forced reload - record it and say yes
        forced_reload_.Add(reader);
        return true;
    }
}
// called when this log view is not used anymore (like, when it's removed from its tab page)
// Releases the log reader first, then the model and filter that consume it.
public void mark_as_not_used() {
    if (log_ != null) {
        log_.Dispose();
        log_ = null;
    }
    model_.Dispose();
    filter_.Dispose();
}