private void update_filter_thread() {
    // Filter worker thread: waits for change notifications and recomputes
    // sorted_line_indexes_ whenever view filtering or the quick filter is active.
    // wait until we have the quick filter set on the main thread
    while (quick_filter_ == null) { Thread.Sleep(50); }
    while (!disposed_) {
        // block until a settings change is signalled; loop again otherwise
        if (!change_event_.wait()) { continue; }
        // we never run anything on the full-log
        // NOTE(review): 'return' here ends this thread permanently for a full-log
        // view (not just this iteration) - confirm that is intended
        if (parent_.is_full_log) { return; }
        // snapshot the "now" settings under the lock
        bool filter_view;
        bool show_full_log;
        lock (this) {
            filter_view = filter_view_now_;
            show_full_log = show_full_log_now_;
        }
        // see what changed
        // 1.8.27+ if the quick filter has anything set, we need to run it
        bool quick_filter_matches_all = quick_filter_.matches_all();
        if (filter_view || !quick_filter_matches_all) {
            // the user toggled on filtering
            is_running_filter_ = true;
            run_filter(show_full_log);
            is_running_filter_ = false;
        }
        lock (this) {
            // publish the settings we just processed
            filter_view_ = filter_view_now_;
            show_full_log_ = show_full_log_now_;
            // 1.8.27 - the quick filter can still return something...
            if ((!filter_view_ || item_filter == null) && quick_filter_matches_all) {
                // here, user toggled off filtering - so, I'm either showing the view, or the full log
                sorted_line_indexes_ = null;
            }
            needs_ui_update_ = true;
        }
        // let parent know we've updated
        if (parent_.IsHandleCreated) { parent_.async_call(parent_.refresh); }
    }
}
private void run_filter(bool run_on_full_log) {
    // Rebuilds sorted_line_indexes_ by passing every item through the quick filter
    // and (if set) the item filter, over either the full log or the current view.
    // Snapshot the filter under the lock; the UI thread may swap it at any time.
    filter_func current_filter;
    lock (this)
        current_filter = this.item_filter;
    bool quick_matches_everything = quick_filter_.matches_all();
    // nothing restricts anything - leave the previous indexes untouched
    if (quick_matches_everything && current_filter == null)
        return;
    var result = new memory_optimized_list<int> { min_capacity = app.inst.no_ui.min_list_data_source_capacity };
    var source = run_on_full_log ? full_log_items : items_;
    int total = source.count;
    for (int row = 0; row < total; ++row) {
        var item = source.match_at(row) as match_item;
        if (run_on_full_log) {
            // at this point - we're run on the full log - however, if we find an item
            // that exists in current view, use that (so that we can reference the matches)
            var view_item = items_.binary_search(item.line_idx).Item1 as match_item;
            if (view_item != null)
                item = view_item;
        }
        // possible optimization:
        // if the new quick filter is a subset of the former filter, I should run it only on the former sorted_line_indexes
        if (!quick_matches_everything && !quick_filter_.matches(item))
            continue;
        if (current_filter != null && !current_filter(item, run_on_full_log))
            continue;
        if (item.line_idx >= 0)
            result.Add(item.line_idx);
    }
    lock (this)
        sorted_line_indexes_ = result;
}
private void run_filter(bool run_on_full_log) {
    // Recomputes sorted_line_indexes_: every line accepted by the item filter is kept.
    filter_func filter;
    lock (this)
        filter = this.item_filter;
    Debug.Assert(filter != null);
    if (filter == null) {
        return;
    }
    memory_optimized_list<int> accepted = new memory_optimized_list<int>() {
        min_capacity = app.inst.no_ui.min_list_data_source_capacity
    };
    var source = run_on_full_log ? full_log_items : items_;
    int n = source.count;
    for (int pos = 0; pos < n; ++pos) {
        match_item cur = source.match_at(pos) as match_item;
        if (run_on_full_log) {
            // at this point - we're run on the full log - however, if we find an item
            // that exists in current view, use that (so that we can reference the matches)
            var view_item = items_.binary_search(cur.line_idx).Item1 as match_item;
            if (view_item != null) {
                cur = view_item;
            }
        }
        if (!filter(cur, run_on_full_log)) {
            continue;
        }
        if (cur.line_idx >= 0) {
            accepted.Add(cur.line_idx);
        }
    }
    lock (this)
        sorted_line_indexes_ = accepted;
}
private void update_filter_thread() {
    // Worker loop: rebuilds the filtered line indexes each time the settings change.
    while (!disposed_) {
        // wait for a settings-change notification
        if (!change_event_.wait())
            continue;
        // snapshot the latest settings under the lock
        bool wants_filtering;
        bool wants_full_log;
        lock (this) {
            wants_filtering = filter_view_now_;
            wants_full_log = show_full_log_now_;
        }
        // see what changed
        if (wants_filtering) {
            // the user toggled on filtering
            is_running_filter_ = true;
            run_filter(wants_full_log);
            is_running_filter_ = false;
        }
        lock (this) {
            // publish the settings we just processed
            filter_view_ = filter_view_now_;
            show_full_log_ = show_full_log_now_;
            // here, user toggled off filtering - so, I'm either showing the view, or the full log
            if (!filter_view_)
                sorted_line_indexes_ = null;
            needs_ui_update_ = true;
        }
        // let parent know we've updated
        if (parent_.IsHandleCreated)
            parent_.async_call(parent_.refresh);
    }
}
private void run_filter(bool run_on_full_log) {
    // Rebuilds sorted_line_indexes_ by running the current item filter over either
    // the full log or the current view.
    filter_func filter_snapshot;
    lock (this)
        filter_snapshot = this.item_filter;
    Debug.Assert(filter_snapshot != null);
    if (filter_snapshot == null)
        return;
    var kept_lines = new memory_optimized_list<int> { min_capacity = app.inst.no_ui.min_list_data_source_capacity };
    var source = run_on_full_log ? full_log_items : items_;
    int total = source.count;
    for (int idx = 0; idx < total; ++idx) {
        var item = source.match_at(idx) as match_item;
        if (run_on_full_log) {
            // when running on the full log, prefer the equivalent item from the
            // current view (so that we can reference the matches)
            var from_view = items_.binary_search(item.line_idx).Item1 as match_item;
            if (from_view != null)
                item = from_view;
        }
        if (filter_snapshot(item, run_on_full_log) && item.line_idx >= 0)
            kept_lines.Add(item.line_idx);
    }
    lock (this)
        sorted_line_indexes_ = kept_lines;
}
private void update_filter_thread() {
    // Worker loop: recomputes the filtered view whenever a change is signalled.
    while (!disposed_) {
        if (!change_event_.wait())
            continue;
        bool do_filter;
        bool full_log;
        // read the current settings atomically
        lock (this) {
            do_filter = filter_view_now_;
            full_log = show_full_log_now_;
        }
        // the user toggled on filtering
        if (do_filter)
            run_filter(full_log);
        lock (this) {
            filter_view_ = filter_view_now_;
            show_full_log_ = show_full_log_now_;
            // user toggled off filtering - showing either the plain view or the full log
            if (!filter_view_)
                sorted_line_indexes_ = null;
            needs_ui_update_ = true;
        }
        // notify the owner control that fresh results are available
        if (parent_.IsHandleCreated)
            parent_.async_call(parent_.refresh);
    }
}
private void run_filter(bool run_on_full_log) {
    // Rebuilds sorted_line_indexes_ from the quick filter plus the optional item filter.
    filter_func filter_snapshot;
    lock (this)
        filter_snapshot = this.item_filter;
    bool quick_is_passthrough = quick_filter_.matches_all();
    // neither filter restricts anything - keep the previous indexes untouched
    if (filter_snapshot == null && quick_is_passthrough)
        return;
    var kept = new memory_optimized_list<int> { min_capacity = app.inst.no_ui.min_list_data_source_capacity };
    var source = run_on_full_log ? full_log_items : items_;
    int total = source.count;
    // possible optimization:
    // if the new quick filter is a subset of the former filter, I should run it only on the former sorted_line_indexes
    for (int pos = 0; pos < total; ++pos) {
        var item = source.match_at(pos) as match_item;
        if (run_on_full_log) {
            // we're run on the full log - however, if we find an item that exists
            // in current view, use that (so that we can reference the matches)
            var view_copy = items_.binary_search(item.line_idx).Item1 as match_item;
            if (view_copy != null)
                item = view_copy;
        }
        // short-circuit order matters: the item filter only runs when the quick filter passes
        bool passes = (quick_is_passthrough || quick_filter_.matches(item))
                   && (filter_snapshot == null || filter_snapshot(item, run_on_full_log));
        if (passes && item.line_idx >= 0)
            kept.Add(item.line_idx);
    }
    lock (this)
        sorted_line_indexes_ = kept;
}
private void update_filter_thread() {
    // Filter worker thread; recomputes sorted_line_indexes_ on every settings change.
    // wait until we have the quick filter set on the main thread
    while ( quick_filter_ == null)
        Thread.Sleep(50);
    while (!disposed_) {
        if (!change_event_.wait())
            continue;
        // we never run anything on the full-log
        // NOTE(review): 'return' ends this thread for good on a full-log view - confirm intended
        if (parent_.is_full_log)
            return;
        bool filter_view;
        bool show_full_log;
        // snapshot current settings under the lock
        lock (this) {
            filter_view = filter_view_now_;
            show_full_log = show_full_log_now_;
        }
        // see what changed
        // 1.8.27+ if the quick filter has anything set, we need to run it
        bool quick_filter_matches_all = quick_filter_.matches_all();
        if (filter_view || !quick_filter_matches_all ) {
            // the user toggled on filtering
            is_running_filter_ = true;
            run_filter(show_full_log);
            is_running_filter_ = false;
        }
        lock (this) {
            // publish the settings we just processed
            filter_view_ = filter_view_now_;
            show_full_log_ = show_full_log_now_;
            // 1.8.27 - the quick filter can still return something...
            if ((!filter_view_ || item_filter == null) && quick_filter_matches_all)
                // here, user toggled off filtering - so, I'm either showing the view, or the full log
                sorted_line_indexes_ = null;
            needs_ui_update_ = true;
        }
        // let parent know we've updated
        if ( parent_.IsHandleCreated)
            parent_.async_call(parent_.refresh);
    }
}
private void compute_matches_impl(log_reader new_log, log_reader old_log) {
    // Recomputes which lines of new_log match the current filter rows and appends the
    // results to matches_. old_log is the reader from the previous pass (may equal
    // new_log). Shared state is read/written under lock(this); presumably this runs on
    // a background filter thread - confirm against the caller.
    Debug.Assert(new_log != null);
    if ( app.inst.no_ui.read_full_log_first)
        // 1.0.76d+ - wait until log has fully loaded - in the hopes of using less memory
        if (new_log == old_log) {
            bool at_least_once;
            lock (this) at_least_once = new_log_fully_read_at_least_once_;
            if (!at_least_once) {
                // postpone matching until the parser has consumed the whole file once
                if (!new_log.parser_up_to_date)
                    return;
                lock (this) new_log_fully_read_at_least_once_ = true;
            }
        }
    int old_line_count = new_log.line_count;
    new_log.refresh();
    if (new_log != old_log || new_log.forced_reload) {
        bool changed_log = new_log != old_log && old_log_ == null;
        if ( changed_log || old_log == new_log)
            logger.Info((new_log != old_log ? "[filter] new log " : "[filter] forced refresh of " ) + new_log.tab_name + " / " + new_log.log_name);
        // a new or force-reloaded log invalidates everything computed so far
        lock (this) force_recompute_matches_ = true;
    }
    lock(this)
        if (force_recompute_matches_)
            // treat every line as new so the loop below re-evaluates the full log
            old_line_count = 0;
    bool has_new_lines = (old_line_count != new_log.line_count);
    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this) rows = rows_;
    if ( old_line_count == 0)
        foreach ( filter_row row in rows)
            row.refresh();
    foreach ( filter_row row in rows)
        row.compute_line_matches(new_log);
    if (has_new_lines) {
        bool is_full_log = row_count < 1;
        // capacity guess: a filtered view typically keeps roughly 1/5 of new lines
        int expected_capacity = is_full_log ? (new_log.line_count - old_line_count) : (new_log.line_count - old_line_count) / 5;
        // the filter matches
        memory_optimized_list<match> new_matches = new memory_optimized_list<match>() { min_capacity = expected_capacity, name = "temp_m " + name, increase_percentage = .7 };
        // from old_lines to log.line_count -> these need recomputing
        int old_match_count = matches_.count;
        BitArray matches = new BitArray(rows.Count);
        // handle the case where all the filters are disabled (thus, show all lines)
        int run_filter_count = rows.Count(x => x.enabled || x.dimmed) ;
        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            bool any_non_apply_to_existing_lines_filters = false;
            // 1.0.69 added "apply to existing filters"
            // pass 1: evaluate only the rows NOT marked apply-to-existing-lines
            for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx) {
                var row = rows[filter_idx];
                if ((row.enabled || row.dimmed) && !row.apply_to_existing_lines) {
                    matches[filter_idx] = row.line_matches.Contains(line_idx);
                    any_non_apply_to_existing_lines_filters = true;
                } else
                    matches[filter_idx] = false;
                if (matches[filter_idx])
                    any_match = true;
            }
            if (!any_non_apply_to_existing_lines_filters)
                // in this case - all filters apply to existing lines - thus, by default, we show all the lines
                any_match = true;
            // 1.0.69 "apply to existing filters" is applied afterwards
            // pass 2: apply-to-existing-lines rows may only restyle (font) or exclude a line
            font_info existing_filter_font = null;
            if ( any_match)
                for (int filter_idx = 0; filter_idx < matches.Length && any_match; ++filter_idx) {
                    var row = rows[filter_idx];
                    if ((row.enabled || row.dimmed) && row.apply_to_existing_lines) {
                        bool is_font_only = row.has_font_info;
                        if (row.line_matches.Contains(line_idx)) {
                            if (existing_filter_font == null && is_font_only) {
                                // in this case, use the font from "apply to existing filters" - only if the user has specifically set it
                                existing_filter_font = row.get_match(line_idx).font;
                                matches[filter_idx] = true;
                            }
                        } else if (!is_font_only)
                            // we're filtering this line out
                            any_match = false;
                    }
                }
            if (any_match) {
                font_info font = existing_filter_font ?? font_info.default_font_copy;
                // pick the first matching enabled row; fall back to the first dimmed one
                int enabled_idx = -1;
                for (int filter_idx = 0; filter_idx < matches.Length && enabled_idx < 0; ++filter_idx)
                    if (matches[filter_idx] && rows[filter_idx].enabled)
                        enabled_idx = filter_idx;
                int used_idx = -1;
                if (enabled_idx < 0)
                    for (int filter_idx = 0; filter_idx < matches.Length && used_idx < 0; ++filter_idx)
                        if (matches[filter_idx] && rows[filter_idx].dimmed)
                            used_idx = filter_idx;
                if (enabled_idx >= 0 || used_idx >= 0) {
                    if (enabled_idx >= 0) {
                        // 1.3.29g+ apply and merge all enabled filters
                        for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx)
                            if (matches[filter_idx] && rows[filter_idx].enabled)
                                font.merge(rows[filter_idx].get_match(line_idx).font);
                    } else
                        font.merge( rows[used_idx].get_match(line_idx).font);
                }
                new_matches.Add( new_match(new BitArray(matches), new_log.line_at(line_idx), line_idx, font ));
                continue;
            }
            if (run_filter_count == 0)
                // no row is enabled or dimmed - every line is shown, with the default font
                new_matches.Add( new_match(new BitArray(0), new_log.line_at(line_idx), line_idx, font_info.default_font ));
        }
        bool replace = false;
        lock(this)
            if (force_recompute_matches_) {
                replace = true;
                force_recompute_matches_ = false;
            }
        if (new_matches.Count > 0) {
            // replace wholesale after a forced recompute; otherwise append
            if (replace)
                matches_.set_range(new_matches);
            else
                matches_.add_range(new_matches);
            lock (this) last_change_ = DateTime.Now;
        }
        apply_additions(old_match_count, new_log, rows);
        if (new_matches.Count > app.inst.no_ui.min_filter_capacity) {
            // NOTE(review): forced GC after large batches is a deliberate memory trade-off here
            logger.Debug("[memory] GC.collect - from filter " + name );
            GC.Collect();
        }
    }
    bool is_up_to_date = new_log.up_to_date;
    lock (this) is_up_to_date_ = is_up_to_date;
}
public void add_range(memory_optimized_list<match> new_matches) {
    // Appends new_matches to our list. When we hold nothing yet and the incoming
    // batch is large, adopt the incoming list wholesale instead of copying it.
    lock (this) {
        // optimization - reuse this memory
        bool adopt_list = matches_.Count == 0 && new_matches.Count > app.inst.no_ui.min_filter_capacity;
        if (adopt_list) {
            new_matches.name = matches_.name;
            matches_ = new_matches;
        } else
            matches_.AddRange(new_matches);
    }
}
public void set_range(memory_optimized_list<match> new_matches) {
    // Replaces the entire contents with new_matches, atomically relative to lock(this):
    // drop everything, then add the new batch (add_range may simply adopt the list).
    lock (this) {
        clear();
        add_range(new_matches);
    }
}