// Re-runs the current filter (+ quick filter) over either the full log or the
// current view, rebuilding sorted_line_indexes_.
private void run_filter(bool run_on_full_log) {
    // snapshot the filter under the lock, so a concurrent change can't tear it mid-run
    filter_func item_filter;
    lock (this)
        item_filter = this.item_filter;

    bool quick_all = quick_filter_.matches_all();
    // nothing to filter by - leave the existing sorted_line_indexes_ untouched
    if (item_filter == null && quick_all) {
        return;
    }

    var kept = new memory_optimized_list<int>() { min_capacity = app.inst.no_ui.min_list_data_source_capacity };
    var items = run_on_full_log ? full_log_items : items_;
    int count = items.count;
    for (int idx = 0; idx < count; ++idx) {
        var item = items.match_at(idx) as match_item;
        if (run_on_full_log) {
            // at this point - we're run on the full log - however, if we find an item that exists
            // in current view, use that (so that we can reference the matches)
            var in_cur_view = items_.binary_search(item.line_idx).Item1 as match_item;
            if (in_cur_view != null) {
                item = in_cur_view;
            }
        }
        // possible optimization:
        // if the new quick filter is a subset of the former filter, I should run it only on the former sorted_line_indexes
        bool passes = (quick_all || quick_filter_.matches(item))
                      && (item_filter == null || item_filter(item, run_on_full_log));
        if (passes && item.line_idx >= 0) {
            kept.Add(item.line_idx);
        }
    }

    lock (this)
        sorted_line_indexes_ = kept;
}
// Re-runs item_filter over either the full log or the current view and
// publishes the resulting line indexes into sorted_line_indexes_.
private void run_filter(bool run_on_full_log) {
    // take a consistent snapshot of the filter function under the lock
    filter_func item_filter;
    lock (this)
        item_filter = this.item_filter;
    Debug.Assert(item_filter != null);
    if (item_filter == null) {
        return;
    }

    var line_indexes = new memory_optimized_list<int>() { min_capacity = app.inst.no_ui.min_list_data_source_capacity };
    var items = run_on_full_log ? full_log_items : items_;
    for (int idx = 0, count = items.count; idx < count; ++idx) {
        var item = items.match_at(idx) as match_item;
        if (run_on_full_log) {
            // at this point - we're run on the full log - however, if we find an item that exists in current view, use that
            // (so that we can reference the matches)
            var in_cur_view = items_.binary_search(item.line_idx).Item1 as match_item;
            if (in_cur_view != null) {
                item = in_cur_view;
            }
        }
        if (item_filter(item, run_on_full_log) && item.line_idx >= 0) {
            line_indexes.Add(item.line_idx);
        }
    }

    lock (this)
        sorted_line_indexes_ = line_indexes;
}
// Rebuilds sorted_line_indexes_ by applying item_filter to every item of
// either the full log or the current view.
private void run_filter(bool run_on_full_log) {
    filter_func item_filter;
    // copy under lock - the field may be swapped by another thread
    lock (this)
        item_filter = this.item_filter;
    Debug.Assert(item_filter != null);
    if (item_filter == null)
        return;

    memory_optimized_list<int> result = new memory_optimized_list<int>() {
        min_capacity = app.inst.no_ui.min_list_data_source_capacity
    };
    var source = run_on_full_log ? full_log_items : items_;
    int total = source.count;
    int pos = 0;
    while (pos < total) {
        match_item cur = source.match_at(pos) as match_item;
        if (run_on_full_log) {
            // at this point - we're run on the full log - however, if we find an item that exists in current view, use that
            // (so that we can reference the matches)
            match_item in_view = items_.binary_search(cur.line_idx).Item1 as match_item;
            if (in_view != null)
                cur = in_view;
        }
        if (item_filter(cur, run_on_full_log))
            if (cur.line_idx >= 0)
                result.Add(cur.line_idx);
        ++pos;
    }

    lock (this)
        sorted_line_indexes_ = result;
}
// Re-runs the current filter (+ quick filter) over either the full log or the
// current view and rebuilds sorted_line_indexes_.
private void run_filter(bool run_on_full_log) {
    // snapshot the filter under the lock so a concurrent swap can't tear it mid-run
    filter_func item_filter;
    lock (this)
        item_filter = this.item_filter;
    bool quick_filter_matches_all = quick_filter_.matches_all();
    // nothing to filter by - keep the existing sorted_line_indexes_ untouched
    if (item_filter == null && quick_filter_matches_all)
        return;

    memory_optimized_list<int> line_indexes = new memory_optimized_list<int>() { min_capacity = app.inst.no_ui.min_list_data_source_capacity };
    var items = run_on_full_log ? full_log_items : items_;
    int count = items.count;
    for (int idx = 0; idx < count; ++idx) {
        match_item i = items.match_at(idx) as match_item;
        // FIX: guard the 'as' cast - a null (or non-match_item) entry previously
        // caused a NullReferenceException on i.line_idx / quick_filter_.matches(i)
        if (i == null)
            continue;
        if (run_on_full_log) {
            // at this point - we're run on the full log - however, if we find an item that exists in current view, use that
            // (so that we can reference the matches)
            var in_cur_view = items_.binary_search(i.line_idx).Item1 as match_item;
            if (in_cur_view != null)
                i = in_cur_view;
        }
        // possible optimization:
        // if the new quick filter is a subset of the former filter, I should run it only on the former sorted_line_indexes
        if (quick_filter_matches_all || quick_filter_.matches(i))
            if (item_filter == null || item_filter(i, run_on_full_log))
                if (i.line_idx >= 0)
                    line_indexes.Add(i.line_idx);
    }

    lock (this)
        sorted_line_indexes_ = line_indexes;
}
// Core (background-thread) recomputation of this filter's matches against new_log.
// Incrementally processes only the lines added since the last run, unless a
// forced/complete recompute was requested (force_recompute_matches_), in which
// case it starts from line 0 and replaces matches_ wholesale.
// NOTE(review): this method appears to be called off the UI thread - all shared
// fields are snapshot/written under lock (this); confirm the threading model with callers.
private void compute_matches_impl(log_reader new_log, log_reader old_log) {
    Debug.Assert(new_log != null);
    if ( app.inst.no_ui.read_full_log_first) // 1.0.76d+ - wait until log has fully loaded - in the hopes of using less memory
        if (new_log == old_log) {
            bool at_least_once;
            lock (this) at_least_once = new_log_fully_read_at_least_once_;
            if (!at_least_once) {
                // not fully parsed yet - bail out and try again on a later tick
                if (!new_log.parser_up_to_date) return;
                lock (this) new_log_fully_read_at_least_once_ = true;
            }
        }

    // remember how many lines we had before refreshing, so we know what's "new"
    int old_line_count = new_log.line_count;
    new_log.refresh();

    if (new_log != old_log || new_log.forced_reload) {
        bool changed_log = new_log != old_log && old_log_ == null;
        if ( changed_log || old_log == new_log)
            logger.Info((new_log != old_log ? "[filter] new log " : "[filter] forced refresh of " ) + new_log.tab_name + " / " + new_log.log_name);
        // a new log or a forced reload invalidates everything we computed so far
        lock (this) force_recompute_matches_ = true;
    }
    // a pending full recompute means: treat all lines as new
    lock(this) if (force_recompute_matches_) old_line_count = 0;
    bool has_new_lines = (old_line_count != new_log.line_count);

    // get a pointer to the rows_; in case it changes on the main thread, we don't care,
    // since next time we will have the new rows
    List<filter_row> rows;
    lock (this) rows = rows_;

    // full recompute -> reset each row's cached state first
    if ( old_line_count == 0)
        foreach ( filter_row row in rows) row.refresh();
    foreach ( filter_row row in rows) row.compute_line_matches(new_log);

    if (has_new_lines) {
        // heuristic capacity: the full log keeps every line; a filtered view is
        // presumed to keep roughly 1 in 5 - TODO confirm this ratio is intentional
        bool is_full_log = row_count < 1;
        int expected_capacity = is_full_log ? (new_log.line_count - old_line_count) : (new_log.line_count - old_line_count) / 5;
        // the filter matches
        memory_optimized_list<match> new_matches = new memory_optimized_list<match>() { min_capacity = expected_capacity, name = "temp_m " + name, increase_percentage = .7 };

        // from old_lines to log.line_count -> these need recomputing
        int old_match_count = matches_.count;
        // one bit per filter row: did row[i] match the current line?
        BitArray matches = new BitArray(rows.Count);

        // handle the case where all the filters are disabled (thus, show all lines)
        int run_filter_count = rows.Count(x => x.enabled || x.dimmed) ;

        for (int line_idx = old_line_count; line_idx < new_log.line_count; ++line_idx) {
            bool any_match = false;
            bool any_non_apply_to_existing_lines_filters = false;
            // 1.0.69 added "apply to existing filters"
            // pass 1: normal filters (those NOT marked "apply to existing lines")
            for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx) {
                var row = rows[filter_idx];
                if ((row.enabled || row.dimmed) && !row.apply_to_existing_lines) {
                    matches[filter_idx] = row.line_matches.Contains(line_idx);
                    any_non_apply_to_existing_lines_filters = true;
                } else
                    matches[filter_idx] = false;
                if (matches[filter_idx])
                    any_match = true;
            }
            if (!any_non_apply_to_existing_lines_filters)
                // in this case - all filters apply to existing lines - thus, by default, we show all the lines
                any_match = true;

            // 1.0.69 "apply to existing filters" is applied afterwards
            // pass 2: "apply to existing lines" filters can re-color (font-only) or
            // filter out a line that pass 1 accepted
            font_info existing_filter_font = null;
            if ( any_match)
                for (int filter_idx = 0; filter_idx < matches.Length && any_match; ++filter_idx) {
                    var row = rows[filter_idx];
                    if ((row.enabled || row.dimmed) && row.apply_to_existing_lines) {
                        bool is_font_only = row.has_font_info;
                        if (row.line_matches.Contains(line_idx)) {
                            if (existing_filter_font == null && is_font_only) {
                                // in this case, use the font from "apply to existing filters" - only if the user has specifically set it
                                existing_filter_font = row.get_match(line_idx).font;
                                matches[filter_idx] = true;
                            }
                        } else if (!is_font_only)
                            // we're filtering this line out
                            any_match = false;
                    }
                }

            if (any_match) {
                // pick the font: an "existing lines" font wins; otherwise start from the default
                font_info font = existing_filter_font ?? font_info.default_font_copy;
                // first matching *enabled* row, if any; otherwise first matching *dimmed* row
                int enabled_idx = -1;
                for (int filter_idx = 0; filter_idx < matches.Length && enabled_idx < 0; ++filter_idx)
                    if (matches[filter_idx] && rows[filter_idx].enabled) enabled_idx = filter_idx;
                int used_idx = -1;
                if (enabled_idx < 0)
                    for (int filter_idx = 0; filter_idx < matches.Length && used_idx < 0; ++filter_idx)
                        if (matches[filter_idx] && rows[filter_idx].dimmed) used_idx = filter_idx;
                if (enabled_idx >= 0 || used_idx >= 0) {
                    if (enabled_idx >= 0) {
                        // 1.3.29g+ apply and merge all enabled filters
                        for (int filter_idx = 0; filter_idx < matches.Length; ++filter_idx)
                            if (matches[filter_idx] && rows[filter_idx].enabled)
                                font.merge(rows[filter_idx].get_match(line_idx).font);
                    } else
                        font.merge( rows[used_idx].get_match(line_idx).font);
                }
                // note: copy the BitArray - 'matches' is reused for the next line
                new_matches.Add( new_match(new BitArray(matches), new_log.line_at(line_idx), line_idx, font ));
                continue;
            }

            // no filter ran at all -> every line is shown, with the default font
            if (run_filter_count == 0)
                new_matches.Add( new_match(new BitArray(0), new_log.line_at(line_idx), line_idx, font_info.default_font ));
        }

        // consume the "full recompute" request exactly once (set-and-clear under lock)
        bool replace = false;
        lock(this)
            if (force_recompute_matches_) {
                replace = true;
                force_recompute_matches_ = false;
            }

        if (new_matches.Count > 0) {
            if (replace)
                matches_.set_range(new_matches);
            else
                matches_.add_range(new_matches);
            lock (this) last_change_ = DateTime.Now;
        }

        apply_additions(old_match_count, new_log, rows);
        if (new_matches.Count > app.inst.no_ui.min_filter_capacity) {
            // large batch just got copied into matches_ - reclaim the temporaries now
            logger.Debug("[memory] GC.collect - from filter " + name );
            GC.Collect();
        }
    }

    bool is_up_to_date = new_log.up_to_date;
    lock (this) is_up_to_date_ = is_up_to_date;
}
// Reader loop for OutputDebugString (DBWIN) events: creates the shared-memory
// buffer plus the two hand-shake events, then pumps debug messages into events_
// until disposed_ is set. Runs on its own thread; errors are reported via errors_.
private void read_events_thread() {
    // the "Global\" kernel namespace captures output from all sessions (needs privileges)
    string prefix = use_global_ ? "Global\\" : "";
    try {
        // DBWIN protocol: 4096-byte buffer = 4-byte writer PID + up to 4092 bytes of message
        memory_file_ = MemoryMappedFile.CreateNew(prefix + "DBWIN_BUFFER", 4096L);
        bool created = false;
        buffer_ready_ = new EventWaitHandle(false, EventResetMode.AutoReset, prefix + "DBWIN_BUFFER_READY", out created);
        if (!created) {
            // event already exists - most likely another debugger/viewer owns DBWIN
            errors_.add("Can't create the DBWIN_BUFFER_READY event/" + use_global_);
        }
        if (created) {
            data_ready_ = new EventWaitHandle(false, EventResetMode.AutoReset, prefix + "DBWIN_DATA_READY", out created);
            if (!created) {
                errors_.add("Can't create the DBWIN_DATA_READY event/" + use_global_);
            }
        }
        if (created) {
            // signal writers that the buffer is free
            buffer_ready_.Set();
            while (!disposed_) {
                // short timeout so we can notice disposed_ and exit the thread promptly
                if (!data_ready_.WaitOne(1000)) {
                    continue;
                }
                using (var stream = memory_file_.CreateViewStream()) {
                    using (var reader = new BinaryReader(stream, Encoding.Default)) {
                        var process_id = (int)reader.ReadUInt32();
                        var raw = reader.ReadChars(4092);
                        var idx = Array.IndexOf(raw, '\0');
                        // FIX: an unterminated message made IndexOf return -1, which threw
                        // ArgumentOutOfRangeException in the string constructor and silently
                        // killed this reader thread; fall back to the whole buffer instead
                        if (idx < 0) {
                            idx = raw.Length;
                        }
                        var msg = new string(raw, 0, idx);
                        find_process_id(process_id);
                        string process_name = pid_to_name_.ContainsKey(process_id) ? pid_to_name_[process_id] : "";
                        lock (this)
                            events_.Add(new debug_event {
                                unique_id = next_unique_id++,
                                process_id = process_id,
                                msg = msg,
                                lo_process_name = process_name
                            });
                    }
                }
                // buffer consumed - let the next OutputDebugString call proceed
                buffer_ready_.Set();
            }
        }
    } catch (Exception e) {
        logger.Fatal("Can't read debug events " + e.Message);
        errors_.add("Error reading debug events " + e.Message);
    } finally {
        // release the OS handles even if the loop threw (previously ran after the
        // catch anyway, but finally makes the guarantee explicit)
        if (memory_file_ != null) {
            memory_file_.Dispose();
        }
        if (data_ready_ != null) {
            data_ready_.Dispose();
        }
        if (buffer_ready_ != null) {
            buffer_ready_.Dispose();
        }
    }
}