/// <summary>
/// Does the donkey work of searching for a pattern.
/// Each match is reported via 'on_found'; the search continues while 'on_found' returns true.</summary>
private static void DoFind(Pattern pat, long start, bool backward, BLIData d, Func<RangeI, bool> on_found)
{
	using (d.file)
	{
		var line = new Line();
		AddLineFunc test_line = (line_rng, baddr, fend, bf, enc) =>
		{
			// Ignore blanks?
			if (line_rng.Empty && d.ignore_blanks)
				return true;

			// Parse the line from the buffer
			line.Read(baddr + line_rng.Beg, bf, (int)line_rng.Beg, (int)line_rng.Size, d.encoding, d.col_delim, null, d.transforms);

			// Keep searching while the text is filtered out or doesn't match the pattern
			if (!PassesFilters(line.RowText, d.filters) || !pat.IsMatch(line.RowText))
				return true;

			// Found a match
			return on_found(new RangeI(baddr + line_rng.Beg, baddr + line_rng.End));
		};

		// Search the file for matching lines
		var line_buf = new byte[d.max_line_length];
		long count = backward ? start - 0 : d.fileend - start;
		FindLines(d.file, start, d.fileend, backward, count, test_line, d.encoding, d.row_delim, line_buf, d.progress);
	}
}
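// Note: the 'AddLineFunc' and 'ProgressFunc' delegate types are not declared in this section.
// From the way 'test_line' and 'd.progress' are used here (and in BuildLineIndexAsync below),
// their shapes are presumably along these lines; a sketch only, and the parameter types are assumptions:
//
//   /// <summary>Called for each line found. 'line_rng' is the line's byte range within 'buf',
//   /// 'baddr' is the file offset of the buffer. Return true to continue scanning, false to stop.</summary>
//   delegate bool AddLineFunc(RangeI line_rng, long baddr, long fend, byte[] buf, Encoding enc);
//
//   /// <summary>Reports scan progress. Return false to cancel the scan.</summary>
//   delegate bool ProgressFunc(long scanned, long length);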
/// <summary>Searches the file from 'start' looking for a match to 'pat'</summary>
/// <returns>Returns true if a match is found, false otherwise. If true
/// is returned 'found' contains the file byte offset of the first match</returns>
private bool Find(Pattern pat, long start, bool backward, out long found)
{
	long at = -1;
	DialogResult res = DialogResult.Cancel;
	try
	{
		var body = backward
			? (start == FileByteRange.End ? "Searching backward from the end of the file..." : "Searching backward from the current selection position...")
			: (start == FileByteRange.Beg ? "Searching forward from the start of the file..." : "Searching forward from the current selection position...");

		// Although this search runs in a background thread, it's wrapped in a modal
		// dialog box, so it should be ok to use class members directly
		var search = new ProgressForm("Searching...", body, null, ProgressBarStyle.Marquee, (s, a, cb) =>
		{
			var d = new BLIData(this, Src, fileend_: m_fileend);
			int last_progress = 0;
			d.progress = (scanned, length) =>
			{
				int progress = (int)(100 * Math_.Frac(0, scanned, length != 0 ? length : 1));
				if (progress != last_progress)
				{
					cb(new ProgressForm.UserState { FractionComplete = progress * 0.01f });
					last_progress = progress;
				}
				return !s.CancelPending;
			};

			// Searching...
			DoFind(pat, start, backward, d, rng => { at = rng.Beg; return false; });

			// We can call BuildLineIndex in this thread context because we know
			// we're in a modal dialog.
			if (at != -1 && !s.CancelPending)
				this.BeginInvoke(() => SelectRowByAddr(at));
		})
		{ StartPosition = FormStartPosition.CenterParent };
		using (search)
			res = search.ShowDialog(this, 500);
	}
	catch (OperationCanceledException) {}
	catch (Exception ex)
	{
		Misc.ShowMessage(this, "Find terminated by an error.", "Find error", MessageBoxIcon.Error, ex);
	}
	found = at;
	return res == DialogResult.OK;
}
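// Illustrative only: hypothetical callers of Find(), showing the out-parameter contract.
// 'FindUI.Pattern' is used by FindBookmarkAll() below; 'FileByteRange' is used within Find() above.
//
//   // Search forward through the whole file for the current find pattern.
//   long found;
//   if (Find(FindUI.Pattern, FileByteRange.Beg, false, out found))
//       Log.Write(ELogLevel.Info, $"First match at byte offset {found}");
//
//   // Search backward from the end of the file.
//   if (Find(FindUI.Pattern, FileByteRange.End, true, out found))
//       Log.Write(ELogLevel.Info, $"Last match at byte offset {found}");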
/// <summary>Searches the entire file and bookmarks all locations that match the find pattern</summary>
private void FindBookmarkAll()
{
	if (!PreFind())
		return;

	try
	{
		Log.Write(ELogLevel.Info, "FindBookmarkAll");
		var pat = FindUI.Pattern;
		const string body = "Bookmarking all found instances...";

		// Although this search runs in a background thread, it's wrapped in a modal
		// dialog box, so it should be ok to use class members directly
		var search = new ProgressForm("Searching...", body, null, ProgressBarStyle.Marquee, (s, a, cb) =>
		{
			var d = new BLIData(this, Src, fileend_: m_fileend);
			int last_progress = 0;
			d.progress = (scanned, length) =>
			{
				int progress = (int)(100 * Math_.Frac(0, scanned, length != 0 ? length : 1));
				if (progress != last_progress)
				{
					cb(new ProgressForm.UserState { FractionComplete = progress * 0.01f });
					last_progress = progress;
				}
				return !s.CancelPending;
			};

			// Searching...
			DoFind(pat, 0, false, d, rng =>
			{
				this.BeginInvoke(() => SetBookmark(rng, Bit.EState.Set));
				return true;
			});
		})
		{ StartPosition = FormStartPosition.CenterParent };
		using (search)
			search.ShowDialog(this, 500);
	}
	catch (OperationCanceledException) {}
	catch (Exception ex)
	{
		Misc.ShowMessage(this, "Find terminated by an error.", "Find error", MessageBoxIcon.Error, ex);
	}
}
/// <summary>Called when building the line index completes (success or failure)</summary>
private void BuildLineIndexComplete(BLIData d, RangeI range, List<RangeI> line_index, Exception error, Action on_success)
{
	// This method runs in the main thread, so if the build issue is unchanged at
	// the start of this method it cannot change until after this function returns.
	if (BuildCancelled(d.build_issue))
		return;

	ReloadInProgress = false;
	UpdateStatusProgress(1, 1);

	// If an error occurred
	if (error != null)
	{
		if (error is OperationCanceledException)
		{
		}
		else if (error is FileNotFoundException)
		{
			SetStaticStatusMessage($"Error reading {Path.GetFileName(d.file.Name)}", Color.White, Color.DarkRed);
		}
		else
		{
			Log.Write(ELogLevel.Error, error, "Exception ended BuildLineIndex() call");
			BuildLineIndexTerminatedWithError(error);
		}
	}
	// Otherwise, merge the results into the main cache
	else
	{
		// Merge the line index results
		int row_delta = MergeLineIndex(range, line_index, d.file_buffer_size, d.filepos, d.fileend, d.reload);

		// Ensure the grid is updated
		UpdateUI(row_delta);
		if (on_success != null)
			on_success();

		// On completion, check if the file has changed again and rerun if it has
		Watch.CheckForChangedFiles();

		// Trigger a collect to free up memory. This also has the side effect of
		// triggering a signing test of the exe, because that test is done in a destructor.
		GC.Collect();
	}
}
/// <summary>
/// Generates the line index centred around 'filepos'.
/// If 'filepos' is within the byte range of 'm_line_index' then an incremental search for
/// lines is done in the direction needed to re-centre the line list around 'filepos'.
/// If 'reload' is true a full rescan of the file is done</summary>
private void BuildLineIndex(long filepos, bool reload, Action on_success = null)
{
	try
	{
		// No file open, nothing to do
		if (Src == null)
			return;

		// Incremental updates cannot supplant reloads
		if (ReloadInProgress && reload == false)
			return;

		// Cause any existing builds to stop by changing the issue number
		Interlocked.Increment(ref m_build_issue);
		Log.Write(ELogLevel.Info, $"build start request (id {m_build_issue}, reload: {reload})\n{string.Empty}"); //new StackTrace(0,true)));
		ReloadInProgress = reload;

		// Make copies of variables for thread safety
		var bli_data = new BLIData(this, Src, filepos_: filepos, reload_: reload, build_issue_: m_build_issue)
		{
			// Set up callbacks that marshal to the main thread
			progress = (scanned, length) =>
			{
				this.BeginInvoke(() => UpdateStatusProgress(scanned, length));
				return true;
			},
		};

		// Find the new line indices in a background thread
		ThreadPool.QueueUserWorkItem(
			x => BuildLineIndexAsync(bli_data, (d, range, line_index, error) =>
				this.BeginInvoke( // Marshal the results back to the main thread
					() => BuildLineIndexComplete(d, range, line_index, error, on_success))));
	}
	catch (Exception ex)
	{
		BuildLineIndexTerminatedWithError(ex);
		ReloadInProgress = false;
	}
}
/// <summary>
/// Export the file 'filepath' using current filters to the stream 'outp'.
/// Note: exceptions thrown in the background thread are caught and reported via a message box.</summary>
/// <param name="d">A copy of the data needed to do the export</param>
/// <param name="ranges">Byte ranges within 'filepath' to be exported</param>
/// <param name="row_delimiter">The delimiter that defines rows (robitised)</param>
/// <param name="col_delimiter">The delimiter that defines columns (robitised)</param>
/// <param name="outp">The stream to write the exported file to</param>
private bool DoExportWithProgress(BLIData d, IEnumerable<RangeI> ranges, string row_delimiter, string col_delimiter, StreamWriter outp)
{
	DialogResult res = DialogResult.Cancel;
	try
	{
		// Although this export runs in a background thread, it's wrapped in a modal
		// dialog box, so it should be ok to use class members directly
		var export = new ProgressForm("Exporting...", null, null, ProgressBarStyle.Continuous, (s, a, cb) =>
		{
			// Report progress and test for cancel
			int last_progress = -1;
			d.progress = (scanned, length) =>
			{
				int progress = (int)(100 * Math_.Frac(0, scanned, length != 0 ? length : 1));
				if (progress != last_progress)
				{
					cb(new ProgressForm.UserState { FractionComplete = progress * 0.01f });
					last_progress = progress;
				}
				return !s.CancelPending;
			};

			// Do the export
			DoExport(d, ranges, row_delimiter, col_delimiter, outp);
		})
		{ StartPosition = FormStartPosition.CenterParent };
		using (export)
			res = export.ShowDialog(this);
	}
	catch (OperationCanceledException) {}
	catch (Exception ex)
	{
		Misc.ShowMessage(this, "Exporting terminated due to an error.", "Export error", MessageBoxIcon.Error, ex);
	}
	return res == DialogResult.OK;
}
/// <summary>Export 'filepath' to 'outp'.</summary>
/// <param name="d">A copy of the data needed to do the export</param>
/// <param name="ranges">Byte ranges within the input file to export</param>
/// <param name="row_delimiter">The row delimiter to use in the output file (robitised)</param>
/// <param name="col_delimiter">The column delimiter to use in the output file (robitised)</param>
/// <param name="outp">The output stream to write the exported result to</param>
private static void DoExport(BLIData d, IEnumerable<RangeI> ranges, string row_delimiter, string col_delimiter, StreamWriter outp)
{
	var line = new Line();

	// Callback for adding lines to the export result
	AddLineFunc add_line = (line_rng, baddr, fend, bf, enc) =>
	{
		if (line_rng.Empty && d.ignore_blanks)
			return true;

		// Parse the line from the buffer
		line.Read(baddr + line_rng.Beg, bf, (int)line_rng.Beg, (int)line_rng.Size, d.encoding, d.col_delim, null, d.transforms);

		// Skip lines that are filtered out
		if (!PassesFilters(line.RowText, d.filters))
			return true;

		// Write to the output file
		outp.Write(string.Join(col_delimiter, line.Column));
		outp.Write(row_delimiter);
		return true;
	};

	byte[] buf = new byte[d.max_line_length];
	foreach (var rng in ranges)
	{
		// Find the start of a line (grow the range if necessary)
		var r = new RangeI(Math_.Clamp(rng.Beg, 0, d.file.Stream.Length), Math_.Clamp(rng.End, 0, d.file.Stream.Length));
		r.Beg = FindLineStart(d.file, r.Beg, r.End, d.row_delim, d.encoding, buf);

		// Read lines and write them to the export file
		FindLines(d.file, r.Beg, r.End, false, r.Size, add_line, d.encoding, d.row_delim, buf, d.progress);
	}
}
/// <summary>Performs an export from the command line</summary>
public static void ExportToFile(StartupOptions startup_options)
{
	string tmp_settings_path = Path.Combine(Path.GetTempPath(), "rylog_settings_" + Guid.NewGuid() + ".xml");
	try
	{
		// Copy the settings to a tmp file so that we don't trash the normal settings
		if (Path_.FileExists(startup_options.SettingsPath))
			File.Copy(startup_options.SettingsPath, tmp_settings_path);
		else
			new Settings().Save(tmp_settings_path);
		startup_options.SettingsPath = tmp_settings_path;

		// Load an instance of the app and the options.
		var m = new Main(startup_options);

		// Override settings passed on the command line
		if (startup_options.RowDelim != null)
			m.Settings.RowDelimiter = startup_options.RowDelim;
		if (startup_options.ColDelim != null)
			m.Settings.ColDelimiter = startup_options.ColDelim;
		if (startup_options.PatternSetFilepath != null)
		{
			// Specifying a pattern set implies the filters and transforms should be enabled
			m.Settings.Patterns = PatternSet.Load(startup_options.PatternSetFilepath);
			m.Settings.FiltersEnabled = true;
			m.Settings.TransformsEnabled = true;
		}

		// Do the export
		using (var outp = new StreamWriter(new FileStream(startup_options.ExportPath, FileMode.Create, FileAccess.Write, FileShare.Read)))
		{
			try
			{
				var d = new BLIData(m, new SingleFile(startup_options.FileToLoad));
				using (d.file)
				{
					var rng = new[] { new RangeI(0, long.MaxValue) };
					var row_delimiter = Misc.Robitise(m.Settings.RowDelimiter);
					var col_delimiter = Misc.Robitise(m.Settings.ColDelimiter);
					if (startup_options.NoGUI)
					{
						using (var done = new ManualResetEvent(false))
						{
							ThreadPool.QueueUserWorkItem(x =>
							{
								d.progress = (c, l) => true;
								DoExport(d, rng, row_delimiter, col_delimiter, outp);
								done.Set();
							});
							done.WaitOne();
							if (!startup_options.Silent)
								Console.WriteLine("Export completed successfully.");
						}
					}
					else
					{
						if (m.DoExportWithProgress(d, rng, row_delimiter, col_delimiter, outp))
						{
							if (!startup_options.Silent)
								Console.WriteLine("Export completed successfully.");
						}
					}
				}
			}
			catch (Exception ex)
			{
				Environment.ExitCode = 1;
				if (!startup_options.Silent)
					Console.WriteLine($"Export failed.\r\n{ex.Message}");
			}
		}
	}
	finally
	{
		if (Path_.FileExists(tmp_settings_path))
			File.Delete(tmp_settings_path);
	}
}
/// <summary>Show the export dialog</summary>
private void ShowExportDialog()
{
	if (Src == null)
		return;

	// Determine the export file path
	var filepath = Settings.ExportFilepath;
	if (!filepath.HasValue())
		filepath = Path.ChangeExtension(Src.PsuedoFilepath, ".exported" + Path.GetExtension(Src.PsuedoFilepath));

	// Prompt for export settings
	var dlg = new ExportUI(filepath, Misc.Humanise(m_encoding.GetString(m_row_delim)), Misc.Humanise(m_encoding.GetString(m_col_delim)), FileByteRange);
	using (dlg)
	{
		if (dlg.ShowDialog(this) != DialogResult.OK)
			return;

		// Save the export filepath to the settings
		Settings.ExportFilepath = dlg.OutputFilepath;

		// Find the range to export
		IEnumerable<RangeI> rng;
		switch (dlg.RangeToExport)
		{
		default: throw new ArgumentOutOfRangeException();
		case ExportUI.ERangeToExport.WholeFile: rng = new[] { FileByteRange }; break;
		case ExportUI.ERangeToExport.Selection: rng = SelectedRowRanges; break;
		case ExportUI.ERangeToExport.ByteRange: rng = new[] { dlg.ByteRange }; break;
		}

		// Delimiters
		var row_delimiter = Misc.Robitise(dlg.RowDelim);
		var col_delimiter = Misc.Robitise(dlg.ColDelim);

		// Do the export
		using (var outp = new StreamWriter(new FileStream(dlg.OutputFilepath, FileMode.Create, FileAccess.Write, FileShare.Read)))
		{
			try
			{
				var d = new BLIData(this, Src);
				if (DoExportWithProgress(d, rng, row_delimiter, col_delimiter, outp))
					MsgBox.Show(this, "Export completed successfully.", Application.ProductName, MessageBoxButtons.OK);
			}
			catch (Exception ex)
			{
				Log.Write(ELogLevel.Error, ex, "Export failed");
				MsgBox.Show(this, string.Format("Export failed.\r\n{0}", ex.Message), Application.ProductName, MessageBoxButtons.OK);
			}
		}
	}
}
/// <summary>The grunt work of building the new line index.</summary>
private static void BuildLineIndexAsync(BLIData d, Action<BLIData, RangeI, List<RangeI>, Exception> on_complete)
{
	// This method runs in a background thread.
	// All we're doing here is loading data around 'd.filepos' so that there are an equal number
	// of lines on either side. This can be optimised, however, because the existing range of
	// cached data probably overlaps the range we want loaded.
	try
	{
		Log.Write(ELogLevel.Info, "BLIAsync", $"build started. (id {d.build_issue}, reload {d.reload})");
		if (BuildCancelled(d.build_issue))
			return;

		using (d.file)
		{
			// A temporary buffer for reading sections of the file
			var buf = new byte[d.max_line_length];

			// Seek to the first line that starts immediately before 'filepos'
			d.filepos = FindLineStart(d.file, d.filepos, d.fileend, d.row_delim, d.encoding, buf);
			if (BuildCancelled(d.build_issue))
				return;

			// Determine the range to scan and the number of lines in each direction
			var scan_backward = (d.fileend - d.filepos) > (d.filepos - 0); // scan in the most bounded (smallest) direction first
			var scan_range = CalcBufferRange(d.filepos, d.fileend, d.file_buffer_size);
			var line_range = CalcLineRange(d.line_cache_count);
			var bwd_lines = line_range.Begi;
			var fwd_lines = line_range.Endi;

			// Incremental loading - only load what isn't already cached.
			// If 'filepos' is left of the cache centre, try to extend in the left direction first.
			// If the scan range in that direction is empty, try extending at the other end. The
			// aim is to get d.line_index_count as close to d.line_cache_count as possible
			// without loading data that is already cached.
			#region Incremental loading
			if (!d.reload && !d.cached_whole_line_range.Empty)
			{
				// Determine the direction the cached range is moving based on where 'filepos' is relative
				// to the current cache centre and which range contains a valid area to be scanned.
				// With incremental scans we can only update one side of the cache because the returned line
				// index has to be a contiguous block of lines. This means one of 'bwd_lines' or 'fwd_lines'
				// must be zero.
				var Lrange = new RangeI(scan_range.Beg, d.cached_whole_line_range.Beg);
				var Rrange = new RangeI(d.cached_whole_line_range.End, scan_range.End);
				var dir =
					(!Lrange.Empty && !Rrange.Empty) ? Math.Sign(2 * d.filepos_line_index - d.line_cache_count) :
					(!Lrange.Empty) ? -1 :
					(!Rrange.Empty) ? +1 : 0;

				// Determine the number of lines to scan, based on direction
				if (dir < 0)
				{
					scan_backward = true;
					scan_range = Lrange;
					bwd_lines -= Math_.Clamp(d.filepos_line_index - 0, 0, bwd_lines);
					fwd_lines = 0;
				}
				else if (dir > 0)
				{
					scan_backward = false;
					scan_range = Rrange;
					bwd_lines = 0;
					fwd_lines -= Math_.Clamp(d.line_index_count - d.filepos_line_index - 1, 0, fwd_lines);
				}
				else if (dir == 0)
				{
					bwd_lines = 0;
					fwd_lines = 0;
					scan_range = RangeI.Zero;
				}
			}
			#endregion
			Debug.Assert(bwd_lines + fwd_lines <= d.line_cache_count);

			// Build the collection of line byte ranges to add to the cache
			var line_index = new List<RangeI>();
			if (bwd_lines != 0 || fwd_lines != 0)
			{
				// Line index buffers for collecting the results
				var fwd_line_buf = new List<RangeI>();
				var bwd_line_buf = new List<RangeI>();

				// Data used in the 'add_line' callback. Updated for the forward and backward passes.
				var lbd = new LineBufferData
				{
					line_buf = null, // pointer to either 'fwd_line_buf' or 'bwd_line_buf'
					line_limit = 0,  // Caps the number of lines read for each of the forward and backward searches
				};

				// Callback for adding line byte ranges to a line buffer
				AddLineFunc add_line = (line, baddr, fend, bf, enc) =>
				{
					if (line.Empty && d.ignore_blanks)
						return true;

					// Test 'text' against each filter to see if it's included.
					// Note: not caching this string because we want to read immediate data
					// from the file to pick up file changes.
					string text = d.encoding.GetString(buf, (int)line.Beg, (int)line.Size);
					if (!PassesFilters(text, d.filters))
						return true;

					// Convert the byte range to a file range
					line = line.Shift(baddr);
					Debug.Assert(new RangeI(0, d.fileend).Contains(line));
					lbd.line_buf.Add(line);
					Debug.Assert(lbd.line_buf.Count <= lbd.line_limit);
					return (fwd_line_buf.Count + bwd_line_buf.Count) < lbd.line_limit;
				};

				// Callback for updating progress
				ProgressFunc progress = (scanned, length) =>
				{
					int numer = fwd_line_buf.Count + bwd_line_buf.Count, denom = lbd.line_limit;
					return d.progress(numer, denom) && !BuildCancelled(d.build_issue);
				};

				// Scan twice, starting in the direction of the smallest range so that any
				// unused cache space is used by the search in the other direction
				var scan_from = Math_.Clamp(d.filepos, scan_range.Beg, scan_range.End);
				for (int a = 0; a != 2; ++a, scan_backward = !scan_backward)
				{
					if (BuildCancelled(d.build_issue))
						return;

					lbd.line_buf = scan_backward ? bwd_line_buf : fwd_line_buf;
					lbd.line_limit += scan_backward ? bwd_lines : fwd_lines;
					if ((bwd_line_buf.Count + fwd_line_buf.Count) < lbd.line_limit)
					{
						var length = scan_backward ? scan_from - scan_range.Beg : scan_range.End - scan_from;
						FindLines(d.file, scan_from, d.fileend, scan_backward, length, add_line, d.encoding, d.row_delim, buf, progress);
					}
				}

				// Scanning backward adds lines to the line index in reverse order.
				bwd_line_buf.Reverse();

				// 'line_index' should be a contiguous block of byte offset ranges for
				// the lines around 'd.filepos'. If 'd.reload' is false, then the line
				// index will only contain byte offset ranges that are not currently cached.
				line_index.Capacity = bwd_line_buf.Count + fwd_line_buf.Count;
				line_index.AddRange(bwd_line_buf);
				line_index.AddRange(fwd_line_buf);
			}

			// Job done
			on_complete(d, scan_range, line_index, null);
		}
	}
	catch (Exception ex)
	{
		on_complete(d, RangeI.Zero, null, ex);
	}
}
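// Note: 'LineBufferData' is not defined in this section. Judging by its initialiser and the
// 'add_line'/'progress' callbacks above, it is presumably a small mutable holder along these lines
// (a sketch only; the field names come from the usage above, everything else is an assumption):
//
//   /// <summary>Shared, mutable state for the forward and backward line-scanning passes.</summary>
//   private class LineBufferData
//   {
//       /// <summary>The buffer the current pass appends line byte ranges to.</summary>
//       public List<RangeI> line_buf;
//
//       /// <summary>Caps the total number of lines collected so far (grown before each pass).</summary>
//       public int line_limit;
//   }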