private void OnLogTextChanged(RawContentsChangedEventArgs args)
{
    Debug.WriteLine("RawContentChanged event fired");

    LogLines.Clear();
    LogLines.AddRange(args.NewText
        .Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)
        .Select(line => new LogLine(line.Trim(), LogHighlight.None)));

    RawContentChanged?.Invoke(this, args);
}
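// Usage sketch (hypothetical, not part of the original class): since OnLogTextChanged forwards the
// event via RawContentChanged?.Invoke, a consumer can subscribe to react to full-content refreshes.
// The logService variable name and the EventHandler<RawContentsChangedEventArgs> delegate shape are
// assumptions for illustration only.
logService.RawContentChanged += (sender, e) =>
{
    // e.NewText carries the replacement text that OnLogTextChanged just split into LogLines
    Debug.WriteLine($"Received {e.NewText.Length} characters of raw content.");
};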
public void CopyProperties(FileSystemScanService fromScanService)
{
    BaseFolder = fromScanService.BaseFolder;
    ComputerName = fromScanService.ComputerName;
    OSVersionName = fromScanService.OSVersionName;
    ScanDate = fromScanService.ScanDate;
    ScanOptions.BaseFolderPath = fromScanService.ScanOptions.BaseFolderPath;
    ScanOptions.IncludeSubFolders = fromScanService.ScanOptions.IncludeSubFolders;
    ScanOptions.SearchPattern = fromScanService.ScanOptions.SearchPattern;
    ScanOptions.ShowMinimumFolderLevelInLog = fromScanService.ScanOptions.ShowMinimumFolderLevelInLog;
    UserName = fromScanService.UserName;
    DetailType = fromScanService.DetailType;

    Clear();

    ScanResult.AllFolders.AddRange(fromScanService.ScanResult.AllFolders);
    ScanResult.AllFiles.AddRange(fromScanService.ScanResult.AllFiles);
    ScanResult.ScanExceptions.AddRange(fromScanService.ScanResult.ScanExceptions);
    LogLines.AddRange(fromScanService.LogLines);
}
private void UpdateTail()
{
    // lock this or else you'll run into internal array sizing issues with the bounded ItemSource
    lock (_lockObject)
    {
        // handle the case where the file may have been deleted while tailing
        if (!File.Exists(LogInfo.Filename))
        {
            // keep tailing for now in case the file is just being republished, so skip this pass
            Trace.WriteLine("File disappeared. Ignoring until next pass.");
            return;
        }

        using (var fs = new FileStream(LogInfo.Filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        {
            // handle the case where the file may have been reset (e.g. republished)
            if (_lastIndex > fs.Length)
            {
                Trace.WriteLine("File shrank. Assuming it's new and resetting lastIndex to zero!");
                _lastIndex = 0;
            }

            // fs.Length == 0 handles a case where the FileStream reports 0 for no apparent reason
            if (fs.Length == 0 || !fs.CanRead || fs.Length == _lastIndex)
            {
                return; // no change
            }

            // avoid reading the entire file on startup
            var startAt = _lastIndex;
            if (startAt == 0 && fs.Length > _displayBuffer)
            {
                startAt = fs.Length - _displayBuffer;
                Debug.WriteLine(
                    $"{LogInfo.Alias} file was larger than buffer ({_displayBuffer}). Starting at i={startAt} instead of beginning.");
            }

            // create a container for the new data
            var newContentSize = fs.Length - startAt;
            var newContent = new byte[newContentSize];
            Debug.WriteLine($"This chunk will be {newContent.Length} bytes.");

            // fast forward to our starting point
            fs.Seek(startAt, SeekOrigin.Begin);

            // read the new data (Stream.Read may return fewer bytes than requested, so loop until the chunk is full)
            var totalRead = 0;
            while (totalRead < newContent.Length)
            {
                var bytesRead = fs.Read(newContent, totalRead, newContent.Length - totalRead);
                if (bytesRead == 0)
                {
                    break; // end of stream reached earlier than expected
                }
                totalRead += bytesRead;
            }

            // detect new lines before attempting to update;
            // if there aren't any, treat the file as untouched
            var newContentString = Encoding.UTF8.GetString(newContent);
            if (LogLines.Count > 1 && newContentString.IndexOf(Environment.NewLine, StringComparison.OrdinalIgnoreCase) == -1)
            {
                return;
            }

            // get the new lines
            var newLines = newContentString
                .Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)
                .Select(line => new LogLine(line.Trim(), LogHighlight.None))
                .ToList();

            // update the log collection
            LogLines.AddRange(newLines);
            OnNewContentedAdded(new NewContentEventArgs(this, newLines));
            _lastLineIsDirty = true;

            // trim the log if necessary
            LineCount = TrimLog();

            // remember where we left off for the next pass
            _lastIndex = fs.Position;
        }
    }
}
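// Usage sketch (hypothetical, not part of the original class): UpdateTail is written to be called
// repeatedly, so a System.Threading.Timer can drive the polling. The _pollTimer field and the
// StartTailing/StopTailing names are assumptions for illustration only; requires using System.Threading.
private Timer _pollTimer;

public void StartTailing(TimeSpan interval)
{
    // each tick re-reads only the bytes appended since _lastIndex
    _pollTimer = new Timer(_ => UpdateTail(), null, TimeSpan.Zero, interval);
}

public void StopTailing()
{
    _pollTimer?.Dispose();
    _pollTimer = null;
}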