/// <summary>
/// Verifies that ReadLines(int[]) returns only the requested line numbers
/// that actually exist in the file: line 105 is requested but the file has
/// only 100 lines, so it must be silently skipped.
/// </summary>
public void ReadSpecificFileLines()
{
    // Arrange: a temp file containing the lines "1".."100".
    var file = Path.GetTempFileName();
    var info = new FileInfo(file);
    try
    {
        File.AppendAllLines(file, Enumerable.Range(1, 100).Select(i => i.ToString()));

        // Act: request a mix of existing line numbers and one (105) past EOF.
        var lines = info.ReadLines(new[] { 1, 2, 3, 10, 100, 105 });

        // Assert: only the lines present in the file come back.
        lines.Select(l => l.Number).ShouldAllBeEquivalentTo(new[] { 1, 2, 3, 10, 100 });
    }
    finally
    {
        // Fix: the original deleted the file only on the success path,
        // leaking a temp file every time the assertion failed.
        File.Delete(file);
    }
}
/// <summary>
/// Example/unit test for FileInfo.ReadLines(): writes two physical lines to a
/// file beside the test binaries, then asserts they are read back in order.
/// </summary>
public void ReadLines()
{
    // The example file lives next to the test assemblies.
    var path = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Examples_System_IO_FileInfo_ReadLines.txt");
    var target = new FileInfo(path);

    // Write "Fizz" and "Buzz" separated by a newline, using the default system encoding.
    byte[] payload = Encoding.Default.GetBytes("Fizz" + Environment.NewLine + "Buzz");
    using (FileStream stream = target.Create())
    {
        stream.Write(payload, 0, payload.Length);
    }

    // ReadLines() should yield each physical line in file order: ["Fizz", "Buzz"].
    List<string> result = target.ReadLines().ToList();

    Assert.AreEqual("Fizz", result[0]);
    Assert.AreEqual("Buzz", result[1]);
}
/// <summary>
/// Tails <paramref name="file"/>, exposing the lines that match the latest
/// search text, virtualised by the consumer's scroll position.
/// </summary>
/// <param name="file">The file to tail. Required.</param>
/// <param name="textToMatch">Stream of search strings; empty/null text means "match every line".</param>
/// <param name="scrollRequest">Stream of paging requests (mode, page size, first index) from the consumer.</param>
/// <param name="scheduler">Scheduler for the file watcher; defaults to Scheduler.Default when null.</param>
/// <exception cref="ArgumentNullException">When <paramref name="file"/> or <paramref name="textToMatch"/> is null.</exception>
public FileTailer(FileInfo file, IObservable<string> textToMatch, IObservable<ScrollRequest> scrollRequest, IScheduler scheduler=null)
{
    if (file == null) throw new ArgumentNullException(nameof(file));
    if (textToMatch == null) throw new ArgumentNullException(nameof(textToMatch));

    // For each new search text, rescan the file and produce the set of matching lines.
    // A null predicate (empty search) means every line matches.
    var matchedLines = textToMatch
        .Select(searchText =>
        {
            Func<string, bool> predicate = null;
            if (!string.IsNullOrEmpty(searchText))
                predicate = s => s.Contains(searchText, StringComparison.OrdinalIgnoreCase);

            // TakeWhile(Exists) + Repeat() restarts the watch when the file disappears and reappears.
            return file.WatchFile(scheduler:scheduler ?? Scheduler.Default)
                .TakeWhile(notification => notification.Exists)
                .Repeat()
                .ScanFile(predicate);
        }).Switch()               // only the newest search is live
        .Replay(1).RefCount();    // share one scan, replay latest result to late subscribers

    MatchedLines = matchedLines.Select(x => x.MatchingLines.Length);
    TotalLines = matchedLines.Select(x => x.TotalLines);

    //todo: plug in file missing or error into the screen

    // Backing list for the visible page of lines, exposed read-only via Lines.
    var lines = new SourceList<Line>();
    Lines = lines.AsObservableList();

    // This is the beast! Dynamically combine the lines requested by the consumer
    // with the lines which exist in the file. This enables proper virtualisation of the file.
    var scroller = matchedLines
        .CombineLatest(scrollRequest, (scanResult, request) => new {scanResult , request })
        .Subscribe(x =>
        {
            var mode = x.request.Mode;
            var pageSize = x.request.PageSize;
            var endOfTail = x.scanResult.EndOfTail;
            // Index==0 => first scan of the file; tail highlighting is suppressed on the initial load.
            var isInitial = x.scanResult.Index==0;
            var allLines = x.scanResult.MatchingLines;
            // Snapshot of what is currently displayed, for diffing against the new page.
            var previousPage = lines.Items.Select(l => new LineIndex(l.Number, l.Index)).ToArray();

            // Tail mode pins the page to the last pageSize lines;
            // otherwise take the page size and start index from the request.
            var currentPage = (mode == ScrollingMode.Tail
                ? allLines.Skip(allLines.Length-pageSize).Take(pageSize).ToArray()
                : allLines.Skip(x.request.FirstIndex).Take(pageSize)).ToArray();

            // Diff: only touch the list for lines entering/leaving the visible page.
            var added = currentPage.Except(previousPage).ToArray();
            var removed = previousPage.Except(currentPage).Select(li=>li.Line).ToArray();
            if (added.Length + removed.Length == 0) return;

            try
            {
                // Read only the newly visible lines from disk; lines past the previous
                // end-of-tail are timestamped so the UI can highlight fresh arrivals.
                var addedLines = file.ReadLines(added, (lineIndex, text) =>
                {
                    var isEndOfTail = !isInitial && lineIndex.Line > endOfTail;
                    return new Line(lineIndex.Line, lineIndex.Index, text, isEndOfTail ? DateTime.Now : (DateTime?)null);
                }).ToArray();

                // Get old lines from the current collection.
                var removedLines = lines.Items.Where(l => removed.Contains(l.Number)).ToArray();

                // Finally reflect the changes in the list in a single batched edit.
                lines.Edit(innerList =>
                {
                    innerList.RemoveMany(removedLines);
                    innerList.AddRange(addedLines);
                });
            }
            catch (Exception)
            {
                // Deliberate best-effort swallow: there is a small chance of a read error here
                // (file truncated/rolled mid-read) and the next successful read will rectify it.
                //TODO: 1. Feedback to user that streaming has stopped
                //TODO: 2. Replace the ReadLines(..) method with the select of an observable
            }
        });

    _cleanUp = new CompositeDisposable(Lines, scroller, lines);
}
/// <summary>
/// Tails <paramref name="file"/> on a 250ms heartbeat, combining the file's line
/// index, the current search matches and the consumer's scroll position into a
/// virtualised, observable page of lines.
/// </summary>
/// <param name="file">The file to tail. Required.</param>
/// <param name="textToMatch">Stream of search strings; empty or shorter than 3 chars disables filtering.</param>
/// <param name="scrollRequest">Stream of paging requests from the consumer.</param>
/// <param name="scheduler">Scheduler driving the heartbeat; defaults to Scheduler.Default when null.</param>
/// <exception cref="ArgumentNullException">When <paramref name="file"/> or <paramref name="textToMatch"/> is null.</exception>
public FileTailer(FileInfo file, IObservable<string> textToMatch, IObservable<ScrollRequest> scrollRequest, IScheduler scheduler=null)
{
    if (file == null) throw new ArgumentNullException(nameof(file));
    if (textToMatch == null) throw new ArgumentNullException(nameof(textToMatch));

    // Backing list for the visible page, exposed read-only via Lines.
    var lines = new SourceList<Line>();
    Lines = lines.AsObservableList();

    // All three input streams are serialised through one gate so the CombineLatest
    // below never sees concurrent notifications.
    var locker = new object();
    scrollRequest = scrollRequest.Synchronize(locker);

    // Single shared heartbeat which drives both the indexer and the matcher,
    // so the file is polled once per tick rather than once per consumer.
    var metronome = Observable
        .Interval(TimeSpan.FromMilliseconds(250), scheduler ?? Scheduler.Default)
        .ToUnit()
        .Replay().RefCount();

    //temp mess for a few days
    // Maintains the line index of the file; TakeWhile(Exists) + Repeat() restarts
    // the watch when the file disappears and reappears.
    var indexer = file.WatchFile(metronome)
        .TakeWhile(notification => notification.Exists)
        .Repeat()
        .Index()
        .Synchronize(locker)
        .Replay(1).RefCount();

    // For each search text, produce the matching line set. Searches shorter than
    // 3 characters are treated as "no filter" (LineMatches.None).
    var matcher = textToMatch.Select(searchText =>
    {
        if (string.IsNullOrEmpty(searchText) || searchText.Length < 3)
            return Observable.Return(LineMatches.None);

        return file.WatchFile(metronome)
            .TakeWhile(notification => notification.Exists)
            .Repeat()
            .Match(s => s.Contains(searchText, StringComparison.OrdinalIgnoreCase));
    }).Switch()                 // only the newest search is live
        .Synchronize(locker)
        .Replay(1).RefCount();

    //count matching lines (all if no filter is specified)
    MatchedLines = indexer.CombineLatest(matcher, (indicies, matches) => matches == LineMatches.None ? indicies.Count : matches.Count);
    //count total lines
    TotalLines = indexer.Select(x => x.Count);
    FileSize = file.WatchFile(metronome).Select(notification => notification.Size);

    // The core pipeline: whenever the index, the matches or the scroll position
    // changes, compute the new visible page, diff it against what is displayed,
    // read only the newly visible lines from disk, and apply the delta.
    var aggregator = indexer.CombineLatest(matcher, scrollRequest,(idx, mtch, scroll) => new CombinedResult(scroll, mtch, idx))
        .Select(result =>
        {
            var scroll = result.Scroll;
            var indicies = result.Incidies;
            var matched = result.MatchedLines;

            // Resolve which line indices are on screen: tail mode pins to the end,
            // otherwise page from the requested first index. When a filter is active
            // (ChangedReason != None) the matched set constrains the page.
            IEnumerable<LineIndex> indices;
            if (result.MatchedLines.ChangedReason == LineMatchChangedReason.None)
            {
                indices = scroll.Mode == ScrollingMode.Tail
                    ? indicies.GetTail(scroll)
                    : indicies.GetFromIndex(scroll);
            }
            else
            {
                indices = scroll.Mode == ScrollingMode.Tail
                    ? indicies.GetTail(scroll, matched)
                    : indicies.GetFromIndex(scroll, matched);
            }

            // Diff the new page against the currently displayed one.
            var currentPage = indices.ToArray();
            var previous = lines.Items.Select(l => l.LineIndex).ToArray();
            var removed = previous.Except(currentPage).ToArray();
            var removedLines = lines.Items.Where(l=> removed.Contains(l.LineIndex)).ToArray();
            var added = currentPage.Except(previous).ToArray();

            // Finally we can load the new lines from the file. Lines past TailStartsAt
            // (and not part of the initial load) are timestamped so the UI can
            // highlight freshly tailed lines.
            var newLines = file.ReadLines(added, (lineIndex, text) =>
            {
                var isEndOfTail = indicies.ChangedReason != LinesChangedReason.Loaded && lineIndex.Line > indicies.TailStartsAt;
                return new Line(lineIndex, text, isEndOfTail ? DateTime.Now : (DateTime?) null);
            }, indicies.Encoding).ToArray();

            return new { NewLines = newLines, OldLines = removedLines };
        })
        // A read can fail transiently (e.g. file rolled mid-read); back off and resubscribe.
        .RetryWithBackOff((Exception error, int attempts) =>
        {
            //todo: plug in file missing or error into the screen
            return TimeSpan.FromSeconds(1);
        })
        // Skip no-op deltas to avoid churning the observable list.
        .Where(fn=> fn.NewLines.Length + fn.OldLines.Length > 0)
        .Subscribe(changes =>
        {
            // Apply the delta to the observable list in a single batched edit.
            lines.Edit(innerList =>
            {
                if (changes.OldLines.Any()) innerList.RemoveMany(changes.OldLines);
                if (changes.NewLines.Any()) innerList.AddRange(changes.NewLines);
            });
        });

    _cleanUp = new CompositeDisposable(Lines, lines, aggregator);
}
/// <summary>
/// Fetch the file for the given svn target and, recursively via the include
/// scanner, every file it references.
/// </summary>
/// <param name="ds">The svn target describing the file to download.</param>
/// <param name="extractionPath">Local directory the file (and its includes) are written to.</param>
/// <returns>The downloaded file, followed by any files pulled in by its include lines.</returns>
private IEnumerable<FileInfo> GetSvnFiles(SvnTarget ds, PathInfo extractionPath)
{
    // Download the target itself into the extraction directory.
    var downloaded = new FileInfo(Path.Combine(extractionPath.Path, ds.FileName));
    WriteVerbose($"Downloading svn file {ds.TargetName}");
    MCJobSVNHelpers.ExtractFile(ds, downloaded);
    yield return downloaded;

    // Scan each line of the downloaded file for include directives and
    // yield whatever files they resolve to.
    foreach (var line in downloaded.ReadLines())
    {
        foreach (var includedFile in ExtractIncludedFiles(line, extractionPath))
        {
            yield return includedFile;
        }
    }
}
/// <summary>
/// Tails <paramref name="file"/>, combining the file's line index, the current
/// search matches and the consumer's scroll position into a virtualised,
/// observable page of lines.
/// </summary>
/// <param name="file">The file to tail. Required.</param>
/// <param name="textToMatch">Stream of search strings; empty or shorter than 3 chars disables filtering.</param>
/// <param name="scrollRequest">Stream of paging requests from the consumer.</param>
/// <param name="scheduler">Optional scheduler for the file watcher.</param>
/// <exception cref="ArgumentNullException">When <paramref name="file"/> or <paramref name="textToMatch"/> is null.</exception>
public FileTailer(FileInfo file, IObservable<string> textToMatch, IObservable<ScrollRequest> scrollRequest, IScheduler scheduler=null)
{
    if (file == null) throw new ArgumentNullException(nameof(file));
    if (textToMatch == null) throw new ArgumentNullException(nameof(textToMatch));

    // Backing list for the visible page, exposed read-only via Lines.
    var lines = new SourceList<Line>();
    Lines = lines.AsObservableList();

    // For each search text, produce the matching line set. Searches shorter than
    // 3 characters are treated as "no filter" (LineMatches.None).
    var matcher = textToMatch.Select(searchText =>
    {
        if (string.IsNullOrEmpty(searchText) || searchText.Length < 3)
            return Observable.Return(LineMatches.None);

        // TakeWhile(Exists) + Repeat() restarts the watch when the file disappears and reappears.
        return file.WatchFile(scheduler: scheduler)
            .TakeWhile(notification => notification.Exists)
            .Repeat()
            .Match(s => s.Contains(searchText, StringComparison.OrdinalIgnoreCase));
    }).Switch()                 // only the newest search is live
        .Replay(1).RefCount();  // share one match stream, replay latest to late subscribers

    //temp mess for a few days
    // Maintains the line index of the file.
    var indexer = file.WatchFile(scheduler: scheduler)
        .TakeWhile(notification => notification.Exists)
        .Repeat()
        .Index()
        .Replay(1).RefCount();

    //count matching lines (all if no filter is specified)
    MatchedLines = indexer.CombineLatest(matcher, (indicies, matches) => matches == LineMatches.None ? indicies.Count : matches.Count);
    //count total lines
    TotalLines = indexer.Select(x => x.Count);

    //todo: plug in file missing or error into the screen

    // The core pipeline: whenever the index, the matches or the scroll position
    // changes, compute the visible page and read those lines from the file.
    // All three streams are serialised through one gate so CombineLatest never
    // sees concurrent notifications.
    var locker = new object();
    var theBeast = indexer.Synchronize(locker)
        .CombineLatest(matcher.Synchronize(locker), scrollRequest.Synchronize(locker),(idx, mtch, scroll) => new CombinedResult(scroll, mtch, idx))
        .Select(result =>
        {
            var scroll = result.Scroll;
            var allLines = result.Incidies;
            var matched = result.MatchedLines;

            // Resolve which line indices are on screen: tail mode pins to the end,
            // otherwise page from the requested first index. When a filter is
            // active (ChangedReason != None) the matched set constrains the page.
            IEnumerable<LineIndex> indices;
            if (result.MatchedLines.ChangedReason == LineMatchChangedReason.None)
            {
                indices = scroll.Mode == ScrollingMode.Tail
                    ? allLines.GetTail(scroll)
                    : allLines.GetFromIndex(scroll);
            }
            else
            {
                indices = scroll.Mode == ScrollingMode.Tail
                    ? allLines.GetTail(scroll, matched)
                    : allLines.GetFromIndex(scroll, matched);
            }

            // Read the page from disk. Lines past TailStartsAt (and not part of
            // the initial load) are timestamped so the UI can highlight fresh arrivals.
            return file.ReadLines(indices, (lineIndex, text) =>
            {
                var isEndOfTail = allLines.ChangedReason != LinesChangedReason.Loaded && lineIndex.Line > allLines.TailStartsAt;
                return new Line(lineIndex.Line, lineIndex.Index, text,isEndOfTail ? DateTime.Now : (DateTime?) null);
            }).ToArray();
        })
        // TODO: a transient read failure (file rolled mid-read) currently kills the
        // subscription; reinstate retry-with-backoff and surface errors to the screen.
        .Subscribe(newPage =>
        {
            // Diff the new page against the displayed one and apply the delta
            // in a single batched edit.
            lines.Edit(innerList =>
            {
                var removed = innerList.Except(newPage).ToArray();
                var added = newPage.Except(innerList).ToArray();
                if (removed.Any()) innerList.RemoveMany(removed);
                if (added.Any()) innerList.AddRange(added);
            });
        });

    // Fix: the original omitted theBeast from the composite, leaking the
    // subscription (tailing continued after the tailer was disposed).
    _cleanUp = new CompositeDisposable(Lines, lines, theBeast);
}