public void NotifiesOfSegmentWhenFileIsCreated()
{
    //need to make this test
    var file = Path.GetTempFileName();
    var info = new FileInfo(file);
    var refresher = new Subject<Unit>();
    var segmenter = new FileSegmenter(info.WatchFile(refresher), 1000);

    FileSegmentCollection result = null;

    using (var indexer = segmenter.Segments.Subscribe(segment => result = segment))
    {
        result.Should().NotBeNull();

        File.AppendAllLines(file, Enumerable.Range(1, 10000)
            .Select(i => $"This is line number {i.ToString("00000000")}").ToArray());
        refresher.Once();

        result.Should().NotBeNull();
        result.Count.Should().BeGreaterOrEqualTo(2);
        result.Segments.Select(fs => fs.Type).Should().Contain(FileSegmentType.Head);
        result.Segments.Select(fs => fs.Type).Should().Contain(FileSegmentType.Tail);
        result.FileLength.Should().Be(info.Length);

        File.AppendAllLines(file, Enumerable.Range(101, 10).Select(i => $"{i}"));
        refresher.Once();
        info.Refresh();
        result.FileLength.Should().Be(info.Length);

        File.Delete(file);
    }
    File.Delete(file);
}
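// The test above pulses the refresher subject via a Once() helper. A minimal sketch of such an extension is
// shown below; this is an assumption based on how the subject is used here (IndexLines below calls
// OnNext(Unit.Default) directly), not necessarily the project's actual implementation.
using System.Reactive;
using System.Reactive.Subjects;

public static class SubjectEx
{
    public static void Once(this ISubject<Unit> source)
    {
        //push a single notification so the downstream WatchFile(refresher) re-reads the file
        source.OnNext(Unit.Default);
    }
}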
public void Notify()
{
    var file = Path.GetTempFileName();
    File.Delete(file);

    var info = new FileInfo(file);
    var scheduler = new TestScheduler();

    FileNotification result = null;

    using (info.WatchFile(TimeSpan.FromSeconds(1), scheduler).Subscribe(x => result = x))
    {
        scheduler.AdvanceBySeconds(1);
        result.NotificationType.Should().Be(FileNotificationType.Missing);

        File.AppendAllLines(file, Enumerable.Range(1, 10).Select(i => i.ToString()));
        scheduler.AdvanceBySeconds(1);
        result.NotificationType.Should().Be(FileNotificationType.Created);
        result.NotificationType.Should().NotBe(0);

        File.AppendAllLines(file, Enumerable.Range(11, 10).Select(i => i.ToString()));
        scheduler.AdvanceBySeconds(1);
        result.NotificationType.Should().Be(FileNotificationType.Changed);

        File.Delete(file);
        scheduler.AdvanceBySeconds(1);
        result.NotificationType.Should().Be(FileNotificationType.Missing);
    }
}
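// Several of these tests advance virtual time via AdvanceBySeconds / AdvanceByMilliSeconds. Those are thin
// wrappers over TestScheduler.AdvanceBy(ticks); a minimal sketch of the helpers (an assumption, not
// necessarily the project's exact code) is:
using System;
using Microsoft.Reactive.Testing;

public static class TestSchedulerEx
{
    public static void AdvanceBySeconds(this TestScheduler scheduler, int seconds)
    {
        //advance virtual time by whole seconds, expressed in ticks
        scheduler.AdvanceBy(TimeSpan.FromSeconds(seconds).Ticks);
    }

    public static void AdvanceByMilliSeconds(this TestScheduler scheduler, int milliseconds)
    {
        //advance virtual time by milliseconds, expressed in ticks
        scheduler.AdvanceBy(TimeSpan.FromMilliseconds(milliseconds).Ticks);
    }
}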
public FileRewriter([NotNull] IObservable<FileNotification> fileWatcher,
    long startFrom = -1,
    TimeSpan? refreshPeriod = null,
    IScheduler scheduler = null)
{
    if (fileWatcher == null) throw new ArgumentNullException(nameof(fileWatcher));

    var locker = new object();

    //TODO: when the original file has rolled over, we need to clear out the local temp file
    Notifications = Observable.Create<FileNotification>(observer =>
    {
        var newFile = Path.GetTempFileName();
        Console.WriteLine("created {0}", newFile);

        var info = new FileInfo(newFile);

        var origStream = fileWatcher.Synchronize(locker).Publish();

        //Watch the new file
        var newStream = info.WatchFile(refreshPeriod, scheduler).Synchronize(locker);

        //While the original file is missing, surface its notifications; otherwise surface the new file's
        var resultStream = newStream
            .CombineLatest(origStream, (newNotification, origNotification) =>
                !origNotification.Exists ? origNotification : newNotification)
            .SubscribeSafe(observer);
        //var resultStream = newStream.SubscribeSafe(observer);

        //Create a new file from the old one, starting at the specified index
        var fileWriter = origStream
            .TakeWhile(notification => notification.Exists).Repeat()
            .Scan(new FileReadResult(Enumerable.Empty<string>(), startFrom), (state, notification) =>
            {
                //read lines from the source file, starting where the previous read finished
                return ReadLines(notification.FullName, state.EndPosition);
            })
            .Subscribe(result =>
            {
                //write the newly read lines to the temp file
                WriteLines(newFile, result.Lines.AsArray());
            });

        var connected = origStream.Connect();

        return Disposable.Create(() =>
        {
            Console.WriteLine("deleting {0}", newFile);
            connected.Dispose();
            fileWriter.Dispose();
            resultStream.Dispose();
            File.Delete(newFile);
        });
    });
}
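// FileRewriter relies on two private helpers that are not shown here: ReadLines, which reads any text added
// to the source file since the last known position, and WriteLines, which appends those lines to the temp
// file. A minimal sketch of how they could look (assuming FileReadResult simply carries the lines read plus
// the stream position they were read up to) is:
using System.Collections.Generic;
using System.IO;
using System.Text;

//(members of FileRewriter)
private static FileReadResult ReadLines(string fileName, long firstPosition)
{
    var lines = new List<string>();

    using (var stream = File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var reader = new StreamReader(stream, Encoding.UTF8, true))
    {
        //resume from the end of the previous read (startFrom = -1 means read from the beginning)
        if (firstPosition > 0 && firstPosition <= stream.Length)
            stream.Seek(firstPosition, SeekOrigin.Begin);

        string line;
        while ((line = reader.ReadLine()) != null)
            lines.Add(line);

        return new FileReadResult(lines, stream.Position);
    }
}

private static void WriteLines(string fileName, string[] lines)
{
    //append the newly read lines onto the rewritten (temp) file
    File.AppendAllLines(fileName, lines);
}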
public void CanStreamFileWithPredicate()
{
    var file = Path.GetTempFileName();
    var info = new FileInfo(file);
    int[] result = new int[0];
    var scheduler = new TestScheduler();

    File.AppendAllLines(file, Enumerable.Range(1, 100).Select(i => $"{i}").ToArray());

    //filter by odd numbers
    using (info.WatchFile(scheduler: scheduler)
        .ScanFile(l => int.Parse(l) % 2 == 1)
        .Subscribe(x => result = x.MatchingLines.Select(l => l.Line).ToArray()))
    {
        result.ShouldAllBeEquivalentTo(Enumerable.Range(1, 100).Where(i => i % 2 == 1));

        File.AppendAllLines(file, Enumerable.Range(101, 10).Select(i => $"{i}"));
        scheduler.AdvanceByMilliSeconds(250);
        File.Delete(file);

        result.ShouldAllBeEquivalentTo(Enumerable.Range(1, 110).Where(i => i % 2 == 1));
    }
}
public void CanStreamFile()
{
    var file = Path.GetTempFileName();
    var info = new FileInfo(file);
    int[] result = new int[0];
    var scheduler = new TestScheduler();

    File.AppendAllLines(file, Enumerable.Range(1, 100).Select(i => $"{i}").ToArray());

    using (info.WatchFile(TimeSpan.FromMilliseconds(1), scheduler)
        .Index()
        .Subscribe(x => result = x.Lines.Select((_, idx) => idx + 1).ToArray()))
    {
        scheduler.AdvanceByMilliSeconds(1);
        result.ShouldAllBeEquivalentTo(Enumerable.Range(1, 100));

        File.AppendAllLines(file, Enumerable.Range(101, 10).Select(i => $"{i}"));
        scheduler.AdvanceByMilliSeconds(1);
        File.Delete(file);

        result.ShouldAllBeEquivalentTo(Enumerable.Range(1, 110));
    }
}
public void IndexLines()
{
    var file = Path.GetTempFileName();
    var info = new FileInfo(file);
    int[] result = new int[0];
    var subject = new Subject<Unit>();

    File.AppendAllLines(file, Enumerable.Range(1, 100).Select(i => $"{i}").ToArray());

    using (info.WatchFile(subject)
        .Index()
        .Subscribe(x => result = x.Lines.Select((_, idx) => idx + 1).ToArray()))
    {
        result.ShouldAllBeEquivalentTo(Enumerable.Range(1, 100));

        File.AppendAllLines(file, Enumerable.Range(101, 10).Select(i => $"{i}"));
        subject.OnNext(Unit.Default);
        File.Delete(file);

        result.ShouldAllBeEquivalentTo(Enumerable.Range(1, 110));
    }
}
public void MatchLines()
{
    var file = Path.GetTempFileName();
    var info = new FileInfo(file);
    int[] result = new int[0];

    File.AppendAllLines(file, Enumerable.Range(1, 100).Select(i => $"{i}").ToArray());

    var subject = new Subject<Unit>();

    //filter by odd numbers
    using (info.WatchFile(subject)
        .Match(i => int.Parse(i) % 2 == 1)
        .Subscribe(x => result = x.Lines))
    {
        var expected = Enumerable.Range(1, 100).Where(i => i % 2 == 1).Select(i => i - 1).ToArray();
        result.ShouldAllBeEquivalentTo(expected);

        File.AppendAllLines(file, Enumerable.Range(101, 10).Select(i => $"{i}"));
        subject.OnNext(Unit.Default);
        File.Delete(file);

        expected = Enumerable.Range(1, 110).Where(i => i % 2 == 1).Select(i => i - 1).ToArray();
        result.ShouldAllBeEquivalentTo(expected);
    }
}
public FileTailer(FileInfo file,
    IObservable<FileSearchResult> filter,
    IObservable<ScrollRequest> scrollRequest,
    ILogger logger,
    IScheduler scheduler = null)
{
    if (file == null) throw new ArgumentNullException(nameof(file));
    if (filter == null) throw new ArgumentNullException(nameof(filter));
    if (logger == null) throw new ArgumentNullException(nameof(logger));

    logger.Info($"Constructing file tailer for {file.FullName}");

    var lines = new SourceList<Line>();
    Lines = lines.AsObservableList();

    var isBusy = new Subject<bool>();
    IsSearching = isBusy.AsObservable();

    var locker = new object();
    scrollRequest = scrollRequest.Synchronize(locker);

    var fileWatcher = file.WatchFile(scheduler: scheduler)
        .DistinctUntilChanged()
        .TakeWhile(notification => notification.Exists).Repeat()
        .Replay(1).RefCount();

    var indexer = fileWatcher.Index().Replay(1).RefCount();

    //compare the latest lines with the latest filter, and only take the filtered results if the filter is not empty
    var latestLines = indexer.Cast<ILineProvider>().Synchronize(locker);
    var latestFilter = filter.Cast<ILineProvider>().Synchronize(locker);
    var latest = latestLines.CombineLatest(latestFilter, (l, f) => f.IsEmpty ? l : f);

    MatchedLines = latest.Select(provider => provider.Count);
    TotalLines = latestLines.Select(x => x.Count);
    FileSize = fileWatcher.Select(notification => notification.Size);
    IsLoading = indexer.Take(1).Select(_ => false).StartWith(true);

    var aggregator = latest
        .CombineLatest(scrollRequest, (currentLines, scroll) => currentLines.ReadLines(scroll).ToArray())
        .Subscribe(currentPage =>
        {
            var previous = lines.Items.ToArray();
            var added = currentPage.Except(previous).ToArray();
            var removed = previous.Except(currentPage).ToArray();

            lines.Edit(innerList =>
            {
                if (removed.Any()) innerList.RemoveMany(removed);
                if (added.Any()) innerList.AddRange(added);
            });
        });

    //var aggregator = latest.CombineLatest(scrollRequest, (currentLines, scroll) =>
    //{
    //    //TODO: Read the entire page, then check which lines should be added and which should be removed.
    //    //As part of that work, get the maximum index [this is the head!]
    //    //Debug.WriteLine($"{scroll.Mode}, {scroll.FirstIndex}, {scroll.PageSize}");
    //    var currentPage = currentLines.GetIndicies(scroll).ToArray();
    //    var previous = lines.Items.Select(l => l.LineInfo).ToArray();
    //    var removed = previous.Except(currentPage, LineInfo.LineIndexComparer).ToArray();
    //    var added = currentPage.Except(previous, LineInfo.LineIndexComparer).ToArray();

    //    //calculate added and removed lines
    //    var removedLines = lines.Items.Where(l => removed.Contains(l.LineInfo)).ToArray();

    //    Func<long, DateTime?> isTail = l =>
    //    {
    //        //account for time with tail (i.e. add time to ILineProvider.TailStartsAt)
    //        var tail = currentLines.TailStartsAt;
    //        var onTail = tail != -1 && l >= tail;
    //        return onTail ? DateTime.Now : (DateTime?)null;
    //    };

    //    //finally we can load the lines from the file. TODO: add encoding back in
    //    var newLines = file.ReadLine(added, (lineIndex, text, position) => new Line(lineIndex, text, isTail(position)), Encoding.UTF8).ToArray();

    //    return new { NewLines = newLines, OldLines = removedLines };
    //})
    //.Where(fn => fn.NewLines.Length + fn.OldLines.Length > 0)
    //.Subscribe(changes =>
    //{
    //    //update the observable list
    //    lines.Edit(innerList =>
    //    {
    //        if (changes.OldLines.Any()) innerList.RemoveMany(changes.OldLines);
    //        if (changes.NewLines.Any()) innerList.AddRange(changes.NewLines);
    //    });
    //});

    _cleanUp = new CompositeDisposable(Lines, lines, aggregator, Disposable.Create(() => isBusy.OnCompleted()));
}
public FileTailer(FileInfo file,
    IObservable<string> textToMatch,
    IObservable<ScrollRequest> scrollRequest,
    IScheduler scheduler = null)
{
    if (file == null) throw new ArgumentNullException(nameof(file));
    if (textToMatch == null) throw new ArgumentNullException(nameof(textToMatch));

    //create a stream of scan results for lines which contain the observed search text
    var matchedLines = textToMatch
        .Select(searchText =>
        {
            Func<string, bool> predicate = null;
            if (!string.IsNullOrEmpty(searchText))
                predicate = s => s.Contains(searchText, StringComparison.OrdinalIgnoreCase);

            return file.WatchFile(scheduler: scheduler ?? Scheduler.Default)
                .TakeWhile(notification => notification.Exists)
                .Repeat()
                .ScanFile(predicate);
        }).Switch()
        .Replay(1).RefCount();

    MatchedLines = matchedLines.Select(x => x.MatchingLines.Length);
    TotalLines = matchedLines.Select(x => x.TotalLines);

    //TODO: surface file-missing and error states to the screen

    var lines = new SourceList<Line>();
    Lines = lines.AsObservableList();

    //This is the beast! Dynamically combine the lines requested by the consumer
    //with the lines which exist in the file. This enables proper virtualisation of the file.
    var scroller = matchedLines
        .CombineLatest(scrollRequest, (scanResult, request) => new { scanResult, request })
        .Subscribe(x =>
        {
            var mode = x.request.Mode;
            var pageSize = x.request.PageSize;
            var endOfTail = x.scanResult.EndOfTail;
            var isInitial = x.scanResult.Index == 0;
            var allLines = x.scanResult.MatchingLines;
            var previousPage = lines.Items.Select(l => new LineIndex(l.Number, l.Index)).ToArray();

            //In tail mode take the last page; otherwise take the page size and start index from the request
            var currentPage = (mode == ScrollingMode.Tail
                ? allLines.Skip(allLines.Length - pageSize).Take(pageSize).ToArray()
                : allLines.Skip(x.request.FirstIndex).Take(pageSize)).ToArray();

            var added = currentPage.Except(previousPage).ToArray();
            var removed = previousPage.Except(currentPage).Select(li => li.Line).ToArray();
            if (added.Length + removed.Length == 0) return;

            try
            {
                var addedLines = file.ReadLines(added, (lineIndex, text) =>
                {
                    var isEndOfTail = !isInitial && lineIndex.Line > endOfTail;
                    return new Line(lineIndex.Line, lineIndex.Index, text, isEndOfTail ? DateTime.Now : (DateTime?)null);
                }).ToArray();

                //get the old lines from the current collection
                var removedLines = lines.Items.Where(l => removed.Contains(l.Number)).ToArray();

                //finally reflect the changes in the list
                lines.Edit(innerList =>
                {
                    innerList.RemoveMany(removedLines);
                    innerList.AddRange(addedLines);
                });
            }
            catch (Exception)
            {
                //There is a very small chance of an error here, but if one is caught the next successful read will rectify it.
                //TODO: 1. Feed back to the user that streaming has stopped
                //TODO: 2. Replace the ReadLines(..) call with the select of an observable
            }
        });

    _cleanUp = new CompositeDisposable(Lines, scroller, lines);
}
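// The tailer above depends on a ReadLines extension that pulls specific lines out of the file and projects
// them through a selector. The sketch below is one simple way it could work, assuming LineIndex.Line is a
// 1-based line number (adjust if the project uses zero-based indexes); the real implementation may well seek
// by byte position instead of walking the whole file.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

public static class FileInfoReadEx
{
    public static IEnumerable<T> ReadLines<T>(this FileInfo source,
        IEnumerable<LineIndex> lineIndicies,
        Func<LineIndex, string, T> selector)
    {
        //index the requested lines by their line number for quick lookup
        var requested = lineIndicies.ToDictionary(li => li.Line);

        using (var stream = File.Open(source.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (var reader = new StreamReader(stream))
        {
            string text;
            var lineNumber = 0;
            while ((text = reader.ReadLine()) != null)
            {
                lineNumber++;

                LineIndex lineIndex;
                if (requested.TryGetValue(lineNumber, out lineIndex))
                    yield return selector(lineIndex, text);
            }
        }
    }
}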
public FileTailer(FileInfo file,
    IObservable<string> textToMatch,
    IObservable<ScrollRequest> scrollRequest,
    IScheduler scheduler = null)
{
    if (file == null) throw new ArgumentNullException(nameof(file));
    if (textToMatch == null) throw new ArgumentNullException(nameof(textToMatch));

    var lines = new SourceList<Line>();
    Lines = lines.AsObservableList();

    var locker = new object();
    scrollRequest = scrollRequest.Synchronize(locker);

    var metronome = Observable
        .Interval(TimeSpan.FromMilliseconds(250), scheduler ?? Scheduler.Default)
        .ToUnit()
        .Replay().RefCount();

    //temp mess for a few days
    var indexer = file.WatchFile(metronome)
        .TakeWhile(notification => notification.Exists)
        .Repeat()
        .Index()
        .Synchronize(locker)
        .Replay(1).RefCount();

    var matcher = textToMatch.Select(searchText =>
    {
        if (string.IsNullOrEmpty(searchText) || searchText.Length < 3)
            return Observable.Return(LineMatches.None);

        return file.WatchFile(metronome)
            .TakeWhile(notification => notification.Exists)
            .Repeat()
            .Match(s => s.Contains(searchText, StringComparison.OrdinalIgnoreCase));
    }).Switch()
    .Synchronize(locker)
    .Replay(1).RefCount();

    //count matching lines (all if no filter is specified)
    MatchedLines = indexer.CombineLatest(matcher, (indicies, matches) =>
        matches == LineMatches.None ? indicies.Count : matches.Count);

    //count total lines
    TotalLines = indexer.Select(x => x.Count);

    FileSize = file.WatchFile(metronome).Select(notification => notification.Size);

    var aggregator = indexer
        .CombineLatest(matcher, scrollRequest, (idx, mtch, scroll) => new CombinedResult(scroll, mtch, idx))
        .Select(result =>
        {
            var scroll = result.Scroll;
            var indicies = result.Incidies;
            var matched = result.MatchedLines;

            IEnumerable<LineIndex> indices;
            if (result.MatchedLines.ChangedReason == LineMatchChangedReason.None)
            {
                indices = scroll.Mode == ScrollingMode.Tail
                    ? indicies.GetTail(scroll)
                    : indicies.GetFromIndex(scroll);
            }
            else
            {
                indices = scroll.Mode == ScrollingMode.Tail
                    ? indicies.GetTail(scroll, matched)
                    : indicies.GetFromIndex(scroll, matched);
            }

            var currentPage = indices.ToArray();
            var previous = lines.Items.Select(l => l.LineIndex).ToArray();
            var removed = previous.Except(currentPage).ToArray();
            var removedLines = lines.Items.Where(l => removed.Contains(l.LineIndex)).ToArray();
            var added = currentPage.Except(previous).ToArray();

            //finally we can load the lines from the file
            var newLines = file.ReadLines(added, (lineIndex, text) =>
            {
                var isEndOfTail = indicies.ChangedReason != LinesChangedReason.Loaded
                                  && lineIndex.Line > indicies.TailStartsAt;
                return new Line(lineIndex, text, isEndOfTail ? DateTime.Now : (DateTime?)null);
            }, indicies.Encoding).ToArray();

            return new { NewLines = newLines, OldLines = removedLines };
        })
        .RetryWithBackOff((Exception error, int attempts) =>
        {
            //TODO: surface file-missing or error states to the screen
            return TimeSpan.FromSeconds(1);
        })
        .Where(fn => fn.NewLines.Length + fn.OldLines.Length > 0)
        .Subscribe(changes =>
        {
            //update the observable list
            lines.Edit(innerList =>
            {
                if (changes.OldLines.Any()) innerList.RemoveMany(changes.OldLines);
                if (changes.NewLines.Any()) innerList.AddRange(changes.NewLines);
            });
        });

    _cleanUp = new CompositeDisposable(Lines, lines, aggregator);
}
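// RetryWithBackOff is not part of Rx itself. A minimal sketch of an extension with the shape used above
// (resubscribe after a delay computed from the error and the attempt count) could look like this; it is an
// illustration of the pattern, not necessarily the project's implementation.
using System;
using System.Reactive.Concurrency;
using System.Reactive.Linq;

public static class RetryEx
{
    public static IObservable<T> RetryWithBackOff<T>(this IObservable<T> source,
        Func<Exception, int, TimeSpan> delaySelector,
        IScheduler scheduler = null)
    {
        scheduler = scheduler ?? Scheduler.Default;

        IObservable<T> WithRetry(int attempt)
        {
            //on error, wait for the selected delay and then resubscribe with an incremented attempt count
            return source.Catch<T, Exception>(error =>
                WithRetry(attempt + 1).DelaySubscription(delaySelector(error, attempt), scheduler));
        }

        return WithRetry(1);
    }
}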
public FileTailer(FileInfo file,
    IObservable<string> textToMatch,
    IObservable<ScrollRequest> scrollRequest,
    IScheduler scheduler = null)
{
    if (file == null) throw new ArgumentNullException(nameof(file));
    if (textToMatch == null) throw new ArgumentNullException(nameof(textToMatch));

    var lines = new SourceList<Line>();
    Lines = lines.AsObservableList();

    var matcher = textToMatch.Select(searchText =>
    {
        if (string.IsNullOrEmpty(searchText) || searchText.Length < 3)
            return Observable.Return(LineMatches.None);

        return file.WatchFile(scheduler: scheduler)
            .TakeWhile(notification => notification.Exists)
            .Repeat()
            .Match(s => s.Contains(searchText, StringComparison.OrdinalIgnoreCase));
    }).Switch()
    .Replay(1).RefCount();

    //temp mess for a few days
    var indexer = file.WatchFile(scheduler: scheduler)
        .TakeWhile(notification => notification.Exists)
        .Repeat()
        .Index()
        .Replay(1).RefCount();

    //count matching lines (all if no filter is specified)
    MatchedLines = indexer.CombineLatest(matcher, (indicies, matches) =>
        matches == LineMatches.None ? indicies.Count : matches.Count);

    //count total lines
    TotalLines = indexer.Select(x => x.Count);

    //TODO: surface file-missing and error states to the screen

    var locker = new object();
    var theBeast = indexer.Synchronize(locker)
        .CombineLatest(matcher.Synchronize(locker), scrollRequest.Synchronize(locker),
            (idx, mtch, scroll) => new CombinedResult(scroll, mtch, idx))
        .Select(result =>
        {
            var scroll = result.Scroll;
            var allLines = result.Incidies;
            var matched = result.MatchedLines;

            IEnumerable<LineIndex> indices;
            if (result.MatchedLines.ChangedReason == LineMatchChangedReason.None)
            {
                indices = scroll.Mode == ScrollingMode.Tail
                    ? allLines.GetTail(scroll)
                    : allLines.GetFromIndex(scroll);
            }
            else
            {
                indices = scroll.Mode == ScrollingMode.Tail
                    ? allLines.GetTail(scroll, matched)
                    : allLines.GetFromIndex(scroll, matched);
            }

            return file.ReadLines(indices, (lineIndex, text) =>
            {
                var isEndOfTail = allLines.ChangedReason != LinesChangedReason.Loaded
                                  && lineIndex.Line > allLines.TailStartsAt;
                return new Line(lineIndex.Line, lineIndex.Index, text, isEndOfTail ? DateTime.Now : (DateTime?)null);
            }).ToArray();
        })
        //.RetryWithBackOff((error, attempts) =>
        //{
        //    //TODO: Log
        //    return TimeSpan.FromSeconds(1);
        //})
        .Subscribe(newPage =>
        {
            //update the observable list
            lines.Edit(innerList =>
            {
                var removed = innerList.Except(newPage).ToArray();
                var added = newPage.Except(innerList).ToArray();
                if (removed.Any()) innerList.RemoveMany(removed);
                if (added.Any()) innerList.AddRange(added);
            });
        });

    ////This is the beast! Dynamically combine the lines requested by the consumer
    ////with the lines which exist in the file. This enables proper virtualisation of the file.
    //var scroller = matchedLines
    //    .CombineLatest(scrollRequest, (scanResult, request) => new { scanResult, request })
    //    .Subscribe(x =>
    //    {
    //        var mode = x.request.Mode;
    //        var pageSize = x.request.PageSize;
    //        var endOfTail = x.scanResult.EndOfTail;
    //        var isInitial = x.scanResult.Index == 0;
    //        var allLines = x.scanResult.MatchingLines;
    //        var previousPage = lines.Items.Select(l => new LineIndex(l.Number, l.Index, 0, 0)).ToArray();

    //        //In tail mode take the last page; otherwise take the page size and start index from the request
    //        var currentPage = (mode == ScrollingMode.Tail
    //            ? allLines.Skip(allLines.Length - pageSize).Take(pageSize).ToArray()
    //            : allLines.Skip(x.request.FirstIndex).Take(pageSize)).ToArray();

    //        var added = currentPage.Except(previousPage).ToArray();
    //        var removed = previousPage.Except(currentPage).Select(li => li.Line).ToArray();
    //        if (added.Length + removed.Length == 0) return;

    //        try
    //        {
    //            var addedLines = file.ReadLines(added, (lineIndex, text) =>
    //            {
    //                var isEndOfTail = !isInitial && lineIndex.Line > endOfTail;
    //                return new Line(lineIndex.Line, lineIndex.Index, text, isEndOfTail ? DateTime.Now : (DateTime?)null);
    //            }).ToArray();

    //            //get the old lines from the current collection
    //            var removedLines = lines.Items.Where(l => removed.Contains(l.Number)).ToArray();

    //            //finally reflect the changes in the list
    //            lines.Edit(innerList =>
    //            {
    //                innerList.RemoveMany(removedLines);
    //                innerList.AddRange(addedLines);
    //            });
    //        }
    //        catch (Exception)
    //        {
    //            //There is a very small chance of an error here, but if one is caught the next successful read will rectify it.
    //            //TODO: 1. Feed back to the user that streaming has stopped
    //            //TODO: 2. Replace the ReadLines(..) call with the select of an observable
    //        }
    //    });

    //include the subscription in the clean-up so it is disposed with the tailer
    _cleanUp = new CompositeDisposable(Lines, lines, theBeast);
}
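// A rough usage sketch for the tailer above: feed it a search-text stream and a scroll-request stream, then
// observe the counts and the virtualised Lines list. Pushing ScrollRequest values is left to the caller
// since its constructor is not shown here, and the sketch assumes FileTailer implements IDisposable by
// disposing _cleanUp.
using System;
using System.IO;
using System.Reactive.Disposables;
using System.Reactive.Subjects;
using DynamicData;

public static class FileTailerUsage
{
    public static IDisposable TailFile(string path)
    {
        var searchText = new BehaviorSubject<string>(string.Empty);  //no filter to start with
        var scrollRequests = new Subject<ScrollRequest>();           //pushed by the UI (tail mode / user scroll)

        var tailer = new FileTailer(new FileInfo(path), searchText, scrollRequests);

        var totalLines = tailer.TotalLines.Subscribe(count => Console.WriteLine($"Total lines: {count}"));
        var matchedLines = tailer.MatchedLines.Subscribe(count => Console.WriteLine($"Matched lines: {count}"));

        //Lines is an IObservableList<Line>; Connect() surfaces the adds/removes driven by scrolling
        var lineChanges = tailer.Lines.Connect().Subscribe(changes => Console.WriteLine($"{changes.Count} change(s)"));

        return new CompositeDisposable(totalLines, matchedLines, lineChanges, tailer);
    }
}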