public void DoNotThrowAWobblyWhenRemovingAMutatedValue()
        {

            var pageController = new PageController();
            var sortController = new SortController<TestVm>(SortExpressionComparer<TestVm>.Ascending(t => t.DateFavorited ?? DateTime.MinValue));
            var filterController = new FilterController<TestVm>(myVm => myVm.Id != 0);
            var items = new ObservableCollectionExtended<TestVm>();
            var itemCache = new SourceCache<TestVm, int>(myVm => myVm.Id);

            var item1 = new TestVm(1) { DateFavorited = DateTime.Now };
            var item2 = new TestVm(2) { DateFavorited = DateTime.Now };

            itemCache.AddOrUpdate(item1);
            itemCache.AddOrUpdate(item2);

            bool error = false;
            itemCache.Connect()
                .Filter(filterController)
                .Sort(sortController)
                .Page(pageController) //error doesn't occur with paging disabled
                .Bind(items)
                .Subscribe(changes => { }, ex => error = true);

            pageController.Change(new PageRequest(1, 100));

            //NB: the error never occurred when it was the first item that was removed
            item2.DateFavorited = null;
            itemCache.Remove(item2); //ERROR!

            Assert.IsFalse(error, "Error has been thrown");
        }
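A minimal workaround sketch for the failing case above (same controller-based DynamicData API; purely illustrative, not part of the original test): remove the item while its sort key is still intact, then mutate it, so the sorted/paged pipeline never has to locate an item whose key has changed underneath it.

            //Hypothetical workaround: mutate only after the remove has propagated,
            //so the sort comparer can still locate item2 by its original key.
            itemCache.Remove(item2);
            item2.DateFavorited = null; //safe: item2 is no longer in the cache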
        public SearchInfoCollection(ISearchMetadataCollection searchMetadataCollection,
            ISearchMetadataFactory searchMetadataFactory,
            IFileWatcher fileWatcher)
        {
            _metadataCollection = searchMetadataCollection;
            _searchMetadataFactory = searchMetadataFactory;
            _fileWatcher = fileWatcher;

            //Add a complete file display
            All = _fileWatcher.Latest.Index().Replay(1).RefCount();

            //create a collection with 1 item, which is used to show the entire file
            var systemSearches = new SourceCache<SearchInfo, string>(t => t.SearchText);
            systemSearches.AddOrUpdate(new SearchInfo("<All>", All, SearchType.All));

            //create a collection of all possible user filters
            var userSearches = searchMetadataCollection.Metadata
                .Connect(meta => meta.Filter)
                .IgnoreUpdateWhen((current,previous)=> SearchMetadata.EffectsFilterComparer.Equals(current, previous))
                .Transform(meta =>
                {
                    var latest = _fileWatcher.Latest
                        .Search(meta.BuildPredicate())
                        .Replay(1).RefCount();

                    return new SearchInfo(meta.SearchText, latest, SearchType.User);
                });

            //combine the results into a single collection
            Searches = systemSearches.Connect()
                    .Or(userSearches)
                    .AsObservableCache();

            _cleanUp = new CompositeDisposable(Searches, systemSearches);
        }
        public SearchInfoCollection(ISearchMetadataCollection searchMetadataCollection, IFileWatcher fileWatcher)
        {
            _metadataCollection = searchMetadataCollection;
            _fileWatcher = fileWatcher;

            //Add a complete file display
            All = fileWatcher.Latest.Index().Replay(1).RefCount();

            //create a collection with 1 item, which is used to show the entire file
            var systemSearches = new SourceCache<SearchInfo, CaseInsensitiveString>(t => (CaseInsensitiveString)t.SearchText);
            systemSearches.AddOrUpdate(new SearchInfo("<All>", All, SearchType.All));

            //create a collection of all possible user filters
            var userSearches = searchMetadataCollection.Metadata
                .Connect(meta => meta.Filter)
                .IgnoreUpdateWhen((current,previous)=>current.Filter == previous.Filter)
                .Transform(meta =>
                {
                    var latest = _fileWatcher.Latest
                        .Search(s => s.Contains(meta.SearchText, StringComparison.OrdinalIgnoreCase))
                        .Replay(1).RefCount();

                    return new SearchInfo(meta.SearchText, latest, SearchType.User);
                });

            //combine the results into a single collection
            Searches = systemSearches.Connect()
                    .Or(userSearches)
                    .AsObservableCache();

            _cleanUp = new CompositeDisposable(Searches, systemSearches);
        }
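Both constructors lean on the same Rx sharing idiom for All and the per-filter streams. A minimal sketch of what Replay(1).RefCount() buys (expensiveSource is an illustrative stand-in, not a type from this code base):

            //Late subscribers immediately receive the latest value, and the underlying
            //source is subscribed at most once for as long as anyone is listening.
            IObservable<long> expensiveSource = Observable.Interval(TimeSpan.FromSeconds(1));
            var shared = expensiveSource.Replay(1).RefCount();

            var a = shared.Subscribe(x => Console.WriteLine($"a: {x}"));
            var b = shared.Subscribe(x => Console.WriteLine($"b: {x}")); //no second Interval is started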
        public void SkipInitialDoesNotReturnTheFirstBatchOfData()
        {
            bool updateReceived = false;

            var cache = new SourceCache<Person, string>(p => p.Name);


            var deferStream = cache.Connect().SkipInitial()
                                .Subscribe(changes => updateReceived = true);

            Assert.IsFalse(updateReceived, "No update should be received");

            cache.AddOrUpdate(new Person("P1", 1));

            Assert.IsFalse(updateReceived, "No update should be received for initial batch of changes");

            cache.AddOrUpdate(new Person("P2", 2));
            Assert.IsTrue(updateReceived, "Subsequent change should be received");
            deferStream.Dispose();
        }
        public void TransformError()
        {

            bool completed = false;
            bool error = false;


            var cache = new SourceCache<Entity, int>(e => e.Key);

            var subscriber = cache.Connect()
                            .Transform(e => new TransformEntityWithError(e))

                            .Finally(() => completed = true)
                            .Subscribe(updates => { Console.WriteLine(); }, ex => error = true);

            cache.AddOrUpdate(Enumerable.Range(0, 10000).Select(_ => new Entity()).ToArray());
            cache.AddOrUpdate(new Entity());


            subscriber.Dispose();

            Assert.IsTrue(error, "Error has not been invoked");
            Assert.IsTrue(completed, "Completed has not been called");
        }
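If the stream should survive individual projection failures rather than terminate, DynamicData also offers a TransformSafe variant that routes per-item errors to a handler; the overload below is a sketch from memory and worth checking against the version in use:

            //Sketch: failed transforms are reported to the error handler instead of
            //tearing down the subscription (assumed TransformSafe overload).
            var safeSubscriber = cache.Connect()
                .TransformSafe(e => new TransformEntityWithError(e),
                    error => Console.WriteLine($"Skipped item {error.Key}: {error.Exception.Message}"))
                .Subscribe();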
Example #6
        public void CachePerformance(int n)
        {

            /*
                Tricks to make it fast:

                1) Use cache.AddOrUpdate(Enumerable.Range(0, n))
                   instead of: for (int i = 0; i < n; i++) cache.AddOrUpdate(i);

                2) Uncomment Buffer(n/10).FlattenBufferResult()
                   or just use the buffer-by-time overloads.

                With both of these the cost per update becomes almost negligible.

            */
            var cache = new SourceCache<int, int>(i => i);
            double calculated = 0;

            var sw = Stopwatch.StartNew();

            var summation = cache.Connect()
                .StdDev(i => i)
                .Subscribe(result => calculated = result);


            //1. this is very slow if there are loads of updates (each update causes a new summation)
            for (int i = 1; i < n; i++)
                cache.AddOrUpdate(i);

            //2. much faster to do this (the whole range is one update and one calculation):
            //  cache.AddOrUpdate(Enumerable.Range(0,n));

            sw.Stop();

            summation.Dispose();
            cache.Dispose();

            Console.WriteLine("Total items: {0}. Value = {1}", n, calculated);
            Console.WriteLine("Cache: {0} updates took {1} ms {2:F3} ms each. {3}", n, sw.ElapsedMilliseconds, sw.Elapsed.TotalMilliseconds / n, DateTime.Now.ToShortDateString());

        }
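Spelling out the fast path described in the comments (the 250 ms window is an arbitrary choice; Buffer plus FlattenBufferResult coalesce a burst of change sets into one, so StdDev recalculates once per batch rather than once per update):

            var cache = new SourceCache<int, int>(i => i);
            double calculated = 0;

            var summation = cache.Connect()
                .Buffer(TimeSpan.FromMilliseconds(250)) //collect bursts of change sets
                .FlattenBufferResult()                  //replay each batch as a single change set
                .StdDev(i => i)
                .Subscribe(result => calculated = result);

            //the whole range arrives as one change set => one recalculation
            cache.AddOrUpdate(Enumerable.Range(0, 10000));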
        public void DeferUntilLoadedDoesNothingUntilDataHasBeenReceived()
        {
            bool updateReceived = false;
            IChangeSet<Person, string> result = null;

            var cache = new SourceCache<Person, string>(p => p.Name);


            var deferStream = cache.Connect().DeferUntilLoaded()
                                .Subscribe(changes =>
                                           {
                                               updateReceived = true;
                                               result = changes;
                                           });

            Assert.IsFalse(updateReceived, "No update should be received");
            cache.AddOrUpdate(new Person("Test", 1));

            Assert.IsTrue(updateReceived, "First change should be received");
            Assert.AreEqual(1, result.Adds);
            Assert.AreEqual(new Person("Test", 1), result.First().Current);
            deferStream.Dispose();
        }
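Read alongside the SkipInitial test above, the two operators address opposite needs; a brief hedged contrast:

            //DeferUntilLoaded: emit nothing until data exists, then the first batch and everything after.
            cache.Connect().DeferUntilLoaded().Subscribe(changes => { /* first batch included */ });

            //SkipInitial: swallow the first batch, observe only subsequent changes.
            cache.Connect().SkipInitial().Subscribe(changes => { /* later changes only */ });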
        public FileSearcher([NotNull] IObservable<FileSegmentCollection> fileSegments, 
            [NotNull] Func<string, bool> predicate,
            int arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered = 50000,
            Encoding encoding = null,
            IScheduler scheduler =null)
        {
            if (fileSegments == null) throw new ArgumentNullException(nameof(fileSegments));
            if (predicate == null) throw new ArgumentNullException(nameof(predicate));

            _predicate = predicate;
            _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered = arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered;
            _scheduler = scheduler ?? Scheduler.Default;

            var shared = fileSegments.Replay(1).RefCount();

            var infoSubscriber = shared.Select(segments => segments.Info)
                .Take(1)
                .Subscribe(info =>
                {
                    Info = info;
                    Encoding = encoding ?? info.GetEncoding();
                });
            //Create a cache of segments which are to be searched
            var segmentCache = shared.Select(s => s.Segments)
                .ToObservableChangeSet(s => s.Key)
                .IgnoreUpdateWhen((current,previous)=>current==previous)
                .AsObservableCache();

            //manually maintained search results and status
            var searchData= new SourceCache<FileSegmentSearch, FileSegmentKey>(s=>s.Key);

            SearchResult = searchData.Connect()
                .Flatten()
                .Select(change=>change.Current)
                .Scan((FileSearchResult)null, (previous, current) => previous==null
                                ? new FileSearchResult(current, Info, Encoding, _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered)
                                : new FileSearchResult(previous, current, Info, Encoding, _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered))
                .StartWith(FileSearchResult.None)
                .Replay(1).RefCount();

            //initialise a pending state for all segments
            var loader = segmentCache.Connect()
                .Transform(fs => new FileSegmentSearch(fs))
                .WhereReasonsAre(ChangeReason.Add)
                .PopulateInto(searchData);

            //scan end of file, then tail
            var tailSearch = segmentCache.WatchValue(FileSegmentKey.Tail)
                .Scan((FileSegmentSearch) null, (previous, current) =>
                {
                    if (previous == null)
                    {
                        var result = Search(current.Start, current.End);
                        return new FileSegmentSearch(current, result);
                    }
                    else
                    {
                        var result = Search(previous.Segment.End, current.End);
                        return result == null ? previous : new FileSegmentSearch(previous, result);
                    }
                })
                .DistinctUntilChanged()
                .Publish();

            //start tailing
            var tailSubscriber = tailSearch.Subscribe(tail => searchData.AddOrUpdate(tail));

            //load the rest of the file segment by segment, reporting status after each search
            var headSubscriber = tailSearch.Take(1).WithContinuation(() =>
            {
                var locker = new object();

                return searchData.Connect(fss=>fss.Segment.Type == FileSegmentType.Head )
                    .Do(head => Debug.WriteLine(head.First().Current))
                    .WhereReasonsAre(ChangeReason.Add)
                    .SelectMany(changes=>changes.Select(c=>c.Current).OrderByDescending(c=>c.Segment.Index).ToArray())
                    .ObserveOn(_scheduler)
                    .Synchronize(locker)
                    .Do(head => searchData.AddOrUpdate(new FileSegmentSearch(head.Segment,FileSegmentSearchStatus.Searching) ))
                   .Select(fileSegmentSearch =>
                   {
                       /*
                            This hack imposes a limitation on the number of items returned as memory can be
                            absolutely hammered [I have seen 20MB memory when searching a 1 GB file - obviously not an option]
                           TODO: A proper solution.
                                   1. How about index to file?
                                   2. Allow auto pipe of large files
                                   3. Allow user to have some control here
                       */

                       var sum = searchData.Items.Sum(fss => fss.Lines.Length);

                       if (sum >= _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered)
                       {
                           return new FileSegmentSearch(fileSegmentSearch,  FileSegmentSearchStatus.Complete);
                       }
                       var result = Search(fileSegmentSearch.Segment.Start, fileSegmentSearch.Segment.End);
                       return new FileSegmentSearch(fileSegmentSearch, result);
                   });
            })
               .Subscribe(head => searchData.AddOrUpdate(head));

            _cleanUp = new CompositeDisposable(
                segmentCache,
                loader,
                tailSubscriber,
                headSubscriber,
                tailSearch.Connect(),
                infoSubscriber);
        }
Example #9
        public FileSearch([NotNull] IObservable<FileSegmentCollection> fileSegments, [NotNull] Func<string, bool> predicate,
             Encoding encoding = null,
            IScheduler scheduler =null)
        {
            if (fileSegments == null) throw new ArgumentNullException(nameof(fileSegments));
            if (predicate == null) throw new ArgumentNullException(nameof(predicate));

            _predicate = predicate;
            _scheduler = scheduler ?? Scheduler.Default;

            var shared = fileSegments.Replay(1).RefCount();

            var infoSubscriber = shared.Select(segments => segments.Info)
                .Take(1)
                .Subscribe(info =>
                {
                    Info = info;
                    Encoding = encoding ?? info.GetEncoding();
                });
            //Create a cache of segments which are to be searched
            var segmentCache = shared.Select(s => s.Segments)
                .ToObservableChangeSet(s => s.Key)
                .IgnoreUpdateWhen((current,previous)=>current==previous)
                .AsObservableCache();

            //manually maintained search results and status
            var searchData= new SourceCache<FileSegmentSearch, FileSegmentKey>(s=>s.Key);

            SearchResult = searchData.Connect()
                .Flatten()
                .Select(change=>change.Current)
                .Scan((FileSearchResult)null, (previous, current) => previous==null
                                ? new FileSearchResult(current, Info, Encoding)
                                : new FileSearchResult(previous, current, Info, Encoding))
                .StartWith(FileSearchResult.None)
                .Replay(1).RefCount();

            //initialise a pending state for all segments
            var loader = segmentCache.Connect()
                .Transform(fs => new FileSegmentSearch(fs))
                .WhereReasonsAre(ChangeReason.Add)
                .PopulateInto(searchData);

            //scan end of file, then tail
            var tailSearch = segmentCache.WatchValue(FileSegmentKey.Tail)
                .Scan((FileSegmentSearch) null, (previous, current) =>
                {
                    if (previous == null)
                    {
                        var result = Search(current.Start, current.End);
                        return new FileSegmentSearch(current, result);
                    }
                    else
                    {
                        var result = Search(previous.Segment.End, current.End);
                        return result == null ? previous : new FileSegmentSearch(previous, result);
                    }
                })
                .DistinctUntilChanged()
                .Publish();

            //start tailing
            var tailSubscriber = tailSearch.Subscribe(tail => searchData.AddOrUpdate(tail));

            //load the rest of the file segment by segment, reporting status after each search
            var headSubscriber = tailSearch.Take(1).WithContinuation(() =>
            {
                var locker = new object();
                return searchData.Connect(fss=>fss.Segment.Type == FileSegmentType.Head )
                    .Do(head => Debug.WriteLine(head.First().Current))
                    .WhereReasonsAre(ChangeReason.Add)
                    .SelectMany(changes=>changes.Select(c=>c.Current).OrderByDescending(c=>c.Segment.Index).ToArray())
                    .ObserveOn(_scheduler)
                    .Synchronize(locker)
                    .Do(head => searchData.AddOrUpdate(new FileSegmentSearch(head.Segment,FileSegmentSearchStatus.Searching) ))
                    .Select(fss =>
                    {
                       // Debug.WriteLine($"Running Search For: {fss.Key}");
                        var result = Search(fss.Segment.Start, fss.Segment.End);
                        return new FileSegmentSearch(fss, result);
                    });
            })
               .Subscribe(head => searchData.AddOrUpdate(head));

            _cleanUp = new CompositeDisposable(
                segmentCache,
                loader,
                tailSubscriber,
                headSubscriber,
                tailSearch.Connect(),
                infoSubscriber);
        }
        public SearchInfoCollection(ICombinedSearchMetadataCollection combinedSearchMetadataCollection,
            ISearchMetadataFactory searchMetadataFactory,
            IFileWatcher fileWatcher)
        {
            _localMetadataCollection = combinedSearchMetadataCollection.Local;
            _combinedSearchMetadataCollection = combinedSearchMetadataCollection;
            _searchMetadataFactory = searchMetadataFactory;
            _fileWatcher = fileWatcher;

            var exclusionPredicate = combinedSearchMetadataCollection.Combined.Connect()
                    .IncludeUpdateWhen((current, previous) => !SearchMetadata.EffectsFilterComparer.Equals(current, previous))
                    .Filter(meta=> meta.IsExclusion)
                    .ToCollection()
                    .Select(searchMetadataItems =>
                    {
                        Func<string, bool> predicate = null;

                        if (searchMetadataItems.Count == 0)
                            return predicate;

                        var predicates = searchMetadataItems.Select(meta => meta.BuildPredicate()).ToArray();
                        predicate = str =>
                        {
                            return !predicates.Any(item => item(str));
                        };
                        return predicate;
                    }).StartWith((Func<string, bool>)null)
                    .Replay(1).RefCount();

            All = exclusionPredicate.Select(predicate =>
            {
                if (predicate==null)
                    return _fileWatcher.Latest.Index();

                return _fileWatcher.Latest.Search(predicate);

            }).Switch().Replay(1).RefCount();

            //create a collection with 1 item, which is used to show the entire file
            var systemSearches = new SourceCache<SearchInfo, string>(t => t.SearchText);
            systemSearches.AddOrUpdate(new SearchInfo("<All>", false, All, SearchType.All));

            //create a collection of all possible user filters
            var userSearches = combinedSearchMetadataCollection.Combined
                .Connect(meta => meta.Filter)
                .IgnoreUpdateWhen((current,previous)=> SearchMetadata.EffectsFilterComparer.Equals(current, previous))
                .Transform(meta =>
                {
                    var latest = exclusionPredicate
                                .Select(exclpredicate =>
                                {
                                    Func<string, bool> resultingPredicate;
                                    if (exclpredicate == null)
                                    {
                                        resultingPredicate = meta.BuildPredicate();
                                    }
                                    else
                                    {
                                        var toMatch = meta.BuildPredicate();
                                        resultingPredicate =  str=> toMatch(str) && exclpredicate(str);
                                    }
                                    return _fileWatcher.Latest.Search(resultingPredicate);

                                })
                                .Switch()
                                .Replay(1).RefCount();

                    return new SearchInfo(meta.SearchText, meta.IsGlobal, latest, SearchType.User);
                });

            //combine the results into a single collection
            Searches = systemSearches.Connect()
                    .Or(userSearches)
                    .AsObservableCache();

            _cleanUp = new CompositeDisposable(Searches, systemSearches);
        }
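The exclusion pipeline above boils down to a small predicate-combinator pattern; in isolation (names illustrative, plain LINQ):

            var lines = new[] { "INFO ok", "DEBUG noisy", "TRACE noisier" };
            Func<string, bool>[] exclusions = { s => s.Contains("DEBUG"), s => s.Contains("TRACE") };
            Func<string, bool> notExcluded = str => !exclusions.Any(p => p(str));

            //a line survives only if no exclusion predicate matches it
            var kept = lines.Where(notExcluded); //only "INFO ok" survives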
 public void AddOrUpdate(FileReference entity)
 {
     _fileReferenceCache.AddOrUpdate(entity);
 }
Example #12
 protected void AddOrUpdateMany(IEnumerable <TViewModel> viewModels)
 {
     _sourceCache.AddOrUpdate(viewModels);
 }
Example #13
        public static async Task AddDatabaseRoutine(IDialogCoordinator dialogCoordinator, object dialogContext, SourceCache <Database, string> databasesSourceCache)
        {
            var maxCharLength   = 100;
            var keepPrompting   = true;
            var dialogSettings1 = new MetroDialogSettings()
            {
                AffirmativeButtonText = "Add",
            };

            while (keepPrompting)
            {
                var newDatabaseName = dialogCoordinator.ShowModalInputExternal(dialogContext, "Add Database", $"Enter the database name in the format {_databaseNameFormat}.{Environment.NewLine + _databaseNameFormatNote}", dialogSettings1);
                dialogSettings1.DefaultText = newDatabaseName;
                if (newDatabaseName == null)
                {
                    keepPrompting = false;
                }
                else if (string.IsNullOrWhiteSpace(newDatabaseName))
                {
                    await dialogCoordinator.ShowMessageAsync(dialogContext, "Invalid Database Name", "The new database name cannot be empty or whitespace.");
                }
                else if (newDatabaseName.Length > maxCharLength)
                {
                    await dialogCoordinator.ShowMessageAsync(dialogContext, "Invalid Database Name", $"The new database name cannot be greater than {maxCharLength} characters.");

                    dialogSettings1.DefaultText = newDatabaseName.Substring(0, maxCharLength);
                }
                else if (!newDatabaseName.Contains(AddDatabaseInfo.DatabaseInfoSplitter) || newDatabaseName.Contains("{") || newDatabaseName.Contains("}"))
                {
                    await dialogCoordinator.ShowMessageAsync(dialogContext, "Invalid Database Name", $"The new database name must be in the format {_databaseNameFormat}.{Environment.NewLine + _databaseNameFormatNote}" + Environment.NewLine + $"Example: 198.163.22.10\\MSSQLSERVER,1433{AddDatabaseInfo.DatabaseInfoSplitter}CustomerInfo*username*password");
                }
                else
                {
                    var addDatabaseInfo = new AddDatabaseInfo(newDatabaseName);
                    if (SettingsService.SettingsContainsDatabaseName(addDatabaseInfo.ToFormattedString()))
                    {
                        await dialogCoordinator.ShowMessageAsync(dialogContext, "Database Already Added", $"Database \"{newDatabaseName}\" has already been added.");

                        keepPrompting = false;
                    }
                    else
                    {
                        //Show progress dialog while searching for new database
                        var progressController = await dialogCoordinator.ShowProgressAsync(dialogContext, "Looking for Database", $"Trying to find database \"{newDatabaseName}\"...");

                        progressController.SetIndeterminate();
                        var databaseExists = false;
                        await Task.Run(() =>
                        {
                            databaseExists = TestDatabaseConnection(addDatabaseInfo.ToConnectionString());
                        });

                        await progressController.CloseAsync();

                        //If database can't be found, ask user if they want to continue adding database
                        var dialogSettings = new MetroDialogSettings()
                        {
                            AffirmativeButtonText = "Yes",
                            NegativeButtonText    = "No",
                            DefaultButtonFocus    = MessageDialogResult.Affirmative,
                        };
                        if (!databaseExists && await dialogCoordinator.ShowMessageAsync(dialogContext, "Database Not Found", $"The database \"{newDatabaseName}\" could not be found. Would you still like to add it?", MessageDialogStyle.AffirmativeAndNegative, dialogSettings) != MessageDialogResult.Affirmative)
                        {
                            return;
                        }

                        var newDatabase            = new Database(addDatabaseInfo.ToFormattedString());
                        var serviceOperationHelper = new ServiceOperationHelper(typeof(Database), Plurality.Single, ServiceOperation.Add, "databases source cache", addDatabaseInfo.ToFormattedString());

                        await Task.Run(() =>
                        {
                            try
                            {
                                serviceOperationHelper.LogServiceOperation(ServiceOperationStatus.Attempting);
                                databasesSourceCache.AddOrUpdate(newDatabase);
                                serviceOperationHelper.LogServiceOperation(ServiceOperationStatus.Succeeded);
                            }
                            catch (Exception ex)
                            {
                                serviceOperationHelper.LogServiceOperation(ex.Message);
                            }
                        });

                        keepPrompting = false;

                        if (serviceOperationHelper.ServiceOperationResult.OperationSuceeded)
                        {
                            serviceOperationHelper.ServiceOperationResult = await SettingsService.AddOrUpdateDatabase(newDatabase, true);
                        }

                        if (serviceOperationHelper.ServiceOperationResult.OperationFailed)
                        {
                            await serviceOperationHelper.ServiceOperationResult.ShowUserErrorMessage(dialogCoordinator, dialogContext);
                        }
                    }
                }
            }
        }
Example #14
        public FileSearcher([NotNull] IObservable <FileSegmentCollection> fileSegments,
                            [NotNull] Func <string, bool> predicate,
                            int arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered = 50000,
                            Encoding encoding    = null,
                            IScheduler scheduler = null)
        {
            if (fileSegments == null)
            {
                throw new ArgumentNullException(nameof(fileSegments));
            }
            if (predicate == null)
            {
                throw new ArgumentNullException(nameof(predicate));
            }

            _predicate = predicate;
            _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered = arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered;
            _scheduler = scheduler ?? Scheduler.Default;

            var shared = fileSegments.Replay(1).RefCount();

            var infoSubscriber = shared.Select(segments => segments.Info)
                                 .Take(1)
                                 .Subscribe(info =>
            {
                Info     = info;
                Encoding = encoding ?? info.GetEncoding();
            });
            //Create a cache of segments which are to be searched
            var segmentCache = shared.Select(s => s.Segments)
                               .ToObservableChangeSet(s => s.Key)
                               .IgnoreUpdateWhen((current, previous) => current == previous)
                               .AsObservableCache();

            //manually maintained search results and status
            var searchData = new SourceCache <FileSegmentSearch, FileSegmentKey>(s => s.Key);

            SearchResult = searchData.Connect()
                           .Flatten()
                           .Select(change => change.Current)
                           .Scan((FileSearchResult)null, (previous, current) => previous == null
                                ? new FileSearchResult(current, Info, Encoding, _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered)
                                : new FileSearchResult(previous, current, Info, Encoding, _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered))
                           .StartWith(FileSearchResult.None)
                           .Replay(1).RefCount();

            //initialise a pending state for all segments
            var loader = segmentCache.Connect()
                         .Transform(fs => new FileSegmentSearch(fs))
                         .WhereReasonsAre(ChangeReason.Add)
                         .PopulateInto(searchData);

            //scan end of file, then tail
            var tailSearch = segmentCache.WatchValue(FileSegmentKey.Tail)
                             .Scan((FileSegmentSearch)null, (previous, current) =>
            {
                if (previous == null)
                {
                    var result = Search(current.Start, current.End);
                    return(new FileSegmentSearch(current, result));
                }
                else
                {
                    var result = Search(previous.Segment.End, current.End);
                    return(result == null ? previous : new FileSegmentSearch(previous, result));
                }
            })
                             .DistinctUntilChanged()
                             .Publish();

            //start tailing
            var tailSubscriber = tailSearch.Subscribe(tail => searchData.AddOrUpdate(tail));

            //load the rest of the file segment by segment, reporting status after each search
            var headSubscriber = tailSearch.Take(1).WithContinuation(() =>
            {
                var locker = new object();

                return(searchData.Connect(fss => fss.Segment.Type == FileSegmentType.Head)
                       .Do(head => Debug.WriteLine(head.First().Current))
                       .WhereReasonsAre(ChangeReason.Add)
                       .SelectMany(changes => changes.Select(c => c.Current).OrderByDescending(c => c.Segment.Index).ToArray())
                       .ObserveOn(_scheduler)
                       .Synchronize(locker)
                       .Do(head => searchData.AddOrUpdate(new FileSegmentSearch(head.Segment, FileSegmentSearchStatus.Searching)))
                       .Select(fileSegmentSearch =>
                {
                    /*
                     *   This hack imposes a limitation on the number of items returned as memory can be
                     *   absolutely hammered [I have seen 20MB memory when searching a 1 GB file - obviously not an option]
                     *  TODO: A proper solution.
                     *  1. How about index to file?
                     *  2. Allow auto pipe of large files
                     *  3. Allow user to have some control here
                     */

                    var sum = searchData.Items.Sum(fss => fss.Lines.Length);

                    if (sum >= _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered)
                    {
                        return new FileSegmentSearch(fileSegmentSearch, FileSegmentSearchStatus.Complete);
                    }
                    var result = Search(fileSegmentSearch.Segment.Start, fileSegmentSearch.Segment.End);
                    return new FileSegmentSearch(fileSegmentSearch, result);
                }));
            })
                                 .Subscribe(head => searchData.AddOrUpdate(head));

            _cleanUp = new CompositeDisposable(
                segmentCache,
                loader,
                tailSubscriber,
                headSubscriber,
                tailSearch.Connect(),
                infoSubscriber);
        }
Example #15
 /// <inheritdoc/>
 public void StartMonitoring(GeofenceRegion region) => _store.AddOrUpdate(region);