public void DoNotThrowAWobblyWhenRemovingAMutatedValue()
{
    var pageController = new PageController();
    var sortController = new SortController<TestVm>(SortExpressionComparer<TestVm>.Ascending(t => t.DateFavorited ?? DateTime.MinValue));
    var filterController = new FilterController<TestVm>(myVm => myVm.Id != 0);
    var items = new ObservableCollectionExtended<TestVm>();
    var itemCache = new SourceCache<TestVm, int>(myVm => myVm.Id);

    var item1 = new TestVm(1) { DateFavorited = DateTime.Now };
    var item2 = new TestVm(2) { DateFavorited = DateTime.Now };

    itemCache.AddOrUpdate(item1);
    itemCache.AddOrUpdate(item2);

    bool error = false;
    itemCache.Connect()
        .Filter(filterController)
        .Sort(sortController)
        .Page(pageController) // the error doesn't occur with paging disabled
        .Bind(items)
        .Subscribe(changes => { }, ex => error = true);

    pageController.Change(new PageRequest(1, 100));

    // NB: it never errored when it was the first item which was removed
    item2.DateFavorited = null;
    itemCache.Remove(item2); // ERROR!

    Assert.IsFalse(error, "Error has been thrown");
}
public CombinedSearchMetadataCollection([NotNull] ISearchMetadataCollection metadataCollection,
    [NotNull] IGlobalSearchOptions globalSearchOptions)
{
    if (metadataCollection == null) throw new ArgumentNullException(nameof(metadataCollection));
    if (globalSearchOptions == null) throw new ArgumentNullException(nameof(globalSearchOptions));

    Local = metadataCollection;
    Global = globalSearchOptions.Metadata;

    var cache = new SourceCache<SearchMetadata, string>(t => t.SearchText);

    // Prioritise local before global and renumber
    var localItems = metadataCollection.Metadata.Connect()
        .ToCollection()
        .Select(items => items.ToArray())
        .StartWith(Enumerable.Empty<SearchMetadata>());

    var globalItems = globalSearchOptions.Metadata.Metadata.Connect()
        .ToCollection()
        .Select(items => items.ToArray())
        .StartWith(Enumerable.Empty<SearchMetadata>());

    var combiner = localItems.CombineLatest(globalItems, (local, global) => new { local, global })
        .Select(x => Combine(x.local, x.global))
        .Subscribe(updatedItems =>
        {
            cache.Edit(innerCache =>
            {
                var toRemove = innerCache.Items.Except(updatedItems, SearchMetadata.SearchTextComparer).ToArray();
                innerCache.Remove(toRemove);
                innerCache.AddOrUpdate(updatedItems);
            });
        });

    Combined = cache.Connect()
        .IgnoreUpdateWhen((current, previous) => current.Equals(previous))
        .AsObservableCache();

    _cleanUp = new CompositeDisposable(Combined, cache, combiner);
}
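// Illustrative only: one plausible shape for the Combine helper referenced above,
// under the assumption (taken from the comment) that local items take priority
// over global ones on a SearchText collision and that the survivors are then
// renumbered. ChangePosition is a hypothetical copy-with-new-position helper,
// not a confirmed member of SearchMetadata.
private static IEnumerable<SearchMetadata> Combine(SearchMetadata[] local, SearchMetadata[] global)
{
    return local.Concat(global)
        .GroupBy(meta => meta.SearchText, StringComparer.OrdinalIgnoreCase)
        .Select(group => group.First()) // local items are concatenated first, so local wins
        .Select((meta, index) => meta.ChangePosition(index)); // renumber sequentially
}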
public SearchInfoCollection(ISearchMetadataCollection searchMetadataCollection,
    ISearchMetadataFactory searchMetadataFactory, IFileWatcher fileWatcher)
{
    _metadataCollection = searchMetadataCollection;
    _searchMetadataFactory = searchMetadataFactory;
    _fileWatcher = fileWatcher;

    // Add a complete file display
    All = _fileWatcher.Latest.Index().Replay(1).RefCount();

    // create a collection with 1 item, which is used to show the entire file
    var systemSearches = new SourceCache<SearchInfo, string>(t => t.SearchText);
    systemSearches.AddOrUpdate(new SearchInfo("<All>", All, SearchType.All));

    // create a collection of all possible user filters
    var userSearches = searchMetadataCollection.Metadata
        .Connect(meta => meta.Filter)
        .IgnoreUpdateWhen((current, previous) => SearchMetadata.EffectsFilterComparer.Equals(current, previous))
        .Transform(meta =>
        {
            var latest = _fileWatcher.Latest
                .Search(meta.BuildPredicate())
                .Replay(1).RefCount();

            return new SearchInfo(meta.SearchText, latest, SearchType.User);
        });

    // combine the results into a single collection
    Searches = systemSearches.Connect()
        .Or(userSearches)
        .AsObservableCache();

    _cleanUp = new CompositeDisposable(Searches, systemSearches);
}
public SearchInfoCollection(ISearchMetadataCollection searchMetadataCollection, IFileWatcher fileWatcher)
{
    _metadataCollection = searchMetadataCollection;
    _fileWatcher = fileWatcher;

    // Add a complete file display
    All = fileWatcher.Latest.Index().Replay(1).RefCount();

    // create a collection with 1 item, which is used to show the entire file
    var systemSearches = new SourceCache<SearchInfo, CaseInsensitiveString>(t => (CaseInsensitiveString)t.SearchText);
    systemSearches.AddOrUpdate(new SearchInfo("<All>", All, SearchType.All));

    // create a collection of all possible user filters
    var userSearches = searchMetadataCollection.Metadata
        .Connect(meta => meta.Filter)
        .IgnoreUpdateWhen((current, previous) => current.Filter == previous.Filter)
        .Transform(meta =>
        {
            var latest = _fileWatcher.Latest
                .Search(s => s.Contains(meta.SearchText, StringComparison.OrdinalIgnoreCase))
                .Replay(1).RefCount();

            return new SearchInfo(meta.SearchText, latest, SearchType.User);
        });

    // combine the results into a single collection
    Searches = systemSearches.Connect()
        .Or(userSearches)
        .AsObservableCache();

    _cleanUp = new CompositeDisposable(Searches, systemSearches);
}
public void ReapplyFilterDoesntThrow()
{
    using var source = new SourceCache<Person, string>(p => p.Key);
    source.AddOrUpdate(Enumerable.Range(1, 100).Select(i => new Person("P" + i, i)).ToArray());

    var ex = Record.Exception(() => source.Connect().Filter(Observable.Return(Unit.Default)).AsObservableCache());

    Assert.Null(ex);
}
public void JoinMany()
{
    var domainCache = new SourceCache<DomainDto, int>(d => d.Id);
    var projectCache = new SourceCache<ProjectDto, int>(p => p.Id);

    IObservable<IChangeSet<ProjectWithDomainItems, int>> combined = projectCache.Connect()
        .InnerJoinMany(domainCache.Connect(), domain => domain.ProjectId,
            (key, left, right) => new ProjectWithDomainItems(left, right.Items));

    // From here we would need another transform and some manual editing of a cache,
    // as sketched below
}
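// Illustrative only: a sketch of the "transform and manual cache editing" step
// the comment above alludes to. ProjectViewModel is a hypothetical type; the
// joined change set is projected and copied into a manually maintained cache
// via PopulateInto, after which that cache can be edited directly.
var viewModels = new SourceCache<ProjectViewModel, int>(vm => vm.ProjectId);

var populator = combined
    .Transform(joined => new ProjectViewModel(joined))
    .PopulateInto(viewModels);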
public LeftJoinFixture()
{
    _left = new SourceCache<Device, string>(device => device.Name);
    _right = new SourceCache<DeviceMetaData, string>(device => device.Name);

    _result = _left.Connect()
        .LeftJoin(_right.Connect(), meta => meta.Name, (key, device, meta) => new DeviceWithMetadata(device, meta))
        .AsAggregator();
}
public void Filter_RangeAfterSubscribe()
{
    using (var list = new SourceCache<Person, string>(i => i.Name))
    {
        list.Connect().Filter(v => v.Age % 2 == 0).Subscribe(v => { });
        list.AddOrUpdate(People);
    }
}
public void InvocationOnlyWhenChildIsInvoked()
{
    bool invoked = false;

    var stream = _source.Connect()
        .MergeMany(o => o.Observable)
        .Subscribe(o => { invoked = true; });

    var item = new ObjectWithObservable(1);
    _source.AddOrUpdate(item);
    invoked.Should().BeFalse();

    item.InvokeObservable(true);
    invoked.Should().BeTrue();
    stream.Dispose();
}
public void Does_Not_Reuse_Disposed_Wrappers_FIXED()
{
    var subject = new SourceCache<Widget, int>(w => w.Id);
    var activeObservable = new ObservableCollectionExtended<WidgetWrapper>();
    var inactiveObservable = new ObservableCollectionExtended<WidgetWrapper>();
    var activeFilter = new FilterController<WidgetWrapper>(wrapper => wrapper.Wrapped.Active);
    var inactiveFilter = new FilterController<WidgetWrapper>(wrapper => !wrapper.Wrapped.Active);

    // nothing gets removed from here
    var converted = subject
        .Connect()
        .Transform(w => new WidgetWrapper(w))
        .DisposeMany()
        .AsObservableCache();

    var active = converted.Connect()
        .Filter(activeFilter)
        .Bind(activeObservable)
        .Subscribe();

    var inactive = converted.Connect()
        .Filter(inactiveFilter)
        .Bind(inactiveObservable)
        .Subscribe();

    var w1 = new Widget() { Id = 0, Active = true };
    var w2 = new Widget() { Id = 1, Active = true };

    subject.AddOrUpdate(w1);
    subject.AddOrUpdate(w2);

    Assert.AreEqual(2, activeObservable.Count);
    Assert.AreEqual(0, inactiveObservable.Count);
    Assert.IsFalse(activeObservable[0].IsDisposed);
    Assert.IsFalse(activeObservable[1].IsDisposed);

    // This needs to be done twice to trigger the behavior
    w1.Active = !w1.Active;
    activeFilter.Reevaluate();
    inactiveFilter.Reevaluate();

    w1.Active = !w1.Active;
    activeFilter.Reevaluate();
    inactiveFilter.Reevaluate();

    Assert.AreEqual(2, activeObservable.Count);
    Assert.False(activeObservable[0].IsDisposed);
    Assert.False(activeObservable[1].IsDisposed);
}
public void DoesNotThrow2()
{
    var cache = new SourceCache<Data, int>(d => d.Id);

    var disposable = cache.Connect()
        .Sort(new BehaviorSubject<IComparer<Data>>(SortExpressionComparer<Data>.Ascending(d => d.Id)))
        .Subscribe();

    disposable.Dispose();
}
public void TreatMovesAsRemoveAdd()
{
    var cache = new SourceCache<Person, string>(p => p.Name);
    var people = Enumerable.Range(0, 10).Select(age => new Person("Person" + age, age)).ToList();
    var importantGuy = people.First();
    cache.AddOrUpdate(people);

    ISortedChangeSet<Person, string> latestSetWithoutMoves = null;
    ISortedChangeSet<Person, string> latestSetWithMoves = null;

    var boundList1 = new ReactiveList<Person>();
    var boundList2 = new ReactiveList<Person>();

    using (cache.Connect()
        .AutoRefresh(p => p.Age)
        .Sort(SortExpressionComparer<Person>.Ascending(p => p.Age))
        .TreatMovesAsRemoveAdd()
        .Bind(boundList1)
        .Subscribe(set => latestSetWithoutMoves = set))
    using (cache.Connect()
        .AutoRefresh(p => p.Age)
        .Sort(SortExpressionComparer<Person>.Ascending(p => p.Age))
        .Bind(boundList2)
        .Subscribe(set => latestSetWithMoves = set))
    {
        importantGuy.Age = importantGuy.Age + 200;

        latestSetWithoutMoves.Removes.Should().Be(1);
        latestSetWithoutMoves.Adds.Should().Be(1);
        latestSetWithoutMoves.Moves.Should().Be(0);
        latestSetWithoutMoves.Updates.Should().Be(0);

        latestSetWithMoves.Moves.Should().Be(1);
        latestSetWithMoves.Updates.Should().Be(0);
        latestSetWithMoves.Removes.Should().Be(0);
        latestSetWithMoves.Adds.Should().Be(0);
    }
}
public void TransformToNull()
{
    using var source = new SourceCache<Person, string>(p => p.Name);
    using var results = new ChangeSetAggregator<PersonWithGender?, string>(
        source.Connect().Transform((Func<Person, PersonWithGender?>)(p => null)));

    source.AddOrUpdate(new Person("Adult1", 50));

    results.Messages.Count.Should().Be(1, "Should be 1 update");
    results.Data.Count.Should().Be(1, "Should be 1 item in the cache");
    results.Data.Items.First().Should().Be(null, "Should be a null item");
}
public void DoesNotThrow1()
{
    var cache = new SourceCache<Data, int>(d => d.Id);
    var sortPump = new Subject<Unit>();

    var disposable = cache.Connect()
        .Sort(SortExpressionComparer<Data>.Ascending(d => d.Id), sortPump)
        .Subscribe();

    disposable.Dispose();
}
public LineScroller([NotNull] IObservable<ILineProvider> latest, [NotNull] IObservable<ScrollRequest> scrollRequest)
{
    if (latest == null)
    {
        throw new ArgumentNullException(nameof(latest));
    }

    if (scrollRequest == null)
    {
        throw new ArgumentNullException(nameof(scrollRequest));
    }

    var lines = new SourceCache<Line, LineKey>(l => l.Key);
    Lines = lines.Connect().IgnoreUpdateWhen((current, previous) => current.Key == previous.Key).AsObservableCache();

    var locker = new object();
    scrollRequest = scrollRequest.Synchronize(locker);
    latest = latest.Synchronize(locker);

    var aggregator = latest
        .CombineLatest(scrollRequest, (currentLines, scroll) => new { currentLines, scroll })
        .Sample(TimeSpan.FromMilliseconds(50))
        .Select(x =>
        {
            if (x.scroll == ScrollRequest.None || x.scroll.PageSize == 0 || x.currentLines.Count == 0)
            {
                return new Line[0];
            }

            return x.currentLines.ReadLines(x.scroll).ToArray();
        })
        .Subscribe(currentPage =>
        {
            var previous = lines.Items.ToArray();
            var added = currentPage.Except(previous, Line.TextStartComparer).ToArray();
            var removed = previous.Except(currentPage, Line.TextStartComparer).ToArray();

            lines.Edit(innerCache =>
            {
                if (removed.Any())
                {
                    innerCache.Remove(removed);
                }

                if (added.Any())
                {
                    innerCache.AddOrUpdate(added);
                }
            });
        });

    _cleanUp = new CompositeDisposable(Lines, lines, aggregator);
}
public void TreatMovesAsRemoveAdd()
{
    var cache = new SourceCache<Person, string>(p => p.Name);
    var people = Enumerable.Range(0, 10).Select(age => new Person("Person" + age, age)).ToList();
    var importantGuy = people.First();
    cache.AddOrUpdate(people);

    ISortedChangeSet<Person, string>? latestSetWithoutMoves = null;
    ISortedChangeSet<Person, string>? latestSetWithMoves = null;

    var boundList1 = new ObservableCollectionExtended<Person>();
    var boundList2 = new ObservableCollectionExtended<Person>();

    using (cache.Connect()
        .AutoRefresh(p => p.Age)
        .Sort(SortExpressionComparer<Person>.Ascending(p => p.Age))
        .TreatMovesAsRemoveAdd()
        .Bind(boundList1)
        .Subscribe(set => latestSetWithoutMoves = set))
    using (cache.Connect()
        .AutoRefresh(p => p.Age)
        .Sort(SortExpressionComparer<Person>.Ascending(p => p.Age))
        .Bind(boundList2)
        .Subscribe(set => latestSetWithMoves = set))
    {
        if (latestSetWithoutMoves is null)
        {
            throw new InvalidOperationException(nameof(latestSetWithoutMoves));
        }

        if (latestSetWithMoves is null)
        {
            throw new InvalidOperationException(nameof(latestSetWithMoves));
        }

        importantGuy.Age += 200;

        latestSetWithoutMoves.Should().NotBeNull();
        latestSetWithoutMoves.Removes.Should().Be(1);
        latestSetWithoutMoves.Adds.Should().Be(1);
        latestSetWithoutMoves.Moves.Should().Be(0);
        latestSetWithoutMoves.Updates.Should().Be(0);

        latestSetWithMoves.Moves.Should().Be(1);
        latestSetWithMoves.Updates.Should().Be(0);
        latestSetWithMoves.Removes.Should().Be(0);
        latestSetWithMoves.Adds.Should().Be(0);
    }
}
public DynamicDataExampleViewModel()
{
    _sourceCache = new SourceCache<string, int>(x => x.GetHashCode());

    _disposable = _sourceCache.Connect()
        .ObserveOnDispatcher()
        .Transform(x => (Brush)new SolidColorBrush((Color)ColorConverter.ConvertFromString(x)))
        .StartWithItem(CreateDrawingBrush(), 123)
        .Bind(out _brushes)
        .Subscribe();
}
public void Filter_ItemsBeforeSubscribe()
{
    using (var list = new SourceCache<Person, string>(i => i.Name))
    {
        foreach (var item in People)
        {
            list.AddOrUpdate(item);
        }

        list.Connect().Filter(v => v.Age % 2 == 0).Subscribe(v => { });
    }
}
public void DoSomeStuffWithAnExtraOrdinarilySimplisticMeansOfMeasuringPerformance()
{
    var mySubscriptions = _peopleCache
        .Connect()
        .Do(_ => { })
        .Transform(x => x)
        // .Do(_ => { })
        .Subscribe();

    _peopleCache.AddOrUpdate(_people);
}
public void Connect()
{
    var items = Enumerable.Range(1, 10_000).Select(j => new Person("P" + j, j)).ToList();
    var cache = new SourceCache<Person, string>(p => p.Name);
    cache.Connect().Subscribe(); // warm up

    var added = Allocations.Run(() => cache.AddOrUpdate(items));
    Console.WriteLine(added);

    var connected = Allocations.Run(() => cache.Connect().Subscribe());
    Console.WriteLine(connected);

    var filtered = Allocations.Run(() => cache.Connect(p => p.Age < 5000).Subscribe());
    Console.WriteLine(filtered);
}
public void TopShouldRefresh()
{
    var source = new SourceCache<int, int>(i => i);
    var dest = source.Connect().Top(Comparer<int>.Create((_, __) => 0), 2).AsObservableCache();

    source.AddOrUpdate(Enumerable.Range(0, 5));
    dest.Items.Should().BeEquivalentTo(Enumerable.Range(0, 2));

    source.RemoveKey(0);
    source.RemoveKey(1);
    dest.Items.Should().BeEquivalentTo(Enumerable.Range(2, 2));
}
public void TransformManyWithKey()
{
    var children = Enumerable.Range(1, 100).Select(i => new Person("Name" + i, i)).ToArray();

    int childIndex = 0;
    var parents = Enumerable.Range(1, 50)
        .Select(i =>
        {
            var parent = new Parent(i, new[] { children[childIndex], children[childIndex + 1] });
            childIndex = childIndex + 2;
            return parent;
        }).ToArray();

    using (var source = new SourceCache<Parent, int>(x => x.Id))
    using (var aggregator = source.Connect()
        .TransformMany(p => p.Children, c => c.Name)
        .AsAggregator())
    {
        source.AddOrUpdate(parents);
        aggregator.Data.Count.Should().Be(100);

        // add a child to an observable collection and check the new item is added
        parents[0].Children.Add(new Person("NewlyAdded", 100));
        aggregator.Data.Count.Should().Be(101);

        // remove the first parent and check its children have gone
        source.RemoveKey(1);
        aggregator.Data.Count.Should().Be(98);

        // check items can be cleared and then added back in
        var childrenOfSecondParent = parents[1].Children.ToArray();
        parents[1].Children.Clear();
        aggregator.Data.Count.Should().Be(96);

        parents[1].Children.AddRange(childrenOfSecondParent);
        aggregator.Data.Count.Should().Be(98);

        // a replace produces an update
        var replacedChild = parents[1].Children[0];
        parents[1].Children[0] = new Person("Replacement", 100);
        aggregator.Data.Count.Should().Be(98);

        aggregator.Data.Lookup(replacedChild.Name).HasValue.Should().BeFalse();
        aggregator.Data.Lookup("Replacement").HasValue.Should().BeTrue();
    }
}
public void Grouping()
{
    var domainCache = new SourceCache<DomainDto, int>(d => d.Id);
    var projectCache = new SourceCache<ProjectDto, int>(p => p.Id);

    // if a domain has a project id, it is very efficient to use Group
    var domainWithInnerGroup = domainCache.Connect().Group(d => d.ProjectId);

    IObservable<IChangeSet<ProjectWithDomainCache, int>> combined = projectCache.Connect()
        .InnerJoin(domainWithInnerGroup, domain => domain.Key,
            (key, left, right) => new ProjectWithDomainCache(left, right.Cache));
}
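// Illustrative only: one way to consume the joined stream from the test above,
// binding it to a UI-facing collection. This is a sketch, not part of the
// original sample.
var boundProjects = new ObservableCollectionExtended<ProjectWithDomainCache>();

var subscription = combined
    .Bind(boundProjects)
    .Subscribe();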
public Session(string id)
{
    Requires.NotNullOrWhiteSpace(id, nameof(id));
    Id = id;
    Debug.WriteLine($"{nameof(Session)} #{Id}: Created");

    _players.Connect()
        .Count()
        .Select(x => x / 2)
        .BindTo(this, x => x.MaximumTeamCount);
public void OnItemAddCalled()
{
    var called = false;
    var source = new SourceCache<Person, int>(x => x.Age);

    source.Connect().OnItemAdded(_ => called = true).Subscribe();

    var person = new Person("A", 1);
    source.AddOrUpdate(person);

    Assert.True(called);
}
public void PopulateCache_Observers()
{
    var items = Enumerable.Range(1, 10_000).Select(j => new Person("P" + j, j)).ToList();
    var cache = new SourceCache<Person, string>(p => p.Name);
    var subscribed = cache.Connect().Subscribe();

    var result = Allocations.Run(() => { cache.AddOrUpdate(items); });
    Console.WriteLine(result);
}
public void GroupTest_Add()
{
    using (var cache = new SourceCache<Groupable<int, string>, Guid>(x => x.Key))
    using (var aggregator = cache.Connect()
        .Group(x => x.GroupKey)
        .AsAggregator())
    {
        cache.AddOrUpdate(_items);
        cache.AddOrUpdate(new Groupable<int, string>(1, "Group 2"));

        Assert.AreEqual(2, aggregator.Messages.Count);
    }
}
public void RefreshAllTest()
{
    using (var refresher = new Subject<Unit>())
    using (var cache = new SourceCache<Valuable<string>, Guid>(x => x.Key))
    using (var aggregator = cache.Connect().AutoRefreshOnObservable(x => refresher).AsAggregator())
    {
        cache.AddOrUpdate(_items);
        refresher.OnNext(Unit.Default);

        Assert.AreEqual(101, aggregator.Messages.Count);
        EnumerableAssert.All(aggregator.Messages.SelectMany(x => x), x => x.Reason == ChangeReason.Refresh);
    }
}
public void InvocationOnlyWhenChildIsInvoked()
{
    bool invoked = false;

    var stream = _source.Connect()
        .MergeMany(o => o.Observable)
        .Subscribe(o => { invoked = true; });

    var item = new ObjectWithObservable(1);
    _source.AddOrUpdate(item);

    Assert.IsFalse(invoked, "Error. The operator should not have been invoked");

    item.InvokeObservable(true);
    Assert.IsTrue(invoked, "The observable should have notified");

    stream.Dispose();
}
public void GroupTest_Update()
{
    using (var cache = new SourceCache<Groupable<int, string>, Guid>(x => x.Key))
    using (var aggregator = cache.Connect()
        .Group(x => x.GroupKey)
        .AsAggregator())
    {
        cache.AddOrUpdate(_items);
        cache.AddOrUpdate(_items[0]);

        Assert.AreEqual(2, aggregator.Messages.Count);
        EnumerableAssert.All(aggregator.Messages.SelectMany(x => x), x => x.Reason == ChangeReason.Refresh);
    }
}
public void GroupTest_RefreshGroupsWithoutGroupKeyChanged()
{
    using (var refresher = new Subject<Unit>())
    using (var cache = new SourceCache<Groupable<int, string>, Guid>(x => x.Key))
    using (var aggregator = cache.Connect()
        .Group(x => x.GroupKey, refresher)
        .AsAggregator())
    {
        cache.AddOrUpdate(_items);
        refresher.OnNext(Unit.Default);

        Assert.AreEqual(1, aggregator.Messages.Count);
        EnumerableAssert.None(aggregator.Messages.SelectMany(x => x), x => x.Reason == ChangeReason.Refresh);
    }
}
public AppKeys()
{
    Type t = typeof(AppKeys);

    // Building a list of keys / key names from properties, because lazy
    var keyProps = t.GetRuntimeProperties()
        .Where(prop => Attribute.IsDefined(prop, typeof(ReactiveAttribute)) && prop.GetGetMethod() != null)
        .ToList();

    foreach (var prop in keyProps)
    {
        var hotkey = (Hotkey)t.GetProperty(prop.Name).GetValue(this);
        hotkey.ID = prop.Name;
        keyMap.AddOrUpdate(hotkey);
    }

    keyMap.Connect().Bind(out allKeys).Subscribe();
    this.RaisePropertyChanged("All");
}
public void FlattenObservableCollectionWithProjectionFromObservableCache()
{
    var children = new[]
    {
        new NestedChild("A", "ValueA"),
        new NestedChild("B", "ValueB"),
        new NestedChild("C", "ValueC"),
        new NestedChild("D", "ValueD"),
        new NestedChild("E", "ValueE"),
        new NestedChild("F", "ValueF")
    };

    var parents = new[]
    {
        new ClassWithNestedObservableCollection(1, new[] { children[0], children[1] }),
        new ClassWithNestedObservableCollection(2, new[] { children[2], children[3] }),
        new ClassWithNestedObservableCollection(3, new[] { children[4] })
    };

    using (var source = new SourceCache<ClassWithNestedObservableCollection, int>(x => x.Id))
    using (var sut = source.Connect()
        .AutoRefreshOnObservable(self => self.Children.ToObservableChangeSet())
        .TransformMany(parent => parent.Children.Select(c => new ProjectedNestedChild(parent, c)), c => c.Child.Name)
        .AsObservableCache())
    {
        source.AddOrUpdate(parents);
        sut.Count.Should().Be(5);
        sut.Items.ShouldBeEquivalentTo(parents.SelectMany(p => p.Children.Take(5).Select(c => new ProjectedNestedChild(p, c))));

        // add a child to the observable collection
        parents[2].Children.Add(children[5]);
        sut.Count.Should().Be(6);
        sut.Items.ShouldBeEquivalentTo(parents.SelectMany(p => p.Children.Select(c => new ProjectedNestedChild(p, c))));

        // remove a parent and check its children have been removed
        source.RemoveKey(1);
        sut.Count.Should().Be(4);
        sut.Items.ShouldBeEquivalentTo(parents.Skip(1).SelectMany(p => p.Children.Select(c => new ProjectedNestedChild(p, c))));

        // add the parent back and check its items have been added back in
        source.AddOrUpdate(parents[0]);
        sut.Count.Should().Be(6);
        sut.Items.ShouldBeEquivalentTo(parents.SelectMany(p => p.Children.Select(c => new ProjectedNestedChild(p, c))));
    }
}
public void ErrorUpdatingStreamIsHandled()
{
    bool completed = false;
    bool error = false;

    var feeder = new SourceCache<ErrorInKey, int>(p => p.Key);

    var subscriber = feeder.Connect().Finally(() => completed = true)
        .Subscribe(updates => { Console.WriteLine(); }, ex => error = true);

    feeder.BatchUpdate(updater => updater.AddOrUpdate(new ErrorInKey()));
    subscriber.Dispose();

    Assert.IsTrue(error, "Error has not been invoked");
    Assert.IsTrue(completed, "Completed has not been called");
}
public void Initialise()
{
    _source = new SourceCache<Person, string>(p => p.Name);

    _accumulator = _source.Connect()
        .ForAggregation()
        .Scan(0, (current, items) =>
        {
            items.ForEach(x =>
            {
                if (x.Type == AggregateType.Add)
                    current = current + x.Item.Age;
                else
                    current = current - x.Item.Age;
            });
            return current;
        });
}
public void FilterError()
{
    bool completed = false;
    bool error = false;

    var feeder = new SourceCache<TransformEntityWithError, int>(e => e.Key);

    var subscriber = feeder.Connect()
        .Filter(x => true)
        .Finally(() => completed = true)
        .Subscribe(updates => { Console.WriteLine(); }, ex => error = true);

    feeder.BatchUpdate(updater => updater.AddOrUpdate(new TransformEntityWithError(new Entity())));
    subscriber.Dispose();

    Assert.IsTrue(error, "Error has not been invoked");
    Assert.IsTrue(completed, "Completed has not been called");
}
public void SkipInitialDoesNotReturnTheFirstBatchOfData()
{
    bool updateReceived = false;

    var cache = new SourceCache<Person, string>(p => p.Name);

    var deferStream = cache.Connect().SkipInitial()
        .Subscribe(changes => updateReceived = true);

    Assert.IsFalse(updateReceived, "No update should be received");

    cache.AddOrUpdate(new Person("P1", 1));
    Assert.IsFalse(updateReceived, "No update should be received for the initial batch of changes");

    cache.AddOrUpdate(new Person("P2", 2));
    Assert.IsTrue(updateReceived, "The second update should be received");

    deferStream.Dispose();
}
public void CachePerformance(int n)
{
    /* Tricks to make it fast:

        1) use cache.AddOrUpdate(Enumerable.Range(0, n))
           instead of for (int i = 0; i < n; i++) cache.AddOrUpdate(i);

        2) Uncomment Buffer(n/10).FlattenBufferResult()
           or just use the buffer-by-time functions

        With both of these the speed can be almost negligible
    */
    var cache = new SourceCache<int, int>(i => i);
    double calculated = 0;

    var sw = Stopwatch.StartNew();

    var summation = cache.Connect()
        .StdDev(i => i)
        .Subscribe(result => calculated = result);

    // 1. this is very slow if there are loads of updates (each update causes a new summation)
    for (int i = 1; i < n; i++)
        cache.AddOrUpdate(i);

    // 2. much faster to do this (the whole range is 1 update and 1 calculation):
    // cache.AddOrUpdate(Enumerable.Range(0, n));

    sw.Stop();

    summation.Dispose();
    cache.Dispose();
    Console.WriteLine("Total items: {0}. Value = {1}", n, calculated);
    Console.WriteLine("Cache: {0} updates took {1} ms {2:F3} ms each. {3}", n, sw.ElapsedMilliseconds, sw.Elapsed.TotalMilliseconds / n, DateTime.Now.ToShortDateString());
}
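// A minimal sketch of the batching the comment above recommends: Edit groups
// many changes into a single change set, so downstream aggregations such as
// StdDev run once for the whole range instead of once per item.
cache.Edit(updater => updater.AddOrUpdate(Enumerable.Range(0, n)));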
public void TransformError()
{
    bool completed = false;
    bool error = false;

    var feeder = new SourceCache<Entity, int>(e => e.Key);

    var subscriber = feeder.Connect()
        .Transform(e => new TransformEntityWithError(e))
        .Finally(() => completed = true)
        .Subscribe(updates => { Console.WriteLine(); }, ex => error = true);

    feeder.BatchUpdate(updater => updater.AddOrUpdate(Enumerable.Range(0, 10000).Select(_ => new Entity()).ToArray()));
    feeder.BatchUpdate(updater => updater.AddOrUpdate(new Entity()));
    subscriber.Dispose();

    Assert.IsTrue(error, "Error has not been invoked");
    Assert.IsTrue(completed, "Completed has not been called");
}
public LineScroller([NotNull] IObservable<ILineProvider> latest, [NotNull] IObservable<ScrollRequest> scrollRequest)
{
    if (latest == null) throw new ArgumentNullException(nameof(latest));
    if (scrollRequest == null) throw new ArgumentNullException(nameof(scrollRequest));

    var lines = new SourceCache<Line, LineKey>(l => l.Key);
    Lines = lines.Connect().IgnoreUpdateWhen((current, previous) => current.Key == previous.Key).AsObservableCache();

    var locker = new object();
    scrollRequest = scrollRequest.Synchronize(locker);
    latest = latest.Synchronize(locker);

    var aggregator = latest
        .CombineLatest(scrollRequest, (currentLines, scroll) => new { currentLines, scroll })
        .Sample(TimeSpan.FromMilliseconds(50))
        .Select(x =>
        {
            if (x.scroll == ScrollRequest.None || x.scroll.PageSize == 0 || x.currentLines.Count == 0)
                return new Line[0];

            return x.currentLines.ReadLines(x.scroll).ToArray();
        })
        .RetryWithBackOff<Line[], Exception>((ex, i) => TimeSpan.FromSeconds(1))
        .Subscribe(currentPage =>
        {
            var previous = lines.Items.ToArray();
            var added = currentPage.Except(previous, Line.TextStartComparer).ToArray();
            var removed = previous.Except(currentPage, Line.TextStartComparer).ToArray();

            lines.Edit(innerCache =>
            {
                if (currentPage.Length == 0) innerCache.Clear();
                if (removed.Any()) innerCache.Remove(removed);
                if (added.Any()) innerCache.AddOrUpdate(added);
            });
        });

    _cleanUp = new CompositeDisposable(Lines, lines, aggregator);
}
public void DeferUntilLoadedDoesNothingUntilDataHasBeenReceived()
{
    bool updateReceived = false;
    IChangeSet<Person, string> result = null;

    var cache = new SourceCache<Person, string>(p => p.Name);

    var deferStream = cache.Connect().DeferUntilLoaded()
        .Subscribe(changes =>
        {
            updateReceived = true;
            result = changes;
        });

    Assert.IsFalse(updateReceived, "No update should be received");

    cache.AddOrUpdate(new Person("Test", 1));
    Assert.IsTrue(updateReceived, "An update should be received");
    Assert.AreEqual(1, result.Adds);
    Assert.AreEqual(new Person("Test", 1), result.First().Current);

    deferStream.Dispose();
}
public FileSearch([NotNull] IObservable<FileSegmentCollection> fileSegments,
    [NotNull] Func<string, bool> predicate,
    Encoding encoding = null,
    IScheduler scheduler = null)
{
    if (fileSegments == null) throw new ArgumentNullException(nameof(fileSegments));
    if (predicate == null) throw new ArgumentNullException(nameof(predicate));

    _predicate = predicate;
    _scheduler = scheduler ?? Scheduler.Default;

    var shared = fileSegments.Replay(1).RefCount();

    var infoSubscriber = shared.Select(segments => segments.Info)
        .Take(1)
        .Subscribe(info =>
        {
            Info = info;
            Encoding = encoding ?? info.GetEncoding();
        });

    // Create a cache of segments which are to be searched
    var segmentCache = shared.Select(s => s.Segments)
        .ToObservableChangeSet(s => s.Key)
        .IgnoreUpdateWhen((current, previous) => current == previous)
        .AsObservableCache();

    // manually maintained search results and status
    var searchData = new SourceCache<FileSegmentSearch, FileSegmentKey>(s => s.Key);

    SearchResult = searchData.Connect()
        .Flatten()
        .Select(change => change.Current)
        .Scan((FileSearchResult)null, (previous, current) => previous == null
            ? new FileSearchResult(current, Info, Encoding)
            : new FileSearchResult(previous, current, Info, Encoding))
        .StartWith(FileSearchResult.None)
        .Replay(1).RefCount();

    // initialise a pending state for all segments
    var loader = segmentCache.Connect()
        .Transform(fs => new FileSegmentSearch(fs))
        .WhereReasonsAre(ChangeReason.Add)
        .PopulateInto(searchData);

    // scan the end of the file, then tail
    var tailSearch = segmentCache.WatchValue(FileSegmentKey.Tail)
        .Scan((FileSegmentSearch)null, (previous, current) =>
        {
            if (previous == null)
            {
                var result = Search(current.Start, current.End);
                return new FileSegmentSearch(current, result);
            }
            else
            {
                var result = Search(previous.Segment.End, current.End);
                return result == null ? previous : new FileSegmentSearch(previous, result);
            }
        })
        .DistinctUntilChanged()
        .Publish();

    // start tailing
    var tailSubscriber = tailSearch.Subscribe(tail => searchData.AddOrUpdate(tail));

    // load the rest of the file segment by segment, reporting status after each search
    var headSubscriber = tailSearch.Take(1).WithContinuation(() =>
    {
        var locker = new object();
        return searchData.Connect(fss => fss.Segment.Type == FileSegmentType.Head)
            .Do(head => Debug.WriteLine(head.First().Current))
            .WhereReasonsAre(ChangeReason.Add)
            .SelectMany(changes => changes.Select(c => c.Current).OrderByDescending(c => c.Segment.Index).ToArray())
            .ObserveOn(_scheduler)
            .Synchronize(locker)
            .Do(head => searchData.AddOrUpdate(new FileSegmentSearch(head.Segment, FileSegmentSearchStatus.Searching)))
            .Select(fss =>
            {
                // Debug.WriteLine($"Running Search For: {fss.Key}");
                var result = Search(fss.Segment.Start, fss.Segment.End);
                return new FileSegmentSearch(fss, result);
            });
    })
    .Subscribe(head => searchData.AddOrUpdate(head));

    _cleanUp = new CompositeDisposable(
        segmentCache,
        loader,
        tailSubscriber,
        headSubscriber,
        tailSearch.Connect(),
        infoSubscriber);
}
public FileSearcher([NotNull] IObservable<FileSegmentCollection> fileSegments,
    [NotNull] Func<string, bool> predicate,
    int arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered = 50000,
    Encoding encoding = null,
    IScheduler scheduler = null)
{
    if (fileSegments == null) throw new ArgumentNullException(nameof(fileSegments));
    if (predicate == null) throw new ArgumentNullException(nameof(predicate));

    _predicate = predicate;
    _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered = arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered;
    _scheduler = scheduler ?? Scheduler.Default;

    var shared = fileSegments.Replay(1).RefCount();

    var infoSubscriber = shared.Select(segments => segments.Info)
        .Take(1)
        .Subscribe(info =>
        {
            Info = info;
            Encoding = encoding ?? info.GetEncoding();
        });

    // Create a cache of segments which are to be searched
    var segmentCache = shared.Select(s => s.Segments)
        .ToObservableChangeSet(s => s.Key)
        .IgnoreUpdateWhen((current, previous) => current == previous)
        .AsObservableCache();

    // manually maintained search results and status
    var searchData = new SourceCache<FileSegmentSearch, FileSegmentKey>(s => s.Key);

    SearchResult = searchData.Connect()
        .Flatten()
        .Select(change => change.Current)
        .Scan((FileSearchResult)null, (previous, current) => previous == null
            ? new FileSearchResult(current, Info, Encoding, _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered)
            : new FileSearchResult(previous, current, Info, Encoding, _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered))
        .StartWith(FileSearchResult.None)
        .Replay(1).RefCount();

    // initialise a pending state for all segments
    var loader = segmentCache.Connect()
        .Transform(fs => new FileSegmentSearch(fs))
        .WhereReasonsAre(ChangeReason.Add)
        .PopulateInto(searchData);

    // scan the end of the file, then tail
    var tailSearch = segmentCache.WatchValue(FileSegmentKey.Tail)
        .Scan((FileSegmentSearch)null, (previous, current) =>
        {
            if (previous == null)
            {
                var result = Search(current.Start, current.End);
                return new FileSegmentSearch(current, result);
            }
            else
            {
                var result = Search(previous.Segment.End, current.End);
                return result == null ? previous : new FileSegmentSearch(previous, result);
            }
        })
        .DistinctUntilChanged()
        .Publish();

    // start tailing
    var tailSubscriber = tailSearch.Subscribe(tail => searchData.AddOrUpdate(tail));

    // load the rest of the file segment by segment, reporting status after each search
    var headSubscriber = tailSearch.Take(1).WithContinuation(() =>
    {
        var locker = new object();
        return searchData.Connect(fss => fss.Segment.Type == FileSegmentType.Head)
            .Do(head => Debug.WriteLine(head.First().Current))
            .WhereReasonsAre(ChangeReason.Add)
            .SelectMany(changes => changes.Select(c => c.Current).OrderByDescending(c => c.Segment.Index).ToArray())
            .ObserveOn(_scheduler)
            .Synchronize(locker)
            .Do(head => searchData.AddOrUpdate(new FileSegmentSearch(head.Segment, FileSegmentSearchStatus.Searching)))
            .Select(fileSegmentSearch =>
            {
                /* This hack imposes a limit on the number of items returned,
                   as memory can be absolutely hammered [I have seen 20MB of memory
                   when searching a 1 GB file - obviously not an option].

                   TODO: A proper solution.
                   1. How about an index to file?
                   2. Allow auto piping of large files
                   3. Allow the user to have some control here
                */
                var sum = searchData.Items.Sum(fss => fss.Lines.Length);
                if (sum >= _arbitaryNumberOfMatchesBeforeWeBailOutBecauseMemoryGetsHammered)
                {
                    return new FileSegmentSearch(fileSegmentSearch, FileSegmentSearchStatus.Complete);
                }

                var result = Search(fileSegmentSearch.Segment.Start, fileSegmentSearch.Segment.End);
                return new FileSegmentSearch(fileSegmentSearch, result);
            });
    })
    .Subscribe(head => searchData.AddOrUpdate(head));

    _cleanUp = new CompositeDisposable(
        segmentCache,
        loader,
        tailSubscriber,
        headSubscriber,
        tailSearch.Connect(),
        infoSubscriber);
}
public SearchInfoCollection(ICombinedSearchMetadataCollection combinedSearchMetadataCollection,
    ISearchMetadataFactory searchMetadataFactory,
    IFileWatcher fileWatcher)
{
    _localMetadataCollection = combinedSearchMetadataCollection.Local;
    _combinedSearchMetadataCollection = combinedSearchMetadataCollection;
    _searchMetadataFactory = searchMetadataFactory;
    _fileWatcher = fileWatcher;

    var exclusionPredicate = combinedSearchMetadataCollection.Combined.Connect()
        .IncludeUpdateWhen((current, previous) => !SearchMetadata.EffectsFilterComparer.Equals(current, previous))
        .Filter(meta => meta.IsExclusion)
        .ToCollection()
        .Select(searchMetadataItems =>
        {
            Func<string, bool> predicate = null;

            if (searchMetadataItems.Count == 0)
                return predicate;

            var predicates = searchMetadataItems.Select(meta => meta.BuildPredicate()).ToArray();
            predicate = str => !predicates.Any(item => item(str));
            return predicate;
        }).StartWith((Func<string, bool>)null)
        .Replay(1).RefCount();

    All = exclusionPredicate.Select(predicate =>
    {
        if (predicate == null)
            return _fileWatcher.Latest.Index();

        return _fileWatcher.Latest.Search(predicate);
    }).Switch().Replay(1).RefCount();

    // create a collection with 1 item, which is used to show the entire file
    var systemSearches = new SourceCache<SearchInfo, string>(t => t.SearchText);
    systemSearches.AddOrUpdate(new SearchInfo("<All>", false, All, SearchType.All));

    // create a collection of all possible user filters
    var userSearches = combinedSearchMetadataCollection.Combined
        .Connect(meta => meta.Filter)
        .IgnoreUpdateWhen((current, previous) => SearchMetadata.EffectsFilterComparer.Equals(current, previous))
        .Transform(meta =>
        {
            var latest = exclusionPredicate
                .Select(exclusion =>
                {
                    Func<string, bool> resultingPredicate;
                    if (exclusion == null)
                    {
                        resultingPredicate = meta.BuildPredicate();
                    }
                    else
                    {
                        var toMatch = meta.BuildPredicate();
                        resultingPredicate = str => toMatch(str) && exclusion(str);
                    }
                    return _fileWatcher.Latest.Search(resultingPredicate);
                })
                .Switch()
                .Replay(1).RefCount();

            return new SearchInfo(meta.SearchText, meta.IsGlobal, latest, SearchType.User);
        });

    // combine the results into a single collection
    Searches = systemSearches.Connect()
        .Or(userSearches)
        .AsObservableCache();

    _cleanUp = new CompositeDisposable(Searches, systemSearches);
}