public void GetTest()
        {
            // Blocking-Get smoke test: five requests for the same key (1)
            // should each yield 1*5 once the 1s virtual delay elapses,
            // summing to 25. (Presumably the cache memoizes the in-flight
            // computation so only one calculation runs — confirm against
            // ObservableAsyncMRUCache.)
            (new TestScheduler()).With(sched => {
                var input = new[] {1, 1, 1, 1, 1};
                var delay = TimeSpan.FromSeconds(1.0);
                var fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x*5).Delay(delay, sched), 5, 2);

                int result = 0;
                var t = new Thread(() => {
                    // We use this side thread because there's no way to tell
                    // the cache to Run the Test Scheduler. So the side thread
                    // will do the waiting while the main thread advances the
                    // Scheduler
                    foreach (int x in input.Select(x => fixture.Get(x))) { result += x; }
                });
                t.Start();

                // Advance virtual time to 500ms — the 1s delay has not elapsed,
                // so no Get() call can have returned yet.
                sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));

                // NB: The Thread.Sleep is to let our other thread catch up
                Thread.Sleep(100);
                Assert.Equal(0, result);

                // Past the 1s delay: all five Gets should now have completed.
                sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1200)));

                Thread.Sleep(100);
                Assert.Equal(25, result);

                this.Log().Info("Running to end");
                sched.Run();
                t.Join();
                // Running the scheduler to the end must not change the total.
                Assert.Equal(25, result);
            });
        }
        /// <summary>
        /// RegisterMemoizedObservable is similar to RegisterAsyncObservable, but
        /// caches its results so that subsequent Execute calls with the same
        /// CommandParameter will not need to be run in the background.
        /// </summary>
        /// <param name="calculationFunc">The function that performs the
        /// expensive or asynchronous calculation and returns the result.
        ///
        /// Note that this function *must* return an equivalently-same result given a
        /// specific input - because the function is being memoized, if the
        /// calculationFunc depends on variables other than the input value,
        /// the results will be unpredictable.</param>
        /// <param name="maxSize">The number of items to cache. When this limit
        /// is reached, not recently used items will be discarded.</param>
        /// <param name="onRelease">This optional method is called when an item
        /// is evicted from the cache - this can be used to clean up / manage an
        /// on-disk cache; the calculationFunc can download a file and save it
        /// to a temporary folder, and the onRelease action will delete the
        /// file.</param>
        /// <param name="sched">The scheduler to run asynchronous operations on
        /// - defaults to TaskpoolScheduler.</param>
        /// <returns>An Observable representing the items returned by the
        /// calculation result. Note that with this method it is possible with a
        /// calculationFunc to return multiple items per invocation of Execute.</returns>
        public IObservable<TResult> RegisterMemoizedObservable<TResult>(
            Func<object, IObservable<TResult>> calculationFunc,
            int maxSize = 50,
            Action<TResult> onRelease = null,
            IScheduler sched = null)
        {
            Contract.Requires(calculationFunc != null);
            Contract.Requires(maxSize > 0);

            // Fall back to the background scheduler when none was supplied.
            var scheduler = sched ?? RxApp.TaskpoolScheduler;

            // The MRU cache does the actual memoization; we merely route
            // Execute parameters through its AsyncGet.
            var memoizingCache = new ObservableAsyncMRUCache<object, TResult>(
                calculationFunc, maxSize, _maximumConcurrent, onRelease, scheduler);

            return this.RegisterAsyncObservable(memoizingCache.AsyncGet);
        }
// Esempio n. 3 ("Example no. 3" — scraping artifact separating unrelated snippets)
        public void CacheShouldEatExceptionsAndMarshalThemToObservable()
        {
            // Timeline (virtual time, concurrency limit of 2):
            //  * 5 and 2 run first and complete together at t=1000
            //  * 10 and 0 are queued; 0 fails immediately (Delay() does not
            //    delay the OnError), so 10 completes at t=2000
            //  * 7 completes at t=3000
            // The point: an exception from the calculation surfaces on the
            // observable's error channel instead of tearing down the cache.
            var input = new[] { 5, 2, 10, 0 /* this one throws */, 7 };
            var sched = new TestScheduler();

            // Sanity check that a throwing observable routes to the error handler.
            Observable.Throw<int>(new Exception("Foo")).Subscribe(
                x => Console.WriteLine(x),
                ex => Console.WriteLine(ex),
                () => Console.WriteLine("Completed"));

            var delay = TimeSpan.FromSeconds(1.0);
            Func<int, IObservable<int>> selector = x => x == 0
                ? Observable.Throw<int>(new Exception("Boom!")).Delay(delay, sched)
                : Observable.Return(10 * x).Delay(delay, sched);
            var fixture = new ObservableAsyncMRUCache<int, int>(selector, 5, 2, null, sched);

            Exception exception = null;
            var completed = 0;

            input.ToObservable()
                .SelectMany(x => fixture.AsyncGet(x))
                .Subscribe(
                    x => {
                        this.Log().InfoFormat("Result = {0}", x);
                        completed++;
                    },
                    ex => exception = exception ?? ex);

            // t=500: nothing has completed and nothing has failed.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.Null(exception);
            Assert.Equal(0, completed);

            // t=1500: 5 and 2 are done, and 0 has already delivered its error.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));
            Assert.NotNull(exception);
            Assert.Equal(2, completed);

            // t=7500: the remaining items drained despite the earlier error.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(7500)));
            Assert.NotNull(exception);
            Assert.Equal(4, completed);
            this.Log().Info(exception);
        }
        public void CacheShouldEatExceptionsAndMarshalThemToObservable()
        {
            // NUnit-assert copy of the same test: an exception thrown by the
            // calculation function must surface via OnError on the returned
            // observable rather than destroying the cache — later requests
            // still complete.
            /* This is a bit tricky:
             *
             * 5,2 complete at t=1000 simultaneously
             * 10,0 get queued up, 0 fails immediately (delay() doesn't delay the OnError),
             *    so 10 completes at t=2000
             * The 7 completes at t=3000
             */
            var input = new[] { 5, 2, 10, 0/*boom!*/, 7 };
            var sched = new TestScheduler();

            // Smoke-check that Observable.Throw routes to the error handler.
            Observable.Throw<int>(new Exception("Foo")).Subscribe(x => {
                Console.WriteLine(x);
            }, ex => {
                Console.WriteLine(ex);
            }, () => {
                Console.WriteLine("Completed");
            });

            var delay = TimeSpan.FromSeconds(1.0);
            // NOTE(review): sched is passed here as the 4th constructor
            // argument, while the sibling copy of this test passes
            // (..., null, sched) — confirm this resolves to the intended
            // overload of ObservableAsyncMRUCache.
            var fixture = new ObservableAsyncMRUCache<int, int>(x =>
                (x == 0 ? Observable.Throw<int>(new Exception("Boom!")) : Observable.Return(10 * x)).Delay(delay, sched), 5, 2, sched);

            Exception exception = null;
            int completed = 0;
            input.ToObservable()
                .SelectMany(x => (IObservable<int>)fixture.AsyncGet(x))
                .Subscribe(x => {
                    this.Log().InfoFormat("Result = {0}", x);
                    completed++;
                }, ex => exception = exception ?? ex);

            // t=500: nothing has completed and nothing has failed yet.
            sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.IsNull(exception);
            Assert.AreEqual(0, completed);

            // t=1500: 5 and 2 are done; 0 has delivered its error.
            sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));
            Assert.IsNotNull(exception);
            Assert.AreEqual(2, completed);

            // t=7500: all remaining items have drained despite the error.
            sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(7500)));
            Assert.IsNotNull(exception);
            Assert.AreEqual(4, completed);
            this.Log().Info(exception);
        }
        public void EnsureCacheDoesNotBlockOnRelease()
        {
            // A maxSize-1 cache means the second request evicts the first,
            // still in-flight entry; the onRelease callback must be invoked
            // without blocking the scheduler.
            var input = new[] { 1, 2 };
            var sched = new TestScheduler();
            var releaseCount = 0;

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(
                x => Observable.Return(x * 5).Delay(delay, sched),
                1 /* maxSize: force eviction of an in-flight request */,
                2, x => releaseCount += 1, sched);

            var total = 0;
            input.ToObservable(sched)
                .SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x))
                .Subscribe(x => total += x);

            // At t=1000 no release has been observed yet...
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1000)));
            Assert.Equal(0, releaseCount);

            // ...but just past it, exactly one entry has been released.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1001)));
            Assert.Equal(1, releaseCount);
        }
        public void AsyncGetTest()
        {
            // Five async requests for the same key; each yields 1*5 once the
            // 1s virtual delay elapses, for a total of 25.
            var keys = new[] { 1, 1, 1, 1, 1 };
            var sched = new TestScheduler();

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(
                x => Observable.Return(x * 5).Delay(delay, sched), 5, 2, null, sched);

            var sum = 0;
            keys.ToObservable(sched)
                .SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x))
                .Subscribe(x => sum += x);

            // Before the delay elapses nothing has arrived.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.Equal(0, sum);

            // After the delay, every request has produced its value.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1200)));
            Assert.Equal(25, sum);

            // Running the scheduler to completion changes nothing further.
            sched.Start();
            Assert.Equal(25, sum);
        }
        public void AsyncGetTest()
        {
            // Duplicate snippet of AsyncGetTest (this file aggregates several
            // copies of the same tests — consider deduplicating). Five async
            // requests for key 1 sum to 25 once the 1s virtual delay elapses.
            var input = new[] { 1, 1, 1, 1, 1 };
            var sched = new TestScheduler();

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x*5).Delay(delay, sched), 5, 2, null, sched);

            int result = 0;
            input.ToObservable(sched).SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x)).Subscribe(x => result += x);

            // t=500: the 1s delay has not elapsed, so nothing has arrived.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.Equal(0, result);

            // t=1200: all five requests have completed.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1200)));
            Assert.Equal(25, result);

            // Draining the scheduler must not change the total.
            sched.Start();
            Assert.Equal(25, result);
        }
// Esempio n. 8 ("Example no. 8" — scraping artifact separating unrelated snippets)
        public void CachedValuesTest()
        {
            // Keys {1,2,1,3,1} against a maxSize-2 cache: once everything
            // completes only the two most recently used values remain cached,
            // but every request still yields a result.
            var input = new[] { 1, 2, 1, 3, 1 };
            var sched = new TestScheduler();

            var delay   = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache <int, int>(x => Observable.Return(x * 5).Delay(delay, sched), 2, 2);

            var results = input.ToObservable().SelectMany(fixture.AsyncGet).CreateCollection();

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));

            // Nothing has completed yet, so the cache holds no values.
            Assert.Equal(0, fixture.CachedValues().Count());

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));

            var output = fixture.CachedValues().ToArray();

            // BUGFIX: Assert.IsTrue is NUnit/MSTest and does not exist in
            // xUnit, which this test otherwise uses (Assert.Equal). The xUnit
            // equality assert also reports the actual length on failure.
            Assert.Equal(2, output.Length);
            Assert.Equal(input.Length, results.Count);
        }
        public void EnsureCacheDoesNotBlockOnRelease()
        {
            // Duplicate snippet of EnsureCacheDoesNotBlockOnRelease: with a
            // cache that holds one value, requesting a second key evicts the
            // first (still in-flight) entry, and onRelease must run without
            // blocking the scheduler.
            var input = new[] { 1, 2 };
            var sched = new TestScheduler();
            var releaseCount = 0;

            var delay = TimeSpan.FromSeconds(1.0);
            //set the cache to only hold one value, which forces an eviction of an inflight request from the inner cache
            var fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x * 5).Delay(delay, sched), 1, 2, x=>releaseCount+=1, sched);


            int result = 0;
            input.ToObservable(sched).SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x)).Subscribe(x => result += x);

            // At t=1000 no release has been observed yet...
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1000)));
            Assert.Equal(0,releaseCount);
            // ...but just past it, exactly one entry has been released.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1001)));
            Assert.Equal(1, releaseCount);


        }
// Esempio n. 10 ("Example no. 10" — scraping artifact separating unrelated snippets)
        public void GetTest()
        {
            // Blocking-Get test: five requests for key 1 should sum to 25 once
            // the 1s virtual delay elapses. A side thread performs the
            // blocking Get() calls while this thread drives the scheduler.
            (new TestScheduler()).With(sched => {
                var input   = new[] { 1, 1, 1, 1, 1 };
                var delay   = TimeSpan.FromSeconds(1.0);
                var fixture = new ObservableAsyncMRUCache <int, int>(x => Observable.Return(x * 5).Delay(delay, sched), 5, 2);

                int result = 0;
                var t      = new Thread(() => {
                    // We use this side thread because there's no way to tell
                    // the cache to Run the Test Scheduler. So the side thread
                    // will do the waiting while the main thread advances the
                    // Scheduler
                    foreach (int x in input.Select(x => fixture.Get(x)))
                    {
                        this.Log().DebugFormat("Adding {0} to result", x);
                        result += x;
                    }
                });
                t.Start();

                // BUGFIX: removed a stray sched.Start() that preceded this
                // line. Start() runs the scheduler until its queue drains; if
                // the side thread had already scheduled the delayed
                // calculation, that would race past t=1000 and make the
                // Assert.Equal(0, result) below fail intermittently.
                sched.RunToMilliseconds(500);

                // NB: The Thread.Sleep is to let our other thread catch up
                Thread.Sleep(100);
                Assert.Equal(0, result);

                // Past the 1s delay: all five Gets should have completed.
                sched.RunToMilliseconds(1200);

                Thread.Sleep(100);
                Assert.Equal(25, result);

                this.Log().Info("Running to end");
                sched.Start();
                t.Join();
                // Draining the scheduler must not change the total.
                Assert.Equal(25, result);
            });
        }
        public void CacheShouldQueueOnceWeHitOurConcurrentLimit()
        {
            // With a concurrency limit of 2, of {1,2,3,4,1} only the first two
            // distinct keys start immediately; 3 and 4 queue behind them, and
            // the trailing 1 is satisfied by the first request's result.
            var keys = new[] { 1, 2, 3, 4, 1 };
            var sched = new TestScheduler();

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(
                x => Observable.Return(x * 5).Delay(delay, sched), 5, 2, null, sched);

            var sum = 0;
            keys.ToObservable(sched)
                .SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x))
                .Subscribe(x => sum += x);

            // t=500: the first wave is still in flight.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.Equal(0, sum);

            // t=1500: 1, 2, and the repeated 1 have completed.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));
            Assert.Equal(1*5 + 2*5 + 1*5, sum);

            // t=2500: the queued 3 and 4 have finished as well.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(2500)));
            Assert.Equal(1*5 + 2*5 + 3*5 + 4*5 + 1*5, sum);

            // t=5000: no further work remains; the total is stable.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(5000)));
            Assert.Equal(1*5 + 2*5 + 3*5 + 4*5 + 1*5, sum);
        }
        public void CacheShouldQueueOnceWeHitOurConcurrentLimit()
        {
            // Duplicate snippet: with a concurrency limit of 2, only the first
            // two distinct keys of {1,2,3,4,1} start immediately; 3 and 4
            // queue, and the trailing 1 reuses the first request's result.
            var input = new[] { 1, 2, 3, 4, 1 };
            var sched = new TestScheduler();

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x*5).Delay(delay, sched), 5, 2, null, sched);

            int result = 0;
            input.ToObservable(sched).SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x)).Subscribe(x => result += x);

            // t=500: the first wave is still in flight.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.Equal(0, result);

            // t=1500: 1, 2, and the repeated 1 have completed.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));
            Assert.Equal(1*5 + 2*5 + 1*5, result);

            // t=2500: the queued 3 and 4 have finished as well.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(2500)));
            Assert.Equal(1*5 + 2*5 + 3*5 + 4*5 + 1*5, result);

            // t=5000: no further work remains; the total is stable.
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(5000)));
            Assert.Equal(1*5 + 2*5 + 3*5 + 4*5 + 1*5, result);
        }
        public void CachedValuesTest()
        {
            // Keys {1,2,1,3,1} against a maxSize-2 cache: after completion
            // only the two most recently used values remain cached, while all
            // five requests still yield a result.
            var input = new[] { 1, 2, 1, 3, 1 };
            var sched = new TestScheduler();

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x*5).Delay(delay, sched), 2, 2);

            var results = input.ToObservable().SelectMany(fixture.AsyncGet).CreateCollection();
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));

            // Nothing has completed yet, so the cache holds no values.
            Assert.Equal(0, fixture.CachedValues().Count());

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));

            var output = fixture.CachedValues().ToArray();
            // BUGFIX: Assert.IsTrue is NUnit/MSTest and does not exist in
            // xUnit, which this test otherwise uses (Assert.Equal). The xUnit
            // equality assert also reports the actual length on failure.
            Assert.Equal(2, output.Length);
            Assert.Equal(input.Length, results.Count);
        }
        public void GetTest()
        {
            // NUnit-assert copy of the blocking-Get test: five requests for
            // key 1 should sum to 25 once the 1s virtual delay elapses. The
            // side thread performs the (presumably blocking — see the sibling
            // copy's comment) Get() calls while this thread drives the
            // scheduler.
            var input = new[] {1, 1, 1, 1, 1};
            var sched = new TestScheduler();
            ObservableAsyncMRUCache<int, int> fixture;

            var delay = TimeSpan.FromSeconds(1.0);
            fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x*5).Delay(delay, sched), 5, 2);

            int result = 0;
            var t = new Thread(() => {
                foreach (int x in input.Select(x => fixture.Get(x))) {
                    result += x;
                }
            });
            t.Start();

            // t=500: the 1s delay has not elapsed; no Get() can have returned.
            sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));

            // The sleep lets the side thread catch up before we assert.
            Thread.Sleep(100);
            Assert.AreEqual(0, result);

            // t=1200: past the delay; all five Gets should have completed.
            sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1200)));

            Thread.Sleep(100);
            Assert.AreEqual(25, result);

            this.Log().Info("Running to end");
            sched.Run();
            t.Join();
            // Draining the scheduler must not change the total.
            Assert.AreEqual(25, result);
        }