Example #1
        public void CacheShouldEatExceptionsAndMarshalThemToObservable()
        {
            /* This is a bit tricky:
             *
             * 5,2 complete at t=1000 simultaneously
             * 10,0 get queued up, 0 fails immediately (delay() doesn't delay the OnError),
             *    so 10 completes at t=2000
             * The 7 completes at t=3000
             */
            var input = new[] { 5, 2, 10, 0 /*boom!*/, 7 };
            var sched = new TestScheduler();

            Observable.Throw<int>(new Exception("Foo")).Subscribe(x => {
                Console.WriteLine(x);
            }, ex => {
                Console.WriteLine(ex);
            }, () => {
                Console.WriteLine("Completed");
            });

            var delay   = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(x =>
                (x == 0 ? Observable.Throw<int>(new Exception("Boom!")) : Observable.Return(10 * x)).Delay(delay, sched), 5, 2, null, sched);

            Exception exception = null;
            int       completed = 0;

            input.ToObservable()
            .SelectMany(x => fixture.AsyncGet(x))
            .Subscribe(x => {
                this.Log().InfoFormat("Result = {0}", x);
                completed++;
            }, ex => exception = exception ?? ex);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.Null(exception);
            Assert.Equal(0, completed);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));
            Assert.NotNull(exception);
            Assert.Equal(2, completed);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(7500)));
            Assert.NotNull(exception);
            Assert.Equal(4, completed);
            this.Log().Info(exception);
        }
        public void CacheShouldEatExceptionsAndMarshalThemToObservable()
        {
            /* This is a bit tricky:
             *
             * 5,2 complete at t=1000 simultaneously
             * 10,0 get queued up, 0 fails immediately (delay() doesn't delay the OnError),
             *    so 10 completes at t=2000
             * The 7 completes at t=3000
             */
            var input = new[] { 5, 2, 10, 0/*boom!*/, 7 };
            var sched = new TestScheduler();

            Observable.Throw<int>(new Exception("Foo")).Subscribe(x => {
                Console.WriteLine(x);
            }, ex => {
                Console.WriteLine(ex);
            }, () => {
                Console.WriteLine("Completed");
            });

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(x =>
                (x == 0 ? Observable.Throw<int>(new Exception("Boom!")) : Observable.Return(10 * x)).Delay(delay, sched), 5, 2, sched);

            Exception exception = null;
            int completed = 0;
            input.ToObservable()
                .SelectMany(x => (IObservable<int>)fixture.AsyncGet(x))
                .Subscribe(x => {
                    this.Log().InfoFormat("Result = {0}", x);
                    completed++;
                }, ex => exception = exception ?? ex);

            sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.IsNull(exception);
            Assert.AreEqual(0, completed);

            sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));
            Assert.IsNotNull(exception);
            Assert.AreEqual(2, completed);

            sched.RunTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(7500)));
            Assert.IsNotNull(exception);
            Assert.AreEqual(4, completed);
            this.Log().Info(exception);
        }
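
The timing comment in the test above leans on one Rx detail: Delay() time-shifts OnNext and OnCompleted, but it does not hold back OnError, which is why the failing key 0 surfaces its exception well before its one-second slot has elapsed. Below is a minimal standalone sketch of just that behaviour, assuming the stock TestScheduler from Microsoft.Reactive.Testing; DelayErrorDemo is a made-up name for illustration.

// Illustrative sketch only - not part of ObservableAsyncMRUCache.
using System;
using System.Reactive.Linq;
using Microsoft.Reactive.Testing;

class DelayErrorDemo
{
    static void Main()
    {
        var sched = new TestScheduler();
        Exception seen = null;

        // Delay the whole sequence by a second of virtual time...
        Observable.Throw<int>(new Exception("Boom!"))
            .Delay(TimeSpan.FromSeconds(1.0), sched)
            .Subscribe(_ => { }, ex => seen = ex);

        // ...yet the error has already surfaced after a single tick, long
        // before the one-second delay (10,000,000 ticks) has elapsed.
        sched.AdvanceBy(1);
        Console.WriteLine(seen != null);   // True
    }
}
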
        public void EnsureCacheDoesNotBlockOnRelease()
        {
            var input = new[] { 1, 2 };
            var sched = new TestScheduler();
            var releaseCount = 0;

            var delay = TimeSpan.FromSeconds(1.0);
            //set the cache to only hold one value, which forces an eviction of an inflight request from the inner cache
            var fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x * 5).Delay(delay, sched), 1, 2, x=>releaseCount+=1, sched);

            int result = 0;
            input.ToObservable(sched).SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x)).Subscribe(x => result += x);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1000)));
            Assert.Equal(0,releaseCount);
            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1001)));
            Assert.Equal(1, releaseCount);
        }
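
The two assertions above pin down that the release callback for the evicted in-flight entry fires only after that request terminates around t=1000, not at the moment of eviction, so the cache never blocks waiting for it. One way to express that sort of deferral with stock Rx is to chain the release onto the observable's termination with Finally(); this is only a sketch of the idea, not ObservableAsyncMRUCache's actual internals.

// Illustrative sketch only - not how ObservableAsyncMRUCache is implemented.
using System;
using System.Reactive.Linq;
using Microsoft.Reactive.Testing;

class DeferredReleaseDemo
{
    static void Main()
    {
        var sched = new TestScheduler();
        int releaseCount = 0;

        // Stand-in for the evicted in-flight request: the release hook rides
        // on the observable's termination instead of running at eviction time.
        var inflight = Observable.Return(5)
            .Delay(TimeSpan.FromSeconds(1.0), sched)
            .Finally(() => releaseCount++);

        inflight.Subscribe(_ => { });

        sched.AdvanceTo(TimeSpan.FromMilliseconds(999).Ticks);
        Console.WriteLine(releaseCount);   // 0 - the request is still in flight

        sched.AdvanceTo(TimeSpan.FromMilliseconds(1001).Ticks);
        Console.WriteLine(releaseCount);   // 1 - released once the request finished
    }
}
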
        public void AsyncGetTest()
        {
            var input = new[] { 1, 1, 1, 1, 1 };
            var sched = new TestScheduler();

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x*5).Delay(delay, sched), 5, 2, null, sched);

            int result = 0;
            input.ToObservable(sched).SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x)).Subscribe(x => result += x);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.Equal(0, result);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1200)));
            Assert.Equal(25, result);

            sched.Start();
            Assert.Equal(25, result);
        }
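
Five requests for the same key, yet the total is 25 after a single one-second delay: the test checks that concurrent callers of AsyncGet share one in-flight observable instead of kicking off five fetches (which, under the concurrency limit of 2, could not all finish by t=1200). A stripped-down sketch of that coalescing idea follows; it is illustrative only and leaves out the MRU eviction and the concurrency limit.

// Illustrative sketch only - not ObservableAsyncMRUCache. Eviction and the
// concurrency limit are omitted; the names here are hypothetical.
using System;
using System.Collections.Generic;
using System.Reactive.Subjects;

class CoalescingCache<TKey, TVal>
{
    readonly Dictionary<TKey, AsyncSubject<TVal>> _inflight =
        new Dictionary<TKey, AsyncSubject<TVal>>();
    readonly Func<TKey, IObservable<TVal>> _fetch;
    readonly object _gate = new object();

    public CoalescingCache(Func<TKey, IObservable<TVal>> fetch)
    {
        _fetch = fetch;
    }

    public IObservable<TVal> AsyncGet(TKey key)
    {
        lock (_gate)
        {
            AsyncSubject<TVal> subj;
            if (_inflight.TryGetValue(key, out subj))
                return subj;                  // join the request already in flight

            subj = new AsyncSubject<TVal>();
            _inflight[key] = subj;
            _fetch(key).Subscribe(subj);      // AsyncSubject caches the final value
            return subj;
        }
    }
}

Because AsyncSubject caches its terminal value, callers that subscribe after the fetch has already completed still receive the same result, which is exactly the behaviour the five identical keys above exercise.
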
        public void CacheShouldQueueOnceWeHitOurConcurrentLimit()
        {
            var input = new[] { 1, 2, 3, 4, 1 };
            var sched = new TestScheduler();

            var delay = TimeSpan.FromSeconds(1.0);
            var fixture = new ObservableAsyncMRUCache<int, int>(x => Observable.Return(x*5).Delay(delay, sched), 5, 2, null, sched);

            int result = 0;
            input.ToObservable(sched).SelectMany<int, int>(x => (IObservable<int>)fixture.AsyncGet(x)).Subscribe(x => result += x);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(500)));
            Assert.Equal(0, result);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(1500)));
            Assert.Equal(1*5 + 2*5 + 1*5, result);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(2500)));
            Assert.Equal(1*5 + 2*5 + 3*5 + 4*5 + 1*5, result);

            sched.AdvanceTo(sched.FromTimeSpan(TimeSpan.FromMilliseconds(5000)));
            Assert.Equal(1*5 + 2*5 + 3*5 + 4*5 + 1*5, result);
        }
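
The staircase in these assertions comes from the concurrency cap of 2 passed to the constructor: keys 1 and 2 finish at t=1000 (the repeated 1 is served from cache), and only then do 3 and 4 get a slot, finishing at t=2000. Setting the cache hit aside, the queueing shape can be reproduced with nothing but stock Rx via Merge with a maxConcurrent argument; the sketch below makes that assumption explicit and is not the cache's implementation.

// Illustrative sketch only - stock Rx, not the cache's internals.
using System;
using System.Reactive.Linq;
using Microsoft.Reactive.Testing;

class ConcurrencyCapDemo
{
    static void Main()
    {
        var sched = new TestScheduler();
        var delay = TimeSpan.FromSeconds(1.0);
        int result = 0;

        new[] { 1, 2, 3, 4 }.ToObservable(sched)
            .Select(x => Observable.Return(x * 5).Delay(delay, sched))
            .Merge(2)                                  // at most two fetches in flight
            .Subscribe(x => result += x);

        sched.AdvanceTo(TimeSpan.FromMilliseconds(1500).Ticks);
        Console.WriteLine(result);                     // 15 = 1*5 + 2*5

        sched.AdvanceTo(TimeSpan.FromMilliseconds(2500).Ticks);
        Console.WriteLine(result);                     // 50 = 1*5 + 2*5 + 3*5 + 4*5
    }
}
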