        public async Task Should_handle_faulted_tasks()
        {
            var didThrow = false;
            UnhandledExceptionEventHandler exHandler = (s, e) =>
            {
                if (e.ExceptionObject is LazyMemoryCacheSpecException)
                    didThrow = true;
            };

            AppDomain.CurrentDomain.UnhandledException += exHandler;
            using (var cache = new LazyMemoryCache<Uri, Data>("endpoints", async key => { throw new LazyMemoryCacheSpecException(); },
                x => x.SlidingWindow(TimeSpan.FromSeconds(5))))
            {
                try
                {
                    await (await cache.Get(new Uri("loopback://localhost"))).Value;
                }
                catch (LazyMemoryCacheSpecException)
                {
                    // Expected: the factory task faults on first access.
                }

                await cache.Get(new Uri("loopback://localhost"));
                await Task.Delay(5000);
            }
            AppDomain.CurrentDomain.UnhandledException -= exHandler;
            Assert.That(!didThrow);
        }
        public void ResetWorksCorrectly()
        {
            var factoryCallCount = 0;
            var cacheOptions = new LazyMemoryCacheOptions(TimeSpan.FromMinutes(1), Guid.NewGuid().ToString(), new object())
            {
                Clock = _systemClockMock.Object
            };
            var lazyMemoryCache = new LazyMemoryCache<int>(
                () =>
                {
                    Interlocked.Increment(ref factoryCallCount);

                    return 0;
                },
                _memoryCache, cacheOptions);

            Assert.Equal(0, factoryCallCount);

            // Access the value for the first time:
            var value = lazyMemoryCache.Value;

            Assert.Equal(1, factoryCallCount);

            lazyMemoryCache.Reset();

            // Access the value again - the cache has been reset so the value factory method should get called again:
            value = lazyMemoryCache.Value;

            Assert.Equal(2, factoryCallCount);
        }
        public void AsyncCallsWorkCorrectly()
        {
            var factoryCallCount = 0;
            var cacheOptions = new LazyMemoryCacheOptions(TimeSpan.FromMinutes(1), Guid.NewGuid().ToString(), new object())
            {
                Clock = _systemClockMock.Object
            };
            var lazyMemoryCache = new LazyMemoryCache<int>(
                () =>
                {
                    Interlocked.Increment(ref factoryCallCount);

                    return 0;
                },
                _memoryCache, cacheOptions);

            Assert.Equal(0, factoryCallCount);

            // Access the value from three concurrent tasks:
            var task1 = new Task(() => { var value = lazyMemoryCache.Value; });
            var task2 = new Task(() => { var value = lazyMemoryCache.Value; });
            var task3 = new Task(() => { var value = lazyMemoryCache.Value; });

            task1.Start();
            task2.Start();
            task3.Start();

            Task.WaitAll(task1, task2, task3);

            // Despite the concurrent access, the value factory should only have been called once:
            Assert.Equal(1, factoryCallCount);
        }
        public void MultipleInstancesWorkCorrectly()
        {
            var factoryCallCount = 0;
            var cacheOptions = new LazyMemoryCacheOptions(TimeSpan.FromMinutes(1), Guid.NewGuid().ToString(), new object())
            {
                Clock = _systemClockMock.Object
            };

            // Both instances share the same options (and therefore the same cache key) and the same underlying memory cache:
            var lazyMemoryCache1 = new LazyMemoryCache<int>(
                () =>
                {
                    Interlocked.Increment(ref factoryCallCount);

                    return 0;
                },
                _memoryCache, cacheOptions);
            var lazyMemoryCache2 = new LazyMemoryCache<int>(
                () =>
                {
                    Interlocked.Increment(ref factoryCallCount);

                    return 0;
                },
                _memoryCache, cacheOptions);

            Assert.Equal(0, factoryCallCount);

            var value = lazyMemoryCache1.Value;

            Assert.Equal(1, factoryCallCount);

            // The second instance finds the entry cached by the first, so its factory is never called:
            value = lazyMemoryCache2.Value;

            Assert.Equal(1, factoryCallCount);
        }
Example #6
        public static LazyMemoryCache GetInstance()
        {
            if (_instance == null)
            {
                _instance = new LazyMemoryCache();
            }

            return _instance;
        }
Example #7
        public SendEndpointCache(ISendEndpointProvider sendEndpointProvider, Func<Uri, TimeSpan> cacheDurationProvider = null)
        {
            _sendEndpointProvider = sendEndpointProvider;

            _cacheDurationProvider = cacheDurationProvider ?? DefaultCacheDurationProvider;

            var cacheId = NewId.NextGuid().ToString();

            _cache = new LazyMemoryCache<Uri, ISendEndpoint>(cacheId, GetSendEndpointFromProvider, GetEndpointCachePolicy, FormatAddressKey,
                OnCachedEndpointRemoved);
        }
Example #8
        public RabbitMqPublishEndpointProvider(IRabbitMqHost host, IMessageSerializer serializer, Uri sourceAddress, IPublishPipe publishPipe)
        {
            _host = host;
            _serializer = serializer;
            _sourceAddress = sourceAddress;
            _publishPipe = publishPipe;

            _publishObservable = new PublishObservable();

            var cacheId = NewId.NextGuid().ToString();

            _cache = new LazyMemoryCache<Type, ISendEndpoint>(cacheId, CreateSendEndpoint, GetEndpointCachePolicy, FormatAddressKey,
                OnCachedEndpointRemoved);
        }
        public void ValueIsCorrect()
        {
            var cacheOptions = new LazyMemoryCacheOptions(TimeSpan.FromMinutes(1), Guid.NewGuid().ToString(), new object())
            {
                Clock = _systemClockMock.Object
            };
            var lazyMemoryCache = new LazyMemoryCache<string>(() => "Value", _memoryCache, cacheOptions);

            Assert.Equal("Value", lazyMemoryCache.Value);
        }
        public void OnBeforeTest()
        {
            if (IntPtr.Size == 4)
            {
                Console.ForegroundColor = ConsoleColor.Red;
                Console.WriteLine("Warning: the application is 32-bit which may cause OutOfMemoryException due to 2GiB limit.");
                Console.ResetColor();
            }

            lazyMatrixCache = new LazyMemoryCache<int, double[,]>(0.9f, false);

            Console.WriteLine("Filling lazy cache (with constructors)...");
            for (int i = 0; i < 50; i++)
            {
                lazyMatrixCache.AddOrUpdate(i, (key) => createBigMatrix(key));
            }
        }
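For context, a sketch of how the populated lazyMatrixCache could be read back, mirroring the TryGetValue access pattern used in the testLazyMemCache examples below and assuming TryGetValue follows the usual bool-returning Try pattern; the matrix itself is only built on the first access to Value:

        // Sketch (not from the original source): look up a lazily cached matrix.
        ILazy<double[,]> lazyMatrix;
        if (lazyMatrixCache.TryGetValue(7, out lazyMatrix))
        {
            // createBigMatrix(7) runs here on the first access; the result is then cached.
            double[,] matrix = lazyMatrix.Value;
        }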
        public void ValueFactoryIsOnlyCalledWhenCacheExpires()
        {
            var factoryCallCount = 0;
            var cacheOptions = new LazyMemoryCacheOptions(TimeSpan.FromMinutes(1), Guid.NewGuid().ToString(), new object())
            {
                Clock = _systemClockMock.Object
            };
            var lazyMemoryCache = new LazyMemoryCache<int>(
                () =>
                {
                    Interlocked.Increment(ref factoryCallCount);

                    return 0;
                },
                _memoryCache, cacheOptions);

            Assert.Equal(0, factoryCallCount);

            // Access the value for the first time:
            var value = lazyMemoryCache.Value;

            Assert.Equal(1, factoryCallCount);

            // Access the value again - this time we should get the cached value:
            value = lazyMemoryCache.Value;

            Assert.Equal(1, factoryCallCount);

            // Fast-forward the time by 59 seconds:
            _systemClockMock.Setup(c => c.UtcNow).Returns(new DateTime(2000, 1, 1, 0, 0, 59));

            // Access the value again - we should get the cached value again:
            value = lazyMemoryCache.Value;

            Assert.Equal(1, factoryCallCount);

            // Fast-forward the time by 2 more seconds (1 minute and 1 second since the test started):
            _systemClockMock.Setup(c => c.UtcNow).Returns(new DateTime(2000, 1, 1, 0, 1, 1));

            // Access the value again - this time the cache should have expired so the value factory method should get called again:
            value = lazyMemoryCache.Value;

            Assert.Equal(2, factoryCallCount);
        }
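The fast-forward steps above only line up if the shared _systemClockMock starts the test at 2000-01-01 00:00:00. That setup is not part of this listing; an assumed fixture initialization (the clock interface name is a guess, the Moq Setup/Returns pattern matches the calls in the test) might look like:

        // Assumed fixture setup, not shown in the original listing.
        _systemClockMock = new Mock<ISystemClock>();
        _systemClockMock.Setup(c => c.UtcNow).Returns(new DateTime(2000, 1, 1, 0, 0, 0));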
Example #13
        static void populateCache(LazyMemoryCache<int, Bgr<byte>[,]> cache, int elementCount)
        {
            Console.WriteLine("Filling lazy cache (with constructors)...");
            //******************* adding elements *********************************************************
            for (int key = 0; key < elementCount; key++)
            {
                cache.AddOrUpdate(key, () =>
                {
                    //simulate getting image from a disc (slow operation)
                    var img = new Bgr<byte>[480, 640];
                    img.SetValue<Bgr<byte>>(new Bgr<byte>((byte)key, 0, 0));
                    Thread.Sleep(60);

                    return img;
                },
                //we do not have a destructor
                (img) => { });
            }
        }
        public async Task Should_store_a_cached_item()
        {
            using (var cache = new LazyMemoryCache<Uri, Data>("endpoints", key => Task.FromResult(new Data {Value = $"The Key: {key}"}),
                x => x.SlidingWindow(TimeSpan.FromSeconds(5))))
            {
                var endpoint = await (await cache.Get(new Uri("loopback://localhost"))).Value;
                Console.WriteLine("Endpoint: {0}", endpoint.Created);

                await Task.Delay(TimeSpan.FromSeconds(2));

                endpoint = await (await cache.Get(new Uri("loopback://localhost"))).Value;
                Console.WriteLine("Endpoint: {0}", endpoint.Created);

                await Task.Delay(TimeSpan.FromSeconds(10));

                endpoint = await (await cache.Get(new Uri("loopback://localhost"))).Value;
                Console.WriteLine("Endpoint: {0}", endpoint.Created);
            }
        }
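The Data type used by these cache tests is not included in the listing. From its usage (an object initializer setting Value, and endpoint.Created printed after each lookup) a minimal assumed shape would be:

        // Assumed shape of the Data type, not from the original source: the
        // tests only require a settable Value and a Created timestamp.
        class Data
        {
            public string Value { get; set; }
            public DateTime Created { get; } = DateTime.UtcNow;
        }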
Example #17
        /// <summary>
        /// Creates and tests a lazy memory cache where elements are constructed on demand and evicted using an LRU strategy, but only when RAM usage rises above the specified limit.
        /// </summary>
        static void testLazyMemCache()
        {
            //********************* construction *********************************************************
            var memCache = new LazyMemoryCache<int, Bgr<byte>[,]>
              (
                //should we start evicting ?
                isCacheReachedCapacity,
                //get element size
                (img) => (ulong)(img.LongLength * img.ColorInfo().Size),
                //should we collect after each eviction ?
                forceCollectionOnRemoval: false
               );

            const int MAX_KEY = 100;

            //******************* adding elements *********************************************************
            populateCache(memCache, MAX_KEY + 1);

            //******************* accessing elements (run Task Manager to see memory allocation!) **************
            Console.WriteLine("Accessing elements (run Task Manager to see memory allocation!):");
            Random rand = new Random();

            while (!Console.KeyAvailable)
            {
                var key = rand.Next(0, MAX_KEY + 1);
                ILazy<Bgr<byte>[,]> lazyVal;
                memCache.TryGetValue(key, out lazyVal);

                Console.ForegroundColor = lazyVal.IsValueCreated ? ConsoleColor.Green : ConsoleColor.Red;
                Bgr<byte>[,] val = null;
                var elapsed = Diagnostics.MeasureTime(() =>
                {
                    val = lazyVal.Value;
                });

                Console.Write("\r Accessing {0}. Access time: {1} ms.", key, elapsed);
            }
        }
Example #18
        LazyMemoryCache<Type, ISendEndpoint>.ICacheExpiration GetEndpointCachePolicy(LazyMemoryCache<Type, ISendEndpoint>.ICacheExpirationSelector selector)
        {
            return selector.SlidingWindow(TimeSpan.FromDays(1));
        }
        /// <summary>
        /// Creates and tests a lazy memory cache where elements are constructed on demand and evicted using an LRU strategy, but only when RAM usage rises above the specified limit.
        /// </summary>
        static void testLazyMemCache()
        {
            ComputerInfo computerInfo = new ComputerInfo(); //requires a reference to the Microsoft.VisualBasic assembly

            //construction
            var memCache = new LazyMemoryCache<int, Image<Gray, int>>
              (
                //should we start evicting ?
                (currentSize) =>
                {
                    var occupied = computerInfo.TotalPhysicalMemory - computerInfo.AvailablePhysicalMemory;
                    var occupiedPercentage = (float)occupied / computerInfo.TotalPhysicalMemory;

                    //WATCH OUT! You can get an OutOfMemoryException even though the RAM is not full:
                    //when creating zero-filled fields the OS may apply optimizations such as block sharing,
                    //and a 32-bit build will throw OutOfMemoryException once it consumes 2 GiB
                    return occupiedPercentage > 0.45;
                },

                //get element size
                (img) => (ulong)(img.Stride * img.Height),

                //set to false to skip GC.Collect() when an item is evicted => may use more RAM than the configured limit, but shortens delays caused by GC
                forceCollectionOnRemoval: true
               );

            Console.WriteLine("Filling lazy cache (with constructors)...");
            const int MAX_KEY = 100;
            //adding elements (you can also use stream as IEnumerable to populate cache)
            for (int key = 0; key <= MAX_KEY; key++)
            {
                memCache.AddOrUpdate(key, () =>
                {
                    //simulate getting image from a disc (slow operation)
                    var img = new Image<Gray, int>(640, 480);
                    img.SetValue(key);
                    Thread.Sleep(60);

                    return img;
                },
                (img) => { img.Dispose(); });
            }

            //accessing elements (run Task Manager to see memory allocation!)
            Console.WriteLine("Accessing elements (run Task Manager to see memory allocation!):");
            Random rand = new Random();
            while (!Console.KeyAvailable)
            {
                var key  = rand.Next(0, MAX_KEY + 1);
                ILazy<Image<Gray, int>> lazyVal;
                memCache.TryGetValue(key, out lazyVal);

                Console.ForegroundColor = lazyVal.IsValueCreated ? ConsoleColor.Green : ConsoleColor.Red;
                Image<Gray, int> val = null;
                var elapsed = Diagnostics.MeasureTime(() =>
                {
                    val = lazyVal.Value;
                });

                Console.Write("\r Accessing {0}. Access time: {1} ms.", key, elapsed);
            }

            //accessing elements (run Task Manager to see memory allocation!)
            /*foreach (var item in memCache)
            {
                 var lazyVal = item.Value;

                Console.WriteLine(lazyVal.Value);
                Console.WriteLine(memCache.HardFaults);
            }*/
        }
Example #21
        LazyMemoryCache<Uri, ISendEndpoint>.ICacheExpiration GetEndpointCachePolicy(LazyMemoryCache<Uri, ISendEndpoint>.ICacheExpirationSelector selector)
        {
            return selector.SlidingWindow(_cacheDurationProvider(selector.Key));
        }
Example #22
        public void Execute()
        {
            var memoryCache = MemoryCache.Instance;
            var lazyMemoryCache = LazyMemoryCache.GetInstance();
        }
        public SerilogLogger(Serilog.ILogger baseLogger = null, bool demoteDebug = false)
        {
            _baseLogger = baseLogger;
            _demoteDebug = demoteDebug;
            _logs = new LazyMemoryCache<string, ILog>("MassTransit.SerilogIntegration", CreateLog);
        }