Example #1
 /// <summary>
 /// Works like SelectMany, but memoizes selector calls. In addition, it
 /// guarantees that no more than 'maxConcurrent' selectors are running
 /// concurrently and queues the rest. This is very important when using
 /// web services to avoid potentially spamming the server with hundreds
 /// of requests.
 ///
 /// This overload is useful when making the same web service call in
 /// several places in the code, to ensure that all of the code paths are
 /// using the same cache.
 /// </summary>
 /// <param name="existingCache">An already-configured ObservableAsyncMRUCache.</param>
 /// <returns>An Observable representing the flattened results of the
 /// cache selector.</returns>
 public static IObservable<TRet> CachedSelectMany<T, TRet>(this IObservable<T> This, ObservableAsyncMRUCache<T, TRet> existingCache)
 {
     return This.SelectMany(existingCache.AsyncGet);
 }
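
A minimal usage sketch for this overload. It assumes a hypothetical FetchUser method (taking an int and returning IObservable<User>) and hypothetical userIds / searchResultIds observables; both pipelines share the same ObservableAsyncMRUCache instance, so a user fetched in one code path is served from the cache in the other:

 // Shared cache: up to 50 memoized results, at most 5 concurrent web requests.
 // FetchUser, userIds and searchResultIds are hypothetical placeholders.
 var userCache = new ObservableAsyncMRUCache<int, User>(FetchUser, 50, 5, null, null);

 // Two independent code paths reuse the same cache instance.
 IObservable<User> usersForList = userIds.CachedSelectMany(userCache);
 IObservable<User> usersForSearch = searchResultIds.CachedSelectMany(userCache);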
Example #2
        /// <summary>
        /// Works like SelectMany, but memoizes selector calls. In addition, it
        /// guarantees that no more than 'maxConcurrent' selectors are running
        /// concurrently and queues the rest. This is very important when using
        /// web services to avoid potentially spamming the server with hundreds
        /// of requests.
        /// </summary>
        /// <param name="selector">A selector similar to one you would pass as a
        /// parameter passed to SelectMany. Note that similarly to
        /// ObservableAsyncMRUCache.AsyncGet, a selector must return semantically
        /// identical results given the same key - i.e. it must be a 'function' in
        /// the mathematical sense.</param>
        /// <param name="maxCached">The number of items to cache. When this limit
        /// is reached, not recently used items will be discarded.</param>
        /// <param name="maxConcurrent">The maximum number of concurrent
        /// asynchronous operations regardless of key - this is important for
        /// web-based caches to limit the number of concurrent requests to a
        /// server. The default is 5.</param>
        /// <param name="scheduler"></param>
        /// <returns>An Observable representing the flattened results of the
        /// selector.</returns>
        public static IObservable<TRet> CachedSelectMany<T, TRet>(this IObservable<T> This, Func<T, IObservable<TRet>> selector, int maxCached = 50, int maxConcurrent = 5, IScheduler scheduler = null)
        {
            var cache = new ObservableAsyncMRUCache<T, TRet>(selector, maxCached, maxConcurrent, null, scheduler);

            return This.SelectMany(cache.AsyncGet);
        }
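
A sketch of the selector-based overload under the same assumptions (a hypothetical FetchUser call and userIds observable). Here the cache is created internally, so memoization applies only within this one pipeline:

        // Each distinct id is fetched at most once while cached; no more than
        // 5 requests run concurrently, and the 50 most recently used results
        // are retained. FetchUser and userIds are hypothetical placeholders.
        IObservable<User> users = userIds.CachedSelectMany(
            id => FetchUser(id),
            maxCached: 50,
            maxConcurrent: 5);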