public AssemblyManifest() { MakeToken = (assemblyFullName) => { AssignedIdentifiers = false; return new Token(assemblyFullName); }; }
public AssemblyManifest() { MakeToken = (assemblyFullName) => { AssignedIdentifiers = false; return(new Token(assemblyFullName)); }; }
public void SingleThreaded_EmptyCacheReturns() { ConcurrentCache cache = new ConcurrentCache(10); var value = cache.Get("1"); Assert.IsNotNull(value); }
public void SingleThreaded_CacheHits()
{
    int cacheSize = 1000;
    ConcurrentCache cache = new ConcurrentCache(cacheSize);

    foreach (var i in Enumerable.Range(0, cacheSize / 2)) { cache.Get(i.ToString()); }
    Assert.AreEqual(0, cache.CacheHit);
    Assert.AreEqual(cacheSize / 2, cache.TotalRequest);

    foreach (var i in Enumerable.Range(0, cacheSize / 2)) { cache.Get(i.ToString()); }
    Assert.AreEqual(cacheSize / 2, cache.CacheHit);
    Assert.AreEqual(cacheSize, cache.TotalRequest);

    foreach (var i in Enumerable.Range(cacheSize / 2, cacheSize / 2)) { cache.Get(i.ToString()); }
    Assert.AreEqual(cacheSize / 2, cache.CacheHit);
    Assert.AreEqual(cacheSize * 3 / 2, cache.TotalRequest);

    foreach (var i in Enumerable.Range(0, cacheSize)) { cache.Get(i.ToString()); }
    Assert.AreEqual(cacheSize * 3 / 2, cache.CacheHit);
    Assert.AreEqual(cacheSize * 5 / 2, cache.TotalRequest);
}
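The tests above rely on Get materializing a value on a miss while counting hits and total requests. The sketch below is an illustrative stand-in with those semantics (it ignores the capacity argument and eviction); it is not the ConcurrentCache implementation under test.

using System.Collections.Concurrent;
using System.Threading;

// Illustrative sketch only: a Get-creates-on-miss cache with hit/request
// counters matching what the tests assert. Capacity and eviction are ignored;
// this is not the real ConcurrentCache.
public class CountingCacheSketch
{
    private readonly ConcurrentDictionary<string, object> _items = new ConcurrentDictionary<string, object>();
    private int _cacheHit;
    private int _totalRequest;

    public CountingCacheSketch(int capacity) { /* capacity ignored in this sketch */ }

    public int CacheHit => _cacheHit;
    public int TotalRequest => _totalRequest;

    public object Get(string key)
    {
        Interlocked.Increment(ref _totalRequest);
        if (_items.TryGetValue(key, out var existing))
        {
            Interlocked.Increment(ref _cacheHit);
            return existing;
        }

        // Miss: create and publish a value for the key.
        return _items.GetOrAdd(key, _ => new object());
    }
}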
public DefalutEventStorage(IEventStorageRepository eventStoreRepository, IMementoRepository mementoRepository)
{
    this.EventStorageRepository = eventStoreRepository;
    this.MementoRepository = mementoRepository;
    _eventsDict = new ConcurrentCache<string, ConcurrentBag<Event>>();
}
private ImmutableArray<Symbol> GetMembersWorker(string name)
{
    var originalMembers = OriginalDefinition.GetMembers(name);
    if (originalMembers.IsDefaultOrEmpty)
    {
        return originalMembers;
    }

    var builder = ArrayBuilder<Symbol>.GetInstance(originalMembers.Length);
    foreach (var t in originalMembers)
    {
        builder.Add(t.SymbolAsMember(this));
    }

    var substitutedMembers = builder.ToImmutableAndFree();

    // cache of size 8 seems reasonable here.
    // considering that substituted methods have about 10 reference fields,
    // reusing just one may make the cache profitable.
    var cache = _lazyMembersByNameCache ??
                (_lazyMembersByNameCache = new ConcurrentCache<string, ImmutableArray<Symbol>>(8));
    cache.TryAdd(name, substitutedMembers);

    return substitutedMembers;
}
static TypeInfoProvider() { MakeModuleInfo = (key, module) => new ModuleInfo(module); MakeProxiesByName = _MakeProxiesByName; MakeProxiesByFullName = _MakeProxiesByFullName; ShouldAddProxies = _ShouldAddProxies; }
internal BinderFactory(CSharpCompilation compilation, SyntaxTree syntaxTree, bool ignoreAccessibility)
{
    _compilation = compilation;
    _syntaxTree = syntaxTree;
    _ignoreAccessibility = ignoreAccessibility;

    _binderFactoryVisitorPool = new ObjectPool<BinderFactoryVisitor>(() => new BinderFactoryVisitor(this), 64);

    // 50 is more or less a guess, but it seems to work fine for scenarios that I tried.
    // we need something big enough to keep binders for most classes and some methods
    // in a typical syntax tree.
    // On the other side, note that the whole factory is weakly referenced and therefore short lived,
    // making this cache big is not very useful.
    // I noticed that while compiling Roslyn C# compiler most caches never see
    // more than 50 items added before getting collected.
    _binderCache = new ConcurrentCache<BinderCacheKey, Binder>(50);

    _buckStopsHereBinder = new BuckStopsHereBinder(compilation);
}
public void SingleThreaded_AutoEvictKeys()
{
    int cacheSize = 5;
    ConcurrentCache cache = new ConcurrentCache(cacheSize);

    for (int i = 0; i < 2 * cacheSize; i++) { cache.Get(i.ToString()); } // cache now holds [5, 6, 7, 8, 9]

    for (int i = 0; i < cacheSize; i++) { cache.Get(i.ToString()); } // all misses; cache now holds [0, 1, 2, 3, 4]
    Assert.AreEqual(0, cache.CacheHit);
    Assert.AreEqual(3 * cacheSize, cache.TotalRequest);

    for (int i = 0; i < cacheSize; i++) { cache.Get(i.ToString()); } // all hits; cache still holds [0, 1, 2, 3, 4]
    Assert.AreEqual(cacheSize, cache.CacheHit);
}
public FlowFieldAlgorithm(int cacheSize) { if (cacheSize > 0) { _flowFieldCache = new ConcurrentCache<IPathRequest, FlowField>(cacheSize, new SingleSourcePathRequestComparer()); } }
public async Task CacheShouldNotAddWithEmptyOrNullArguments(string mcc, string mnc) { var cache = new ConcurrentCache(); await cache.Add(mcc, mnc, new DiscoveryResponse(_responses[0])); Assert.IsTrue(cache.IsEmpty); }
public async Task CacheShouldReturnDefaultValueIfKeyNull() { var cache = new ConcurrentCache(); var cached = await cache.Get<ProviderMetadata>(null, true); Assert.IsNull(cached); }
public FlowFieldAlgorithm(int cacheSize, int amountOfNodes) { _potentialFieldAlgorithm = new PotentialFieldAlgorithm(0, amountOfNodes); if (cacheSize > 0) { _flowFieldCache = new ConcurrentCache<IPathRequest, FlowField>(cacheSize, new SingleSourcePathRequestComparer()); } }
public PotentialFieldAlgorithm(int cacheSize, int amountOfNodes) { _dijkstraAlgorithm = new DijkstraAlgorithm(amountOfNodes); if (cacheSize > 0) { _potentialFieldCache = new ConcurrentCache<IPathRequest, PotentialField>(cacheSize, new SingleSourcePathRequestComparer()); } }
public MobileConnectController() { var cache = new ConcurrentCache(); if (_mobileConnect == null) { _mobileConnect = new MobileConnectWebInterface(DemoConfiguration.Config, cache); } }
public void SingleThreaded_RepeatedCallReturnsCachedValue() { ConcurrentCache cache = new ConcurrentCache(10); var val = cache.Get("1"); var val2 = cache.Get("1"); Assert.AreEqual(val, val2); Assert.AreEqual(1, cache.CacheHit); }
public FunctionCache(ITypeInfoSource typeInfo)
{
    TypeInfo = typeInfo;
    Comparer = new QualifiedMemberIdentifier.Comparer(typeInfo);

    Cache = new ConcurrentCache<QualifiedMemberIdentifier, Entry>(Environment.ProcessorCount, 4096, Comparer);
    PendingTransformsQueue = new ConcurrentHashQueue<QualifiedMemberIdentifier>(Math.Max(1, Environment.ProcessorCount / 4), 4096, Comparer);
    ActiveTransformPipelines = new ConcurrentDictionary<QualifiedMemberIdentifier, FunctionTransformPipeline>(Math.Max(1, Environment.ProcessorCount / 4), 128, Comparer);
    MethodTypes = new MethodTypeFactory();

    MakeCacheEntry = (id, method) => {
        PendingTransformsQueue.TryEnqueue(id);

        return new Entry(id, Locks) {
            Info = method.Method,
            Reference = method.Reference,
            SecondPass = new FunctionAnalysis2ndPass(this, method.Method)
        };
    };

    MakePopulatedCacheEntry = (id, args) => {
        var result = new JSFunctionExpression(
            new JSMethod(args.Method, args.Info, MethodTypes),
            args.Translator.Variables,
            args.Parameters,
            args.Body,
            MethodTypes
        );

        PendingTransformsQueue.TryEnqueue(id);

        return new Entry(id, Locks) {
            Info = args.Info,
            Reference = args.Method,
            Expression = result,
            SpecialIdentifiers = args.Translator.SpecialIdentifiers
        };
    };

    MakeNullCacheEntry = (id, args) => {
        return new Entry(id, Locks) {
            Info = args.Info,
            Reference = args.Method,
            Expression = null
        };
    };
}
/// <summary>
/// Constructs a new instance of the <see cref="Container"/> class.
/// </summary>
/// <param name="targets">Optional. The target container whose registrations will be used for dependency lookup
/// when <see cref="Resolve(ResolveContext)"/> (and other operations) is called. If not provided, a new
/// <see cref="TargetContainer"/> instance is constructed. This will ultimately be available to inherited types,
/// after construction, through the <see cref="Targets"/> property.</param>
protected Container(IRootTargetContainer targets = null)
{
    _scope = new NonTrackingContainerScope(this);
    Targets = targets ?? new TargetContainer();

#if !ENABLE_IL_EMIT
    _cache = new ConcurrentCache(GetWorker);
#else
    _dynCache = DynamicCache.CreateCache(this);
#endif
}
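In the non-IL-emit branch above, the cache is constructed around the GetWorker delegate, so a miss is resolved by calling back into the container and the produced worker is memoized per key. The sketch below illustrates that delegate-backed shape under that assumption; it is not the library's actual ConcurrentCache.

// Hypothetical sketch of a delegate-backed cache: the factory passed to the
// constructor (standing in for GetWorker) is invoked on a miss and the result
// is memoized. Not taken from the library's source.
public sealed class FactoryCacheSketch<TKey, TValue>
{
    private readonly System.Collections.Concurrent.ConcurrentDictionary<TKey, TValue> _entries =
        new System.Collections.Concurrent.ConcurrentDictionary<TKey, TValue>();
    private readonly System.Func<TKey, TValue> _factory;

    public FactoryCacheSketch(System.Func<TKey, TValue> factory)
    {
        _factory = factory;
    }

    public TValue Get(TKey key)
    {
        // GetOrAdd may call the factory more than once under contention,
        // but a single value wins and is returned to every caller.
        return _entries.GetOrAdd(key, _factory);
    }
}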
public FunctionCache(ITypeInfoSource typeInfo)
{
    var comparer = new QualifiedMemberIdentifier.Comparer(typeInfo);

    Cache = new ConcurrentCache<QualifiedMemberIdentifier, Entry>(Environment.ProcessorCount, 4096, comparer);
    OptimizationQueue = new ConcurrentHashQueue<QualifiedMemberIdentifier>(Math.Max(1, Environment.ProcessorCount / 4), 4096, comparer);
    MethodTypes = new MethodTypeFactory();

    MakeCacheEntry = (id, method) => {
        OptimizationQueue.TryEnqueue(id);

        return new Entry {
            Info = method.Method,
            Reference = method.Reference,
            Identifier = id,
            ParameterNames = new HashSet<string>(from p in method.Method.Parameters select p.Name),
            SecondPass = new FunctionAnalysis2ndPass(this, method.Method)
        };
    };

    MakePopulatedCacheEntry = (id, args) => {
        var result = new JSFunctionExpression(
            new JSMethod(args.Method, args.Info, MethodTypes),
            args.Translator.Variables,
            args.Parameters,
            args.Body,
            MethodTypes
        );

        OptimizationQueue.TryEnqueue(id);

        return new Entry {
            Identifier = id,
            Info = args.Info,
            Reference = args.Method,
            Expression = result,
            Variables = args.Translator.Variables,
            ParameterNames = args.Translator.ParameterNames,
            SpecialIdentifiers = args.Translator.SpecialIdentifiers
        };
    };

    MakeNullCacheEntry = (id, args) => {
        return new Entry {
            Identifier = id,
            Info = args.Info,
            Reference = args.Method,
            Expression = null
        };
    };
}
public FunctionCache(ITypeInfoSource typeInfo) { var comparer = new QualifiedMemberIdentifier.Comparer(typeInfo); Cache = new ConcurrentCache<QualifiedMemberIdentifier, Entry>( Environment.ProcessorCount, 4096, comparer ); OptimizationQueue = new ConcurrentHashQueue<QualifiedMemberIdentifier>( Math.Max(1, Environment.ProcessorCount / 4), 4096, comparer ); MethodTypes = new MethodTypeFactory(); }
public FunctionCache(ITypeInfoSource typeInfo)
{
    var comparer = new QualifiedMemberIdentifier.Comparer(typeInfo);

    Cache = new ConcurrentCache<QualifiedMemberIdentifier, Entry>(Environment.ProcessorCount, 4096, comparer);
    OptimizationQueue = new ConcurrentHashQueue<QualifiedMemberIdentifier>(Math.Max(1, Environment.ProcessorCount / 4), 4096, comparer);
    MethodTypes = new MethodTypeFactory();

    MakeCacheEntry = (id, method) => {
        OptimizationQueue.TryEnqueue(id);

        return new Entry {
            Info = method.Method,
            Reference = method.Reference,
            Identifier = id,
            ParameterNames = new HashSet<string>(from p in method.Method.Parameters select p.Name),
            SecondPass = new FunctionAnalysis2ndPass(this, method.Method)
        };
    };

    MakePopulatedCacheEntry = (id, args) => {
        var result = new JSFunctionExpression(
            new JSMethod(args.Method, args.Info, MethodTypes),
            args.Translator.Variables,
            args.Parameters,
            args.Body,
            MethodTypes
        );

        OptimizationQueue.TryEnqueue(id);

        return new Entry {
            Identifier = id,
            Info = args.Info,
            Reference = args.Method,
            Expression = result,
            Variables = args.Translator.Variables,
            ParameterNames = args.Translator.ParameterNames,
            SpecialIdentifiers = args.Translator.SpecialIdentifiers
        };
    };

    MakeNullCacheEntry = (id, args) => {
        return new Entry {
            Identifier = id,
            Info = args.Info,
            Reference = args.Method,
            Expression = null
        };
    };
}
public AssemblyManifest() { MakeToken = (assemblyFullName) => { lock (_syncRoot) { AssignedIdentifiers = false; } return(new Token(assemblyFullName)); }; }
public AssemblyManifest () { MakeToken = (assemblyFullName) => { lock (_syncRoot) { AssignedIdentifiers = false; } return new Token(assemblyFullName); }; }
public FunctionCache(ITypeInfoSource typeInfo)
{
    TypeInfo = typeInfo;
    Comparer = new QualifiedMemberIdentifier.Comparer(typeInfo);

    Cache = new ConcurrentCache<QualifiedMemberIdentifier, Entry>(Environment.ProcessorCount, 4096, Comparer);
    PendingTransformsQueue = new ConcurrentHashQueue<QualifiedMemberIdentifier>(Math.Max(1, Environment.ProcessorCount / 4), 4096, Comparer);
    ActiveTransformPipelines = new ConcurrentDictionary<QualifiedMemberIdentifier, FunctionTransformPipeline>(Math.Max(1, Environment.ProcessorCount / 4), 128, Comparer);
    MethodTypes = new MethodTypeFactory();

    MakeCacheEntry = (id, method) => {
        PendingTransformsQueue.TryEnqueue(id);

        return new Entry(id, Locks) {
            Info = method.Method,
            Reference = method.Reference,
            SecondPass = new FunctionAnalysis2ndPass(this, method.Method)
        };
    };

    MakePopulatedCacheEntry = (id, args) => {
        var result = new JSFunctionExpression(
            new JSMethod(args.Method, args.Info, MethodTypes),
            args.Translator.Variables,
            args.Parameters,
            args.Body,
            MethodTypes
        );

        PendingTransformsQueue.TryEnqueue(id);

        return new Entry(id, Locks) {
            Info = args.Info,
            Reference = args.Method,
            Expression = result,
            Variables = args.Translator.Variables,
            SpecialIdentifiers = args.Translator.SpecialIdentifiers
        };
    };

    MakeNullCacheEntry = (id, args) => {
        return new Entry(id, Locks) {
            Info = args.Info,
            Reference = args.Method,
            Expression = null
        };
    };
}
public DebugDocumentsBuilder(SourceReferenceResolver resolverOpt, bool isDocumentNameCaseSensitive) { _resolverOpt = resolverOpt; _debugDocuments = new ConcurrentDictionary<string, Cci.DebugSourceDocument>( isDocumentNameCaseSensitive ? StringComparer.Ordinal : StringComparer.OrdinalIgnoreCase); _normalizedPathsCache = new ConcurrentCache<ValueTuple<string, string>, string>(16); }
public DebugDocumentsBuilder(SourceReferenceResolver resolverOpt, bool isDocumentNameCaseSensitive) { _resolverOpt = resolverOpt; _debugDocuments = new ConcurrentDictionary<string, Cci.DebugSourceDocument>( isDocumentNameCaseSensitive ? StringComparer.Ordinal : StringComparer.OrdinalIgnoreCase); _normalizedPathsCache = new ConcurrentCache<ValueTuple<string, string>, string>(16); _embeddedDocuments = ImmutableArray<Cci.DebugSourceDocument>.Empty; }
internal CachingBinderFactory(LanguageCompilation compilation, SyntaxTree syntaxTree) : base(compilation, syntaxTree)
{
    // 50 is more or less a guess, but it seems to work fine for scenarios that I tried.
    // we need something big enough to keep binders for most classes and some methods
    // in a typical syntax tree.
    // On the other side, note that the whole factory is weakly referenced and therefore short lived,
    // making this cache big is not very useful.
    // I noticed that while compiling Roslyn C# compiler most caches never see
    // more than 50 items added before getting collected.
    _binderCache = new ConcurrentCache<BinderCacheKey, Binder>(50);
}
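Both binder-factory constructors above only create the 50-entry cache; a consumer would typically probe it before building a binder. The helper below is a hypothetical illustration of that try-get/compute/try-add pattern; GetBinderSlow is a placeholder for the miss path, not an actual Roslyn member.

// Hypothetical lookup pattern over a small bounded cache like _binderCache.
// GetBinderSlow stands in for whatever builds the Binder on a miss.
private Binder GetOrCreateBinder(BinderCacheKey key)
{
    Binder binder;
    if (_binderCache.TryGetValue(key, out binder))
    {
        return binder;
    }

    binder = GetBinderSlow(key);

    // TryAdd can lose a race or be rejected once the cache is full;
    // correctness must not depend on the entry being retained.
    _binderCache.TryAdd(key, binder);
    return binder;
}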
public async Task ClearShouldClearStore() { var cache = new ConcurrentCache(); await cache.Add("001", "01", new DiscoveryResponse(_responses[0])); await cache.Add("002", "02", new DiscoveryResponse(_responses[1])); await cache.Clear(); Assert.IsTrue(cache.IsEmpty); }
public FunctionCache(ITypeInfoSource typeInfo) { var comparer = new QualifiedMemberIdentifier.Comparer(typeInfo); Cache = new ConcurrentCache<QualifiedMemberIdentifier, Entry>(Environment.ProcessorCount, 4096, comparer); OptimizationQueue = new ConcurrentHashQueue<QualifiedMemberIdentifier>(Math.Max(1, Environment.ProcessorCount / 4), 4096, comparer); MethodTypes = new MethodTypeFactory(); }
static DataRowFactory() { typeCount = 0; assemblyBuilder = Thread.GetDomain().DefineDynamicAssembly( new AssemblyName("EffortDataRowTypeLib"), AssemblyBuilderAccess.Run); moduleBuilder = assemblyBuilder.DefineDynamicModule("EffortDataRowTypeLib"); moduleBuilderLock = new object(); typeCache = new ConcurrentCache<TypeCacheEntryKey, Type>(); }
public async Task RemoveShouldRemoveStoredResponse() { var cache = new ConcurrentCache(); var mcc = "001"; var mnc = "01"; await cache.Add(mcc, mnc, new DiscoveryResponse(_responses[0])); await cache.Remove(mcc, mnc); var actual = await cache.Get(mcc, mnc); Assert.IsNull(actual); }
public TypeInfoProvider()
{
    var levelOfParallelism = Math.Max(1, Environment.ProcessorCount / 2);

    Assemblies = new HashSet<AssemblyDefinition>();
    ProxyAssemblyNames = new HashSet<string>();
    TypeProxies = new Dictionary<TypeIdentifier, ProxyInfo>();
    DirectProxiesByTypeName = new Dictionary<string, HashSet<ProxyInfo>>();
    ProxiesByName = new ConcurrentCache<string, string[]>(levelOfParallelism, 256);
    TypeAssignabilityCache = new ConcurrentCache<Tuple<string, string>, bool>(levelOfParallelism, 4096);
    TypeInformation = new ConcurrentCache<TypeIdentifier, TypeInfo>(levelOfParallelism, 4096);
    ModuleInformation = new ConcurrentCache<string, ModuleInfo>(levelOfParallelism, 256);

    MakeTypeInfo = _MakeTypeInfo;
}
public async Task CacheShouldGetResponseWhenMultipleStored() { var cache = new ConcurrentCache(); var expected = new DiscoveryResponse(_responses[1]); var mcc = "001"; var mnc = "01"; await cache.Add(mcc, mnc, expected); await cache.Add("002", "02", new DiscoveryResponse(_responses[0])); var actual = await cache.Get(mcc, mnc); Assert.IsNotNull(actual); Assert.IsTrue(actual.Cached); Assert.IsNotNull(actual.ResponseData.response.apis); }
public void GetOrAddGet() { var key = "WebApiClient"; var cache = new ConcurrentCache<string, int>(); Parallel.For(0, 1000, (i) => { var value = cache.GetOrAdd(key, k => { Interlocked.Increment(ref this.count); return 1; }); Assert.True(value == 1); Assert.True(count == 1); }); }
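The Parallel.For assertion that count stays at 1 only holds if the value factory runs at most once per key. One common way to provide that guarantee is to wrap values in Lazy<T> inside a ConcurrentDictionary; the sketch below shows that technique and is not necessarily how this ConcurrentCache is implemented.

using System;
using System.Collections.Concurrent;
using System.Threading;

// Sketch of a GetOrAdd whose value factory executes at most once per key,
// which is the behaviour the parallel test above depends on.
public sealed class OncePerKeyCacheSketch<TKey, TValue>
{
    private readonly ConcurrentDictionary<TKey, Lazy<TValue>> _map =
        new ConcurrentDictionary<TKey, Lazy<TValue>>();

    public TValue GetOrAdd(TKey key, Func<TKey, TValue> valueFactory)
    {
        // Racing threads all receive the same Lazy; only the first access to
        // Value runs the factory, so its side effects happen exactly once.
        var lazy = _map.GetOrAdd(
            key,
            k => new Lazy<TValue>(() => valueFactory(k), LazyThreadSafetyMode.ExecutionAndPublication));

        return lazy.Value;
    }
}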
protected TypeInfoProvider(TypeInfoProvider cloneSource)
{
    Assemblies = new HashSet<AssemblyDefinition>(cloneSource.Assemblies);
    ProxyAssemblyNames = new HashSet<string>(cloneSource.ProxyAssemblyNames);
    TypeProxies = new Dictionary<TypeIdentifier, ProxyInfo>(cloneSource.TypeProxies, TypeIdentifier.Comparer);

    DirectProxiesByTypeName = new Dictionary<string, HashSet<ProxyInfo>>();
    foreach (var kvp in cloneSource.DirectProxiesByTypeName)
    {
        DirectProxiesByTypeName.Add(kvp.Key, new HashSet<ProxyInfo>(kvp.Value));
    }

    ProxiesByName = cloneSource.ProxiesByName.Clone();
    TypeAssignabilityCache = cloneSource.TypeAssignabilityCache.Clone();
    TypeInformation = cloneSource.TypeInformation.Clone();
    ModuleInformation = cloneSource.ModuleInformation.Clone();

    MakeTypeInfo = _MakeTypeInfo;
}
public async Task AddShouldStoreDiscoveryResponse()
{
    var cache = new ConcurrentCache();
    var response = new DiscoveryResponse(_responses[0]);
    var mcc = "001";
    var mnc = "01";
    string json = Newtonsoft.Json.JsonConvert.SerializeObject(response, new Newtonsoft.Json.JsonSerializerSettings { NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore });

    await cache.Add(mcc, mnc, response);
    var actual = await cache.Get(mcc, mnc);

    Assert.IsFalse(cache.IsEmpty);
    Assert.IsNotNull(actual);
    Assert.IsTrue(actual.Cached);
    Assert.AreEqual(response.ResponseData.response, actual.ResponseData.response);
}
/// <summary> /// Initializes static members of the <see cref="DbSchemaStore" /> class. /// </summary> static DbSchemaStore() { store = new ConcurrentCache<DbSchemaKey, DbSchema>(); }
static TrackedLockCollection () { MakeWaitList = (lck) => new OrderedDictionary<Wait, bool>(); }
/// <summary> /// Initializes static members of the <see cref="DbContainerStore" /> class. /// </summary> static DbContainerStore() { store = new ConcurrentCache<string, DbContainer>(); }
public FunctionCache() { Cache = new ConcurrentCache<QualifiedMemberIdentifier, Entry>(Environment.ProcessorCount, 4096); OptimizationQueue = new ConcurrentHashQueue<QualifiedMemberIdentifier>(Environment.ProcessorCount, 4096); }
/// <summary> /// Initializes static members of the the /// <see cref="CachingTableDataLoaderStore" /> class. /// </summary> static CachingTableDataLoaderStore() { store = new ConcurrentCache< CachingTableDataLoaderKey, CachingTableDataLoader>(); }
private static void AddToCache(object instance, object param, MethodInfo methodInfo)
{
    ConcurrentCache<Type, MethodInfo> instanceCache;
    if (methodCache.TryGetValue(instance.GetType(), out instanceCache))
    {
        instanceCache.TryAdd(param.GetType(), methodInfo);
    }
    else
    {
        instanceCache = new ConcurrentCache<Type, MethodInfo>();
        instanceCache.TryAdd(param.GetType(), methodInfo);
        methodCache.TryAdd(instance.GetType(), instanceCache);
    }
}
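Note that AddToCache's check-then-add is not atomic: two threads that both miss will build separate inner caches, and only one outer TryAdd wins, so the loser's MethodInfo entry is silently dropped. A race-free variant is sketched below; it assumes the outer map can be a ConcurrentDictionary and is not the original code.

// Sketch of a race-free variant: GetOrAdd on the outer map guarantees every
// thread sees the same inner cache. Assumes a ConcurrentDictionary outer map,
// unlike the original methodCache.
private static readonly System.Collections.Concurrent.ConcurrentDictionary<Type, ConcurrentCache<Type, MethodInfo>> methodCacheByType =
    new System.Collections.Concurrent.ConcurrentDictionary<Type, ConcurrentCache<Type, MethodInfo>>();

private static void AddToCacheSafe(object instance, object param, MethodInfo methodInfo)
{
    var instanceCache = methodCacheByType.GetOrAdd(
        instance.GetType(),
        _ => new ConcurrentCache<Type, MethodInfo>());

    instanceCache.TryAdd(param.GetType(), methodInfo);
}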
/// <summary> /// Initializes static members of the <see cref="ObjectContextTypeStore" /> class. /// </summary> static ObjectContextTypeStore() { store = new ConcurrentCache<ObjectContextTypeKey, Type>(); }
/// <summary> /// Initializes static members of the the /// <see cref="DataLoaderConfigurationLatchStore" /> class. /// </summary> static DataLoaderConfigurationLatchStore() { store = new ConcurrentCache< DataLoaderConfigurationKey, DataLoaderConfigurationLatch>(); }
public void SetUp() { cache = new ConcurrentCache<string, int>(); }
private ImmutableArray<Symbol> GetMembersWorker(string name)
{
    var originalMembers = _originalDefinition.GetMembers(name);
    if (originalMembers.IsDefaultOrEmpty)
    {
        return originalMembers;
    }

    var builder = ArrayBuilder<Symbol>.GetInstance(originalMembers.Length);
    foreach (var t in originalMembers)
    {
        builder.Add(t.SymbolAsMember(this));
    }

    var substitutedMembers = builder.ToImmutableAndFree();

    // cache of size 8 seems reasonable here.
    // considering that substituted methods have about 10 reference fields,
    // reusing just one may make the cache profitable.
    var cache = _lazyMembersByNameCache ??
                (_lazyMembersByNameCache = new ConcurrentCache<string, ImmutableArray<Symbol>>(8));
    cache.TryAdd(name, substitutedMembers);

    return substitutedMembers;
}
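Both GetMembersWorker variants above only populate the lazily created per-name cache; the read side that consults it first would look roughly like the caller below. It is a hypothetical sketch, not the actual Roslyn GetMembers implementation.

// Hypothetical fast path over _lazyMembersByNameCache: answer from the cache
// when possible, otherwise fall back to GetMembersWorker (which repopulates it).
public ImmutableArray<Symbol> GetMembersCached(string name)
{
    var cache = _lazyMembersByNameCache;

    ImmutableArray<Symbol> members;
    if (cache != null && cache.TryGetValue(name, out members))
    {
        return members;
    }

    return GetMembersWorker(name);
}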
/// <summary>
/// Initializes static members of the <see cref="MetadataWorkspaceStore" /> class.
/// </summary>
static MetadataWorkspaceStore()
{
    store = new ConcurrentCache<string, MetadataWorkspace>();
}