Example #1
        /// <summary>
        /// Stores the specified key and value in the cache.
        /// </summary>
        /// <param name="key">The key of the value to store.</param>
        /// <param name="value">The value to store.</param>
        /// <exception cref="ArgumentNullException"><paramref name="key"/> is null.</exception>
        /// <exception cref="InvalidOperationException">The cache hasn't been initialized.</exception>
        /// <remarks>
        /// <para><paramref name="value"/> is stored in the set associated with <paramref name="key"/>.
        /// If <paramref name="key"/> is already stored, its existing value is replaced with <paramref name="value"/>.
        /// Otherwise, if the set has an open slot, <paramref name="value"/> is stored in that slot. If the key isn't
        /// already stored and the set is full, <see cref="ReplacementPolicy"/> is applied to select an existing slot,
        /// whose data is evicted and replaced with <paramref name="value"/>.</para>
        /// </remarks>
        public void Store(TKey key, TValue value)
        {
            if (key == null)
            {
                throw new ArgumentNullException(nameof(key));
            }
            if (!_initialized)
            {
                throw new InvalidOperationException("The cache must be initialized before use.");
            }

            CacheItem<TKey, TValue> newCacheItem = new()
            {
                Key              = key,
                Value            = value,
                AddedDate        = DateTime.Now,
                LastAccessedDate = DateTime.Now,
                HitCount         = 0
            };

            // The key's hash code determines in which set in the cache the value gets stored
            int hashCode = key.GetHashCode();
            Dictionary<TKey, CacheItem<TKey, TValue>> setDictionary = GetCacheSet(hashCode);

            if (setDictionary.ContainsKey(key))
            {
                // We've already stored this key, so just update its value
                setDictionary[key] = newCacheItem;
            }
            else if (setDictionary.Count >= SetSize)
            {
                // This set is already full, so apply the replacement policy to determine
                // which key gets evicted.
                // Note that setDictionary.Count should never exceed SetSize, but this check is
                // cheap protection in case a bug is introduced in the future.
                TKey keyToReplace = ReplacementPolicy.GetItemToReplace(setDictionary.Values);
                setDictionary.Remove(keyToReplace);
                setDictionary[key] = newCacheItem;
            }
            else
            {
                // The key isn't already stored, and its set isn't full, so add it
                setDictionary[key] = newCacheItem;
            }
        }
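A minimal usage sketch follows. The cache type name, its constructor arguments, and the TryGetValue accessor are assumptions made for illustration; only the Store semantics come from the example above.

        // Hypothetical usage; the type name, constructor, and TryGetValue are assumed for illustration.
        var cache = new SetAssociativeCache<string, int>(/* set count, set size, replacement policy */);

        cache.Store("alpha", 1);   // key not yet present: placed in an open slot of its set
        cache.Store("alpha", 2);   // key already present: the stored value is replaced
        cache.Store("beta", 3);    // if the set were full, ReplacementPolicy would pick the slot to evict

        // if (cache.TryGetValue("alpha", out int value)) { /* value == 2 */ }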
Example #2
 /**
  * Constructor of the generic NWaySetAssociativeCache.
  * Requires a MemoryAccess subclass instance, a ReplacementPolicy subclass instance, the cache size, the number of sets, and the writing policy.
  * Throws an exception if null values are passed to the constructor, or if the cache and set parameters are out of bounds.
  * Note: the cache size is treated as a suggestion and can expand by (nWay - 1 - ((cacheSize - 1) % nWay)) so that it divides evenly into sets.
  **/
 /// <include file='documentation.xml' path='docs/members[@name="Cache"]/NWaySetAssociativeCache/Constructor/*'/>
 public NWaySetAssociativeCache(MemoryAccess<TKey, TValue> memoryAccess, ReplacementPolicy<TKey, TValue> replacementPolicy, int cacheSize, int nWay, bool isWriteBack)
 {
     //Nonsensical parameters: throw an exception
     if (memoryAccess == null || replacementPolicy == null || cacheSize < 1 || nWay > cacheSize || nWay < 1)
     {
         throw new ConstructorParameterException();
     }
     this.memoryAccess      = memoryAccess;
     this.replacementPolicy = replacementPolicy;
     //Calculate the number of lines per set. This might slightly increase the cache size (by nWay - 1 - ((cacheSize - 1) % nWay)).
     lineNumber       = (int)Math.Ceiling(cacheSize / ((double)nWay));
     this.nWay        = nWay;
     this.cacheSize   = lineNumber * nWay;
     this.isWriteBack = isWriteBack;
     isModified       = new bool[this.cacheSize];
     keys             = new TKey[this.cacheSize];
     values           = new TValue[this.cacheSize];
     accessList       = new List<List<int>>();
     //Add a new List of ints for each set in the cache
     for (int i = 0; i < nWay; i++)
     {
         accessList.Add(new List<int>());
     }
 }
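To make the size rounding concrete, here is a small worked sketch using the same formula; the variable names simply mirror the constructor, and the snippet is illustrative only.

     int cacheSize  = 10, nWay = 4;
     int lineNumber = (int)Math.Ceiling(cacheSize / (double)nWay); // 3 lines per set
     int actualSize = lineNumber * nWay;                           // 12 entries in total
     int expansion  = actualSize - cacheSize;                      // 2 == nWay - 1 - ((cacheSize - 1) % nWay)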
Example #3
 /// <summary>
 /// Indicates the replacement behavior to apply when the document already exists.
 /// </summary>
 /// <param name="mode">One of the replacement modes.</param>
 public AddOptions SetReplacementPolicy(ReplacementPolicy mode)
 {
     ReplacePolicy = mode;
     return this;
 }
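A sketch of how the fluent setter might be chained; the parameterless AddOptions constructor and the LRU enum member shown here are assumptions made for illustration, not something confirmed by the source.

 // Hypothetical chaining; the constructor and the enum member name are assumed.
 AddOptions options = new AddOptions().SetReplacementPolicy(ReplacementPolicy.LRU);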
Example #4
        /**
         * GetLowestMissN method.
         * Parameterized by the generic TKey and TValue, which are supplied at run time.
         * Inputs: cacheSize, test entries (keys, values, and which operations to call on them), and a replacementPolicy.
         * Outputs: int optimalN, the N value that yields the lowest miss rate.
         **/
        /// <include file='documentation.xml' path='docs/members[@name="Utility"]/SetOptimizer/GetLowestMissN/*'/>
        public static int GetLowestMissN(int cacheSize, Tuple<TKey, TValue, NWaySetAssociativeCache<TKey, TValue>.CacheOperation>[] testEntries, ReplacementPolicy<TKey, TValue> replacementPolicy)
        {
            //The N value with the lowest miss rate found so far
            int optimalN = 1;
            //Tracks the minimum number of misses; -1 means no run has completed yet
            int minCount = -1;

            //Loop, which initializes a cache for each N value from 1 to the cache size.
            for (int i = 1; i <= cacheSize; i++)
            {
                //Initialize the MemoryAccess subclass to keep track of misses.
                MemoryNode<TKey, TValue> memory = new MemoryNode<TKey, TValue>();
                NWaySetAssociativeCache<TKey, TValue> cache;
                cache = new NWaySetAssociativeCache<TKey, TValue>(memory, replacementPolicy, cacheSize, i, true);
                //For each tuple (key,value,operation), call the InvokeReflected method of the cache, which will use the defined operation upon the key-value pair.
                foreach (Tuple<TKey, TValue, NWaySetAssociativeCache<TKey, TValue>.CacheOperation> entry in testEntries)
                {
                    cache.InvokeReflected(entry);
                }
                //If minCount hasn't been set yet, record the first miss count.
                if (minCount == -1)
                {
                    minCount = memory.getCount;
                    optimalN = i;
                }
                //Else if the current miss count is lower than the minimum so far, record the new minimum.
                else if (memory.getCount < minCount)
                {
                    minCount = memory.getCount;
                    optimalN = i;
                }
            }
            return optimalN;
        }
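A usage sketch for GetLowestMissN follows, assuming the enclosing SetOptimizer class takes <string, int> type arguments, that CacheOperation exposes members named Store and Get, and that an LRU subclass of ReplacementPolicy<TKey, TValue> exists; all of these names are assumptions made for illustration.

        // Hypothetical test run; the CacheOperation member names and the LRU policy class are assumed.
        var testEntries = new[]
        {
            Tuple.Create("a", 1, NWaySetAssociativeCache<string, int>.CacheOperation.Store),
            Tuple.Create("b", 2, NWaySetAssociativeCache<string, int>.CacheOperation.Store),
            Tuple.Create("a", 1, NWaySetAssociativeCache<string, int>.CacheOperation.Get)
        };
        int bestN = SetOptimizer<string, int>.GetLowestMissN(4, testEntries, new LRUReplacementPolicy<string, int>());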