public void DecrementActiveReference(FunctionDataCacheKey cacheKey)
 {
     lock (_lock)
     {
         DecrementActiveReferenceCore(cacheKey);
     }
 }
Example #2
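The test below takes an expected parameter; presumably it is driven by xUnit theory data (an assumption; the attribute lines are not part of this excerpt), for example:

        // Assumed test-data attributes; not shown in the original excerpt.
        // [Theory]
        // [InlineData(true)]
        // [InlineData(false)]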
        public void TryPutToCache_VerifySuccess(bool expected)
        {
            // Arrange
            FunctionDataCacheKey key   = CreateFunctionDataCacheKey();
            bool isIncrementActiveRefs = true;
            Mock <SharedMemoryMetadata> sharedMemMetaMock = CreateMockSharedMemoryMetadata();
            SharedMemoryMetadata        sharedMemMeta     = sharedMemMetaMock.Object;
            Mock <IFunctionDataCache>   cacheMock         = CreateMockFunctionDataCache();

            cacheMock
            .Setup(c => c.TryPut(key, sharedMemMeta, isIncrementActiveRefs, false))
            .Returns(expected)
            .Verifiable();
            IFunctionDataCache cache             = cacheMock.Object;
            Mock <Stream>      blobStreamMock    = CreateMockBlobStream();
            Stream             blobStream        = blobStreamMock.Object;
            CacheableReadBlob  cacheableReadBlob = CreateProductUnderTest(key, blobStream, cache);

            // Act
            bool result = cacheableReadBlob.TryPutToCache(sharedMemMeta, isIncrementActiveRefs);

            // Assert
            Assert.Equal(expected, result);
            cacheMock.Verify();
        }
 public MockCacheAwareReadObject(FunctionDataCacheKey cacheKey, Stream blobStream, IFunctionDataCache functionDataCache)
 {
     _functionDataCache = functionDataCache;
     CacheKey           = cacheKey;
     BlobStream         = blobStream;
     IsCacheHit         = false;
 }
 public MockCacheAwareReadObject(FunctionDataCacheKey cacheKey, SharedMemoryMetadata cacheObject, IFunctionDataCache functionDataCache)
 {
     _functionDataCache = functionDataCache;
     CacheKey           = cacheKey;
     CacheObject        = cacheObject;
     IsCacheHit         = true;
 }
        public bool TryGet(FunctionDataCacheKey cacheKey, bool isIncrementActiveReference, out SharedMemoryMetadata sharedMemoryMeta)
        {
            lock (_lock)
            {
                // Get the value from the local cache
                if (!_localCache.TryGetValue(cacheKey, out sharedMemoryMeta))
                {
                    // Key does not exist in the local cache
                    _logger.LogTrace("Cache miss for object: {ObjectName} and version: {Version}", cacheKey.Id, cacheKey.Version);

                    return(false);
                }

                // Update the LRU list (mark this key as the most recently used)
                AddToEndOfLRU(cacheKey);

                if (isIncrementActiveReference)
                {
                    IncrementActiveReference(cacheKey);
                }

                _logger.LogTrace("Cache hit for object: {ObjectName} and version: {Version} with size: {Size} in shared memory map: {MapName}", cacheKey.Id, cacheKey.Version, sharedMemoryMeta.Count, sharedMemoryMeta.MemoryMapName);

                return(true);
            }
        }
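A caller that passes isIncrementActiveReference: true is expected to release the reference with DecrementActiveReference once it has finished reading the object; otherwise the entry can never be evicted. A minimal usage sketch, assuming cache is an IFunctionDataCache that already holds an entry for key (illustrative only, not taken from the source):

        if (cache.TryGet(key, isIncrementActiveReference: true, out SharedMemoryMetadata sharedMemoryMeta))
        {
            try
            {
                // Read the object's content from the shared memory map named sharedMemoryMeta.MemoryMapName.
            }
            finally
            {
                // Release the reference so the entry becomes eligible for eviction again.
                cache.DecrementActiveReference(key);
            }
        }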
Example #6
        /// <summary>
        /// Put the object into the cache by generating a key for the object based on the blob's properties.
        /// </summary>
        /// <param name="properties">Properties of the blob corresponding to the object being written.</param>
        /// <param name="isDeleteOnFailure">If True, in the case where the cache is unable to insert this object, the local resources pointed to by the Stream (which were to be cached) will be deleted.</param>
        /// <returns>True if the object was written to the <see cref="IFunctionDataCache"/>, false otherwise.</returns>
        private bool TryPutToFunctionDataCacheCore(BlobProperties properties, bool isDeleteOnFailure)
        {
            string eTag = properties.ETag.ToString();
            string id   = _blob.BlobClient.Uri.AbsoluteUri;
            FunctionDataCacheKey cacheKey = new FunctionDataCacheKey(id, eTag);

            return(_functionDataCache.TryPut(cacheKey, _cacheObject, isIncrementActiveReference: false, isDeleteOnFailure));
        }
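The key pairs the blob's absolute URI (its identity) with its ETag (its version), so a modified blob yields a different key and a stale cache entry is never returned for it. A small illustration with made-up values, assuming FunctionDataCacheKey equality covers both the Id and the Version:

            // Hypothetical values: two versions of the same blob map to distinct cache keys.
            var keyV1 = new FunctionDataCacheKey("https://account.blob.core.windows.net/container/blob", "\"0x1\"");
            var keyV2 = new FunctionDataCacheKey("https://account.blob.core.windows.net/container/blob", "\"0x2\"");
            // keyV1 and keyV2 address different cache entries.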
        public async Task PutObject_NoActiveReferences_ForceOneEviction_VerifyCorrectEviction()
        {
            int    contentSize  = 2 * 1024 * 1024; // 2MB
            int    cacheSize    = 6 * 1024 * 1024; // 6MB
            string cacheSizeVal = cacheSize.ToString();

            IEnvironment environment = new TestEnvironment();

            environment.SetEnvironmentVariable(FunctionDataCacheConstants.FunctionDataCacheMaximumSizeBytesSettingName, cacheSizeVal);
            environment.SetEnvironmentVariable(FunctionDataCacheConstants.FunctionDataCacheEnabledSettingName, "1");

            using (ISharedMemoryManager manager = new SharedMemoryManager(_loggerFactory, _mapAccessor))
                using (FunctionDataCache cache = new FunctionDataCache(manager, _loggerFactory, environment))
                {
                    // Prepare content
                    byte[] content = TestUtils.GetRandomBytesInArray(contentSize);

                    // Put into shared memory as three distinct objects
                    SharedMemoryMetadata metadata1 = await manager.PutObjectAsync(content);

                    SharedMemoryMetadata metadata2 = await manager.PutObjectAsync(content);

                    SharedMemoryMetadata metadata3 = await manager.PutObjectAsync(content);

                    // Put the three objects into the cache
                    FunctionDataCacheKey key1 = new FunctionDataCacheKey("foo1", "bar1");
                    FunctionDataCacheKey key2 = new FunctionDataCacheKey("foo2", "bar2");
                    FunctionDataCacheKey key3 = new FunctionDataCacheKey("foo3", "bar3");
                    Assert.True(cache.TryPut(key1, metadata1, isIncrementActiveReference: false, isDeleteOnFailure: false));
                    Assert.True(cache.TryPut(key2, metadata2, isIncrementActiveReference: false, isDeleteOnFailure: false));
                    Assert.True(cache.TryPut(key3, metadata3, isIncrementActiveReference: false, isDeleteOnFailure: false));

                    // Verify that the cache is full
                    Assert.Equal(0, cache.RemainingCapacityBytes);

                    // At this point, the cache is full.
                    // We will create another object and try to insert it.
                    // The insert should succeed, as an existing object will be evicted to make room for it.
                    SharedMemoryMetadata metadata4 = await manager.PutObjectAsync(content);

                    FunctionDataCacheKey key4 = new FunctionDataCacheKey("foo4", "bar4");
                    Assert.True(cache.TryPut(key4, metadata4, isIncrementActiveReference: false, isDeleteOnFailure: false));

                    // The first object should be evicted (least recently used) by now
                    Assert.False(cache.TryGet(key1, isIncrementActiveReference: false, out var _));

                    // Try to open the shared memory map of the first object and ensure it is removed and cannot be opened
                    Assert.False(_mapAccessor.TryOpen(metadata1.MemoryMapName, out var _));

                    // The other three objects (key2 and key3, which were not evicted, and key4, which triggered the eviction) should still be present
                    Assert.True(cache.TryGet(key2, isIncrementActiveReference: false, out var _));
                    Assert.True(cache.TryGet(key3, isIncrementActiveReference: false, out var _));
                    Assert.True(cache.TryGet(key4, isIncrementActiveReference: false, out var _));

                    // Verify that the cache is full
                    Assert.Equal(0, cache.RemainingCapacityBytes);
                }
        }
Example #8
 /// <summary>
 /// Used when the object was found in the cache.
 /// </summary>
 /// <param name="cacheKey">Key associated with this object to address it in the <see cref="IFunctionDataCache"/>.</param>
 /// <param name="cacheObject">Describes the shared memory region containing this object.</param>
 /// <param name="functionDataCache">Cache in which to put this object when required.</param>
 public CacheableReadBlob(FunctionDataCacheKey cacheKey, SharedMemoryMetadata cacheObject, IFunctionDataCache functionDataCache)
 {
     IsCacheHit         = true;
     CacheKey           = cacheKey;
     CacheObject        = cacheObject;
     _functionDataCache = functionDataCache;
     _isDisposed        = false;
     _decrementRefCountInCacheOnDispose = true;
 }
Example #9
 /// <summary>
 /// Used when the object was not found in the cache and will be retrieved from storage.
 /// </summary>
 /// <param name="cacheKey">Key associated with this object to address it in the <see cref="IFunctionDataCache"/>.</param>
 /// <param name="blobStream">Stream to use for reading this object from storage.</param>
 /// <param name="functionDataCache">Cache in which to put this object when required.</param>
 public CacheableReadBlob(FunctionDataCacheKey cacheKey, Stream blobStream, IFunctionDataCache functionDataCache)
 {
     IsCacheHit         = false;
     CacheKey           = cacheKey;
     BlobStream         = blobStream;
     _functionDataCache = functionDataCache;
     _isDisposed        = false;
     _decrementRefCountInCacheOnDispose = false;
 }
        public bool TryPut(FunctionDataCacheKey cacheKey, SharedMemoryMetadata sharedMemoryMeta, bool isIncrementActiveReference, bool isDeleteOnFailure)
        {
            bool isFailure = true;

            try
            {
                lock (_lock)
                {
                    // Check if the key is already present in the cache
                    if (_localCache.ContainsKey(cacheKey))
                    {
                        // Key already exists in the local cache; do not overwrite and don't delete the existing data.
                        _logger.LogTrace("Cannot insert object into cache, it already exists: {ObjectName} and version: {Version}", cacheKey.Id, cacheKey.Version);

                        isFailure = false;
                        return(false);
                    }

                    long bytesRequired = sharedMemoryMeta.Count;
                    if (!EvictUntilCapacityAvailable(bytesRequired))
                    {
                        _logger.LogTrace("Cannot insert object into cache, not enough space (required: {RequiredBytes} < available: {CapacityBytes})", bytesRequired, RemainingCapacityBytes);

                        return(false);
                    }

                    // Add the mapping into the local cache
                    _localCache.Add(cacheKey, sharedMemoryMeta);

                    // Update the LRU list (mark this key as the most recently used)
                    AddToEndOfLRU(cacheKey);

                    if (isIncrementActiveReference)
                    {
                        IncrementActiveReference(cacheKey);
                    }

                    // Update the cache utilization
                    RemainingCapacityBytes -= sharedMemoryMeta.Count;

                    _logger.LogTrace("Object inserted into cache: {ObjectName} and version: {Version} with size: {Size} in shared memory map: {MapName} with updated capacity: {CapacityBytes} bytes", cacheKey.Id, cacheKey.Version, sharedMemoryMeta.Count, sharedMemoryMeta.MemoryMapName, RemainingCapacityBytes);

                    isFailure = false;
                    return(true);
                }
            }
            finally
            {
                if (isFailure && isDeleteOnFailure)
                {
                    if (!_sharedMemoryManager.TryFreeSharedMemoryMap(sharedMemoryMeta.MemoryMapName))
                    {
                        _logger.LogTrace("Cannot free shared memory map: {MapName} with size: {Size} bytes on failure to insert into the cache", sharedMemoryMeta.MemoryMapName, sharedMemoryMeta.Count);
                    }
                }
            }
        }
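The isDeleteOnFailure flag ties the lifetime of the shared memory map to the outcome of the insert: if the insert fails while the flag is set, the finally block above frees the map immediately. A minimal caller-side sketch, assuming manager and cache are constructed as in the tests elsewhere in these examples (illustrative only):

            SharedMemoryMetadata meta = await manager.PutObjectAsync(content);
            var key = new FunctionDataCacheKey("example-id", "example-version");
            if (!cache.TryPut(key, meta, isIncrementActiveReference: false, isDeleteOnFailure: true))
            {
                // The insert failed and isDeleteOnFailure was true, so the shared memory map
                // backing meta has already been freed by the cache; no further cleanup is needed here.
            }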
        /// <summary>
        /// Adds the given key to the end of the LRU list.
        /// This key is the most recently used key.
        /// Note: Assumes that it is called in a thread-safe manner (i.e. <see cref="_lock"/> is held
        /// by the caller.)
        /// </summary>
        /// <param name="cacheKey">Key to add to the end of the LRU list.</param>
        private void AddToEndOfLRU(FunctionDataCacheKey cacheKey)
        {
            if (LRUList.Contains(cacheKey))
            {
                RemoveFromLRU(cacheKey);
            }

            LRUList.AddLast(cacheKey);
        }
        /// <summary>
        /// Increments the active reference count for the given key.
        /// Note: Assumes that it is called in a thread-safe manner (i.e. <see cref="_lock"/> is held
        /// by the caller.)
        /// </summary>
        /// <param name="cacheKey">Key for which to increment the active reference count.</param>
        private void IncrementActiveReference(FunctionDataCacheKey cacheKey)
        {
            long activeReferences = 0;

            if (ActiveReferences.TryGetValue(cacheKey, out activeReferences))
            {
                ActiveReferences.Remove(cacheKey);
            }
            ActiveReferences.Add(cacheKey, activeReferences + 1);
        }
        /// <summary>
        /// Evicts the least recently used object from the cache such that the object has no active references.
        /// Note: Assumes that it is called in a thread-safe manner (i.e. <see cref="_lock"/> is held
        /// by the caller.)
        /// </summary>
        /// <returns><see langword="true"/> if an object was evicted, <see langword="false"/> otherwise.</returns>
        internal bool EvictOne()
        {
            FunctionDataCacheKey cacheKey = GetFromFrontOfLRU();

            if (cacheKey == null)
            {
                return(false);
            }

            return(TryRemove(cacheKey));
        }
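EvictUntilCapacityAvailable, which TryPut calls above, is not included in these examples. A plausible sketch (an assumption, not the actual implementation) keeps evicting the least recently used unreferenced entry until the request fits:

        // Assumed sketch of EvictUntilCapacityAvailable; not taken from the source.
        private bool EvictUntilCapacityAvailable(long bytesRequired)
        {
            while (RemainingCapacityBytes < bytesRequired)
            {
                // EvictOne returns false once no evictable (unreferenced) entry remains,
                // meaning the required capacity cannot be freed.
                if (!EvictOne())
                {
                    return false;
                }
            }

            return true;
        }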
        /// <summary>
        /// Decrements the active reference count for the given key.
        /// Note: Assumes that it is called in a thread-safe manner (i.e. <see cref="_lock"/> is held
        /// by the caller.)
        /// </summary>
        /// <param name="cacheKey">Key for which to decrement the active reference count.</param>
        private void DecrementActiveReferenceCore(FunctionDataCacheKey cacheKey)
        {
            long activeReferences = 0;

            if (ActiveReferences.TryGetValue(cacheKey, out activeReferences))
            {
                ActiveReferences.Remove(cacheKey);
                if (activeReferences > 1)
                {
                    ActiveReferences.Add(cacheKey, activeReferences - 1);
                }
            }
        }
Example #15
        public void CreateCacheableReadBlob_IsCacheHit()
        {
            // Arrange
            FunctionDataCacheKey        key               = CreateFunctionDataCacheKey();
            Mock <IFunctionDataCache>   cacheMock         = CreateMockFunctionDataCache();
            IFunctionDataCache          cache             = cacheMock.Object;
            Mock <SharedMemoryMetadata> sharedMemMetaMock = CreateMockSharedMemoryMetadata();
            SharedMemoryMetadata        sharedMemMeta     = sharedMemMetaMock.Object;
            CacheableReadBlob           cacheableReadBlob = CreateProductUnderTest(key, sharedMemMeta, cache);

            // Act
            bool isCacheHit = cacheableReadBlob.IsCacheHit;

            // Assert
            Assert.True(isCacheHit);
        }
Example #16
        private static async Task <FunctionDataCacheKey> GetFunctionDataCacheKey(BlobWithContainer <BlobBaseClient> blob, CancellationToken cancellationToken)
        {
            // To be strongly consistent, first check the latest version present in blob storage;
            // query for that particular version in the cache.
            BlobProperties properties = await blob.BlobClient.FetchPropertiesOrNullIfNotExistAsync(cancellationToken).ConfigureAwait(false);

            if (properties == null)
            {
                return(null);
            }
            string eTag = properties.ETag.ToString();
            string id   = blob.BlobClient.Uri.AbsoluteUri;
            FunctionDataCacheKey cacheKey = new FunctionDataCacheKey(id, eTag);

            return(cacheKey);
        }
Example #17
        public void CreateCacheableReadBlob_IsCacheMiss()
        {
            // Arrange
            FunctionDataCacheKey      key            = CreateFunctionDataCacheKey();
            Mock <IFunctionDataCache> cacheMock      = CreateMockFunctionDataCache();
            IFunctionDataCache        cache          = cacheMock.Object;
            Mock <Stream>             blobStreamMock = CreateMockBlobStream();
            Stream            blobStream             = blobStreamMock.Object;
            CacheableReadBlob cacheableReadBlob      = CreateProductUnderTest(key, blobStream, cache);

            // Act
            bool isCacheHit = cacheableReadBlob.IsCacheHit;

            // Assert
            Assert.False(isCacheHit);
        }
Example #18
            public async Task InitializeAsync(WebJobsTestEnvironment testEnvironment)
            {
                RandomNameResolver nameResolver = new RandomNameResolver();

                CacheMock = CreateMockFunctionDataCache();
                CacheMock
                .Setup(c => c.IsEnabled)
                .Returns(true);
                IFunctionDataCache cache = CacheMock.Object;

                Host = new HostBuilder()
                       .ConfigureDefaultTestHost <CacheableBlobsEndToEndTests>(b =>
                {
                    b.AddAzureStorageBlobs().AddAzureStorageQueues();
                    b.AddAzureStorageCoreServices();
                })
                       .ConfigureServices(services =>
                {
                    services.AddSingleton <INameResolver>(nameResolver)
                    .AddSingleton(cache);
                })
                       .Build();

                JobHost = Host.GetJobHost();

                BlobServiceClient = new BlobServiceClient(testEnvironment.PrimaryStorageAccountConnectionString);

                BlobContainer = BlobServiceClient.GetBlobContainerClient(nameResolver.ResolveInString(ContainerName));
                Assert.False(await BlobContainer.ExistsAsync());
                await BlobContainer.CreateAsync();

                OutputBlobContainer = BlobServiceClient.GetBlobContainerClient(nameResolver.ResolveInString(OutputContainerName));

                await Host.StartAsync();

                // Upload some test blobs
                BlockBlobClient blob = BlobContainer.GetBlockBlobClient(InputBlobName);
                await blob.UploadTextAsync(TestData);

                // Get information about the uploaded blob
                BlobProperties blobProperties = await blob.GetPropertiesAsync();

                string blobId      = blob.Uri.ToString();
                string blobVersion = blobProperties.ETag.ToString();

                _expectedBlobCacheKey = new FunctionDataCacheKey(blobId, blobVersion);
            }
        public async Task PutObject_ActiveReference_VerifyNotEvicted()
        {
            int    contentSize  = 2 * 1024 * 1024; // 2MB
            int    cacheSize    = 3 * 1024 * 1024; // 3MB
            string cacheSizeVal = cacheSize.ToString();

            IEnvironment environment = new TestEnvironment();

            environment.SetEnvironmentVariable(FunctionDataCacheConstants.FunctionDataCacheMaximumSizeBytesSettingName, cacheSizeVal);
            environment.SetEnvironmentVariable(FunctionDataCacheConstants.FunctionDataCacheEnabledSettingName, "1");

            using (ISharedMemoryManager manager = new SharedMemoryManager(_loggerFactory, _mapAccessor))
                using (FunctionDataCache cache = new FunctionDataCache(manager, _loggerFactory, environment))
                {
                    // Prepare content
                    byte[] content = TestUtils.GetRandomBytesInArray(contentSize);

                    // Put into shared memory as two objects
                    SharedMemoryMetadata metadata1 = await manager.PutObjectAsync(content);

                    SharedMemoryMetadata metadata2 = await manager.PutObjectAsync(content);

                    // Put one object into the cache and keep an active reference
                    FunctionDataCacheKey key1 = new FunctionDataCacheKey("foo1", "bar1");
                    Assert.True(cache.TryPut(key1, metadata1, isIncrementActiveReference: true, isDeleteOnFailure: false));

                    // The first object has used up the cache space.
                    // When trying to insert the second object into the cache, it should fail
                    // since the first has an active reference and cannot be evicted.
                    FunctionDataCacheKey key2 = new FunctionDataCacheKey("foo2", "bar2");
                    Assert.False(cache.TryPut(key2, metadata2, isIncrementActiveReference: false, isDeleteOnFailure: false));
                    // Ensure that the first object was not evicted
                    Assert.True(cache.TryGet(key1, isIncrementActiveReference: false, out var _));

                    // Drop the active reference on the first object
                    cache.DecrementActiveReference(key1);

                    // Now, when trying to insert the second object into the cache, it should succeed
                    // since the first object can be evicted (since its active reference was dropped).
                    Assert.True(cache.TryPut(key2, metadata2, isIncrementActiveReference: false, isDeleteOnFailure: false));
                    // Ensure that the first object was evicted
                    Assert.False(cache.TryGet(key1, isIncrementActiveReference: false, out var _));
                }
        }
Example #20
        public void TryPutToCacheAlreadyCached_VerifyFailure()
        {
            // Arrange
            FunctionDataCacheKey key            = CreateFunctionDataCacheKey();
            bool isIncrementActiveRefs          = true;
            Mock <IFunctionDataCache> cacheMock = CreateMockFunctionDataCache();

            cacheMock
            .Setup(c => c.TryPut(key, It.IsAny <SharedMemoryMetadata>(), isIncrementActiveRefs, false))
            .Throws(new Exception("This should not be called"));
            IFunctionDataCache          cache             = cacheMock.Object;
            Mock <SharedMemoryMetadata> sharedMemMetaMock = CreateMockSharedMemoryMetadata();
            SharedMemoryMetadata        sharedMemMeta     = sharedMemMetaMock.Object;
            CacheableReadBlob           cacheableReadBlob = CreateProductUnderTest(key, sharedMemMeta, cache);

            // Act
            bool result = cacheableReadBlob.TryPutToCache(sharedMemMeta, isIncrementActiveRefs);

            // Assert
            Assert.False(result);
        }
Example #21
        public void CacheHit_Dispose_VerifyCacheRefCountDecremented()
        {
            // Arrange
            FunctionDataCacheKey        key = CreateFunctionDataCacheKey();
            Mock <SharedMemoryMetadata> sharedMemMetaMock = CreateMockSharedMemoryMetadata();
            SharedMemoryMetadata        sharedMemMeta     = sharedMemMetaMock.Object;
            Mock <IFunctionDataCache>   cacheMock         = CreateMockFunctionDataCache();

            cacheMock
            .Setup(c => c.DecrementActiveReference(key))
            .Verifiable();
            IFunctionDataCache cache             = cacheMock.Object;
            CacheableReadBlob  cacheableReadBlob = CreateProductUnderTest(key, sharedMemMeta, cache);

            // Act
            cacheableReadBlob.Dispose();

            // Assert
            // This will ensure that the appropriate method was called on the cache
            cacheMock.Verify();
        }
        public async Task PutObject_NoEvictions_VerifyGet(int contentSize)
        {
            using (ISharedMemoryManager manager = new SharedMemoryManager(_loggerFactory, _mapAccessor))
                using (FunctionDataCache cache = new FunctionDataCache(manager, _loggerFactory, _testEnvironment))
                {
                    // Prepare content
                    byte[] content = TestUtils.GetRandomBytesInArray(contentSize);

                    // Put into shared memory
                    SharedMemoryMetadata metadata = await manager.PutObjectAsync(content);

                    // Put into cache
                    FunctionDataCacheKey key = new FunctionDataCacheKey("foo", "bar");
                    Assert.True(cache.TryPut(key, metadata, isIncrementActiveReference: false, isDeleteOnFailure: false));

                    // Get from cache
                    Assert.True(cache.TryGet(key, isIncrementActiveReference: false, out SharedMemoryMetadata getMetadata));

                    // Compare if the obtained values are equal
                    Assert.Equal(metadata, getMetadata);
                }
        }
Example #23
        public void CacheMiss_Dispose_VerifyBlobStreamDisposed()
        {
            // Arrange
            FunctionDataCacheKey        key               = CreateFunctionDataCacheKey();
            Mock <IFunctionDataCache>   cacheMock         = CreateMockFunctionDataCache();
            IFunctionDataCache          cache             = cacheMock.Object;
            Mock <SharedMemoryMetadata> sharedMemMetaMock = CreateMockSharedMemoryMetadata();
            SharedMemoryMetadata        sharedMemMeta     = sharedMemMetaMock.Object;
            Mock <Stream> blobStreamMock = CreateMockBlobStream();

            blobStreamMock
            .Setup(s => s.Close())     // Close is called internally when Stream is Disposed
            .Verifiable();
            Stream            blobStream        = blobStreamMock.Object;
            CacheableReadBlob cacheableReadBlob = CreateProductUnderTest(key, blobStream, cache);

            // Act
            cacheableReadBlob.Dispose();

            // Assert
            // This will ensure that the appropriate method was called on the stream
            blobStreamMock.Verify();
        }
        /// <summary>
        /// Note: This will remove the entry even if it has active references. It is the responsibility of the caller to
        /// ensure the entry is safe to be removed.
        /// </summary>
        /// <param name="cacheKey">The <see cref="FunctionDataCacheKey"/> corresponding to the entry to be removed.</param>
        /// <returns><see langword="true"/> if the entry was successfully removed, <see langword="false"/> if not.</returns>
        public bool TryRemove(FunctionDataCacheKey cacheKey)
        {
            lock (_lock)
            {
                if (!TryGet(cacheKey, isIncrementActiveReference: false, out SharedMemoryMetadata sharedMemoryMeta))
                {
                    return(false);
                }

                // Remove the key from the local cache
                if (!_localCache.Remove(cacheKey))
                {
                    // Key does not exist in the local cache
                    return(false);
                }

                // Free the shared memory containing data for the given key that is being removed
                if (!_sharedMemoryManager.TryFreeSharedMemoryMap(sharedMemoryMeta.MemoryMapName))
                {
                    // Unable to free the shared memory
                    return(false);
                }

                // Remove from LRU list
                RemoveFromLRU(cacheKey);

                // Remove the key from the list of active references
                ActiveReferences.Remove(cacheKey);

                // Update the cache utilization
                RemainingCapacityBytes += sharedMemoryMeta.Count;

                _logger.LogTrace("Removed cache object: {ObjectName} and version: {Version} with size: {Size} in shared memory map: {MapName} with updated capacity: {CapacityBytes} bytes", cacheKey.Id, cacheKey.Version, sharedMemoryMeta.Count, sharedMemoryMeta.MemoryMapName, RemainingCapacityBytes);

                return(true);
            }
        }
Example #25
        public static async Task <ICacheAwareReadObject> TryBindCacheAwareAsync(BlobWithContainer <BlobBaseClient> blob, ValueBindingContext context, IFunctionDataCache functionDataCache)
        {
            try
            {
                // Generate the cache key for this blob
                FunctionDataCacheKey cacheKey = await GetFunctionDataCacheKey(blob, context.CancellationToken).ConfigureAwait(false);

                if (cacheKey == null)
                {
                    return(null);
                }

                // Check if it exists in the cache
                if (functionDataCache.TryGet(cacheKey, isIncrementActiveReference: true, out SharedMemoryMetadata sharedMemoryMeta))
                {
                    // CACHE HIT
                    return(new CacheableReadBlob(cacheKey, sharedMemoryMeta, functionDataCache));
                }

                // CACHE MISS
                // Wrap the blob's stream along with the cache key so the object can be inserted into the cache later using the key generated above for this blob
                Stream innerStream = await TryBindStreamAsync(blob.BlobClient, context.CancellationToken, cacheKey.Version).ConfigureAwait(false);

                return(new CacheableReadBlob(cacheKey, innerStream, functionDataCache));
            }
            catch (RequestFailedException exception)
            {
                // Testing generic error case since specific error codes are not available for FetchAttributes
                // (HEAD request), including OpenRead.
                if (!exception.IsNotFound())
                {
                    throw;
                }

                return(null);
            }
        }
        public async Task PutObject_FailToPut_DoNotDeleteOnFailure()
        {
            int    contentSize  = 4 * 1024 * 1024; // 4MB
            int    cacheSize    = 3 * 1024 * 1024; // 3MB
            string cacheSizeVal = cacheSize.ToString();

            IEnvironment environment = new TestEnvironment();

            environment.SetEnvironmentVariable(FunctionDataCacheConstants.FunctionDataCacheMaximumSizeBytesSettingName, cacheSizeVal);
            environment.SetEnvironmentVariable(FunctionDataCacheConstants.FunctionDataCacheEnabledSettingName, "1");

            using (SharedMemoryManager manager = new SharedMemoryManager(_loggerFactory, _mapAccessor))
                using (FunctionDataCache cache = new FunctionDataCache(manager, _loggerFactory, environment))
                {
                    // Prepare content
                    byte[] content = TestUtils.GetRandomBytesInArray(contentSize);

                    // Put into shared memory
                    SharedMemoryMetadata metadata = await manager.PutObjectAsync(content);

                    // Try to put the object into the cache; this will fail because the cache is smaller than the object size.
                    // Since isDeleteOnFailure is false, the object will not be deleted from shared memory.
                    FunctionDataCacheKey key = new FunctionDataCacheKey("foo", "bar");
                    Assert.False(cache.TryPut(key, metadata, isIncrementActiveReference: true, isDeleteOnFailure: false));

                    // Ensure that nothing was cached and no references are held
                    Assert.Empty(cache.LRUList);
                    Assert.Empty(cache.ActiveReferences);
                    Assert.Equal(cacheSize, cache.RemainingCapacityBytes);

                    // Ensure that the SharedMemoryManager has the allocated memory map and it was not deleted
                    Assert.Equal(1, manager.AllocatedSharedMemoryMaps.Count);

                    // Try to open the shared memory map of the first object and ensure it exists and can be opened
                    Assert.True(_mapAccessor.TryOpen(metadata.MemoryMapName, out var _));
                }
        }
Example #27
        public void CacheMiss_Dispose_VerifyCacheRefCountNotDecremented()
        {
            // Arrange
            FunctionDataCacheKey      key       = CreateFunctionDataCacheKey();
            Mock <IFunctionDataCache> cacheMock = CreateMockFunctionDataCache();

            cacheMock
            .Setup(c => c.DecrementActiveReference(key))
            .Throws(new Exception("This should not be called"));
            IFunctionDataCache cache          = cacheMock.Object;
            Mock <Stream>      blobStreamMock = CreateMockBlobStream();

            blobStreamMock
            .Setup(s => s.Close())
            .Verifiable();
            Stream            blobStream        = blobStreamMock.Object;
            CacheableReadBlob cacheableReadBlob = CreateProductUnderTest(key, blobStream, cache);

            // Act
            cacheableReadBlob.Dispose();

            // Assert
            // If the wrong method was called, an exception would have been thrown
        }
Example #28
 /// <summary>
 /// Create a <see cref="CacheableReadBlob"/> to use for a test.
 /// </summary>
 /// <param name="cacheKey">Key associated with this object to address it in the <see cref="IFunctionDataCache"/>.</param>
 /// <param name="sharedMemMeta">Metadata describing the shared memory region containing this object.</param>
 /// <param name="functionDataCache">Cache in which to put this object when required.</param>
 /// <returns>A <see cref="CacheableReadBlob"/> object to use for a test.</returns>
 private static CacheableReadBlob CreateProductUnderTest(FunctionDataCacheKey cacheKey, SharedMemoryMetadata sharedMemMeta, IFunctionDataCache functionDataCache)
 {
     return(new CacheableReadBlob(cacheKey, sharedMemMeta, functionDataCache));
 }
        internal static async Task <RpcSharedMemory> ToRpcSharedMemoryAsync(this object value, DataType dataType, ILogger logger, string invocationId, ISharedMemoryManager sharedMemoryManager)
        {
            if (value == null)
            {
                return(new RpcSharedMemory());
            }

            if (!sharedMemoryManager.IsSupported(value))
            {
                return(null);
            }

            SharedMemoryMetadata sharedMemoryMetadata;
            bool needToFreeAfterInvocation = true;

            // Check if the cache is being used or not.
            // The binding extension will only hand out ICacheAwareReadObject if the FunctionDataCache is available and enabled.
            if (value is ICacheAwareReadObject obj)
            {
                if (obj.IsCacheHit)
                {
                    // Content was already present in shared memory (cache hit)
                    logger.LogTrace("Object already present in shared memory for invocation id: {Id}", invocationId);
                    sharedMemoryMetadata      = obj.CacheObject;
                    needToFreeAfterInvocation = false;
                }
                else
                {
                    // Put the content into shared memory and get the name of the shared memory map written to.
                    // This will make the SharedMemoryManager keep an active reference to the memory map.
                    sharedMemoryMetadata = await sharedMemoryManager.PutObjectAsync(obj.BlobStream);

                    if (sharedMemoryMetadata != null)
                    {
                        FunctionDataCacheKey cacheKey = obj.CacheKey;

                        // Try to add the object into the cache and keep an active ref-count for it so that it does not get
                        // evicted while it is still being used by the invocation.
                        if (obj.TryPutToCache(sharedMemoryMetadata, isIncrementActiveReference: true))
                        {
                            logger.LogTrace("Put object: {CacheKey} in cache with metadata: {SharedMemoryMetadata} for invocation id: {Id}", cacheKey, sharedMemoryMetadata, invocationId);
                            // We don't need to free the object after the invocation; it will be freed as part of the cache's
                            // eviction policy.
                            needToFreeAfterInvocation = false;
                        }
                        else
                        {
                            logger.LogTrace("Cannot put object: {CacheKey} in cache with metadata: {SharedMemoryMetadata} for invocation id: {Id}", cacheKey, sharedMemoryMetadata, invocationId);
                            // Since we could not add this object to the cache (and therefore the cache will not be able to evict
                            // it as part of its eviction policy) we will need to free it after the invocation is done.
                            needToFreeAfterInvocation = true;
                        }
                    }
                }
            }
            else
            {
                // Put the content into shared memory and get the name of the shared memory map written to
                sharedMemoryMetadata = await sharedMemoryManager.PutObjectAsync(value);

                needToFreeAfterInvocation = true;
            }

            // Check if the object was either already in shared memory or written to shared memory
            if (sharedMemoryMetadata == null)
            {
                logger.LogTrace("Cannot write to shared memory for invocation id: {Id}", invocationId);
                return(null);
            }

            RpcDataType? rpcDataType = GetRpcDataType(dataType);

            if (!rpcDataType.HasValue)
            {
                logger.LogTrace("Cannot get shared memory data type for invocation id: {Id}", invocationId);
                return(null);
            }

            // When using the cache, we don't need to free the memory map after using it;
            // it will be freed as per the eviction policy of the cache.
            // However, if either the cache was not enabled or the object could not be added to the cache,
            // we will need to free it after the invocation.
            if (needToFreeAfterInvocation)
            {
                // If written to shared memory successfully, add this shared memory map to the list of maps for this invocation
                // so that once the invocation is over, the memory map's resources can be freed.
                sharedMemoryManager.AddSharedMemoryMapForInvocation(invocationId, sharedMemoryMetadata.MemoryMapName);
            }

            // Generate a response
            RpcSharedMemory sharedMem = new RpcSharedMemory()
            {
                Name   = sharedMemoryMetadata.MemoryMapName,
                Offset = 0,
                Count  = sharedMemoryMetadata.Count,
                Type   = rpcDataType.Value
            };

            logger.LogTrace("Put object in shared memory for invocation id: {Id}", invocationId);
            return(sharedMem);
        }
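The maps registered with AddSharedMemoryMapForInvocation above are meant to be released once the invocation completes, while maps handed to the cache are deliberately not registered, so cached objects survive that cleanup. A minimal caller-side sketch; the method name used here is an assumption, not taken from these examples:

            // Sketch only; TryFreeSharedMemoryMapsForInvocationAsync is an assumed API name.
            bool freed = await sharedMemoryManager.TryFreeSharedMemoryMapsForInvocationAsync(invocationId);
            if (!freed)
            {
                logger.LogTrace("Cannot free shared memory maps for invocation id: {Id}", invocationId);
            }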
        public async Task ToRpcInvocationRequest_RpcSharedMemoryDataTransfer_UsingFunctionDataCache_CacheMiss()
        {
            var logger = new TestLogger("test");

            var httpContext = new DefaultHttpContext();

            httpContext.Request.Host   = new HostString("local");
            httpContext.Request.Path   = "/test";
            httpContext.Request.Method = "Post";

            var poco = new TestPoco {
                Id = 1, Name = "Test"
            };

            var bindingData = new Dictionary <string, object>
            {
                { "req", httpContext.Request },
                { "$request", httpContext.Request },
                { "headers", httpContext.Request.Headers.ToDictionary(p => p.Key, p => p.Value) },
                { "query", httpContext.Request.QueryString.ToString() },
                { "sys", new SystemBindingData() }
            };

            const int    inputStringLength  = 2 * 1024 * 1024;
            string       inputString        = TestUtils.GetRandomString(inputStringLength);
            Stream       inputStream1       = new MemoryStream();
            StreamWriter inputStreamWriter1 = new StreamWriter(inputStream1);
            await inputStreamWriter1.WriteAsync(inputString);

            await inputStreamWriter1.FlushAsync();

            inputStream1.Seek(0, SeekOrigin.Begin);

            FunctionDataCacheKey     key1      = new FunctionDataCacheKey("fooStr", "0x1");
            MockCacheAwareReadObject cacheObj1 = new MockCacheAwareReadObject(key1, inputStream1, _functionDataCache);

            const int inputBytesLength = 2 * 1024 * 1024;

            byte[] inputBytes   = TestUtils.GetRandomBytesInArray(inputBytesLength);
            Stream inputStream2 = new MemoryStream(inputBytes);

            inputStream2.Seek(0, SeekOrigin.Begin);

            FunctionDataCacheKey     key2      = new FunctionDataCacheKey("fooBytes", "0x1");
            MockCacheAwareReadObject cacheObj2 = new MockCacheAwareReadObject(key2, inputStream2, _functionDataCache);

            var inputs = new List <(string name, DataType type, object val)>
            {
                ("req", DataType.String, httpContext.Request),
                ("fooStr", DataType.String, cacheObj1),
                ("fooBytes", DataType.Binary, cacheObj2),
            };

            var invocationContext = new ScriptInvocationContext()
            {
                ExecutionContext = new ExecutionContext()
                {
                    InvocationId = Guid.NewGuid(),
                    FunctionName = "Test",
                },
                BindingData           = bindingData,
                Inputs                = inputs,
                ResultSource          = new TaskCompletionSource <ScriptInvocationResult>(),
                Logger                = logger,
                AsyncExecutionContext = System.Threading.ExecutionContext.Capture()
            };

            var functionMetadata = new FunctionMetadata
            {
                Name = "Test"
            };

            var httpTriggerBinding = new BindingMetadata
            {
                Name      = "req",
                Type      = "httpTrigger",
                Direction = BindingDirection.In,
                Raw       = new JObject()
            };

            var fooStrInputBinding = new BindingMetadata
            {
                Name      = "fooStr",
                Type      = "fooStr",
                Direction = BindingDirection.In
            };

            var fooBytesInputBinding = new BindingMetadata
            {
                Name      = "fooBytes",
                Type      = "fooBytes",
                Direction = BindingDirection.In
            };

            var httpOutputBinding = new BindingMetadata
            {
                Name      = "res",
                Type      = "http",
                Direction = BindingDirection.Out,
                Raw       = new JObject(),
                DataType  = DataType.String
            };

            functionMetadata.Bindings.Add(httpTriggerBinding);
            functionMetadata.Bindings.Add(fooStrInputBinding);
            functionMetadata.Bindings.Add(fooBytesInputBinding);
            functionMetadata.Bindings.Add(httpOutputBinding);
            invocationContext.FunctionMetadata = functionMetadata;

            GrpcCapabilities capabilities = new GrpcCapabilities(logger);
            var result = await invocationContext.ToRpcInvocationRequest(logger, capabilities, isSharedMemoryDataTransferEnabled: true, _sharedMemoryManager);

            Assert.Equal(3, result.InputData.Count);

            Assert.Equal("fooStr", result.InputData[1].Name);
            Assert.Equal("fooBytes", result.InputData[2].Name);

            // The input data should be transferred over shared memory
            RpcSharedMemory sharedMem1 = result.InputData[1].RpcSharedMemory;

            // This is the expected byte[] representation of the string;
            // we use it to determine the expected length
            byte[] contentBytes = Encoding.UTF8.GetBytes(inputString);
            Assert.Equal(contentBytes.Length, sharedMem1.Count);

            // Check that the name of the shared memory map is a valid GUID
            Assert.True(Guid.TryParse(sharedMem1.Name, out _));

            // Check the type being sent
            Assert.Equal(sharedMem1.Type, RpcDataType.String);

            // The input data should be transferred over shared memory
            RpcSharedMemory sharedMem2 = result.InputData[2].RpcSharedMemory;

            Assert.Equal(inputBytes.Length, sharedMem2.Count);

            // Check that the name of the shared memory map is a valid GUID
            Assert.True(Guid.TryParse(sharedMem2.Name, out _));

            // Check the type being sent
            Assert.Equal(sharedMem2.Type, RpcDataType.Bytes);

            // Check that the inputs were inserted into shared memory
            object inputStringReadObj = await _sharedMemoryManager.GetObjectAsync(sharedMem1.Name, 0, (int)sharedMem1.Count, typeof(string));

            Assert.NotNull(inputStringReadObj);
            string inputStringRead = inputStringReadObj as string;

            Assert.Equal(inputString, inputStringRead);

            object inputBytesReadObj = await _sharedMemoryManager.GetObjectAsync(sharedMem2.Name, 0, (int)sharedMem2.Count, typeof(byte[]));

            Assert.NotNull(inputBytesReadObj);
            byte[] inputBytesRead = inputBytesReadObj as byte[];
            Assert.Equal(inputBytes, inputBytesRead);

            // Check that the inputs were not marked to be removed after the invocation
            Assert.Empty(_sharedMemoryManager.InvocationSharedMemoryMaps);

            // Check that the inputs were inserted into the cache
            Assert.True(_functionDataCache.TryGet(key1, isIncrementActiveReference: false, out _));
            Assert.True(_functionDataCache.TryGet(key2, isIncrementActiveReference: false, out _));
        }