/// <summary>
/// Derives per-component SHA-256 hashes, a read id, a shard assignment, and a
/// blob storage path for the given cache key.
/// </summary>
/// <param name="key">The three-part cache key to hash.</param>
/// <returns>
/// A <see cref="HashedCacheKey"/> carrying the component hashes, read id,
/// shard index, and blob key path.
/// </returns>
internal HashedCacheKey Hash(CacheKey key)
{
    using (var sha = SHA256.Create())
    {
        // Key1 keeps its full 32-byte hash; Key2/Key3 hashes are truncated to 16 bytes each.
        byte[] key1Hash = sha.ComputeHash(key.Key1);
        byte[] key2Hash = sha.ComputeHash(key.Key2).Take(16).ToArray();
        byte[] key3Hash = sha.ComputeHash(key.Key3).Take(16).ToArray();

        // The master hash covers all three component hashes (32 + 16 + 16 = 64 bytes).
        var combined = new List<byte>(64);
        combined.AddRange(key1Hash);
        combined.AddRange(key2Hash);
        combined.AddRange(key3Hash);
        byte[] masterHash = sha.ComputeHash(combined.ToArray());

        uint readId = BitConverter.ToUInt32(masterHash, 0);

        // Shard selection is seeded from bytes 8..11 of the Key1 hash.
        uint shardSeed = BitConverter.ToUInt32(key1Hash, 8);
        uint shard = (uint)(shardSeed % _shardCount);

        // Blob path layout: "blobs/<hex of byte 16>/<hex of byte 17>/<full master hash in lowercase hex>".
        var path = new StringBuilder(75);
        path.Append("blobs/");
        path.AppendFormat("{0:x2}", masterHash[16]);
        path.Append('/');
        path.AppendFormat("{0:x2}", masterHash[17]);
        path.Append('/');
        foreach (byte value in masterHash)
        {
            path.AppendFormat("{0:x2}", value);
        }

        return new HashedCacheKey
        {
            Key1Hash = key1Hash,
            Key2Hash = key2Hash,
            Key3Hash = key3Hash,
            ReadId = readId,
            Shard = shard,
            BlobKey = path.ToString()
        };
    }
}
/// <summary>
/// Queues an asynchronous write of <paramref name="data"/> to the blob store
/// (evicting space and updating the eviction log first), then opportunistically
/// reaps one completed write task from the front of the queue.
/// </summary>
/// <param name="hash">Hashed cache key identifying where the bytes are stored.</param>
/// <param name="data">The payload to persist.</param>
/// <param name="cost">Cost value recorded in the eviction log.</param>
internal void EnqueuePutBytes(HashedCacheKey hash, byte[] data, uint cost)
{
    // TODO: have a temporary in-memory cache while the files are being written so that repeated requests for the same file aren't a miss
    putByteTasks.Enqueue(Task.Run(async () =>
    {
        try
        {
            await evicter.EvictSpaceFor(data.Length, shutdownTokenSource.Token);
            await evicter.WriteLogEventually(hash, data, cost);
            await evicter.RecordBytesUsed(data.Length);
            // Don't cancel while writing bytes
            await store.WriteBytes(shardId, hash.BlobKey, data, CancellationToken.None);
        }
        catch (Exception e) when (!(e is OperationCanceledException))
        {
            // Exception filter: cancellation propagates untouched (preserving its
            // original stack trace instead of catch-and-rethrow); every other
            // failure is recorded for later inspection.
            exceptionLog.Enqueue(e);
        }
    }, shutdownTokenSource.Token));

    // Try to cleanup tasks when possible. A long running task may block cleanup of others.
    // Task.IsCompleted already covers the RanToCompletion, Faulted, and Canceled states,
    // so the extra IsFaulted/IsCanceled checks were redundant.
    if (putByteTasks.TryPeek(out Task t) && t.IsCompleted)
    {
        if (putByteTasks.TryDequeue(out Task task) && !task.IsCompleted)
        {
            // A concurrent caller raced us and we dequeued a different,
            // still-running task; put it back rather than losing track of it.
            putByteTasks.Enqueue(task);
        }
    }
}