/// <summary>
/// Simulates set-associative caches for every power-of-two row count up to
/// <paramref name="maxRows"/> and every power-of-two block size (from 4 bytes up to
/// <paramref name="maxBlockSize"/>), keeping the best-performing configurations.
/// </summary>
/// <param name="addresses">Memory addresses replayed against each simulated cache.</param>
/// <param name="maxSize">Maximum cache bit size handed to each simulator.</param>
/// <param name="maxRows">Largest row count to try (inclusive).</param>
/// <param name="maxBlockSize">Largest block size in bytes to try (inclusive).</param>
/// <returns>The best cache results as collected by updateBestCacheResults.</returns>
static List<CacheResult> simulateSetAssociativeCaches(int[] addresses, int maxSize, int maxRows, int maxBlockSize)
{
    List<CacheResult> bestCacheResults = new List<CacheResult>();
    // NOTE: a dead `cacheHitsMissRecord` dictionary that was allocated here but
    // never read or written has been removed.

    for (int numberOfRows = 1; numberOfRows <= maxRows; numberOfRows *= 2)
    {
        for (int blockSize = 4; blockSize <= maxBlockSize; blockSize *= 2)
        {
            SetAssociativeSetCacheSimulator aCacheSimulator;
            try
            {
                aCacheSimulator = new SetAssociativeSetCacheSimulator(numberOfRows: numberOfRows, bytesPerBlock: blockSize, maxCacheBitSize: maxSize);
            }
            catch
            {
                // The block size is too big for the given row count and max bit size.
                // Break (not continue): every larger block size would fail too.
                break;
            }

            CacheResult newCacheResult = simulateCacheUses(addresses, aCacheSimulator);
            // Uncomment to log all set-associative simulations in the console:
            //Console.WriteLine(newCacheResult.ToCsvString(true));
            updateBestCacheResults(bestCacheResults, newCacheResult);
        }
    }

    return bestCacheResults;
}
/// <summary>
/// Simulates direct-mapped caches for every power-of-two block size (from 4 bytes up
/// to <paramref name="maxBlockSize"/>), keeping the best-performing configurations.
/// </summary>
/// <param name="addresses">Memory addresses replayed against each simulated cache.</param>
/// <param name="maxSize">Maximum cache bit size handed to each simulator.</param>
/// <param name="maxBlockSize">Largest block size in bytes to try (inclusive).</param>
/// <returns>The best cache results as collected by updateBestCacheResults.</returns>
static List<CacheResult> simulateDirectMappedCaches(int[] addresses, int maxSize, int maxBlockSize)
{
    List<CacheResult> bestCacheResults = new List<CacheResult>();
    // NOTE: a dead `cacheHitsMissRecord` dictionary that was allocated here but
    // never read or written has been removed.

    for (int blockSize = 4; blockSize <= maxBlockSize; blockSize *= 2)
    {
        DirectMappedCacheSimulator aCacheSimulator;
        try
        {
            aCacheSimulator = new DirectMappedCacheSimulator(bytesPerBlock: blockSize, maxCacheBitSize: maxSize);
        }
        catch
        {
            // The block size is too big for the given max bit size.
            // Break (not continue): every larger block size would fail too.
            break;
        }

        CacheResult newCacheResult = simulateCacheUses(addresses, aCacheSimulator);
        // Uncomment to log all direct-mapped simulations in the console:
        //Console.WriteLine(newCacheResult.ToCsvString(true));
        updateBestCacheResults(bestCacheResults, newCacheResult);
    }

    return bestCacheResults;
}
/// <summary>
/// Materializes a window of BAccount records from a cached search result,
/// re-querying each record by its bAccountID. Also updates this.totalCount
/// with the full cached result count.
/// </summary>
protected override List<BAccount> CreateRecordsResult(CacheResult cacheRes, int first, int count)
{
    var records = new List<BAccount>();

    var cache = cacheRes as BAccountCacheResult;
    if (cache == null || cache.Results == null)
    {
        return records;
    }

    // An out-of-range window start yields nothing.
    if (first < 0 || first >= cache.Results.Count)
    {
        return records;
    }

    // count == -1 means "everything from `first` onward".
    if (count == -1)
    {
        count = cache.Results.Count;
    }

    this.totalCount = cache.Results.Count;

    int end = first + count;
    for (int index = first; index < end && index < cache.Results.Count; index++)
    {
        BAccount account = SWSelect<BAccount, Where<BAccount.bAccountID, Equal<Required<BAccount.bAccountID>>>>
            .SelectWindowed(this.graph, 0, 1, cache.Results[index]);
        if (account != null)
        {
            records.Add(account);
        }
    }

    return records;
}
/// <summary>
/// Enumerates the current range of keys until the callback verifies one or the
/// range is exhausted. If the callback never succeeds, either the key used to
/// encrypt the CRC lies outside the range or the data was corrupted in transit.
/// </summary>
/// <param name="callback">The verification callback.</param>
/// <returns>Whether a key offset was found, as reported by the callback.</returns>
public bool Search(Func<Tuple<int, uint>, bool> callback)
{
    if (isReleased)
    {
        return false;
    }

    Tuple<int, int> range = SearchRange;
    int limit = range.Item2 + 1;
    for (int candidate = range.Item1; candidate < limit; candidate++)
    {
        CacheResult entry = this[candidate];
        if (entry.Invalid)
        {
            continue;
        }
        if (!callback(entry.Key))
        {
            continue;
        }

        // Verified: consume the key, remember the offset, prune, and relax effort.
        RemoveVerifiedKey(candidate);
        offset = candidate;
        RemoveDisabledKeys();
        AdjustEffortLevel(true);
        return true;
    }

    AdjustEffortLevel(false);
    return false;
}
/// <summary>
/// Enumerates the current range of keys until the callback verifies one or the
/// range is exhausted. If the callback never succeeds, either the key used to
/// encrypt the CRC lies outside the range or the data was corrupted in transit.
/// </summary>
/// <param name="callback">The verification callback.</param>
/// <param name="rangeAdvance">Whether to move the center-of-range offset to the
/// verified offset and drop keys surpassed by the new range. Do NOT advance for
/// the { [this]->reordering->processing } stage: it could walk the current range
/// out of sync with the ordered packet stream, and advancing beyond the (shared
/// between client and server) stream position destroys the session.</param>
/// <returns>Whether a key offset was found, as reported by the callback.</returns>
public bool Search(Func<Tuple<int, uint>, bool> callback, bool rangeAdvance)
{
    Tuple<int, int> range = SearchRange;
    int limit = range.Item2 + 1;
    for (int candidate = range.Item1; candidate < limit; candidate++)
    {
        CacheResult entry = this[candidate];
        if (entry.Invalid)
        {
            continue;
        }
        if (!callback(entry.Key))
        {
            continue;
        }

        RemoveVerifiedKey(candidate);
        if (rangeAdvance)
        {
            RangeAdvance(candidate);
        }
        AdjustEffortLevel(true);
        return true;
    }

    AdjustEffortLevel(false);
    return false;
}
public void Failure_Integer_ShouldReturnSuccessFalseWithContentAsDefaultInt()
{
    // Act: build a failed cache result for int.
    var failure = CacheResult<int>.Failure();

    // Assert: failures report no success and carry the default int content.
    failure.Success.ShouldBeFalse();
    failure.Content.ShouldBe(default(int));
}
// Static constructor: replaces the default constructor-validation scenarios with
// the single scenario relevant to CacheResult<TOperation, TReturn>.
static CacheResultTOperationTReturnTest()
{
    ConstructorArgumentValidationTestScenarios.RemoveAllScenarios();

    // Scenario construction stays inside the lambda so it is evaluated lazily.
    ConstructorArgumentValidationTestScenarios.AddScenario(
        () => new ConstructorArgumentValidationTestScenario<CacheResult<Version, Version>>
        {
            Name = "constructor should throw ArgumentNullException when parameter 'operation' is null scenario",
            ConstructionFunc = () =>
            {
                var referenceObject = A.Dummy<CacheResult<Version, Version>>();

                // Pass a null operation; every other argument comes from the dummy.
                return new CacheResult<Version, Version>(
                    null,
                    referenceObject.CachedObject,
                    referenceObject.FreshnessInUtc);
            },
            ExpectedExceptionType = typeof(ArgumentNullException),
            ExpectedExceptionMessageContains = new[] { "operation", },
        });
}
/// <summary>
/// Recursively populates the DataTable with a row per file and per directory so the
/// info can be cached in the db. Returns the aggregate size / file count of the subtree.
/// </summary>
/// <param name="rootDirectory">Directory to walk; null yields an empty result.</param>
/// <param name="data">Table receiving one row per file/directory encountered.</param>
/// <returns>Aggregate size and file count of the subtree rooted at <paramref name="rootDirectory"/>.</returns>
public CacheResult PrepareFileDetailsForCache(DirectoryInfo rootDirectory, DataTable data)
{
    if (rootDirectory == null)
    {
        return new CacheResult();
    }

    CacheResult result = new CacheResult();
    try
    {
        var directories = rootDirectory.EnumerateDirectories();
        var files = rootDirectory.EnumerateFiles();

        foreach (var childDirectory in directories)
        {
            var tmpRes = PrepareFileDetailsForCache(childDirectory, data);
            result.Size += tmpRes.Size;
            result.NumberOfFiles += tmpRes.NumberOfFiles;
        }

        foreach (var childFile in files)
        {
            result.Size += childFile.Length;
            result.NumberOfFiles++;

            var fileRow = data.NewRow();
            fileRow["Name"] = childFile.FullName.Replace('\\', '/');
            // BUGFIX: Extension.Split('.')[1] threw IndexOutOfRangeException for files
            // with no extension (Extension == ""), which the catch below turned into
            // silently discarding the whole directory. TrimStart('.') yields the same
            // value for normal extensions and "" for extensionless files.
            fileRow["Extension"] = childFile.Extension.TrimStart('.');
            fileRow["Size"] = childFile.Length;
            fileRow["NumberOfFiles"] = 0;
            fileRow["CreationTime"] = childFile.CreationTime;
            fileRow["LastAccessTime"] = childFile.LastAccessTime;
            fileRow["LastModificationTime"] = childFile.LastWriteTime;
            data.Rows.Add(fileRow);
        }
    }
    catch (Exception)
    {
        // Best-effort: an unreadable directory (permissions, races) contributes nothing.
        // Note rows already added for this subtree remain in `data`.
        return new CacheResult();
    }

    var directoryRow = data.NewRow();
    directoryRow["Name"] = rootDirectory.FullName.Replace('\\', '/');
    directoryRow["Extension"] = "folder";
    directoryRow["Size"] = result.Size;
    directoryRow["NumberOfFiles"] = result.NumberOfFiles;
    directoryRow["CreationTime"] = rootDirectory.CreationTime;
    directoryRow["LastAccessTime"] = rootDirectory.LastAccessTime;
    directoryRow["LastModificationTime"] = rootDirectory.LastWriteTime;
    data.Rows.Add(directoryRow);

    return result;
}
public void Free()
{
    // Release the edit-distance helper and clear cached state before
    // returning this instance to the shared pool.
    _editDistance?.Dispose();
    _editDistance = null;
    _source = null;
    _lastAreSimilarResult = default(CacheResult);

    s_pool.Push(this);
}
// Serializes `data` as a CacheResult<object> JSON payload and stores it in Redis
// under `key` with the given expiration.
public async Task SetAsync_<T>(string key, T data, TimeSpan expire)
{
    key.Should().NotBeNullOrEmpty();

    var payload = new CacheResult<object>(data);
    var serialized = (string)payload.ToJson();
    await this._db.StringSetAsync(key, serialized, expire);
}
public void CacheResultMetadata()
{
    // Arrange: a metadata container carrying a DataSource value.
    IMutablePropertyContainer metadata = new MutablePropertyContainer()
        .WithValue(CacheResult.DataSource, "source");

    // Act: build a cache result that carries the metadata.
    var cacheResult = new CacheResult<int>(
        new CacheSectionDescriptor<int>("int"),
        "key",
        5,
        metadata: metadata,
        hitMiss: CacheHitMiss.Hit,
        error: null,
        isCached: true);

    // Assert: the data source is readable and the container is passed through by reference.
    cacheResult.GetDataSource().Should().Be("source");
    cacheResult.Metadata.Should().BeSameAs(metadata);
}
public void TestAccess()
{
    // Request a random already-cached file; the update delegate must not run
    // because nothing has been modified.
    string key = new Random().Next(0, quantity).ToString();
    CacheResult r = cache.GetCachedFile(
        key,
        "test",
        delegate(Stream s)
        {
            Assert.Fail("No files have been modified, this should not execute");
        },
        defaultDate,
        100);

    Assert.IsTrue(System.IO.File.Exists(r.PhysicalPath));
    Assert.IsTrue(r.Result == CacheQueryResult.Hit);
}
// Serializes `data` as a CacheResult<object> JSON payload and stores it in Redis
// under `key` with the given expiration (synchronous variant).
public void Set_<T>(string key, T data, TimeSpan expire)
{
    key.Should().NotBeNullOrEmpty();

    var payload = new CacheResult<object>(data);
    var serialized = (string)payload.ToJson();
    this._db.StringSet(key, serialized, expire);
}
public void Free()
{
    // Release the edit-distance helper and clear cached state before
    // returning this instance to the shared pool.
    _editDistance?.Dispose();
    _editDistance = null;
    _source = null;
    _lastAreSimilarResult = default;

    // The pool is shared, so pushes are serialized on the pool gate.
    lock (s_poolGate)
    {
        s_pool.Push(this);
    }
}
/// <summary>
/// Converts a window of cached BAccount records into SWSearchResult entries
/// (link, keys, caption, description per account).
/// </summary>
protected override List<SWSearchResult> CreateSearchResult(CacheResult cacheRes, int first, int count)
{
    var searchResults = new List<SWSearchResult>();
    foreach (BAccount account in this.CreateRecordsResult(cacheRes, first, count))
    {
        var entry = new Result(
            CreateLink(account),
            CreateKeys(account),
            CreateCaption(account),
            CreateDescription(account));
        searchResults.Add(entry);
    }
    return searchResults;
}
// Mock implementation: logs the query, raises the configured error (if any),
// and always reports a non-applicable cache result.
public override Task<CacheResult> GetCacheResultAsync(
    BuildRequestData buildRequest,
    PluginLoggerBase logger,
    CancellationToken cancellationToken)
{
    logger.LogMessage($"{nameof(AssemblyMockCache)}: GetCacheResultAsync for {buildRequest.ProjectFullPath}", MessageImportance.High);

    ErrorFrom(nameof(GetCacheResultAsync), logger);

    var nonHit = CacheResult.IndicateNonCacheHit(CacheResultType.CacheNotApplicable);
    return Task.FromResult(nonHit);
}
public void Test_CacheResult_NotImplementedException()
{
    // A result constructed without an error exposes its value directly.
    var withoutError = new CacheResult<string>("Fish Li", null);
    Assert.AreEqual("Fish Li", withoutError.Result);

    // A result constructed with an error rethrows it on access.
    var withError = new CacheResult<string>("Fish Li", new NotImplementedException());
    Assert.AreEqual("Fish Li", withError.Result); // this assertion is expected to throw
}
public void TestHashCode()
{
    var original = new CacheResult<int>(2);
    var sameValue = new CacheResult<int>(2);
    var otherValue = new CacheResult<int>(3);

    var set = new HashSet<object> { original };

    // Equal values must hash to the same bucket; a different value must not be found.
    Assert.IsTrue(set.Contains(original));
    Assert.IsTrue(set.Contains(sameValue));
    Assert.IsFalse(set.Contains(otherValue));
}
// Wraps a raw cache entry: null becomes a failure result, otherwise the JSON
// payload is deserialized into a successful result.
private static CacheResult<TResult> CreateCacheResult<TResult>(string entry)
{
    if (entry is null)
    {
        return CacheResult<TResult>.Failure();
    }

    var content = JsonConvert.DeserializeObject<TResult>(entry);
    return new CacheResult<TResult>
    {
        Success = true,
        Content = content,
    };
}
// Looks up a product by id in Redis; returns NoData when the key is absent,
// otherwise the deserialized product wrapped in a cache result.
public async Task<CacheResult<Products>> GetAsync(int id)
{
    var cached = await _context.Database.StringGetAsync($"products.{id}");
    if (!cached.HasValue)
    {
        return CacheResult<Products>.NoData();
    }

    var product = _cacheResializer.Deserialize<Products>(cached);
    return CacheResult<Products>.Result(product);
}
// Wraps `data` in a CacheResult<T> stamped with the cache configuration
// (minutes + level) for the given cache type, and returns it as a success response.
public IActionResult Result<T>(CacheType cacheType, IEnumerable<T> data)
    where T : class, new()
{
    var config = CacheConfigs.GetConfig(cacheType);
    var payload = new CacheResult<T>
    {
        CacheMinutes = config.Minutes,
        Level = config.Level,
        Data = data,
    };
    return Success(payload);
}
// Mock implementation: records the request, logs it, and returns the expected
// result configured for this project number — or a cache miss when none is set.
public override Task<CacheResult> GetCacheResultAsync(
    BuildRequestData buildRequest,
    PluginLoggerBase logger,
    CancellationToken cancellationToken)
{
    Requests.Enqueue(buildRequest);
    logger.LogMessage($"MockCache: GetCacheResultAsync for {buildRequest.ProjectFullPath}", MessageImportance.High);

    var expected = _testData?.GetExpectedCacheResultForProjectNumber(GetProjectNumber(buildRequest.ProjectFullPath));
    return Task.FromResult(expected ?? CacheResult.IndicateNonCacheHit(CacheResultType.CacheMiss));
}
public void TestUpdate()
{
    // Use a unique timestamp (advancing by `seed` days) so the cache treats the
    // source as modified and takes the miss path.
    DateTime newTime = DateTime.UtcNow.AddDays(seed++);

    CacheResult r = cache.GetCachedFile(
        new Random().Next(0, quantity).ToString(),
        "test",
        delegate(Stream s)
        {
            s.WriteByte(32); // just one space
        },
        newTime,
        100);

    Assert.AreEqual<DateTime>(newTime, System.IO.File.GetLastWriteTimeUtc(r.PhysicalPath));
    Assert.IsTrue(r.Result == CacheQueryResult.Miss);
}
public void TestHashCode()
{
    var first = new CacheResult<int>(2);
    var equalToFirst = new CacheResult<int>(2);
    var distinct = new CacheResult<int>(3);

    var bucket = new HashSet<object> { first };

    // Hash-based lookup must treat equal values as the same element.
    Assert.IsTrue(bucket.Contains(first));
    Assert.IsTrue(bucket.Contains(equalToFirst));
    Assert.IsFalse(bucket.Contains(distinct));
}
/// <summary>
/// Queues asynchronous processing of a cache request; the outcome (result or
/// exception) is delivered back to the build manager via PostCacheResult.
/// BUGFIX: as flattened in this file the method was missing its closing brace
/// after the local function, which does not compile; the brace is restored.
/// Logic is otherwise unchanged.
/// </summary>
public void PostCacheRequest(CacheRequest cacheRequest)
{
    Task.Run(async () =>
    {
        try
        {
            var cacheResult = await ProcessCacheRequest(cacheRequest);
            _buildManager.PostCacheResult(cacheRequest, cacheResult);
        }
        catch (Exception e)
        {
            // Surface failures to the build manager instead of losing them in the task.
            _buildManager.PostCacheResult(cacheRequest, CacheResult.IndicateException(e));
        }
    }, _cancellationToken);

    async Task<CacheResult> ProcessCacheRequest(CacheRequest request)
    {
        // Prevent needless evaluation if design time builds detected.
        if (_projectCacheDescriptor.VsWorkaround && DesignTimeBuildsDetected)
        {
            // The BuildManager should disable the cache when it finds its servicing design time builds.
            return CacheResult.IndicateNonCacheHit(CacheResultType.CacheMiss);
        }

        EvaluateProjectIfNecessary(request);

        if (_projectCacheDescriptor.VsWorkaround)
        {
            // First caller to observe a design-time build wins; later calls keep that verdict.
            Interlocked.CompareExchange(
                ref DesignTimeBuildsDetected,
                new NullableBool(IsDesignTimeBuild(request.Configuration.Project)),
                null);

            // No point progressing with expensive plugin initialization or cache query if design time build detected.
            if (DesignTimeBuildsDetected)
            {
                // The BuildManager should disable the cache when it finds its servicing design time builds.
                return CacheResult.IndicateNonCacheHit(CacheResultType.CacheMiss);
            }
        }

        if (_projectCacheDescriptor.VsWorkaround)
        {
            // TODO: remove after we change VS to set the cache descriptor via build parameters.
            await LateInitializePluginForVsWorkaround(request);
        }

        return await GetCacheResultAsync(cacheRequest.Submission.BuildRequestData);
    }
}
// Clears the cache and reports success/failure in a ReturnMessage envelope.
public ActionResult clearCache()
{
    var response = new ReturnMessage();
    try
    {
        CacheResult.clearCache();
        response.code = MessageCode.SUCCESS;
        response.data = "clear cache success.";
    }
    catch (Exception e)
    {
        response.code = MessageCode.ERROR;
        response.data = e.Message;
    }
    return Ok(response);
}
/// <summary>
/// Queries a cache server for a key's type, value, and expiration, and packages
/// them in a CacheResult.
/// </summary>
/// <param name="app">Application whose type/connection string select the cache backend.</param>
/// <param name="key">Cache key to inspect.</param>
/// <returns>Key, value, type (as numeric string), and remaining TTL in seconds ("" if none).</returns>
public CacheResult Query(App app, string key)
{
    var cache = Caching.CacheFactory.Create(app.Type, app.ConnectionString);

    CacheKeyType type;
    string value;
    TimeSpan? expire;
    try
    {
        type = cache.Type(key);
        value = cache.QueryWithType(key, type);
        expire = cache.Expire(key);
    }
    finally
    {
        // BUGFIX: Close() was previously not in a finally block, so any throw from
        // Type/QueryWithType/Expire leaked the connection.
        cache.Close();
    }

    CacheResult result = new CacheResult();
    result.Expire = expire.HasValue ? expire.Value.TotalSeconds.ToString() : "";
    result.Value = value;
    result.Key = key;
    result.Index = 0;
    result.Type = ((int)type).ToString();
    return result;
}
/// <summary>
/// Execute cache operation against all configured cache servers.
/// </summary>
/// <returns>Return cache result aggregating the response(s) from each server.</returns>
internal async Task<CacheResult<TResponse>> ExecuteAsync()
{
    var servers = GetCacheServers();
    if (servers.IsNullOrEmpty())
    {
        return CacheResult<TResponse>.EmptyResult();
    }

    CacheResult<TResponse> result = new CacheResult<TResponse>();

    // Single cache server
    if (servers.Count == 1)
    {
        var firstServer = servers.First();
        var provider = CacheManager.Configuration.GetCacheProvider(firstServer.ServerType);
        if (provider != null)
        {
            result.AddResponse(await ExecuteCacheOperationAsync(provider, firstServer).ConfigureAwait(false));
        }
        return result;
    }

    // Multiple cache servers.
    // BUGFIX: previously a fixed-size Task<TResponse>[servers.Count] was used, but
    // servers with a null entry or no provider are skipped, leaving null slots that
    // make Task.WhenAll throw ArgumentException. A list only contains real tasks.
    var cacheTasks = new List<Task<TResponse>>(servers.Count);
    foreach (var server in servers)
    {
        if (server == null)
        {
            continue;
        }
        var provider = CacheManager.Configuration.GetCacheProvider(server.ServerType);
        if (provider == null)
        {
            continue;
        }
        cacheTasks.Add(ExecuteCacheOperationAsync(provider, server));
    }

    result.AddResponse(await Task.WhenAll(cacheTasks).ConfigureAwait(false));
    return result;
}
public void TestEquality()
{
    var first = new CacheResult<int>(2);
    var equalToFirst = new CacheResult<int>(2);
    var distinct = new CacheResult<int>(3);

    // Equals / operators agree on value equality.
    Assert.AreEqual(first, equalToFirst);
    Assert.AreNotEqual(first, distinct);
    Assert.IsTrue(first == equalToFirst);
    Assert.IsFalse(first != equalToFirst);
    Assert.IsTrue(first != distinct);
    Assert.IsFalse(first == distinct);

    // Boxed copies are distinct references yet still compare equal by value.
    var boxedFirst = (object)first;
    var boxedEqual = (object)equalToFirst;
    var boxedDistinct = (object)distinct;
    Assert.IsFalse(ReferenceEquals(boxedFirst, boxedEqual));
    Assert.AreEqual(boxedFirst, boxedEqual);
    Assert.AreNotEqual(boxedFirst, boxedDistinct);
}
// Reports whether candidateText is similar to the source string, with a
// similarity weight. Results for the most recent candidate are served from a
// one-entry cache.
public bool AreSimilar(string candidateText, out double similarityWeight)
{
    // If we're comparing strings that are too short, we'll find far too many
    // spurious hits. Don't even bother in this case.
    if (_source.Length < 3)
    {
        similarityWeight = double.MaxValue;
        return false;
    }

    // Repeated query for the same candidate: answer from the cached result.
    if (candidateText == _lastAreSimilarResult.CandidateText)
    {
        similarityWeight = _lastAreSimilarResult.SimilarityWeight;
        return _lastAreSimilarResult.AreSimilar;
    }

    bool areSimilar = AreSimilarWorker(candidateText, out similarityWeight);
    _lastAreSimilarResult = new CacheResult(candidateText, areSimilar, similarityWeight);
    return areSimilar;
}