public async Task Should_renew_age_when_returned__when_configured(bool renewAge)
{
    _cache = new MemCache<int, string>(3, TimeSpan.FromMilliseconds(49));
    _cache.AutoRenew = renewAge;
    _cache.Add(1, "a");
    await Task.Delay(25);
    _cache.Get(1);
    await Task.Delay(25);
    Assert.AreEqual(renewAge ? "a" : null, _cache.Get(1));
}
public async Task Should_renew_age_when_returned__when_configured(bool renewAge)
{
    _cache = new MemCache<int, string>(3, TimeSpan.FromMilliseconds(49));
    _cache.AutoRenew = renewAge;
    _cache.Add(1, "a");
    await Task.Delay(25);
    _cache.Get(1);
    await Task.Delay(25);
    Assert.Equal(renewAge ? "a" : null, _cache.Get(1));
}
public async Task AuthenticateAsync(HttpAuthenticationContext context, CancellationToken cancellationToken)
{
    var actionAuthentication = context.ActionContext.ActionDescriptor.GetCustomAttributes<AuthenticationAttribute>();

    // Authenticate when the action explicitly requests it, or when unattributed actions default to authentication.
    if ((actionAuthentication.Count > 0 && actionAuthentication.Last().Authenticate == true) ||
        (actionAuthentication.Count == 0 && AuthenticationConfig.GetAuthenticateNoAttribute() == true))
    {
        var ticket = context.Request.Headers.GetValues(AuthenticationConfig.AuthenticationString).FirstOrDefault();
        if (ticket == null)
        {
            throw new AuthenticationException("Cannot get ticket!");
        }

        object obj = MemCache.Get(AuthenticationConfig.TicketKeyPrefix + ticket);
        if (obj == null)
        {
            AuthorizationConfig.RemoveRoles(ticket);
            throw new AuthenticationException("Ticket has expired!");
        }

        // Optionally slide the ticket expiration on each authenticated request.
        if (AuthenticationConfig.GetRefreshTicket())
        {
            MemCache.Add(AuthenticationConfig.TicketKeyPrefix + ticket, obj, DateTime.Now.AddSeconds(AuthenticationConfig.GetTicketExpire()));
        }
    }
}
public static UserInfo GetUser(string upn, bool clearCache = false)
{
    if (clearCache)
    {
        MemCache.Clear(upn);
    }

    var result = (UserInfo)MemCache.Get(upn);
    if (result == null)
    {
        using (var ctx = new ClientContext(SharePointHelper.Url))
        {
            var u = ctx.Web.EnsureUser(upn);
            ctx.Load(u);
            ctx.Load(u.Groups);
            ctx.ExecuteQuery();
            result = MapToModel(u);
            MemCache.Add(upn, result);
        }
    }

    return result;
}
public void Should_invalidate_cache()
{
    _cache = new MemCache<int, string>(3, TimeSpan.MaxValue);
    _cache.Add(1, "a");
    _cache.Invalidate(1);
    Assert.Null(_cache.Get(1));
}
public async Task Should_not_return_expired()
{
    _cache = new MemCache<int, string>(3, TimeSpan.FromMilliseconds(49));
    _cache.Add(1, "a");
    await Task.Delay(50);
    Assert.Equal(null, _cache.Get(1));
}
/// <summary>
/// Gets the cached captcha.
/// </summary>
/// <param name="guid">The GUID.</param>
/// <returns></returns>
public static CaptchaImage GetCachedCaptcha(string guid)
{
    if (String.IsNullOrEmpty(guid))
    {
        return null;
    }

    return (CaptchaImage)MemCache.Get(guid);
}
/// <summary>
/// Gets the error count.
/// </summary>
/// <returns></returns>
public static string GetErrorCount()
{
    object countObj = MemCache.Get(CacheKey + "_" + "ErrorCount" + "_" + Ip);
    if (countObj != null)
    {
        return countObj.ToString();
    }

    return null;
}
public static bool IsDisplay(CaptchaDispalyType type)
{
    var cacheValue = MemCache.Get(GetId(type));
    if (cacheValue == null)
    {
        return false;
    }

    return true;
}
public void LevelDbSearchLogTest()
{
    // https://github.com/google/leveldb/blob/master/doc/log_format.md
    LogReader logReader = new LogReader(new FileInfo(@"TestWorld\000047.log"));
    logReader.Open();

    MemCache memCache = new MemCache();
    memCache.Load(logReader);

    var result = memCache.Get(new byte[] { 0xeb, 0xff, 0xff, 0xff, 0xf3, 0xff, 0xff, 0xff, 0x31 });
    Assert.IsTrue(ReadOnlySpan<byte>.Empty != result.Data);
    Assert.AreEqual(new byte[] { 0xA, 0x00, 0x00, 0x02, 0x05 }, result.Data.Slice(0, 5).ToArray());
}
public static List<UserInfo> GetUserByGroup(string groupName)
{
    var result = (List<UserInfo>)MemCache.Get(groupName);
    if (result == null)
    {
        result = new List<UserInfo>();
        var users = EmailHelper.GetUsersInGroup(groupName);
        foreach (User usr in users)
        {
            var ui = MapToModel(usr);
            result.Add(ui);
        }

        MemCache.Add(groupName, result);
    }

    return result;
}
static void testCache()
{
    MemCache.Set<string>("noexpiration", "noexpiration-noexpiration-noexpiration-noexpiration");
    var a0 = MemCache.Get<string>("noexpiration");

    MemCache.Set<string>("testabsolutetime", "sdfaslfjasldfjadsfjsafsfsd", DateTime.Now.AddSeconds(3));
    var a1 = MemCache.Get<string>("testabsolutetime");
    Thread.Sleep(4000);
    var a2 = MemCache.Get<string>("testabsolutetime");
    a0 = MemCache.Get<string>("noexpiration");

    MemCache.Set<string>("test123", "sdfaslfjasldfjadsfjsafsfsd", new TimeSpan(0, 0, 5));
    var v1 = MemCache.Get<string>("test123");
    var v2 = MemCache.Get<string>("test123");
    var v3 = MemCache.Get<string>("test123");
    MemCache.Set<string>("test123", "123123123213213124324", new TimeSpan(0, 0, 5));
    var v0 = MemCache.Get<string>("test123");

    Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + " - begin");
    getCache();
    Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + " - 1");
    getCache();
    Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + " - 2");
    getCache();
    Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + " - 3");
    getCache();
    Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + " - 4");
    getCache();
    Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + " - 5");

    for (int i = 0; i < 12; i++)
    {
        var name = getCache();
        Thread.Sleep(1000);
        Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + " - " + i);
    }
}
public static object GetUser(string ticket)
{
    return MemCache.Get(AuthenticationConfig.TicketKeyPrefix + ticket);
}
public void Should_not_break_when_invalidating_non_existent_cache()
{
    _cache = new MemCache<int, string>(3, TimeSpan.MaxValue);
    _cache.Invalidate(1);
    Assert.Null(_cache.Get(1));
}
public void GetSingleItem()
{
    _cache.Store("k", 0, Encoding.ASCII.GetBytes("foo"), DateTime.MinValue + TimeSpan.FromSeconds(10));
    Assert.AreEqual("foo", Encoding.ASCII.GetString(_cache.Get("k").Data));
}
public async Task Should_not_return_expired()
{
    _cache = new MemCache<int, string>(3, TimeSpan.FromMilliseconds(49));
    _cache.Add(1, "a");
    await Task.Delay(50);
    Assert.AreEqual(null, _cache.Get(1));
}
public static List<string> GetRoles(string ticket)
{
    return (List<string>)MemCache.Get(RolePrefix + ticket);
}
public void CompactNumeric()
{
    var keys = new List<byte[]>();
    DirectoryInfo dir = TestUtils.GetTestDirectory(false);

    // Setup new database and generate values enough to create 2 level 0 tables with overlapping keys.
    // We use this when we run the real test.
    ulong idx = 0;
    var options = new Options() { LevelSizeBaseFactor = 10, RetainAllFiles = true };
    List<FileMetadata> level0Files;
    Version version = null;
    using (var db = new Database(dir, true, options))
    {
        db.Open();
        for (int j = 0; j < 4; j++)
        {
            for (int i = 0; i < 8000; i++)
            {
                byte[] key = BitConverter.GetBytes(idx++);
                byte[] data = TestUtils.FillArrayWithRandomBytes(1000, 128);
                db.Put(key, data);
                keys.Add(key);
            }
        }

        level0Files = new List<FileMetadata>(db.Level0Tables);
        version = db.Version;
        db.Close();
    }

    ((Hierarchy)LogManager.GetRepository(Assembly.GetEntryAssembly())).Root.Level = Level.Warn;

    {
        Log.Warn($"Reading {keys.Count} values using regular db.get()");
        using (var db = new Database(dir, false, options))
        {
            db.Open();
            ulong count = 0;
            ulong countMissing = 0;
            foreach (byte[] key in keys)
            {
                byte[] value = db.Get(key);
                if (value == null)
                {
                    Log.Error($"Missing key {key.ToHexString()} at idx:{count}, {countMissing++}");
                }
                count++;
            }
            db.Close();
        }
    }

    return;

    //{
    //    Log.Warn($"Reading {keys.Count} values, from log files");
    //    List<byte[]> keysToRemove = new List<byte[]>(keys);
    //    FileInfo[] logFiles = dir.GetFiles("*.log");
    //    foreach (FileInfo fileInfo in logFiles)
    //    {
    //        Log.Warn($"Reading from {fileInfo.Name}. Have {keysToRemove.Count} keys left");
    //        using var reader = new LogReader(fileInfo.Open(FileMode.Open));
    //        var cache = new MemCache();
    //        cache.Load(reader);
    //        foreach (byte[] key in keysToRemove.Take(5000).ToArray())
    //        {
    //            if (cache.Get(key).State == ResultState.Exist)
    //            {
    //                keysToRemove.Remove(key);
    //            }
    //        }
    //    }
    //    Assert.AreEqual(0, keysToRemove.Count);
    //}

    int keysInLevel0 = 0;
    var keysInCurrentLog = new List<byte[]>();
    {
        Log.Warn($"Reading {keys.Count} values, from level0 files");
        List<byte[]> keysToRemove = new List<byte[]>(keys);
        var enumerators = new List<TableEnumerator>();
        foreach (FileMetadata fileMeta in level0Files.OrderBy(f => f.FileNumber))
        {
            string filePath = Path.Combine(dir.FullName, $"{fileMeta.FileNumber:000000}.ldb");
            var fileInfo = new FileInfo(filePath);
            Log.Warn($"Reading from {fileInfo.Name}. Have {keysToRemove.Count} keys left");
            var table = new Table(fileInfo);
            foreach (byte[] key in keysToRemove.ToArray())
            {
                if (table.Get(key).State == ResultState.Exist)
                {
                    keysInLevel0++;
                    keysToRemove.Remove(key);
                }
            }

            enumerators.Add((TableEnumerator)table.GetEnumerator());
        }

        Assert.Less(0, keysInLevel0);

        // Read the remaining from current log file
        {
            string filePath = Path.Combine(dir.FullName, $"{version.LogNumber:000000}.log");
            var fileInfo = new FileInfo(filePath);
            Log.Warn($"Reading remaining {keysToRemove.Count} values from current log {fileInfo.Name}");
            using var reader = new LogReader(fileInfo.Open(FileMode.Open));
            var cache = new MemCache();
            cache.Load(reader);
            foreach (byte[] key in keysToRemove.ToArray())
            {
                if (cache.Get(key).State == ResultState.Exist)
                {
                    keysInCurrentLog.Add(key);
                    keysToRemove.Remove(key);
                }
            }

            Assert.AreEqual(0, keysToRemove.Count);
        }

        {
            Log.Warn($"Reading {keysInLevel0} values, based on merge enumerator of all level0 table files");
            var enumerator = new MergeEnumerator(enumerators);
            int enumCount = 0;
            while (enumerator.MoveNext())
            {
                enumCount++;
            }

            Assert.AreEqual(keysInLevel0, enumCount);

            // Close the tables
            foreach (TableEnumerator tableEnumerator in enumerators)
            {
                tableEnumerator.TEST_Close();
            }
        }
    }

    {
        var keysLeftToRemove = new List<byte[]>(keys).Except(keysInCurrentLog).ToList();
        Log.Warn($"Reading {keysLeftToRemove.Count} values, from all level+1 files + current level0");
        var level1Enumerators = new List<TableEnumerator>();
        FileInfo[] tableFiles = dir.GetFiles("*.ldb");
        foreach (var fileInfo in tableFiles.OrderBy(f => f.Name))
        {
            if (level0Files.Any(f => $"{f.FileNumber:000000}.ldb" == fileInfo.Name))
            {
                if (version.GetFiles(0).All(f => $"{f.FileNumber:000000}.ldb" != fileInfo.Name))
                {
                    continue;
                }

                Log.Warn($"Reading current level0 file {fileInfo.Name}");
            }

            Log.Warn($"Reading from {fileInfo.Name}. Have {keysLeftToRemove.Count} keys left");
            var table = new Table(fileInfo);
            table.Initialize();
            level1Enumerators.Add((TableEnumerator)table.GetEnumerator());
            foreach (byte[] key in keysLeftToRemove.ToArray())
            {
                if (table.Get(key).State == ResultState.Exist)
                {
                    keysLeftToRemove.Remove(key);
                }
            }
        }

        //Assert.AreEqual(0, keysLeftToRemove.Count); // FAIL

        {
            keysLeftToRemove = new List<byte[]>(keys).Except(keysInCurrentLog).ToList();
            Log.Warn($"Reading {keysLeftToRemove.Count} values, from all level+1 files + current level0 using merge enumerator");
            var enumerator = new MergeEnumerator(level1Enumerators);
            int enumCount = 0;
            while (enumerator.MoveNext())
            {
                enumCount++;
                if (enumerator.Current != null)
                {
                    byte[] key = enumerator.Current.Key.Span.UserKey().ToArray();
                    keysLeftToRemove.RemoveAll(bytes => new BytewiseComparator().Compare(bytes, key) == 0);
                }
                else
                {
                    Log.Warn($"Current in enumerator is null");
                }
            }

            Assert.AreEqual(keys.Count - keysInCurrentLog.Count, enumCount, "Expected to have count of all keys");
            Assert.AreEqual(0, keysLeftToRemove.Count, "Expected to have found all keys");

            foreach (TableEnumerator tableEnumerator in level1Enumerators)
            {
                tableEnumerator.TEST_Close();
            }
        }
    }

    Log.Warn($"Done!");
}
public void Should_add_simple_item()
{
    _cache.Add(1, "1");
    Assert.Equal("1", _cache.Get(1));
}
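// A minimal sketch, NOT the library's actual implementation, of the MemCache<TKey, TValue>
// surface the tests above exercise: a capacity, a max age, Add/Get/Invalidate, and an
// AutoRenew flag that resets an entry's age on read. The eviction policy, field names,
// and any member not used in the tests are assumptions made for illustration only.
using System;
using System.Collections.Generic;
using System.Linq;

public class MemCache<TKey, TValue> where TValue : class
{
    private readonly int _capacity;
    private readonly TimeSpan _maxAge;
    private readonly Dictionary<TKey, (TValue Value, DateTime Added)> _items =
        new Dictionary<TKey, (TValue Value, DateTime Added)>();

    public bool AutoRenew { get; set; }

    public MemCache(int capacity, TimeSpan maxAge)
    {
        _capacity = capacity;
        _maxAge = maxAge;
    }

    public void Add(TKey key, TValue value)
    {
        // Naive eviction (assumed policy): drop the oldest entry once the capacity is reached.
        if (_items.Count >= _capacity && !_items.ContainsKey(key))
        {
            TKey oldest = _items.OrderBy(kv => kv.Value.Added).First().Key;
            _items.Remove(oldest);
        }

        _items[key] = (value, DateTime.UtcNow);
    }

    public TValue Get(TKey key)
    {
        if (!_items.TryGetValue(key, out var entry))
        {
            return null;
        }

        // Entries older than the configured max age behave as if they were never added.
        if (DateTime.UtcNow - entry.Added > _maxAge)
        {
            _items.Remove(key);
            return null;
        }

        // AutoRenew resets the entry's age every time it is read.
        if (AutoRenew)
        {
            _items[key] = (entry.Value, DateTime.UtcNow);
        }

        return entry.Value;
    }

    public void Invalidate(TKey key)
    {
        _items.Remove(key);
    }
}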