/// <summary>
/// Creates a CachedObject for the given value, asserting each value is created at most once.
/// </summary>
/// <param name="val">The value to wrap; also recorded in CreatedObjects for duplicate detection.</param>
CachedObject CreateForCache(long val)
{
    // Each value must be materialized exactly once; a repeat indicates a caching bug.
    Assert.IsFalse(CreatedObjects.Contains(val));
    CreatedObjects.Add(val);

    var cached = new CachedObject { Val = val };
    return cached;
}
/// <summary>
/// Verifies that CachedObject populates its backing dictionary lazily, exactly once,
/// on the first Get(), and serves subsequent reads from the populated store.
/// </summary>
public void cachedobject_is_working_with_dict()
{
    var backingStore = new Dictionary<string, string>();
    var cachedObject = new CachedObject<string>(
        () => backingStore.Add(Key, Value),
        () => backingStore.GetValueOrDefault(Key));

    // Nothing is written until the first read.
    backingStore.Count.ShouldEqual(0);

    // First read triggers the population delegate exactly once.
    cachedObject.Get().ShouldEqual(Value);
    backingStore.Count.ShouldEqual(1);

    // Second read must not populate again.
    cachedObject.Get().ShouldEqual(Value);
    backingStore.Count.ShouldEqual(1);
}
/// <summary>
/// Turns the given <paramref name="obj"/> into a cacheable object.
/// </summary>
/// <param name="obj">The <see cref="Record"/> for which to create the cacheable object. May be null (query yielded no result).</param>
/// <param name="query">The <see cref="Query"/> which resulted in the given <paramref name="obj"/>.</param>
/// <returns>Returns the <see cref="CachedObject{TObject}"/>.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="query"/> is null.</exception>
public static CachedObject<Record> AsCacheableObject(this Record obj, Query query)
{
    // validate arguments (use nameof for a refactor-safe parameter name)
    if (query == null)
        throw new ArgumentNullException(nameof(query));

    // create a new cacheable object
    var cacheable = new CachedObject<Record>(obj);

    // if the result is found, cache it by its id
    ChildOfSpecification childOfSpecification;
    if (obj != null)
    {
        // generate an ID for this specific record
        var recordIdCacheKey = obj.CalculateIdCacheKey();

        // add that cache key as the dependency
        cacheable.Add((StringCacheKeyDependency) recordIdCacheKey);
    }
    else if (query.TryGetSpecification(out childOfSpecification))
    {
        // no record found, but the query targets a child of a known parent:
        // cache on the parent tree Id
        var parentTreeIdCacheKey = childOfSpecification.ParentPointer.CalculateTreeIdCacheKey();

        // add that cache key as the dependency
        cacheable.Add((StringCacheKeyDependency) parentTreeIdCacheKey);
    }
    else
    {
        // fall back to invalidating whenever the repository is modified
        cacheable.Add(CachingRepositoryDecorator.RepositoryModifiedDependency);
    }

    // return the cacheable object
    return cacheable;
}
/// <summary>
/// Turns the given <paramref name="obj"/> into a cacheable object.
/// </summary>
/// <param name="obj">The <see cref="Nodeset"/> for which to create the cacheable object.</param>
/// <param name="query">The <see cref="Query"/> which resulted in the given <paramref name="obj"/>.</param>
/// <returns>Returns the <see cref="CachedObject{TObject}"/>.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="obj"/> or <paramref name="query"/> is null.</exception>
public static CachedObject<Nodeset> AsCacheableObject(this Nodeset obj, Query query)
{
    // validate arguments (use nameof for refactor-safe parameter names)
    if (obj == null)
        throw new ArgumentNullException(nameof(obj));
    if (query == null)
        throw new ArgumentNullException(nameof(query));

    // create a new cacheable object
    var cacheable = new CachedObject<Nodeset>(obj);

    ChildOfSpecification childOfSpecification;
    if (query.TryGetSpecification(out childOfSpecification))
    {
        // the query targets children of a known parent: cache on the parent tree Id
        var parentTreeIdCacheKey = childOfSpecification.ParentPointer.CalculateTreeIdCacheKey();

        // add that cache key as the dependency
        cacheable.Add((StringCacheKeyDependency) parentTreeIdCacheKey);
    }
    else
    {
        // fall back to invalidating whenever the repository is modified
        cacheable.Add(CachingRepositoryDecorator.RepositoryModifiedDependency);
    }

    // return the cacheable object
    return cacheable;
}
/// <summary>
/// A conjunction: the item matches only if every sub-element of this query matches it.
/// </summary>
/// <param name="item">The packed object to test.</param>
/// <returns>True when all elements match; false as soon as one fails.</returns>
public override bool Match(CachedObject item)
{
    foreach (var element in Elements)
    {
        if (!element.Match(item))
            return false;
    }

    return true;
}
/// <summary>
/// Integration test against a single local cache node: verifies the server/type
/// descriptions, then exercises Put/GetMany with trades (query built "by hand")
/// and Put/GetOne/GetMany with quotes, including a query on a null index value.
/// </summary>
public void DataAccess()
{
    using (var client = ClientFactory.InitSingleNode("CacheClientConfig.xml", "localhost", _serverPort))
    {
        var clientImplementation = (CacheClient)client;
        var serverDescription = clientImplementation.GetServerDescription();
        Assert.IsNotNull(serverDescription);

        var tradeDescription = clientImplementation.KnownTypes["UnitTests.TestData.Trade"];
        var quoteDescription = clientImplementation.KnownTypes["UnitTests.TestData.Quote"];

        // exactly two types registered, with the expected index/list field counts
        Assert.AreEqual(serverDescription.KnownTypesByFullName.Count, 2);
        Assert.AreEqual(tradeDescription.AsTypeDescription.IndexFields.Count, 3);
        Assert.AreEqual(tradeDescription.AsTypeDescription.ListFields.Count, 2);
        Assert.AreEqual(quoteDescription.AsTypeDescription.IndexFields.Count, 3);

        ////////////////////////////////////////////:
        // test trades
        var trade1 = new Trade(1, 1001, "XXX", new DateTime(2009, 1, 15), (float)10000.25);
        var trade2 = new Trade(2, 1002, "XXX", new DateTime(2009, 1, 15), (float)20000.25);
        client.Put(trade1);
        client.Put(trade2);

        //build a query the "hard" way
        var tradeQueryBuilder = new QueryBuilder(tradeDescription.AsTypeDescription);
        var q1 = tradeQueryBuilder.MakeAtomicQuery("Nominal", QueryOperator.Gt, 1000F);
        var q2 = tradeQueryBuilder.MakeAtomicQuery("Nominal", QueryOperator.Le, 20000.25F);
        var q12 = tradeQueryBuilder.MakeAndQuery();
        q12.Elements.Add(q1);
        q12.Elements.Add(q2);
        var q = tradeQueryBuilder.MakeOrQuery(q12);
        Assert.IsTrue(q.IsValid);

        // the query must match locally-packed copies of both trades
        Assert.IsTrue(q.Match(CachedObject.Pack(trade1, tradeDescription)));
        Assert.IsTrue(q.Match(CachedObject.Pack(trade2, tradeDescription)));

        var trades = client.GetMany <Trade>(q).ToList();
        Assert.IsNotNull(trades);
        Assert.AreEqual(trades.Count, 2);

        //////////////////////////////////////////////////////
        // test quotes
        //put a quote with some null index values
        //RefSet is null
        var quote1 = new Quote { Name = "aaa", Mid = 2.2F, Ask = 2.1F, Bid = 2.3F };
        client.Put(quote1);

        var quote1Reloaded = client.GetOne <Quote>("aaa");
        // enum index not set on the stored quote, so it round-trips as INVALID
        Assert.AreEqual(quote1Reloaded.QuoteType, QuoteType.INVALID);

        //get by null index value
        //need to create the query the "hard way" ( cause null can not be specified in a query string)
        var quoteQueryBuilder = new QueryBuilder(quoteDescription.AsTypeDescription);
        q = quoteQueryBuilder.MakeOrQuery(quoteQueryBuilder.MakeAtomicQuery("RefSet", null));
        var quotes = client.GetMany <Quote>(q).ToList();
        Assert.AreEqual(quotes.Count, 1);
        Assert.AreEqual(quotes[0].Name, "aaa");
    }
}
/// <summary>
/// Forwards the removal notification to the underlying eviction queue.
/// </summary>
/// <param name="item">The item no longer present in the cache.</param>
public override void TryRemove(CachedObject item) => _evictionQueue.TryRemove(item);
/// <summary>
/// Return true if the current query matches the specified object
/// </summary>
/// <param name="item">The packed object to test against this query.</param>
/// <returns>True if <paramref name="item"/> satisfies the query; otherwise false.</returns>
public abstract bool Match(CachedObject item);
/// <summary>
/// Adding an item of a type other than the one the data store was created for
/// must be rejected with an <see cref="InvalidOperationException"/>.
/// </summary>
public void PutDifferentType()
{
    var item1 = new NewCacheableTypeOk(1, 1001, "AHA", new DateTime(2010, 10, 01), 55);

    // fixed: removed the stray empty statement (";;") that followed the assertion
    Assert.Throws <InvalidOperationException>(() => _dataStore.InternalAddNew(CachedObject.Pack(item1), false));
}
/// <summary>
/// Registers a newly cached item with the underlying eviction queue.
/// </summary>
/// <param name="item">The item that was just added to the cache.</param>
public override void AddItem(CachedObject item) => _evictionQueue.AddNew(item);
/// <summary>
/// The specified item was added to the cache. The base implementation is a no-op;
/// eviction policies that track items override this.
/// </summary>
/// <param name="item">The newly cached item.</param>
public virtual void AddItem(CachedObject item)
{
    //ignore in the base class
}
/// <summary>
/// The specified item was accessed, update its priority accordingly.
/// The base implementation is a no-op; access-sensitive policies (e.g. LRU) override it.
/// </summary>
/// <param name="item">The item that was just accessed.</param>
public virtual void Touch(CachedObject item)
{
    //ignore in the base class
}
/// <summary>
/// Remove a single item from this structure's bookkeeping.
/// </summary>
/// <param name="item">The item to remove.</param>
public abstract void RemoveOne(CachedObject item);
/// <summary>
/// Interception behavior that caches the results of string-parameter "Get*" methods
/// for 5 minutes (keyed by declaring type + method name + arguments), optionally
/// unwrapping Task results. Any non-"Get" call clears the cache for the declaring type.
/// </summary>
/// <param name="input">The intercepted invocation.</param>
/// <param name="getNext">Delegate producing the next behavior in the pipeline.</param>
/// <returns>The cached or freshly computed method return.</returns>
public IMethodReturn Invoke(IMethodInvocation input, GetNextInterceptionBehaviorDelegate getNext)
{
    if (input.MethodBase is MethodInfo method)
    {
        if (method.Name.StartsWith("Get"))
        {
            var parameters = method.GetParameters();
            // only cache when every parameter is a string (keys are built by joining the inputs)
            if (parameters.All(i => i.ParameterType == typeof(string)))
            {
                var key1 = method.ReflectedType.FullName;
                var key2 = $"{method.Name}_{parameters.Length}_" + string.Join("_", input.Inputs.Cast <object>());
                log.Debug($"Using keys {key1} {key2}");
                CachedObject result;
                while (true)
                {
                    var cache = _cache.GetOrAdd(key1, new ConcurrentDictionary <string, CachedObject>());
                    result = cache.GetOrAdd(key2, _ =>
                    {
                        log.Debug($"Cache miss on {key2}");
                        var r = getNext()(input, getNext);
                        var ret = new CachedObject(DateTime.UtcNow, r?.ReturnValue, r?.Exception);
                        if (UnwrapTasks)
                        {
                            // for Task<T> results, capture the completed value asynchronously
                            if (ret.Object is Task && method.ReturnType.IsGenericType &&
                                method.ReturnType.GetGenericTypeDefinition() == typeof(Task <>))
                            {
                                ((Task)ret.Object).ContinueWith(t =>
                                {
                                    var tr = _taskResult.GetOrAdd(method.ReturnType.GetGenericArguments()[0], GetTaskResult);
                                    ret.UnwrappedTask = tr(t);
                                });
                            }
                        }
                        return ret;
                    });
                    // entries older than 5 minutes are evicted and the lookup is retried
                    if (result.CacheTime.AddMinutes(5) < DateTime.UtcNow)
                    {
                        cache.TryRemove(key2, out result);
                    }
                    else
                    {
                        return result.Exception != null
                            ? input.CreateExceptionMethodReturn(result.Exception)
                            : UnwrapTasks && result.UnwrappedTask != null &&
                              method.ReturnType.GetGenericTypeDefinition() == typeof(Task <>)
                                ? input.CreateMethodReturn(_taskFromResult.GetOrAdd(method.ReturnType.GetGenericArguments()[0], CreateTaskFromResult)(result.UnwrappedTask))
                                : input.CreateMethodReturn(result.Object);
                    }
                }
            }
        }
        else
        {
            // hmm.... we better clear the cache for this object to be sure....
            var key1 = method.ReflectedType.FullName;
            // BUG FIX: the '$' interpolation prefix was missing, so the literal text
            // "{key1}" was logged instead of the declaring type's name.
            log.Debug($"Clearing cache for {key1}");
            ConcurrentDictionary <string, CachedObject> removed;
            _cache.TryRemove(key1, out removed);
        }
    }
    log.Debug($"Not caching call to {input.MethodBase}");
    return getNext()(input, getNext);
}
/// <summary>
/// Put a new item in the index.
/// REQUIRE: no item having the same primary key exists in the index.
/// </summary>
/// <param name="item">The item to add.</param>
public abstract void Put(CachedObject item);
/// <summary>
/// This method will check the specified sifDataObject to see whether its dependent objects already exist in
/// the target system or in the cache. If all the dependent objects exist in the target system, then the
/// sifDataObject can be processed further (return true). If some dependent objects are have been cached, then
/// the sifDataObject will be cached awaiting all outstanding dependent objects, and this method will return
/// false.
/// </summary>
/// <param name="sifDataObject">SIF Data Object to check against the cache.</param>
/// <param name="eventAction">The action associated with the SIF Data Object, i.e. add, change.</param>
/// <param name="zone">Zone the SIF Data Object was received from.</param>
/// <returns>True if all dependent objects exist in the target system; false otherwise.</returns>
/// <exception cref="System.ArgumentException">sifDataObject or zone parameter is null.</exception>
private bool PreProcessSifDataObject(T sifDataObject, EventAction?eventAction, IZone zone)
{
    if (sifDataObject == null)
    {
        throw new ArgumentNullException("sifDataObject");
    }
    if (zone == null)
    {
        throw new ArgumentNullException("zone");
    }

    bool processFurther = true;
    SifDataObjectMetadata <T> metadata = MetadataInstance(sifDataObject);

    if (log.IsDebugEnabled)
    {
        log.Debug(this.GetType().Name + " preprocessing " + metadata.ObjectName + " (" + metadata.SifUniqueId + ") for application " + ApplicationId + " in zone " + zone.ZoneId + ".");
    }

    CachedObject cachedObject = cacheService.RetrieveCachedObject(metadata.ObjectName, metadata.SifUniqueId, ApplicationId, zone.ZoneId);

    // Previously cached SIF Data Objects/messages are currently ignored.
    // TODO: Implement a better solution that manages previously received messages.
    if (cachedObject == null)
    {
        if (log.IsDebugEnabled)
        {
            log.Debug(metadata.ObjectName + " (" + metadata.SifUniqueId + ") does not exist in the cache and it's dependents will be checked.");
        }

        // Collect dependents that turn out to already exist (in cache or target system)
        // so they can be pruned from the outstanding set after the loop.
        ICollection <DependentObject> dependentObjects = metadata.DependentObjects;
        ICollection <DependentObject> existingObjects = new Collection <DependentObject>();

        foreach (DependentObject dependentObject in dependentObjects)
        {
            // The dependent object does not exist in the cache.
            if (cacheService.RetrieveDependentObject(dependentObject.SifObjectName, dependentObject.ObjectKeyValue, ApplicationId, zone.ZoneId) == null)
            {
                if (log.IsDebugEnabled)
                {
                    log.Debug("Dependent " + dependentObject.SifObjectName + " (" + dependentObject.ObjectKeyValue + ") did NOT exist in the cache.");
                }

                // The dependent objects exists in the target system so there is no need to cache it.
                if (DoesObjectExistInTargetSystem(dependentObject.SifObjectName, dependentObject.ObjectKeyValue))
                {
                    if (log.IsDebugEnabled)
                    {
                        log.Debug("Dependent " + dependentObject.SifObjectName + " (" + dependentObject.ObjectKeyValue + ") did exist in the target system and will NOT be cached.");
                    }
                    existingObjects.Add(dependentObject);
                }
                else
                {
                    if (log.IsDebugEnabled)
                    {
                        log.Debug("Dependent " + dependentObject.SifObjectName + " (" + dependentObject.ObjectKeyValue + ") did NOT exist in the target system and will be cached.");
                    }
                }
            }
            // The dependent object exists in the cache so there is no need to cache it again.
            else
            {
                if (log.IsDebugEnabled)
                {
                    log.Debug("Dependent " + dependentObject.SifObjectName + " (" + dependentObject.ObjectKeyValue + ") did exist in the cache and will NOT be cached.");
                }
                existingObjects.Add(dependentObject);
            }
        }

        // Prune the dependents that already exist; what remains is outstanding.
        foreach (DependentObject existingObject in existingObjects)
        {
            dependentObjects.Remove(existingObject);
        }

        // There are outstanding dependent objects.
        if (dependentObjects.Count != 0)
        {
            if (log.IsDebugEnabled)
            {
                log.Debug(metadata.ObjectName + " (" + metadata.SifUniqueId + ") will be cached and as not all it's dependents exist in the target system.");
            }
            processFurther = false;
            cacheService.StoreObjectInCache
                (metadata.ObjectName,
                metadata.SifUniqueId,
                sifDataObject.ToXml(),
                (eventAction == null ? null : eventAction.ToString()),
                AgentConfiguration.SourceId,
                ApplicationId,
                zone.ZoneId,
                ExpiryStrategy,
                ExpiryPeriod,
                dependentObjects);
        }
        else
        {
            if (log.IsDebugEnabled)
            {
                log.Debug(metadata.ObjectName + " (" + metadata.SifUniqueId + ") will NOT be cached as all it's dependents exist in the target system.");
            }
        }
    }
    else
    {
        // Already cached: skip further processing for this object.
        processFurther = false;
        if (log.IsDebugEnabled)
        {
            log.Debug(metadata.ObjectName + " (" + metadata.SifUniqueId + ") already exists in the cache and will be ignored.");
        }
    }

    return(processFurther);
}
/// <summary>
/// Stores an object in the cache under the given GUID key, stamped with the current
/// UTC time, after giving the cache a chance to garbage-collect.
/// </summary>
/// <param name="guidString">The cache key (a GUID rendered as a string).</param>
/// <param name="objectToCache">The value to store; replaces any existing entry for the key.</param>
public static void Set(string guidString, object objectToCache)
{
    // fixed: conventional modifier order ("public static", not "static public")
    ConditionalGarbageCollect();
    _cache[guidString] = new CachedObject() {StoredDateTime = DateTime.UtcNow, Object = objectToCache};
}
/// <summary>
/// Records the removed item so the test/policy can later inspect what was evicted.
/// </summary>
/// <param name="item">The item that was removed from the cache.</param>
public override void TryRemove(CachedObject item) => _removed.Add(item);
/// <summary>
/// This item is not present in the cache any more. The eviction policy should not compute
/// its eviction priority any more. The base implementation is a no-op.
/// </summary>
/// <param name="item">The item that left the cache.</param>
public virtual void TryRemove(CachedObject item)
{
    //ignore in the base class
}
/// <summary>
/// Exercises the data store's query engine: IN queries on unique/primary/index keys,
/// combined comparisons, index-backed vs. full-scan execution plans, and verifies
/// each returned object against Query.Match.
/// </summary>
public void Queries()
{
    // seed six items: folders AHA (1-3), BBB (4-5), BBA (6)
    var item = new CacheableTypeOk(1, 1001, "AHA", new DateTime(2010, 10, 01), 9);
    _dataStore.InternalAddNew(CachedObject.Pack(item), false);
    item = new CacheableTypeOk(2, 1002, "AHA", new DateTime(2010, 10, 01), 8);
    _dataStore.InternalAddNew(CachedObject.Pack(item), false);
    item = new CacheableTypeOk(3, 1003, "AHA", new DateTime(2010, 10, 02), 8);
    _dataStore.InternalAddNew(CachedObject.Pack(item), false);
    item = new CacheableTypeOk(4, 1004, "BBB", new DateTime(2010, 9, 01), 5);
    _dataStore.InternalAddNew(CachedObject.Pack(item), false);
    item = new CacheableTypeOk(5, 1005, "BBB", new DateTime(2010, 10, 01), 4);
    _dataStore.InternalAddNew(CachedObject.Pack(item), false);
    item = new CacheableTypeOk(6, 1006, "BBA", new DateTime(2010, 10, 01), 1);
    _dataStore.InternalAddNew(CachedObject.Pack(item), false);

    var builder = new QueryBuilder(typeof(CacheableTypeOk));

    //test In query with unique key : should return items 4 and 5
    var q1 = builder.In("uniquekey", 1004, 1005);
    var result = _dataStore.InternalGetMany(q1);
    Assert.AreEqual(result.Count, 2);
    Assert.AreEqual(result[0].PrimaryKey, 4);
    Assert.AreEqual(result[1].PrimaryKey, 5);

    //test In query with primary key : should return items 4 and 5
    var q2 = builder.In(4, 5);
    result = _dataStore.InternalGetMany(q2);
    Assert.AreEqual(result.Count, 2);
    Assert.AreEqual(result[0].PrimaryKey, 4);
    Assert.AreEqual(result[1].PrimaryKey, 5);

    //test In query with index key : should return items 4, 5, 6
    var q3 = builder.In("IndexKeyFolder", "BBB", "BBA");
    result = _dataStore.InternalGetMany(q3);
    Assert.AreEqual(result.Count, 3);

    //where IndexKeyValue <= 4 AND IndexKeyFolder = "BBB"
    var q4 = builder.GetMany("IndexKeyValue <= 4", "IndexKeyFolder = BBB");
    result = _dataStore.InternalGetMany(q4);
    Assert.AreEqual(result.Count, 1);
    Assert.AreEqual(result[0].PrimaryKey, 5);
    Assert.IsTrue(q4.Match(result[0]));

    //where IndexKeyFolder = "AHA" AND IndexKeyDate <= 20101001
    var q5 = builder.GetMany("IndexKeyFolder = AHA", $"IndexKeyDate <= {new DateTime(2010, 10, 01).Ticks}");
    result = _dataStore.InternalGetMany(q5);
    Assert.AreEqual(result.Count, 2);
    foreach (var cachedObject in result)
    {
        Assert.IsTrue(q5.Match(cachedObject));
    }

    //where IndexKeyDate <= 20101001
    var q6 = builder.GetMany($"IndexKeyDate <= {new DateTime(2010, 10, 01).Ticks}");
    result = _dataStore.InternalGetMany(q6);
    Assert.AreEqual(result.Count, 5);
    foreach (var cachedObject in result)
    {
        Assert.IsTrue(q6.Match(cachedObject));
    }

    // IN alone: resolved through the IndexKeyFolder index, not a full scan
    var q7 = builder.In("IndexKeyFolder", "BBA", "BBB", "BBC");
    result = _dataStore.InternalGetMany(q7);
    Assert.AreEqual(result.Count, 3);
    Assert.IsFalse(_dataStore.LastExecutionPlan.IsFullScan);
    Assert.AreEqual(_dataStore.LastExecutionPlan.PrimaryIndexName, "IndexKeyFolder");

    // IN and BTW: the planner should pick IndexKeyValue as the primary index
    var q81 = builder.In("IndexKeyFolder", "BBA", "BBB", "BBC");
    var q82 = builder.MakeAtomicQuery("indexKeyValue", 4, 5);
    q81.Elements[0].Elements.Add(q82);
    var queryDescription = q81.ToString();
    Assert.IsTrue(queryDescription.Contains("AND"));
    result = _dataStore.InternalGetMany(q81);
    Assert.AreEqual(result.Count, 2);
    Assert.IsFalse(_dataStore.LastExecutionPlan.IsFullScan);
    Assert.AreEqual(_dataStore.LastExecutionPlan.PrimaryIndexName, "IndexKeyValue");
    Assert.AreEqual(_dataStore.LastExecutionPlan.ElementsInPrimarySet, 2);

    // Perform a comparison on a non ordered index. it should be solved by a full scan
    var q9 = builder.GetMany("IndexKeyFolder >= BBA");
    result = _dataStore.InternalGetMany(q9);
    Assert.AreEqual(result.Count, 3);
    Assert.IsTrue(_dataStore.LastExecutionPlan.IsFullScan);

    // Multiple query to be solved by ful scan
    var q10 = builder.GetMany("IndexKeyFolder >= BBA", "IndexKeyFolder < BBB");
    result = _dataStore.InternalGetMany(q10);
    Assert.AreEqual(result.Count, 1);
    Assert.IsTrue(_dataStore.LastExecutionPlan.IsFullScan);
}
/// <summary>
/// Verifies CachedObject's expiry semantics using a counter type whose id increments
/// on every construction: maxAge == 0 recreates on every access; long min/max ages
/// create once; short minAge/maxAge recreate on the expected schedule.
/// NOTE(review): the sections guarded by allowSlowTest rely on real Thread.Sleep
/// timing and are inherently flaky on loaded machines.
/// </summary>
public void TestCachedObjectCorrectness()
{
    // Test zero maxAge -- object should be recreated every time we get the instance.
    CachedObject <ObjectThatIsDifferentEachTime> co = new CachedObject <ObjectThatIsDifferentEachTime>(() => { return(new ObjectThatIsDifferentEachTime()); }, TimeSpan.FromMinutes(1), TimeSpan.Zero);
    for (int n = 1; n <= 50; n++)
    {
        Assert.AreEqual(n, co.GetInstance().id);
    }

    // Test that the object is created only once when expiration dates are long.
    ObjectThatIsDifferentEachTime.Reset();
    co = new CachedObject <ObjectThatIsDifferentEachTime>(() => { return(new ObjectThatIsDifferentEachTime()); }, TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(2));
    Assert.AreEqual(1, co.GetInstance().id);
    Assert.AreEqual(1, co.GetInstance().id);
    Assert.AreEqual(1, co.GetInstance().id);

    if (allowSlowTest)
    {
        Thread.Sleep(100);
        Assert.AreEqual(1, co.GetInstance().id);
        Thread.Sleep(100);

        // Test short minAge
        ObjectThatIsDifferentEachTime.Reset();
        co = new CachedObject <ObjectThatIsDifferentEachTime>(() => { return(new ObjectThatIsDifferentEachTime()); }, TimeSpan.FromMilliseconds(50), TimeSpan.FromMinutes(2));
        Assert.AreEqual(1, co.GetInstance().id);
        Assert.AreEqual(1, co.GetInstance().id);
        Assert.AreEqual(1, co.GetInstance().id);
        Thread.Sleep(100);
        Assert.AreEqual(1, co.GetInstance().id);
        Thread.Sleep(25);
        Assert.AreEqual(2, co.GetInstance().id);
        Assert.AreEqual(2, co.GetInstance().id);
        Thread.Sleep(100);
        Assert.AreEqual(2, co.GetInstance().id);
        Thread.Sleep(25);
        Assert.AreEqual(3, co.GetInstance().id);
        Assert.AreEqual(3, co.GetInstance().id);
        Thread.Sleep(100);
        Assert.AreEqual(3, co.GetInstance().id);
        Thread.Sleep(100);
        Assert.AreEqual(4, co.GetInstance().id);
        Thread.Sleep(100);

        // Test short maxAge
        ObjectThatIsDifferentEachTime.Reset();
        co = new CachedObject <ObjectThatIsDifferentEachTime>(() => { return(new ObjectThatIsDifferentEachTime()); }, TimeSpan.FromMinutes(1), TimeSpan.FromMilliseconds(50));
        Assert.AreEqual(1, co.GetInstance().id);
        Assert.AreEqual(1, co.GetInstance().id);
        Assert.AreEqual(1, co.GetInstance().id);
        Thread.Sleep(100);
        Assert.AreEqual(2, co.GetInstance().id);
        Assert.AreEqual(2, co.GetInstance().id);
        Assert.AreEqual(2, co.GetInstance().id);
        Thread.Sleep(100);
        Assert.AreEqual(3, co.GetInstance().id);
        Assert.AreEqual(3, co.GetInstance().id);
        Assert.AreEqual(3, co.GetInstance().id);
        Thread.Sleep(100);
        Assert.AreEqual(4, co.GetInstance().id);
        Thread.Sleep(100);
        Assert.AreEqual(5, co.GetInstance().id);
    }
}
/// <summary>
/// Captures the collaborators this instance works with: the request handler, the cached
/// object, and extra arguments (stored as a name → value dictionary).
/// </summary>
/// <param name="requestHandler">Handler that will service requests.</param>
/// <param name="cachedObject">The cached object this instance operates on.</param>
/// <param name="args">Name/value pairs; names must be unique.</param>
public void Initialize(IRequestHandler requestHandler, CachedObject cachedObject, params KeyValuePair <string, string>[] args)
{
    this.requestHandler = requestHandler;
    this.cachedObject = cachedObject;

    // Materialize the pairs into a dictionary (duplicate names throw, as before).
    var argsByName = new Dictionary<string, string>();
    foreach (var pair in args)
    {
        argsByName.Add(pair.Key, pair.Value);
    }
    this.args = argsByName;
}
/// <summary>
/// Gets an object by its id: cache first, then database, then server (caching the
/// result with LRU-style history and size-based eviction).
/// </summary>
/// <param name="id">Object identifier.</param>
/// <param name="obj">The returned object.</param>
/// <param name="onlyInCache">When true, look only in the cache.</param>
/// <returns>True if the object was found.</returns>
public bool Get(Int64 id, out T obj, bool onlyInCache)
{
    // Initialize the output parameter
    obj = default(T);
    // Operation result
    bool result = false;
    // Flag: the object was absent from the database
    bool notFoundInDB = false;
    // Descriptor of the cached object
    CachedObject cachedObj;

    // Object lookup loop
    while (true)
    {
        // Try to find the object in the cache
        result |= _objects.TryGetValue(id, out cachedObj);
        // Found in cache
        if (result)
        {
            // Return the object
            obj = cachedObj.Object;
            // Refresh the object's usage history (move it to most-recently-used)
            _objectsHistory.Remove(cachedObj.HistoryIndex);
            cachedObj.HistoryIndex = _objectsHistory.Add(id);
            // Report success
            return(true);
        }
        // Caller asked for a cache-only lookup
        if (onlyInCache)
        {
            // Return the (negative) result
            return(result);
        }
        // Try to find the object in the database
        result |= GetFromDB(id, out obj);
        // Found in DB: leave the lookup loop
        if (result)
        {
            break;
        }
        // Not in DB
        else
        {
            // Remember that it was absent so it can be written back later
            notFoundInDB = true;
        }
        // Try to find the object on the server
        result |= GetFromServer(id, out obj);
        // Found on the server: leave the lookup loop
        if (result)
        {
            break;
        }
        // Not found anywhere
        else
        {
            // Report failure
            return(false);
        }
    }

    // The object came from the server but was missing from the DB
    if (notFoundInDB)
    {
        // Persist the object to the database
        SaveToDB(id, ref obj);
    }

    // If a maximum cached-object count is configured
    if (_maxCacheCount != 0)
    {
        // The cache is full
        if (_objectsHistory.Count >= _maxCacheCount)
        {
            // Id of the least recently used object
            Int64 localId;
            // Pop the least recently used object's id from the history
            if (!_objectsHistory.PopFirst(out localId))
            {
                // Invariant violated: history should not be empty here
                // NOTE(review): this branch throws IndexOutOfRangeException while the
                // size-eviction loop below throws InvalidOperationException for the same
                // condition — likely an inconsistency; confirm before unifying.
                throw new IndexOutOfRangeException("SingleCacheBase::Get - попытка извлечь из пустой истории!");
            }
            // Evict that object from the cache
            _objects.Remove(localId);
        }
    }

    // Size of the object being cached
    int objectSize = 0;
    // If a maximum cache size or maximum object size is configured
    if (_maxCacheSize != 0 || _maxObjectSize != 0)
    {
        // Measure the object
        objectSize = GetObjectSize(ref obj);
        // Object is too large to cache at all
        if (objectSize > _maxObjectSize || objectSize > _maxCacheSize)
        {
            // Report success without caching the object
            return(true);
        }
        // Evict least-recently-used objects until the new one fits
        while (objectSize + _currentCacheSize > _maxCacheSize)
        {
            // Id of the least recently used object
            Int64 localId;
            // Pop the least recently used object's id from the history
            if (!_objectsHistory.PopFirst(out localId))
            {
                // Invariant violated: history should not be empty here
                throw new InvalidOperationException("SingleCacheBase::Get - попытка извлечь из пустой истории!");
            }
            // Look up its descriptor
            if (!_objects.TryGetValue(localId, out cachedObj))
            {
                // Invariant violated: history referenced an object missing from the cache
                throw new InvalidOperationException("SingleCacheBase::Get - объект не найден в кэше!");
            }
            // Shrink the accounted cache size
            _currentCacheSize -= cachedObj.ObjectSize;
            // Evict the object
            _objects.Remove(localId);
        }
    }

    // Build a new descriptor for the object being cached
    cachedObj = new CachedObject();
    cachedObj.Object = obj;
    cachedObj.ObjectSize = objectSize;
    // Register it in the usage history
    cachedObj.HistoryIndex = _objectsHistory.Add(id);
    // Store the object in the cache
    _objects.Add(id, cachedObj);
    // Report success
    return(true);
}
/// <summary>
/// Propagates an access notification to the underlying eviction queue so the
/// item's priority is refreshed.
/// </summary>
/// <param name="item">The item that was just accessed.</param>
public override void Touch(CachedObject item) => _evictionQueue.Touch(item);
/// <summary>
/// Determines whether a cache entry has passed its expiration moment.
/// </summary>
/// <param name="cachedObject">The entry whose <c>Expiration</c> timestamp is checked.</param>
/// <returns>True when the expiration timestamp is in the past.</returns>
private static bool IsCacheExpired(CachedObject cachedObject)
{
    // BUG FIX: the original compared only the TimeOfDay components
    // (Expiration.TimeOfDay < DateTime.Now.TimeOfDay), which ignores the date:
    // an entry that expired yesterday looked valid again every day before its
    // expiry clock time. Compare the full timestamps instead.
    var isCacheExpired = cachedObject.Expiration < DateTime.Now;
    return(isCacheExpired);
}
/// <summary>
/// Delete one item by primary key.
/// </summary>
/// <param name="item">The item whose primary key identifies the entry to remove.</param>
public void Delete(T item)
{
    // Pack the item so its primary key can be extracted, then remove by that key.
    var packedItem = CachedObject.Pack(item, _typeDescription);
    _client.Remove <T>(packedItem.PrimaryKey);
}
/// <summary>
/// Round-trips packed CachedObjects through protocol-buffers serialization, both to a
/// byte array (5000 iterations) and through the streaming API (1000 items), verifying
/// that index keys survive the round trip.
/// NOTE(review): "CachedObject.Pack(obj).Metadata" suggests Pack returns a wrapper whose
/// Metadata property is the CachedObject — confirm against the Pack declaration.
/// </summary>
public void SerializeCachedObjectUsingProtocolBuffers()
{
    ClientSideTypeDescription.RegisterType(typeof(TradeLike));
    Random randGen = new Random();

    //to byte array
    for (int i = 0; i < 5000; i++)
    {
        TradeLike obj = new TradeLike(1, 1001, "aaa", new DateTime(2009, 10, 10), 1);
        CachedObject packed = CachedObject.Pack(obj).Metadata;
        byte[] data = SerializationHelper.ObjectToBytes(packed, SerializationMode.ProtocolBuffers, null);
        CachedObject reloaded = SerializationHelper.ObjectFromBytes <CachedObject>(data, SerializationMode.ProtocolBuffers, false);
        Assert.AreEqual(reloaded.IndexKeys[2], packed.IndexKeys[2]);
        Console.WriteLine(reloaded);
    }

    //to stream
    MemoryStream stream = new MemoryStream();
    List <CachedObject> items = new List <CachedObject>();
    for (int i = 0; i < 1000; i++)
    {
        TradeLike obj = new TradeLike(1, 1001, "aaa", new DateTime(2009, 10, 10), randGen.Next(1000));
        CachedObject packed = CachedObject.Pack(obj).Metadata;
        items.Add(packed);
    }
    List <CachedObject> itemsReloaded = new List <CachedObject>();
    Streamer.ToStreamGeneric(stream, items);
    stream.Seek(0, SeekOrigin.Begin);

    // the streaming callback signals completion when the last item arrives
    ManualResetEvent evt = new ManualResetEvent(false);
    Streamer.FromStream(stream,
        delegate(CachedObject item, int i, int totalItems)
        {
            itemsReloaded.Add(item);
            if (i == totalItems)
            {
                evt.Set();
            }
        },
        delegate { /* ignore exceptions */ });
    evt.WaitOne();

    for (int i = 0; i < 1000; i++)
    {
        Assert.AreEqual(itemsReloaded[i].IndexKeys[2], items[i].IndexKeys[2]);
    }
}
/// <summary>
/// Measures pack + unpack throughput for a representative object, with and without
/// compression, printing elapsed milliseconds for each run.
/// </summary>
public void Test_packing_performance()
{
    var home = new Home
    {
        Address = "14 rue du chien qui fume",
        Bathrooms = 2,
        Rooms = 4,
        PriceInEuros = 200,
        CountryCode = "FR",
        Comments =
        {
            new Comment { Text = "Wonderful place", User = "******" },
            new Comment { Text = "Very nice apartment" }
        }
    };

    var desc = ClientSideTypeDescription.RegisterType <Home>();

    const int objects = 10_000;

    {
        // warm up
        var unused = CachedObject.Pack(home, desc);
        var json = unused.AsJson();
        var reloaded = CachedObject.Unpack <Home>(unused);

        var watch = new Stopwatch();
        watch.Start();
        for (int i = 0; i < objects; i++)
        {
            var packed = CachedObject.Pack(home, desc);
            // BUG FIX: the timed loop previously unpacked the warm-up object
            // ("unused") instead of the freshly packed one, so "packed" was never
            // consumed and unpacking cost was measured on a stale instance.
            reloaded = CachedObject.Unpack <Home>(packed);
        }
        watch.Stop();
        Console.WriteLine($"Packing + unpacking {objects} objects took {watch.ElapsedMilliseconds} ms");
    }

    {
        // warm up (compression enabled for the second measurement)
        desc.UseCompression = true;
        var unused = CachedObject.Pack(home, desc);
        var reloaded = CachedObject.Unpack <Home>(unused);

        var watch = new Stopwatch();
        watch.Start();
        for (int i = 0; i < objects; i++)
        {
            var packed = CachedObject.Pack(home, desc);
            reloaded = CachedObject.Unpack <Home>(packed);
        }
        watch.Stop();
        Console.WriteLine($"Packing + unpacking {objects} objects with compression took {watch.ElapsedMilliseconds} ms");
    }
}
/// <summary>
/// One step of the quest-giver coroutine: selects the closest valid NPC, walks to it,
/// and talks to it, marking it unwalkable/ignored/removed on the various failure
/// modes (max 5 interaction attempts). Returns false when there is nothing to do.
/// </summary>
public static async Task <bool> Execute()
{
    if (_current == null)
    {
        if ((_current = Npcs.ClosestValid()) == null)
        {
            return(false);
        }
    }

    var pos = _current.Position;
    // NOTE(review): the display name is read from the *position* object (pos.Name),
    // not from the NPC itself — confirm Position.Name carries the NPC name.
    var name = pos.Name;

    GlobalLog.Info($"[TalkToQuestgivers] Now going to talk to {pos}");

    if (!await pos.TryComeAtOnce())
    {
        GlobalLog.Error($"[TalkToQuestgivers] Unexpected error. \"{name}\" position is unwalkable.");
        _current.Unwalkable = true;
        _current = null;
        return(true);
    }

    // _current can be cleared concurrently while walking; re-check before use
    if (_current == null)
    {
        return(false);
    }

    var obj = _current.Object;
    if (obj == null)
    {
        GlobalLog.Error($"[TalkToQuestgivers] Unexpected error. \"{name}\" object is null.");
        _current.Ignored = true;
        _current = null;
        return(true);
    }
    if (!obj.IsTargetable)
    {
        GlobalLog.Error($"[TalkToQuestgivers] Unexpected error. \"{name}\" is untargetable.");
        _current.Ignored = true;
        _current = null;
        return(true);
    }
    if (!obj.HasNpcFloatingIcon)
    {
        // no icon means there is no longer anything to collect from this NPC
        GlobalLog.Debug($"[TalkToQuestgivers] \"{name}\" no longer has NpcFloatingIcon.");
        Npcs.Remove(_current);
        _current = null;
        return(true);
    }

    var attempts = ++_current.InteractionAttempts;
    if (attempts > 5)
    {
        GlobalLog.Error($"[TalkToQuestgivers] All attempts to interact with \"{name}\" have been spent. Now ignoring it.");
        _current.Ignored = true;
        _current = null;
        return(true);
    }

    if (!await obj.AsTownNpc().Talk())
    {
        // talk failed; wait a moment and let the next tick retry
        await Wait.SleepSafe(1000);
        return(true);
    }

    await Coroutines.CloseBlockingWindows();
    await Wait.SleepSafe(200);

    // if the icon is gone after talking, this NPC is done
    if (!obj.Fresh().HasNpcFloatingIcon)
    {
        Npcs.Remove(_current);
        _current = null;
    }
    return(true);
}
/// <summary>
/// Ctor. call this in your Program Ctor.
/// </summary>
/// <param name="_ThisProgram">"<c>this</c>" to hand over a reference to the calling Program</param>
/// <param name="_Jobs">a dict mapping from a job name to an <c>Job</c>. Mandatory, otherwise this entire class is useless</param>
/// <param name="_Commands">a dict mapping from a string to an Command. Not mandatory</param>
/// <param name="_EchoState">whether the enviromnment should echo its state each run.</param>
/// <param name="_DisplayState">whether the environment should display its state onscreen.</param>
public RuntimeEnvironment(
    MyGridProgram _ThisProgram,
    Dictionary <string, Job> _Jobs,
    Dictionary <string, Command> _Commands = null,
    bool _EchoState = false,
    bool _DisplayState = false
    )
{
    ThisProgram = _ThisProgram;
    CurrentTickrate = RateNeededForInterval(interval);
    EchoState = _EchoState;
    DisplayState = _DisplayState;

    // optional on-screen output: surface 0 = main screen, surface 1 = keyboard
    if (DisplayState)
    {
        PBscreen = ThisProgram.Me.GetSurface(0);
        PBscreen.ContentType = ContentType.TEXT_AND_IMAGE;
        PBscreen.WriteText("", false);
        PBscreen.Font = "Monospace";
        PBkeyboard = ThisProgram.Me.GetSurface(1);
        PBkeyboard.ContentType = ContentType.TEXT_AND_IMAGE;
        PBkeyboard.WriteText("", false);
        PBkeyboard.Font = "Monospace";
        PBkeyboard.FontSize = 4.5f;
    }

    Output(things: "Creating RuntimeEnvironment...");

    // register jobs (at least one is mandatory; reserved names are rejected)
    Jobs = _Jobs;
    Output(EndLine: false, things: " registering jobs...");
    if (!Jobs.Any())
    {
        Output(EndLine: true, things: " ERROR");
        throw new Exception("No Jobs provided!");
    }
    else
    {
        Output(EndLine: true, things: "");
    }
    foreach (var job in Jobs)
    {
        Output(EndLine: false, things: " " + job.Key);
        if (ForbiddenJobNames.Any(x => x == job.Key))
        {
            Output(things: " ERROR");
            Echo("forbidden job key \"", job.Key, "\" encountered.");
            throw new Exception("forbidden job key \"" + job.Key + "\" encountered.");
        }
        else
        {
            Output(things: " OK");
        }
        AverageJobRuntimes.Add(job.Key, new JobRuntimeInfo());
        job.Value.RequeueInterval = SanitizeInterval(job.Value.RequeueInterval);
        // capabilities are the union of all jobs' capabilities
        AllowFrequencyChange |= job.Value.AllowFrequencyChange;
        AllowToggle |= job.Value.AllowToggle;
        RunningJobs.Add(job.Key, null);
    }
    JobNames = Jobs.Keys.ToList();

    // register user commands (optional; reserved names are rejected)
    if (_Commands == null)
    {
        Commands = new Dictionary <string, Command>();
    }
    else
    {
        Commands = _Commands;
    }
    Output(EndLine: false, things: " registering commands...");
    if (!Commands.Any())
    {
        Output(EndLine: true, things: " None");
    }
    else
    {
        Output(EndLine: true, things: "");
    }
    foreach (var command in Commands.Keys)
    {
        Output(EndLine: false, things: " " + command);
        if (ForbiddenCommands.Any(x => x == command))
        {
            Output(things: " ERROR");
            Echo("forbidden command key \"", command, "\" encountered.");
            throw new Exception("forbidden command key \"" + command + "\" encountered.");
        }
        else
        {
            Output(things: " OK");
        }
    }

    // built-in commands; "toggle"/"frequency" only when some job supports them
    Commands.Add("run", new Command(CMD_run, 1, UpdateType.Trigger | UpdateType.Terminal));
    Commands.Add("evaluate", new Command(CMD_evaluate, 1, UpdateType.Terminal));
    if (AllowToggle)
    {
        Commands.Add("toggle", new Command(CMD_toggle, 0, UpdateType.Trigger | UpdateType.Terminal));
    }
    if (AllowFrequencyChange)
    {
        Commands.Add("frequency", new Command(CMD_freq, 1));
    }
    Commands.Add("reset", new Command(CMD_reset, 0, UpdateType.Trigger | UpdateType.Terminal));
    foreach (var command in Commands.Values)
    {
        KnownCommandUpdateTypes |= command.UpdateType;
    }

    // lazily rebuilt string caches for the status displays
    Output(EndLine: false, things: " building caches...");
    SystemInfoList = new CachedObject <List <string> >(BuildSystemInfoList);
    JobInfoList = new CachedObject <List <string> >(BuildJobInfoList);
    StatsStrings[0] = new CachedObject <string>(() => BuildStatsString(0));
    StatsStrings[1] = new CachedObject <string>(() => BuildStatsString(1));
    StatsStrings[2] = new CachedObject <string>(() => BuildStatsString(2));
    StatsStrings[3] = new CachedObject <string>(() => BuildStatsString(3));
    StatsStrings[-1] = new CachedObject <string>(() => BuildStatsString(-1));
    StatsStrings[-2] = new CachedObject <string>(() => BuildStatsString(-2));
    StatsStrings[-3] = new CachedObject <string>(() => BuildStatsString(-3));
    Output(EndLine: true, things: "Done!");

    // scale the screen font so the stats string exactly fits the surface
    if (DisplayState)
    {
        var textsize = ThisProgram.Me.GetSurface(0).MeasureStringInPixels(new StringBuilder(StatsString(-2)), "Monospace", 1f);
        var screensize = ThisProgram.Me.GetSurface(0).SurfaceSize;
        var xscale = screensize[0] / textsize[0];
        var yscale = screensize[1] / textsize[1];
        ThisProgram.Me.GetSurface(0).FontSize = Math.Min(xscale, yscale);
        ThisProgram.Me.GetSurface(0).Font = "Monospace";
    }

    Output(things: "Done Creating RuntimeEnvironment");
}
/// <summary>
/// Single tick of the open-chest task: continues work on an already selected
/// target if one exists, otherwise tries to pick the closest valid candidate
/// from the combat area cache (chests, shrines, strongboxes, special chests).
/// </summary>
/// <returns>True when a target was processed or newly selected this tick; false otherwise.</returns>
public async Task <bool> Run()
{
    // This task only operates inside combat areas.
    if (!World.CurrentArea.IsCombatArea)
        return false;

    var areaCache = CombatAreaCache.Current;

    // Regular chests; a configured range of 0 disables the category entirely.
    if (Settings.ChestOpenRange != 0)
    {
        if (_chest != null)
        {
            await ProcessChest();
            return true;
        }

        var chestCandidate = areaCache.Chests.ClosestValid();
        if (chestCandidate != null && ShouldOpen(chestCandidate, Settings.ChestOpenRange, Settings.Chests))
        {
            _chest = chestCandidate;
            return true;
        }
    }

    // Shrines.
    if (Settings.ShrineOpenRange != 0)
    {
        if (_shrine != null)
        {
            await ProcessShrine();
            return true;
        }

        var shrineCandidate = areaCache.Shrines.ClosestValid();
        if (shrineCandidate != null && ShouldOpen(shrineCandidate, Settings.ShrineOpenRange, Settings.Shrines))
        {
            _shrine = shrineCandidate;
            return true;
        }
    }

    // Strongboxes; additionally gated by a maximum rarity setting.
    if (Settings.StrongboxOpenRange != 0)
    {
        if (_strongbox != null)
        {
            await ProcessStrongbox();
            return true;
        }

        var strongboxCandidate = areaCache.Strongboxes.ClosestValid();
        if (strongboxCandidate != null
            && strongboxCandidate.Rarity <= Settings.MaxStrongboxRarity
            && ShouldOpen(strongboxCandidate, Settings.StrongboxOpenRange, Settings.Strongboxes))
        {
            _strongbox = strongboxCandidate;
            return true;
        }
    }

    // Special chests are always considered, with no range or should-open gate.
    if (_specialChest != null)
    {
        await ProcessSpecialChest();
        return true;
    }

    var specialCandidate = areaCache.SpecialChests.ClosestValid();
    if (specialCandidate != null)
    {
        _specialChest = specialCandidate;
        return true;
    }

    return false;
}
/// <summary>
/// Registers a newly cached item with this eviction policy by enqueuing it,
/// stamped with the moment it was added.
/// </summary>
/// <param name="item">The cached object to track for eviction.</param>
public override void AddItem(CachedObject item)
{
    var entry = Tuple.Create(DateTimeOffset.Now, item);
    _evictionQueue.Enqueue(entry);
}
/// <summary>
/// Verifies that concurrent lookups against an empty cache are synchronized when the
/// implementation throws: the implementation is invoked exactly once, every waiting
/// caller observes the same exception, and the exception is stored in the cache
/// exactly once.
/// </summary>
/// <remarks>
/// Returns <see cref="Task"/> instead of void: async void test methods hide failures
/// from the test framework because their exceptions cannot be awaited/observed.
/// </remarks>
public async Task TestEmptyCacheExceptionSyncronization()
{
    // Strict mocks: any unconfigured call fails the test.
    var implementation = new Mock <ITestInterface>(MockBehavior.Strict);
    var policyProvider = new Mock <ICachePolicyProvider <ITestInterface> >(MockBehavior.Strict);
    var cache = new Mock <ICacheProvider <ITestInterface> >(MockBehavior.Strict);
    var lookupHandler = new AsyncLookupHandler <ITestInterface>(implementation.Object, policyProvider.Object, cache.Object);

    var invocation = ProxiedMethodInvocationGenerator <ITestInterface> .FromExpression(a => a.AddNumbersAsync(1, 2));
    var method = invocation.Method;
    var parameters = invocation.Parameters;

    // The underlying implementation always throws.
    var thrownException = new TestException();
    implementation.Setup(a => a.AddNumbersAsync(1, 2)).ThrowsAsync(thrownException);

    var cachePolicy = new CachePolicy() { CacheDuration = 20 };
    policyProvider.Setup(a => a.GetPolicy(method, parameters)).Returns(cachePolicy);

    // Every read is a cache miss, forcing all lookups through the implementation path.
    var cachedObject = new CachedObject <int>(CachedObjectState.None, null);
    cache.Setup(a => a.GetAsync(invocation, cachePolicy)).ReturnsAsync(cachedObject);

    // Storing the exception is deliberately slow so duplicate store calls would be detectable.
    var cacheStoreTask = new Task(() => { Thread.Sleep(50); });
    cache.Setup(a => a.StoreExceptionAsync(invocation, cachePolicy, thrownException)).Returns(() =>
    {
        cacheStoreTask.Start();
        return cacheStoreTask;
    });

    // Fire many concurrent lookups; they must coalesce onto a single implementation call.
    const int taskCount = 10;
    var tasks = new Task <int> [taskCount];
    for (var i = 0; i < taskCount; i++)
    {
        tasks[i] = lookupHandler.LookupAsync(invocation);
    }

    foreach (var t in tasks)
    {
        try
        {
            await t;
            Assert.Fail("Task didn't throw exception");
        }
        catch (TestException)
        {
            // Expected: every caller observes the original exception.
        }
    }

    implementation.Verify(a => a.AddNumbersAsync(1, 2), Times.Once);
    Assert.IsTrue(cacheStoreTask.Wait(5000), "Store action on cache did not appear to have been called");
    cache.Verify(a => a.StoreExceptionAsync(invocation, cachePolicy, thrownException), Times.Once);
}
/// <summary>
/// Advances interaction with the currently selected shrine (<c>_shrine</c>):
/// validates it, walks toward it, interacts with it, and clears the selection
/// once the shrine is taken or deemed unusable. Each early return yields
/// control back to the task loop for the next tick.
/// </summary>
private static async Task ProcessShrine()
{
    //check if current shrine was blacklisted by Combat Routine
    if (Blacklist.Contains(_shrine.Id))
    {
        GlobalLog.Error("[OpenChestTask] Current shrine was blacklisted from outside.");
        _shrine.Ignored = true;
        _shrine = null;
        return;
    }

    var pos = _shrine.Position;

    // A range of -1 apparently means "unlimited"; otherwise abandon shrines that
    // drifted beyond the configured range scaled by the abandon multiplier.
    if (Settings.ShrineOpenRange != -1)
    {
        if (pos.Distance > Settings.ShrineOpenRange * AbandonDistanceMult)
        {
            GlobalLog.Debug("[OpenChestTask] Abandoning current shrine because its too far away.");
            TemporaryIgnore(_shrine.Id);
            _shrine = null;
            return;
        }
    }

    // Not in interaction range yet: keep moving toward the shrine this tick.
    if (pos.IsFar)
    {
        if (!pos.TryCome())
        {
            // Pathing failed; flag the shrine so it is not re-selected.
            GlobalLog.Error($"[OpenChestTask] Fail to move to {pos}. Marking this shrine as unwalkable.");
            _shrine.Unwalkable = true;
            _shrine = null;
        }
        return;
    }

    // The cached entry may no longer correspond to a live, active shrine object.
    var shrineObj = _shrine.Object as Shrine;
    if (shrineObj == null || shrineObj.IsDeactivated)
    {
        CombatAreaCache.Current.Shrines.Remove(_shrine);
        _shrine = null;
        return;
    }

    // Count this attempt up front; give up permanently once the budget is spent.
    var attempts = ++_shrine.InteractionAttempts;
    if (attempts > MaxShrineAttempts)
    {
        GlobalLog.Error("[OpenChestTask] All attempts to take a shrine have been spent. Now ignoring it.");
        _shrine.Ignored = true;
        _shrine = null;
        return;
    }

    if (await PlayerAction.Interact(shrineObj))
    {
        await Wait.LatencySleep();
        // Only drop the shrine from the cache once it is observed to deactivate.
        if (await Wait.For(() => shrineObj.IsDeactivated, "shrine deactivation", 100, 400))
        {
            CombatAreaCache.Current.Shrines.Remove(_shrine);
            _shrine = null;
        }
        return;
    }

    // Interaction failed; pause briefly before the next attempt.
    await Wait.SleepSafe(400);
}
/// <summary>
/// Called when a cached item is accessed. This policy takes no action on
/// access, so the method is intentionally a no-op.
/// </summary>
/// <param name="item">The cached object that was accessed.</param>
public override void Touch(CachedObject item)
{
    // nothing to do for this policy
}