// An added entry must be immediately visible through ContainsKey.
public void Add_ItemIsAdded_ItemExistsInCache()
{
    var cache = new LruCache<string, string>(1);

    cache.Add("k1", "v1");

    Assert.IsTrue(cache.ContainsKey("k1"));
}
// A miss must leave the out parameter at the type's default value.
public void LruCache_TryGetValue_OutReturnsTypeDefaultIfNotExists()
{
    var lru = new LruCache<string, string>(100);

    lru.TryGetValue("does not exist", out var result);

    Assert.Equal(default(string), result);
}
// Builds a UDP server: validates the configuration, prepares per-client
// lockers and a pooled, fixed-size receive buffer.
public UdpServer(ServerConfig serverConfig, ILogger logger = null) : base(logger)
{
    // Throw.IfNull both validates and returns the argument.
    this._config = Throw.IfNull(() => serverConfig);
    this._clientLockers = new LruCache<Locker<UdpClient2>>();
    // 1500-byte segments (Ethernet MTU sized); capacity scales with the
    // configured maximum client count — NOTE(review): the factor 10 and the
    // trailing 5 are magic values, confirm against BufferPool's contract.
    this._bufferPool = new FixedSizeBuffer.BufferPool(1500, 10 * Defaults.MaxNumClient, 5);
}
// Adding a null item must be rejected with ArgumentNullException.
public void AddDenyTest1()
{
    var sut = new LruCache<string, TestClass, TestClass>(100, item => item.Text);

    Assert.Throws<ArgumentNullException>(() => sut.Add(null));
}
// Indexes the cache with a null key.
// NOTE(review): there is no assert here — presumably an ExpectedException-style
// attribute on this test (not visible in this chunk) captures the thrown
// exception; confirm before relying on this test.
public void Cache_Null()
{
    var source = GetRandomStringKeys(1);
    var loader = new CacheLoader<string, string>(source);
    var cache = new LruCache<string, string>(source.Count, loader);
    Console.WriteLine(cache[null]);
}
// Entries stored with Put must be retrievable with Get.
public void LruCache_Get()
{
    var sut = new LruCache<int, string>(2);
    sut.Put(1, "One");
    sut.Put(2, "Two");

    Assert.IsNotNull(sut.Get(1));
    Assert.IsNotNull(sut.Get(2));
}
// Sizes the album-art cache at a quarter of the runtime's max heap, capped at
// MaxAlbumArtCacheSize, and measures each entry by the combined byte count of
// its two stored bitmaps (big + icon).
AlbumArtCache()
{
    int maxSize = Math.Min(MaxAlbumArtCacheSize, (int)(Math.Min(int.MaxValue, Java.Lang.Runtime.GetRuntime().MaxMemory() / 4)));
    cache = new LruCache(maxSize);
    // Entry cost = bytes of the big bitmap plus bytes of the icon bitmap.
    cache.GetSizeOf = (key, value) => value[BigBitmapIndex].ByteCount + value[IconBitmapIndex].ByteCount;
}
// Loads a PE binary, deduplicating by content hash: the file is copied into
// the binary cache folder under its hash, loaded once, and shared between
// identical files found at different paths. Returns null when the file does
// not exist.
public PE GetBinary(string PePath)
{
    Debug.WriteLine(String.Format("Attempt to load : {0:s}", PePath), "BinaryCache");

    if (!NativeFile.Exists(PePath))
    {
        Debug.WriteLine(String.Format("File not present on the filesystem : {0:s} ", PePath), "BinaryCache");
        return (null);
    }

    string Fullpath = Path.GetFullPath(PePath);
    if (FilepathDatabase.ContainsKey(Fullpath))
    {
        // Fast path: this exact path has been loaded before.
        // TODO : update LRU cache
        PE sShadowBinary = FilepathDatabase[Fullpath];
        sShadowBinary.Filepath = Fullpath;
        return (sShadowBinary);
    }

    string PeHash = GetBinaryHash(PePath);
    Debug.WriteLine(String.Format("File {0:s} hash : {1:s} ", PePath, PeHash), "BinaryCache");

    // A sync lock is mandatory here in order not to load twice the
    // same binary from two differents workers
    lock (BinaryDatabaseLock)
    {
        bool hit = BinaryDatabase.ContainsKey(PeHash);

        // Cache "miss"
        if (!hit)
        {
            // Copy the binary into the cache folder unless it is already
            // there (or we are loading directly from the cache folder).
            string DestFilePath = Path.Combine(BinaryCacheFolderPath, PeHash);
            if (!File.Exists(DestFilePath) && (DestFilePath != PePath))
            {
                Debug.WriteLine(String.Format("FileCopy from {0:s} to {1:s}", PePath, DestFilePath), "BinaryCache");
                NativeFile.Copy(PePath, DestFilePath);
            }

            PE NewShadowBinary = new PE(DestFilePath);
            NewShadowBinary.Load();

            // Register under the content hash and under the original path.
            LruCache.Add(PeHash);
            BinaryDatabase.Add(PeHash, NewShadowBinary);
            FilepathDatabase.Add(Fullpath, NewShadowBinary);
        }
    }

    // Cache "Hit"
    UpdateLru(PeHash);
    PE ShadowBinary = BinaryDatabase[PeHash];
    ShadowBinary.Filepath = Path.GetFullPath(PePath); // convert any paths to an absolute one.

    Debug.WriteLine(String.Format("File {0:s} loaded from {1:s}", PePath, Path.Combine(BinaryCacheFolderPath, PeHash)), "BinaryCache");
    return (ShadowBinary);
}
/// <summary>
/// Creates a new <seealso cref="RocksDBStore"/>.
/// </summary>
/// <param name="path">The path of the directory where the storage files will be saved.
/// </param>
/// <param name="blockCacheSize">The capacity of the block cache.</param>
/// <param name="txCacheSize">The capacity of the transaction cache.</param>
/// <param name="maxTotalWalSize">The number to configure <c>max_total_wal_size</c> RocksDB
/// option.</param>
/// <param name="keepLogFileNum">The number to configure <c>keep_log_file_num</c> RocksDB
/// option.</param>
/// <param name="maxLogFileSize">The number to configure <c>max_log_file_size</c>
/// RocksDB option.</param>
public MonoRocksDBStore(
    string path,
    int blockCacheSize = 512,
    int txCacheSize = 1024,
    ulong? maxTotalWalSize = null,
    ulong? keepLogFileNum = null,
    ulong? maxLogFileSize = null
)
{
    // NOTE(review): logger context is RocksDBStore, not MonoRocksDBStore —
    // confirm this is intentional.
    _logger = Log.ForContext<RocksDBStore>();

    if (path is null)
    {
        throw new ArgumentNullException(nameof(path));
    }

    path = Path.GetFullPath(path);

    if (!Directory.Exists(path))
    {
        Directory.CreateDirectory(path);
    }

    // In-memory LRU caches in front of the tx and block databases.
    _txCache = new LruCache<TxId, object>(capacity: txCacheSize);
    _blockCache = new LruCache<HashDigest<SHA256>, BlockDigest>(capacity: blockCacheSize);

    _path = path;
    _options = new DbOptions()
        .SetCreateIfMissing();

    // Apply optional WAL/log tuning only when explicitly provided.
    if (maxTotalWalSize is ulong maxTotalWalSizeValue)
    {
        _options = _options.SetMaxTotalWalSize(maxTotalWalSizeValue);
    }

    if (keepLogFileNum is ulong keepLogFileNumValue)
    {
        _options = _options.SetKeepLogFileNum(keepLogFileNumValue);
    }

    if (maxLogFileSize is ulong maxLogFileSizeValue)
    {
        _options = _options.SetMaxLogFileSize(maxLogFileSizeValue);
    }

    _blockDb = RocksDBUtils.OpenRocksDb(_options, RocksDbPath(BlockDbName));
    _blockPerceptionDb = RocksDBUtils.OpenRocksDb(_options, RocksDbPath(BlockPerceptionDbName));
    _txDb = RocksDBUtils.OpenRocksDb(_options, RocksDbPath(TxDbName));
    _stagedTxDb = RocksDBUtils.OpenRocksDb(_options, RocksDbPath(StagedTxDbName));

    // When opening a DB in a read-write mode, you need to specify all Column Families that
    // currently exist in a DB. https://github.com/facebook/rocksdb/wiki/Column-Families
    var chainDbColumnFamilies = GetColumnFamilies(_options, ChainDbName);
    _chainDb = RocksDBUtils.OpenRocksDb(
        _options, RocksDbPath(ChainDbName), chainDbColumnFamilies);
}
// A single loader-backed read must return the source value and count one miss.
public void Cache_Single()
{
    var source = GetNumericKeys(1);
    var loader = new CacheLoader<int, string>(source);
    var sut = new LruCache<int, string>(source.Count, loader);

    Assert.IsTrue(sut[0] == source[0]);
    Assert.IsTrue(sut.Misses == 1);
}
// Wires up API-mode collaborators: a progressive sleeper for request pacing,
// an HTTP communicator against the configured endpoint, a job fetcher, a
// plagiarism tester, and a 10 000-entry source cache.
public Instance(Configuration config) : base(config)
{
    Sleeper = new ProgressiveSleeper(config.ApiModeConfiguration.RequestTimeout);
    Logger.Info("Creating HttpClient, endpoint {0}...", config.ApiModeConfiguration.Endpoint);
    Comm = new HttpApiCommunicator(CreateHttpClient(config.ApiModeConfiguration.Endpoint, config.ApiModeConfiguration.Token));
    Fetcher = new JobFetcher(Comm);
    PlagiarismChecker = new PlagiarismTester(config);
    Cache = new LruCache<string, Source>(10000);
}
// Wallet backed by a key store; unlocked accounts are held as protected
// private keys in a bounded LRU cache.
// Throws ArgumentNullException when keyStore, protectedPrivateKeyFactory or
// logManager is null; a null timestamper falls back to Timestamper.Default.
public ProtectedKeyStoreWallet(IKeyStore keyStore, IProtectedPrivateKeyFactory protectedPrivateKeyFactory, ITimestamper timestamper, ILogManager logManager)
{
    _keyStore = keyStore ?? throw new ArgumentNullException(nameof(keyStore));
    _protectedPrivateKeyFactory = protectedPrivateKeyFactory ?? throw new ArgumentNullException(nameof(protectedPrivateKeyFactory));
    _timestamper = timestamper ?? Timestamper.Default;
    // BUG FIX: the original called logManager.GetClassLogger() before the
    // null check, so a null logManager surfaced as NullReferenceException
    // instead of the intended ArgumentNullException. The null-conditional
    // call preserves the ArgumentNullException for both a null manager and
    // a null logger result.
    _logger = logManager?.GetClassLogger() ?? throw new ArgumentNullException(nameof(logManager));
    // maxCapacity - 100, is just an estimate here
    _unlockedAccounts = new LruCache<string, ProtectedPrivateKey>(100, nameof(ProtectedKeyStoreWallet));
}
// Builds a cache of the given capacity seeded with each entry as its own
// key and value.
private static LruCache<string, string> GetInitializedCache(int max, params string[] entries)
{
    var result = new LruCache<string, string>(max);
    foreach (var key in entries)
    {
        result.Add(key, key);
    }
    return result;
}
// Exceeding capacity evicts the oldest key; this cache surfaces a miss on
// Get as ArgumentNullException.
public void GetAndAdd_IfMaxCapacityReached_RemovesCache()
{
    var sut = new LruCache<int, int>(2);
    sut.Add(5, 1);
    sut.Add(4, 1);
    sut.Add(3, 1); // evicts key 5

    Assert.Throws<ArgumentNullException>(() => sut.Get(5));
}
// Refresh on an absent key must report failure.
public void RefreshNotExists()
{
    var sut = new LruCache<string, TestData>(10);
    sut.AddOrUpdate("0", new TestData());

    var refreshed = sut.Refresh("1");

    Assert.IsFalse(refreshed, "Item should not have refreshed in cache");
}
// With capacity 1, every new key evicts the previous one, so the value
// factory runs for each add except a repeat of the currently cached key.
public void GetKeyValuePairRemover_CacheSizeOf1_CreateValueGetsCalledForEveryNewElement()
{
    var sut = new LruCache<int, int>(1);

    AddKeyAndAssertCreateValueIsCalled(sut, 1, true);
    AddKeyAndAssertCreateValueIsCalled(sut, 2, true);  // evicts 1
    AddKeyAndAssertCreateValueIsCalled(sut, 1, true);  // evicts 2
    AddKeyAndAssertCreateValueIsCalled(sut, 1, false); // already cached
}
// Snapshot of Clique voting state at block <paramref name="number"/>.
internal Snapshot(LruCache<Keccak, Address> sigCache, UInt256 number, Keccak hash, SortedList<Address, UInt256> signers, Dictionary<Address, Tally> tally)
{
    SigCache = sigCache;
    Number = number;
    Hash = hash;
    // Copy the signer list under the canonical Clique address ordering.
    Signers = new SortedList<Address, UInt256>(signers, CliqueAddressComparer.Instance);
    Votes = new List<Vote>(); // starts empty; votes are accumulated later
    Tally = tally;
}
// Clear must remove every entry.
public void LruCache_Clear()
{
    var sut = new LruCache<int, string>(2);
    sut.Put(1, "One");
    sut.Put(2, "Two");
    Assert.AreEqual(2, sut.Count);

    sut.Clear();

    Assert.AreEqual(0, sut.Count);
}
// Contains reflects exactly the keys that were put.
public void LruCache_Contains()
{
    var sut = new LruCache<int, string>(2);
    sut.Put(1, "One");
    sut.Put(2, "Two");

    Assert.IsTrue(sut.Contains(1));
    Assert.IsTrue(sut.Contains(2));
    Assert.IsFalse(sut.Contains(3));
}
// Adding an item whose key selector yields null must be rejected
// (BadTestClass presumably exposes a null Identifier — that is what the
// original test relies on).
public void AddDenyTest3()
{
    var sut = new LruCache<string, BadTestClass, BadTestClass>(100, item => item.Identifier);

    Assert.Throws<ArgumentNullException>(() => sut.Add(new BadTestClass()));
}
// Two-tier cache: an LRU of strong references for the hottest retainLimit
// items, backed by a weak-reference map so values evicted from the strong
// tier remain reachable until the GC collects them.
public Cache(int retainLimit)
{
    this.retainLimit = retainLimit; // how many items to retain strong references to
    strongReferenceCache = new LruCache<K, V>(this.retainLimit); // the underlying strong reference cache
    weakReferenceCache = new WeakValueHashMap<K, V>();           // the underlying weak reference cache
}
// Exercises Keys ordering end-to-end: keys are listed most- to
// least-recently used, adds over capacity evict the tail, successful
// lookups promote the entry, and misses leave the order untouched.
public void EntryLruCache()
{
    var cache = new LruCache<string, int>(2);

    // Fresh cache exposes no keys.
    var keys = cache.Keys.ToArray();
    Assert.Empty(keys);

    cache.Add("foo", 1);
    keys = cache.Keys.ToArray();
    Assert.Single(keys);
    Assert.Equal("foo", keys[0]);

    // Newest insertion comes first.
    cache.Add("bar", 2);
    keys = cache.Keys.ToArray();
    Assert.Equal(2, keys.Length);
    Assert.Equal("bar", keys[0]);
    Assert.Equal("foo", keys[1]);

    // Capacity 2: adding "baz" evicts "foo", the least recently used entry.
    cache.Add("baz", 3);
    keys = cache.Keys.ToArray();
    Assert.Equal(2, keys.Length);
    Assert.Equal("baz", keys[0]);
    Assert.Equal("bar", keys[1]);

    // A successful lookup promotes the entry to the front.
    int val;
    bool success = cache.TryGetValue("bar", out val);
    Assert.True(success);
    Assert.Equal(2, val);
    keys = cache.Keys.ToArray();
    Assert.Equal(2, keys.Length);
    Assert.Equal("bar", keys[0]);
    Assert.Equal("baz", keys[1]);

    // Re-reading the front entry keeps the order unchanged.
    success = cache.TryGetValue("bar", out val);
    Assert.True(success);
    Assert.Equal(2, val);
    keys = cache.Keys.ToArray();
    Assert.Equal(2, keys.Length);
    Assert.Equal("bar", keys[0]);
    Assert.Equal("baz", keys[1]);

    // Reading the back entry swaps the order.
    success = cache.TryGetValue("baz", out val);
    Assert.True(success);
    Assert.Equal(3, val);
    keys = cache.Keys.ToArray();
    Assert.Equal(2, keys.Length);
    Assert.Equal("baz", keys[0]);
    Assert.Equal("bar", keys[1]);

    // A miss ("foo" was evicted) must not disturb the ordering.
    success = cache.TryGetValue("foo", out val);
    Assert.False(success);
    keys = cache.Keys.ToArray();
    Assert.Equal(2, keys.Length);
    Assert.Equal("baz", keys[0]);
    Assert.Equal("bar", keys[1]);
}
/// <summary>
/// Sets the information.
/// </summary>
/// <param name="prefix">The prefix.</param>
/// <param name="path">The path.</param>
/// <param name="kind">The kind of info to set.</param>
/// <param name="info">The information to set.</param>
public void SetInfo(string prefix, string path, CachedKind kind, IRingMasterClientCacheDataEntry info)
{
    // Null info means "forget everything cached for this path".
    if (info == null)
    {
        this.Invalidate(prefix, path);
        return;
    }

    LruCache<string, DataEntry> cache = this.GetCacheForPrefix(prefix, true);

    // Double-checked locking: only take the cache lock when the entry is
    // missing, and re-check under the lock before inserting.
    DataEntry entry;
    if (!cache.TryGetValue(path, out entry))
    {
        lock (cache)
        {
            if (!cache.TryGetValue(path, out entry))
            {
                entry = new DataEntry();
                cache.Add(path, entry);
            }
        }
    }

    lock (entry)
    {
        // Copy only the facets selected by the kind flags.
        if ((kind & CachedKind.NodeAcls) != CachedKind.None)
        {
            entry.Acls = info.Acls;
        }

        if ((kind & CachedKind.NodeData) != CachedKind.None)
        {
            entry.Data = info.Data;
        }

        if ((kind & CachedKind.NodeChildren) != CachedKind.None)
        {
            entry.Children = info.Children;
        }

        if ((kind & CachedKind.NodeStats) != CachedKind.None)
        {
            entry.Stat = info.Stat;
        }

        // Prune the entry entirely once it carries no information at all.
        if (entry.Stat == null && entry.Children == null && entry.Data == null && entry.Acls == null)
        {
            cache.Remove(path);
        }
    }

    if (this.debugCache)
    {
        System.Console.Write(string.Format("*** SETINFO {0}/{1}/{2} at {3}", prefix, path, kind, this.GetStack()));
    }
}
// A miss must invoke the supplied value generator exactly once.
public void LruCache_CallsValueGenerator_IfKeyDoesNotExist()
{
    var sut = new LruCache<string, string>(100);
    var generator = new Mock<IValueGenerator<string>>();

    sut.Get("foo", generator.Object.ValueGenerator);

    generator.Verify(m => m.ValueGenerator(), Times.Once);
}
// Stores the injected collaborators for later request handling.
public FilesController(IElevationDataStorage elevationDataStorage, IHttpGatewayFactory httpGatewayFactory, IDataContainerConverterService dataContainerConverterService, LruCache<string, TokenAndSecret> cache)
{
    _elevationDataStorage = elevationDataStorage;
    _httpGatewayFactory = httpGatewayFactory;
    _dataContainerConverterService = dataContainerConverterService;
    _cache = cache;
}
// Constructors

// Binds the prefetch manager to a session and prepares an LRU cache of
// root-container column indexes.
public PrefetchManager(Session session)
{
    ArgumentValidator.EnsureArgumentNotNull(session, "session");
    this.session = session;
    fetcher = new Fetcher(this);
    columnsCache = new LruCache<RootContainerCacheKey, RootContainerCacheEntry>(
        ColumnIndexesCacheSize, cacheEntry => cacheEntry.Key);
}
// Bounded actor registry: when the LRU cache evicts an actor instance, it is
// deactivated asynchronously via an ActionBlock so eviction never blocks the
// caller.
public ActorCollection(int capacity, ILogger<ActorCollection> logger)
{
    capacity.VerifyAssert(x => x > 0, "Capacity must be greater then 0");
    logger.VerifyNotNull(nameof(logger));

    // Deactivation runs on the dataflow block, off the evicting thread.
    _actorRemove = new ActionBlock<ActorInstance>(async x => await x.Instance.Deactivate());
    _logger = logger;
    _actorCache = new LruCache<ActorTypeKey, ActorInstance>(capacity);
    _actorCache.CacheItemRemoved += x => _actorRemove.Post(x.Value);
}
/// <summary>
/// Controller's constructor
/// </summary>
/// <param name="elevationDataStorage">Storage used to look up elevation data</param>
/// <param name="remoteFileFetcherGateway">Gateway for fetching remote files</param>
/// <param name="dataContainerConverterService">Service converting between file formats and data containers</param>
/// <param name="cache">LRU cache of per-user token/secret pairs</param>
public FilesController(IElevationDataStorage elevationDataStorage, IRemoteFileFetcherGateway remoteFileFetcherGateway, IDataContainerConverterService dataContainerConverterService, LruCache<string, TokenAndSecret> cache)
{
    _elevationDataStorage = elevationDataStorage;
    _remoteFileFetcherGateway = remoteFileFetcherGateway;
    _dataContainerConverterService = dataContainerConverterService;
    _cache = cache;
}
// Removing an item whose key selector yields null must be rejected.
public void RemoveDenyTest3()
{
    var sut = new LruCache<string, BadTestClass, BadTestClass>(100, item => item.Identifier);
    var candidate = new BadTestClass();

    Assert.Throws<ArgumentNullException>(() => sut.Remove(candidate));
}
// A negative capacity must be rejected by the constructor.
public void ConstructorDenyTest()
{
    Assert.Throws<ArgumentOutOfRangeException>(
        () => new LruCache<string, TestClass, TestClass>(-1, item => item.Text));
}
// TryGetValue must report success for a key that was added.
public void LruCache_TryGetValue_ReturnsTrueIfValueExists()
{
    const string key = "foo";
    const string expectedValue = "bar";
    var sut = new LruCache<string, string>(100);
    sut.Add(key, expectedValue);

    Assert.True(sut.TryGetValue(key, out var value));
}
// Builds the controller under test with substituted collaborators and a real
// LruCache backed by default ConfigurationData options.
public void TestInitialize()
{
    _clientsFactory = Substitute.For<IClientsFactory>();
    var options = new ConfigurationData();
    var optionsProvider = Substitute.For<IOptions<ConfigurationData>>();
    optionsProvider.Value.Returns(options);
    _cache = new LruCache<string, TokenAndSecret>(optionsProvider, Substitute.For<ILogger>());
    _controller = new OsmTracesController(_clientsFactory,
        Substitute.For<IElevationDataStorage>(),
        Substitute.For<IDataContainerConverterService>(),
        optionsProvider,
        Substitute.For<IImageCreationService>(),
        _cache);
}
// With capacity 1, the second add must evict the first entry.
public void LruCache_AddingValue_OverCapacity_RemovesOldest()
{
    var sut = new LruCache<string, string>(1);
    sut.Add("foo", "bar");
    sut.Add("bar", "foo"); // evicts "foo"

    Assert.False(sut.TryGetValue("foo", out var evicted));
    Assert.True(sut.TryGetValue("bar", out var kept));
}
// Tile download pipeline: per-source download delegates, a queue buffer, a
// 1000-entry in-memory tile cache, offline storages, and a pool of six
// download executants with up to five retries per tile.
private QueueSystem()
{
    _howToDownload = new Dictionary<string, Func<Tile, Task<Tile>>>();
    _tilesBuffer = new QueueBuffer();
    _memoryCache = new LruCache<string, byte[]>(1000);
    _storageCache = new Dictionary<string, IOfflineStorage>();
    _executantPool = new ExecutantPool(6);
    RetryMaxCount = 5;
    _executantPool.DownloadImageCompleted += _executantPool_DownloadImageCompleted;
}
// Builds a cache of the given capacity pre-filled with count sequential
// "keyN"/"valueN" pairs.
private LruCache<string, string> GetCache(int maxItems, int count)
{
    var result = new LruCache<string, string>(maxItems);
    for (var n = 0; n < count; n++)
    {
        result.AddOrUpdate("key" + n, "value" + n);
    }
    return result;
}
// Get with a factory must return the cached value, not the factory result.
public void LruCache_Supports_GettingValue()
{
    const string cacheKey = "foo";
    const string cachedValue = "bar";
    var sut = new LruCache<string, string>(100);
    sut.Add(cacheKey, cachedValue);

    Assert.Equal(cachedValue, sut.Get(cacheKey, () => null));
}
/// <summary>
/// Controller's constructor
/// </summary>
/// <param name="elevationDataStorage">Storage used to look up elevation data</param>
/// <param name="httpGatewayFactory">Factory creating HTTP gateways</param>
/// <param name="dataContainerConverterService">Service converting between file formats and data containers</param>
/// <param name="cache">LRU cache of per-user token/secret pairs</param>
public FilesController(IElevationDataStorage elevationDataStorage, IHttpGatewayFactory httpGatewayFactory, IDataContainerConverterService dataContainerConverterService, LruCache<string, TokenAndSecret> cache)
{
    _elevationDataStorage = elevationDataStorage;
    _httpGatewayFactory = httpGatewayFactory;
    _dataContainerConverterService = dataContainerConverterService;
    _cache = cache;
}
// Releases the cache reference exactly once; finalizer-path calls
// (disposing == false) and repeated calls are no-ops.
private void Dispose(bool disposing)
{
    if (!disposing || _disposed)
    {
        return;
    }

    _cache = null;
    _disposed = true;
}
// Filling past capacity must discard the oldest entry.
public void Add_MaxSizeIsReached_OldestItemDiscarded()
{
    var cache = new LruCache<string, string>(5);
    for (var i = 1; i <= 6; i++)
    {
        cache.Add("k" + i, "v" + i);
    }

    Assert.IsFalse(cache.ContainsKey("k1"));
}
// Replays a page-request trace against a 3-frame cache and checks both the
// final resident pages and the total fault count.
public void Complex()
{
    var cache = new LruCache(3);
    int[] requests = { 0, 1, 2, 3, 2, 3, 0, 4, 5, 2, 3, 1, 4, 3, 2, 6, 3, 2, 1, 2 };

    foreach (var request in requests)
    {
        cache.AddPage(request);
    }

    CollectionAssert.AreEqual(new[] { 2, 1, 3 }, cache.Pages);
    Assert.AreEqual(14, cache.PageFaults);
}
// Only the ten most recently inserted keys may survive in a size-10 cache.
public void TestSizeIsLimited()
{
    var sut = new LruCache<string, int>(10);

    for (var n = 0; n < 20; n++)
    {
        sut[n.ToString()] = n;
    }

    Assert.AreEqual(10, sut.Count);
    for (var n = 10; n < 20; n++)
    {
        Assert.IsTrue(sut.ContainsKey(n.ToString()));
    }
}
// TryLookup must find stored keys with their values and miss absent keys.
public void TestLookup()
{
    var sut = new LruCache<int, string>(10);
    sut.InsertOrUpdate(1, "test");
    sut.InsertOrUpdate(2, "test");

    string value;
    Assert.IsTrue(sut.TryLookup(1, out value));
    Assert.AreEqual("test", value);
    Assert.IsTrue(sut.TryLookup(2, out value));
    Assert.AreEqual("test", value);
    Assert.IsFalse(sut.TryLookup(3, out value));
}
// Stores the injected collaborators for later request handling.
public OsmController(IHttpGatewayFactory httpGatewayFactory, IDataContainerConverterService dataContainerConverterService, ICoordinatesConverter coordinatesConverter, IGpxSplitterService gpxSplitterService, IElasticSearchGateway elasticSearchGateway, LruCache<string, TokenAndSecret> cache)
{
    _httpGatewayFactory = httpGatewayFactory;
    _dataContainerConverterService = dataContainerConverterService;
    _coordinatesConverter = coordinatesConverter;
    _gpxSplitterService = gpxSplitterService;
    _cache = cache;
    _elasticSearchGateway = elasticSearchGateway;
}
// Adding a key that already exists must throw ArgumentException.
public void Add_DuplicateKeyAdded_ThrowsArgumentException()
{
    var lru = new LruCache<string, string>(5);
    lru.Add("k1", "v1");

    // Idiom: Assert.Throws replaces the try/catch + Assert.Fail/Assert.Pass
    // pattern, and additionally fails (instead of erroring opaquely) when a
    // different exception type escapes.
    Assert.Throws<ArgumentException>(() => lru.Add("k1", "v1"));
}
// Each two-character value contributes 2 to CurrentSize.
public void Add_ItemIsAdded_CurrentSizeIncreasesByItemSize()
{
    var cache = new LruCache<string, string>(1024, 2);
    Assert.AreEqual(0, cache.CurrentSize);

    for (var i = 1; i <= 4; i++)
    {
        cache.Add("k" + i, "v" + i);
        Assert.AreEqual(i * 2, cache.CurrentSize);
    }
    cache.Add("k5", "v5");
}
// Reading an entry promotes it, changing which key gets evicted next.
public void CacheAccessShouldBumpItToTheFront()
{
    var sut = new LruCache<int, int>(3);
    sut[1] = 1;
    sut[2] = 2;
    sut[3] = 3;

    // Reading key 1 promotes it, leaving 2 as the eviction candidate.
    var unused = sut[1];
    sut[4] = 4;

    Assert.AreEqual(sut[1], 1);
    Assert.AreEqual(sut[3], 3);
    Assert.AreEqual(sut[4], 4);
    Assert.IsFalse(sut.ContainsKey(2));
}
// TryGetValue must return the region endpoint stored for each bucket key.
public void TestLruCacheTryGetValue()
{
    var cache = new LruCache<string, RegionEndpoint>(5);
    cache.AddOrUpdate("my-bucket-us-east-1", RegionEndpoint.USEast1);
    cache.AddOrUpdate("my-bucket-us-west-2", RegionEndpoint.USWest2);
    cache.AddOrUpdate("my-bucket-ap-northeast-2", RegionEndpoint.APNortheast2);
    cache.AddOrUpdate("my-bucket-sa-east-1", RegionEndpoint.SAEast1);

    RegionEndpoint region;
    cache.TryGetValue("my-bucket-us-west-2", out region);
    Assert.AreEqual(RegionEndpoint.USWest2, region);
    cache.TryGetValue("my-bucket-ap-northeast-2", out region);
    Assert.AreEqual(RegionEndpoint.APNortheast2, region);
}
// DiscardedCount must increment once per eviction beyond capacity.
public void Add_MaxSizeIsReached_DiscardedCountIncreases()
{
    var cache = new LruCache<string, string>(5);
    for (var i = 1; i <= 5; i++)
    {
        cache.Add("k" + i, "v" + i);
    }
    Assert.AreEqual(0, cache.DiscardedCount);

    cache.Add("k6", "v6");
    Assert.AreEqual(1, cache.DiscardedCount);
    cache.Add("k7", "v7");
    Assert.AreEqual(2, cache.DiscardedCount);
}
// The least recently touched key is the one evicted when capacity is hit.
public void LruCache_LeastUsedObjectIsRemovedWhenCapacityIsReached()
{
    var sut = new LruCache<int, string>(2);
    sut.Put(1, "One");
    sut.Put(2, "Two");
    Assert.IsNotNull(sut.Get(1));
    Assert.IsNotNull(sut.Get(2));
    Assert.AreEqual(2, sut.Count);

    // After the reads above, 1 is least recently used, so Put(3) evicts it.
    sut.Put(3, "Three");
    Assert.AreEqual(2, sut.Count);
    Assert.IsNull(sut.Get(1));
    Assert.IsNotNull(sut.Get(2));
    Assert.IsNotNull(sut.Get(3));

    // The miss on 1 and reads of 2 then 3 leave 2 as least recently used.
    sut.Put(4, "Four");
    Assert.AreEqual(2, sut.Count);
    Assert.IsNull(sut.Get(2));
    Assert.IsNotNull(sut.Get(3));
    Assert.IsNotNull(sut.Get(4));
}
// Hammers Put/Get for the same key from two concurrent workers and verifies
// every read observes the written value.
public void GetとPutがスレッドセーフである()
{
    LruCache lruCache = new LruCache(5, 1);

    Action worker = () =>
    {
        for (int i = 0; i < 1000 * 1000; i++)
        {
            lruCache.Put("a", "abc");
            Assert.AreEqual("abc", lruCache.Get("a"));
        }
    };

    // BUG FIX: the original used Delegate.BeginInvoke and never waited, so
    // the test could pass before either worker ran and assertion failures on
    // the workers were unobservable (BeginInvoke is also unsupported on
    // .NET Core). Run both workers on the thread pool and block until both
    // complete so failures propagate (via AggregateException from WaitAll).
    var worker1 = System.Threading.Tasks.Task.Run(worker);
    var worker2 = System.Threading.Tasks.Task.Run(worker);
    System.Threading.Tasks.Task.WaitAll(worker1, worker2);
}
/// <summary>
/// Initializes a new instance of the <see cref="Couchbase.Lite.Database"/> class.
/// </summary>
/// <param name="path">Absolute filesystem path of the database.</param>
/// <param name="manager">Owning <see cref="Manager"/>.</param>
internal Database(String path, Manager manager)
{
    Debug.Assert(System.IO.Path.IsPathRooted(path)); //path must be absolute
    Path = path;
    Name = FileDirUtils.GetDatabaseNameFromPath(path);
    Manager = manager;
    // Bounded document cache plus a weak-reference cache for unsaved revisions.
    DocumentCache = new LruCache<string, Document>(MaxDocCacheSize);
    UnsavedRevisionDocumentCache = new Dictionary<string, WeakReference>();

    // FIXME: Not portable to WinRT/WP8.
    ActiveReplicators = new List<Replication>();
    AllReplicators = new List<Replication>();

    _changesToNotify = new AList<DocumentChange>();

    StartTime = DateTime.UtcNow.ToMillisecondsSinceEpoch();
    MaxRevTreeDepth = DefaultMaxRevs;
}
// Precomputes the page-local Hilbert-curve traversal order, sets up the page
// LRU cache, the save directory, scratch buffers, and the JSON writer.
private PageManager()
{
    //precompute the hilbert curve, since it will be the same for every page
    var bitsPerAxis = (int)(Math.Ceiling(Math.Log(Page.PageSizeInBlocks, 2)));
    m_hilbertCurve = new int[BlockCount];
    for (uint index = 0; index < BlockCount; ++index)
    {
        // Map the 1-D Hilbert index to 3-D coordinates, then to a block index.
        var arr = HilbertCurve.HilbertAxes(index, 3, bitsPerAxis);
        var blockIndex = Page.BlockIndexFromRelativePosition((int)arr[0], (int)arr[1], (int)arr[2]);
        m_hilbertCurve[index] = blockIndex;
    }

    m_pageCache = new LruCache<Page>(25);

    var executableDir = Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().GetName().CodeBase);
    Debug.Assert(!String.IsNullOrEmpty(executableDir));
    // NOTE(review): Substring(6) appears to strip a leading "file:\" URI
    // prefix left in CodeBase — fragile; confirm on all target platforms.
    m_dataDirectory = Path.Combine(executableDir, "Saves").Substring(6);

    m_buffers = new Stack<byte[]>();
    m_buffers.Push(new byte[BufferSize]);
    m_pagesPendingWrite = new HashSet<string>();
    m_jsonWriter = new JsonWriter(m_dataDirectory);
}
// Game drawing surface: sets the background, positions the initial drag
// object, and warms a bitmap LRU cache with the target bitmap.
public DrawView(Context context, int deviceCacheSize) : base(context)
{
    this.deviceCacheSize = deviceCacheSize;

    // For touch events to work
    Focusable = true;

    Point dragObjectStartPoint = new Point();

    // Can be changed to any other BG image
    SetBackgroundResource(Resource.Drawable.gameBackground);

    //The divisor to scale the pixels according to screen density
    // 0.5 = ldpi
    // 1 = mdpi
    // 2 = hdpi
    // 4 = xhdpi
    // TODO get device screen density and create
    // switch enum depending on acquired density
    const int densityFactor = 1;

    // Setup starting position of
    // object to drag.
    int startx = 50 / densityFactor;
    int startY = 20 / densityFactor;
    dragObjectStartPoint.X = startx;
    dragObjectStartPoint.Y = startY;

    // Create one object to drag, loop this to create more
    dragObjects[0] = new DragObject(context.Resources, Resource.Drawable.gameObject, dragObjectStartPoint);

    // Setup our cache
    bitmapCache = new LruCache(this.deviceCacheSize);

    // Get bitmap and save to our cache
    Bitmap gameTargetBitmap = BitmapFactory.DecodeResource(this.Resources, Resource.Drawable.gameTarget, null);
    AddBitmapToMemoryCache("target", gameTargetBitmap);
}
// Inserting past the high-water mark (9) evicts down to the low-water mark
// (7): keys 0 and 1 go, keys 2..8 remain.
public void TestEvictionOnInsert()
{
    var sut = new LruCache<int, string>(10, 9, 7);
    string value;

    for (var key = 0; key < 8; key++)
    {
        sut.InsertOrUpdate(key, "test:" + key);
    }
    for (var key = 0; key < 8; key++)
    {
        Assert.IsTrue(sut.TryLookup(key, out value), "Could not find entry for key {0}", key);
    }

    // Ninth insert crosses the eviction threshold.
    sut.InsertOrUpdate(8, "test8");

    Assert.IsFalse(sut.TryLookup(0, out value));
    Assert.IsFalse(sut.TryLookup(1, out value));
    for (var key = 2; key < 9; key++)
    {
        Assert.IsTrue(sut.TryLookup(key, out value), "Could not find entry for key {0} after evictions", key);
    }
}
// Updating an existing key must overwrite its value without evicting
// anything.
public void TestOverwriteOnInsert()
{
    var cache = new LruCache<int, string>(10, 9, 7);
    string retrieved;

    for (int i = 0; i < 8; i++)
    {
        cache.InsertOrUpdate(i, "test" + i);
    }

    // BUG FIX: TryLookup returns bool, and Assert.IsNotNull on a boxed bool
    // can never fail (false boxes to a non-null object). Assert the lookup
    // result itself, as the sibling tests do.
    for (int i = 0; i < 8; i++)
    {
        Assert.IsTrue(cache.TryLookup(i, out retrieved));
    }

    cache.InsertOrUpdate(3, "updated");

    // Insert should not have caused eviction of any items
    for (int i = 0; i < 8; i++)
    {
        Assert.IsTrue(cache.TryLookup(i, out retrieved));
    }

    // Item with key 3 should have been updated
    Assert.IsTrue(cache.TryLookup(3, out retrieved));
    Assert.IsNotNull(retrieved);
    Assert.AreEqual("updated", retrieved);
}
/// <summary>Constructor. Path must be absolute; sets up document cache,
/// replicator sets, and change-notification bookkeeping.</summary>
internal Database(String path, Manager manager)
{
#if PORTABLE
    Debug.Assert((path.StartsWith("/", StringComparison.CurrentCultureIgnoreCase)));
#else
    Debug.Assert((path.StartsWith("/", StringComparison.InvariantCultureIgnoreCase)));
#endif
    //path must be absolute
    Path = path;
    Name = FileDirUtils.GetDatabaseNameFromPath(path);
    Manager = manager;
    DocumentCache = new LruCache<string, Document>(MaxDocCacheSize);

    // TODO: Make Synchronized ICollection
    ActiveReplicators = new HashSet<Replication>();
    AllReplicators = new HashSet<Replication>();

    ChangesToNotify = new AList<DocumentChange>();

    StartTime = DateTime.UtcNow.ToMillisecondsSinceEpoch();
    MaxRevTreeDepth = DefaultMaxRevs;
}
// Constructor. Path must be absolute; a null name is derived from the path.
// Prepares the document caches, replicator lists, change bookkeeping, and a
// single-threaded task scheduler.
internal Database(string path, string name, Manager manager)
{
    Debug.Assert(System.IO.Path.IsPathRooted(path)); //path must be absolute
    Path = path;
    Name = name ?? FileDirUtils.GetDatabaseNameFromPath(path);
    Manager = manager;
    DocumentCache = new LruCache<string, Document>(MAX_DOC_CACHE_SIZE);
    UnsavedRevisionDocumentCache = new Dictionary<string, WeakReference>();

    // FIXME: Not portable to WinRT/WP8.
    ActiveReplicators = new List<Replication>();
    AllReplicators = new List<Replication>();

    _changesToNotify = new List<DocumentChange>();
    // All database work funnels through one thread-pool task at a time.
    Scheduler = new TaskFactory(new SingleTaskThreadpoolScheduler());
    StartTime = DateTime.UtcNow.ToMillisecondsSinceEpoch();
}
// Looks up a document by id: prefers the unsaved-revision (weak) cache, then
// the document LRU cache; otherwise constructs the Document and caches it in
// both. Returns null for blank ids and, when mustExist is set, for documents
// with no current revision.
private Document GetDocument(string docId, bool mustExist)
{
    if (StringEx.IsNullOrWhiteSpace(docId))
    {
        return null;
    }

    // Weak reference may have been collected; fall back to the LRU cache.
    var unsavedDoc = UnsavedRevisionDocumentCache.Get(docId);
    var doc = unsavedDoc != null
        ? (Document)unsavedDoc.Target
        : DocumentCache.Get(docId);

    if (doc != null)
    {
        if (mustExist && doc.CurrentRevision == null)
        {
            return null;
        }

        return doc;
    }

    doc = new Document(this, docId);
    if (mustExist && doc.CurrentRevision == null)
    {
        return null;
    }

    // DocumentCache can be null after Close(); recreate it lazily.
    if (DocumentCache == null)
    {
        DocumentCache = new LruCache<string, Document>(MAX_DOC_CACHE_SIZE);
    }

    DocumentCache[docId] = doc;
    UnsavedRevisionDocumentCache[docId] = new WeakReference(doc);
    return doc;
}
// Closes the database: closes views, detaches active replicators, closes
// storage (best-effort), and resets the document caches. Always forgets the
// database from its Manager; returns false only when storage failed to
// close.
internal bool Close()
{
    var success = true;
    if (_isOpen)
    {
        Log.D("Closing database at {0}", Path);
        if (_views != null)
        {
            foreach (var view in _views)
            {
                view.Value.Close();
            }
        }

        if (ActiveReplicators != null)
        {
            // Copy first: DatabaseClosing() may mutate ActiveReplicators.
            var activeReplicatorCopy = new Replication[ActiveReplicators.Count];
            ActiveReplicators.CopyTo(activeReplicatorCopy, 0);
            foreach (var repl in activeReplicatorCopy)
            {
                repl.DatabaseClosing();
            }

            ActiveReplicators = null;
        }

        try
        {
            // Best-effort close; a storage failure is reported via the
            // return value rather than thrown.
            Storage.Close();
        }
        catch (Exception)
        {
            success = false;
        }

        Storage = null;

        _isOpen = false;
        UnsavedRevisionDocumentCache.Clear();
        // Replace rather than clear, preserving the configured capacity.
        DocumentCache = new LruCache<string, Document>(DocumentCache.MaxSize);
    }

    Manager.ForgetDatabase(this);
    return success;
}
// Returns a cache of the given capacity pre-populated with count sequential
// key/value pairs.
private LruCache<string, string> GetCache(int maxItems, int count)
{
    var lru = new LruCache<string, string>(maxItems);
    for (var index = 0; index < count; index++)
    {
        lru.AddOrUpdate($"key{index}", $"value{index}");
    }
    return lru;
}