private void LoadDocumentsFromDisk(Etag etag, Etag untilEtag)
{
    // Read the (etag, untilEtag] document range from storage and push every
    // document into the prefetching queue under a single write lock.
    var documents = GetJsonDocsFromDisk(etag, untilEtag);

    using (prefetchingQueue.EnterWriteLock())
    {
        foreach (var document in documents)
        {
            prefetchingQueue.Add(document);
        }
    }
}
public void ConcurrentJsonDocumentSortedListShouldSortByEtag()
{
    // Arrange: four documents with strictly ascending etags (1..4),
    // inserted deliberately out of order.
    var list = new ConcurrentJsonDocumentSortedList();

    var docs = new JsonDocument[4];
    for (var i = 0; i < docs.Length; i++)
    {
        docs[i] = new JsonDocument { Etag = EtagUtil.Increment(Etag.Empty, i + 1) };
    }

    using (list.EnterWriteLock())
    {
        // Same insertion order as before the rewrite: 4, 2, 1, 3.
        list.Add(docs[3]);
        list.Add(docs[1]);
        list.Add(docs[0]);
        list.Add(docs[2]);
    }

    // Act + Assert: dequeuing must yield the documents in ascending etag order.
    foreach (var expected in docs)
    {
        JsonDocument dequeued;
        Assert.True(list.TryDequeue(out dequeued));
        Assert.Equal(expected.Etag, dequeued.Etag);
    }
}
private void LoadDocumentsFromDisk(Etag etag, Etag untilEtag)
{
    // Synchronously load the (etag, untilEtag] document range from storage
    // into the prefetching queue, timing the disk read for diagnostics.
    var stopwatch = Stopwatch.StartNew();
    var documents = GetJsonDocsFromDisk(etag, untilEtag);

    if (log.IsDebugEnabled)
    {
        // Size computation stays inside the guard so it only runs when
        // debug logging is actually enabled.
        var sizeInKb = documents.Sum(x => x.SerializedSizeOnDisk) / 1024;
        log.Debug("Loaded {0} documents ({3:#,#;;0} kb) from disk, starting from etag {1}, took {2}ms",
            documents.Count, etag, stopwatch.ElapsedMilliseconds, sizeInKb);
    }

    // if we are forced to load from disk in a sync fashion, let us start the process
    // of making sure that we don't need to do this next time by starting an async load
    MaybeAddFutureBatch(documents);

    using (prefetchingQueue.EnterWriteLock())
    {
        foreach (var document in documents)
        {
            prefetchingQueue.Add(document);
        }
    }
}