/// <summary> 
/// Reads events up to the last one available. Pre-loads the projection 
/// from its cache, if available, to reduce the necessary work. 
/// </summary>
public async Task InitializeAsync(CancellationToken cancel = default(CancellationToken))
{
    var sw = Stopwatch.StartNew();
    try
    {
        // Load the projection first, so events it already covers can be discarded.
        _log?.Info($"{sw.Elapsed:mm':'ss'.'ff} [ES init] loading projections.");

        await _projection.TryLoadAsync(cancel).ConfigureAwait(false);

        // Resume right after the projection's sequence, unless one of the
        // per-commit callbacks needs to start earlier than that.
        var catchUp = _projection.Sequence + 1;
        if (_onEachCommitted != null)
        {
            foreach (var handler in _onEachCommitted)
            {
                if (handler.Start < catchUp)
                    catchUp = handler.Start;
            }
        }

        _log?.Info($"{sw.Elapsed:mm':'ss'.'ff} [ES init] advancing stream to seq {catchUp}.");

        await Stream.DiscardUpTo(catchUp, cancel).ConfigureAwait(false);

        if (Stream.Sequence < catchUp)
        {
            _log?.Warning(
                $"{sw.Elapsed:mm':'ss'.'ff} [ES init] invalid seq {catchUp} > {Stream.Sequence}, resetting everything.");

            // Cache is apparently beyond the available sequence. Could happen in
            // development environments with non-persistent events but persistent
            // caches. Treat cache as invalid and start from the beginning.
            Stream.Reset();
            _projection.Reset();
        }
    }
    catch (Exception e)
    {
        _log?.Warning(
            $"{sw.Elapsed:mm':'ss'.'ff} [ES init] error while reading cache.", e);

        // Something went wrong when reading the cache. Stop.
        Stream.Reset();
        _projection.Reset();
    }

    // Start reading everything
    _log?.Info($"{sw.Elapsed:mm':'ss'.'ff} [ES init] catching up with stream.");

    await CatchUpAsync(cancel).ConfigureAwait(false);

    _log?.Info($"{sw.Elapsed:mm':'ss'.'ff} [ES init] DONE !");
}
/// <summary> 
/// Catch up with the stream (updating the state) until there are no new 
/// events available. 
/// </summary>
public async Task CatchUpAsync(CancellationToken cancel = default)
{
    Func<bool> finishFetch;

    // Local variable, to avoid reaching the limit when not doing the
    // initial catch-up.
    var eventsSinceLastCacheLoad = 0u;

    do
    {
        var fetchTask = Stream.BackgroundFetchAsync(cancel);

        // We have started fetching the next batch of events in
        // the background, so we might as well start processing
        // those we already have. This pattern is optimized for
        // when fetching events takes longer than processing them,
        // and remains safe (i.e. no runaway memory usage) when
        // the reverse is true.
        eventsSinceLastCacheLoad += CatchUpLocal();

        // Maybe we have reached the event count limit before our
        // save/load cycle ?
        if (eventsSinceLastCacheLoad >= EventsBetweenCacheSaves)
        {
            eventsSinceLastCacheLoad = 0;
            var sw = Stopwatch.StartNew();

            // FIX: both awaits below now use ConfigureAwait(false), consistent
            // with InitializeAsync — this is library code with no need to
            // resume on the captured synchronization context.
            if (await _projection.TrySaveAsync(cancel).ConfigureAwait(false))
            {
                // Reset first, to release any used memory.
                _projection.Reset();

                await _projection.TryLoadAsync(cancel).ConfigureAwait(false);

                // The round-trip must restore the exact sequence; anything
                // else means the cache write or read is broken.
                if (_projection.Sequence != Stream.Sequence)
                    throw new InvalidOperationException(
                        "Projection Save/Load cycle failed to restore sequence.");

                // FIX: was `_log.Info(...)` — every other call site in this
                // file uses `_log?.`, so this threw NullReferenceException
                // whenever no logger was configured.
                _log?.Info($"[ES read] cache save/load cycle in {sw.Elapsed} at seq {_projection.Sequence}.");
            }
        }

        finishFetch = await fetchTask.ConfigureAwait(false);

    } while (finishFetch());

    // We reach this point if 1° all events cached in the stream have
    // been processed and 2° the fetch operation returned no new events
    NotifyRefresh();
}
// Applies two events to a two-projection group where only the string
// projection has cached state; verifies both sub-states after catch-up.
public async Task multi_apply_separate()
{
    var cache = new Mock<IProjectionCacheProvider>();

    // FIX: idiomatic fluent Moq setup instead of calling the
    // ReturnsExtensions.ReturnsAsync static method directly.
    cache.Setup(c => c.OpenReadAsync("string"))
        .ReturnsAsync(new MemoryStream(new byte[]
        {
            0x02, 0x00, 0x00, 0x00, // Current position (beginning)
            0x30, 0x30, 0x30, 0x30, // Event data "0000"
            0x02, 0x00, 0x00, 0x00  // Current position (end)
        }));

    var str = MockString();
    str.Setup(p => p.TryLoadAsync(It.IsAny<Stream>(), It.IsAny<CancellationToken>()))
        .Returns<Stream, CancellationToken>((s, c) =>
        {
            // Restore the four payload bytes as the projection's string state.
            var bytes = new byte[4];
            s.Read(bytes, 0, 4);
            return Task.FromResult(Encoding.UTF8.GetString(bytes));
        });

    var reified = new ReifiedProjectionGroup<int, State>(
        new IProjection<int>[] { MockInteger().Object, str.Object }, cache.Object);

    await reified.TryLoadAsync(CancellationToken.None);

    reified.Apply(1U, 10);
    reified.Apply(4U, 14);

    // FIX: removed redundant casts — both sides already carry the asserted types.
    Assert.AreEqual(4U, reified.Sequence);
    Assert.AreEqual(24, reified.Current.I.Value);
    Assert.AreEqual("0000(14:4)", reified.Current.S);
}
// Applies two events to a two-projection group where only the string
// projection has cached state; verifies both sub-states after catch-up.
public async Task multi_apply_separate()
{
    // Seed the in-memory cache for the "string" projection:
    // sequence marker (2), four payload bytes ("0000"), sequence marker (2).
    var cacheProvider = new Testing.InMemoryCache
    {
        {
            "string",
            new byte[]
            {
                0x02, 0x00, 0x00, 0x00, // Current position (beginning)
                0x30, 0x30, 0x30, 0x30, // Event data "0000"
                0x02, 0x00, 0x00, 0x00  // Current position (end)
            }
        }
    };

    var stringProjection = MockString();
    stringProjection
        .Setup(p => p.TryLoadAsync(It.IsAny<Stream>(), It.IsAny<CancellationToken>()))
        .Returns<Stream, CancellationToken>((stream, _) =>
        {
            // Restore the four payload bytes as the projection's string state.
            var buffer = new byte[4];
            stream.Read(buffer, 0, 4);
            return Task.FromResult(Encoding.UTF8.GetString(buffer));
        });

    var group = new ReifiedProjectionGroup<int, State>(
        new IProjection<int>[] { MockInteger().Object, stringProjection.Object },
        cacheProvider);

    await group.TryLoadAsync(CancellationToken.None);

    group.Apply(1U, 10);
    group.Apply(4U, 14);

    Assert.Equal(4U, group.Sequence);
    Assert.Equal(24, group.Current.I.Value);
    Assert.Equal("0000(14:4)", group.Current.S);
}