/// <summary>
/// Writes 20 events, fetches them all through a fresh stream, discards up to
/// seq 11, and checks the remaining events (integers 10..19) come back in order.
/// </summary>
public async Task discard_after_fetch()
{
    var driver = new MemoryStorageDriver();

    // Persist events 0..19 through a first stream instance.
    var stream = new EventStream<IStreamEvent>(driver);
    await stream.WriteAsync(Enumerable.Range(0, 20)
        .Select(i => (IStreamEvent)new IntegerEvent(i)).ToArray());

    // Re-open the stream and fetch everything BEFORE discarding.
    stream = new EventStream<IStreamEvent>(driver);
    while (await stream.FetchAsync()) { }

    await stream.DiscardUpTo(11);
    // NOTE(review): Sequence is asserted to be 10 after discarding up to 11 —
    // confirm the exact DiscardUpTo/Sequence semantics against EventStream docs.
    Assert.Equal((uint)10, (uint)stream.Sequence);

    // Remaining events must be the integers 10..19, in order.
    var next = 10;
    IStreamEvent e;
    while ((e = stream.TryGetNext()) != null)
    {
        Assert.Equal(next, ((IntegerEvent)e).Integer);
        ++next;
    }

    // Fix: xUnit's Assert.Equal takes the EXPECTED value first; the original
    // call had the arguments swapped, producing misleading failure messages.
    Assert.Equal(20, next);
}
/// <summary>
/// Discards events up to <paramref name="requestedSeq"/> on a fresh stream over
/// the large-event store, then verifies both the resulting sequence and the
/// next readable event.
/// </summary>
private async Task DiscardAndAssert(uint requestedSeq)
{
    var largeStream = new EventStream<LargeEvt>(storeWithLargeEvents);

    var returned = await largeStream.DiscardUpTo(requestedSeq);

    // The stream's sequence and the value returned by DiscardUpTo must both
    // equal the expected post-discard sequence.
    var expected = ExpectedSequenceAfterDiscard(lastEvent, requestedSeq);
    Assert.Equal(expected, largeStream.Sequence);
    Assert.Equal(returned, largeStream.Sequence);

    // A requested seq of 0 behaves like 1 (presumably sequences start at 1 —
    // the first readable event after discarding "up to 0" carries seq 1).
    var firstReadable = requestedSeq == 0 ? 1u : requestedSeq;

    var nextEvt = await largeStream.TryGetNextAsync();
    if (firstReadable <= lastEvent)
    {
        // Events remain: the next one must carry the requested sequence.
        Assert.NotNull(nextEvt);
        Assert.Equal(firstReadable, (uint)nextEvt.IntSeq);
    }
    else
    {
        // Everything was discarded: nothing left to read.
        Assert.Null(nextEvt);
    }
}
/// <summary>
/// Writes 20 events, re-opens the stream, discards up to seq 11 WITHOUT
/// fetching first, then drains the stream via background fetches and checks
/// the remaining events (integers 10..19) arrive in order.
/// </summary>
public async Task discard()
{
    var driver = new MemoryStorageDriver();

    // Persist events 0..19 through a first stream instance.
    var stream = new EventStream<IStreamEvent>(driver);
    await stream.WriteAsync(Enumerable.Range(0, 20)
        .Select(i => (IStreamEvent)new IntegerEvent(i)).ToArray());

    // Re-open and discard before any fetch has happened.
    stream = new EventStream<IStreamEvent>(driver);
    await stream.DiscardUpTo(11);

    // Drain with the background-fetch pattern: kick off a fetch, consume the
    // already-buffered events, then await the fetch to learn if more remain.
    Func<bool> shouldContinue;
    var next = 10;
    do
    {
        var task = stream.BackgroundFetchAsync();
        IStreamEvent e;
        while ((e = stream.TryGetNext()) != null)
        {
            Assert.Equal(next, ((IntegerEvent)e).Integer);
            ++next;
        }
        shouldContinue = await task;
    } while (shouldContinue());

    // Fix: xUnit's Assert.Equal takes the EXPECTED value first; the original
    // call had the arguments swapped, producing misleading failure messages.
    Assert.Equal(20, next);
}
/// <summary>
/// Reads up events up to the last one available. Pre-loads the projection from its cache,
/// if available, to reduce the necessary work.
/// </summary>
public async Task InitializeAsync(CancellationToken cancel = default(CancellationToken))
{
    var timer = Stopwatch.StartNew();
    try
    {
        // Load the projection from its cache so we can skip already-applied events.
        _log?.Info($"{timer.Elapsed:mm':'ss'.'ff} [ES init] loading projections.");
        await _projection.TryLoadAsync(cancel).ConfigureAwait(false);

        // Resume right after the projection's sequence, but never past the
        // earliest Start of any registered per-commit callback.
        var target = _projection.Sequence + 1;
        if (_onEachCommitted != null)
        {
            foreach (var handler in _onEachCommitted)
            {
                if (handler.Start < target)
                {
                    target = handler.Start;
                }
            }
        }

        _log?.Info($"{timer.Elapsed:mm':'ss'.'ff} [ES init] advancing stream to seq {target}.");
        await Stream.DiscardUpTo(target, cancel).ConfigureAwait(false);

        if (Stream.Sequence < target)
        {
            _log?.Warning(
                $"{timer.Elapsed:mm':'ss'.'ff} [ES init] invalid seq {target} > {Stream.Sequence}, resetting everything.");

            // Cache is apparently beyond the available sequence. Could happen in
            // development environments with non-persistent events but persistent
            // caches. Treat cache as invalid and start from the beginning.
            Stream.Reset();
            _projection.Reset();
        }
    }
    catch (Exception ex)
    {
        _log?.Warning(
            $"{timer.Elapsed:mm':'ss'.'ff} [ES init] error while reading cache.", ex);

        // Something went wrong when reading the cache. Stop.
        Stream.Reset();
        _projection.Reset();
    }

    // Start reading everything
    _log?.Info($"{timer.Elapsed:mm':'ss'.'ff} [ES init] catching up with stream.");
    await CatchUpAsync(cancel).ConfigureAwait(false);

    _log?.Info($"{timer.Elapsed:mm':'ss'.'ff} [ES init] DONE !");
}
/// <summary>
/// Writes 20 events, fetches them all, then discards past the last available
/// sequence and checks the stream's sequence stays at the last written one.
/// </summary>
public async Task discard_above_end()
{
    var driver = new MemoryStorageDriver();

    // Persist events 0..19 through a first stream instance.
    var writer = new EventStream<IStreamEvent>(driver);
    var payload = Enumerable.Range(0, 20)
        .Select(i => (IStreamEvent)new IntegerEvent(i))
        .ToArray();
    await writer.WriteAsync(payload);

    // Re-open the stream and fetch everything that was written.
    var reader = new EventStream<IStreamEvent>(driver);
    while (await reader.FetchAsync())
    {
    }

    // Discard well beyond the end (only 20 events exist).
    await reader.DiscardUpTo(30);

    // The sequence does not advance past the last written event.
    Assert.Equal((uint)20, (uint)reader.Sequence);
}
/// <summary> Fetches a stream's events and places it in <see cref="Fetched"/>. </summary>
/// <param name="args">
/// args[0]: in-memory name to store the result under;
/// args[1]: connection string for the remote stream (parsed by <c>Parse</c>);
/// args[2] (optional): sequence to discard up to before fetching.
/// </param>
private static void CmdFetch(string[] args)
{
    // Not enough arguments: print usage and bail out.
    if (args.Length < 2)
    {
        Console.WriteLine("Usage: fetch <name> <stream> <limit>?");
        Console.WriteLine("Downloads events from remote stream, stores in-memory.");
        Console.WriteLine(" limit: if provided, only fetch events after this seq (included).");
        return;
    }

    // Refuse to overwrite an already-fetched stream of the same name.
    var name = args[0];
    if (Fetched.ContainsKey(name))
    {
        Console.WriteLine("Fetched stream `{0}` already exists.", name);
        Console.WriteLine("Use `drop {0}` to drop it.", name);
        return;
    }

    // Optional third argument: starting sequence (0 means "from the beginning").
    var limit = 0u;
    if (args.Length == 3)
    {
        if (!uint.TryParse(args[2], out limit))
        {
            Console.WriteLine("Could not parse limit: {0}", args[2]);
            return;
        }
    }

    var sw = Stopwatch.StartNew();

    // Open a read-only, traced connection to the remote stream.
    var connection = new StorageConfiguration(Parse(args[1])) { ReadOnly = true };
    connection.Trace = true;
    var driver = connection.Connect();
    using (Release(driver))
    {
        var stream = new EventStream<JObject>(driver);
        var list = new List<EventData>();
        var start = 0L;

        Status("Connecting...");

        // This is a synchronous CLI command, so block on the async work.
        // NOTE(review): Task.Run(...).Wait() is intentional here (console app,
        // no sync context) — do not convert callers to async lightly.
        Task.Run((Func<Task>)(async () =>
        {
            // Report the remote stream's total size and last sequence up front
            // so the progress indicator below has a denominator.
            Status("Current size:");
            var maxPos = await driver.GetPositionAsync();
            Console.WriteLine("Current size: {0:F2} MB", maxPos / (1024.0 * 1024.0));

            Status("Current seq:");
            var maxSeq = await driver.GetLastKeyAsync();
            Console.WriteLine("Current seq: {0}", maxSeq);

            // Unwrap the driver decorators (stats, read-only) to reach the
            // underlying Azure driver, if that is what backs this stream.
            var asStatsDriver = driver as StatsDriverWrapper;
            var asReadOnlyDriver = (asStatsDriver?.Inner ?? driver) as ReadOnlyDriverWrapper;
            var asAzure = (asReadOnlyDriver?.Wrapped ?? asStatsDriver?.Inner ?? driver) as AzureStorageDriver;
            if (asAzure != null)
            {
                // List each backing blob with its size and first sequence.
                // The last blob has no FirstKey entry; use maxSeq for it.
                for (var i = 0; i < asAzure.Blobs.Count; ++i)
                {
                    Console.WriteLine("Blob {0}: {1:F2} MB from seq {2}",
                        asAzure.Blobs[i].Name,
                        asAzure.Blobs[i].Properties.Length / (1024.0 * 1024.0),
                        i < asAzure.FirstKey.Count ? asAzure.FirstKey[i] : maxSeq);
                }
            }

            // Skip ahead to the requested sequence; `start` marks the byte
            // position after the skip so progress/size reporting is relative.
            if (limit > 0)
            {
                Status($"Moving to seq {limit} ..");
                await stream.DiscardUpTo(limit);
                start = stream.Position;
            }

            // Background-fetch loop: start a fetch, drain the buffered events,
            // then await the fetch to learn whether more data is available.
            Func<bool> more;
            do
            {
                var fetch = stream.BackgroundFetchAsync();
                JObject obj;
                while ((obj = stream.TryGetNext()) != null)
                {
                    list.Add(new EventData(stream.Sequence, obj));
                }
                Status("Fetching: {0}/{1} ({2:F2}/{3:F2} MB)",
                    stream.Sequence, maxSeq,
                    (stream.Position - start) / (1024.0 * 1024.0),
                    (maxPos - start) / (1024.0 * 1024.0));
                more = await fetch;
            } while (more());
        })).Wait();

        // Summarize and register the fetched list; make it current if nothing is.
        Console.WriteLine("Fetched `{0}` ({1} events, {2:F2} MB) in {3:F2}s.",
            name, list.Count,
            (stream.Position - start) / (1024.0 * 1024.0),
            sw.ElapsedMilliseconds / 1000.0);

        Fetched.Add(name, list);
        Peek(list);
        if (Current == null)
        {
            Current = list;
        }
    }
}
/// <summary> Copies from source to destination, returns last position in source. </summary>
/// <param name="srcname">Connection string for the source stream (opened read-only).</param>
/// <param name="dstname">Connection string for the destination stream.</param>
/// <param name="maxseq">Events with a sequence above this value are not copied.</param>
/// <returns>The byte position reached in the source stream.</returns>
private static long BackupCopy(string srcname, string dstname, uint maxseq)
{
    var srcDriver = new StorageConfiguration(Parse(srcname)) { ReadOnly = true }.Connect();
    using (Release(srcDriver))
    {
        var dstDriver = new StorageConfiguration(Parse(dstname)).Connect();
        using (Release(dstDriver))
        {
            var src = new EventStream<JObject>(srcDriver);
            var dst = new MigrationStream<JObject>(dstDriver);

            var sw = Stopwatch.StartNew();
            var events = 0;      // number of events read from source (see NOTE below)
            var initial = 0L;    // source position after skipping already-copied events

            Status("Connecting...");

            // Synchronous CLI command: block on the async copy.
            // NOTE(review): Task.Run(...).Wait() is intentional (console app).
            Task.Run((Func<Task>)(async () =>
            {
                // Gather source size/sequence and destination sequence so we
                // know where to resume and how to report progress.
                Status("Current source size: ...");
                var maxPos = await srcDriver.GetPositionAsync();
                Console.WriteLine("Current source size: {0:F2} MB", maxPos / (1024.0 * 1024.0));

                Status("Current source seq: ...");
                var maxSeq = await srcDriver.GetLastKeyAsync();
                Console.WriteLine("Current source seq: {0}", maxSeq);

                Status("Current destination seq: ...");
                var bakSeq = await dstDriver.GetLastKeyAsync();
                Console.WriteLine("Current destination seq: {0}", bakSeq);

                // Nothing to copy if the destination already caught up.
                if (bakSeq >= maxSeq)
                {
                    Console.WriteLine("Destination already up-to-date.");
                    return;
                }

                // NOTE(review): hard cast assumes a ReadOnly configuration always
                // wraps the driver in ReadOnlyDriverWrapper — confirm; compare
                // with the defensive unwrapping done in CmdFetch.
                var asAzure = ((ReadOnlyDriverWrapper)srcDriver).Wrapped as AzureStorageDriver;
                if (asAzure != null)
                {
                    // List each backing blob; the last blob has no FirstKey
                    // entry, so maxSeq is displayed for it.
                    for (var i = 0; i < asAzure.Blobs.Count; ++i)
                    {
                        Console.WriteLine("Blob {0}: {1:F2} MB from seq {2}",
                            asAzure.Blobs[i].Name,
                            asAzure.Blobs[i].Properties.Length / (1024.0 * 1024.0),
                            i < asAzure.FirstKey.Count ? asAzure.FirstKey[i] : maxSeq);
                    }
                }

                // Resume after the destination's last sequence.
                if (bakSeq > 0)
                {
                    Status("Skipping to seq {0}...", bakSeq);
                    await src.DiscardUpTo(bakSeq + 1);
                    Console.WriteLine("Skipping to seq {0} done !", bakSeq);
                }

                initial = src.Position;

                // Background-fetch loop: drain buffered events into a batch,
                // trim anything above maxseq, write the batch to destination.
                Func<bool> more;
                do
                {
                    var fetch = src.BackgroundFetchAsync();
                    var list = new List<KeyValuePair<uint, JObject>>();
                    JObject obj;
                    while ((obj = src.TryGetNext()) != null)
                    {
                        list.Add(new KeyValuePair<uint, JObject>(src.Sequence, obj));
                    }

                    // NOTE(review): `events` is incremented BEFORE trimming, so
                    // the final count includes events above maxseq that were
                    // read but never written — confirm whether that is intended.
                    events += list.Count;
                    while (list.Count > 0 && list[list.Count - 1].Key > maxseq)
                    {
                        list.RemoveAt(list.Count - 1);
                    }

                    await dst.WriteAsync(list);
                    Status("{0}/{1} ({2:F2}/{3:F2} MB)",
                        src.Sequence, maxSeq,
                        src.Position / (1024.0 * 1024.0),
                        maxPos / (1024.0 * 1024.0));
                    more = await fetch;
                } while (more());
            })).Wait();

            Console.WriteLine("{0} events ({1:F2} MB) in {2:F2}s.",
                events,
                (src.Position - initial) / (1024.0 * 1024.0),
                sw.ElapsedMilliseconds / 1000.0);

            return (src.Position);
        }
    }
}