/// <summary>
/// Verifies that an <see cref="ISourceRef{T}"/> handed over by a remote actor
/// streams its elements ("hello", "world") across remoting and then completes.
/// </summary>
public void SourceRef_must_send_messages_via_remoting() {
    // Ask the remote side to hand us a source ref.
    _remoteActor.Tell("give");
    var remoteSource = ExpectMsg<ISourceRef<string>>();

    // Drain the remote source into our local probe; "<COMPLETE>" marks stream completion.
    var probeSink = Sink.ActorRef<string>(_probe.Ref, "<COMPLETE>");
    remoteSource.Source.RunWith(probeSink, Materializer);

    _probe.ExpectMsg("hello");
    _probe.ExpectMsg("world");
    _probe.ExpectMsg("<COMPLETE>");
}
/// <summary>
/// Verifies that a failure in the remote stream is propagated to the local
/// subscriber as a <see cref="Status.Failure"/> carrying the remote error reason.
/// </summary>
public void SourceRef_must_fail_when_remote_source_failed() {
    _remoteActor.Tell("give-fail");
    var remoteSource = ExpectMsg<ISourceRef<string>>();

    var probeSink = Sink.ActorRef<string>(_probe.Ref, "<COMPLETE>");
    remoteSource.Source.RunWith(probeSink, Materializer);

    // The remote failure surfaces locally as Status.Failure; the message should
    // identify it as a remote stream error and preserve the original reason.
    var failure = _probe.ExpectMsg<Status.Failure>();
    failure.Cause.Message.Should().Contain("Remote stream (");
    failure.Cause.Message.Should().Contain("Boom!");
}
// Verifies that Sink.ActorRef cancels its upstream once the target actor terminates.
public void ActorRefSink_should_cancel_a_stream_when_actor_terminates() {
    // Forwarding actor that relays stream elements to the TestActor.
    var fw = Sys.ActorOf(Props.Create(() => new Fw(TestActor)).WithDispatcher("akka.test.stream-dispatcher"));
    var publisher = this.SourceProbe<int>().To(Sink.ActorRef<int>(fw, onCompleteMessage: "done"))
        .Run(materializer) // NOTE(review): lowercase 'materializer' — presumably a field; other tests use 'Materializer'. Confirm.
        .SendNext(1)
        .SendNext(2);
    ExpectMsg(1);
    ExpectMsg(2);
    // Stopping the sink's target actor must propagate a cancellation upstream.
    Sys.Stop(fw);
    publisher.ExpectCancellation();
}
/// <summary>
/// On start, streams every persistence id from the trade-id query journal into this
/// actor as Ping messages, and schedules a recurring heartbeat every 30 seconds.
/// </summary>
protected override void PreStart() {
    var mat = Context.Materializer();
    var self = Self; // capture Self so the stream never closes over the actor context
    _tradeIdsQuery.PersistenceIds()
        .Where(x => x.EndsWith(EntityIdHelper.OrderBookSuffix)) // skip persistence ids belonging to price entities
        .Select(x => new Ping(EntityIdHelper.ExtractTickerFromPersistenceId(x)))
        // UnexpectedEndOfStream notifies this actor if the (normally live) query terminates.
        .RunWith(Sink.ActorRef<Ping>(self, UnexpectedEndOfStream.Instance), mat);
    _heartbeatInterval = Context.System.Scheduler.ScheduleTellRepeatedlyCancelable(TimeSpan.FromSeconds(30),
        TimeSpan.FromSeconds(30), Self, Heartbeat.Instance, ActorRefs.NoSender);
}
/// <summary>
/// Verifies Source.Tick emits "Tick" roughly every 200ms into an actor-ref sink,
/// and that cancelling the tick source delivers the "completed" message.
/// </summary>
/// <remarks>
/// FIX: was declared 'async void' with no awaits — an anti-pattern (exceptions become
/// unobservable and the test runner cannot track completion), so 'async' was removed.
/// NOTE(review): method name has a typo ("Simpe") — kept to avoid breaking test discovery.
/// </remarks>
public void SimpeStreamTest() {
    var materializer = ActorMaterializer.Create(Sys);
    var sourceUnderTest = Source.Tick(TimeSpan.FromSeconds(0), TimeSpan.FromMilliseconds(200), "Tick");
    var probe = CreateTestProbe();
    var cancellable = sourceUnderTest.To(Sink.ActorRef<string>(probe.Ref, "completed")).Run(materializer);
    probe.ExpectMsg("Tick");
    // No tick should arrive inside the 200ms interval...
    probe.ExpectNoMsg(TimeSpan.FromMilliseconds(100));
    // ...but the next one must arrive within the following window.
    probe.ExpectMsg("Tick", TimeSpan.FromMilliseconds(200));
    // Cancelling the tick source completes the stream; the sink forwards "completed".
    cancellable.Cancel();
    probe.ExpectMsg("completed");
}
/// <summary>
/// Builds and runs a report-generation graph: parses each log file (updating progress
/// on the view model), generates report rows, and forwards each LogRptDto to the
/// check-ins actor. When the parsing stage terminates the view model is sent to that actor.
/// </summary>
/// <remarks>
/// NOTE(review): despite the 'Async' suffix this method is synchronous, and
/// 'parseTask.Result' inside Select blocks a stream thread per file — consider
/// SelectAsync instead. TODO confirm intent before changing.
/// </remarks>
private void GenerateReportAsync(OpenLogFileViewModel viewModel) {
    logFileRptGeneratorService.InitializeReport();
    viewModel.OverallProgress = 0;
    viewModel.IsGeneratingReport = true;
    //var sourceQueue = Source.Queue<LogRptDto>(int.MaxValue, OverflowStrategy.Fail)
    //    .SelectAsyncUnordered(int.MaxValue, l => logFileRptGeneratorService.GetCheckInforInUseOuts(l, viewModel.LogFiles))
    //    .To(Sink.ActorRef<Tuple<bool, LogRptDto>>(logFilesExcelProviderActor, logFileRptGeneratorService.GetReportRows()))
    //    .Run(Context.Materializer());
    var g = RunnableGraph.FromGraph(GraphDsl.Create(b => {
        var source = Source.From(viewModel.LogFiles);
        // NOTE(review): 'sink' is built but never wired — the graph ends in Sink.ForEach below.
        var sink = Sink.ActorRef<Tuple<bool, LogRptDto>>(logFilesExcelProviderActor, logFileRptGeneratorService.GetReportRows());
        var parsing = Flow.Create<LogFile>()
            .Select(x => {
                viewModel.OverallProgress++;
                var parseTask = logFilesParsingService.ParseLogFileEventsAsync(x);
                parseTask.ContinueWith(t => denialsRptGeneratorService.Aggregate(t.Result));
                return (parseTask.Result); // blocks until the parse finishes — see remarks
            }).WatchTermination((_, o) => {
                // When parsing completes (or fails), hand the view model to the check-ins actor.
                o.ContinueWith(t => getCheckInsActor.Tell(viewModel));
                return (_);
            });
        //.WatchTermination((_,u)=>u.PipeTo(getCheckInsActor));
        //TODO: create new actor to run getCheckIns ===^
        //Akka.Done
        //Akka.Actor.Status.Failure;
        var reportGen = Flow.Create<LogFile>()
            .SelectAsyncUnordered(int.MaxValue, logFileRptGeneratorService.GenerateReport)
            .Recover(exception => { throw exception; }) // NOTE(review): rethrowing inside Recover is a no-op recovery — confirm intent
            .SelectMany(x => x);
        //var getCheckIns = Flow.Create<LogRptDto>()
        //    .SelectAsyncUnordered(int.MaxValue, x => getCheckInsActor.Tell(x))
        //var getCheckIns = Flow.Create<LogRptDto>()
        //    .SelectAsyncUnordered(int.MaxValue, l => logFileRptGeneratorService.GetCheckInforInUseOuts(l, viewModel.LogFiles));
        b.From(source).Via(parsing).Via(reportGen).To(Sink.ForEach<LogRptDto>(l => getCheckInsActor.Tell(l)));//.Via(getCheckIns).To(sink);
        return (ClosedShape.Instance);
    }));
    g.Run(Context.Materializer());
}
/// <summary>
/// Recovery has completed successfully. Starts a live query that feeds all Match
/// events for this ticker back to the actor, and schedules periodic price publishing.
/// </summary>
protected override void OnReplaySuccess() {
    var mat = Context.Materializer();
    var self = Self; // capture Self so the stream never closes over the actor context
    // transmit all tag events to myself
    _eventsByTag.EventsByTag(TickerSymbol, Offset.Sequence(QueryOffset))
        .Where(x => x.Event is Match) // only care about Match events
        // UnexpectedEndOfStream notifies this actor if the (normally live) query terminates.
        .RunWith(Sink.ActorRef<EventEnvelope>(self, UnexpectedEndOfStream.Instance), mat);
    // Publish price events every 10 seconds until cancelled.
    _publishPricesTask = Context.System.Scheduler.ScheduleTellRepeatedlyCancelable(TimeSpan.FromSeconds(10),
        TimeSpan.FromSeconds(10), Self, PublishEvents.Instance, ActorRefs.NoSender);
    base.OnReplaySuccess();
}
// Verifies MergeClosestN pairs elements from a primary stream running at twice the
// sample rate of a secondary stream: each emitted pair holds two samples with matching
// timestamps, the primary one carrying double the sample index.
public void PrimaryTwiceRateOfSecondary() {
    using (var mat = Sys.Materializer()) {
        var probe = CreateTestProbe();
        var graph = RunnableGraph.FromGraph(
            GraphDsl.Create(builder => {
                // Primary: 200 samples, one every 50 ticks.
                var primary = Source.From(
                    Enumerable.Range(0, 200)
                        .Select(y => Mock.Of<ISyncData>(x =>
                            x.TimeStamp == 50 * y &&
                            x.SampleIndex == y
                        )));
                // Secondary: 100 samples, one every 100 ticks (half the rate).
                var secondary = Source.From(
                    Enumerable.Range(0, 100)
                        .Select(y => Mock.Of<ISyncData>(x =>
                            x.TimeStamp == 100 * y &&
                            x.SampleIndex == y
                        )));
                var merger = builder.Add(new MergeClosestN<ISyncData>(2));
                var sink = Sink.ActorRef<IImmutableList<ISyncData>>(probe, "completed");
                builder.From(primary).To(merger.In(0));
                builder.From(secondary).To(merger.In(1));
                builder.From(merger.Out).To(sink);
                return (ClosedShape.Instance);
            }));
        graph.Run(mat);
        // 97 merged pairs expected; allow a generous timeout when a debugger is attached.
        var msgs = probe.ReceiveN(97, TimeSpan.FromSeconds(Debugger.IsAttached ? 300 : 3));
        msgs.Should().AllBeAssignableTo(typeof(IImmutableList<ISyncData>));
        var arrays = msgs.Cast<IImmutableList<ISyncData>>().ToList();
        // Both halves of each pair share the same (secondary-aligned) timestamp...
        var timestamps = arrays.Select(x => x.Select(y => y.TimeStamp).ToArray()).ToList();
        timestamps.Should().BeEquivalentTo(Enumerable.Range(0, 97).Select(x => new[] { x * 100, x * 100 }));
        // ...while the primary index advances twice as fast as the secondary index.
        var sampleIndices = arrays.Select(x => x.Select(y => y.SampleIndex).ToArray()).ToList();
        sampleIndices.Should().BeEquivalentTo(Enumerable.Range(0, 97).Select(x => new[] { 2 * x, x }));
    }
}
// Verifies MergeClosestN with five identical-rate streams: every emitted group contains
// five samples with equal timestamps and equal sample indices.
public void ExactSampleFiveStreamMerge() {
    using (var mat = Sys.Materializer()) {
        var probe = CreateTestProbe();
        var graph = RunnableGraph.FromGraph(
            GraphDsl.Create(builder => {
                // Five identical sources: 100 samples, one every 100 ticks.
                var sources = Enumerable.Range(0, 5).Select(x => Source.From(
                    Enumerable.Range(0, 100)
                        .Select(y => Mock.Of<ISyncData>(m =>
                            m.TimeStamp == 100 * y &&
                            m.SampleIndex == y
                        )))).ToArray();
                var merger = builder.Add(new MergeClosestN<ISyncData>(5));
                var sink = Sink.ActorRef<IImmutableList<ISyncData>>(probe, "completed");
                for (int i = 0; i < 5; i++) {
                    builder.From(sources[i]).To(merger.In(i));
                }
                builder.From(merger.Out).To(sink);
                return (ClosedShape.Instance);
            }));
        graph.Run(mat);
        // 99 merged groups expected; allow a generous timeout when a debugger is attached.
        var msgs = probe.ReceiveN(99, TimeSpan.FromSeconds(Debugger.IsAttached ? 300 : 3));
        msgs.Should().AllBeAssignableTo(typeof(IImmutableList<ISyncData>));
        var arrays = msgs.Cast<IImmutableList<ISyncData>>().ToList();
        // Every group holds five samples with the same timestamp...
        var timestamps = arrays.Select(x => x.Select(y => y.TimeStamp).ToArray()).ToList();
        timestamps.Should().BeEquivalentTo(
            Enumerable.Range(0, 99).Select(x => Enumerable.Range(0, 5).Select(y => x * 100).ToArray()));
        // ...and the same sample index.
        var sampleIndices = arrays.Select(x => x.Select(y => y.SampleIndex).ToArray()).ToList();
        sampleIndices.Should().BeEquivalentTo(
            Enumerable.Range(0, 99).Select(x => Enumerable.Range(0, 5).Select(y => x).ToArray()));
    }
}
/// <summary>
/// Verifies Sink.Combine broadcasts each element to two sinks at once: an actor-ref
/// sink (observed via the probe) and a local aggregating sink.
/// </summary>
public void CombiningSinksWithSimplifiedApi() {
    var actor = CreateTestProbe();

    // One branch forwards elements to the probe, the other sums them locally.
    var sendRemotely = Sink.ActorRef<int>(actor.Ref, "Done");
    var localProcessing = Sink.Aggregate<int, int>(0, (acc, i) => acc + i)
        .MapMaterializedValue(_ => NotUsed.Instance);
    var sink = Sink.Combine(i => new Broadcast<int>(i), sendRemotely, localProcessing);

    WithMaterializer(m => {
        Source.From(new[] { 0, 1, 2 }).RunWith(sink, m);

        // The probe must have observed every broadcast element.
        var received = actor.ReceiveN(3);
        Assert.That(received, Is.EquivalentTo(new[] { 0, 1, 2 }));
    });
}
/// <summary>
/// Verifies that the SQL read journal's EventsByTag("Test") query emits at least
/// one EventEnvelope into an actor-ref sink.
/// </summary>
/// <remarks>
/// FIX: was declared 'async void' with no awaits — an anti-pattern (exceptions become
/// unobservable and the test runner cannot track completion), so 'async' was removed.
/// </remarks>
public void SourceStreamSinkTest() {
    var journal = Sys.ReadJournalFor<SqlReadJournal>(SqlReadJournal.Identifier);
    var materializer = ActorMaterializer.Create(Sys);
    var sourceUnderTest = journal.EventsByTag("Test");
    var probe = CreateTestProbe();
    sourceUnderTest.To(Sink.ActorRef<EventEnvelope>(probe.Ref, "completed")).Run(materializer);
    probe.ExpectMsg<EventEnvelope>();
    // probe.ExpectMsg("Tick");
    // probe.ExpectNoMsg(TimeSpan.FromMilliseconds(100));
    // probe.ExpectMsg("Tick", TimeSpan.FromMilliseconds(200));
    // probe.ExpectMsg("completed");
}
/// <summary>
/// Wires the persistent event journal to the primary event handler actor: replays all
/// events for the configured persistence id and streams each event payload to the handler.
/// </summary>
/// <returns>The materializer that owns the running stream.</returns>
private ActorMaterializer ConfigureEventHandling(ActorSystem actorSystem, IActorRef primaryEventHandler, ActorSystemOptions options) {
    var actorMaterializer = actorSystem.Materializer();
    var readJournal = PersistenceQuery.Get(actorSystem).ReadJournalFor<SqlReadJournal>("akka.persistence.query.journal.sql");
    readJournal
        //TODO: [THIS IS MOST PROBABLY A BIG ISSUE] if we always query from sequence number 0, then all
        //event handlers will produce side effects again and again, each time domain events are replayed.
        //(e.g. this is obsiously a problem if your event handlers send emails)
        .EventsByPersistenceId(options.PersistenceId, fromSequenceNr: 0, toSequenceNr: long.MaxValue)
        .Collect(envelope => envelope.Event) // unwrap the envelope; only the event payload is forwarded
        // The fresh 'new object()' is the stream-completion message sent to the handler.
        .RunWith(Sink.ActorRef<object>(primaryEventHandler, new object()
        //TODO: you might want to provide these options
        //.RunWith(Sink.ActorRefWithAck<ItemAdded>(writer,
        //onInitMessage: CreateViewsActor.Init.Instance,
        //ackMessage: CreateViewsActor.Ack.Instance,
        //onCompleteMessage: CreateViewsActor.Done.Instance), materializer);
        ), actorMaterializer);
    return (actorMaterializer);
}
/// <summary>
/// Creates a queue-backed stream that slices each incoming packet into fixed-length
/// samples, assembles up to four samples concurrently, and sends each FpgaSample to
/// the output target actor (StreamComplete is sent when the stream ends).
/// </summary>
/// <param name="samplesPerPacket">Number of samples contained in each incoming ByteString.</param>
/// <param name="sampleLength">Length in bytes of a single sample slice.</param>
/// <param name="outputTarget">Actor that receives every assembled sample.</param>
/// <returns>The materialized queue used to offer raw packets into the pipeline.</returns>
private ISourceQueueWithComplete<ByteString> CreateAssemblerLogic(int samplesPerPacket, int sampleLength, IActorRef outputTarget) {
    var assemblerLogic = Flow.Create<ByteString>()
        .SelectMany(bs => {
            // Cut the packet into samplesPerPacket slices of sampleLength bytes each.
            // FIX: removed an unused 'new MemoryStream()' that was allocated (and never
            // used or disposed) on every iteration of this loop.
            var sliceList = new List<ByteString>();
            for (int i = 0; i < (samplesPerPacket * sampleLength); i += sampleLength) {
                sliceList.Add(bs.Slice(i, sampleLength));
            }
            return (sliceList);
        })
        // Tag each slice with a monotonically increasing sample index.
        .Select(bytestring => (index: ++_sampleIndex, bytes: bytestring))
        // Assemble up to 4 samples concurrently; SelectAsync preserves downstream order.
        .SelectAsync(4, tup => Task.Run(() => AssembleSample(tup.bytes, tup.index)))
        .To(Sink.ActorRef<FpgaSample>(outputTarget, new FpgaPluginMessages.StreamComplete()));
    // Backpressure the producer once 10000 packets are buffered.
    var source = Source.Queue<ByteString>(10000, OverflowStrategy.Backpressure);
    return (source.ToMaterialized(assemblerLogic, Keep.Left).Run(Context.System.Materializer()));
}
/// <summary>
/// Entry point: boots the actor system from config.hocon, runs a decimating stream of
/// generated sync data into a logging actor, and blocks until the system terminates.
/// </summary>
static void Main(string[] args) {
    Log.Logger = LoggerFactory.Logger;
    var logSwitch = LoggerFactory.LoggingSwitch;
    var config = ConfigurationFactory.ParseString(File.ReadAllText("config.hocon"));
    logSwitch.MinimumLevel = LogEventLevel.Debug;
    using (var system = ActorSystem.Create("System", config))
    using (var materialiser = system.Materializer()) {
        Log.Information("System Started");
        // Create actors.
        var loggingActor = system.ActorOf<LoggingActor>("Logger");
        // Create streams.
        var generator = Source.From<ISyncData>(GenerateData());
        var decimator = new SpatialDecimator<ISyncData>(300, 1); // NOTE(review): magic numbers — confirm their meaning against SpatialDecimator's ctor
        var flow = Flow.Create<ISyncData>().Via(decimator);
        // PoisonPill as the completion message stops the logging actor when the stream ends.
        var sink = Sink.ActorRef<ISyncData>(loggingActor, PoisonPill.Instance);
        var queue = generator.Via(flow).RunWith(sink, materialiser); // NOTE(review): name 'queue' is misleading — this is the sink's materialized value
        // Close system logic.
        Console.CancelKeyPress += (obj, a) => { system.Terminate().Wait(TimeSpan.FromSeconds(3)); };
        system.WhenTerminated.Wait();
        Log.Information("System Stopped");
        Log.CloseAndFlush();
    }
}
/// <summary>
/// Actor that stashes incoming messages until an OpenLogFileViewModel arrives, then
/// streams LogRptDto messages through the report generator into the Excel provider actor.
/// </summary>
public GetCheckInsActor(ILogFileRptGeneratorService logFileRptGeneratorService) {
    logFilesExcelProviderActor = Context.ActorOf(Context.DI().Props<LogFilesExcelProviderActor>(), ActorPaths.logFilesExcelProviderActor.Name);
    // NOTE(review): buffer size 0 with Backpressure means offers only complete while
    // downstream demand exists — confirm this is intentional.
    var sourceQueue = Source.Queue<LogRptDto>(0, OverflowStrategy.Backpressure)
        .Recover(ex => throw ex) // NOTE(review): rethrowing inside Recover is a no-op recovery — confirm intent
        // 'vm' is a field captured here and assigned later, when the view model message arrives.
        .SelectAsyncUnordered(int.MaxValue, l => logFileRptGeneratorService.GetCheckInforInUseOuts(l, vm.LogFiles))
        .ToMaterialized(Sink.ActorRef<Tuple<bool, LogRptDto>>(logFilesExcelProviderActor, logFileRptGeneratorService.GetReportRows()), Keep.Left)
        .Run(Context.Materializer());
    Receive<OpenLogFileViewModel>(vmodel => {
        vm = vmodel;
        Stash.UnstashAll();
        // Once the view model is known, start accepting report rows into the queue.
        BecomeStacked(() => { Receive<LogRptDto>(l => sourceQueue.OfferAsync(l)); });
    });
    // Stash anything that arrives before the view model does.
    ReceiveAny(_ => Stash.Stash());
    this.logFileRptGeneratorService = logFileRptGeneratorService;
}
// Property: RoundRobinFanOut with N outputs distributes elements 0..N-1 of the source
// round-robin, so probe i receives exactly element i followed by the completion message.
public Property RoundRobinDistributerSpecs() {
    return (Prop.ForAll<PositiveInt>(value => {
        var probes = Enumerable.Range(0, value.Get).Select(_ => CreateTestProbe()).ToList();
        var sinks = probes.Select(p => Sink.ActorRef<int>(p, "completed")).ToList();
        using (var mat = Sys.Materializer()) {
            var graph = RunnableGraph.FromGraph(GraphDsl.Create(builder => {
                var source = Source.From(Enumerable.Range(0, value.Get));
                var roundRobin = builder.Add(new RoundRobinFanOut<int>(value.Get));
                builder.From(source).To(roundRobin.In);
                for (int i = 0; i < value.Get; i++) {
                    builder.From(roundRobin.Out(i)).To(sinks[i]);
                }
                return ClosedShape.Instance;
            }));
            graph.Run(mat);
            for (int i = 0; i < value.Get; i++) {
                // Each probe gets exactly one element — its own index...
                var msg = probes[i].ExpectMsg<int>(TimeSpan.FromSeconds(3));
                msg.Should().Be(i);
                // ...followed by the stream-completion marker.
                var completeMsg = probes[i].ExpectMsg<string>(TimeSpan.FromSeconds(3));
                completeMsg.Should().Be("completed");
            }
        }
    }));
}
/// <summary>
/// Actor that converts FpgaSample messages to ChannelData via a backpressured stream
/// queue, forwarding each result (and a completion marker) to the output target.
/// </summary>
public ConverterActor(IActorRef outputTarget) {
    // Convert up to 4 samples concurrently; SelectAsync preserves downstream order.
    var flowLogic = Flow.Create<FpgaSample>()
        .SelectAsync(4, sample => Task.Run(() => ConvertSample(sample)))
        .To(Sink.ActorRef<ChannelData<float>>(outputTarget, new FpgaConversionCompleted()));
    // Backpressure the producer once 10000 samples are buffered.
    var source = Source.Queue<FpgaSample>(10000, OverflowStrategy.Backpressure);
    var queue = source.ToMaterialized(flowLogic, Keep.Left).Run(Context.System.Materializer());
    // Offer each incoming sample to the queue and pipe the offer result back to self.
    Receive<FpgaSample>(msg => { queue.OfferAsync(msg).PipeTo(Self); });
    // React to the queue's verdict: count successes, log drops, escalate failures.
    Receive<IQueueOfferResult>(enqueueTask => {
        enqueueTask.Match()
            .With<QueueOfferResult.Enqueued>(msg => ++ _messagesEnqueued)
            .With<QueueOfferResult.Dropped>(msg => Log.Warning("FPGA conversion actor dropped a message. Total dropped:{0}", ++_messagesDropped))
            .With<QueueOfferResult.Failure>(msg => throw msg.Cause)
            .With<QueueOfferResult.QueueClosed>(msg => throw new Exception("The stream queue was closed."));
    });
}
// Property: UnzipEnumerable with N outputs splits a single list [0..N-1] so that
// probe i receives exactly element i.
public Property UnzipEnumerableProps() {
    return (Prop.ForAll<PositiveInt>(value => {
        using (var mat = Sys.Materializer()) {
            var probes = Enumerable.Range(0, value.Get).Select(x => CreateTestProbe()).ToList();
            var graph = RunnableGraph.FromGraph(GraphDsl.Create(builder => {
                // The source emits a single element: the list [0, 1, ..., N-1].
                var source = Source.From(Enumerable.Range(1, 1).Select(x => Enumerable.Range(0, value.Get).ToList()));
                var unzipper = builder.Add(new UnzipEnumerable<List<int>, int>(x => x.ToImmutableList(), value.Get));
                var sinks = probes.Select(p => Sink.ActorRef<int>(p, "completed")).ToList();
                for (int i = 0; i < value.Get; i++) {
                    builder.From(unzipper.Out(i)).To(sinks[i]);
                }
                builder.From(source).To(unzipper.In);
                return ClosedShape.Instance;
            }));
            graph.Run(mat);
            for (int i = 0; i < value.Get; i++) {
                var msg = probes[i].ExpectMsg<int>(TimeSpan.FromSeconds(3));
                msg.Should().Be(i);
            }
        }
    }));
}
/// <summary>
/// Command handler used by stream-ref tests: on request, materializes a SourceRef or
/// SinkRef in various configurations and pipes it back to the sender.
/// </summary>
/// <returns>True when the message was handled; false otherwise.</returns>
protected override bool Receive(object message) {
    switch (message) {
        case "give": {
            /*
             * Here we're able to send a source to a remote recipient
             * For them it's a Source; for us it is a Sink we run data "into"
             */
            var source = Source.From(new[] { "hello", "world" });
            var aref = source.RunWith(StreamRefs.SourceRef<string>(), _materializer);
            aref.PipeTo(Sender);
            return (true);
        }
        case "give-infinite": {
            // An effectively unbounded source — the consumer decides when to stop.
            var source = Source.From(Enumerable.Range(1, int.MaxValue).Select(i => "ping-" + i));
            var t = source.ToMaterialized(StreamRefs.SourceRef<string>(), Keep.Right).Run(_materializer);
            t.PipeTo(Sender);
            return (true);
        }
        case "give-fail": {
            // The consumer should observe the failure as a remote stream error.
            var r = Source.Failed<string>(new Exception("Boom!"))
                .RunWith(StreamRefs.SourceRef<string>(), _materializer);
            r.PipeTo(Sender);
            return (true);
        }
        case "give-complete-asap": {
            var r = Source.Empty<string>().RunWith(StreamRefs.SourceRef<string>(), _materializer);
            r.PipeTo(Sender);
            return (true);
        }
        case "give-subscribe-timeout": {
            var r = Source.Repeat("is anyone there?")
                .ToMaterialized(StreamRefs.SourceRef<string>(), Keep.Right)
                // Fail the ref if nobody subscribes within 500ms.
                .WithAttributes(StreamRefAttributes.CreateSubscriptionTimeout(TimeSpan.FromMilliseconds(500)))
                .Run(_materializer);
            r.PipeTo(Sender);
            return (true);
        }
        case "receive": {
            /*
             * We write out code, knowing that the other side will stream the data into it.
             * For them it's a Sink; for us it's a Source.
             */
            var sink = StreamRefs.SinkRef<string>().To(Sink.ActorRef<string>(_probe, "<COMPLETE>"))
                .Run(_materializer);
            sink.PipeTo(Sender);
            return (true);
        }
        case "receive-ignore": {
            var sink = StreamRefs.SinkRef<string>().To(Sink.Ignore<string>()).Run(_materializer);
            sink.PipeTo(Sender);
            return (true);
        }
        case "receive-subscribe-timeout": {
            var sink = StreamRefs.SinkRef<string>()
                // Fail the ref if nobody attaches within 500ms.
                .WithAttributes(StreamRefAttributes.CreateSubscriptionTimeout(TimeSpan.FromMilliseconds(500)))
                .To(Sink.ActorRef<string>(_probe, "<COMPLETE>"))
                .Run(_materializer);
            sink.PipeTo(Sender);
            return (true);
        }
        case "receive-32": {
            // Disabled demand-driven test driver, kept for reference:
            // var t = StreamRefs.SinkRef<string>()
            //     .ToMaterialized(TestSink.SinkProbe<string>(Context.System), Keep.Both)
            //     .Run(_materializer);
            //
            // var sink = t.Item1;
            // var driver = t.Item2;
            // Task.Run(() =>
            // {
            //     driver.EnsureSubscription();
            //     driver.Request(2);
            //     driver.ExpectNext();
            //     driver.ExpectNext();
            //     driver.ExpectNoMsg(TimeSpan.FromMilliseconds(100));
            //     driver.Request(30);
            //     driver.ExpectNextN(30);
            //
            //     return "<COMPLETED>";
            // }).PipeTo(_probe);
            return (true);
        }
        default:
            return (false);
    }
}
/// <summary>
/// On start, streams every event tagged "Account" from the journal into this actor.
/// </summary>
/// <remarks>
/// FIX: 'async' was removed — the method contained no awaits, and an 'async void'
/// override hides exceptions from the actor's supervision logic.
/// NOTE(review): onCompleteMessage is null, so the sink will attempt Tell(null) when
/// the stream completes, which Akka rejects — consider a real completion message.
/// </remarks>
protected override void PreStart() {
    journal.EventsByTag("Account")
        .To(Sink.ActorRef<EventEnvelope>(Context.Self, null))
        .Run(ActorMaterializer.Create(Context.System));
}
/// <summary>
/// Builds (but does not run) a graph that takes queued ChannelData samples, splits them
/// into sync/analog/digital parts, applies per-channel scaling/offset filters and
/// optional temporal re-alignment (skip + buffer), optionally derives extra digital
/// channels from an analog channel, re-merges everything into a new ChannelData and
/// sends each result to the target actor.
/// </summary>
/// <param name="target">Actor that receives every merged ChannelData sample.</param>
/// <param name="configs">Per-channel filter/offset configuration, one entry per analog channel.</param>
/// <param name="sample">A representative sample used only to resolve channel indices.</param>
/// <returns>A runnable graph whose materialized value is the input queue.</returns>
private static RunnableGraph<ISourceQueueWithComplete<ChannelData<float>>> CreateGraph(IActorRef target, List<ChannelAdjusterConfig> configs, ChannelData<float> sample) {
    /*
     * Digital Merger is only necessary when there are additional digitals created and the same goes for the
     * Broadcast following the Analog Splitter. A broadcast is only required when the analog channel is produces
     * the additional digitals. Otherwise the analog is pushed straight to the merger.
     *
     * Topology (original ASCII diagram, condensed):
     *   QueueSource -> ChannelData Splitter -> { SyncData lane,
     *                                            Analog Splitter -> [Broadcast -> Filter (+/-FullScale, FlatLining digitals)] -> Analog Merger,
     *                                            Digitals lane -> Digital Merger }
     *   -> final ChannelData Merger -> Sink
     */
    // Indices of the analog channels selected by the configs, in output order.
    var indices = GetIndices(sample, configs);
    var number = indices.Count();
    // Convert per-channel temporal offsets into non-negative skip counts relative to
    // the most-delayed lane; the trailing Append(0) is the sync-data lane.
    var temporalOffsets = configs.Select(x => - x.TemporalOffset).Append(0);
    var temp = temporalOffsets.Select(x => x - temporalOffsets.Min()).ToList();
    var skipIndices = temp.Take(temp.Count - 1).ToList();
    var zerothIndex = temp.Last();
    var bufferSize = temp.Max() + 1;
    // Skip flows (and the buffers that feed them) are only needed when lanes are misaligned.
    var skipFlowsNeeded = skipIndices.Any(x => x != 0);
    var graph = GraphDsl.Create(Source.Queue<ChannelData<float>>(10000, OverflowStrategy.Backpressure), (builder, source) => {
        //Split channel data into sync data, analogs and digitals
        var channelDataSplitter = new UnzipWith<ChannelData<float>, ISyncData, IReadOnlyList<DataChannel<float>>, IReadOnlyList<DataChannel<bool>>>(cd => Tuple.Create(cd as ISyncData, cd.Analogs, cd.Digitals));
        var channelDataSplitterShape = builder.Add(channelDataSplitter);
        //Split, filter and reorder the analog channels into the required data channels
        var analogSplitter = new UnzipEnumerable<IReadOnlyList<DataChannel<float>>, DataChannel<float>>(list => indices.Select(i => list[i]).ToImmutableList(), number);
        var analogSplitterShape = builder.Add(analogSplitter);
        //Re-combine the filtered analog channels
        var analogMerger = new ZipN<DataChannel<float>>(number);
        var analogMergerShape = builder.Add(analogMerger);
        //Digital additional flows
        var additionalDigitalFlows = new List<FlowShape<DataChannel<float>, DataChannel<bool>>>();
        //Create the appropriate analog filtering flows.
        for (int i = 0; i < configs.Count(); i++) {
            var skipValue = skipIndices[i];
            //Create new flows for the analogs
            switch (configs[i].Option) {
                // 1a) Each cfg generates one analog flow...
                case FilterOption.PassThrough:
                    if (skipFlowsNeeded) {
                        // Buffer + Skip realigns this lane with the most-delayed lane.
                        builder.From(analogSplitterShape.Out(i))
                            .Via(builder.Add(
                                Flow.Create<DataChannel<float>>()
                                    .Buffer(bufferSize, OverflowStrategy.Backpressure)
                                    .Skip(skipValue)
                                    .Log("AnalogLog")))
                            .To(analogMergerShape.In(i));
                    } else {
                        // Pass through channels can be connected straight from the splitter to the merger.
                        builder.From(analogSplitterShape.Out(i)).To(analogMergerShape.In(i));
                    }
                    break;
                case FilterOption.Filter:
                    // Filtered channels create a single flow and connected from the splitter to the merger.
                    var scale = configs[i].Scale;
                    var offset = configs[i].Offset;
                    var filterFlow = skipFlowsNeeded
                        ? Flow.Create<DataChannel<float>>()
                            .Buffer(bufferSize, OverflowStrategy.Backpressure)
                            .Skip(skipValue)
                            .Select(x => new DataChannel<float>(x.Name, x.Value * scale + offset, x.Units))
                        : Flow.Create<DataChannel<float>>()
                            .Select(x => new DataChannel<float>(x.Name, x.Value * scale + offset, x.Units));
                    builder.From(analogSplitterShape.Out(i)).Via(builder.Add(filterFlow)).To(analogMergerShape.In(i));
                    break;
                // 1b) OR One analog flow and 3 additional digital flows.
                case FilterOption.CreateDigitals:
                    // Filtered channels that create digitals creates a broadcaster for the analog channel first...
                    var analogBroadcaster = new Broadcast<DataChannel<float>>(4);
                    // ...then three flows for the digitals
                    // NOTE(review): the digital values are hard-coded to false — presumably
                    // placeholders for real +FullScale / -FullScale / flat-lining detection. Confirm.
                    var d1Flow = builder.Add(Flow.Create<DataChannel<float>>().Select(x => new DataChannel<bool>($"{x.Name}_+FullScale", false)));
                    var d2Flow = builder.Add(Flow.Create<DataChannel<float>>().Select(x => new DataChannel<bool>($"{x.Name}_-FullScale", false)));
                    var d3Flow = builder.Add(Flow.Create<DataChannel<float>>().Select(x => new DataChannel<bool>($"{x.Name}_Flatlining", false)));
                    // ...add the digital flow shapes to be connected later
                    additionalDigitalFlows.Add(d1Flow);
                    additionalDigitalFlows.Add(d2Flow);
                    additionalDigitalFlows.Add(d3Flow);
                    // ...create the broadcaster shape
                    var analogBroadcasterShape = builder.Add(analogBroadcaster);
                    // ...create the filter flow and connect the broadcaster to the merger via the filter
                    var scaler = configs[i].Scale;
                    var offsetter = configs[i].Offset;
                    var filter = skipFlowsNeeded
                        ? Flow.Create<DataChannel<float>>()
                            .Buffer(bufferSize, OverflowStrategy.Backpressure)
                            .Skip(skipValue)
                            .Select(x => new DataChannel<float>(x.Name, x.Value * scaler + offsetter, x.Units))
                        : Flow.Create<DataChannel<float>>()
                            .Select(x => new DataChannel<float>(x.Name, x.Value * scaler + offsetter, x.Units));
                    // ...link the analog splitter output to the broadcaster
                    builder.From(analogSplitterShape.Out(i))
                        .Via(filter)
                        .To(analogBroadcasterShape);
                    builder.From(analogBroadcasterShape.Out(0)).To(analogMergerShape.In(i));
                    // ...link the broadcaster channels to the additional digital flows
                    builder.From(analogBroadcasterShape.Out(1)).Via(d1Flow);
                    builder.From(analogBroadcasterShape.Out(2)).Via(d2Flow);
                    builder.From(analogBroadcasterShape.Out(3)).Via(d3Flow);
                    break;
                case FilterOption.NotSet:
                    throw new ArgumentException("Filter Option Not Set is not allowed.");
            }
        }
        //Merge everything back together
        var channelDataMerger = ZipWith.Apply<ISyncData, IImmutableList<DataChannel<float>>, IReadOnlyList<DataChannel<bool>>, ChannelData<float>>(
            (sync, analogs, digitals) => new ChannelData<float>(
                analogs,
                digitals,
                sync.TimeStamp,
                sync.TachometerCount,
                sync.MasterSyncIncrement,
                sync.MasterSyncState,
                sync.SampleIndex));
        var channelDataMergerShape = builder.Add(channelDataMerger);
        //Sink
        // NOTE(review): 'false' is the onCompleteMessage sent to the target actor when
        // the stream completes — confirm the target expects a bool there.
        var sink = Sink.ActorRef<ChannelData<float>>(target, false);
        var sinkShape = builder.Add(sink);
        //_________Link stages_________
        //=====Source=====
        //Source to the channel data splitter
        if (skipFlowsNeeded) {
            // Misaligned lanes: every lane gets a buffer, and the skip/zerothIndex flows drop
            // leading samples so all lanes line up at the final merger.
            builder.From(source)
                .Via(builder.Add(Flow.Create<ChannelData<float>>().Buffer(bufferSize, OverflowStrategy.Backpressure)))
                .To(channelDataSplitterShape.In);
            //=====Splitter=====
            //Splitter sync data to merger.
            builder.From(channelDataSplitterShape.Out0)
                .Via(builder.Add(Flow.Create<ISyncData>().Buffer(bufferSize, OverflowStrategy.Backpressure).Skip(zerothIndex)))
                .To(channelDataMergerShape.In0);
            //Splitter analogs to analog splitter.
            builder.From(channelDataSplitterShape.Out1)
                .Via(builder.Add(Flow.Create<IReadOnlyList<DataChannel<float>>>().Buffer(bufferSize, OverflowStrategy.Backpressure)))
                .To(analogSplitterShape.In);
            //=====AdditionalDigitalFlows=====
            if (additionalDigitalFlows.Count > 0) {
                // Additonal Digital Merger
                var additionalDigitalMerger = new ZipWithN<DataChannel<bool>, IImmutableList<DataChannel<bool>>>(channel => channel, additionalDigitalFlows.Count);
                var additionalDigitalMergerShape = builder.Add(additionalDigitalMerger);
                //Combine the input digitals with the generated additional digitals
                var digitalMerger = ZipWith.Apply<List<DataChannel<bool>>, ImmutableList<DataChannel<bool>>, IReadOnlyList<DataChannel<bool>>>((channel1, channel2) => channel1.Concat(channel2).ToList());
                var digitalMergerShape = builder.Add(digitalMerger);
                //Splitter digitals to digital merger.
                builder.From(channelDataSplitterShape.Out2)
                    .Via(builder.Add(Flow.Create<IReadOnlyList<DataChannel<bool>>>().Buffer(bufferSize, OverflowStrategy.Backpressure)))
                    .To(digitalMergerShape.In0);
                // Merge all additional flows together.
                for (int i = 0; i < additionalDigitalFlows.Count; i++) {
                    builder.From(additionalDigitalFlows[i]).To(additionalDigitalMergerShape.In(i));
                }
                //Additional digitals to digital merger
                builder.From(additionalDigitalMergerShape.Out).To(digitalMergerShape.In1);
                //=====DigitalMerger=====
                //Digital merger to channel data merger
                builder.From(digitalMergerShape.Out).To(channelDataMergerShape.In2);
            } else {
                // Splitter digitals to final merger.
                builder.From(channelDataSplitterShape.Out2)
                    .Via(builder.Add(Flow.Create<IReadOnlyList<DataChannel<bool>>>().Buffer(bufferSize, OverflowStrategy.Backpressure)))
                    .To(channelDataMergerShape.In2);
            }
            // Analog merger to final merger.
            builder.From(analogMergerShape.Out).To(channelDataMergerShape.In1);
            //=====Merger=====
            //Channel Data Merger to sink
            builder.From(channelDataMergerShape.Out).To(sinkShape);
        } else {
            // Aligned lanes: same wiring, but no buffers or skip flows are required.
            builder.From(source).To(channelDataSplitterShape.In);
            //=====Splitter=====
            //Splitter sync data to merger.
            builder.From(channelDataSplitterShape.Out0).To(channelDataMergerShape.In0);
            //Splitter analogs to analog splitter.
            builder.From(channelDataSplitterShape.Out1).To(analogSplitterShape.In);
            //=====AdditionalDigitalFlows=====
            if (additionalDigitalFlows.Count > 0) {
                // Additonal Digital Merger
                var additionalDigitalMerger = new ZipWithN<DataChannel<bool>, IImmutableList<DataChannel<bool>>>(channel => channel, additionalDigitalFlows.Count);
                var additionalDigitalMergerShape = builder.Add(additionalDigitalMerger);
                //Combine the input digitals with the generated additional digitals
                var digitalMerger = ZipWith.Apply<List<DataChannel<bool>>, ImmutableList<DataChannel<bool>>, IReadOnlyList<DataChannel<bool>>>((channel1, channel2) => channel1.Concat(channel2).ToList());
                var digitalMergerShape = builder.Add(digitalMerger);
                //Splitter digitals to digital merger.
                builder.From(channelDataSplitterShape.Out2).To(digitalMergerShape.In0);
                // Merge all additional flows together.
                for (int i = 0; i < additionalDigitalFlows.Count; i++) {
                    builder.From(additionalDigitalFlows[i]).To(additionalDigitalMergerShape.In(i));
                }
                //Additional digitals to digital merger
                builder.From(additionalDigitalMergerShape.Out).To(digitalMergerShape.In1);
                //=====DigitalMerger=====
                //Digital merger to channel data merger
                builder.From(digitalMergerShape.Out).To(channelDataMergerShape.In2);
            } else {
                // Splitter digitals to final merger.
                builder.From(channelDataSplitterShape.Out2).To(channelDataMergerShape.In2);
            }
            // Analog merger to final merger.
            builder.From(analogMergerShape.Out).To(channelDataMergerShape.In1);
            //=====Merger=====
            //Channel Data Merger to sink
            builder.From(channelDataMergerShape.Out).To(sinkShape);
        }
        return (ClosedShape.Instance);
    });
    return (RunnableGraph.FromGraph(graph));
}
// Verifies a chat-style dynamic pub/sub built from MergeHub + BroadcastHub: members
// join with their own publisher source and subscriber sink, every message is echoed
// to all members, and a per-connection kill switch removes one member without
// disturbing the others.
public void PublishSubscribeOnHubsAddsAndRemovesPublishersAndSubscribers() {
    const int publisherMaxCount = 16;
    const int subscriberMaxCount = 16;
    const int bufferSize = 4;

    //                Source ToMat Bidi
    // +------------+               +------------+
    // |  MergeHub  |               |BroadcastHub|
    // |   Source   | ~> Message ~> |    Sink    |
    // |            |               |            |
    // +------------+               +------------+
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    // ~~    (Sink<Message>, Source<Message>)   ~~
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    (Sink<string, NotUsed> mergeSink, Source<string, NotUsed> mergeSource) =
        MergeHub.Source<string>(perProducerBufferSize: publisherMaxCount)
            .ToMaterialized(BroadcastHub.Sink<string>(bufferSize: subscriberMaxCount), Keep.Both)
            .Run(_materializer);

    TestProbe sub0 = CreateTestProbe();
    TestProbe sub1 = CreateTestProbe();

    //                 Flow JoinMat Bidi
    // +------------+               +------------+
    // |  FromSink  | ~> Message ~> |KillSwitches| ~> Message
    // |    And     |               |   Single   |
    // |   Source   | <~ Message <~ |    Bidi    | <~ Message
    // +------------+               +------------+
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    // ~~                UniqueKillSwitch                  ~~
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Flow<string, string, UniqueKillSwitch> busFlow =
        Flow.FromSinkAndSource(mergeSink, mergeSource)
            .JoinMaterialized(KillSwitches.SingleBidi<string, string>(), Keep.Right);

    // Member 0 joins: its published messages are echoed back to its own subscriber.
    var (pub0, uniqueKillSwitch0) = Source.ActorRef<string>(bufferSize, OverflowStrategy.Fail)
        .ViaMaterialized(busFlow, Keep.Both)
        .To(Sink.ActorRef<string>(sub0, "complete"))
        .Run(_materializer);
    pub0.Tell("It's chat member 0!");
    sub0.ExpectMsg("It's chat member 0!"); // Echo.
    sub0.ExpectNoMsg(TimeSpan.FromMilliseconds(50));

    // Member 1 joins: from now on both members see member 1's messages.
    var (pub1, uniqueKillSwitch1) = Source.ActorRef<string>(bufferSize, OverflowStrategy.Fail)
        .ViaMaterialized(busFlow, Keep.Both)
        .To(Sink.ActorRef<string>(sub1, "complete"))
        .Run(_materializer);
    pub1.Tell("Hi! It's chat member 1!");
    sub1.ExpectMsg("Hi! It's chat member 1!"); // Echo.
    sub0.ExpectMsg("Hi! It's chat member 1!");

    pub0.Tell("Oh, Hi! Sry, but I gotta go, bye!");
    sub0.ExpectMsg("Oh, Hi! Sry, but I gotta go, bye!"); // Echo.
    // Member 0 leaves via its kill switch; its sink receives the completion message.
    uniqueKillSwitch0.Shutdown(); // Looks like this Shutdown is non-blocking.
    sub0.ExpectMsg("complete", TimeSpan.FromMilliseconds(1000)); // Wait for the running graph to stop.
    sub1.ExpectMsg("Oh, Hi! Sry, but I gotta go, bye!");
    // Member 1 remains fully functional after member 0 left...
    pub1.Tell("Oh, looks like I stayed alone.");
    sub1.ExpectMsg("Oh, looks like I stayed alone."); // Echo.
    // ...and the departed member receives nothing further.
    sub0.ExpectNoMsg();
}