Example #1
        public IStreamInput CreateStreamInput(string streamId)
        {
            Console.WriteLine($"Stream {streamId} started.");

            // these templates provide commonly-combined data, but you can make your own
            var input  = new SessionTelemetryDataInput(streamId, dataFormatClient);
            var output = new SessionTelemetryDataOutput(outputTopic, this.outputDataFormatId, dataFormatClient);

            this.outputFeed = output.DataOutput.BindFeed(""); // bind the default (unnamed) output feed

            // we register the output data format and ATLAS configuration as dependencies of the output session.
            output.SessionOutput.AddSessionDependency(DependencyTypes.DataFormat, this.outputDataFormatId);
            output.SessionOutput.AddSessionDependency(DependencyTypes.AtlasConfiguration, this.outputAtlasConfId);

            // automatically propagate session metadata and lifecycle
            input.LinkToOutput(output.SessionOutput, identifier => identifier + "_Models");

            // we simply forward laps.
            input.LapsInput.LapStarted += (s, e) => output.LapsOutput.SendLap(e.Lap);

            // we bind our model to the default feed, subscribing only to the parameters it needs.
            input.DataInput.BindDefaultFeed("gLat:Chassis", "gLong:Chassis").DataBuffered += this.gTotalModel;

            input.StreamFinished += (s, e) => Console.WriteLine($"Stream {e.StreamId} ended.");

            return input;
        }
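A minimal wiring sketch showing how a factory method like the one above is typically handed to a streaming pipeline. The broker address, topic name, and pipeline calls below are assumptions borrowed from the ReadTData/ReadTSamples examples later in this listing; the fields used by CreateStreamInput (dataFormatClient, outputTopic, outputDataFormatId, ...) are assumed to be initialized elsewhere.
        // Hedged sketch: broker/topic names are placeholders taken from the other examples here.
        public void RunPipeline()
        {
            var client   = new KafkaStreamClient("localhost:9092");                  // connect to the Kafka broker
            var pipeline = client.StreamTopic("data_in").Into(CreateStreamInput);    // invoke the factory above for each child stream

            if (!pipeline.WaitUntilConnected(TimeSpan.FromSeconds(30), CancellationToken.None))
            {
                throw new Exception("Couldn't connect");
            }

            pipeline.WaitUntilIdle(TimeSpan.FromMinutes(5), CancellationToken.None); // exit after 5 minutes of inactivity
            pipeline.Dispose();
        }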
        /// <summary>
        ///     This method creates a stream pipeline for each child stream.
        /// </summary>
        /// <param name="dataFormatClient">The data format client.</param>
        /// <param name="outputTopic">The output topic.</param>
        /// <param name="streamId">The stream identifier.</param>
        /// <returns>Stream pipeline.</returns>
        private IStreamInput CreateStreamInput(DataFormatClient dataFormatClient, IOutputTopic outputTopic, string streamId)
        {
            // these templates provide commonly-combined data, but you can make your own
            var input  = new SessionTelemetryDataInput(streamId, dataFormatClient);
            var output = new SessionTelemetryDataOutput(outputTopic, dataFormatId, dataFormatClient);

            // data is split into named feeds; use the default feed if you don't need that
            var inputFeed  = input.DataInput.BindFeed(InputFeed, CreateInputDataFormat());
            var outputFeed = output.DataOutput.BindFeed(OutputFeed);

            output.SessionOutput.AddSessionDependency(DependencyTypes.AtlasConfiguration, atlasConfId);
            output.SessionOutput.AddSessionDependency(DependencyTypes.DataFormat, dataFormatId);

            // automatically propagate session metadata and lifecycle
            input.LinkToOutput(output.SessionOutput, identifier => identifier + "_" + OutputTopicName);
            input.LapsInput.LapStarted += (s, e) => output.LapsOutput.SendLap(e.Lap);

            // react to data
            inputFeed.DataBuffered += (sender, e) =>
            {
                var data = ProcessData(e.Buffer);

                outputFeed.EnqueueAndSendData(data);
            };

            return input;
        }
        private IStreamInput ReadAndLinkOutput(
            string streamId,
            List <string> parameterIdentifiers,
            Writer writer,
            List <string> outputFeedNames,
            TelemetryDataHandler handler)
        {
            var input = new SessionTelemetryDataInput(streamId, DataFormatClient);

            Debug.WriteLine($"Linking stream {streamId}");
            // automatically propagate session metadata and lifecycle
            input.LinkToOutput(writer.SessionOutput, identifier => identifier + "_" + writer.TopicName);

            // react to data
            input.DataInput.BindDefaultFeed(parameterIdentifiers).DataBuffered += (sender, e) =>
            {
                var data = e.Buffer.GetData();
                handler(data);
                foreach (var outputFeedName in outputFeedNames)
                {
                    writer.Write(outputFeedName, data);
                }
            };

            input.StreamFinished += HandleStreamFinished;
            return input;
        }
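The Writer type used above is not shown in these snippets. A plausible minimal sketch, assuming it simply wraps a SessionTelemetryDataOutput and forwards data to named output feeds via the BindFeed/EnqueueAndSendData calls seen in the CreateStreamInput examples; the TelemetryData and SessionOutput type names are assumptions:
        // Hypothetical Writer wrapper; the real implementation may differ.
        public class Writer
        {
            private readonly SessionTelemetryDataOutput output;

            public Writer(SessionTelemetryDataOutput output, string topicName)
            {
                this.output = output;
                this.TopicName = topicName;
            }

            public string TopicName { get; }

            public SessionOutput SessionOutput => this.output.SessionOutput; // assumed property type

            public void Write(string feedName, TelemetryData data)
            {
                // re-binds the feed on every call for brevity; a real implementation would likely cache the bound feed
                this.output.DataOutput.BindFeed(feedName).EnqueueAndSendData(data);
            }
        }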
Example #4
        private IStreamInput ProcessStream(string streamId, DataFormatClient dataFormatClient)
        {
            Logger.Info($"New stream: {streamId}");
            var input = new SessionTelemetryDataInput(streamId, dataFormatClient);

            input.DataInput.BindDefaultFeed(TopicConfiguration.ParameterIdentifier).DataBuffered += PrintSamples;
            input.StreamFinished += (sender, args) => Console.WriteLine("--------");
            return input;
        }
        private IStreamInput Read(string streamId, TelemetrySamplesHandler handler)
        {
            var input = new SessionTelemetryDataInput(streamId, DataFormatClient);

            input.SamplesInput.AutoBindFeeds((s, e) => // Automatically bind every samples feed to this handler
            {
                handler(e.Data);
            });

            input.StreamFinished += (sender, e) => Trace.WriteLine("Finished"); // Handle the stream finished event

            return input;
        }
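The TelemetrySamplesHandler delegate passed into Read is not defined in these snippets. A minimal handler sketch, reusing the Parameters access pattern from the ReadTSamples example further down; the TelemetrySamples type name is an assumption based on the delegate name:
        // Hypothetical samples handler; the parameter type name is assumed.
        private void PrintSamplesSummary(TelemetrySamples data)
        {
            Trace.WriteLine(data.Parameters.First().Key); // first parameter identifier
            Trace.WriteLine(data.Parameters.Count);       // number of parameters in this batch
        }

        // Usage (the stream id is a placeholder): var input = Read("exampleStreamId", PrintSamplesSummary);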
        private IStreamInput Read(
            string streamId,
            string parameterIdentifier,
            TelemetryDataHandler handler)
        {
            var input = new SessionTelemetryDataInput(streamId, DataFormatClient);

            input.DataInput.BindDefaultFeed(parameterIdentifier).DataBuffered += (sender, e) =>
            {
                handler(e.Buffer.GetData());
            };

            input.StreamFinished += HandleStreamFinished;
            return input;
        }
Example #7
        public void ReadTData()
        {
            const string brokerList           = "localhost:9092";                                                   // The host and port where the Kafka broker is running
            const string groupName            = "dev";                                                              // The group name
            const string topicName            = "data_in";                                                          // The existing topic's name in the Kafka broker. The *_announce topic must exist too; in this case, data_in_announce
            var          dependencyServiceUri = new Uri("http://localhost:8180/api/dependencies/");                 // The URI where the dependency services are running

            var client           = new KafkaStreamClient(brokerList);                                               // Create a new KafkaStreamClient for connecting to Kafka broker
            var dataFormatClient = new DataFormatClient(new HttpDependencyClient(dependencyServiceUri, groupName)); // Create a new DataFormatClient

            var pipeline = client.StreamTopic(topicName).Into(streamId =>                                           // Stream Kafka topic into the handler method
            {
                var input  = new SessionTelemetryDataInput(streamId, dataFormatClient);
                var buffer = input.EventsInput.Buffer;
                input.DataInput.BindDefaultFeed(ParameterId).DataBuffered += (sender, e) => // Bind the incoming feed and take the data
                {
                    if (DateTime.UtcNow >= new DateTime(2020, 2, 17, 11, 45, 9, DateTimeKind.Utc)) // compare in UTC to match the DateTimeKind of the threshold
                    {
                        // read buffered events aligned to a complete 1000 ms window (result unused in this sample)
                        var telemetryData = buffer.GetDataInCompleteWindow(new TimeStepCursor(1000, 0, 0));
                    }
                    var data = e.Buffer.GetData();
                    // In this sample we consume the incoming data and print it
                    var time = data.TimestampsNanos;
                    for (var i = 0; i < data.Parameters.Length; i++)
                    {
                        Trace.WriteLine($"Parameter[{i}]:");
                        var vCar = data.Parameters[i].AvgValues;
                        for (var j = 0; j < time.Length; j++)
                        {
                            var fromMilliseconds = TimeSpan.FromMilliseconds(time[j].NanosToMillis());
                            Trace.WriteLine($"{fromMilliseconds:hh\\:mm\\:ss\\.fff}, {  new string('.', (int)(50 * vCar[j])) }");
                        }
                    }
                };

                input.StreamFinished += (sender, e) => Trace.WriteLine("Finished"); // Handle the stream finished event
                return input;
            });

            if (!pipeline.WaitUntilConnected(TimeSpan.FromSeconds(30), CancellationToken.None)) // Wait until the connection is established
            {
                throw new Exception("Couldn't connect");
            }
            pipeline.WaitUntilFirstStream(TimeSpan.FromMinutes(1), CancellationToken.None); // Wait until the first stream is ready to read.
            pipeline.WaitUntilIdle(TimeSpan.FromMinutes(5), CancellationToken.None);        // Wait for 5 minutes of the pipeline being idle before exit.

            pipeline.Dispose();
        }
        private IStreamInput ReadTData(
            string streamId,
            List <string> parameterIdentifiers,
            TelemetryDataHandler handler)
        {
            var input = new SessionTelemetryDataInput(streamId, DataFormatClient);

            input.DataInput.BindDefaultFeed(parameterIdentifiers).DataBuffered +=
                (sender, e) => // Subscribe to the buffered data event
            {
                handler(e.Buffer.GetData());
            };

            input.StreamFinished += HandleStreamFinished;
            return input;
        }
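Similarly, a handler compatible with the TelemetryDataHandler delegate used by ReadTData and ReadAndLinkOutput could look like the following. The TimestampsNanos/AvgValues access pattern is copied from the ReadTData example above; the TelemetryData type name is an assumption based on the delegate name:
        // Hypothetical data handler; TelemetryData is assumed to be the type returned by e.Buffer.GetData().
        private void PrintAverages(TelemetryData data)
        {
            var time = data.TimestampsNanos;
            for (var i = 0; i < data.Parameters.Length; i++)
            {
                var avgValues = data.Parameters[i].AvgValues;
                for (var j = 0; j < time.Length; j++)
                {
                    var timestamp = TimeSpan.FromMilliseconds(time[j].NanosToMillis());
                    Trace.WriteLine($"{timestamp:hh\\:mm\\:ss\\.fff}: {avgValues[j]}");
                }
            }
        }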
        private IStreamInput ReadAndLinkOutput(
            string streamId,
            string parameterIdentifier,
            Writer writer,
            string outputFeedName,
            TelemetrySamplesHandler handler)
        {
            var input = new SessionTelemetryDataInput(streamId, DataFormatClient);

            Debug.WriteLine($"Linking stream {streamId}");
            // automatically propagate session metadata and lifecycle
            input.LinkToOutput(writer.SessionOutput, identifier => identifier + "_" + writer.TopicName);

            // react to data
            input.SamplesInput.GetFeed(outputFeedName).DataReceived += (s, e) => { handler(e.Data); };

            input.StreamFinished += HandleStreamFinished;
            return input;
        }
Example #10
        public void ReadTSamples()
        {
            ProtobufCodecs.RegisterCodecs(true);                                                                    // Enable the Protobuf codec if the streamed data is Protobuf-encoded

            const string brokerList = "localhost:9092";                                                             // The host and port where the Kafka broker is running
            const string groupName  = "dev";                                                                        // The group name
            const string topicName  = "sample_in";                                                                  // The existing topic's name in the Kafka broker. The *_announce topic must exist too; in this case, sample_in_announce

            var dependencyServiceUri = new Uri("http://localhost:8180/api/dependencies/");                          // The URI where the dependency services are running
            var client           = new KafkaStreamClient(brokerList);                                               // Create a new KafkaStreamClient for connecting to Kafka broker
            var dataFormatClient = new DataFormatClient(new HttpDependencyClient(dependencyServiceUri, groupName)); // Create a new DataFormatClient

            var pipeline = client.StreamTopic(topicName).Into(streamId =>                                           // Stream Kafka topic into the handler method
            {
                var input = new SessionTelemetryDataInput(streamId, dataFormatClient);

                input.SamplesInput.AutoBindFeeds((s, e) =>        // Automatically bind every samples feed to this handler
                {
                    var data = e.Data;                            // The event handler here only takes the samples data
                    Trace.WriteLine(data.Parameters.First().Key); // and prints some information to the debug console
                    Trace.WriteLine(data.Parameters.Count);
                });

                input.StreamFinished += (sender, e) => Trace.WriteLine("Finished"); // Handle the stream finished event
                return input;
            });

            if (!pipeline.WaitUntilConnected(TimeSpan.FromMinutes(2), default(CancellationToken))) // Wait until the connection is established
            {
                throw new Exception("Couldn't connect");
            }

            pipeline.WaitUntilFirstStream(TimeSpan.FromMinutes(5), CancellationToken.None); // Wait until the first stream is ready to read.
            pipeline.WaitUntilIdle(TimeSpan.FromMinutes(5), CancellationToken.None);        // Wait for 5 minutes of the pipeline being idle before exit.

            pipeline.Dispose();
        }
Example #11
        public void ReadEvents()
        {
            const string brokerList           = "localhost:9092";                                                                   // The host and port where the Kafka broker is running
            const string groupName            = "dev";                                                                              // The dependency group name
            const string topicName            = "events_sample";                                                                    // The existing topic's name in the Kafka broker. The *_announce topic must exist too; in this case, events_sample_announce
            var          dependencyServiceUri = new Uri("http://localhost:8180/api/dependencies/");                                 // The URI where the dependency services are running

            var client                   = new KafkaStreamClient(brokerList);                                                       // Create a new KafkaStreamClient for connecting to Kafka broker
            var dataFormatClient         = new DataFormatClient(new HttpDependencyClient(dependencyServiceUri, groupName));         // Create a new DataFormatClient
            var atlasConfigurationClient = new AtlasConfigurationClient(new HttpDependencyClient(dependencyServiceUri, groupName)); // Create a new AtlasConfigurationClient

            var pipeline = client.StreamTopic(topicName).Into(streamId =>                                                           // Stream Kafka topic into the handler method
            {
                var input = new SessionTelemetryDataInput(streamId, dataFormatClient);

                AtlasConfiguration atlasConfiguration = null;

                input.SessionInput.SessionDependenciesChanged += (s, a) =>
                {
                    if (!a.Session.Dependencies.TryGetValue("atlasConfiguration", out var atlasConfigIds))
                    {
                        return;
                    }

                    atlasConfiguration = atlasConfigurationClient.GetAtlasConfiguration(atlasConfigIds[0]); // Taking first atlas configuration for this example
                };

                input.EventsInput.EventsBuffered += (sender, e) => // Subscribe to incoming events
                {
                    if (atlasConfiguration == null)
                    {
                        return;
                    }

                    var events = e.Buffer.GetData(); // read incoming events from buffer

                    // In this sample we consume the incoming events and print them
                    foreach (var ev in events)
                    {
                        var eventDefinition = atlasConfiguration.AppGroups?.First().Value?.Events.GetValueOrDefault(ev.Id);
                        if (eventDefinition == null)
                        {
                            continue;
                        }

                        Console.WriteLine($"- Event: {ev.Id} - {eventDefinition.Description} - Priority: {eventDefinition.Priority.ToString()} - Value: {ev.Values?.First()}");
                    }
                };

                input.StreamFinished += (sender, e) => Trace.WriteLine("Finished"); // Handle the stream finished event
                return input;
            });

            if (!pipeline.WaitUntilConnected(TimeSpan.FromSeconds(30), CancellationToken.None)) // Wait until the connection is established
            {
                throw new Exception("Couldn't connect");
            }
            pipeline.WaitUntilFirstStream(TimeSpan.FromMinutes(1), CancellationToken.None); // Wait until the first stream is ready to read.
            pipeline.WaitUntilIdle(TimeSpan.FromMinutes(5), CancellationToken.None);        // Wait for 5 minutes of the pipeline being idle before exit.

            pipeline.Dispose();
        }