        /// <summary>
        ///     Listens to the input topic stream in a loop and produces new data into the output topic.
        /// </summary>
        public Task Run(CancellationToken cancellationToken = default(CancellationToken))
        {
            return Task.Factory.StartNew(() =>
            {
                // TODO: Replace this with dependency service REST API call.
                var dependencyClient = new HttpDependencyClient(new Uri("http://localhost:8180/api/dependencies/"), "dev", false);

                var dataFormatClient = new DataFormatClient(dependencyClient); // would be a web service in production
                dataFormatId = dataFormatClient.PutAndIdentifyDataFormat(CreateOutputDataFormat());

                var acClient = new AtlasConfigurationClient(dependencyClient);

                var atlasConfiguration = CreateAtlasConfiguration();
                atlasConfId = acClient.PutAndIdentifyAtlasConfiguration(atlasConfiguration);

                using (var client = new KafkaStreamClient(BrokerList))
                {
                    client.ConsumerGroup = ConsumerGroup;

                    using (var enrichTopic = client.OpenOutputTopic(OutputTopicName))
                        using (var pipeline = CreateStreamPipeline(client, dataFormatClient, enrichTopic))
                        {
                            cancellationToken.WaitHandle.WaitOne();
                            pipeline.Drain();
                            pipeline.WaitUntilStopped(DrainTimeout, default(CancellationToken));
                        }
                }
            });
        }
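
        // The CreateOutputDataFormat and CreateAtlasConfiguration helpers called above are defined
        // elsewhere in the sample and are not part of this listing. Purely as an illustration,
        // a CreateOutputDataFormat helper could build a single-parameter feed the same way
        // Example #10 below does inline; the parameter name, frequency and return type here are
        // assumptions borrowed from that example, not the sample's actual values.
        private static DataFormat CreateOutputDataFormat()
        {
            return DataFormat.DefineFeed()
                   .Parameter("gTotal:vTag")
                   .AtFrequency(100)
                   .BuildFormat();
        }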
        private OutputTopicWrapper CreateOutputTopicWrapper()
        {
            OutputTopicWrapper ForKafka()
            {
                var client = new KafkaStreamClient(KafkaConfigForSample.BrokerList);
                var topic  = client.OpenOutputTopic(TopicConfiguration.Topic);

                return new OutputTopicWrapper(client, topic);
            }

            OutputTopicWrapper ForMqtt()
            {
                var client = new MqttStreamClient(MqttConnectionConfigForSample.Current);
                var topic  = client.OpenOutputTopic(TopicConfiguration.Topic);

                return new OutputTopicWrapper(client, topic);
            }

            switch (Configuration.SelectedTransport)
            {
            case StreamingTransport.Kafka:
                return ForKafka();

            case StreamingTransport.Mqtt:
                return ForMqtt();

            default:
                throw new NotSupportedException();
            }
        }
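
        // OutputTopicWrapper itself is not included in this listing. Below is a minimal sketch of
        // what such a wrapper could look like, assuming it only keeps the stream client and its
        // output topic alive together and disposes them in reverse order; the member types are
        // assumptions, not the sample's actual declaration.
        public sealed class OutputTopicWrapper : IDisposable
        {
            private readonly IDisposable client; // KafkaStreamClient or MqttStreamClient (both assumed disposable)

            public OutputTopicWrapper(IDisposable client, IOutputTopic topic)
            {
                this.client = client;
                Topic       = topic;
            }

            public IOutputTopic Topic { get; }

            public void Dispose()
            {
                (Topic as IDisposable)?.Dispose(); // topics returned by OpenOutputTopic are used in using blocks above
                client?.Dispose();
            }
        }
        // The StreamPipelineWrapper used in Example #4 below would follow the same pattern around
        // the pipeline returned by StreamTopic(...).Into(...).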
Example #3
        public void WriteEvents()
        {
            const string brokerList           = "localhost:9092";                                                                               // The host and port where the Kafka broker is running
            const string groupName            = "dev";                                                                                          // The dependency group name
            const string topicName            = "events_sample";                                                                                // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case events_sample_announce
            var          dependencyServiceUri = new Uri("http://localhost:8180/api/dependencies/");                                             // The URI where the dependency services are running

            var client               = new KafkaStreamClient(brokerList);                                                                       // Create a new KafkaStreamClient for connecting to Kafka broker
            var dataFormatClient     = new DataFormatClient(new HttpDependencyClient(dependencyServiceUri, groupName));                         // Create a new DataFormatClient
            var httpDependencyClient = new HttpDependencyClient(dependencyServiceUri, groupName);                                               // DependencyClient stores the Data format, Atlas Configuration

            var atlasConfigurationId = new AtlasConfigurationClient(httpDependencyClient).PutAndIdentifyAtlasConfiguration(AtlasConfiguration); // Unique ID created for the AtlasConfiguration
            var dataFormat           = DataFormat.DefineFeed().BuildFormat();                                                                   // Create a data format for the default feed (events need no parameters)
            var dataFormatId         = dataFormatClient.PutAndIdentifyDataFormat(dataFormat);                                                   // Unique ID created for the Data Format

            using (var outputTopic = client.OpenOutputTopic(topicName))                                                                         // Open a KafkaOutputTopic
            {
                var output = new SessionTelemetryDataOutput(outputTopic, dataFormatId, dataFormatClient);
                output.SessionOutput.AddSessionDependency(DependencyTypes.DataFormat, dataFormatId); // Add session dependencies to the output
                output.SessionOutput.AddSessionDependency(DependencyTypes.AtlasConfiguration, atlasConfigurationId);

                output.SessionOutput.SessionState      = StreamSessionState.Open;  // set the session state to open
                output.SessionOutput.SessionStart      = DateTime.Now;             // set the session start to current time
                output.SessionOutput.SessionIdentifier = "events_" + DateTime.Now; // set a custom session identifier
                output.SessionOutput.SendSession();

                var events = GenerateEvents(20, (DateTime)output.SessionOutput.SessionStart);  // Generate some events data
                var tasks  = events.Select(ev => output.EventsOutput.SendEvent(ev)).ToArray(); // enqueue and send the events to the output through the EventsOutput
                Task.WaitAll(tasks);

                output.SessionOutput.SessionState = StreamSessionState.Closed; // set session state to closed. In case of any unintended session close, set state to Truncated
                output.SessionOutput.SendSession();                            // send session
            }
        }
Example #4
        private StreamPipelineWrapper CreateStreamPipelineWrapper()
        {
            StreamPipelineWrapper ForKafka()
            {
                var client = new KafkaStreamClient(KafkaConfigForSample.BrokerList);
                var topic  = client.StreamTopic(TopicConfiguration.Topic).Into(streamId => ProcessStream(streamId, dataFormatClient));

                return new StreamPipelineWrapper(client, topic);
            }

            StreamPipelineWrapper ForMqtt()
            {
                var client = new MqttStreamClient(MqttConnectionConfigForSample.Current);
                var topic  = client.StreamTopic(TopicConfiguration.Topic).Into(streamId => ProcessStream(streamId, dataFormatClient));

                return new StreamPipelineWrapper(client, topic);
            }

            switch (Configuration.SelectedTransport)
            {
            case StreamingTransport.Kafka:
                return ForKafka();

            case StreamingTransport.Mqtt:
                return ForMqtt();

            default:
                throw new NotSupportedException();
            }
        }
        public void Run()
        {
            var dependenciesClient       = new HttpDependencyClient(DependenciesUri, "dev");
            var dataFormatClient         = new DataFormatClient(dependenciesClient);
            var atlasConfigurationClient = new AtlasConfigurationClient(dependenciesClient);

            var dataFormatId = dataFormatClient.PutAndIdentifyDataFormat(
                DataFormat.DefineFeed()
                    .Parameters(new List<string> { "Sin(x)", "Cos(x)" })
                    .AtFrequency(Frequency)
                    .BuildFormat());

            var atlasConfigurationId = atlasConfigurationClient.PutAndIdentifyAtlasConfiguration(MakeAtlasConfiguration());

            var tasks = new List<Task>();

            using (var client = new KafkaStreamClient(brokerList))
            {
                foreach (var topicName in topicLists)
                {
                    tasks.Add(Task.Factory.StartNew(() =>
                    {
                        using (var topic = client.OpenOutputTopic(topicName))
                        {
                            GenerateData(topic, dataFormatClient, dataFormatId, atlasConfigurationId);
                            Console.WriteLine("Hit <enter> to exit");
                            Console.ReadLine();
                        }
                    }));
                }

                Task.WaitAll(tasks.ToArray());
            }
        }
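
        // The GenerateData call above is defined elsewhere in the sample. As a rough sketch of the
        // shape it could take, the method below assumes it follows the same session lifecycle as
        // WriteTData in Example #9 further down (open the session, declare its dependencies, push
        // samples through the default feed, close the session). The id parameter types and the
        // BuildSinCosSamples helper are hypothetical; the streaming calls mirror Example #9.
        private void GenerateData(IOutputTopic topic, DataFormatClient dataFormatClient,
                                  string dataFormatId, string atlasConfigurationId)
        {
            var output = new SessionTelemetryDataOutput(topic, dataFormatId, dataFormatClient);
            output.SessionOutput.AddSessionDependency(DependencyTypes.DataFormat, dataFormatId);
            output.SessionOutput.AddSessionDependency(DependencyTypes.AtlasConfiguration, atlasConfigurationId);

            output.SessionOutput.SessionState      = StreamSessionState.Open;
            output.SessionOutput.SessionStart      = DateTime.Now;
            output.SessionOutput.SessionIdentifier = "data_" + DateTime.Now;
            output.SessionOutput.SendSession();

            var telemetryData = BuildSinCosSamples();            // hypothetical helper producing the telemetry samples
            var outputFeed    = output.DataOutput.BindFeed("");  // default feed, as in Example #9
            Task.WaitAll(outputFeed.EnqueueAndSendData(telemetryData));

            output.SessionOutput.SessionState = StreamSessionState.Closed;
            output.SessionOutput.SendSession();
        }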
 public KafkaStreamAdapter(string brokerList, string consumerGroup)
 {
     client = new KafkaStreamClient(brokerList)
     {
         ConsumerGroup = consumerGroup
     };
 }
 private IStreamPipeline CreateStreamPipeline(
     KafkaStreamClient client,
     DataFormatClient dataFormatClient,
     IOutputTopic outputTopic)
 {
     // lambda is called for each stream/session
     return client
            .StreamTopic(InputTopicName)
            .Into(streamId => CreateStreamInput(dataFormatClient, outputTopic, streamId));
 }
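
 // CreateStreamInput is not shown in this listing. The sketch below is a simplified, hypothetical
 // stand-in, assuming the handler consumes the default feed of each incoming stream much like
 // ReadTData in Example #8 below; the streamId parameter is assumed to be a string, the parameter
 // name is borrowed from Example #10, and the write-back to outputTopic is only indicated as a comment.
 private SessionTelemetryDataInput CreateStreamInput(
     DataFormatClient dataFormatClient,
     IOutputTopic outputTopic,
     string streamId)
 {
     var input = new SessionTelemetryDataInput(streamId, dataFormatClient);

     input.DataInput.BindDefaultFeed("gTotal:vTag").DataBuffered += (sender, e) =>
     {
         var data = e.Buffer.GetData(); // consume the buffered samples
         // ... compute derived values and send them to outputTopic via a
         //     SessionTelemetryDataOutput here ...
     };

     input.StreamFinished += (sender, e) => Trace.WriteLine("Finished");
     return input;
 }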
Example #8
        public void ReadTData()
        {
            const string brokerList           = "localhost:9092";                                                   // The host and port where the Kafka broker is running
            const string groupName            = "dev";                                                              // The group name
            const string topicName            = "data_in";                                                          // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case the data_in_announce
            var          dependencyServiceUri = new Uri("http://localhost:8180/api/dependencies/");                 // The URI where the dependency services are running

            var client           = new KafkaStreamClient(brokerList);                                               // Create a new KafkaStreamClient for connecting to Kafka broker
            var dataFormatClient = new DataFormatClient(new HttpDependencyClient(dependencyServiceUri, groupName)); // Create a new DataFormatClient

            var pipeline = client.StreamTopic(topicName).Into(streamId =>                                           // Stream Kafka topic into the handler method
            {
                var input  = new SessionTelemetryDataInput(streamId, dataFormatClient);
                var buffer = input.EventsInput.Buffer;
                input.DataInput.BindDefaultFeed(ParameterId).DataBuffered += (sender, e) => // Bind the incoming feed and take the data
                {
                    // Demonstrates a windowed read of the buffered events after a fixed cut-off date;
                    // the result is not used further in this sample
                    if (DateTime.Now >= new DateTime(2020, 2, 17, 11, 45, 9, DateTimeKind.Utc))
                    {
                        var telemetryData = buffer.GetDataInCompleteWindow(new TimeStepCursor(1000, 0, 0));
                    }
                    var data = e.Buffer.GetData();
                    // In this sample we consume the incoming data and print it
                    var time = data.TimestampsNanos;
                    for (var i = 0; i < data.Parameters.Length; i++)
                    {
                        Trace.WriteLine($"Parameter[{i}]:");
                        var vCar = data.Parameters[i].AvgValues;
                        for (var j = 0; j < time.Length; j++)
                        {
                            var fromMilliseconds = TimeSpan.FromMilliseconds(time[j].NanosToMillis());
                            Trace.WriteLine($"{fromMilliseconds:hh\\:mm\\:ss\\.fff}, {  new string('.', (int)(50 * vCar[j])) }");
                        }
                    }
                };

                input.StreamFinished += (sender, e) => Trace.WriteLine("Finished"); // Handle the stream finished event
                return input;
            });

            if (!pipeline.WaitUntilConnected(TimeSpan.FromSeconds(30), CancellationToken.None)) // Wait until the connection is established
            {
                throw new Exception("Couldn't connect");
            }
            pipeline.WaitUntilFirstStream(TimeSpan.FromMinutes(1), CancellationToken.None); // Wait until the first stream is ready to read.
            pipeline.WaitUntilIdle(TimeSpan.FromMinutes(5), CancellationToken.None);        // Wait for 5 minutes of the pipeline being idle before exit.

            pipeline.Dispose();
        }
Example #9
        public void WriteTData()
        {
            const string brokerList           = "localhost:9092";                                                                               // The host and port where the Kafka broker is running
            const string groupName            = "dev";                                                                                          // The group name
            const string topicName            = "data_in";                                                                                      // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case the data_in_announce
            var          dependencyServiceUri = new Uri("http://localhost:8180/api/dependencies/");                                             // The URI where the dependency services are running

            var client               = new KafkaStreamClient(brokerList);                                                                       // Create a new KafkaStreamClient for connecting to Kafka broker
            var dataFormatClient     = new DataFormatClient(new HttpDependencyClient(dependencyServiceUri, groupName));                         // Create a new DataFormatClient
            var httpDependencyClient = new HttpDependencyClient(dependencyServiceUri, groupName);                                               // DependencyClient stores the Data format, Atlas Configuration

            var atlasConfigurationId = new AtlasConfigurationClient(httpDependencyClient).PutAndIdentifyAtlasConfiguration(AtlasConfiguration); // Unique ID created for the AtlasConfiguration
            var dataFormat           = DataFormat.DefineFeed().Parameter(ParameterId).BuildFormat();                                            // Create a data format containing the parameter, using the parameter id
            var dataFormatId         = dataFormatClient.PutAndIdentifyDataFormat(dataFormat);                                                   // Unique ID created for the Data Format

            using (var outputTopic = client.OpenOutputTopic(topicName))                                                                         // Open a KafkaOutputTopic
            {
                const int sampleCount = 10000;
                var       output      = new SessionTelemetryDataOutput(outputTopic, dataFormatId, dataFormatClient);
                output.SessionOutput.AddSessionDependency(DependencyTypes.DataFormat, dataFormatId); // Add session dependencies to the output
                output.SessionOutput.AddSessionDependency(DependencyTypes.AtlasConfiguration, atlasConfigurationId);

                output.SessionOutput.SessionState         = StreamSessionState.Open; // set the session state to open
                output.SessionOutput.SessionStart         = DateTime.Now;            // set the session start to current time
                output.SessionOutput.SessionDurationNanos = sampleCount * Interval;  // duration should be time elapsed between session start time and last sample time
                output.SessionOutput.SessionIdentifier    = "data_" + DateTime.Now;  // set a custom session identifier
                output.SessionOutput.SendSession();

                var telemetryData = GenerateData(sampleCount, (DateTime)output.SessionOutput.SessionStart); // Generate some telemetry data

                const string feedName   = "";                                                               // As the sample DataFormat uses the default feed, we leave this empty.
                var          outputFeed = output.DataOutput.BindFeed(feedName);                             // bind your feed by its name to the Data Output

                Task.WaitAll(outputFeed.EnqueueAndSendData(telemetryData));                                 // enqueue and send the data to the output through the outputFeed

                output.SessionOutput.SessionState = StreamSessionState.Closed;                              // set session state to closed. In case of any unintended session close, set state to Truncated
                output.SessionOutput.SendSession();                                                         // send session
            }
        }
Example #10
        public void Run(CancellationToken cancellationToken = default(CancellationToken))
        {
            var outputDataFormat = DataFormat.DefineFeed()
                                   .Parameter("gTotal:vTag")
                                   .AtFrequency(100)
                                   .BuildFormat();

            this.dataFormatId = dataFormatClient.PutAndIdentifyDataFormat(outputDataFormat);


            var atlasConfiguration = this.CreateAtlasConfiguration();

            this.atlasConfId = this.acClient.PutAndIdentifyAtlasConfiguration(atlasConfiguration);

            using (var client = new KafkaStreamClient(BrokerList))
                using (var outputTopic = client.OpenOutputTopic(OutputTopicName))
                    using (var pipeline = client.StreamTopic(InputTopicName).Into(streamId => this.CreateStreamPipeline(streamId, outputTopic)))
                    {
                        cancellationToken.WaitHandle.WaitOne();
                        pipeline.Drain();
                        pipeline.WaitUntilStopped(TimeSpan.FromSeconds(1), CancellationToken.None);
                    }
        }
Example #11
        public void ReadTSamples()
        {
            ProtobufCodecs.RegisterCodecs(true);                                                                    // Enable the Protobuf codec if the streamed data is Protobuf encoded

            const string brokerList = "localhost:9092";                                                             // The host and port where the Kafka broker is running
            const string groupName  = "dev";                                                                        // The group name
            const string topicName  = "sample_in";                                                                  // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case the sample_in_announce

            var dependencyServiceUri = new Uri("http://localhost:8180/api/dependencies/");                          // The URI where the dependency services are running
            var client           = new KafkaStreamClient(brokerList);                                               // Create a new KafkaStreamClient for connecting to Kafka broker
            var dataFormatClient = new DataFormatClient(new HttpDependencyClient(dependencyServiceUri, groupName)); // Create a new DataFormatClient

            var pipeline = client.StreamTopic(topicName).Into(streamId =>                                           // Stream Kafka topic into the handler method
            {
                var input = new SessionTelemetryDataInput(streamId, dataFormatClient);

                input.SamplesInput.AutoBindFeeds((s, e) =>        // Bind all feeds of the input to this event handler
                {
                    var data = e.Data;                            // The event handler here only takes the samples data
                    Trace.WriteLine(data.Parameters.First().Key); // and prints some information to the debug console
                    Trace.WriteLine(data.Parameters.Count);
                });

                input.StreamFinished += (sender, e) => Trace.WriteLine("Finished"); // Handle the stream finished event
                return input;
            });

            if (!pipeline.WaitUntilConnected(TimeSpan.FromMinutes(2), default(CancellationToken))) // Wait until the connection is established
            {
                throw new Exception("Couldn't connect");
            }

            pipeline.WaitUntilFirstStream(TimeSpan.FromMinutes(5), CancellationToken.None); // Wait until the first stream is ready to read.
            pipeline.WaitUntilIdle(TimeSpan.FromMinutes(5), CancellationToken.None);        // Wait for 5 minutes of the pipeline being idle before exit.

            pipeline.Dispose();
        }
Example #12
        public void ReadEvents()
        {
            const string brokerList           = "localhost:9092";                                                                   // The host and port where the Kafka broker is running
            const string groupName            = "dev";                                                                              // The dependency group name
            const string topicName            = "events_sample";                                                                    // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case events_sample_announce
            var          dependencyServiceUri = new Uri("http://localhost:8180/api/dependencies/");                                 // The URI where the dependency services are running

            var client                   = new KafkaStreamClient(brokerList);                                                       // Create a new KafkaStreamClient for connecting to Kafka broker
            var dataFormatClient         = new DataFormatClient(new HttpDependencyClient(dependencyServiceUri, groupName));         // Create a new DataFormatClient
            var atlasConfigurationClient = new AtlasConfigurationClient(new HttpDependencyClient(dependencyServiceUri, groupName)); // Create a new AtlasConfigurationClient

            var pipeline = client.StreamTopic(topicName).Into(streamId =>                                                           // Stream Kafka topic into the handler method
            {
                var input = new SessionTelemetryDataInput(streamId, dataFormatClient);

                AtlasConfiguration atlasConfiguration = null;

                input.SessionInput.SessionDependenciesChanged += (s, a) =>
                {
                    if (!a.Session.Dependencies.TryGetValue("atlasConfiguration", out var atlasConfigIds))
                    {
                        return;
                    }

                    atlasConfiguration = atlasConfigurationClient.GetAtlasConfiguration(atlasConfigIds[0]); // Taking first atlas configuration for this example
                };

                input.EventsInput.EventsBuffered += (sender, e) => // Subscribe to incoming events
                {
                    if (atlasConfiguration == null)
                    {
                        return;
                    }

                    var events = e.Buffer.GetData(); // read incoming events from buffer

                    // In this sample we consume the incoming events and print them
                    foreach (var ev in events)
                    {
                        var eventDefinition = atlasConfiguration.AppGroups?.First().Value?.Events.GetValueOrDefault(ev.Id);
                        if (eventDefinition == null)
                        {
                            continue;
                        }

                        Console.WriteLine($"- Event: {ev.Id} - {eventDefinition.Description} - Priority: {eventDefinition.Priority.ToString()} - Value: {ev.Values?.First()}");
                    }
                };

                input.StreamFinished += (sender, e) => Trace.WriteLine("Finished"); // Handle the stream finished event
                return input;
            });

            if (!pipeline.WaitUntilConnected(TimeSpan.FromSeconds(30), CancellationToken.None)) // Wait until the connection is established
            {
                throw new Exception("Couldn't connect");
            }
            pipeline.WaitUntilFirstStream(TimeSpan.FromMinutes(1), CancellationToken.None); // Wait until the first stream is ready to read.
            pipeline.WaitUntilIdle(TimeSpan.FromMinutes(5), CancellationToken.None);        // Wait for 5 minutes of the pipeline being idle before exit.

            pipeline.Dispose();
        }