Example #1
0
        /// <summary>
        /// Streams samples from the sample_in Kafka topic and links them to the sample_out
        /// topic through a Writer, while Write() produces data for the pipeline to consume.
        /// Both topics (and their *_announce companions) must already exist in the broker.
        /// NOTE(review): the statement order matters — the pipeline is opened first, then
        /// after a delay data is produced; see the Sleep comment below.
        /// </summary>
        public static void ReadAndLink()
        {
            var          dependencyUri = new Uri("http://localhost:8180/api/dependencies/");                                                          // The URI where the dependency services are running
            const string brokerList    = "localhost:9092";                                                                                            // The host and port where the Kafka broker is running
            const string groupName     = "dev";                                                                                                       // The group name
            const string readTopicName = "sample_in";                                                                                                 // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case the sample_in_announce
            const string linkTopicName = "sample_out";                                                                                                // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case the sample_out_announce
            var          readAdapter   = new KafkaStreamAdapter(brokerList, "readConsumerGroup");                                                     // The stream adapter used to manage streams and topics in the broker. Consumer group must be unique for each stream.
            var          writeAdapter  = new KafkaStreamAdapter(brokerList, "writeConsumerGroup");                                                    // The stream adapter used to manage streams and topics in the broker. Consumer group must be unique for each stream.
            var          stream        = readAdapter.OpenStreamTopic(readTopicName);                                                                  // Open the topic for streaming.

            using (var reader = new Reader(dependencyUri, groupName, stream))                                                                         // Create a Reader to read from the stream
            {
                using (var outputTopic = writeAdapter.OpenOutputTopic(linkTopicName))                                                                 // Open the output topic, where you want to link the streamed input
                {
                    using (var writer = new Writer(dependencyUri, AtlasConfiguration, GetDataFormat(), groupName, outputTopic))                       // Create a Writer for the output topic and pass
                    {
                        const string outputFeedName = "";                                                                                             // As sample DataFormat uses default feed, we will leave this empty.
                        using (IStreamPipeline pipeline = reader.ReadAndLinkTSamples(GetParameterIds(), Models.TraceSamples, writer, outputFeedName)) // TelemetryDataHandler parameter can be used to handle the data read from the stream.
                        {
                            Thread.Sleep(5000);                                                                                                       // Presumably gives the pipeline time to connect/subscribe before data is produced; the original author noted it "doesn't seem to work" without this — TODO confirm the minimum required delay.
                            Write();                                                                                                                  // Write some data to have something to read while the connection is open.
                        }
                    }
                }
            }
        }
        /// <summary>
        /// Writes five batches of generated TelemetryData into a single session on the
        /// sample_in Kafka topic, one batch per feed declared in the DataFormat.
        /// The topic and its sample_in_announce companion must already exist in the broker.
        /// </summary>
        public static void Write()
        {
            var          dependencyUri = new Uri("http://localhost:8180/api/dependencies/");                           // The URI where the dependency services are running
            const string brokerList    = "localhost:9092";                                                             // The host and port where the Kafka broker is running
            const string groupName     = "dev";                                                                        // The group name
            const string topicName     = "sample_in";                                                                  // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case the sample_in_announce
            var          streamAdapter = new KafkaStreamAdapter(brokerList, "writeConsumerGroup");                     // The stream adapter used to manage streams and topics in the broker. Consumer group must be unique for each stream.

            using (var topicOutput = streamAdapter.OpenOutputTopic(topicName))                                         // Open a KafkaOutputTopic
            {
                var dataFormat = GetDataFormat(FeedNamFrequencyAndParameterList);                                      // Get the data format for our feed, frequency and parameterId pairs
                var feedNames  = dataFormat.Feeds.Keys;                                                                // Feed names must be the same that we just added above

                using (var writer = new Writer(dependencyUri, AtlasConfiguration, dataFormat, groupName, topicOutput)) // Create a Writer object that is used to manage the session and write data
                {
                    writer.OpenSession("sample_" + DateTime.Now);                                                      // The session must be opened before Write

                    for (var i = 0; i < 5; i++)                                                                        // Write 5 times within the session, but different data.
                    {
                        if (writer.SessionStart == null)                                                               // Guard before casting: a null SessionStart would make the cast below throw (same guard as the default-feed Write sample).
                        {
                            continue;
                        }

                        var generatedData = GenerateData(100, (DateTime)writer.SessionStart);                          // Generate some TelemetryData
                        foreach (var feedName in feedNames)
                        {
                            writer.Write(feedName, generatedData); // Write the TelemetryData to each feed
                        }
                    }

                    writer.CloseSession(); // The session must be closed after writing, otherwise it would be marked as Truncated, which is used as an error flag.
                }
            }
        }
Example #3
0
        /// <summary>
        /// Opens a session on the sample_in Kafka topic and writes five batches of generated
        /// TelemetryData to the default feed, then closes the session so it is not flagged Truncated.
        /// </summary>
        public static void Write()
        {
            var dependencyUri = new Uri("http://localhost:8180/api/dependencies/");       // Base address of the dependency services
            const string brokerList = "localhost:9092";                                   // Kafka broker host:port
            const string groupName = "dev";                                               // Group name used for the session
            const string topicName = "sample_in";                                         // Must already exist in the broker, along with sample_in_announce
            var streamAdapter = new KafkaStreamAdapter(brokerList, "writeConsumerGroup"); // Adapter for managing streams/topics; consumer group must be unique per stream

            using (var topicOutput = streamAdapter.OpenOutputTopic(topicName)) // Acquire a Kafka output topic handle
            {
                using (var writer = new Writer(dependencyUri, AtlasConfiguration, GetDataFormat(), groupName, topicOutput)) // The Writer manages the session and performs the writes
                {
                    const string feedName = "";                   // The sample DataFormat uses the default (empty-named) feed
                    writer.OpenSession("sample_" + DateTime.Now); // A session must be open before any Write call

                    for (var iteration = 0; iteration < 5; iteration++) // Five writes within the session, each with freshly generated data
                    {
                        if (writer.SessionStart != null) // Only write once the session start time is available
                        {
                            var telemetryData = GenerateData(100, (DateTime)writer.SessionStart); // Produce some TelemetryData anchored at the session start
                            writer.Write(feedName, telemetryData);                                // Push it to the default feed
                        }
                    }

                    writer.CloseSession(); // Closing marks the session complete; left open it would be flagged Truncated (an error indicator)
                }
            }
        }
Example #4
0
        /// <summary>
        /// Streams TSamples from the sample_in Kafka topic while Write() produces data
        /// for the pipeline to consume. The topic and its sample_in_announce companion
        /// must already exist in the broker.
        /// </summary>
        public static void Read()
        {
            var          dependencyUri = new Uri("http://localhost:8180/api/dependencies/");      // The URI where the dependency services are running
            const string brokerList    = "localhost:9092";                                        // The host and port where the Kafka broker is running
            const string groupName     = "dev";                                                   // The group name
            const string topicName     = "sample_in";                                             // The existing topic's name in the Kafka broker. The *_announce topic name must exist too. In this case the sample_in_announce
            var          streamAdapter = new KafkaStreamAdapter(brokerList, "readConsumerGroup"); // The stream adapter used to manage streams and topics in the broker. Consumer group must be unique for each stream — "readConsumerGroup" keeps this reader distinct from the "writeConsumerGroup" adapter created inside Write().
            var          stream        = streamAdapter.OpenStreamTopic(topicName);                // Open the topic for streaming.

            using (var reader = new Reader(dependencyUri, groupName, stream))                      // Create a Reader to read from the stream
            {
                using (var pipeline = reader.ReadTSamples(GetParameterIds(), Models.TraceSamples)) // TelemetryDataHandler parameter can be used to handle the data read from the stream.
                {
                    Write();                                                                       // Write some data to have something to read while the connection is open.
                                                                                                   // NOTE(review): the ReadAndLink sample sleeps before producing so the pipeline can subscribe first — confirm whether a similar delay is needed here.
                }
            }
        }