Example No. 1
 private static void EncodeFetch(KafkaEncoder encoder, Fetch fetch)
 {
     encoder
     .Write(fetch.PartitionId)
     .Write(fetch.Offset)
     .Write(fetch.MaxBytes);
 }
Example No. 2
        public void Int16Tests(Int16 number, Byte[] expectedBytes)
        {
            var buffer  = new byte[2];
            var encoder = new KafkaEncoder(buffer);

            encoder.Write(number);
            Assert.That(buffer, Is.EqualTo(expectedBytes));
        }
Example No. 3
 internal KafkaEncoder EncodeHeader(KafkaEncoder encoder)
 {
     return encoder
            .Write((Int16)apiKey)
            .Write(ApiVersion)
            .Write(CorrelationId)
            .Write(ClientId);
 }
Example No. 4
        public void StringTests(String value, Byte[] expectedBytes)
        {
            var buffer  = new byte[expectedBytes.Length];
            var encoder = new KafkaEncoder(buffer);

            encoder.Write(value);
            Assert.That(encoder.Offset, Is.EqualTo(expectedBytes.Length));
            Assert.That(buffer, Is.EqualTo(expectedBytes));
        }
Example No. 5
        public void EnsureHeaderShouldPackCorrectByteLengths()
        {
            var result = KafkaEncoder.EncodeRequestBytes(new RequestContext(123456789, clientId: "test"), new ApiVersionsRequest());

            var withoutLength = new byte[result.Length - 4];

            Buffer.BlockCopy(result, 4, withoutLength, 0, result.Length - 4);
            Assert.That(withoutLength.Length, Is.EqualTo(14));
            Assert.That(withoutLength, Is.EqualTo(new byte[] { 0, 18, 0, 0, 7, 91, 205, 21, 0, 4, 116, 101, 115, 116 }));
        }
Example No. 6
 /// <summary>
 /// Encodes a collection of messages into one byte[].  Encoded in order of list.
 /// </summary>
 /// <param name="messages">The collection of messages to encode together.</param>
 /// <returns>The encoder, with the encoded messages written to its buffer.</returns>
 internal static KafkaEncoder EncodeMessageSet(KafkaEncoder encoder, IEnumerable<Message> messages)
 {
     foreach (var message in messages)
     {
         encoder.Write(InitialMessageOffset);
         var marker = encoder.PrepareForLength();
         EncodeMessage(message, encoder)
         .WriteLength(marker);
     }
     return encoder;
 }
Example No. 7
        public void EnsureHeaderShouldPackCorrectByteLengths()
        {
            var encoder = new KafkaEncoder(new byte[14]);
            var request = new FetchRequest {
                ClientId = "test", CorrelationId = 123456789
            };

            request.EncodeHeader(encoder);
            Assert.That(encoder.Offset, Is.EqualTo(14));
            Assert.That(encoder.Buffer, Is.EqualTo(new byte[] { 0, 1, 0, 0, 7, 91, 205, 21, 0, 4, 116, 101, 115, 116 }));
        }
Example No. 8
 /// <summary>
 /// Encodes a collection of messages into one byte[].  Encoded in order of list.
 /// </summary>
 /// <param name="messages">The collection of messages to encode together.</param>
 /// <returns>The encoder, with the encoded messages written to its buffer.</returns>
 internal static KafkaEncoder EncodeMessageSet(KafkaEncoder encoder, IEnumerable<Message> messages)
 {
     foreach (var message in messages)
     {
         encoder.Write(InitialMessageOffset);
         var marker = encoder.PrepareForLength();
         EncodeMessage(message, encoder)
             .WriteLength(marker);
     }
     return encoder;
 }
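For reference, a minimal usage sketch of EncodeMessageSet, modelled on the EncodeMessageSetEncodesMultipleMessages test shown later in this collection; the 1024-byte buffer size is an arbitrary assumption, not a value taken from the library.

    // Encode three small messages into one contiguous buffer.
    var messages = new[]
    {
        new Message("0", "1"),
        new Message("1", "1"),
        new Message("2", "1")
    };
    var buffer  = new byte[1024];           // assumed large enough for the encoded set
    var encoder = new KafkaEncoder(buffer);

    Message.EncodeMessageSet(encoder, messages);
    // encoder.Offset now reports how many bytes of buffer were actually written.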
Example No. 9
        private static void EncodeCommit(KafkaEncoder encoder, int apiVersion, OffsetCommit commit)
        {
            encoder
            .Write(commit.PartitionId)
            .Write(commit.Offset);

            if (apiVersion == 1)
            {
                encoder.Write(commit.TimeStamp);
            }
            encoder.Write(commit.Metadata);
        }
Example No. 10
        private static KafkaEncoder EncodeOffsetRequest(OffsetRequest request, KafkaEncoder encoder)
        {
            request
            .EncodeHeader(encoder)
            .Write(ReplicaId);

            if (request.Offsets == null)
            {
                encoder.Write(0);
            }
            else if (request.Offsets.Count == 1)
            {
                // shortcut the single request
                var offset = request.Offsets[0];
                encoder
                .Write(1)
                .Write(offset.Topic)
                .Write(1)
                .Write(offset.PartitionId)
                .Write(offset.Time)
                .Write(offset.MaxOffsets);
            }
            else
            {
                // Full request
                var topicGroups = new Dictionary<string, List<Offset>>();
                foreach (var offset in request.Offsets)
                {
                    var offsets = topicGroups.GetOrCreate(offset.Topic, () => new List<Offset>(request.Offsets.Count));
                    offsets.Add(offset);
                }

                encoder.Write(topicGroups.Count);
                foreach (var kvp in topicGroups)
                {
                    var topic   = kvp.Key;
                    var offsets = kvp.Value;

                    encoder
                    .Write(topic)
                    .Write(offsets.Count);

                    foreach (var offset in offsets)
                    {
                        encoder
                        .Write(offset.PartitionId)
                        .Write(offset.Time)
                        .Write(offset.MaxOffsets);
                    }
                }
            }
            return encoder;
        }
Example No. 11
        /// <summary>
        /// Encodes a message object to byte[]
        /// </summary>
        /// <param name="message">Message data to encode.</param>
        /// <returns>The encoder, with the encoded message written to its buffer.</returns>
        /// <remarks>
        /// Format:
        /// Crc (Int32), MagicByte (Byte), Attribute (Byte), Key (Byte[]), Value (Byte[])
        /// </remarks>
        internal static KafkaEncoder EncodeMessage(Message message, KafkaEncoder encoder)
        {
            var marker = encoder.PrepareForCrc();

            encoder
            .Write(message.MagicNumber)
            .Write(message.Attribute)
            .Write(message.Key)
            .Write(message.Value)
            .CalculateCrc(marker);

            return encoder;
        }
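A minimal round-trip sketch for EncodeMessage, adapted from the EnsureMessageEncodeAndDecodeAreCompatible and CRC tests shown later in this collection; the 1024-byte buffer size is an arbitrary assumption.

    var message = new Message(value: "kafka test message.", key: "test");
    var buffer  = new byte[1024];
    var encoder = new KafkaEncoder(buffer);

    // Writes Crc (Int32), MagicByte, Attribute, Key and Value into buffer.
    Message.EncodeMessage(message, encoder);

    var decoder = new KafkaDecoder(buffer);
    var decoded = Message.DecodeMessage(0, 0, decoder, encoder.Offset);
    // decoded.Key and decoded.Value now match the original message.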
Example No. 12
        private static KafkaEncoder EncodeFetchRequest(FetchRequest request, KafkaEncoder encoder)
        {
            request
            .EncodeHeader(encoder)
            .Write(ReplicaId)
            .Write(request.MaxWaitTime)
            .Write(request.MinBytes);

            if (request.Fetches == null)
            {
                // no topics
                encoder.Write(0);
            }
            else if (request.Fetches.Count == 1)
            {
                // single topic/partition - quick mode
                var fetch = request.Fetches[0];
                encoder
                .Write(1)
                .Write(fetch.Topic)
                .Write(1);

                EncodeFetch(encoder, fetch);
            }
            else
            {
                // Multiple topics/partitions - slower mode
                var topicGroups = new Dictionary<string, List<Fetch>>();
                foreach (var fetch in request.Fetches)
                {
                    var fetchList = topicGroups.GetOrCreate(fetch.Topic, () => new List<Fetch>(request.Fetches.Count));
                    fetchList.Add(fetch);
                }

                encoder.Write(topicGroups.Count);
                foreach (var topicGroupKvp in topicGroups)
                {
                    var topicGroup = topicGroupKvp.Key;
                    var fetches    = topicGroupKvp.Value;
                    encoder
                    .Write(topicGroup)
                    .Write(fetches.Count);
                    foreach (var fetch in fetches)
                    {
                        EncodeFetch(encoder, fetch);
                    }
                }
            }
            return encoder;
        }
Example No. 13
        private static KafkaEncoder EncodeProduceRequest(ProduceRequest request, KafkaEncoder encoder)
        {
            request.EncodeHeader(encoder)
            .Write(request.Acks)
            .Write(request.TimeoutMS);

            if (request.Payload == null)
            {
                encoder.Write(0);
            }
            else if (request.Payload.Count == 1)
            {
                // Short cut single request
                var payload = request.Payload[0];
                encoder
                .Write(1)
                .Write(payload.Topic)
                .Write(1);

                WritePayload(encoder, payload);
            }
            else
            {
                // More complex
                var topicGroups = new Dictionary<string, List<Payload>>();
                foreach (var payload in request.Payload)
                {
                    var payloads = topicGroups.GetOrCreate(payload.Topic, () => new List<Payload>(request.Payload.Count));
                    payloads.Add(payload);
                }

                encoder.Write(topicGroups.Count);
                foreach (var kvp in topicGroups)
                {
                    var topic    = kvp.Key;
                    var payloads = kvp.Value;

                    encoder
                    .Write(topic)
                    .Write(payloads.Count);

                    foreach (var payload in payloads)
                    {
                        WritePayload(encoder, payload);
                    }
                }
            }
            return encoder;
        }
Example No. 14
        public void DecodeMessageShouldThrowWhenCrcFails()
        {
            Assert.Throws(Is.TypeOf<FailCrcCheckException>(), () =>
            {
                var testMessage = new Message(value: "kafka test message.", key: "test");
                var buffer      = new byte[1024];
                var encoder     = new KafkaEncoder(buffer);

                Message.EncodeMessage(testMessage, encoder);
                buffer[0] += 1;

                var decoder = new KafkaDecoder(buffer, 0, encoder.Offset);
                var result  = Message.DecodeMessage(0, 0, decoder, encoder.Offset);
            });
        }
Example No. 15
        public void EnsureMessageEncodeAndDecodeAreCompatible(string key, string value)
        {
            var testMessage = new Message(key: key, value: value);

            var buffer  = new byte[1024];
            var encoder = new KafkaEncoder(buffer);

            Message.EncodeMessage(testMessage, encoder);

            var decoder = new KafkaDecoder(buffer);
            var result  = Message.DecodeMessage(0, 0, decoder, encoder.Offset);

            Assert.That(testMessage.Key, Is.EqualTo(result.Key));
            Assert.That(testMessage.Value, Is.EqualTo(result.Value));
        }
Example No. 16
        /// <summary>
        /// Send a kafka payload to the server and receive a task that completes when the response is received.
        /// </summary>
        /// <typeparam name="T">A Kafka response object returned by the decode function.</typeparam>
        /// <param name="request">The IRequest to send to the kafka servers.</param>
        /// <param name="context">The context for the request.</param>
        /// <param name="token">Cancellation token used to cancel the transfer.</param>
        /// <returns>The decoded response object.</returns>
        public async Task<T> SendAsync<T>(IRequest<T> request, CancellationToken token, IRequestContext context = null) where T : class, IResponse
        {
            var version = context?.ApiVersion;

            if (!version.HasValue)
            {
                version = await GetVersionAsync(request.ApiKey, token).ConfigureAwait(false);
            }
            context = new RequestContext(NextCorrelationId(), version, context?.ClientId, context?.Encoders ?? _configuration.Encoders, context?.ProtocolType ?? request.ProtocolType, context?.OnProduceRequestMessages ?? _configuration.OnProduceRequestMessages);

            var payload = KafkaEncoder.Encode(context, request);

            _log.Info(() => LogEvent.Create($"Sending {request.ApiKey} with correlation id {context.CorrelationId} (v {version.GetValueOrDefault()}, {payload.Buffer.Length} bytes) to {Endpoint}"));
            _log.Debug(() => LogEvent.Create($"-----> {request.ApiKey} with correlation id {context.CorrelationId} to {Endpoint}\n{request.ToFormattedString()}"));
            if (!request.ExpectResponse)
            {
                await _socket.WriteAsync(payload, token).ConfigureAwait(false);

                return default(T);
            }

            using (var asyncRequest = new AsyncRequestItem(context.CorrelationId, request.ApiKey, _configuration.RequestTimeout)) {
                try {
                    AddAsyncRequestItemToResponseQueue(asyncRequest);
                    ExceptionDispatchInfo exceptionDispatchInfo = null;

                    try {
                        await _socket.WriteAsync(payload, token).ConfigureAwait(false);
                    } catch (Exception ex) {
                        exceptionDispatchInfo = ExceptionDispatchInfo.Capture(ex);
                    }

                    asyncRequest.MarkRequestAsSent(exceptionDispatchInfo, TriggerMessageTimeout);
                } catch (OperationCanceledException) {
                    TriggerMessageTimeout(asyncRequest);
                }

                var response = await asyncRequest.ReceiveTask.Task.ThrowIfCancellationRequested(token).ConfigureAwait(false);

                _log.Info(() => LogEvent.Create($"Receiving {request.ApiKey} with correlation id {context.CorrelationId} (v {version.GetValueOrDefault()}, {response.Length} bytes) from {Endpoint}"));
                var result = KafkaEncoder.Decode<T>(context, response);
                _log.Debug(() => LogEvent.Create($"<------- {request.ApiKey} with correlation id {context.CorrelationId} from {Endpoint}\n{result.ToFormattedString()}"));
                return result;
            }
        }
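A hedged usage sketch for SendAsync. The field name _connection, the assumption that MetadataRequest implements IRequest<MetadataResponse>, and its parameterless constructor are illustrative guesses, not taken from the library.

    // Hypothetical caller; _connection holds an instance of the class that defines SendAsync above.
    private async Task<MetadataResponse> GetMetadataAsync(CancellationToken token)
    {
        // EncodeMetadataRequest (Example No. 19) writes a zero topic count when Topics is null,
        // so a bare request is still a valid wire message.
        return await _connection.SendAsync(new MetadataRequest(), token);
    }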
Example No. 17
        private static void WritePayload(KafkaEncoder encoder, Payload payload)
        {
            encoder
            .Write(payload.Partition);

            var marker = encoder.PrepareForLength();

            switch (payload.Codec)
            {
            case MessageCodec.CodecNone:
                Message.EncodeMessageSet(encoder, payload.Messages);
                break;

            default:
                throw new NotSupportedException(string.Format("Codec type of {0} is not supported.", payload.Codec));
            }
            encoder.WriteLength(marker);
        }
Example No. 18
        public void WhenMessageIsTruncatedThenBufferUnderRunExceptionIsThrown()
        {
            Assert.Throws<BufferUnderRunException>(() =>
            {
                // arrange
                var offset       = (Int64)0;
                var message      = new Byte[] { };
                var messageSize  = 5;
                var payloadBytes = new byte[16];
                var encoder      = new KafkaEncoder(payloadBytes);
                encoder.Write(offset);
                encoder.Write(messageSize);
                encoder.Write(message);

                var decoder = new KafkaDecoder(payloadBytes);

                Message.DecodeMessageSet(0, decoder, payloadBytes.Length);
            });
        }
Example No. 19
        private static KafkaEncoder EncodeMetadataRequest(MetadataRequest request, KafkaEncoder encoder)
        {
            request
            .EncodeHeader(encoder);

            if (request.Topics == null)
            {
                encoder.Write(0);
            }
            else
            {
                encoder.Write(request.Topics.Count);
                foreach (var topic in request.Topics)
                {
                    encoder.Write(topic);
                }
            }

            return encoder;
        }
Example No. 20
        public static void AssertCanEncodeDecodeResponse<T>(this T response, short version, IProtocolTypeEncoder encoder = null) where T : class, IResponse
        {
            var encoders = ImmutableDictionary<string, IProtocolTypeEncoder>.Empty;

            if (encoder != null)
            {
                encoders = encoders.Add(encoder.Type, encoder);
            }

            var context = new RequestContext(16, version, "Test-Response", encoders, encoder?.Type);
            var data    = KafkaDecoder.EncodeResponseBytes(context, response);
            var decoded = KafkaEncoder.Decode<T>(context, data, true);

            if (!response.Equals(decoded))
            {
                var original = response.ToFormattedString();
                var final    = decoded.ToFormattedString();
                Console.WriteLine($"Original\n{original}\nFinal\n{final}");
                Assert.That(final, Is.EqualTo(original));
                Assert.Fail("Not equal, although strings suggest they are?");
            }
        }
Example No. 21
        public void EncodeMessageSetEncodesMultipleMessages()
        {
            //expected generated from python library
            var expected = new byte[]
            {
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 45, 70, 24, 62, 0, 0, 0, 0, 0, 1, 49, 0, 0, 0, 1, 48, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 16, 90, 65, 40, 168, 0, 0, 0, 0, 0, 1, 49, 0, 0, 0, 1, 49, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 16, 195, 72, 121, 18, 0, 0, 0, 0, 0, 1, 49, 0, 0, 0, 1, 50
            };

            var messages = new[]
            {
                new Message("0", "1"),
                new Message("1", "1"),
                new Message("2", "1")
            };

            var buffer  = new byte[expected.Length];
            var encoder = new KafkaEncoder(buffer);

            Message.EncodeMessageSet(encoder, messages);

            Assert.That(buffer, Is.EqualTo(expected));
        }
Example No. 22
 internal override KafkaEncoder Encode(KafkaEncoder encoder)
 {
     return EncodeFetchRequest(this, encoder);
 }
Example No. 23
        /// <summary>
        /// Encodes a message object to byte[]
        /// </summary>
        /// <param name="message">Message data to encode.</param>
        /// <returns>The encoder, with the encoded message written to its buffer.</returns>
        /// <remarks>
        /// Format:
        /// Crc (Int32), MagicByte (Byte), Attribute (Byte), Key (Byte[]), Value (Byte[])
        /// </remarks>
        internal static KafkaEncoder EncodeMessage(Message message, KafkaEncoder encoder)
        {
            var marker = encoder.PrepareForCrc();
            encoder
                .Write(message.MagicNumber)
                .Write(message.Attribute)
                .Write(message.Key)
                .Write(message.Value)
                .CalculateCrc(marker);

            return encoder;
        }
Example No. 24
 /// <summary>
 /// Encode this request into the Kafka wire protocol.
 /// </summary>
 /// <param name="encoder">Encoder to use</param>
 internal abstract KafkaEncoder Encode(KafkaEncoder encoder);
Example No. 25
        private static KafkaEncoder EncodeOffsetFetchRequest(OffsetFetchRequest request, KafkaEncoder encoder)
        {
            request
            .EncodeHeader(encoder)
            .Write(request.ConsumerGroup);

            if (request.Topics == null)
            {
                // nothing to fetch
                encoder.Write(0);
            }
            else if (request.Topics.Count == 1)
            {
                // Short cut single instance request
                var fetch = request.Topics[0];
                encoder
                .Write(1)
                .Write(fetch.Topic)
                .Write(1)
                .Write(fetch.PartitionId);
            }
            else
            {
                // more complex
                var topicGroups = new Dictionary<string, List<int>>();
                foreach (var fetch in request.Topics)
                {
                    var partitions = topicGroups.GetOrCreate(fetch.Topic, () => new List<int>(request.Topics.Count));
                    partitions.Add(fetch.PartitionId);
                }

                encoder.Write(topicGroups.Count);
                foreach (var kvp in topicGroups)
                {
                    var topic      = kvp.Key;
                    var partitions = kvp.Value;
                    encoder
                    .Write(topic)
                    .Write(partitions.Count);
                    foreach (var fetch in partitions)
                    {
                        encoder.Write(fetch);
                    }
                }
            }

            return encoder;
        }
Example No. 26
 private static KafkaEncoder EncodeConsumerMetadataRequest(ConsumerMetadataRequest request, KafkaEncoder encoder)
 {
     return request
            .EncodeHeader(encoder)
            .Write(request.ConsumerGroup);
 }
Example No. 27
 internal override KafkaEncoder Encode(KafkaEncoder encoder)
 {
     return EncodeConsumerMetadataRequest(this, encoder);
 }
Example No. 28
 internal override KafkaEncoder Encode(KafkaEncoder encoder)
 {
     return EncodeProduceRequest(this, encoder);
 }
Example No. 29
        public void MetadataResponseShouldDecode()
        {
            var response = KafkaEncoder.Decode<MetadataResponse>(new RequestContext(1), MessageHelper.CreateMetadataResponse(1, "Test").Skip(4).ToArray());

            Assert.That(response.Topics[0].TopicName, Is.EqualTo("Test"));
        }
Example No. 30
        private static KafkaEncoder EncodeOffsetCommitRequest(OffsetCommitRequest request, KafkaEncoder encoder)
        {
            request
            .EncodeHeader(encoder)
            .Write(request.ConsumerGroup);

            if (request.ApiVersion == 1)
            {
                encoder
                .Write(request.ConsumerGroupGenerationId)
                .Write(request.ConsumerId);
            }

            if (request.OffsetCommits == null)
            {
                // Nothing to commit
                encoder.Write(0);
            }
            else if (request.OffsetCommits.Count == 1)
            {
                var commit = request.OffsetCommits[0];
                // Shortcut the single version
                encoder
                .Write(1)
                .Write(commit.Topic)
                .Write(1);

                EncodeCommit(encoder, request.ApiVersion, commit);
            }
            else
            {
                // Complete complex request
                var topicGroups = new Dictionary<string, List<OffsetCommit>>();
                foreach (var commit in request.OffsetCommits)
                {
                    var topicGroup = topicGroups.GetOrCreate(commit.Topic, () => new List<OffsetCommit>(request.OffsetCommits.Count));
                    topicGroup.Add(commit);
                }

                encoder.Write(topicGroups.Count);
                foreach (var topicGroupKvp in topicGroups)
                {
                    var topic   = topicGroupKvp.Key;
                    var commits = topicGroupKvp.Value;
                    encoder
                    .Write(topic)
                    .Write(commits.Count);

                    foreach (var commit in commits)
                    {
                        EncodeCommit(encoder, request.ApiVersion, commit);
                    }
                }
            }
            return encoder;
        }
Example No. 31
 internal override KafkaEncoder Encode(KafkaEncoder encoder)
 {
     return EncodeOffsetCommitRequest(this, encoder);
 }