Example #1
        /// <summary>
        /// Parse the response bytes.
        /// </summary>
        /// <param name="response">The bytes of the response.</param>
        public override void Parse(byte[] response)
        {
            using (var stream = new MemoryStream(response))
            {
                var reader = new BinaryReader(stream);

                // error code
                var errorCodeBytes = new byte[2];
                reader.Read(errorCodeBytes, 0, 2);
                this.ErrorCode = KafkaProtocolPrimitiveType.GetInt16(errorCodeBytes);

                // coordinator id
                var coordinatorIdBytes = new byte[4];
                reader.Read(coordinatorIdBytes, 0, 4);
                this.CoordinatorId = KafkaProtocolPrimitiveType.GetInt32(coordinatorIdBytes);

                // coordinator host (length-prefixed string)
                var coordinatorHostSizeBytes = new byte[2];
                reader.Read(coordinatorHostSizeBytes, 0, 2);
                var coordinatorHostSize  = KafkaProtocolPrimitiveType.GetInt16(coordinatorHostSizeBytes);
                var coordinatorHostBytes = new byte[coordinatorHostSize];
                reader.Read(coordinatorHostBytes, 0, coordinatorHostSize);
                this.CoordinatorHost = Encoding.UTF8.GetString(coordinatorHostBytes);

                // coordinator port
                var coordinatorPortBytes = new byte[4];
                reader.Read(coordinatorPortBytes, 0, 4);
                this.CoordinatorPort = KafkaProtocolPrimitiveType.GetInt32(coordinatorPortBytes);
            }
        }
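
All of these examples decode fields through KafkaProtocolPrimitiveType, whose implementation is not shown here. The Kafka wire protocol encodes integers in big-endian (network byte order), so a minimal sketch of what GetInt16/GetInt32/GetInt64 might look like on a little-endian host is below; the real class may well differ.

using System;

public static class KafkaProtocolPrimitiveType
{
    // Sketch only: assumes big-endian wire data and reverses the input
    // buffer in place before handing it to BitConverter.
    public static short GetInt16(byte[] bytes)
    {
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(bytes);
        }

        return BitConverter.ToInt16(bytes, 0);
    }

    public static int GetInt32(byte[] bytes)
    {
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(bytes);
        }

        return BitConverter.ToInt32(bytes, 0);
    }

    public static long GetInt64(byte[] bytes)
    {
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(bytes);
        }

        return BitConverter.ToInt64(bytes, 0);
    }
}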
Example #2
        /// <summary>
        /// Parse the response bytes.
        /// </summary>
        /// <param name="response">The bytes of the response.</param>
        public override void Parse(byte[] response)
        {
            //---------------------------------------------------\\
            // OffsetResponse => [TopicName [PartitionOffsets]]
            //  PartitionOffsets => Partition ErrorCode [Offset]
            //  Partition => int32
            //  ErrorCode => int16
            //  Offset => int64
            //---------------------------------------------------\\

            using (var stream = new MemoryStream(response))
            {
                var reader = new BinaryReader(stream);

                // size of topic array
                var topicInfoSizeBytes = new byte[4];
                reader.Read(topicInfoSizeBytes, 0, 4);
                var numberOfTopicInfos = KafkaProtocolPrimitiveType.GetInt32(topicInfoSizeBytes);

                this.TopicInfos = new KafkaProtocolOffsetListResponseTopicInfo[numberOfTopicInfos];
                for (int i = 0; i < numberOfTopicInfos; i++)
                {
                    this.TopicInfos[i] = new KafkaProtocolOffsetListResponseTopicInfo();

                    // topic name
                    var topicNameSizeBytes = new byte[2];
                    reader.Read(topicNameSizeBytes, 0, 2);
                    var topicNameSize  = KafkaProtocolPrimitiveType.GetInt16(topicNameSizeBytes);
                    var topicNameBytes = new byte[topicNameSize];
                    reader.Read(topicNameBytes, 0, topicNameSize);
                    this.TopicInfos[i].TopicName = Encoding.UTF8.GetString(topicNameBytes);

                    // size of partition offset array
                    var partitionInfoSizeBytes = new byte[4];
                    reader.Read(partitionInfoSizeBytes, 0, 4);
                    var partitionInfoSize = KafkaProtocolPrimitiveType.GetInt32(partitionInfoSizeBytes);
                    this.TopicInfos[i].PartitionInfos = new KafkaProtocolOffsetListResponsePartitionInfo[partitionInfoSize];
                    for (int j = 0; j < partitionInfoSize; j++)
                    {
                        this.TopicInfos[i].PartitionInfos[j] = new KafkaProtocolOffsetListResponsePartitionInfo();

                        // partition id
                        var partitionIdBytes = new byte[4];
                        reader.Read(partitionIdBytes, 0, 4);
                        this.TopicInfos[i].PartitionInfos[j].Partition = KafkaProtocolPrimitiveType.GetInt32(partitionIdBytes);

                        // error code
                        var errorCodeBytes = new byte[2];
                        reader.Read(errorCodeBytes, 0, 2);
                        this.TopicInfos[i].PartitionInfos[j].ErrorCode = KafkaProtocolPrimitiveType.GetInt16(errorCodeBytes);

                        // size of offset array
                        var offsetSizeBytes = new byte[4];
                        reader.Read(offsetSizeBytes, 0, 4);
                        var offsetSize = KafkaProtocolPrimitiveType.GetInt32(offsetSizeBytes);
                        this.TopicInfos[i].PartitionInfos[j].Offset = new long[offsetSize];
                        for (int k = 0; k < offsetSize; k++)
                        {
                            var offsetBytes = new byte[8];
                            reader.Read(offsetBytes, 0, 8);
                            this.TopicInfos[i].PartitionInfos[j].Offset[k] = KafkaProtocolPrimitiveType.GetInt64(offsetBytes);
                        }
                    }
                }
            }
        }
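
The parser above fills KafkaProtocolOffsetListResponseTopicInfo and KafkaProtocolOffsetListResponsePartitionInfo objects whose definitions are not shown. Judging only from the members accessed in the Parse method, they are presumably plain containers along these lines:

// Sketch inferred from the members used above; the real classes may carry more.
public class KafkaProtocolOffsetListResponseTopicInfo
{
    public string TopicName { get; set; }

    public KafkaProtocolOffsetListResponsePartitionInfo[] PartitionInfos { get; set; }
}

public class KafkaProtocolOffsetListResponsePartitionInfo
{
    public int Partition { get; set; }

    public short ErrorCode { get; set; }

    public long[] Offset { get; set; }
}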
Example #3
        /// <summary>
        /// Parse the response bytes.
        /// </summary>
        /// <param name="response">The bytes of the response.</param>
        public override void Parse(byte[] response)
        {
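            //---------------------------------------------------\\
            // OffsetFetchResponse => [TopicName [PartitionInfo]]
            //  PartitionInfo => Partition Offset Metadata ErrorCode
            //  Partition => int32
            //  Offset => int64
            //  Metadata => string
            //  ErrorCode => int16
            //---------------------------------------------------\\
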
            using (var stream = new MemoryStream(response))
            {
                var reader = new BinaryReader(stream);

                var topicInfoSizeBytes = new byte[4];
                reader.Read(topicInfoSizeBytes, 0, 4);
                var numOfTopicInfo = KafkaProtocolPrimitiveType.GetInt32(topicInfoSizeBytes);
                this.TopicInfos = new KafkaProtocolOffsetFetchResponseTopicInfo[numOfTopicInfo];
                for (int i = 0; i < numOfTopicInfo; i++)
                {
                    this.TopicInfos[i] = new KafkaProtocolOffsetFetchResponseTopicInfo();

                    // topic name
                    var topicNameSizeBytes = new byte[2];
                    reader.Read(topicNameSizeBytes, 0, 2);
                    var topicNameSize  = KafkaProtocolPrimitiveType.GetInt16(topicNameSizeBytes);
                    var topicNameBytes = new byte[topicNameSize];
                    reader.Read(topicNameBytes, 0, topicNameSize);
                    var topicName = Encoding.UTF8.GetString(topicNameBytes);
                    this.TopicInfos[i].TopicName = topicName;

                    // [partition info]
                    var partitionInfoSizeBytes = new byte[4];
                    reader.Read(partitionInfoSizeBytes, 0, 4);
                    var numOfPartitionInfos = KafkaProtocolPrimitiveType.GetInt32(partitionInfoSizeBytes);
                    this.TopicInfos[i].PartitionInfos = new KafkaProtocolOffsetFetchResponsePartitionInfo[numOfPartitionInfos];
                    for (int j = 0; j < numOfPartitionInfos; j++)
                    {
                        this.TopicInfos[i].PartitionInfos[j] = new KafkaProtocolOffsetFetchResponsePartitionInfo();

                        // partition id
                        var partitionBytes = new byte[4];
                        reader.Read(partitionBytes, 0, 4);
                        var partitionId = KafkaProtocolPrimitiveType.GetInt32(partitionBytes);
                        this.TopicInfos[i].PartitionInfos[j].Partition = partitionId;

                        // offset
                        var offsetBytes = new byte[8];
                        reader.Read(offsetBytes, 0, 8);
                        var offset = KafkaProtocolPrimitiveType.GetInt64(offsetBytes);
                        this.TopicInfos[i].PartitionInfos[j].Offset = offset;

                        // metadata
                        var metadataSizeBytes = new byte[2];
                        reader.Read(metadataSizeBytes, 0, 2);
                        var metadataSize  = KafkaProtocolPrimitiveType.GetInt16(metadataSizeBytes);
                        var metadataBytes = new byte[metadataSize];
                        reader.Read(metadataBytes, 0, metadataSize);
                        var metadata = Encoding.UTF8.GetString(metadataBytes);
                        this.TopicInfos[i].PartitionInfos[j].Metadata = metadata;

                        // error code
                        var errorCodeBytes = new byte[2];
                        reader.Read(errorCodeBytes, 0, 2);
                        var errorCode = KafkaProtocolPrimitiveType.GetInt16(errorCodeBytes);
                        this.TopicInfos[i].PartitionInfos[j].ErrorCode = errorCode;
                    }
                }
            }
        }
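
Each Parse method above calls BinaryReader.Read and assumes the buffer comes back full. That holds for a MemoryStream wrapping a complete response, but Stream.Read in general may return fewer bytes than requested, and a truncated response would silently leave zero-filled fields. A defensive variant could route every read through a small helper; the ReadExactly extension below is a sketch, not part of the original code.

using System.IO;

public static class BinaryReaderExtensions
{
    // Hypothetical helper: reads exactly count bytes or throws,
    // instead of silently returning a short buffer.
    public static byte[] ReadExactly(this BinaryReader reader, int count)
    {
        var buffer = new byte[count];
        var offset = 0;

        while (offset < count)
        {
            var read = reader.Read(buffer, offset, count - offset);
            if (read <= 0)
            {
                throw new EndOfStreamException(
                    $"Expected {count} bytes but only got {offset}.");
            }

            offset += read;
        }

        return buffer;
    }
}

With such a helper, the two-line pattern used throughout (allocate a buffer, then Read) collapses to e.g. var errorCodeBytes = reader.ReadExactly(2);.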
        /// <summary>
        /// Send the request to kafka and wait for the response.
        /// </summary>
        /// <param name="request">The request.</param>
        /// <returns>The response from kafka.</returns>
        public KafkaProtocolResponse Send(KafkaProtocolRequest request)
        {
            if (request == null)
            {
                return(null);
            }

            this.ConnectToBroker();

            var packet = request.Packet;

            if (packet != null && packet.Length > 0)
            {
                this.tcpStream.Write(packet, 0, packet.Length);
                this.tcpStream.Flush();
            }

            // read the response
            var bytes = new List<byte>();

            ////StringBuilder responseText = new StringBuilder();
            if (this.tcpStream.CanRead)
            {
                var buffer    = new byte[1024];
                var bytesRead = 0;

                do
                {
                    bytesRead = this.tcpStream.Read(buffer, 0, buffer.Length);
                    for (int i = 0; i < bytesRead; i++)
                    {
                        bytes.Add(buffer[i]);
                    }

                    // the data might not be ready on the server yet, so give it some time
                    Thread.Sleep(5);
                } while (this.tcpStream.DataAvailable && bytesRead > 0);
            }

            byte[] responseBytes = bytes.ToArray();
            if (responseBytes == null || responseBytes.Length < 4)
            {
                return(null);
            }

            // size
            var bytesOfSize = new byte[4];

            Array.Copy(responseBytes, bytesOfSize, 4);
            var responseSize = KafkaProtocolPrimitiveType.GetInt32(bytesOfSize);

            if (responseSize <= 0 || responseBytes.Length - 4 < responseSize)
            {
                return(null);
            }

            // correlationId
            var correlationIdBytes = new byte[4];

            Array.Copy(responseBytes, 4, correlationIdBytes, 0, 4);
            var correlationId = KafkaProtocolPrimitiveType.GetInt32(correlationIdBytes);

            if (correlationId != request.CorrelationId)
            {
                // this is not the response to the request
                return(null);
            }

            var data = new byte[responseSize - 4];

            Array.Copy(responseBytes, 8, data, 0, data.Length);

            KafkaProtocolResponse response = null;

            switch (request.ApiKey)
            {
            case KafkaProtocolApiKey.ProduceRequest:
                break;

            case KafkaProtocolApiKey.FetchRequest:
                break;

            case KafkaProtocolApiKey.OffsetRequest:
                break;

            case KafkaProtocolApiKey.MetadataRequest:
                response = new KafkaProtocolTopicMetadataResponse();
                break;

            case KafkaProtocolApiKey.LeaderAndIsrRequest:
                break;

            case KafkaProtocolApiKey.StopReplicaRequest:
                break;

            case KafkaProtocolApiKey.UpdateMetadataRequest:
                break;

            case KafkaProtocolApiKey.ControlledShutdownRequest:
                break;

            case KafkaProtocolApiKey.OffsetCommitRequest:
                break;

            case KafkaProtocolApiKey.OffsetFetchRequest:
                break;

            case KafkaProtocolApiKey.GroupCoordinatorRequest:
                break;

            case KafkaProtocolApiKey.JoinGroupRequest:
                break;

            case KafkaProtocolApiKey.HeartbeatRequest:
                break;

            case KafkaProtocolApiKey.LeaveGroupRequest:
                break;

            case KafkaProtocolApiKey.SyncGroupRequest:
                break;

            case KafkaProtocolApiKey.DescribeGroupsRequest:
                break;

            case KafkaProtocolApiKey.ListGroupsRequest:
                response = new KafkaProtocolConsumerListGroupResponse();
                break;

            case KafkaProtocolApiKey.SaslHandshakeRequest:
                break;

            case KafkaProtocolApiKey.ApiVersionsRequest:
                break;

            case KafkaProtocolApiKey.CreateTopicsRequest:
                break;

            case KafkaProtocolApiKey.DeleteTopicsRequest:
                break;
            }

            if (response != null)
            {
                response.Parse(data);
            }

            return(response);
        }
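
The receive loop in Send polls DataAvailable with a short sleep, which can stop early if the broker has not yet flushed the whole response. Because every Kafka response is length-prefixed, an alternative approach (a sketch under that assumption, not the code of this class) is to read the 4-byte big-endian size first and then keep reading until exactly that many bytes have arrived:

using System;
using System.IO;

public static class KafkaFrameReader
{
    // Sketch only: reads one length-prefixed response frame
    // (4-byte big-endian size, then size payload bytes).
    public static byte[] ReadFrame(Stream stream)
    {
        var sizeBytes = ReadFully(stream, 4);
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(sizeBytes);
        }

        var size = BitConverter.ToInt32(sizeBytes, 0);
        return ReadFully(stream, size);
    }

    private static byte[] ReadFully(Stream stream, int count)
    {
        var buffer = new byte[count];
        var offset = 0;

        while (offset < count)
        {
            var read = stream.Read(buffer, offset, count - offset);
            if (read <= 0)
            {
                throw new EndOfStreamException("Connection closed mid-response.");
            }

            offset += read;
        }

        return buffer;
    }
}

The returned frame then starts with the correlation id, so the rest of Send (the correlation-id check and the per-ApiKey response dispatch) would stay unchanged.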