Example #1
0
        public void When_String_Value_Type_Is_Set_Should_Create_String_Listener()
        {
            // Arrange: trigger attribute that explicitly requests plain string values.
            var triggerAttribute = new KafkaTriggerAttribute("brokers:9092", "myTopic")
            {
                ValueType = typeof(string),
            };

            var functionExecutor = new Mock <ITriggeredFunctionExecutor>();
            var configuration    = new KafkaListenerConfiguration()
            {
                BrokerList    = triggerAttribute.BrokerList,
                Topic         = triggerAttribute.Topic,
                ConsumerGroup = "group1",
            };

            // Act: build the listener through the factory.
            var listener = KafkaListenerFactory.CreateFor(
                triggerAttribute,
                typeof(KafkaEventData),
                functionExecutor.Object,
                true,
                new KafkaOptions(),
                configuration,
                NullLogger.Instance);

            // Assert: an <Ignore, string> listener with no custom value deserializer
            // (plain strings need none). Assert.IsType returns the cast instance.
            Assert.NotNull(listener);
            var typedListener = Assert.IsType <KafkaListener <Ignore, string> >(listener);

            Assert.Null(typedListener.ValueDeserializer);
        }
Example #2
0
        public void When_Avro_Schema_Is_Provided_Should_Create_GenericRecord_Listener()
        {
            // Arrange: trigger attribute carrying an Avro schema string.
            var triggerAttribute = new KafkaTriggerAttribute("brokers:9092", "myTopic")
            {
                AvroSchema = "fakeAvroSchema"
            };

            var functionExecutor = new Mock <ITriggeredFunctionExecutor>();
            var configuration    = new KafkaListenerConfiguration()
            {
                BrokerList    = triggerAttribute.BrokerList,
                Topic         = triggerAttribute.Topic,
                ConsumerGroup = "group1",
            };

            // Act: build the listener through the factory.
            var listener = KafkaListenerFactory.CreateFor(
                triggerAttribute,
                typeof(KafkaEventData),
                functionExecutor.Object,
                true,
                new KafkaOptions(),
                configuration,
                NullLogger.Instance);

            // Assert: presence of a schema yields a GenericRecord listener wired
            // with an Avro deserializer. Assert.IsType returns the cast instance.
            Assert.NotNull(listener);
            var typedListener = Assert.IsType <KafkaListener <Ignore, GenericRecord> >(listener);

            Assert.NotNull(typedListener.ValueDeserializer);
            Assert.IsType <AvroDeserializer <GenericRecord> >(typedListener.ValueDeserializer);
        }
Example #3
0
        public async Task When_Options_Are_Set_Should_Be_Set_In_Consumer_Config()
        {
            var executor = new Mock <ITriggeredFunctionExecutor>();
            var consumer = new Mock <IConsumer <Ignore, string> >();

            var listenerConfig = new KafkaListenerConfiguration()
            {
                BrokerList             = "testBroker",
                Topic                  = "topic",
                ConsumerGroup          = "group1",
                SslKeyPassword         = "******",
                SslCertificateLocation = "path/to/cert",
                SslKeyLocation         = "path/to/key",
                SslCaLocation          = "path/to/cacert"
            };

            var kafkaOptions = new KafkaOptions();
            var target       = new KafkaListenerForTest <Ignore, string>(
                executor.Object,
                true,
                kafkaOptions,
                listenerConfig,
                requiresKey: true,
                valueDeserializer: null,
                NullLogger.Instance,
                functionId: "testId"
                );

            target.SetConsumer(consumer.Object);

            await target.StartAsync(default);
Example #4
0
        public async Task When_Value_Type_Is_Protobuf_Should_Create_Protobuf_Listener(string functionName, Type expectedKeyType)
        {
            // The listener is created entirely through the binding provider from the
            // function's parameter metadata, so no attribute/executor/config locals
            // are needed here (the previous ones were dead code and have been removed).
            var config = this.emptyConfiguration;

            var bindingProvider = new KafkaTriggerAttributeBindingProvider(
                config,
                Options.Create(new KafkaOptions()),
                new KafkaEventDataConvertManager(NullLogger.Instance),
                new DefaultNameResolver(config),
                NullLoggerFactory.Instance);

            var parameterInfo = new TriggerBindingProviderContext(this.GetParameterInfo(functionName), default);

            var triggerBinding = await bindingProvider.TryCreateAsync(parameterInfo);

            // Create the listener through the binding, exactly as the runtime would.
            var listener = await triggerBinding.CreateListenerAsync(new ListenerFactoryContext(new FunctionDescriptor(), new Mock <ITriggeredFunctionExecutor>().Object, default));

            // A protobuf-typed parameter must produce a listener using the
            // ProtobufDeserializer for the ProtoUser value type.
            Assert.NotNull(listener);
            AssertIsCorrectKafkaListener(listener, expectedKeyType, typeof(ProtoUser), typeof(ProtobufDeserializer <ProtoUser>));
        }
        public async Task When_Options_Are_Set_Should_Be_Set_In_Consumer_Config()
        {
            var executor = new Mock <ITriggeredFunctionExecutor>();
            var consumer = new Mock <IConsumer <Ignore, string> >();

            var listenerConfig = new KafkaListenerConfiguration()
            {
                BrokerList    = "testBroker",
                Topic         = "topic",
                ConsumerGroup = "group1",
            };

            var kafkaOptions = new KafkaOptions();
            var target       = new KafkaListenerForTest <Ignore, string>(
                executor.Object,
                true,
                kafkaOptions,
                listenerConfig,
                valueDeserializer: null,
                NullLogger.Instance
                );

            target.SetConsumer(consumer.Object);

            await target.StartAsync(default);
Example #6
0
        public async Task When_Using_SingleItem_Binding_10_Events_Should_Execute_Function_Ten_Times()
        {
            const int ExpectedEventCount = 10;

            var functionExecutor = new Mock <ITriggeredFunctionExecutor>();
            var kafkaConsumer    = new Mock <IConsumer <Ignore, string> >();

            var currentOffset = 0L;

            // Emit ExpectedEventCount fake records on partition 0, then report
            // "no data" by returning null.
            kafkaConsumer.Setup(x => x.Consume(It.IsNotNull <TimeSpan>()))
            .Returns(() =>
            {
                if (currentOffset >= ExpectedEventCount)
                {
                    return(null);
                }

                currentOffset++;

                return(CreateConsumeResult <Ignore, string>(currentOffset.ToString(), 0, currentOffset));
            });

            var executorFinished = new SemaphoreSlim(0);
            var invocationCount  = 0;

            // Count executor invocations; release the semaphore on every call so
            // the test can observe that dispatching has started.
            functionExecutor.Setup(x => x.TryExecuteAsync(It.IsNotNull <TriggeredFunctionData>(), It.IsAny <CancellationToken>()))
            .Callback(() =>
            {
                Interlocked.Increment(ref invocationCount);
                executorFinished.Release();
            })
            .ReturnsAsync(new FunctionResult(true));

            var configuration = new KafkaListenerConfiguration()
            {
                BrokerList    = "testBroker",
                Topic         = "topic",
                ConsumerGroup = "group1",
            };

            var target = new KafkaListenerForTest <Ignore, string>(
                functionExecutor.Object,
                singleDispatch: true,
                options: new KafkaOptions(),
                configuration,
                requiresKey: true,
                valueDeserializer: null,
                logger: NullLogger.Instance,
                functionId: "testId"
                );

            target.SetConsumer(kafkaConsumer.Object);

            await target.StartAsync(default(CancellationToken));

            // Wait (bounded) for dispatching to happen before shutting down.
            Assert.True(await executorFinished.WaitAsync(TimeSpan.FromSeconds(5)));

            await target.StopAsync(default(CancellationToken));
        }
 // Test double constructor: forwards to the base KafkaListener overload that
 // takes an untyped value deserializer (the legacy surface without
 // requiresKey/functionId). Adds no behavior of its own; the test subclass
 // exists only so SetConsumer can inject a mocked consumer.
 public KafkaListenerForTest(ITriggeredFunctionExecutor executor,
                             bool singleDispatch,
                             KafkaOptions options,
                             KafkaListenerConfiguration kafkaListenerConfiguration,
                             object valueDeserializer,
                             ILogger logger)
     : base(executor,
            singleDispatch,
            options,
            kafkaListenerConfiguration,
            valueDeserializer,
            logger)
 {
 }
 // Test double constructor: forwards to the base KafkaListener overload that
 // takes a typed IDeserializer<TValue> plus requiresKey and functionId.
 // Adds no behavior of its own; it only exposes the base constructor to tests.
 public KafkaListenerForTest(ITriggeredFunctionExecutor executor,
                             bool singleDispatch,
                             KafkaOptions options,
                             KafkaListenerConfiguration kafkaListenerConfiguration,
                             bool requiresKey,
                             IDeserializer <TValue> valueDeserializer,
                             ILogger logger,
                             string functionId)
     : base(executor,
            singleDispatch,
            options,
            kafkaListenerConfiguration,
            requiresKey,
            valueDeserializer,
            logger,
            functionId)
 {
 }
Example #9
0
        public async Task When_Value_Is_KafkaEventData_With_Key_Should_Create_Listener_With_Key(string functionName, Type keyType, Type valueType)
        {
            // The listener is created entirely through the binding provider from the
            // function's parameter metadata, so no attribute/executor/config locals
            // are needed here (the previous ones were dead code and have been removed).
            var config = this.emptyConfiguration;

            var bindingProvider = new KafkaTriggerAttributeBindingProvider(
                config,
                Options.Create(new KafkaOptions()),
                new KafkaEventDataConvertManager(NullLogger.Instance),
                new DefaultNameResolver(config),
                NullLoggerFactory.Instance);

            var parameterInfo = new TriggerBindingProviderContext(this.GetParameterInfo(functionName), default);

            var triggerBinding = await bindingProvider.TryCreateAsync(parameterInfo);

            // Create the listener through the binding, exactly as the runtime would.
            var listener = await triggerBinding.CreateListenerAsync(new ListenerFactoryContext(new FunctionDescriptor(), new Mock <ITriggeredFunctionExecutor>().Object, default));

            // A keyed KafkaEventData parameter must produce a generic listener whose
            // type arguments match the expected key and value types.
            Assert.NotNull(listener);
            Assert.True(listener.GetType().IsGenericType);
            var genericTypes = listener.GetType().GetGenericArguments();

            Assert.Equal(keyType, genericTypes[0]);
            Assert.Equal(valueType, genericTypes[1]);
        }
Example #10
0
        public async Task When_Topic_Has_Multiple_Partitions_Should_Execute_And_Commit_In_Order(bool singleDispatch)
        {
            // Two partitions with five messages each, delivered interleaved across
            // two batches. Verifies per-partition ordering of execution and that
            // offsets are committed as read-offset + 1, per message (single
            // dispatch) or per batch/partition (multi dispatch).
            const int MessagesPerPartition = 5;
            const int PartitionCount       = 2;
            const int BatchCount           = 2;

            // Partition 0 carries messages A..E.
            const long Offset_A = 0;
            const long Offset_B = 1;
            const long Offset_C = 2;
            const long Offset_D = 3;
            const long Offset_E = 4;

            // Partition 1 carries messages 1..5.
            const long Offset_1 = 0;
            const long Offset_2 = 1;
            const long Offset_3 = 2;
            const long Offset_4 = 3;
            const long Offset_5 = 4;

            var executor = new Mock <ITriggeredFunctionExecutor>();
            var consumer = new Mock <IConsumer <Null, string> >();

            var committed = new ConcurrentQueue <TopicPartitionOffset>();

            consumer.Setup(x => x.StoreOffset(It.IsNotNull <TopicPartitionOffset>()))
            .Callback <TopicPartitionOffset>((topicPartitionOffset) =>
            {
                committed.Enqueue(topicPartitionOffset);
            });

            // Batch 1: AB12C
            // Batch 2: 34DE5
            // Order of SetupSequence returns IS the consume order; do not reorder.
            consumer.SetupSequence(x => x.Consume(It.IsNotNull <TimeSpan>()))
            .Returns(CreateConsumeResult <Null, string>("A", 0, Offset_A))
            .Returns(CreateConsumeResult <Null, string>("B", 0, Offset_B))
            .Returns(CreateConsumeResult <Null, string>("1", 1, Offset_1))
            .Returns(CreateConsumeResult <Null, string>("2", 1, Offset_2))
            .Returns(CreateConsumeResult <Null, string>("C", 0, Offset_C))
            .Returns((ConsumeResult <Null, string>)null)
            .Returns(CreateConsumeResult <Null, string>("3", 1, Offset_3))
            .Returns(CreateConsumeResult <Null, string>("4", 1, Offset_4))
            .Returns(CreateConsumeResult <Null, string>("D", 0, Offset_D))
            .Returns(CreateConsumeResult <Null, string>("E", 0, Offset_E))
            .Returns(() =>
            {
                // from now on return null
                consumer.Setup(x => x.Consume(It.IsNotNull <TimeSpan>()))
                .Returns((ConsumeResult <Null, string>)null);

                return(CreateConsumeResult <Null, string>("5", 1, Offset_5));
            });

            var partition0       = new ConcurrentQueue <string>();
            var partition1       = new ConcurrentQueue <string>();
            var executorFinished = new SemaphoreSlim(0);

            // Record each dispatched value into its partition queue; signal when
            // both partitions have received all of their messages.
            executor.Setup(x => x.TryExecuteAsync(It.IsNotNull <TriggeredFunctionData>(), It.IsAny <CancellationToken>()))
            .Callback <TriggeredFunctionData, CancellationToken>((t, _) =>
            {
                var triggerData = (KafkaTriggerInput)t.TriggerValue;

                if (singleDispatch)
                {
                    Assert.Single(triggerData.Events);
                }

                foreach (var ev in triggerData.Events)
                {
                    switch (ev.Partition)
                    {
                    case 0:
                        partition0.Enqueue(ev.Value.ToString());
                        break;

                    case 1:
                        partition1.Enqueue(ev.Value.ToString());
                        break;

                    default:
                        Assert.True(false, "Unknown partition");
                        break;
                    }
                }

                // Use the named constant (was a hard-coded 5 duplicating
                // MessagesPerPartition).
                if (partition0.Count == MessagesPerPartition && partition1.Count == MessagesPerPartition)
                {
                    executorFinished.Release();
                }
            })
            .ReturnsAsync(new FunctionResult(true));

            var listenerConfig = new KafkaListenerConfiguration()
            {
                BrokerList    = "testBroker",
                Topic         = "topic",
                ConsumerGroup = "group1",
            };

            var target = new KafkaListenerForTest <Null, string>(
                executor.Object,
                singleDispatch,
                new KafkaOptions(),
                listenerConfig,
                requiresKey: true,
                valueDeserializer: null,
                NullLogger.Instance,
                functionId: "testId"
                );

            target.SetConsumer(consumer.Object);

            await target.StartAsync(default(CancellationToken));

            Assert.True(await executorFinished.WaitAsync(TimeSpan.FromSeconds(5)));

            // Give time for the commit to be saved
            await Task.Delay(1500);

            // Per-partition ordering must be preserved regardless of interleaving.
            Assert.Equal(new[] { "A", "B", "C", "D", "E" }, partition0.ToArray());
            Assert.Equal(new[] { "1", "2", "3", "4", "5" }, partition1.ToArray());

            // Committing will be the one we read + 1
            // Batch 1: AB12C
            // Batch 2: 34DE5
            var committedArray = committed.ToArray();

            if (singleDispatch)
            {
                // In single dispatch we expected to commit once per message / per partition
                Assert.Equal(MessagesPerPartition * PartitionCount, committedArray.Length);

                // In single dispatch each item will be committed individually
                Assert.Equal(new[] { Offset_A + 1, Offset_B + 1, Offset_C + 1, Offset_D + 1, Offset_E + 1 }, committedArray.Where(x => x.Partition == 0).Select(x => (long)x.Offset).ToArray());
                Assert.Equal(new[] { Offset_1 + 1, Offset_2 + 1, Offset_3 + 1, Offset_4 + 1, Offset_5 + 1 }, committedArray.Where(x => x.Partition == 1).Select(x => (long)x.Offset).ToArray());
            }
            else
            {
                // In multi dispatch we expected to commit once per batch / per partition
                Assert.Equal(BatchCount * PartitionCount, committedArray.Length);

                // In multi dispatch we batch/partition pair will be committed
                Assert.Equal(new[] { Offset_C + 1, Offset_E + 1 }, committedArray.Where(x => x.Partition == 0).Select(x => (long)x.Offset).ToArray());
                Assert.Equal(new[] { Offset_2 + 1, Offset_5 + 1 }, committedArray.Where(x => x.Partition == 1).Select(x => (long)x.Offset).ToArray());
            }

            await target.StopAsync(default(CancellationToken));
        }
Example #11
0
        public async Task When_Using_MultiItem_Binding_10_Events_Should_Execute_Function_Once()
        {
            const int ExpectedEventCount = 10;

            var functionExecutor = new Mock <ITriggeredFunctionExecutor>();
            var kafkaConsumer    = new Mock <IConsumer <Null, string> >();

            var currentOffset = 0L;

            // Emit ExpectedEventCount fake records on partition 0, then report
            // "no data" by returning null.
            kafkaConsumer.Setup(x => x.Consume(It.IsNotNull <TimeSpan>()))
            .Returns(() =>
            {
                if (currentOffset >= ExpectedEventCount)
                {
                    return(null);
                }

                currentOffset++;

                return(CreateConsumeResult <Null, string>(currentOffset.ToString(), 0, currentOffset));
            });

            var executorFinished = new SemaphoreSlim(0);
            var totalProcessed   = 0;

            // Accumulate the batch sizes handed to the executor; release the
            // semaphore once every produced event has been dispatched.
            functionExecutor.Setup(x => x.TryExecuteAsync(It.IsNotNull <TriggeredFunctionData>(), It.IsAny <CancellationToken>()))
            .Callback <TriggeredFunctionData, CancellationToken>((td, _) =>
            {
                var input = (KafkaTriggerInput)td.TriggerValue;
                if (Interlocked.Add(ref totalProcessed, input.Events.Length) == ExpectedEventCount)
                {
                    executorFinished.Release();
                }
            })
            .ReturnsAsync(new FunctionResult(true));

            var configuration = new KafkaListenerConfiguration()
            {
                BrokerList    = "testBroker",
                Topic         = "topic",
                ConsumerGroup = "group1",
            };

            var target = new KafkaListenerForTest <Null, string>(
                functionExecutor.Object,
                singleDispatch: false,
                options: new KafkaOptions(),
                configuration,
                requiresKey: true,
                valueDeserializer: null,
                logger: NullLogger.Instance,
                functionId: "testId"
                );

            target.SetConsumer(kafkaConsumer.Object);

            await target.StartAsync(default(CancellationToken));

            Assert.True(await executorFinished.WaitAsync(TimeSpan.FromSeconds(5)));

            await target.StopAsync(default(CancellationToken));

            // All ten events must have been delivered in a single batched invocation.
            functionExecutor.Verify(x => x.TryExecuteAsync(It.IsNotNull <TriggeredFunctionData>(), It.IsAny <CancellationToken>()), Times.Once);
        }