/// <summary>
/// Initializes a new instance, capturing the name resolver, converter manager and
/// configuration, and creating a logger scoped to the "FirebaseCloudMessaging"
/// trigger category.
/// </summary>
public FirebaseCloudMessageTriggerAttributeBindingProvider(INameResolver nameResolver, IConverterManager converterManager, FirebaseCloudMessagingConfiguration configuration, ILoggerFactory loggerFactory)
{
    _configuration = configuration;
    _converterManager = converterManager;
    _nameResolver = nameResolver;
    _logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("FirebaseCloudMessaging"));
}
/// <summary>
/// Initializes the TeamsFx extension provider: stores the logger factory, creates a
/// "TeamsFx" trigger-category logger, and builds the binding provider with it.
/// </summary>
public TeamsFxBindingExtensionProvider(ILoggerFactory loggerFactory)
{
    _loggerFactory = loggerFactory;
    _logger = _loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("TeamsFx"));
    _bindingProvider = new TeamsFxBindingProvider(_logger);
}
/// <summary>
/// Initializes the Azure Search extension config provider with an app cache and a
/// logger scoped to the "AzureSearch" trigger category.
/// </summary>
public AzureSearchExtensionConfigProvider(IAppCache cacheService, ILoggerFactory factory)
{
    _cacheService = cacheService;
    _logger = factory.CreateLogger(LogCategories.CreateTriggerCategory("AzureSearch"));
}
/// <summary>
/// Registers the Dapr extension: converters for save-state and invoke-method payloads,
/// input bindings for Dapr state, and collectors for state saves and method invocations.
/// </summary>
/// <param name="context">The extension configuration context. Must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="context"/> is null.</exception>
public void Initialize(ExtensionConfigContext context)
{
    if (context == null)
    {
        // FIX: use nameof so the parameter name in the exception survives renames.
        throw new ArgumentNullException(nameof(context));
    }

    _logger = _loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("Dapr"));
    // FIX: constant message — interpolation prefix was unnecessary.
    _logger.LogInformation("Registered dapr extension");

    // Converters that normalize supported payload shapes into the option types.
    context.AddConverter<JObject, SaveStateOptions>(SaveStateOptions);
    context.AddConverter<string, SaveStateOptions>(SaveStateOptions);
    context.AddConverter<byte[], SaveStateOptions>(SaveStateOptions);
    context.AddConverter<JObject, InvokeMethodOptions>(InvokeMethodOptions);

    // Input bindings: Dapr state can be bound as raw bytes, text, stream or JSON.
    var daprStateConverter = new DaprStateConverter(_daprService);
    var stateRule = context.AddBindingRule<DaprStateAttribute>();
    stateRule.BindToInput<byte[]>(daprStateConverter);
    stateRule.BindToInput<string>(daprStateConverter);
    stateRule.BindToInput<Stream>(daprStateConverter);
    stateRule.BindToInput<JToken>(daprStateConverter);
    stateRule.BindToInput<JObject>(daprStateConverter);

    // Output binding: collects SaveStateOptions and persists them via the Dapr service.
    stateRule.BindToCollector<SaveStateOptions>((attr) =>
    {
        return new DaprSaveStateAsyncCollector(attr, _daprService);
    });

    // Output binding: collects InvokeMethodOptions and dispatches them via the Dapr service.
    var invokeRule = context.AddBindingRule<DaprInvokeAttribute>();
    invokeRule.BindToCollector<InvokeMethodOptions>((attr) =>
    {
        return new DaprInvokeMethodAsyncCollector(attr, _daprService);
    });
}
// Test fixture setup: builds a strict EventHubs consumer-client mock and a strict
// blob checkpoint-store mock, then wires both into the EventHubsScaleMonitor under
// test together with an "EventHub" trigger-category logger.
public void SetUp() {
    // Logger factory with a test provider so emitted log entries can be inspected later.
    _loggerFactory = new LoggerFactory();
    _loggerProvider = new TestLoggerProvider();
    _loggerFactory.AddProvider(_loggerProvider);

    // Strict consumer-client mock: returns the fixture's namespace/hub/group values,
    // the partition ids from _partitions, and per-partition properties looked up by id.
    _consumerClientMock = new Mock <IEventHubConsumerClient>(MockBehavior.Strict);
    _consumerClientMock.Setup(c => c.ConsumerGroup).Returns(_consumerGroup);
    _consumerClientMock.Setup(c => c.EventHubName).Returns(_eventHubName);
    _consumerClientMock.Setup(c => c.FullyQualifiedNamespace).Returns(_namespace);
    _consumerClientMock.Setup(client => client.GetPartitionsAsync())
        .Returns(() => Task.FromResult(_partitions.Select(p => p.Id).ToArray()));
    // NOTE(review): bare IsAny<string>() (no "It." prefix) presumably relies on a
    // "using static Moq.It" directive elsewhere in this file — confirm.
    _consumerClientMock.Setup(client => client.GetPartitionPropertiesAsync(IsAny <string>()))
        .Returns((string id) => Task.FromResult(_partitions.SingleOrDefault(p => p.Id == id)));

    // Strict checkpoint-store mock: resolves checkpoints from _checkpoints by partition id.
    this._mockCheckpointStore = new Mock <BlobCheckpointStoreInternal>(MockBehavior.Strict);
    _mockCheckpointStore.Setup(s => s.GetCheckpointAsync(_namespace, _eventHubName, _consumerGroup, It.IsAny <string>(), default))
        .Returns <string, string, string, string, CancellationToken>((ns, hub, cg, partitionId, ct) =>
            Task.FromResult(_checkpoints.SingleOrDefault(cp => cp.PartitionId == partitionId)));

    // System under test: scale monitor composed from the mocks above.
    _scaleMonitor = new EventHubsScaleMonitor(
        _functionId,
        _consumerClientMock.Object,
        _mockCheckpointStore.Object,
        _loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("EventHub")));
}
/// <summary>
/// Verifies that <c>RabbitMQAsyncCollector.AddAsync</c> forwards a message body to the
/// underlying publish batch exactly once.
/// </summary>
public async Task AddAsync_AddsMessagesToQueue()
{
    // Arrange: a strict service mock exposing a publish batch we can verify against.
    var serviceMock = new Mock<IRabbitMQService>(MockBehavior.Strict);
    var batchMock = new Mock<IBasicPublishBatch>();
    serviceMock.Setup(m => m.BasicPublishBatch).Returns(batchMock.Object);

    var context = new RabbitMQContext
    {
        ResolvedAttribute = new RabbitMQAttribute
        {
            HostName = Constants.LocalHost,
            QueueName = "queue",
        },
        Service = serviceMock.Object,
    };

    ILoggerFactory loggerFactory = new LoggerFactory();
    ILogger logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory(Constants.RabbitMQ));
    var collector = new RabbitMQAsyncCollector(context, logger);

    // Act: add a single message body.
    byte[] body = Encoding.UTF8.GetBytes("hi");
    await collector.AddAsync(body);

    // Assert: the body reached the batch exactly once.
    batchMock.Verify(m => m.Add(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<IBasicProperties>(), body), Times.Exactly(1));
}
/// <summary>
/// Constructs a new instance.
/// </summary>
/// <param name="queue">The queue the <see cref="QueueProcessor"/> will operate on. Required.</param>
/// <param name="loggerFactory">The <see cref="ILoggerFactory"/> used to create an <see cref="ILogger"/> scoped to the "Queue" trigger category. May be null, in which case no logger is created.</param>
/// <param name="options">The queue configuration.</param>
/// <param name="poisonQueue">The queue that receives messages whose dequeue count exceeded the maximum. May be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="queue"/> is null.</exception>
internal QueueProcessorOptions(QueueClient queue, ILoggerFactory loggerFactory, QueuesOptions options, QueueClient poisonQueue = null)
{
    Queue = queue ?? throw new ArgumentNullException(nameof(queue));
    Options = options;
    PoisonQueue = poisonQueue;
    Logger = loggerFactory?.CreateLogger(LogCategories.CreateTriggerCategory("Queue"));
}
/// <summary>
/// Creates a typed <c>KafkaProducer&lt;TKey,TValue&gt;</c> for the given attribute and broker list.
/// Value-type resolution: an explicit <c>ValueType</c> wins; otherwise a supplied Avro schema
/// implies <see cref="GenericRecord"/>; otherwise the payload is treated as <see cref="string"/>.
/// A value type implementing <see cref="ISpecificRecord"/> contributes its embedded schema.
/// </summary>
private IKafkaProducer Create(KafkaAttribute attribute, string brokerList)
{
    Type keyType = attribute.KeyType ?? typeof(Null);
    Type valueType = attribute.ValueType;
    string avroSchema = null;

    if (valueType == null)
    {
        if (!string.IsNullOrEmpty(attribute.AvroSchema))
        {
            // Schema-driven payloads deserialize into Avro generic records.
            avroSchema = attribute.AvroSchema;
            valueType = typeof(GenericRecord);
        }
        else
        {
            valueType = typeof(string);
        }
    }
    else if (typeof(ISpecificRecord).IsAssignableFrom(valueType))
    {
        // Specific Avro record types carry their own schema.
        var record = (ISpecificRecord)Activator.CreateInstance(valueType);
        avroSchema = record.Schema.ToString();
    }

    Type producerType = typeof(KafkaProducer<,>).MakeGenericType(keyType, valueType);
    return (IKafkaProducer)Activator.CreateInstance(
        producerType,
        this.GetProducerConfig(brokerList),
        avroSchema,
        this.loggerProvider.CreateLogger(LogCategories.CreateTriggerCategory("Kafka")));
}
/// <summary>
/// Builds the DI container: registers the Azure Search service context, the search
/// repository/service pair, and the storage account repository with its context.
/// </summary>
public IServiceProvider BuildServiceProvider()
{
    var services = new ServiceCollection();

    // Search service context, composed from search + Cosmos DB configuration.
    services.AddSingleton(
        typeof(AzureSearchServiceContext),
        new AzureSearchServiceContext(
            AzureSearchConfiguration.SearchServiceName,
            AzureSearchConfiguration.SearchAdminApiKey,
            CosmosDBConfiguration.EndPointUrl,
            CosmosDBConfiguration.PrimaryKey,
            CosmosDBConfiguration.DatabaseId,
            LoggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("Http"))));

    // Search repository and service.
    services.AddSingleton<ISearchRepository, SearchRepository>();
    services.AddSingleton<ISearchService, SearchService>();

    // Storage account repository and its context.
    services.AddSingleton<IStorageAccountRepository, StorageAccountRepository>();
    services.AddSingleton(
        typeof(IStorageAccountContext),
        new StorageAccountContext(LoggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("Http"))));

    // validateScopes: true — catch scoped-service misuse at resolution time.
    return services.BuildServiceProvider(true);
}
/// <summary>
/// Initializes the RabbitMQ extension config provider. The logger is scoped to the
/// "RabbitMQ" trigger category and is null when no factory is supplied.
/// </summary>
public RabbitMQExtensionConfigProvider(IOptions <RabbitMQOptions> options, INameResolver nameResolver, IRabbitMQServiceFactory rabbitMQServiceFactory, ILoggerFactory loggerFactory)
{
    _rabbitMQServiceFactory = rabbitMQServiceFactory;
    _nameResolver = nameResolver;
    _options = options;
    _logger = loggerFactory?.CreateLogger(LogCategories.CreateTriggerCategory("RabbitMQ"));
}
/// <summary>
/// Initializes the CosmosStore trigger binding provider with configuration, name
/// resolution, binding options and a "CosmosStore" trigger-category logger.
/// </summary>
public CosmosStoreTriggerAttributeBindingProvider(IConfiguration configuration, INameResolver nameResolver, CosmosStoreBindingOptions bindingOptions, ILoggerFactory loggerFactory)
{
    _bindingOptions = bindingOptions;
    _configuration = configuration;
    _nameResolver = nameResolver;
    _logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("CosmosStore"));
}
/// <summary>
/// Initializes the MQTT extension: creates an "Mqtt" trigger-category logger, resolves
/// the host's name resolver, and registers the trigger binding provider.
/// </summary>
/// <param name="context">The extension configuration context.</param>
public void Initialize(ExtensionConfigContext context)
{
    ILogger triggerLogger = context.Config.LoggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("Mqtt"));
    INameResolver resolver = context.Config.GetService <INameResolver>();
    context.Config.RegisterBindingExtension(new MqttTriggerAttributeBindingProvider(resolver, triggerLogger));
}
/// <summary>
/// Initializes the Cosmos DB trigger binding provider with name resolution, options,
/// the extension config provider and a "CosmosDB" trigger-category logger.
/// </summary>
public CosmosDBTriggerAttributeBindingProvider(INameResolver nameResolver, CosmosDBOptions options, CosmosDBExtensionConfigProvider configProvider, ILoggerFactory loggerFactory)
{
    _configProvider = configProvider;
    _options = options;
    _nameResolver = nameResolver;
    _logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("CosmosDB"));
}
/// <summary>
/// Initializes the RethinkDB trigger binding provider with configuration, options,
/// a connection factory, name resolution and a "RethinkDB" trigger-category logger.
/// </summary>
public RethinkDbTriggerAttributeBindingProvider(IConfiguration configuration, RethinkDbOptions options, IRethinkDBConnectionFactory rethinkDBConnectionFactory, INameResolver nameResolver, ILoggerFactory loggerFactory)
{
    _rethinkDBConnectionFactory = rethinkDBConnectionFactory;
    _configuration = configuration;
    _options = options;
    _nameResolver = nameResolver;
    _logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("RethinkDB"));
}
/// <summary>
/// Initializes a new instance of the <see cref="SqlTriggerAttributeBindingProvider"/> class.
/// </summary>
/// <param name="configuration">
/// Used to extract the connection string from connectionStringSetting
/// </param>
/// <param name="loggerFactory">
/// Used to create a logger for the SQL trigger binding
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown if either parameter is null
/// </exception>
public SqlTriggerAttributeBindingProvider(IConfiguration configuration, ILoggerFactory loggerFactory)
{
    _configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
    // CONSISTENCY: validate loggerFactory with the same throw-expression style used
    // for configuration, instead of a separate if/throw block.
    _logger = (loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory)))
        .CreateLogger(LogCategories.CreateTriggerCategory("Sql"));
}
/// <summary>
/// Initializes the Inject extension: creates an "Inject" trigger-category logger and
/// binds <c>InjectAttribute</c> to its binding provider.
/// </summary>
public void Initialize(ExtensionConfigContext context)
{
    ILogger injectLogger = _loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("Inject"));
    var rule = context.AddBindingRule <InjectAttribute>();
    rule.Bind(new InjectAttributeBindingProvider(context, _options.Value, injectLogger));
}
/// <summary>
/// Initializes the Cosmos DB Cassandra trigger binding provider.
/// NOTE: the log category is "CosmosDB" (not "CosmosDBCassandra") — presumably shared
/// with the core Cosmos DB trigger; confirm before changing.
/// </summary>
public CosmosDBCassandraTriggerAttributeBindingProvider(IConfiguration configuration, INameResolver nameResolver, CosmosDBCassandraOptions options, CosmosDBCassandraExtensionConfigProvider configProvider, ILoggerFactory loggerFactory)
{
    _configProvider = configProvider;
    _options = options;
    _configuration = configuration;
    _nameResolver = nameResolver;
    _logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("CosmosDB"));
}
/// <summary>
/// Validates the host startup traces captured by the Application Insights channel.
/// Traces are ordered by message because requests may arrive slightly out-of-order
/// or on different threads.
/// </summary>
public async Task Validate_HostLogs()
{
    TraceTelemetry[] traces = null;
    string routesManagerLogCategory = typeof(WebHost.WebScriptHostHttpRoutesManager).FullName;

    await TestHelpers.Await(() =>
    {
        traces = _fixture.Channel.Telemetries
            .OfType<TraceTelemetry>()
            .Where(t =>
            {
                string category = t.Properties[LogConstants.CategoryNameKey].ToString();
                return category.StartsWith("Host.") || category.StartsWith(routesManagerLogCategory);
            })
            .OrderBy(t => t.Message)
            .ToArray();

        // When these two messages are logged, we know we've completed initialization.
        return traces
            .Where(t => t.Message.Contains("Host lock lease acquired by instance ID") ||
                        t.Message.Contains("Job host started"))
            .Count() == 2;
    }, userMessageCallback: () => string.Join(Environment.NewLine, _fixture.Channel.Telemetries.OfType<TraceTelemetry>().Select(t => t.Message)));

    // Excluding Node buffer deprecation warning for now.
    // TODO: Remove this once https://github.com/Azure/azure-functions-nodejs-worker/issues/98 is resolved.
    // We may have any number of "Host Status" calls as we wait for startup; ignore them.
    traces = traces.Where(t => !t.Message.Contains("[DEP0005]") &&
                               !t.Message.StartsWith("Host Status")).ToArray();

    int expectedCount = 14;
    // FIX: the failure message previously contained a raw line break inside a
    // non-verbatim interpolated string, which does not compile; use Environment.NewLine.
    Assert.True(traces.Length == expectedCount,
        $"Expected {expectedCount} messages, but found {traces.Length}.{Environment.NewLine}Actual logs:{Environment.NewLine}{string.Join(Environment.NewLine, traces.Select(t => t.Message))}");

    // Traces are sorted by message above, so assert them in alphabetical order.
    int idx = 0;
    ValidateTrace(traces[idx++], "2 functions loaded", LogCategories.Startup);
    ValidateTrace(traces[idx++], "A function allow list has been specified", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Found the following functions:\r\n", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Generating 2 job function(s)", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Host initialization: ConsecutiveErrors=0, StartupCount=1", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Host initialized (", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Host lock lease acquired by instance ID", ScriptConstants.LogCategoryHostGeneral);
    ValidateTrace(traces[idx++], "Host started (", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Initializing function HTTP routes" + Environment.NewLine + "Mapped function route 'api/HttpTrigger-Scenarios'", routesManagerLogCategory);
    ValidateTrace(traces[idx++], "Initializing Host", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Initializing Warmup Extension", LogCategories.CreateTriggerCategory("Warmup"));
    ValidateTrace(traces[idx++], "Job host started", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Loading functions metadata", LogCategories.Startup);
    ValidateTrace(traces[idx++], "Starting Host (HostId=", LogCategories.Startup);
}
/// <summary>
/// Initializes the Event Hubs trigger binding provider. The logger is scoped to the
/// "EventHub" trigger category and is null when no factory is supplied.
/// </summary>
public EventHubTriggerAttributeBindingProvider(
    IConverterManager converterManager,
    IOptions <EventHubOptions> options,
    ILoggerFactory loggerFactory,
    EventHubClientFactory clientFactory)
{
    _clientFactory = clientFactory;
    _options = options;
    _converterManager = converterManager;
    _logger = loggerFactory?.CreateLogger(LogCategories.CreateTriggerCategory("EventHub"));
}
/// <summary>
/// Initializes the Event Hubs trigger binding provider (legacy configuration-based
/// overload). The logger is null when no factory is supplied.
/// </summary>
public EventHubTriggerAttributeBindingProvider(
    INameResolver nameResolver,
    IConverterManager converterManager,
    EventHubConfiguration eventHubConfig,
    ILoggerFactory loggerFactory)
{
    _eventHubConfig = eventHubConfig;
    _converterManager = converterManager;
    _nameResolver = nameResolver;
    _logger = loggerFactory?.CreateLogger(LogCategories.CreateTriggerCategory("EventHub"));
}
/// <summary>
/// Constructs a new instance.
/// </summary>
/// <param name="queue">The <see cref="CloudQueue"/> the <see cref="QueueProcessor"/> will operate on. Required.</param>
/// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to create an <see cref="ILogger"/> from. May be null, in which case no logger is created.</param>
/// <param name="poisonQueue">The queue to move messages to when unable to process a message after the maximum dequeue count has been exceeded. May be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="queue"/> is null.</exception>
public QueueProcessorFactoryContext(CloudQueue queue, ILoggerFactory loggerFactory, CloudQueue poisonQueue = null)
{
    // FIX: use nameof instead of the "queue" string literal, and collapse the
    // if/throw into a throw-expression consistent with QueueProcessorOptions.
    Queue = queue ?? throw new ArgumentNullException(nameof(queue));
    PoisonQueue = poisonQueue;
    Logger = loggerFactory?.CreateLogger(LogCategories.CreateTriggerCategory("Queue"));
}
/// <summary>
/// Initializes the Web PubSub config provider: unwraps the options, creates a
/// "WebPubSub" trigger-category logger, and builds the trigger dispatcher with it.
/// </summary>
public WebPubSubConfigProvider(
    IOptions <WebPubSubOptions> options,
    INameResolver nameResolver,
    ILoggerFactory loggerFactory,
    IConfiguration configuration)
{
    _configuration = configuration;
    _nameResolver = nameResolver;
    _options = options.Value;
    _logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("WebPubSub"));
    _dispatcher = new WebPubSubTriggerDispatcher(_logger);
}
/// <summary>
/// Initializes the Event Hubs trigger binding provider (configuration + resolver
/// overload). The logger is null when no factory is supplied.
/// </summary>
public EventHubTriggerAttributeBindingProvider(
    IConfiguration configuration,
    INameResolver nameResolver,
    IConverterManager converterManager,
    IOptions <EventHubOptions> options,
    ILoggerFactory loggerFactory)
{
    _options = options;
    _converterManager = converterManager;
    _nameResolver = nameResolver;
    _config = configuration;
    _logger = loggerFactory?.CreateLogger(LogCategories.CreateTriggerCategory("EventHub"));
}
/// <summary>
/// Registers the Timer extension: creates a "Timer" trigger-category logger and binds
/// <c>TimerTriggerAttribute</c> to its trigger binding provider.
/// </summary>
/// <param name="context">The extension configuration context. Must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="context"/> is null.</exception>
public void Initialize(ExtensionConfigContext context)
{
    if (context == null)
    {
        // FIX: use nameof so the parameter name in the exception survives renames.
        throw new ArgumentNullException(nameof(context));
    }

    ILogger logger = _loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("Timer"));
    var bindingProvider = new TimerTriggerAttributeBindingProvider(_options.Value, _nameResolver, logger, _scheduleMonitor);
    context.AddBindingRule<TimerTriggerAttribute>()
        .BindToTrigger(bindingProvider);
}
/// <summary>
/// Verifies that converting a non-JSON payload surfaces a <c>JsonReaderException</c>.
/// </summary>
public void InvalidFormat_Throws_JsonException()
{
    // Arrange: a payload that is not valid JSON.
    byte[] payload = Encoding.UTF8.GetBytes("wrong format");
    var args = new BasicDeliverEventArgs("tag", 1, false, "", "queue", null, payload);
    ILoggerFactory loggerFactory = new LoggerFactory();
    ILogger logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("RabbitMQ"));
    var converter = new BasicDeliverEventArgsToPocoConverter <TestClass>(logger);

    // Act + Assert: conversion must throw on the malformed body.
    Assert.Throws <JsonReaderException>(() => converter.Convert(args));
}
/// <summary>
/// Creates a typed Kafka producer from an attribute, sharing the given base producer
/// handle. Value type and serializer are resolved via <c>SerializationHelper</c>;
/// the key type defaults to <c>Null</c> when the attribute does not specify one.
/// </summary>
private IKafkaProducer Create(Handle producerBaseHandle, KafkaAttribute attribute)
{
    var keyType = attribute.KeyType ?? typeof(Null);
    var valueType = SerializationHelper.GetValueType(attribute.ValueType, attribute.AvroSchema, null, out var avroSchema);
    var valueSerializer = SerializationHelper.ResolveValueSerializer(valueType, attribute.AvroSchema);

    Type producerType = typeof(KafkaProducer<,>).MakeGenericType(keyType, valueType);
    return (IKafkaProducer)Activator.CreateInstance(
        producerType,
        producerBaseHandle,
        valueSerializer,
        loggerProvider.CreateLogger(LogCategories.CreateTriggerCategory("Kafka")));
}
/// <summary>
/// Initializes the Kafka trigger binding provider with configuration, options,
/// converters, name resolution and a "Kafka" trigger-category logger.
/// </summary>
public KafkaTriggerAttributeBindingProvider(
    IConfiguration config,
    IOptions <KafkaOptions> options,
    IConverterManager converterManager,
    INameResolver nameResolver,
    ILoggerFactory loggerFactory)
{
    this.options = options;
    this.nameResolver = nameResolver;
    this.converterManager = converterManager;
    this.config = config;
    this.logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("Kafka"));
}
// Verifies that converting a non-JSON payload yields the default value for TestClass
// rather than throwing.
// NOTE(review): the closing brace of this method is not visible in this chunk — it
// appears to continue beyond the visible source.
public void InvalidFormat_Returns_DefaultObject() {
    // A payload that is not valid JSON.
    string str = "wrong format";
    byte[] strBytes = Encoding.UTF8.GetBytes(str);
    BasicDeliverEventArgs args = new BasicDeliverEventArgs("tag", 1, false, "", "queue", null, strBytes);
    ILoggerFactory loggerFactory = new LoggerFactory();
    ILogger logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("RabbitMQ"));
    BasicDeliverEventArgsToPocoConverter <TestClass> converter = new BasicDeliverEventArgsToPocoConverter <TestClass>(logger);
    TestClass actualObj = converter.Convert(args);
    // Conversion failure is expected to fall back to default, not throw.
    Assert.Equal(default, actualObj);
/// <summary>
/// Creates a typed Kafka producer from a producer entity, sharing the given base
/// producer handle. Value type defaults to <c>byte[]</c> and key type to <c>Null</c>
/// when the entity does not specify them.
/// </summary>
private IKafkaProducer Create(Handle producerBaseHandle, KafkaProducerEntity entity)
{
    var keyType = entity.KeyType ?? typeof(Null);
    var valueType = entity.ValueType ?? typeof(byte[]);
    var valueSerializer = SerializationHelper.ResolveValueSerializer(valueType, entity.AvroSchema);

    Type producerType = typeof(KafkaProducer<,>).MakeGenericType(keyType, valueType);
    return (IKafkaProducer)Activator.CreateInstance(
        producerType,
        producerBaseHandle,
        valueSerializer,
        loggerProvider.CreateLogger(LogCategories.CreateTriggerCategory("Kafka")));
}
/// <summary>
/// Initializes the SignalR config provider: creates a "SignalR" trigger-category
/// logger, a trigger dispatcher, and the input binding provider used for
/// connection-info bindings. The validator and configurer are optional.
/// </summary>
public SignalRConfigProvider(
    INameResolver nameResolver,
    ILoggerFactory loggerFactory,
    IConfiguration configuration,
    IServiceManagerStore serviceManagerStore,
    ISecurityTokenValidator securityTokenValidator = null,
    ISignalRConnectionInfoConfigurer signalRConnectionInfoConfigurer = null)
{
    this.serviceManagerStore = serviceManagerStore;
    this.nameResolver = nameResolver;
    this.logger = loggerFactory.CreateLogger(LogCategories.CreateTriggerCategory("SignalR"));
    this._dispatcher = new SignalRTriggerDispatcher();
    inputBindingProvider = new InputBindingProvider(configuration, nameResolver, securityTokenValidator, signalRConnectionInfoConfigurer);
}