        public FileListener(FilesConfiguration config, FileTriggerAttribute attribute, ITriggeredFunctionExecutor triggerExecutor)
        {
            _config = config;
            _attribute = attribute;
            _triggerExecutor = triggerExecutor;
            _cancellationTokenSource = new CancellationTokenSource();
            _watchPath = Path.Combine(_config.RootPath, _attribute.GetNormalizedPath());
        }
        /// <summary>
        /// Enables use of File System extensions
        /// </summary>
        /// <param name="config">The <see cref="JobHostConfiguration"/> to configure.</param>
        public static void UseFiles(this JobHostConfiguration config)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }

            FilesConfiguration filesConfiguration = new FilesConfiguration();

            config.UseFiles(filesConfiguration);
        }
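        // Usage sketch (not from the original source): once UseFiles has been called on the
        // JobHostConfiguration, job functions can be triggered by files appearing under the
        // configured RootPath. The binding shape below -- a [FileTrigger] path pattern with a
        // {name} token bound to a string parameter -- is an assumption shown for illustration only.
        public static class FileTriggerUsageSketch
        {
            public static void OnImportFile(
                [FileTrigger(@"import\{name}", "*.dat")] string content, // hypothetical path and filter
                string name,
                TextWriter log)
            {
                // log which file was picked up and how much content it carried
                log.WriteLine("Processed file '{0}' ({1} characters)", name, content.Length);
            }
        }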
        public static void Main(string[] args)
        {
            JobHostConfiguration config = new JobHostConfiguration();

            config.Tracing.ConsoleLevel = TraceLevel.Verbose;

            // Set to a short polling interval to facilitate local
            // debugging. You wouldn't want to run in production this way.
            config.Queues.MaxPollingInterval = TimeSpan.FromSeconds(2);

            FilesConfiguration filesConfig = new FilesConfiguration();
            if (string.IsNullOrEmpty(filesConfig.RootPath))
            {
                // when running locally, set this to a valid directory
                filesConfig.RootPath = @"c:\temp\files";
            }
            EnsureSampleDirectoriesExist(filesConfig.RootPath);

            config.UseFiles(filesConfig);
            config.UseTimers();
            config.UseSample();
            config.UseCore();
            var sendGridConfiguration = new SendGridConfiguration()
            {
                ToAddress = "*****@*****.**",
                FromAddress = new MailAddress("*****@*****.**", "WebJobs Extensions Samples")
            };
            config.UseSendGrid(sendGridConfiguration);

            ConfigureTraceMonitor(config, sendGridConfiguration);

            WebHooksConfiguration webHooksConfig = new WebHooksConfiguration();
            webHooksConfig.UseReceiver<GitHubWebHookReceiver>();
            config.UseWebHooks(webHooksConfig);

            JobHost host = new JobHost(config);

            host.Call(typeof(MiscellaneousSamples).GetMethod("ExecutionContext"));
            host.Call(typeof(FileSamples).GetMethod("ReadWrite"));
            host.Call(typeof(SampleSamples).GetMethod("Sample_BindToStream"));
            host.Call(typeof(SampleSamples).GetMethod("Sample_BindToString"));
            host.Call(typeof(TableSamples).GetMethod("CustomBinding"));

            // When running in Azure Web Apps, a JobHost will gracefully shut itself
            // down, ensuring that all listeners are stopped. For this sample, we want
            // the same behavior when the console app window is closed, so that any
            // Singleton locks that have been taken are released immediately.
            ShutdownHandler.Register(() => { host.Stop(); });

            host.RunAndBlock();
        }
        /// <summary>
        /// Enables use of File System extensions
        /// </summary>
        /// <param name="config">The <see cref="JobHostConfiguration"/> to configure.</param>
        /// <param name="filesConfig">The <see cref="FilesConfiguration"></see> to use./></param>
        public static void UseFiles(this JobHostConfiguration config, FilesConfiguration filesConfig)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }
            if (filesConfig == null)
            {
                throw new ArgumentNullException("filesConfig");
            }

            config.RegisterExtensionConfigProvider(new FilesExtensionConfig(filesConfig));
        }
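        // Sketch (assumption): a minimal registration using the overload above with an explicit
        // FilesConfiguration. The RootPath value is hypothetical; per the Constructor_Defaults
        // test below, ProcessorFactory defaults to DefaultFileProcessorFactory, and RootPath
        // defaults to %HOME%\data when a HOME environment variable is set.
        private static void ConfigureFilesSketch(JobHostConfiguration config)
        {
            FilesConfiguration filesConfig = new FilesConfiguration
            {
                RootPath = @"c:\temp\files" // hypothetical local root directory
            };

            config.UseFiles(filesConfig);
        }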
        public void Constructor_Defaults()
        {
            Environment.SetEnvironmentVariable("HOME", null);

            FilesConfiguration config = new FilesConfiguration();
            Assert.Null(config.RootPath);
            Assert.Equal(typeof(DefaultFileProcessorFactory), config.ProcessorFactory.GetType());

            Environment.SetEnvironmentVariable("HOME", @"D:\home");
            config = new FilesConfiguration();
            Assert.Equal(@"D:\home\data", config.RootPath);

            Environment.SetEnvironmentVariable("HOME", null);
        }
        public static void Main(string[] args)
        {
            JobHostConfiguration config = new JobHostConfiguration();

            config.Tracing.ConsoleLevel = TraceLevel.Verbose;

            // Set to a short polling interval to facilitate local
            // debugging. You wouldn't want to run in production this way.
            config.Queues.MaxPollingInterval = TimeSpan.FromSeconds(2);

            FilesConfiguration filesConfig = new FilesConfiguration();
            if (string.IsNullOrEmpty(filesConfig.RootPath))
            {
                // when running locally, set this to a valid directory
                filesConfig.RootPath = @"c:\temp\files";
            }
            EnsureSampleDirectoriesExist(filesConfig.RootPath);
            config.UseFiles(filesConfig);

            config.UseTimers();
            config.UseSample();
            config.UseCore();
            var sendGridConfiguration = new SendGridConfiguration()
            {
                ToAddress = "*****@*****.**",
                FromAddress = new MailAddress("*****@*****.**", "WebJobs Extensions Samples")
            };
            config.UseSendGrid(sendGridConfiguration);

            ConfigureTraceMonitor(config, sendGridConfiguration);

            WebHooksConfiguration webHooksConfig = new WebHooksConfiguration();
            webHooksConfig.UseReceiver<GitHubWebHookReceiver>();
            config.UseWebHooks(webHooksConfig);

            JobHost host = new JobHost(config);

            host.Call(typeof(MiscellaneousSamples).GetMethod("ExecutionContext"));
            host.Call(typeof(FileSamples).GetMethod("ReadWrite"));
            host.Call(typeof(SampleSamples).GetMethod("Sample_BindToStream"));
            host.Call(typeof(SampleSamples).GetMethod("Sample_BindToString"));
            host.Call(typeof(TableSamples).GetMethod("CustomBinding"));

            host.RunAndBlock();
        }
        public static void Main(string[] args)
        {
            JobHostConfiguration config = new JobHostConfiguration();
            FilesConfiguration filesConfig = new FilesConfiguration();

            // See https://github.com/Azure/azure-webjobs-sdk/wiki/Running-Locally for details
            // on how to set up your local environment
            if (config.IsDevelopment)
            {
                config.UseDevelopmentSettings();
                filesConfig.RootPath = @"c:\temp\files";
            }

            config.UseFiles(filesConfig);
            config.UseTimers();
            config.UseSample();
            config.UseCore();
            var sendGridConfiguration = new SendGridConfiguration()
            {
                ToAddress = "*****@*****.**",
                FromAddress = new MailAddress("*****@*****.**", "WebJobs Extensions Samples")
            };
            config.UseSendGrid(sendGridConfiguration);

            ConfigureTraceMonitor(config, sendGridConfiguration);
            
            EnsureSampleDirectoriesExist(filesConfig.RootPath);

            WebHooksConfiguration webHooksConfig = new WebHooksConfiguration();
            webHooksConfig.UseReceiver<GitHubWebHookReceiver>();
            config.UseWebHooks(webHooksConfig);

            JobHost host = new JobHost(config);

            host.Call(typeof(MiscellaneousSamples).GetMethod("ExecutionContext"));
            host.Call(typeof(FileSamples).GetMethod("ReadWrite"));
            host.Call(typeof(SampleSamples).GetMethod("Sample_BindToStream"));
            host.Call(typeof(SampleSamples).GetMethod("Sample_BindToString"));
            host.Call(typeof(TableSamples).GetMethod("CustomBinding"));

            host.RunAndBlock();
        }
        public FileProcessorTests()
        {
            rootPath = Path.GetTempPath();
            combinedTestFilePath = Path.Combine(rootPath, attributeSubPath);
            Directory.CreateDirectory(combinedTestFilePath);
            DeleteTestFiles(combinedTestFilePath);

            config = new FilesConfiguration()
            {
                RootPath = rootPath
            };
            
            FileTriggerAttribute attribute = new FileTriggerAttribute(attributeSubPath, "*.dat");
            processor = CreateTestProcessor(attribute);

            JsonSerializerSettings settings = new JsonSerializerSettings
            {
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
            };
            _serializer = JsonSerializer.Create(settings);

            Environment.SetEnvironmentVariable("WEBSITE_INSTANCE_ID", InstanceId);
        }
        public FilesExtensionConfig(FilesConfiguration filesConfig)
        {
            _filesConfig = filesConfig;
        }
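        // Sketch (assumption): FilesExtensionConfig is passed to RegisterExtensionConfigProvider
        // above, which implies it implements the SDK's IExtensionConfigProvider. A hypothetical
        // minimal shape is sketched here; the actual binding-provider wiring inside Initialize is
        // not shown in the original source and is therefore omitted.
        public class FilesExtensionConfigSketch : IExtensionConfigProvider
        {
            private readonly FilesConfiguration _filesConfig;

            public FilesExtensionConfigSketch(FilesConfiguration filesConfig)
            {
                _filesConfig = filesConfig;
            }

            public void Initialize(ExtensionConfigContext context)
            {
                // register the [FileTrigger] binding providers against context here (omitted)
            }
        }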
        private JobHost CreateTestJobHost()
        {
            ExplicitTypeLocator locator = new ExplicitTypeLocator(typeof(FilesTestJobs));
            JobHostConfiguration config = new JobHostConfiguration
            {
                TypeLocator = locator
            };

            FilesConfiguration filesConfig = new FilesConfiguration
            {
                RootPath = rootPath
            };
            config.UseFiles(filesConfig);

            return new JobHost(config);
        }
        public async Task ConcurrentListeners_ProcessFilesCorrectly(int concurrentListenerCount, int inputFileCount)
        {
            // mock out the executor so we can capture function invocations
            Mock<ITriggeredFunctionExecutor> mockExecutor = new Mock<ITriggeredFunctionExecutor>(MockBehavior.Strict);
            ConcurrentBag<string> processedFiles = new ConcurrentBag<string>();
            FunctionResult result = new FunctionResult(true);
            mockExecutor.Setup(p => p.TryExecuteAsync(It.IsAny<TriggeredFunctionData>(), It.IsAny<CancellationToken>()))
                .Callback<TriggeredFunctionData, CancellationToken>(async (mockData, mockToken) =>
                    {
                        await Task.Delay(50);
                        FileSystemEventArgs fileEvent = mockData.TriggerValue as FileSystemEventArgs;
                        processedFiles.Add(fileEvent.Name);
                    })
                .ReturnsAsync(result);

            FilesConfiguration config = new FilesConfiguration()
            {
                RootPath = rootPath
            };
            FileTriggerAttribute attribute = new FileTriggerAttribute(attributeSubPath, changeTypes: WatcherChangeTypes.Created | WatcherChangeTypes.Changed, filter: "*.dat");

            // create a bunch of listeners and start them
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            CancellationToken cancellationToken = tokenSource.Token;
            List<Task> listenerStartupTasks = new List<Task>();
            List<FileListener> listeners = new List<FileListener>();
            for (int i = 0; i < concurrentListenerCount; i++)
            {
                FileListener listener = new FileListener(config, attribute, mockExecutor.Object, new TestTraceWriter());
                listeners.Add(listener);
                listenerStartupTasks.Add(listener.StartAsync(cancellationToken));
            }
            await Task.WhenAll(listenerStartupTasks);

            // now start creating files
            List<string> expectedFiles = new List<string>();
            for (int i = 0; i < inputFileCount; i++)
            {
                string file = WriteTestFile();
                await Task.Delay(50);
                expectedFiles.Add(Path.GetFileName(file));
            }

            // wait for all files to be processed
            await TestHelpers.Await(() =>
            {
                return processedFiles.Count >= inputFileCount;
            });
            Assert.Equal(inputFileCount, processedFiles.Count);

            // verify that each file was only processed once
            Assert.True(expectedFiles.OrderBy(p => p).SequenceEqual(processedFiles.OrderBy(p => p)));
            // each input file should now have a companion status file alongside it
            Assert.Equal(expectedFiles.Count * 2, Directory.GetFiles(testFileDir).Length);

            // verify contents of each status file
            FileProcessor processor = listeners[0].Processor;
            foreach (string processedFile in processedFiles)
            {
                string statusFilePath = processor.GetStatusFile(Path.Combine(testFileDir, processedFile));

                string[] statusLines = File.ReadAllLines(statusFilePath);

                Assert.Equal(2, statusLines.Length);
                StatusFileEntry statusEntry = JsonConvert.DeserializeObject<StatusFileEntry>(statusLines[0]);
                Assert.Equal(ProcessingState.Processing, statusEntry.State);
                Assert.Equal(WatcherChangeTypes.Created, statusEntry.ChangeType);

                statusEntry = JsonConvert.DeserializeObject<StatusFileEntry>(statusLines[1]);
                Assert.Equal(ProcessingState.Processed, statusEntry.State);
                Assert.Equal(WatcherChangeTypes.Created, statusEntry.ChangeType);
            }

            // Now test concurrency handling for updates by updating some files
            // and verifying the updates are only processed once
            string[] filesToUpdate = processedFiles.Take(50).Select(p => Path.Combine(testFileDir, p)).ToArray();
            string item;
            while (!processedFiles.IsEmpty)
            {
                processedFiles.TryTake(out item);
            }
            await Task.Delay(1000);
            foreach (string fileToUpdate in filesToUpdate)
            {
                await Task.Delay(50);
                File.AppendAllText(fileToUpdate, "update");
            }

            // wait for all files to be processed
            await TestHelpers.Await(() =>
            {
                return processedFiles.Count >= filesToUpdate.Length;
            });
            Assert.Equal(filesToUpdate.Length, processedFiles.Count);
            Assert.Equal(expectedFiles.Count * 2, Directory.GetFiles(testFileDir).Length);

            // verify the status files are correct for each of the updated files
            foreach (string updatedFile in filesToUpdate)
            {
                string statusFilePath = processor.GetStatusFile(updatedFile);

                string[] statusLines = File.ReadAllLines(statusFilePath);

                Assert.Equal(4, statusLines.Length);
                StatusFileEntry statusEntry = JsonConvert.DeserializeObject<StatusFileEntry>(statusLines[0]);
                Assert.Equal(ProcessingState.Processing, statusEntry.State);
                Assert.Equal(WatcherChangeTypes.Created, statusEntry.ChangeType);

                statusEntry = JsonConvert.DeserializeObject<StatusFileEntry>(statusLines[1]);
                Assert.Equal(ProcessingState.Processed, statusEntry.State);
                Assert.Equal(WatcherChangeTypes.Created, statusEntry.ChangeType);

                statusEntry = JsonConvert.DeserializeObject<StatusFileEntry>(statusLines[2]);
                Assert.Equal(ProcessingState.Processing, statusEntry.State);
                Assert.Equal(WatcherChangeTypes.Changed, statusEntry.ChangeType);

                statusEntry = JsonConvert.DeserializeObject<StatusFileEntry>(statusLines[3]);
                Assert.Equal(ProcessingState.Processed, statusEntry.State);
                Assert.Equal(WatcherChangeTypes.Changed, statusEntry.ChangeType);
            }

            // Now call purge to clean up all processed files
            processor.CleanupProcessedFiles();
            Assert.Equal(0, Directory.GetFiles(testFileDir).Length);

            foreach (FileListener listener in listeners)
            {
                listener.Dispose();
            }
        }
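        // Status-file sketch (assumption): the asserts above imply that each processing pass
        // appends one "Processing" and one "Processed" StatusFileEntry to the companion status
        // file, one JSON-serialized entry per line. Only the asserted property values are shown
        // here; the exact JSON shape and any additional fields are omitted.
        //
        //   line 1: State = Processing, ChangeType = Created
        //   line 2: State = Processed,  ChangeType = Created
        //   line 3: State = Processing, ChangeType = Changed   (appended after the file is updated)
        //   line 4: State = Processed,  ChangeType = Changed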
        private JobHost CreateTestJobHost()
        {
            ExplicitTypeLocator locator = new ExplicitTypeLocator(typeof(FilesTestJobs));
            var resolver = new TestNameResolver();
            resolver.Values.Add("test", "TestValue");
            JobHostConfiguration config = new JobHostConfiguration
            {
                TypeLocator = locator,
                NameResolver = resolver
            };

            FilesConfiguration filesConfig = new FilesConfiguration
            {
                RootPath = rootPath
            };
            config.UseFiles(filesConfig);

            return new JobHost(config);
        }
        static void Main(string[] args)
        {
            JobHostConfiguration config = new JobHostConfiguration();

            config.Tracing.ConsoleLevel = TraceLevel.Verbose;

            // Set to a short polling interval to facilitate local
            // debugging. You wouldn't want to run in production this way.
            config.Queues.MaxPollingInterval = TimeSpan.FromSeconds(2);

            FilesConfiguration filesConfig = new FilesConfiguration();
            if (string.IsNullOrEmpty(filesConfig.RootPath))
            {
                // when running locally, set this to a valid directory
                filesConfig.RootPath = @"c:\temp\files";
            }
            EnsureSampleDirectoriesExist(filesConfig.RootPath);
            config.UseFiles(filesConfig);

            config.UseTimers();
            config.UseSample();
            config.UseCore();

            JobHost host = new JobHost(config);

            host.Call(typeof(FileSamples).GetMethod("ReadWrite"));
            host.Call(typeof(SampleSamples).GetMethod("Sample_BindToStream"));
            host.Call(typeof(SampleSamples).GetMethod("Sample_BindToString"));
            host.Call(typeof(TableSamples).GetMethod("CustomBinding"));

            host.RunAndBlock();
        }