/// <summary>
/// Verifies that a listener configured with a catch-all filter ("*.*") and
/// autoDelete enabled processes files of any extension and deletes each one
/// after processing completes.
/// </summary>
public async Task CatchAllFilter_AutoDelete_ProcessesAndDeletesFiles()
{
    // Strict mock: any executor call not explicitly set up would fail the test.
    Mock<ITriggeredFunctionExecutor> mockExecutor = new Mock<ITriggeredFunctionExecutor>(MockBehavior.Strict);
    FunctionResult result = new FunctionResult(true);
    mockExecutor.Setup(p => p.TryExecuteAsync(It.IsAny<TriggeredFunctionData>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(result);

    var options = new FilesOptions() { RootPath = rootPath };
    FileTriggerAttribute attribute = new FileTriggerAttribute(
        attributeSubPath,
        changeTypes: WatcherChangeTypes.Created,
        filter: "*.*",
        autoDelete: true);
    FileListener listener = new FileListener(
        new OptionsWrapper<FilesOptions>(options),
        attribute,
        mockExecutor.Object,
        new TestLogger("Test"),
        new DefaultFileProcessorFactory());
    await listener.StartAsync(CancellationToken.None);

    // create a few files with different extensions
    WriteTestFile("jpg");
    WriteTestFile("txt");
    WriteTestFile("png");

    // wait for the files to be processed fully and all files deleted (autoDelete = true)
    await TestHelpers.Await(() =>
    {
        return Directory.EnumerateFiles(testFileDir).Count() == 0;
    });

    listener.Dispose();
}
/// <summary>
/// Registers the file services, applying an optional configuration callback
/// to a freshly created <see cref="FilesOptions"/> instance.
/// </summary>
/// <param name="services">The service collection to register into.</param>
/// <param name="configure">Optional callback that mutates the options before registration.</param>
/// <returns>The same <paramref name="services"/> instance, for chaining.</returns>
public static IServiceCollection AddFilesServices(this IServiceCollection services, Action<FilesOptions> configure = null)
{
    var filesOptions = new FilesOptions();
    if (configure != null)
    {
        configure(filesOptions);
    }

    // Delegate to the options-based overload for the actual registrations.
    return services.AddFilesServices(filesOptions);
}
/// <summary>
/// Test setup: prepares a clean temp working directory, the options and
/// processor under test, a JSON serializer with ISO date handling, and the
/// instance-id environment variable the processor reads.
/// </summary>
public FileProcessorTests()
{
    rootPath = Path.GetTempPath();
    combinedTestFilePath = Path.Combine(rootPath, AttributeSubPath);

    // Make sure the target directory exists and starts out empty.
    Directory.CreateDirectory(combinedTestFilePath);
    DeleteTestFiles(combinedTestFilePath);

    options = new FilesOptions() { RootPath = rootPath };
    processor = CreateTestProcessor(new FileTriggerAttribute(AttributeSubPath, "*.dat"));

    _serializer = JsonSerializer.Create(new JsonSerializerSettings
    {
        DateFormatHandling = DateFormatHandling.IsoDateFormat,
    });

    Environment.SetEnvironmentVariable("WEBSITE_INSTANCE_ID", InstanceId);
}
/// <summary>
/// Initializes a new instance of the <see cref="ImageResizeMiddleware"/> class.
/// </summary>
/// <param name="next">The next delegate in the request pipeline.</param>
/// <param name="hostingEnvironment">The hosting environment.</param>
/// <param name="options">The file options accessor.</param>
public ImageResizeMiddleware(RequestDelegate next, IHostingEnvironment hostingEnvironment, IOptions<FilesOptions> options)
{
    _hostingEnvironment = hostingEnvironment;
    _options = options.Value;
    _next = next;
}
/// <summary>
/// Initializes a new instance of the <see cref="FilesService"/> class.
/// </summary>
/// <param name="context">The database context.</param>
/// <param name="authorizationService">The authorization service.</param>
/// <param name="user">The current principal; stored as a <see cref="ClaimsPrincipal"/> when it is one.</param>
/// <param name="mapper">The object mapper.</param>
/// <param name="fileSettings">The file settings.</param>
public FilesService(SteamfitterContext context, IAuthorizationService authorizationService, IPrincipal user, IMapper mapper, FilesOptions fileSettings)
{
    _options = fileSettings;
    _mapper = mapper;
    // NOTE: `as` yields null if the principal is not a ClaimsPrincipal.
    _user = user as ClaimsPrincipal;
    _authorizationService = authorizationService;
    _context = context;
}
/// <summary>
/// Initializes a new instance of the <see cref="FileService"/> class.
/// </summary>
/// <param name="options">The file options accessor.</param>
/// <param name="logger">The logger.</param>
/// <param name="hostingEnvironment">The hosting environment.</param>
/// <param name="httpContextAccessor">Accessor for the current HTTP context.</param>
public FileService(IOptions<FilesOptions> options, ILogger<FileService> logger, IHostingEnvironment hostingEnvironment, IHttpContextAccessor httpContextAccessor)
{
    _httpContextAccessor = httpContextAccessor;
    _hostingEnvironment = hostingEnvironment;
    _logger = logger;
    _options = options.Value;
}
/// <summary>
/// Initializes a new instance of the <see cref="FileService"/> class.
/// </summary>
/// <param name="fileOptions">The file options accessor.</param>
/// <param name="urlOptions">The URL options accessor.</param>
/// <param name="hostingEnvironment">The hosting environment.</param>
/// <param name="mapper">The object mapper.</param>
/// <param name="logger">The logger.</param>
public FileService(IOptions<FilesOptions> fileOptions, IOptions<UrlsOptions> urlOptions, IHostingEnvironment hostingEnvironment, IMapper mapper, ILogger<FileService> logger)
{
    _logger = logger;
    _mapper = mapper;
    _hostingEnvironment = hostingEnvironment;
    // Unwrap both options accessors up front.
    _urlOptions = urlOptions.Value;
    _fileOptions = fileOptions.Value;
}
/// <summary>
/// Verifies that constructing a <see cref="FileListener"/> without a
/// configured root path fails fast with a descriptive exception.
/// </summary>
public void Constructor_ThrowsOnInvalidRootPath()
{
    // Default options: RootPath deliberately left unset.
    var options = new FilesOptions();
    FileTriggerAttribute attrib = new FileTriggerAttribute("test", "*.dat");
    Mock<ITriggeredFunctionExecutor> mockExecutor = new Mock<ITriggeredFunctionExecutor>();

    var ex = Assert.Throws<InvalidOperationException>(() =>
    {
        new FileListener(
            new OptionsWrapper<FilesOptions>(options),
            attrib,
            mockExecutor.Object,
            new TestLogger("Test"),
            new DefaultFileProcessorFactory());
    });

    Assert.Equal("Path '' is invalid. FilesConfiguration.RootPath must be set to a valid directory location.", ex.Message);
}
/// <summary>
/// Verifies the default RootPath: null when HOME is unset, and
/// "%HOME%\data" when HOME is set.
/// </summary>
public void Constructor_Defaults()
{
    Environment.SetEnvironmentVariable("HOME", null);
    Assert.Null(new FilesOptions().RootPath);

    Environment.SetEnvironmentVariable("HOME", @"D:\home");
    Assert.Equal(@"D:\home\data", new FilesOptions().RootPath);

    // Leave the environment clean for subsequent tests.
    Environment.SetEnvironmentVariable("HOME", null);
}
/// <summary>
/// Initializes a new instance of the <see cref="RuleProcessor"/> class.
/// </summary>
/// <param name="files">The monitored file options.</param>
public RuleProcessor(IOptionsMonitor<FilesOptions> files)
{
    // Snapshot the current options once; both fields derive from it.
    var currentOptions = files.CurrentValue;
    filesOptions = currentOptions;
    fieldsArray = currentOptions.Columns;
}
/// <summary>
/// End-to-end concurrency test: starts <paramref name="concurrentListenerCount"/> listeners over the
/// same directory, creates <paramref name="inputFileCount"/> files, and verifies each file is
/// processed exactly once (status file shows one Created Processing/Processed pair). It then updates
/// 50 of the processed files and verifies each update is also processed exactly once (status file
/// grows to four entries, adding a Changed pair), and finally that CleanupProcessedFiles empties the
/// directory. The Task.Delay calls throttle file creation so the watcher can keep up; the mocked
/// executor records processed file names into a ConcurrentBag shared across all listeners.
/// NOTE(review): Take(50) assumes inputFileCount >= 50 for the update phase — confirm against the
/// test's parameter data.
/// </summary>
public async Task ConcurrentListeners_ProcessFilesCorrectly(int concurrentListenerCount, int inputFileCount) { // mock out the executor so we can capture function invocations Mock <ITriggeredFunctionExecutor> mockExecutor = new Mock <ITriggeredFunctionExecutor>(MockBehavior.Strict); ConcurrentBag <string> processedFiles = new ConcurrentBag <string>(); FunctionResult result = new FunctionResult(true); mockExecutor.Setup(p => p.TryExecuteAsync(It.IsAny <TriggeredFunctionData>(), It.IsAny <CancellationToken>())) .Callback <TriggeredFunctionData, CancellationToken>(async(mockData, mockToken) => { await Task.Delay(50); FileSystemEventArgs fileEvent = mockData.TriggerValue as FileSystemEventArgs; processedFiles.Add(fileEvent.Name); }) .ReturnsAsync(result); var options = new FilesOptions() { RootPath = rootPath }; FileTriggerAttribute attribute = new FileTriggerAttribute(attributeSubPath, changeTypes: WatcherChangeTypes.Created | WatcherChangeTypes.Changed, filter: "*.dat"); // create a bunch of listeners and start them CancellationTokenSource tokenSource = new CancellationTokenSource(); CancellationToken cancellationToken = tokenSource.Token; List <Task> listenerStartupTasks = new List <Task>(); List <FileListener> listeners = new List <FileListener>(); for (int i = 0; i < concurrentListenerCount; i++) { FileListener listener = new FileListener(new OptionsWrapper <FilesOptions>(options), attribute, mockExecutor.Object, new TestLogger("Test"), new DefaultFileProcessorFactory()); listeners.Add(listener); listenerStartupTasks.Add(listener.StartAsync(cancellationToken)); } await Task.WhenAll(listenerStartupTasks); // now start creating files List <string> expectedFiles = new List <string>(); for (int i = 0; i < inputFileCount; i++) { string file = WriteTestFile(); await Task.Delay(50); expectedFiles.Add(Path.GetFileName(file)); } // wait for all files to be processed await TestHelpers.Await(() => { return(processedFiles.Count >= inputFileCount); }); 
// Each file must have been handled by exactly one of the concurrent listeners:
// one data file plus one status file per input (hence the "* 2" count below).
Assert.Equal(inputFileCount, processedFiles.Count); // verify that each file was only processed once Assert.True(expectedFiles.OrderBy(p => p).SequenceEqual(processedFiles.OrderBy(p => p))); Assert.Equal(expectedFiles.Count * 2, Directory.GetFiles(testFileDir).Length); // verify contents of each status file FileProcessor processor = listeners[0].Processor; foreach (string processedFile in processedFiles) { string statusFilePath = processor.GetStatusFile(Path.Combine(testFileDir, processedFile)); string[] statusLines = File.ReadAllLines(statusFilePath); Assert.Equal(2, statusLines.Length); StatusFileEntry statusEntry = JsonConvert.DeserializeObject <StatusFileEntry>(statusLines[0]); Assert.Equal(ProcessingState.Processing, statusEntry.State); Assert.Equal(WatcherChangeTypes.Created, statusEntry.ChangeType); statusEntry = JsonConvert.DeserializeObject <StatusFileEntry>(statusLines[1]); Assert.Equal(ProcessingState.Processed, statusEntry.State); Assert.Equal(WatcherChangeTypes.Created, statusEntry.ChangeType); } // Now test concurrency handling for updates by updating some files // and verifying the updates are only processed once string[] filesToUpdate = processedFiles.Take(50).Select(p => Path.Combine(testFileDir, p)).ToArray(); string item; while (!processedFiles.IsEmpty) { processedFiles.TryTake(out item); } await Task.Delay(1000); foreach (string fileToUpdate in filesToUpdate) { await Task.Delay(50); File.AppendAllText(fileToUpdate, "update"); } // wait for all files to be processed await TestHelpers.Await(() => { return(processedFiles.Count >= filesToUpdate.Length); }); Assert.Equal(filesToUpdate.Length, processedFiles.Count); Assert.Equal(expectedFiles.Count * 2, Directory.GetFiles(testFileDir).Length); // verify the status files are correct for each of the updated files foreach (string updatedFile in filesToUpdate) { string statusFilePath = processor.GetStatusFile(updatedFile); string[] statusLines = File.ReadAllLines(statusFilePath); Assert.Equal(4, 
statusLines.Length); StatusFileEntry statusEntry = JsonConvert.DeserializeObject <StatusFileEntry>(statusLines[0]); Assert.Equal(ProcessingState.Processing, statusEntry.State); Assert.Equal(WatcherChangeTypes.Created, statusEntry.ChangeType); statusEntry = JsonConvert.DeserializeObject <StatusFileEntry>(statusLines[1]); Assert.Equal(ProcessingState.Processed, statusEntry.State); Assert.Equal(WatcherChangeTypes.Created, statusEntry.ChangeType); statusEntry = JsonConvert.DeserializeObject <StatusFileEntry>(statusLines[2]); Assert.Equal(ProcessingState.Processing, statusEntry.State); Assert.Equal(WatcherChangeTypes.Changed, statusEntry.ChangeType); statusEntry = JsonConvert.DeserializeObject <StatusFileEntry>(statusLines[3]); Assert.Equal(ProcessingState.Processed, statusEntry.State); Assert.Equal(WatcherChangeTypes.Changed, statusEntry.ChangeType); } // Now clean up all processed files processor.CleanupProcessedFiles(); Assert.Empty(Directory.GetFiles(testFileDir)); foreach (FileListener listener in listeners) { listener.Dispose(); } }
/// <summary>
/// Registers the file services, the GridFS bucket, and the supplied options
/// as singletons.
/// </summary>
/// <param name="services">The service collection to register into.</param>
/// <param name="options">The file options; must not be null.</param>
/// <returns>The same <paramref name="services"/> instance, for chaining.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="options"/> is null.</exception>
public static IServiceCollection AddFilesServices(this IServiceCollection services, FilesOptions options)
{
    if (options == null)
    {
        throw new ArgumentNullException(nameof(options));
    }

    services.AddSingleton(options);
    // Resolve the bucket lazily from the provider so it shares its configuration.
    services.AddSingleton(provider => provider.GridFSBucket());
    services.AddSingleton<IFileService, FileService>();

    return services;
}
/// <summary>
/// Initializes a new instance of the <see cref="FileProcessorService"/> class.
/// </summary>
/// <param name="files">The monitored file options.</param>
/// <param name="fileHelper">The file helper instance.</param>
public FileProcessorService(IOptionsMonitor<FilesOptions> files, IFileHelper fileHelper)
{
    fileHelperInstance = fileHelper;
    filesOptions = files.CurrentValue;
}
/// <summary>
/// Initializes a new instance of the <see cref="FolderHelper"/> class.
/// </summary>
/// <param name="hostingEnvironment">The hosting environment.</param>
/// <param name="filesOptions">The file options accessor.</param>
public FolderHelper(IHostingEnvironment hostingEnvironment, IOptions<FilesOptions> filesOptions)
{
    _filesOptions = filesOptions.Value;
    _hostingEnvironment = hostingEnvironment;
}
/// <summary>
/// Entry point: connects to the target SQL Server using the supplied
/// credentials and runs an interactive command loop (xp_cmdshell / OLE
/// toggles, command execution, file upload/download, or raw SQL passthrough).
/// </summary>
/// <param name="args">Exactly three values expected (connection parameters passed to SqlConnet).</param>
static void Main(string[] args)
{
    if (args.Length != 3)
    {
        Info.ShowUsage();
        return;
    }

    var Conn = SqlConnet(args[0], args[1], args[2]);
    var setting = new Setting(Conn);
    var filesOptions = new FilesOptions(Conn, setting);
    var execOptions = new ExecOptions(Conn, setting);

    try
    {
        do
        {
            Console.Write("SQL> ");
            string str = Console.ReadLine();

            // BUGFIX: ReadLine returns null at end-of-input; the original
            // would throw NullReferenceException on str.ToLower().
            if (str == null)
            {
                Conn.Close();
                break;
            }

            if (str.ToLower() == "exit")
            {
                Conn.Close();
                break;
            }
            else if (str.ToLower() == "help")
            {
                Info.ShowModuleUsage();
                continue;
            }

            // Split into at most 3 tokens: verb plus up to two arguments.
            string[] cmdline = str.Split(new char[] { ' ' }, 3);

            // Rejoin everything after the verb for commands taking one free-form argument.
            String s = String.Empty;
            for (int i = 1; i < cmdline.Length; i++)
            {
                s += cmdline[i] + " ";
            }

            switch (cmdline[0].ToLower())
            {
                case "enable_xp_cmdshell":
                    setting.Enable_xp_cmdshell();
                    break;
                case "disable_xp_cmdshell":
                    setting.Disable_xp_cmdshell();
                    break;
                case "xp_cmdshell":
                    execOptions.xp_cmdshell(s);
                    break;
                case "enable_ole":
                    setting.Enable_ola();
                    break;
                case "disable_ole":
                    setting.Disable_ole();
                    break;
                case "sp_cmdshell":
                    execOptions.sp_cmdshell(s);
                    break;
                case "upload":
                    // BUGFIX: guard against missing arguments; the original threw
                    // IndexOutOfRangeException and dropped the connection.
                    if (cmdline.Length < 3)
                    {
                        Console.WriteLine("[!] Usage: upload <local path> <remote path>");
                        break;
                    }
                    filesOptions.UploadFiles(cmdline[1], cmdline[2]);
                    break;
                case "download":
                    if (cmdline.Length < 3)
                    {
                        Console.WriteLine("[!] Usage: download <remote path> <local path>");
                        break;
                    }
                    filesOptions.DownloadFiles(cmdline[2], cmdline[1]);
                    break;
                default:
                    // Anything unrecognized is executed as a raw SQL batch.
                    Console.WriteLine(Batch.RemoteExec(Conn, str, true));
                    break;
            }

            // Stop looping if the server dropped the connection.
            if (!ConnectionState.Open.Equals(Conn.State))
            {
                Console.WriteLine("[!] Disconnect....");
                break;
            }
        } while (true);
    }
    catch (Exception ex)
    {
        Conn.Close();
        Console.WriteLine("[!] Error log: \r\n" + ex.Message);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="FileHelper"/> class.
/// </summary>
/// <param name="files">The monitored file options.</param>
/// <param name="validator">The validator.</param>
public FileHelper(IOptionsMonitor<FilesOptions> files, IValidator validator)
{
    filesOptions = files.CurrentValue;
    _validator = validator;
}
/// <summary>
/// Initializes a new instance of the <see cref="Validator"/> class.
/// </summary>
/// <param name="files">The monitored file options.</param>
/// <param name="ruleProcessor">The rule processor supplying the validation rules.</param>
public Validator(IOptionsMonitor<FilesOptions> files, IRuleProcessor ruleProcessor)
{
    rulesList = ruleProcessor.GetRules();

    // Snapshot the current options; the column list is derived from them.
    var currentOptions = files.CurrentValue;
    filesOptions = currentOptions;
    columnsList = currentOptions.Columns;
}