/// <summary>
/// Registers CSV input/output formatters on the given <see cref="MvcOptions"/> and maps the
/// "csv" format suffix to the CSV media type. Safe to call more than once: formatters are
/// only added when not already present.
/// </summary>
/// <param name="thisValue">The MVC options to configure.</param>
/// <param name="configureCsv">Callback that customizes the <see cref="CsvConfiguration"/> used by both formatters.</param>
/// <returns>The same <paramref name="thisValue"/> instance, for fluent chaining.</returns>
public static MvcOptions ConfigureCsvFormatter([NotNull] this MvcOptions thisValue, [NotNull] Action <CsvConfiguration> configureCsv)
{
	// Honor the Accept header so clients can explicitly ask for text/csv.
	thisValue.RespectBrowserAcceptHeader = true;

	// Invariant culture keeps CSV machine-readable regardless of server locale.
	CsvConfiguration configuration = new CsvConfiguration(CultureInfo.InvariantCulture);
	configureCsv(configuration);

	// Add the input formatter only if one is not registered yet (idempotent).
	if (!thisValue.InputFormatters.OfType<CsvInputFormatter>().Any())
	{
		thisValue.InputFormatters.Add(new CsvInputFormatter(configuration));
	}

	// Same idempotent registration for the output side.
	if (!thisValue.OutputFormatters.OfType<CsvOutputFormatter>().Any())
	{
		thisValue.OutputFormatters.Add(new CsvOutputFormatter(configuration));
	}

	// Allow ?format=csv / .csv-style format selection.
	thisValue.FormatterMappings.SetMediaTypeMappingForFormat("csv", MediaTypeHeaderValues.ApplicationCsv);

	return thisValue;
}
// Static initializer: builds the shared formatter instances and the deterministic
// metrics fixtures used by the individual test cases.
static OutputFormatterTest()
{
	JsonFormatter = new();
	YamlFormatter = new();
	CsvFormatter = new();

	// Fixed commit date and SHA keep the expected serialized output stable.
	DateTime commitDate = new(2021, 11, 21);
	string commitSha = "da39a3ee5e6b4b0d3255bfef95601890afd80709";

	// Single package whose latest version equals the installed one (libyear 0.0).
	LibYearResult libYear = new()
	{
		new LibYearPackageResult("polyglot", "0.3.3", new DateTime(2011, 11, 01), "0.3.3", new DateTime(2011, 11, 01), 0.0, false, false)
	};

	s_metricsResultTestData = new(commitDate, commitSha, libYear);
	// List fixture reuses the same result twice to exercise multi-item serialization.
	s_metricsResultListTestData = new List<MetricsResult> { s_metricsResultTestData, s_metricsResultTestData };
}
// Verifies the CSV output formatter end-to-end: media type registration, CanWrite checks,
// null-argument guarding, and the serialized response body.
public void Test_CsvOutputFormatter_CanWrite()
{
	// Arrange
	var options = new CsvFormatterOptions { CsvDelimiter = ",", UseSingleLineHeaderInCsv = true };
	var instance = new CsvOutputFormatter(options);

	// Expected output after conversion: one header line followed by a row per item.
	var content = $"HeaderOne,HeaderTwo,HeaderThree{Environment.NewLine}1,2,3{Environment.NewLine}4,5,6";

	// Mock contexts for the output formatter (along with the objects to be converted to CSV).
	// FIX: media type was "text/cv" (typo) — the formatter's supported/asserted type is "text/csv".
	var mockHttpContext = FakeHttpContext.GetResponseHttpContext(Encoding.ASCII.GetBytes(content), "text/csv");
	var mockContext = new FakeOutputFormatterContext(
		mockHttpContext,
		(s, e) => new StreamWriter(new MemoryStream()),
		typeof(List<TestCsvParsed>),
		new List<TestCsvParsed>
		{
			new TestCsvParsed { HeaderOne = "1", HeaderTwo = "2", HeaderThree = "3" },
			new TestCsvParsed { HeaderOne = "4", HeaderTwo = "5", HeaderThree = "6" }
		});

	// Act
	instance.WriteResponseBodyAsync(mockContext).GetAwaiter().GetResult();

	// Assert
	instance.ContentType.Should().Be("text/csv");
	instance.CanWriteResult(mockContext).Should().BeTrue();
	instance.SupportedMediaTypes.Should().BeEquivalentTo(new MediaTypeCollection() { "text/csv" });
	Assert.Throws<ArgumentNullException>(() => instance.CanWriteTypeInternal(null));

	// NOTE(review): assumes the fake response body is readable from position 0 after the
	// write — confirm against FakeHttpContext if this assertion ever reads an empty string.
	StreamReader reader = new StreamReader(mockContext.HttpContext.Response.Body);
	string strResponse = reader.ReadToEnd();
	strResponse.Should().Be(content);
}
// Static initializer: creates the formatters under test and deterministic metrics fixtures.
static OutputFormatterTest()
{
	JsonFormatter = new JsonOutputFormatter();
	YamlFormatter = new YamlOutputFormatter();
	CsvFormatter = new CsvOutputFormatter();

	// Fixed commit metadata so serialized output never varies between runs.
	var commitDate = new DateTime(2021, 11, 21);
	var commitSha = "da39a3ee5e6b4b0d3255bfef95601890afd80709";

	// One package at its latest release (libyear 0.0).
	var packages = new LibYearResult();
	packages.Add(new LibYearPackageResult("polyglot", "0.3.3", new DateTime(2011, 11, 01), "0.3.3", new DateTime(2011, 11, 01), 0.0, false, false));

	s_metricsResultTestData = new MetricsResult(commitDate, commitSha, packages);
	// Same result twice to cover list serialization.
	s_metricsResultListTestData = new List<MetricsResult>
	{
		s_metricsResultTestData,
		s_metricsResultTestData
	};
}
/// <summary>
/// Builds the "clustering" message pipeline (listener → JSON parser → router → console +
/// Data Lake outputs) and stores the assembled processor in <c>_messageProcessor</c>.
/// Processing itself is NOT started here (the blocking call is intentionally disabled).
/// </summary>
/// <param name="cancellationToken">Currently unused; accepted for interface compatibility.</param>
/// <returns>A completed task holding the configured Event Hub path.</returns>
public Task <string> OpenAsync(CancellationToken cancellationToken)
{
	var listener = new EventHubsListener(_configuration);
	var parser = new EventHubListenerMessageJsonParser();
	var builder = new MessageRouterBuilder();

	// CSV columns emitted by the clustering pipeline.
	var clusteringSerializer = new CsvOutputFormatter("id", "type", "version", "enqueueTimeUtc", "gameSessionId", "lat", "lon", "geoHash", "geoHashPrecision", "geoHashCenterLat", "geoHashCenterLon", "rnd");

	// Data Lake output rolls to a new file every 5 minutes.
	var clusteringDlsOutputManager = new DataLakeStoreOutputManager(
		clusteringSerializer,
		new PipelineDateFilePathAlgorithm(newFileOption: NewFileNameOptions.Every5Minutes),
		_serviceClientCredentials,
		_subscription,
		_dataLakeName);

	var clusteringConsoleOutputManager = new ConsoleOutputManager(clusteringSerializer);

	builder.Pipeline("clustering")
		.HandlesMessageType("geo-location", "1.0.0")
		.HandlesMessageType("geo-location", "1.0.1")
		.AddHandler(new RandomIntMessageHandler())
		.OutputTo(clusteringConsoleOutputManager, clusteringDlsOutputManager);

	// Build all pipelines
	var router = builder.Build();

	// Attach the differing parts of the message processor together.
	_messageProcessor = new MessageProcessor <EventHubListenerMessage>(listener, parser, router);

	// Processing is started elsewhere; enabling this here would block forever.
	// FIX: the disabled call previously referenced a non-existent local `messageProcessor`.
	//await _messageProcessor.ProcessAndBlockAsync();

	// FIX: the method contained no await, so `async` triggered compiler warning CS1998;
	// returning a completed task preserves the Task<string> signature without the warning.
	return Task.FromResult(_configuration.EventHubPath);
}
// Entry point for the analytics host: validates configuration, authenticates against
// Azure AD, wires up three named pipelines (clustering, dau, sessions) plus a default
// pipeline, then blocks forever processing Event Hub messages.
public async Task RunAsync()
{
	// Check that all configurations are set before continuing
	if (!Config.Check())
	{
		// Exiting due to missing configuration
		Console.WriteLine("Press any key to continue");
		Console.ReadKey(true);
		return;
	}

	// Authenticate against Azure AD once and re-use the credentials for all needed purposes
	// (NOTE: "Cretentials" typo is preserved — it is the declared identifier used below.)
	var serviceClientCretentials = await ApplicationTokenProvider.LoginSilentAsync(Config.Root[Config.NAH_AAD_Domain],
		new ClientCredential(Config.Root[Config.NAH_AAD_CLIENTID], Config.Root[Config.NAH_AAD_CLIENTSECRET]));

	// Setup Listener. This will be the same for all pipelines we are building.
	var listenerConfig = new EventHubsListenerConfiguration
	{
		EventHubConnectionString = Config.Root[Config.NAH_EHLISTENER_CONNECTIONSTRING],
		EventHubPath = Config.Root[Config.NAH_EHLISTENER_EVENTHUBPATH],
		ConsumerGroupName = Config.Root[Config.NAH_EHLISTENER_CONSUMERGROUP],
		StorageConnectionString = Config.Root[Config.NAH_EHLISTENER_STORAGECONNECTIONSTRING],
		LeaseContainerName = Config.Root[Config.NAH_EHLISTENER_LEASECONTAINERNAME]
	};
	var listener = new EventHubsListener(listenerConfig);

	// Setup Message Parser. By default we are using Nether JSON Messages.
	// Corrupt messages are routed to the console handler; debug enqueued time is allowed.
	var parser = new EventHubListenerMessageJsonParser(new ConsoleCorruptMessageHandler()) { AllowDbgEnqueuedTime = true };

	// Use a builder to create routing infrastructure for messages and the pipelines
	var builder = new MessageRouterBuilder();

	// Shared file-naming scheme: all file-based outputs roll to a new file every 3 hours.
	var filePathAlgorithm = new PipelineDateFilePathAlgorithm(newFileOption: NewFileNameOptions.Every3Hours);

	// Setting up "Geo Clustering Recipe"
	var clusteringSerializer = new CsvOutputFormatter("id", "type", "version", "enqueuedTimeUtc", "gameSession", "lat", "lon", "geoHash", "geoHashPrecision", "geoHashCenterLat", "geoHashCenterLon", "geoHashCenterDist", "rnd");

	builder.Pipeline("clustering")
		.HandlesMessageType("geo-location", "1.0.0")
		.AddHandler(new GeoHashMessageHandler { CalculateGeoHashCenterCoordinates = true })
		.AddHandler(new RandomIntMessageHandler())
		.OutputTo(new ConsoleOutputManager(clusteringSerializer)
			, new FileOutputManager(clusteringSerializer, filePathAlgorithm, Config.Root[Config.NAH_FILEOUTPUTMANAGER_LOCALDATAFOLDER])
			, new DataLakeStoreOutputManager(
				clusteringSerializer,
				filePathAlgorithm,
				serviceClientCretentials,
				Config.Root[Config.NAH_AZURE_SUBSCRIPTIONID],
				Config.Root[Config.NAH_AZURE_DLSOUTPUTMANAGER_ACCOUNTNAME])
		);

	// Setting up "Daily Active Users Recipe"
	var dauSerializer = new CsvOutputFormatter("id", "type", "version", "enqueuedTimeUtc", "gameSession", "gamerTag");

	builder.Pipeline("dau")
		.HandlesMessageType("session-start", "1.0.0")
		.OutputTo(new ConsoleOutputManager(dauSerializer)
			, new FileOutputManager(dauSerializer, filePathAlgorithm, Config.Root[Config.NAH_FILEOUTPUTMANAGER_LOCALDATAFOLDER])
			, new DataLakeStoreOutputManager(
				dauSerializer,
				filePathAlgorithm,
				serviceClientCretentials,
				Config.Root[Config.NAH_AZURE_SUBSCRIPTIONID],
				Config.Root[Config.NAH_AZURE_DLSOUTPUTMANAGER_ACCOUNTNAME])
		);

	// Setting up the "sessions" pipeline: routes heartbeat messages to the same three outputs.
	var sessionSerializer = new CsvOutputFormatter("id", "type", "version", "enqueuedTimeUtc", "gameSession");

	builder.Pipeline("sessions")
		.HandlesMessageType("heartbeat", "1.0.0")
		.OutputTo(new ConsoleOutputManager(sessionSerializer)
			, new FileOutputManager(sessionSerializer, filePathAlgorithm, Config.Root[Config.NAH_FILEOUTPUTMANAGER_LOCALDATAFOLDER])
			, new DataLakeStoreOutputManager(
				sessionSerializer,
				filePathAlgorithm,
				serviceClientCretentials,
				Config.Root[Config.NAH_AZURE_SUBSCRIPTIONID],
				Config.Root[Config.NAH_AZURE_DLSOUTPUTMANAGER_ACCOUNTNAME])
		);

	// Messages not claimed by a named pipeline fall through here: headerless CSV to console.
	builder.DefaultPipeline
		.AddHandler(new RandomIntMessageHandler())
		.OutputTo(new ConsoleOutputManager(new CsvOutputFormatter { IncludeHeaders = false }));

	// Build all pipelines
	var router = builder.Build();

	// Attach the differing parts of the message processor together
	var messageProcessor = new MessageProcessor<EventHubListenerMessage>(listener, parser, router);

	// The following method will never exit
	await messageProcessor.ProcessAndBlockAsync();
}
// Console entry point: greets, loads configuration, validates required settings, wires up
// the "clustering" pipeline (console + Data Lake outputs) and blocks on message processing.
private static void Main(string[] args)
{
	Greet();
	SetupConfigurationProviders();

	// Check that all configurations are set before continuing
	var configStatus = CheckConfigurationStatus(
		NAH_EHListener_ConnectionString,
		NAH_EHListener_EventHubPath,
		NAH_EHListener_ConsumerGroup,
		NAH_EHListener_StorageConnectionString,
		NAH_EHListener_LeaseContainerName,
		NAH_AAD_Domain,
		NAH_AAD_ClientId,
		NAH_AAD_ClientSecret,
		NAH_Azure_SubscriptionId,
		NAH_Azure_DLSOutputManager_AccountName);
	if (configStatus != ConfigurationStatus.Ok)
	{
		// Exiting due to missing configuration
		Console.WriteLine("Press any key to continue");
		Console.ReadKey(true);
		return;
	}

	// Setup Listener. This will be the same for all pipelines we are building.
	var listenerConfig = new EventHubsListenerConfiguration
	{
		EventHubConnectionString = s_configuration[NAH_EHListener_ConnectionString],
		EventHubPath = s_configuration[NAH_EHListener_EventHubPath],
		ConsumerGroupName = s_configuration[NAH_EHListener_ConsumerGroup],
		StorageConnectionString = s_configuration[NAH_EHListener_StorageConnectionString],
		LeaseContainerName = s_configuration[NAH_EHListener_LeaseContainerName]
	};
	var listener = new EventHubsListener(listenerConfig);

	// Setup Message Parser. By default we are using Nether JSON Messages.
	// Setting up parser that knows how to parse those messages.
	var parser = new EventHubListenerMessageJsonParser();

	// Use a builder to create routing infrastructure for messages and the pipelines
	var builder = new MessageRouterBuilder();

	// Setting up "Geo Clustering Recipe"
	var clusteringSerializer = new CsvOutputFormatter("id", "type", "version", "enqueueTimeUtc", "gameSessionId", "lat", "lon", "geoHash", "geoHashPrecision", "geoHashCenterLat", "geoHashCenterLon");

	// Data Lake output authenticates with the AAD app credentials and rolls files every 5 minutes.
	var clusteringDlsOutputManager = new DataLakeStoreOutputManager(
		clusteringSerializer,
		new PipelineDateFilePathAlgorithm(newFileOption: NewFileNameOptions.Every5Minutes),
		domain: s_configuration[NAH_AAD_Domain],
		clientId: s_configuration[NAH_AAD_ClientId],
		clientSecret: s_configuration[NAH_AAD_ClientSecret],
		subscriptionId: s_configuration[NAH_Azure_SubscriptionId],
		adlsAccountName: s_configuration[NAH_Azure_DLSOutputManager_AccountName]);

	var clusteringConsoleOutputManager = new ConsoleOutputManager(clusteringSerializer);

	builder.Pipeline("clustering")
		.HandlesMessageType("geo-location", "1.0.0")
		.HandlesMessageType("geo-location", "1.0.1")
		.AddHandler(new GeoHashMessageHandler { CalculateGeoHashCenterCoordinates = true })
		.OutputTo(clusteringConsoleOutputManager, clusteringDlsOutputManager);

	// Build all pipelines
	var router = builder.Build();

	// Attach the differing parts of the message processor together
	var messageProcessor = new MessageProcessor <EventHubListenerMessage>(listener, parser, router);

	// Run in an async context since main method is not allowed to be marked as async
	Task.Run(async() =>
	{
		await messageProcessor.ProcessAndBlockAsync();
	}).GetAwaiter().GetResult();
}