/// <summary>
/// Blob-triggered function: reads a raw flux data file from the
/// ectower-cookeast container, runs a status check on it, and logs any
/// alert that was broadcast.
/// </summary>
public static void Run(
    [BlobTrigger("ectower-cookeast/raw/Flux/{name}", Connection = "CookEastFluxConnectionString")] System.IO.Stream inBlob,
    string name,
    TraceWriter log)
{
    // Pull the entire triggering blob into memory as text.
    string content;
    using (var reader = new StreamReader(inBlob, true))
    {
        content = reader.ReadToEnd();
    }

    // -8 is the UTC offset applied to timestamps (assumes Pacific Standard
    // Time — TODO confirm).
    var extractor = new TOA5Extractor(name, content, -8);

    // Twitter credentials are sourced from app settings.
    var tweeter = new Tweeter(
        ConfigurationManager.AppSettings["TwitterConsumerKey"],
        ConfigurationManager.AppSettings["TwitterConsumerSecret"],
        ConfigurationManager.AppSettings["TwitterAccessToken"],
        ConfigurationManager.AppSettings["TwitterAccessTokenSecret"]);

    var statusChecker = new StatusChecker(extractor, tweeter);

    // BroadcastStatus returns the alert text (empty when nothing was raised).
    string alert = statusChecker.BroadcastStatus();

    if (!string.IsNullOrEmpty(alert))
    {
        log.Info($"Alert: {alert}");
    }
}
/// <summary>
/// Wires the checker to its data source and its alert sink.
/// </summary>
/// <param name="extractor">Source of TOA5 observations to inspect.</param>
/// <param name="alerter">Destination for any alerts that are raised.</param>
public StatusChecker(TOA5Extractor extractor, ISendAlerts alerter)
{
    this.alerter = alerter;
    this.extractor = extractor;
}
public void GetTOA5_ValidContentV1_ReturnsCorrectTOA5()
{
    // Arrange
    var sut = new TOA5Extractor(pathToFileWithValidContentV1);
    List<IObservation> expectedObservations =
        LoggerNetArranger.GetFluxObservationsDerivedFromActualDataV1();

    // Act
    TOA5 fluxTable = sut.GetTOA5(new Flux());

    // Assert
    Assert.Equal(expectedObservations, fluxTable.Observations);
    // TODO: Test actual metadata
}
public void FilePathConstructor_ValidContentV1_SetsData()
{
    // Arrange
    // NOTE(review): the test name says V1 but the fixture used is
    // pathToFileWithValidContentV2 — confirm which is intended.
    string expectedFileName = "CookEastEcTower_Met_Raw_2017_06_20_1115.dat";
    int expectedContentLength = 710;

    // Act
    var sut = new TOA5Extractor(pathToFileWithValidContentV2);

    // Assert
    Assert.Equal(expectedFileName, sut.FileName);
    Assert.Equal(expectedContentLength, sut.FileContent.Length);
}
public void GetTOA5_ActualContentV2_ReturnsCorrectTOA5()
{
    // Arrange
    var sut = new TOA5Extractor(pathToFileWithActualLoggerOutputV2);
    TOA5 expected =
        LoggerNetArranger.GetToa5MeteorologyDerivedFromActualDataV2();

    // Act
    TOA5 actual = sut.GetTOA5(new Meteorology());

    // Assert
    Assert.Equal(expected, actual);
}
public void GetTOA5_TestContentV2_ReturnsCorrectTOA5()
{
    // Arrange
    var sut = new TOA5Extractor(pathToFileWithTestLoggerOutputV2);
    TOA5 expected = LoggerNetArranger.GetToa5FluxDerivedFromTestDataV2();

    // Act
    TOA5 actual = sut.GetTOA5(new Flux());

    // Assert
    Assert.Equal(expected, actual);
}
public void GetObservations_AdjustedTimezoneV1_ReturnsCorrectTimes()
{
    //# Arrange
    TOA5Extractor sut = new TOA5Extractor(pathToFileToTestTimeZoneV2, -8);

    //# Act
    List<IObservation> actualObservations =
        sut.GetObservations(new Meteorology()).ToList();

    //# Assert
    // FIX: xUnit convention is Assert.Equal(expected, actual); the original
    // had the arguments reversed, which produces misleading failure messages.
    // Also removed a dead "new List<IObservation>()" initialization that was
    // immediately overwritten.
    Assert.Equal(new DateTime(2017, 06, 20, 8, 30, 0), actualObservations[0].TIMESTAMP);
    Assert.Equal(new DateTime(2017, 06, 20, 19, 30, 0), actualObservations[1].TIMESTAMP);
    Assert.Equal(new DateTime(2017, 06, 21, 7, 15, 0), actualObservations[2].TIMESTAMP);
}
public void GetObservations_TestContentV2_ReturnsCorrectObservations()
{
    //# Arrange
    List<IObservation> expectedObservations =
        LoggerNetArranger.GetFluxObservationsDerivedFromTestDataV2();
    var sut = new TOA5Extractor(pathToFileWithTestLoggerOutputV2);

    //# Act
    List<IObservation> actualObservations = sut.GetObservations(new Flux())
        .Cast<IObservation>()
        .ToList();

    //# Assert
    Assert.Equal(expectedObservations[0], actualObservations[0]);
}
public void GetObservations_ValidContentV1_ReturnsCorrectObservations()
{
    //# Arrange
    List<IObservation> expectedObservations =
        LoggerNetArranger.GetFluxObservationsDerivedFromActualDataV1();
    var sut = new TOA5Extractor(pathToFileWithValidContentV1);

    //# Act
    List<IObservation> actualObservations = sut.GetObservations<Flux>()
        .Cast<IObservation>()
        .ToList();

    //# Assert
    Assert.Equal(expectedObservations[0], actualObservations[0]);
}
public void CheckStatus_HasNan_CreatesSingleAlert()
{
    // Arrange
    // FIX: the FileStream was never disposed; wrap it in a using block so
    // the file handle is released even when the test fails.
    string contents;
    using (var s = new FileStream(fileWithBadNAN, FileMode.Open))
    {
        contents = convertStreamToString(s);
    }
    var e = new TOA5Extractor(
        "CookEastEcTower_Flux_Raw_2017_11_03_1300_badNAN.dat",
        contents,
        -8);
    var a = new MockTweeter();
    StatusChecker sut = new StatusChecker(e, a);

    // Act
    var alerts = sut.CheckStatus();

    // Assert
    Assert.Single(alerts);
}
public void CheckStatus_HasNanAtOkLocations_NoAlert()
{
    // Arrange
    // FIX: the FileStream was never disposed; wrap it in a using block so
    // the file handle is released even when the test fails.
    string contents;
    using (var s = new FileStream(fileWithNANOkLocations, FileMode.Open))
    {
        contents = convertStreamToString(s);
    }
    var e = new TOA5Extractor(
        "CookEastEcTower_Flux_Raw_2017_11_03_1300_okNAN.dat",
        contents,
        -8);
    var a = new MockTweeter();
    StatusChecker sut = new StatusChecker(e, a);

    // Act
    var alerts = sut.CheckStatus();

    // Assert
    Assert.Empty(alerts);
}
public void GetObservations_ValidContentV1_ReturnsCorrectObservations()
{
    //# Arrange
    Meteorology expectedRecord = new Meteorology()
    {
        TIMESTAMP = new System.DateTime(2017, 6, 20, 11, 30, 00),
        RECORD = 15,
        amb_tmpr_Avg = 23.13316,
        rslt_wnd_spd = 4.940109,
        wnd_dir_compass = 259.7,
        RH_Avg = 56.22676,
        Precipitation_Tot = 0,
        amb_press_Avg = 93.25672,
        PAR_density_Avg = 1956.598,
        batt_volt_Avg = 13.63667,
        panel_tmpr_Avg = 25.22728,
        std_wnd_dir = 14.26,
        VPD_air = 1.244421,
        Rn_meas_Avg = 643.2509
    };
    var sut = new TOA5Extractor(pathToFileWithValidContentV2);

    //# Act
    List<IObservation> actualObservations =
        sut.GetObservations(new Meteorology()).ToList();

    //# Assert
    // TODO: Override obj.Equals for better test
    var second = actualObservations[1];
    Assert.Equal(
        expectedRecord.amb_tmpr_Avg,
        second.GetType().GetProperty("amb_tmpr_Avg").GetValue(second, null));
    Assert.Equal(expectedRecord.RECORD, second.RECORD);
    Assert.Equal(
        expectedRecord.Rn_meas_Avg,
        second.GetType().GetProperty("Rn_meas_Avg").GetValue(second, null));
}
public void CheckStatus_HasBadValueAtSecondRowAndNAN_CreatesExpectedAlertString()
{
    // Arrange
    // FIX: the FileStream was never disposed; wrap it in a using block so
    // the file handle is released even when the test fails.
    string contents;
    using (var s = new FileStream(fileWithBadDataAtSecondRowAndNAN, FileMode.Open))
    {
        contents = convertStreamToString(s);
    }
    var e = new TOA5Extractor(
        "CookEastEcTower_Flux_Raw_2017_11_03_1300_2linesBadCO2BadNAN.dat",
        contents,
        -8);
    var a = new MockTweeter();
    StatusChecker sut = new StatusChecker(e, a);
    string expected = "[E] CookEastEcTower_Flux_Raw_2017_11_03_1300_2linesBadCO2BadNAN.dat: Null values > 3.\r\n[W] CookEastEcTower_Flux_Raw_2017_11_03_1300_2linesBadCO2BadNAN.dat: CO2_sig_strgth_Min < 0.8 (0.7).";

    // Act
    var alerts = sut.CheckStatus();

    // Assert
    Assert.Equal(2, alerts.Count);
    Assert.Equal(
        expected,
        string.Join("\r\n", alerts));
}
public void GetObservations_ContentWithNanV1_DoesNotErrorSetsNull()
{
    // Arrange
    string dataPath = @"Nodes/LoggerNet/Assets/CookEastEcTower_Met_Raw_2017_10_24_0615.dat";
    TOA5Extractor sut = new TOA5Extractor(dataPath);

    // Act
    List<IObservation> actualObs = sut.GetObservations(new Meteorology())
        .Cast<IObservation>()
        .ToList();

    // Assert: NAN values in the source file should come through as null
    // rather than throwing.
    object ReadProp(string propName) => actualObs[0]
        .GetType()
        .GetProperty(propName)
        .GetValue(actualObs[0], null);

    Assert.Null(ReadProp("VPD_air"));
    Assert.Null(ReadProp("RH_Avg"));
}
/// <summary>
/// Blob-triggered ETL function: extracts flux observations from a TOA5 data
/// file, transforms them to MeasurementV2 documents, and bulk-loads them
/// into Cosmos DB. An EtlEvent record of the run is always written, even on
/// failure (see the finally block).
/// </summary>
public static async Task Run(
    [BlobTrigger("ectower-cookeast/raw/Flux/{name}", Connection = "ltarcafdatastreamConnectionString")] Stream myBlob,
    string name,
    TraceWriter log,
    ExecutionContext context)
{
    log.Info($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    EtlEvent etlEvent = new EtlEvent(
        "EtlEvent",
        "AzureFunction",
        "http://files.cafltar.org/data/schema/documentDb/v2/etlEvent.json",
        "CafMeteorologyEcTower",
        "1.1",
        "LoggerNetFluxToCosmosDBSqlApiMeasurementCookEast",
        DateTime.UtcNow);

    etlEvent.Outputs = null;
    etlEvent.Inputs.Add($"ectower-cookeast/raw/Flux/{name}");
    etlEvent.Logs.Add($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    string contents = "";
    log.Info("About to read contents");
    try
    {
        // FIX: the StreamReader was never disposed; a using block releases
        // it (and the underlying blob stream) deterministically.
        using (StreamReader reader = new StreamReader(myBlob))
        {
            contents = reader.ReadToEnd();
        }
    }
    catch (Exception e)
    {
        // Best-effort: a read failure is recorded and leaves contents empty,
        // which skips the ETL stage below.
        etlEvent.Logs.Add(
            $"Error reading Blob: {e.Message}");
    }

    DocumentClient client;
    try
    {
        client = new DocumentClient(
            new Uri(
                ConfigurationManager.AppSettings["AzureCosmosDBUri"]),
            ConfigurationManager.AppSettings["AzureCosmosDBKey"]);
    }
    catch (Exception e)
    {
        etlEvent.Logs.Add(
            $"Error creating DocumentClient: {e.Message}");
        log.Error($"Error creating DocumentClient: {e.Message}");
        throw new Exception("Error creating DocumentClient", e);
    }

    DocumentLoader loader = new DocumentLoader(
        client,
        "cafdb",
        "items");

    log.Info("Created client and loader");

    if (!String.IsNullOrEmpty(contents))
    {
        try
        {
            log.Info("Attempting extract and transform");
            // -8 shifts timestamps (assumes Pacific Standard Time — TODO confirm).
            TOA5Extractor extractor = new TOA5Extractor(
                name,
                contents,
                -8);

            TOA5 fluxTable = extractor.GetTOA5<Flux>();

            // TODO: Move strings and such to settings file
            DocumentDbMeasurementV2Transformer transformer =
                new DocumentDbMeasurementV2Transformer(
                    new MapFromFluxDataTableToCafStandards(),
                    "http://files.cafltar.org/data/schema/documentDb/v2/measurement.json",
                    etlEvent.Id,
                    "Measurement",
                    "CafMeteorologyEcTower",
                    1800);

            List<MeasurementV2> measurements = transformer.ToMeasurements(fluxTable);

            log.Info("Attempting load");
            /// Using the bulkImport sproc doesn't provide much benefit since
            /// most data tables will only have a few measurements with the
            /// same partition key. But it's better than nothing.
            StoredProcedureResponse<bool>[] results = await loader.LoadBulk(measurements);
            log.Info($"Loaded {results.Length} measurements");
        }
        catch (Exception e)
        {
            etlEvent.Logs.Add(
                $"Error in ETL pipeline: {e.Message}");
            log.Error($"Error in ETL pipeline: {e.Message}");
            throw new Exception("Error in ETL pipeline", e);
        }
        finally
        {
            // Always record the run, including failures.
            log.Info("Loading etlEvent to db");
            etlEvent.DateTimeEnd = DateTime.UtcNow;
            ResourceResponse<Document> result = await loader.LoadNoReplace(etlEvent);
            log.Info($"Result of writing EtlEvent: {result.StatusCode}");
        }
        log.Info("Function completed");
    }
}
public void GetMetadata_ValidContentV1_ReturnsCorrectMetadata()
{
    //# Arrange
    Metadata expectedMetadata = new Metadata()
    {
        FileFormat = "TOA5",
        StationName = "LTAR_CookEast",
        DataLoggerType = "CR3000",
        SerialNumber = 6503,
        OperatingSystemVersion = "CR3000.Std.31",
        DataloggerProgramName = "CPU:DEFAULT.CR3",
        DataloggerProgramSignature = 13636,
        TableName = "LTAR_Met",
        Variables = new List<Variable>()
        {
            new Variable() { FieldName = "TIMESTAMP", Units = "TS", Processing = "" },
            new Variable() { FieldName = "RECORD", Units = "", Processing = "" },
            new Variable() { FieldName = "amb_tmpr_Avg", Units = "C", Processing = "Avg" },
            new Variable() { FieldName = "rslt_wnd_spd", Units = "m/s", Processing = "Smp" },
            new Variable() { FieldName = "wnd_dir_compass", Units = "degrees", Processing = "Smp" },
            new Variable() { FieldName = "RH_Avg", Units = "%", Processing = "Avg" },
            new Variable() { FieldName = "Precipitation_Tot", Units = "mm", Processing = "Tot" },
            new Variable() { FieldName = "amb_press_Avg", Units = "kPa", Processing = "Avg" },
            new Variable() { FieldName = "PAR_density_Avg", Units = "umol/(s m^2)", Processing = "Avg" },
            new Variable() { FieldName = "batt_volt_Avg", Units = "V", Processing = "Avg" },
            new Variable() { FieldName = "panel_tmpr_Avg", Units = "C", Processing = "Avg" },
            new Variable() { FieldName = "std_wnd_dir", Units = "degrees", Processing = "Smp" },
            new Variable() { FieldName = "VPD_air", Units = "kpa", Processing = "Smp" },
            new Variable() { FieldName = "Rn_meas_Avg", Units = "W/m^2", Processing = "Avg" }
        }
    };
    var sut = new TOA5Extractor(pathToFileWithValidContentV2);

    //# Act
    Metadata actualMetadata = sut.GetMetadata();

    //# Assert
    // TODO: Override obj.Equals for better testing
    Assert.Equal(expectedMetadata.FileFormat, actualMetadata.FileFormat);
    Assert.Equal(expectedMetadata.TableName, actualMetadata.TableName);
    Assert.Equal(expectedMetadata.SerialNumber, actualMetadata.SerialNumber);

    // Spot-check a sample of per-variable fields by name.
    Variable ExpectedVar(string fieldName) =>
        expectedMetadata.Variables.Find(mv => mv.FieldName == fieldName);
    Variable ActualVar(string fieldName) =>
        actualMetadata.Variables.Find(mv => mv.FieldName == fieldName);

    Assert.Equal(ExpectedVar("TIMESTAMP").Processing, ActualVar("TIMESTAMP").Processing);
    Assert.Equal(ExpectedVar("amb_tmpr_Avg").Units, ActualVar("amb_tmpr_Avg").Units);
    Assert.Equal(ExpectedVar("Rn_meas_Avg").Units, ActualVar("Rn_meas_Avg").Units);
}
/// <summary>
/// Runs the ETL pipeline for one blob: reads its contents, extracts TOA5
/// observations, transforms them to MeasurementV2 documents, and loads each
/// one into Cosmos DB (counting successes and failures). An EtlEvent record
/// of the run is always written, even when the pipeline throws.
/// </summary>
public async Task PipeItAsync()
{
    EtlEvent etlEvent = new EtlEvent(
        "EtlEvent",
        "AzureFunction",
        "http://files.cafltar.org/data/schema/documentDb/v2/etlEvent.json",
        "CafMeteorologyEcTower",
        version,
        functionName,
        DateTime.UtcNow);

    etlEvent.Inputs.Add(blobPath);

    string contents = "";
    log.LogInformation("About to read contents");
    try
    {
        // FIX: the StreamReader was never disposed; a using block releases
        // it (and the underlying blob stream) deterministically.
        using (StreamReader reader = new StreamReader(myBlob))
        {
            contents = reader.ReadToEnd();
        }
    }
    catch (Exception e)
    {
        // Best-effort: a read failure is recorded and leaves contents empty,
        // which skips the ETL stage below.
        etlEvent.Logs.Add(
            $"Error reading Blob: {e.Message}");
    }

    DocumentLoader loader = new DocumentLoader(
        client,
        "cafdb",
        "items");

    log.LogInformation("Created loader");

    if (!String.IsNullOrEmpty(contents))
    {
        try
        {
            log.LogInformation("Attempting extract and transform");
            // -8 shifts timestamps (assumes Pacific Standard Time — TODO confirm).
            TOA5Extractor extractor = new TOA5Extractor(
                name,
                contents,
                -8);

            TOA5 toa5 = extractor.GetTOA5(observation);

            CosmosDBSqlApiV2Transformer transformer =
                new CosmosDBSqlApiV2Transformer(
                    new MapFromToa5DataTableToCafStandards(),
                    "http://files.cafltar.org/data/schema/documentDb/v2/measurement.json",
                    etlEvent.Id,
                    "Measurement",
                    "CafMeteorologyEcTower",
                    timestep);

            List<MeasurementV2> measurements = transformer.ToMeasurements(toa5);

            log.LogInformation("Attempting load");
            int docsLoaded = 0;
            int docsError = 0;
            foreach (MeasurementV2 measurement in measurements)
            {
                // Load documents one at a time; a single failure is counted
                // and logged without aborting the rest of the batch.
                try
                {
                    ResourceResponse<Document> result = await loader.LoadNoReplace(measurement);
                    if (result.StatusCode == HttpStatusCode.Created ||
                        result.StatusCode == HttpStatusCode.OK)
                    {
                        etlEvent.Outputs.Add(result.Resource.Id);
                        docsLoaded++;
                    }
                    else
                    {
                        etlEvent.Logs.Add(
                            $"StatusCode: {result.StatusCode} on MeasurementV2: {measurement.Id.ToString()}");
                        docsError++;
                    }
                }
                catch (Exception e)
                {
                    etlEvent.Logs.Add(
                        $"Error loading {measurement.Id.ToString()} MeasurementV2: {e.Message}");
                    log.LogError($"Error loading MeasurementV2: {e.Message}");
                    docsError++;
                }
            }

            log.LogInformation(
                $"Loaded {docsLoaded.ToString()} MeasurementV2s.");
            log.LogInformation(
                $"Error loading {docsError.ToString()} MeasurementV2s.");
            etlEvent.Logs.Add(
                $"Loaded {docsLoaded.ToString()} MeasurementV2s");
            etlEvent.Logs.Add(
                $"Error loading {docsError.ToString()} MeasurementV2s");
        }
        catch (Exception e)
        {
            etlEvent.Logs.Add(
                $"Error in ETL pipeline: {e.Message}");
            log.LogError($"Error in ETL pipeline: {e.Message}");
            throw new Exception("Error in ETL pipeline", e);
        }
        finally
        {
            // Always record the run, including failures.
            log.LogInformation("Loading etlEvent to db");
            etlEvent.DateTimeEnd = DateTime.UtcNow;
            ResourceResponse<Document> result = await loader.LoadNoReplace(etlEvent);
            log.LogInformation($"Result of writing EtlEvent: {result.StatusCode.ToString()}");
        }
    }
}
/// <summary>
/// EventGrid-triggered function: on BlobCreated events, parses the new
/// blob's TOA5 metadata to decide its destination dataset, then moves the
/// blob from the transient container to the raw zone of the data lake.
/// An EtlEvent record of the run is always written (see the finally block).
/// </summary>
public static async Task Run(
    [EventGridTrigger] EventGridEvent eventGridEvent,
    ILogger log)
{
    log.LogInformation(eventGridEvent.Subject);

    string eventCallerName = "CafMeteorologyECTowerFcns.DataLakeTransientToRaw()";
    string eventCallerVersion = "v0.1.7";
    log.LogInformation(eventCallerVersion);

    EtlEventService etlEventService = new EtlEventService(
        eventCallerName,
        eventCallerVersion,
        "AzureFunction");

    etlEventService.LogInformation(
        eventCallerName,
        eventCallerVersion,
        eventGridEvent.ToString());
    etlEventService.AddInput($"EventGridEvent.Subject: {eventGridEvent.Subject}");
    etlEventService.LogInformation(
        eventCallerName,
        eventCallerVersion,
        $"EventGridEvent.Data: {eventGridEvent.Data}");

    // Authenticate the Function for access to blob containers
    string objectId = Environment.GetEnvironmentVariable("FUNCTION_OBJECT_ID");
    ManagedIdentityCredential credential = new ManagedIdentityCredential(objectId);

    // Read parameters
    string OUTPUT_CONTAINER = Environment.GetEnvironmentVariable("OUTPUT_CONTAINER");
    string PROJECT_ID = Environment.GetEnvironmentVariable("PROJECT_ID");
    string DATALAKE_ENDPOINT = Environment.GetEnvironmentVariable("DATALAKE_ENDPOINT");

    if (eventGridEvent.EventType != "Microsoft.Storage.BlobCreated")
    {
        string msg = "EventType not BlobCreated, aborting";
        etlEventService.LogError(
            eventCallerName,
            eventCallerVersion,
            msg);
        throw new Exception(msg);
    }

    // Get info from the event
    log.LogInformation("Parsing Event");
    JsonDocument json = JsonDocument.Parse(eventGridEvent.Data.ToString());
    string apiCall = json.RootElement.GetProperty("api").GetString();
    log.LogInformation($"api: {apiCall}");

    // FIX: use short-circuit || instead of the non-short-circuit bitwise |
    // operator for the boolean comparisons (same result, idiomatic and
    // stops evaluating at the first match).
    // Only act on APIs that indicate the blob is fully committed.
    if (!(apiCall == "FlushWithClose" ||
          apiCall == "PutBlob" ||
          apiCall == "PutBlockList" ||
          apiCall == "CopyBlob"))
    {
        string msg = "EventGridEvent api not completely committed, aborting";
        log.LogInformation(msg);
        return;
    }

    try
    {
        string inputBlobUri = json.RootElement
            .GetProperty("url")
            .GetString();
        BlobUriBuilder inputBlobUriBuilder = new BlobUriBuilder(new Uri(inputBlobUri));

        // Get input blob contents
        log.LogInformation("Creating blob container client");
        var blobContainerClient = new BlobContainerClient(
            new Uri(
                $"https://{inputBlobUriBuilder.Host}/{inputBlobUriBuilder.BlobContainerName}"),
            credential);
        var inputBlobClient = blobContainerClient.GetBlobClient(inputBlobUriBuilder.BlobName);

        if (!inputBlobClient.Exists())
        {
            log.LogInformation("Blob does not exist, exiting");
            return;
        }

        log.LogInformation("Found blob, downloading content");
        BlobDownloadInfo download = await inputBlobClient.DownloadAsync();
        string blobContent;
        using (StreamReader reader = new StreamReader(download.Content))
            blobContent = await reader.ReadToEndAsync();

        string blobName = Path.GetFileName(new Uri(inputBlobUri).AbsolutePath);
        log.LogInformation($"Blob length: {blobContent.Length}");

        if (blobContent.Length <= 0)
        {
            log.LogInformation("Blob is empty, exiting");
            return;
        }

        // Get metadata from input blob; -8 shifts timestamps (assumes
        // Pacific Standard Time — TODO confirm).
        log.LogInformation("Parsing Blob into TOA5 metadata");
        TOA5Extractor extractor = new TOA5Extractor(
            blobName,
            blobContent,
            -8);
        Metadata blobMetadata = extractor.GetMetadata();

        string outputBlobDataset = GetOutputDatasetName(blobMetadata);
        string outputBlobDirPath = GetOutputSubDirPath(blobName);

        // Move blob
        log.LogInformation("Moving blob");
        AzureDataLakeService dataLakeService = new AzureDataLakeService(
            DATALAKE_ENDPOINT,
            credential);
        string outputBlobPath = $"{PROJECT_ID}/{outputBlobDataset}/{outputBlobDirPath}/{blobName}";
        log.LogInformation(outputBlobPath);

        string outputUri = await dataLakeService.MoveBlob(
            inputBlobUriBuilder.BlobContainerName,
            inputBlobUriBuilder.BlobName,
            OUTPUT_CONTAINER,
            outputBlobPath,
            etlEventService);

        log.LogInformation("Blob moved");
        etlEventService.AddOutput(outputUri);
    }
    catch (XmlException xmlException)
    {
        // NOTE(review): XmlException is swallowed (logged only), presumably
        // so a malformed file does not keep retriggering the function —
        // confirm this is intended.
        log.LogError(xmlException.Message);
    }
    catch (Exception e)
    {
        etlEventService.LogError(
            eventCallerName,
            eventCallerVersion,
            $"Exception occured: {e}");
        throw new Exception("Error in function", e);
    }
    finally
    {
        // Write EtlEvent
        EtlEventServiceConfig etlEventServiceConfig = new EtlEventServiceConfig()
        {
            Zone = OUTPUT_CONTAINER,
            Project = PROJECT_ID,
            Endpoint = new Uri(DATALAKE_ENDPOINT),
            Credential = credential
        };

        log.LogInformation("Writing EtlEvent");
        string etlEventUri = await etlEventService.WriteAsync(etlEventServiceConfig);
        log.LogInformation($"Wrote EtlEvent to: {etlEventUri}");
    }
}