/// <summary>
/// Flattens a TOA5 table into one MeasurementV2 per (observation, variable) pair.
/// Logger bookkeeping columns (TIMESTAMP, RECORD) are excluded; null results from
/// the per-variable factory are dropped.
/// </summary>
public List<MeasurementV2> ToMeasurements(TOA5 toa5)
{
    var measurements = new List<MeasurementV2>();

    foreach (IObservation observation in toa5.Observations)
    {
        foreach (Variable column in toa5.Metadata.Variables)
        {
            // TIMESTAMP and RECORD describe the row itself, not a measured value
            bool isBookkeepingColumn =
                column.FieldName == "TIMESTAMP" ||
                column.FieldName == "RECORD";
            if (isBookkeepingColumn)
            {
                continue;
            }

            MeasurementV2 candidate = CreateMeasurementFromVariable(
                column,
                observation,
                toa5.Metadata);

            if (candidate != null)
            {
                measurements.Add(candidate);
            }
        }
    }

    return measurements;
}
public void ToMeasurement_ValidDataMetV2_ReturnCorrectMeasurementsV2()
{
    // Arrange: transformer wired with the standard schema URL and a 900s timestep
    var mapper = new MapFromToa5DataTableToCafStandards();
    TOA5 toa5 = LoggerNetArranger.GetToa5MeteorologyDerivedFromActualDataV2();
    List<MeasurementV2> expected =
        LoggerNetArranger.GetMeasurementsV2DerivedFromActualDataMeteorologyV2();

    var sut = new CosmosDBSqlApiV2Transformer(
        mapper,
        "http://files.cafltar.org/data/schema/documentDb/v2/measurement.json",
        "CosmosDBSqlApiTransformer",
        "Measurement",
        "CafMeteorologyEcTower",
        900);

    // Act
    List<MeasurementV2> actual = sut.ToMeasurements(toa5);

    // Assert: same count, and element-wise "rough" equality per the comparer util
    Assert.Equal(expected.Count, actual.Count);
    Assert.True(ComparerUtil.AreMeasurementsRoughlyEqual(expected, actual));
}
/// <summary>
/// Assembles a TOA5 document: parsed header metadata plus the observation rows
/// extracted for the given table type.
/// </summary>
public TOA5 GetTOA5(IObservation datatable)
{
    return new TOA5
    {
        Metadata = GetMetadata(),
        Observations = GetObservations(datatable).ToList()
    };
}
/// <summary>
/// Generic counterpart of GetTOA5: extracts observations of type T and widens
/// them to the IObservation list the TOA5 container expects.
/// </summary>
public TOA5 GetTOA5<T>() where T : IObservation
{
    return new TOA5
    {
        Metadata = GetMetadata(),
        Observations = GetObservations<T>().Cast<IObservation>().ToList()
    };
}
public void GetTOA5_ValidContentV1_ReturnsCorrectTOA5()
{
    // Arrange
    var sut = new TOA5Extractor(pathToFileWithValidContentV1);
    List<IObservation> expectedObservations =
        LoggerNetArranger.GetFluxObservationsDerivedFromActualDataV1();

    // Act
    TOA5 fluxTable = sut.GetTOA5(new Flux());

    // Assert
    Assert.Equal(expectedObservations, fluxTable.Observations);

    // TODO: Test actual metadata
}
/// <summary>
/// Convenience wrapper that extracts a meteorology TOA5 table
/// (metadata header plus Meteorology observation rows).
/// </summary>
public TOA5 GetMeteorology()
{
    var met = new TOA5
    {
        Metadata = GetMetadata(),
        Observations = GetObservations<Meteorology>().Cast<IObservation>().ToList()
    };

    return met;
}
public void GetTOA5_TestContentV2_ReturnsCorrectTOA5()
{
    // Arrange
    TOA5 expectedTable = LoggerNetArranger.GetToa5FluxDerivedFromTestDataV2();
    var extractor = new TOA5Extractor(pathToFileWithTestLoggerOutputV2);

    // Act / Assert: extracted V2 flux table matches the arranged fixture
    Assert.Equal(expectedTable, extractor.GetTOA5(new Flux()));
}
public void GetTOA5_ActualContentV2_ReturnsCorrectTOA5()
{
    // Arrange
    TOA5 expectedTable = LoggerNetArranger.GetToa5MeteorologyDerivedFromActualDataV2();
    var extractor = new TOA5Extractor(pathToFileWithActualLoggerOutputV2);

    // Act / Assert: extracted V2 meteorology table matches the arranged fixture
    Assert.Equal(expectedTable, extractor.GetTOA5(new Meteorology()));
}
/// <summary>
/// Flattens a TOA5 meteorology table into one MeasurementV1 per
/// (observation, variable) pair, skipping the TIMESTAMP and RECORD
/// bookkeeping columns.
/// </summary>
/// <param name="meteorology">Parsed TOA5 table with metadata and observation rows.</param>
/// <returns>List of measurements; never contains null entries.</returns>
public List<MeasurementV1> ToMeasurements(TOA5 meteorology)
{
    List<MeasurementV1> measurements = new List<MeasurementV1>();

    foreach (IObservation obs in meteorology.Observations)
    {
        foreach (Variable variable in meteorology.Metadata.Variables)
        {
            // Skip TIMESTAMP and RECORD
            if (variable.FieldName == "TIMESTAMP" ||
                variable.FieldName == "RECORD")
            {
                continue;
            }

            MeasurementV1 measurement = CreateMeasurementFromVariable(
                variable,
                obs,
                meteorology.Metadata);

            // Fix: guard against a null factory result before adding, matching
            // the V2 transformer; previously a null could be inserted into the
            // returned list and fail later during load.
            if (measurement != null)
            {
                measurements.Add(measurement);
            }
        }
    }

    return measurements;
}
/// <summary>
/// Azure Function (blob trigger) that runs the LoggerNet flux ETL for CookEast:
/// reads the uploaded TOA5 blob, extracts a Flux table, transforms it into
/// MeasurementV2 documents, and bulk-loads them into Cosmos DB, recording an
/// EtlEvent for the run in the same database.
/// </summary>
/// <param name="myBlob">Stream of the blob that fired the trigger.</param>
/// <param name="name">Blob name captured from the trigger path.</param>
/// <param name="log">Function runtime logger (classic TraceWriter API).</param>
/// <param name="context">Function execution context (currently unused in the body).</param>
public static async Task Run(
    [BlobTrigger("ectower-cookeast/raw/Flux/{name}", Connection = "ltarcafdatastreamConnectionString")] Stream myBlob,
    string name,
    TraceWriter log,
    ExecutionContext context)
{
    log.Info($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    // Earlier configuration-builder approach kept for reference; settings now
    // come from ConfigurationManager.AppSettings below.
    //var config = new ConfigurationBuilder()
    //    .SetBasePath(context.FunctionAppDirectory)
    //    .AddJsonFile("local.settings.json", optional: true, reloadOnChange: true)
    //    .AddEnvironmentVariables()
    //    .Build();

    // Provenance record for this ETL run; loaded to the db in the finally block.
    EtlEvent etlEvent = new EtlEvent(
        "EtlEvent",
        "AzureFunction",
        "http://files.cafltar.org/data/schema/documentDb/v2/etlEvent.json",
        "CafMeteorologyEcTower",
        "1.1",
        "LoggerNetFluxToCosmosDBSqlApiMeasurementCookEast",
        DateTime.UtcNow);

    // NOTE(review): Outputs is explicitly nulled — the bulk-load path below does
    // not record per-document output ids. Confirm this is intentional.
    etlEvent.Outputs = null;
    etlEvent.Inputs.Add($"ectower-cookeast/raw/Flux/{name}");
    etlEvent.Logs.Add($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    StreamReader reader = new StreamReader(myBlob);
    string contents = "";

    log.Info("About to read contents");

    // Best-effort read: a failure is logged to the EtlEvent and leaves contents
    // empty, which skips the pipeline below rather than failing the function.
    try
    {
        contents = reader.ReadToEnd();
    }
    catch (Exception e)
    {
        etlEvent.Logs.Add(
            $"Error reading Blob: {e.Message}");
    }

    // Prior client wiring via config builder, kept for reference.
    //DocumentClient client = new DocumentClient(
    //    new Uri(
    //        config["Values:AzureCosmosDBUri"]),
    //    config["Values:AzureCosmosDBKey"]);

    DocumentClient client;

    // Client creation failure is fatal: logged, then rethrown (the EtlEvent is
    // NOT persisted in this case since the loader needs the client).
    try
    {
        client = new DocumentClient(
            new Uri(
                ConfigurationManager.AppSettings["AzureCosmosDBUri"]),
            ConfigurationManager.AppSettings["AzureCosmosDBKey"]);
    }
    catch (Exception e)
    {
        etlEvent.Logs.Add(
            $"Error creating DocumentClient: {e.Message}");
        log.Error($"Error creating DocumentClient: {e.Message}");
        throw new Exception("Error creating DocumentClient", e);
    }

    DocumentLoader loader = new DocumentLoader(
        client,
        "cafdb",
        "items");

    log.Info("Created client and loader");

    // Empty contents (blob unreadable or genuinely empty) ends the run quietly.
    if (!String.IsNullOrEmpty(contents))
    {
        try
        {
            log.Info("Attempting extract and transform");

            // -8: UTC offset applied by the extractor — presumably Pacific
            // standard time for the tower site; confirm against extractor docs.
            TOA5Extractor extractor = new TOA5Extractor(
                name,
                contents,
                -8);

            TOA5 fluxTable = extractor.GetTOA5 <Flux>();

            // TODO: Move strings and such to settings file
            DocumentDbMeasurementV2Transformer transformer =
                new DocumentDbMeasurementV2Transformer(
                    new MapFromFluxDataTableToCafStandards(),
                    "http://files.cafltar.org/data/schema/documentDb/v2/measurement.json",
                    etlEvent.Id,
                    "Measurement",
                    "CafMeteorologyEcTower",
                    1800);

            List <MeasurementV2> measurements =
                transformer.ToMeasurements(fluxTable);

            log.Info("Attempting load");
            /// Using the bulkImport sproc doesn't provide much benefit since
            /// most data tables will only have a few measurements with the
            /// same partition key.  But it's better than nothing.
            StoredProcedureResponse <bool>[] results =
                await loader.LoadBulk(measurements);
            log.Info($"Loaded {results.Length.ToString()} measurements");
        }
        catch (Exception e)
        {
            etlEvent.Logs.Add(
                $"Error in ETL pipeline: {e.Message}");
            log.Error($"Error in ETL pipeline: {e.Message}");
            throw new Exception("Error in ETL pipeline", e);
        }
        finally
        {
            // Always persist the EtlEvent, whether the pipeline succeeded or threw.
            log.Info("Loading etlEvent to db");
            etlEvent.DateTimeEnd = DateTime.UtcNow;
            ResourceResponse <Document> result =
                await loader.LoadNoReplace(etlEvent);
            log.Info($"Result of writing EtlEvent: {result.StatusCode.ToString()}");
        }

        log.Info("Function completed");
    }
}
/// <summary>
/// Runs the generalized LoggerNet-to-Cosmos ETL pipeline: reads the blob
/// contents, extracts a TOA5 table for the configured observation type,
/// transforms it to MeasurementV2 documents, and loads them one at a time
/// (no-replace), tallying successes and failures into the EtlEvent record,
/// which is persisted in the finally block.
/// Relies on instance state set elsewhere: version, functionName, blobPath,
/// myBlob, name, observation, timestep, client, log.
/// </summary>
public async Task PipeItAsync()
{
    // Provenance record for this run; persisted in the finally block below.
    EtlEvent etlEvent = new EtlEvent(
        "EtlEvent",
        "AzureFunction",
        "http://files.cafltar.org/data/schema/documentDb/v2/etlEvent.json",
        "CafMeteorologyEcTower",
        version,
        functionName,
        DateTime.UtcNow);

    etlEvent.Inputs.Add(blobPath);

    StreamReader reader = new StreamReader(myBlob);
    string contents = "";

    log.LogInformation("About to read contents");

    // Best-effort read: on failure, contents stays empty and the pipeline
    // below is skipped rather than failing the whole function.
    try
    {
        contents = reader.ReadToEnd();
    }
    catch (Exception e)
    {
        etlEvent.Logs.Add(
            $"Error reading Blob: {e.Message}");
    }

    DocumentLoader loader = new DocumentLoader(
        client,
        "cafdb",
        "items");

    log.LogInformation("Created loader");

    if (!String.IsNullOrEmpty(contents))
    {
        try
        {
            log.LogInformation("Attempting extract and transform");

            // -8: UTC offset applied by the extractor — presumably Pacific
            // standard time for the tower site; confirm against extractor docs.
            TOA5Extractor extractor = new TOA5Extractor(
                name,
                contents,
                -8);

            TOA5 toa5 = extractor.GetTOA5(observation);

            CosmosDBSqlApiV2Transformer transformer =
                new CosmosDBSqlApiV2Transformer(
                    new MapFromToa5DataTableToCafStandards(),
                    "http://files.cafltar.org/data/schema/documentDb/v2/measurement.json",
                    etlEvent.Id,
                    "Measurement",
                    "CafMeteorologyEcTower",
                    timestep);

            List <MeasurementV2> measurements =
                transformer.ToMeasurements(toa5);

            log.LogInformation("Attempting load");

            int docsLoaded = 0;
            int docsError = 0;

            // Load one document at a time so a single bad measurement doesn't
            // abort the batch; each failure is tallied and logged individually.
            foreach (MeasurementV2 measurement in measurements)
            {
                try
                {
                    ResourceResponse <Document> result =
                        await loader.LoadNoReplace(measurement);
                    // Created = newly inserted; OK = already present (no-replace).
                    if (
                        result.StatusCode == HttpStatusCode.Created ||
                        result.StatusCode == HttpStatusCode.OK)
                    {
                        etlEvent.Outputs.Add(result.Resource.Id);
                        docsLoaded++;
                    }
                    else
                    {
                        etlEvent.Logs.Add(
                            $"StatusCode: {result.StatusCode} on MeasurementV2: {measurement.Id.ToString()}");
                        docsError++;
                    }
                }
                catch (Exception e)
                {
                    etlEvent.Logs.Add(
                        $"Error loading {measurement.Id.ToString()} MeasurementV2: {e.Message}");
                    log.LogError($"Error loading MeasurementV2: {e.Message}");
                    docsError++;
                }
            }

            log.LogInformation(
                $"Loaded {docsLoaded.ToString()} MeasurementV2s.");
            log.LogInformation(
                $"Error loading {docsError.ToString()} MeasurementV2s.");
            etlEvent.Logs.Add(
                $"Loaded {docsLoaded.ToString()} MeasurementV2s");
            etlEvent.Logs.Add(
                $"Error loading {docsError.ToString()} MeasurementV2s");
        }
        catch (Exception e)
        {
            etlEvent.Logs.Add(
                $"Error in ETL pipeline: {e.Message}");
            log.LogError($"Error in ETL pipeline: {e.Message}");
            throw new Exception("Error in ETL pipeline", e);
        }
        finally
        {
            // Always persist the EtlEvent, whether the pipeline succeeded or threw.
            log.LogInformation("Loading etlEvent to db");
            etlEvent.DateTimeEnd = DateTime.UtcNow;
            ResourceResponse <Document> result =
                await loader.LoadNoReplace(etlEvent);
            log.LogInformation($"Result of writing EtlEvent: {result.StatusCode.ToString()}");
        }
    }
}
/// <summary>
/// Builds an in-memory TOA5 fixture with one Meteorology observation and the
/// full LTAR_Met variable table, for use in tests.
/// </summary>
private TOA5 GetMockMeteorology()
{
    // Local factory keeps the variable table below compact and readable.
    Variable Col(string fieldName, string units, string processing) =>
        new Variable { FieldName = fieldName, Units = units, Processing = processing };

    var met = new TOA5
    {
        Observations = new List<IObservation>
        {
            new Meteorology
            {
                TIMESTAMP = new System.DateTime(2017, 6, 20, 11, 30, 00),
                RECORD = 15,
                amb_tmpr_Avg = 4.940109,
                rslt_wnd_spd = 4.940109,
                wnd_dir_compass = 259.7,
                RH_Avg = 56.22676,
                Precipitation_Tot = 0,
                amb_press_Avg = 93.25672,
                PAR_density_Avg = 1956.598,
                batt_volt_Avg = 13.63667,
                panel_tmpr_Avg = 25.22728,
                std_wnd_dir = 14.26,
                VPD_air = 1.244421,
                Rn_meas_Avg = 643.2509
            }
        },
        Metadata = new Metadata
        {
            FileFormat = "TOA5",
            StationName = "LTAR_CookEast",
            DataLoggerType = "CR3000",
            SerialNumber = 6503,
            OperatingSystemVersion = "CR3000.Std.31",
            DataloggerProgramName = "CPU:DEFAULT.CR3",
            DataloggerProgramSignature = 13636,
            TableName = "LTAR_Met",
            Variables = new List<Variable>
            {
                Col("TIMESTAMP", "TS", ""),
                Col("RECORD", "", ""),
                Col("amb_tmpr_Avg", "C", "Avg"),
                Col("rslt_wnd_spd", "m/s", "Smp"),
                Col("wnd_dir_compass", "degrees", "Smp"),
                Col("RH_Avg", "%", "Avg"),
                Col("Precipitation_Tot", "mm", "Tot"),
                Col("amb_press_Avg", "kPa", "Avg"),
                Col("PAR_density_Avg", "umol/(s m^2)", "Avg"),
                Col("batt_volt_Avg", "V", "Avg"),
                Col("panel_tmpr_Avg", "C", "Avg"),
                Col("std_wnd_dir", "degrees", "Smp"),
                Col("VPD_air", "kpa", "Smp"),
                Col("Rn_meas_Avg", "W/m^2", "Avg")
            }
        }
    };

    return met;
}