/// <summary>
/// Imports every .csv file in a directory and uploads the parsed descriptions to the
/// database. Files that fail are logged and skipped so the remaining files still run.
/// </summary>
/// <param name="fileDirectory">Directory to scan for .csv files.</param>
/// <param name="columnIdentifiers">Optional column-name mapping; downstream code supplies a default when omitted.</param>
public static void AddAllDescriptionsToDatabase(string fileDirectory, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    FileCollection fileCollection = new FileCollection();
    fileCollection.AddFiles(fileDirectory, "*.csv");

    foreach (var file in fileCollection.Filenames)
    {
        Log.Information($"{file}: Trying to add descriptions to the database");
        try
        {
            // Block on the async upload via Task.Run to avoid deadlocking a
            // sync-context caller. GetAwaiter().GetResult() (instead of Wait())
            // surfaces the original exception rather than an AggregateException.
            Task.Run(() => UploadDescriptionsFromFileToDatabaseAsync(file, columnIdentifiers)).GetAwaiter().GetResult();
            Log.Information($"{file}: Successfully added descriptions to the database");
        }
        catch (Exception ex)
        {
            Log.Warning($"{file}: Could not add descriptions to database");
            Log.Warning(ex.Message);
            Log.Warning(ex.StackTrace);
        }

        // Brief pause between files — presumably to throttle database load; TODO confirm.
        Thread.Sleep(1000);
    }
}
/// <summary>
/// Reads a delimited file into a DataTable and wraps it in an IODPDataTable.
/// </summary>
/// <param name="filePath">Path of the .csv file to read.</param>
/// <param name="hierarchy">Column-name mapping for the interval hierarchy.</param>
/// <returns>An IODPDataTable backed by the imported DataTable.</returns>
public static IODPDataTable ImportDataTableFromFile(string filePath, IntervalHierarchyNames hierarchy)
{
    var dataTableReader = new CSVReader();
    dataTableReader.ReadPath = filePath;

    // BUG FIX: do NOT wrap the DataTable in a using block here. The returned
    // IODPDataTable keeps a live reference to it, and the original code disposed
    // the table before the caller ever saw it.
    DataTable dataTable = dataTableReader.Read();
    return new IODPDataTable(dataTable, hierarchy);
}
/// <summary>
/// Parses each row of the sections table into a SectionInfo and appends it to Sections.
/// </summary>
/// <param name="sectionsDatatable">Table whose rows describe sections.</param>
/// <param name="hierarchyNames">Column-name mapping used to read each row.</param>
/// <exception cref="Exception">Thrown when a row cannot be parsed; the original failure is preserved as InnerException.</exception>
public void ParseSectionInfoFromDataTable(DataTable sectionsDatatable, IntervalHierarchyNames hierarchyNames)
{
    foreach (DataRow row in sectionsDatatable.Rows)
    {
        try
        {
            IntervalHierarchyValues values = Importer.GetHierarchyValuesFromDataRow(row, hierarchyNames);
            SectionInfo section = new SectionInfo(values);
            Sections.Add(section);
        }
        catch (Exception ex)
        {
            // Preserve the original error as the inner exception for diagnostics
            // (the original code discarded it entirely).
            throw new Exception("Error parsing SectionInfo from data row", ex);
        }
    }
}
/// <summary>
/// Imports a file, prepends a combined "Sample" ID column built from the hierarchy
/// columns (Exp-SiteHole-CoreType-Section-A/W), and exports the result to a new file.
/// No-op when the file already has a "Sample" or "Label ID" column.
/// </summary>
/// <param name="path">Source file to read.</param>
/// <param name="newPath">Destination path for the exported file.</param>
public static void AddSampleIDColumnToFileAndExport(string path, string newPath)
{
    // Import a file:
    CSVReader reader = new CSVReader();
    reader.ReadPath = path;
    using (DataTable importTable = reader.Read())
    {
        IntervalHierarchyNames columnNames = new IntervalHierarchyNames()
        {
            Expedition = "Exp",
            Site = "Site",
            Hole = "Hole",
            Core = "Core",
            Type = "Type",
            Section = "Section",
            Half = "A/W",
        };
        IODPDataTable iODPDataTable = new IODPDataTable(importTable, columnNames);

        // Already has a sample identifier column; nothing to do.
        if (iODPDataTable.DataTable.Columns.Contains("Sample") || iODPDataTable.DataTable.Columns.Contains("Label ID"))
        {
            return;
        }

        // The early return above guarantees "Sample" is absent, so the original's
        // second Contains check was redundant — add the column unconditionally.
        iODPDataTable.DataTable.Columns.Add("Sample").SetOrdinal(1);

        foreach (DataRow row in importTable.Rows)
        {
            row.BeginEdit();
            row["Sample"] = $"{row[iODPDataTable.ExpeditionColumn]}" +
                $"-{row[iODPDataTable.SiteColumn]}{row[iODPDataTable.HoleColumn]}" +
                $"-{row[iODPDataTable.CoreColumn]}{row[iODPDataTable.TypeColumn]}" +
                $"-{row[iODPDataTable.SectionColumn]}" +
                $"-{row[columnNames.Half]}";
            row.EndEdit();
        }

        //Export File
        Importer.ExportDataTableAsNewFile(newPath, importTable);
    }
}
/// <summary>
/// Wraps a DataTable together with the column names that locate each interval-hierarchy
/// field (expedition, site, hole, core, type, section, offsets, sample ID) inside it.
/// </summary>
/// <param name="dataTable">The table whose rows hold the hierarchy data; stored by reference, not copied.</param>
/// <param name="hierarchy">Column-name mapping copied field-by-field onto this instance.</param>
// NOTE(review): hierarchy.Half is not copied to any column property here, although other
// code in this file reads columnNames.Half directly — confirm whether a HalfColumn is needed.
public IODPDataTable(DataTable dataTable, IntervalHierarchyNames hierarchy) { DataTable = dataTable; ExpeditionColumn = hierarchy.Expedition; SiteColumn = hierarchy.Site; HoleColumn = hierarchy.Hole; CoreColumn = hierarchy.Core; TypeColumn = hierarchy.Type; SectionColumn = hierarchy.Section; TopOffsetColumn = hierarchy.TopOffset; BottomOffsetColumn = hierarchy.BottomOffset; SampleIDColumn = hierarchy.SampleID; }
/// <summary>
/// Gets a collection of LithologicDescriptions from a corrected file.
/// </summary>
/// <param name="filename">Path of the corrected .csv file.</param>
/// <param name="columnIdentifiers">Optional column-name mapping; defaults to the "*_VP" column set.</param>
/// <returns>The parsed descriptions, each with its SectionInfo resolved against the database.</returns>
/// <exception cref="Exception">Thrown when a row cannot be converted; the original failure is preserved as InnerException.</exception>
public static async Task<ICollection<LithologicDescription>> GetDescriptionsFromFileAsync(string filename, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    columnIdentifiers = columnIdentifiers ?? new IntervalHierarchyNames()
    {
        Expedition = "Expedition_VP",
        Site = "Site_VP",
        Hole = "Hole_VP",
        Core = "Core_VP",
        Type = "Type_VP",
        Section = "Section_VP",
        Half = "SectionHalf_VP",
        TopOffset = "TopOffset_VP",
        BottomOffset = "BottomOffset_VP",
        ArchiveTextID = "ArchiveSectionTextID_VP",
        WorkingTextID = "WorkingSectionTextID_VP",
        ParentTextID = "ParentSectionTextID_VP"
    };

    IODPDataTable iODPDataTable = Importer.ImportDataTableFromFile(filename, columnIdentifiers);
    ICollection<LithologicDescription> descriptions = new HashSet<LithologicDescription>();

    using (DescDBContext dbContext = new DescDBContext())
    {
        try
        {
            foreach (DataRow row in iODPDataTable.DataTable.Rows)
            {
                // Build a throwaway SectionInfo from the row, then replace it with
                // the canonical one stored in the database.
                SectionInfo section = new SectionInfo(Importer.GetHierarchyValuesFromDataRow(row, columnIdentifiers));

                LithologicDescription description = new LithologicDescription();
                description.SectionInfo = await DatabaseWorkflowHandler.GetSectionInfoFromDatabaseForIntervalAsync(dbContext, section).ConfigureAwait(true);
                description.LithologicID = row["LithologicID_VP"].ToString();
                description.DataRow = row;
                description.DescriptionReport = row["Filename_VP"].ToString();

                // Offsets default to -1 when the cell cannot be parsed as a double.
                description.StartOffset = double.TryParse(row[columnIdentifiers.TopOffset].ToString(), out double startOffset) ? startOffset : -1;
                description.EndOffset = double.TryParse(row[columnIdentifiers.BottomOffset].ToString(), out double endOffset) ? endOffset : -1;

                descriptions.Add(description);
            }
        }
        catch (Exception ex)
        {
            // Preserve the original failure as the inner exception for diagnostics.
            throw new Exception("Error creating lithologic description from data row", ex);
        }
    }

    return descriptions;
}
/// <summary>
/// Populates the "*_VP" hierarchy columns of every row in the data table using the
/// description parsed from that row and the section TextIDs looked up in
/// <paramref name="allSectionsDataTable"/>. Rows that fail are logged and skipped.
/// </summary>
/// <param name="IODPDataTable">Table whose rows are annotated in place.</param>
/// <param name="fileName">Source file name written into the Filename_VP column.</param>
/// <param name="allSectionsDataTable">Lookup table of all sections (keyed by Exp/Site/Hole/Core/Type/Sect).</param>
private static void AddDataToHierarchyColumns(IODPDataTable IODPDataTable, string fileName, DataTable allSectionsDataTable)
{
    int rowNumber = 1;
    // Column names used by the all-sections lookup table.
    IntervalHierarchyNames sectionTableColumnNames = new IntervalHierarchyNames()
    {
        Expedition = "Exp",
        Site = "Site",
        Hole = "Hole",
        Core = "Core",
        Type = "Type",
        Section = "Sect"
    };

    foreach (DataRow row in IODPDataTable.DataTable.Rows)
    {
        LithologicDescription description = new LithologicDescription(row[IODPDataTable.SampleIDColumn].ToString());

        // parsedOffset is filled by ref for the start and then reused for the end.
        double parsedOffset = 0;
        Importer.StartOffsetValuesAreValid(row, IODPDataTable, ref parsedOffset);
        description.StartOffset = parsedOffset;
        Importer.EndOffsetValuesAreValid(row, IODPDataTable, ref parsedOffset);
        description.EndOffset = parsedOffset;

        LithologicIDGenerator idGenerator = new LithologicIDGenerator();
        var textids = GetSectionTextIDsForDescription(allSectionsDataTable, description, sectionTableColumnNames);
        try
        {
            var descriptionID = idGenerator.GenerateID(description);
            row.BeginEdit();
            row["Filename_VP"] = fileName;
            row["LithologicID_VP"] = descriptionID;
            row["ArchiveSectionTextID_VP"] = textids.Archive;
            row["WorkingSectionTextID_VP"] = textids.Working;
            row["ParentSectionTextID_VP"] = textids.Parent;
            row["Expedition_VP"] = description.SectionInfo.Expedition;
            row["Site_VP"] = description.SectionInfo.Site;
            row["Hole_VP"] = description.SectionInfo.Hole;
            row["Core_VP"] = description.SectionInfo.Core;
            row["Type_VP"] = description.SectionInfo.Type;
            row["Section_VP"] = description.SectionInfo.Section;
            row["SectionHalf_VP"] = description.SectionInfo.Half;
            row["TopOffset_VP"] = description.StartOffset.ToString();
            row["BottomOffset_VP"] = description.EndOffset.ToString();
            row.EndEdit();
        }
        catch (Exception ex)
        {
            // Skip the bad row, but record WHY it failed instead of discarding the error
            // (the original log line dropped the exception entirely).
            Log.Warning($"Row {rowNumber}: Unable to populate data for description: {ex.Message}");
        }
        rowNumber++;
    }
}
/// <summary>
/// Finds the archive/working/parent section TextIDs for a description by matching its
/// hierarchy values against the all-sections table. Returns ("-1", "-1", "-1") when no
/// section matches.
/// </summary>
/// <param name="allSections">Table containing one row per section with its TextID columns.</param>
/// <param name="description">Description whose SectionInfo supplies the match keys.</param>
/// <param name="columnNames">Column names of the hierarchy fields in <paramref name="allSections"/>.</param>
private static (string Archive, string Working, string Parent) GetSectionTextIDsForDescription(DataTable allSections, LithologicDescription description, IntervalHierarchyNames columnNames)
{
    var info = description.SectionInfo;

    // First row whose six hierarchy fields all match the description's section.
    var match = allSections.AsEnumerable().FirstOrDefault(candidate =>
        candidate.Field<string>(columnNames.Expedition) == info.Expedition &&
        candidate.Field<string>(columnNames.Site) == info.Site &&
        candidate.Field<string>(columnNames.Hole) == info.Hole &&
        candidate.Field<string>(columnNames.Core) == info.Core &&
        candidate.Field<string>(columnNames.Type) == info.Type &&
        candidate.Field<string>(columnNames.Section) == info.Section);

    if (match == null)
    {
        return (Archive: "-1", Working: "-1", Parent: "-1");
    }

    return (match["Text ID of archive half"].ToString(),
            match["Text ID of working half"].ToString(),
            match["Text ID of section"].ToString());
}
/// <summary>
/// Builds an IntervalHierarchyValues from a data row, copying each field whose named
/// column is present in the row and leaving the result's default value otherwise.
/// </summary>
/// <param name="dataRow">Row to read hierarchy values from.</param>
/// <param name="columnNames">Column name for each hierarchy field (a name may be null/absent).</param>
/// <returns>The populated hierarchy values.</returns>
/// <exception cref="Exception">Thrown when reading the row fails; the original failure is preserved as InnerException.</exception>
public static IntervalHierarchyValues GetHierarchyValuesFromDataRow(DataRow dataRow, IntervalHierarchyNames columnNames)
{
    IntervalHierarchyValues result = new IntervalHierarchyValues();

    // Reads the named column when the row has it; otherwise keeps the supplied
    // (current/default) value. Factored out of 16 identical ternaries.
    string ValueOr(string columnName, string current) =>
        DataRowContainsColumn(columnName, dataRow) ? dataRow[columnName].ToString() : current;

    try
    {
        result.SampleID = ValueOr(columnNames.SampleID, result.SampleID);
        result.Expedition = ValueOr(columnNames.Expedition, result.Expedition);
        result.Site = ValueOr(columnNames.Site, result.Site);
        result.Hole = ValueOr(columnNames.Hole, result.Hole);
        result.Core = ValueOr(columnNames.Core, result.Core);
        result.Type = ValueOr(columnNames.Type, result.Type);
        result.Section = ValueOr(columnNames.Section, result.Section);
        result.Half = ValueOr(columnNames.Half, result.Half);
        result.Offset = ValueOr(columnNames.Offset, result.Offset);
        result.TopOffset = ValueOr(columnNames.TopOffset, result.TopOffset);
        result.BottomOffset = ValueOr(columnNames.BottomOffset, result.BottomOffset);
        result.ArchiveTextID = ValueOr(columnNames.ArchiveTextID, result.ArchiveTextID);
        result.WorkingTextID = ValueOr(columnNames.WorkingTextID, result.WorkingTextID);
        result.ParentTextID = ValueOr(columnNames.ParentTextID, result.ParentTextID);
        result.TextID = ValueOr(columnNames.TextID, result.TextID);
        result.TestNumber = ValueOr(columnNames.TestNumber, result.TestNumber);
        return result;
    }
    catch (Exception ex)
    {
        // Preserve the original failure as the inner exception for diagnostics.
        throw new Exception("Error trying to get hierarchy values from data row", ex);
    }
}
/// <summary>
/// Asynchronously gets a collection of IODP measurements from a .csv file.
/// </summary>
/// <param name="filename">The .csv file location</param>
/// <param name="columnIdentifiers">Optional parameter which specifies the file's column names</param>
/// <returns>A collection of measurements</returns>
/// <exception cref="Exception">
/// Thrown when a row cannot be converted or the section lookup fails; the original
/// failure is preserved as InnerException.
/// </exception>
public static async Task<ICollection<Measurement>> GetMeasurementsFromFileAsync(string filename, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    columnIdentifiers = columnIdentifiers ?? new IntervalHierarchyNames()
    {
        Expedition = "Exp",
        Site = "Site",
        Hole = "Hole",
        Core = "Core",
        Type = "Type",
        Section = "Sect",
        Half = "A/W",
        TopOffset = "Offset (cm)",
        BottomOffset = "Offset (cm)",
        ArchiveTextID = "ArchiveSectionTextID_VP",
        WorkingTextID = "WorkingSectionTextID_VP",
        ParentTextID = "ParentSectionTextID_VP",
        SampleID = "Sample",
        TextID = "Text ID",
        TestNumber = "Test No.",
    };

    //TODO: need to get this some other way
    // The instrument system is inferred from the file name; CARB/ICP files use
    // different offset column headers.
    string InstrumentSystem = Importer.GetFileNameWithoutExtension(filename);
    if (InstrumentSystem == "CARB" || InstrumentSystem == "ICP")
    {
        columnIdentifiers.TopOffset = "Top offset on section (cm)";
        columnIdentifiers.BottomOffset = "Bot offset on section (cm)";
    }

    IODPDataTable iODPDataTable = Importer.ImportDataTableFromFile(filename, columnIdentifiers);
    ICollection<Measurement> measurements = new HashSet<Measurement>();
    try
    {
        foreach (DataRow row in iODPDataTable.DataTable.Rows)
        {
            IntervalHierarchyValues parsedValues = Importer.GetHierarchyValuesFromDataRow(row, columnIdentifiers);
            Measurement measurement = new Measurement();
            measurement.SectionInfo = new SectionInfo(parsedValues); //Creating a SectionInfo here that will be used to find the one stored in the DB.
            measurement.DataRow = row;
            measurement.InstrumentReport = "";
            measurement.InstrumentSystem = InstrumentSystem;
            measurement.TextID = parsedValues.TextID;
            measurement.TestNumber = parsedValues.TestNumber;
            // Offsets default to -1 when the cell cannot be parsed as a double.
            measurement.StartOffset = double.TryParse(row[columnIdentifiers.TopOffset].ToString(), out double startOffset) ? startOffset : -1;
            measurement.EndOffset = double.TryParse(row[columnIdentifiers.BottomOffset].ToString(), out double endOffset) ? endOffset : -1;
            measurements.Add(measurement);
        }
    }
    catch (Exception ex)
    {
        // Preserve the original failure as the inner exception for diagnostics.
        throw new Exception("Error creating measurement from data row", ex);
    }

    using (DescDBContext dbContext = new DescDBContext())
    {
        string[] expeditions = measurements.Select(x => x.SectionInfo.Expedition).Distinct().ToArray();
        ICollection<SectionInfo> sections;
        try
        {
            sections = await DatabaseWorkflowHandler.GetAllSectionsFromDatabaseForExpeditionAsync(dbContext, expeditions).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            throw new Exception("Could not get sections from the database", ex);
        }

        // Replace each placeholder SectionInfo with the canonical one from the database.
        foreach (var measurement in measurements)
        {
            measurement.SectionInfo = DatabaseWorkflowHandler.GetSectionInfoFromCollection(sections, measurement.SectionInfo);
        }
        return measurements;
    }
}
/// <summary>
/// Parses a file into Measurements, checks the database for existence, if records are new uploads to database.
/// </summary>
/// <param name="filename">The .csv file to parse.</param>
/// <param name="columnIdentifiers">Optional column-name mapping passed through to the parser.</param>
/// <returns>True when new measurements were uploaded; false when every record already existed.</returns>
private static async Task<bool> UploadMeasurementsFromFileToDatabaseAsync(string filename, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    var parsed = await MeasurementHandler.GetMeasurementsFromFileAsync(filename, columnIdentifiers).ConfigureAwait(true);

    // First pass: collect the measurements the database already holds...
    ICollection<Measurement> duplicates = new HashSet<Measurement>();
    using (DescDBContext checkContext = new DescDBContext())
    {
        foreach (var candidate in parsed)
        {
            var alreadyStored = await DatabaseWorkflowHandler.FindMeasurementInDatabase(checkContext, candidate).ConfigureAwait(true);
            if (alreadyStored)
            {
                duplicates.Add(candidate);
            }
        }
    }
    // ...then drop them (removal happens outside the enumeration above).
    foreach (var duplicate in duplicates)
    {
        parsed.Remove(duplicate);
    }

    if (parsed.Count == 0)
    {
        return false;
    }

    foreach (var candidate in parsed)
    {
        candidate.MeasurementData.Clear(); //Not uploading measurement report data at the moment
    }

    using (DescDBContext uploadContext = new DescDBContext())
    {
        return await DatabaseWorkflowHandler.AddMeasurementsToDataBaseAsync(uploadContext, parsed).ConfigureAwait(true);
    }
}
/// <summary>
/// Parses a file into LithologicDescriptions, removes those already stored in the
/// database, and uploads the remainder.
/// </summary>
/// <param name="filename">The .csv file to parse.</param>
/// <param name="columnIdentifiers">Optional column-name mapping passed through to the parser.</param>
/// <returns>True when new descriptions were uploaded; false when nothing new was found.</returns>
private static async Task<bool> UploadDescriptionsFromFileToDatabaseAsync(string filename, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    var descriptions = await DescriptionHandler.GetDescriptionsFromFileAsync(filename, columnIdentifiers).ConfigureAwait(true);

    // BUG FIX: the original removed items from `descriptions` while enumerating it,
    // which throws InvalidOperationException at runtime. Collect duplicates first,
    // then remove them. (The original try/catch only did `throw ex;`, which resets
    // the stack trace, so it is dropped — exceptions propagate unchanged.)
    ICollection<LithologicDescription> descriptionsToRemove = new HashSet<LithologicDescription>();
    using (DescDBContext dBContext = new DescDBContext())
    {
        foreach (var description in descriptions)
        {
            if (await DatabaseWorkflowHandler.CheckForDescriptionAsync(dBContext, description).ConfigureAwait(true))
            {
                descriptionsToRemove.Add(description);
            }
        }
    }
    foreach (var description in descriptionsToRemove)
    {
        descriptions.Remove(description);
    }

    if (descriptions.Count == 0)
    {
        return false;
    }

    bool isDataUploaded;
    using (DescDBContext dbContext = new DescDBContext())
    {
        isDataUploaded = await DatabaseWorkflowHandler.AddDescriptionsToDataBaseAsync(dbContext, descriptions).ConfigureAwait(true);
    }
    return isDataUploaded;
}
/// <summary>
/// Reads all sections from a file and returns them as SectionInfo objects.
/// </summary>
/// <param name="fileName">Path of the sections file to import.</param>
/// <param name="columnNames">Optional column-name mapping; defaults to the standard section-table headers.</param>
/// <returns>The parsed sections.</returns>
/// <exception cref="Exception">Thrown when parsing fails; the original failure is preserved as InnerException.</exception>
public static async Task<ICollection<SectionInfo>> GetSectionsFromFileAsync(string fileName, [Optional] IntervalHierarchyNames columnNames)
{
    SectionInfoCollection sectionCollection = new SectionInfoCollection();
    columnNames = columnNames ?? new IntervalHierarchyNames
    {
        Expedition = "Exp",
        Site = "Site",
        Hole = "Hole",
        Core = "Core",
        Type = "Type",
        Section = "Sect",
        ParentTextID = "Text ID of section",
        ArchiveTextID = "Text ID of archive half",
        WorkingTextID = "Text ID of working half"
    };
    try
    {
        sectionCollection.ParseSectionInfoFromDataTable(SectionInfoCollection.ImportAllSections(fileName), columnNames);
    }
    catch (Exception ex)
    {
        // The original caught `ex` but never used it — attach it as InnerException.
        throw new Exception($"{fileName}: Could not parse section info from datatable", ex);
    }
    return sectionCollection.Sections;
}