/// <summary>
/// Converts an IODPDataTable into a dictionary of Measurements keyed by a
/// 1-based insertion counter. Rows whose offset values cannot be parsed are
/// skipped (best-effort import, e.g. CARB files lacking an offset field).
/// </summary>
/// <param name="dataTable">The imported data table to convert.</param>
/// <param name="SectionCollection">Canonical section collection; each measurement's
/// section info is swapped for the shared instance held there.</param>
/// <returns>Measurements keyed by 1-based insertion order.</returns>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
/// <exception cref="IndexOutOfRangeException">A row is missing one of the hierarchy columns.</exception>
public static Dictionary<int, Measurement> Convert(IODPDataTable dataTable, SectionInfoCollection SectionCollection)
{
    _ = SectionCollection ?? throw new ArgumentNullException(nameof(SectionCollection));
    _ = dataTable ?? throw new ArgumentNullException(nameof(dataTable));

    Dictionary<int, Measurement> _measurements = new Dictionary<int, Measurement>();
    int measurementCount = _measurements.Count + 1; // dictionary is empty here, so keys start at 1

    foreach (DataRow record in dataTable.DataTable.Rows)
    {
        SectionInfo measurementSectionInfo = new SectionInfo();
        try
        {
            measurementSectionInfo.Expedition = record[dataTable.ExpeditionColumn].ToString();
            measurementSectionInfo.Site = record[dataTable.SiteColumn].ToString();
            measurementSectionInfo.Hole = record[dataTable.HoleColumn].ToString();
            measurementSectionInfo.Core = record[dataTable.CoreColumn].ToString();
            measurementSectionInfo.Type = record[dataTable.TypeColumn].ToString();
            measurementSectionInfo.Section = record[dataTable.SectionColumn].ToString();
        }
        catch (Exception ex)
        {
            // FIX: keep the original failure as the inner exception instead of discarding it.
            throw new IndexOutOfRangeException(nameof(record), ex);
        }

        Measurement measurement = new Measurement(measurementSectionInfo);
        measurement.SectionInfo = SectionCollection.GetExistingElseAddAndGetCurrentSection(measurement.SectionInfo);

        // Deliberate best-effort: a parse failure (missing/garbled offset column, e.g. CARB
        // files without an offset field, TCON offsets) skips the record rather than aborting.
        try
        {
            if (!string.IsNullOrEmpty(dataTable.OffsetColumn))
            {
                // Single-offset files: one value serves as both start and end (parse once).
                double offset = double.Parse(record[dataTable.OffsetColumn].ToString(), CultureInfo.CurrentCulture);
                measurement.StartOffset = offset;
                measurement.EndOffset = offset;
            }
            if (!string.IsNullOrEmpty(dataTable.TopOffsetColumn))
            {
                measurement.StartOffset = double.Parse(record[dataTable.TopOffsetColumn].ToString(), CultureInfo.CurrentCulture);
            }
            if (!string.IsNullOrEmpty(dataTable.BottomOffsetColumn))
            {
                measurement.EndOffset = double.Parse(record[dataTable.BottomOffsetColumn].ToString(), CultureInfo.CurrentCulture);
            }
            measurement.DataRow = record;
            _measurements.Add(measurementCount, measurement);
            measurementCount++;
        }
        catch (Exception)
        {
            // Intentionally swallowed: drop the unparseable record and continue with the rest.
        }
    }
    return _measurements;
}
/// <summary>
/// Imports the given sections file and parses it into a collection of SectionInfo.
/// Runs synchronously despite the async signature (no awaited work).
/// </summary>
/// <param name="fileName">Path of the sections file to import.</param>
/// <param name="columnNames">Optional column-name mapping; when omitted, the standard
/// "Exp"/"Site"/"Hole"/"Core"/"Type"/"Sect" headers are assumed.</param>
/// <returns>The parsed sections.</returns>
/// <exception cref="Exception">Wraps any parse failure, tagged with the file name.</exception>
public static async Task<ICollection<SectionInfo>> GetSectionsFromFileAsync(string fileName, [Optional] IntervalHierarchyNames columnNames)
{
    SectionInfoCollection sectionCollection = new SectionInfoCollection();
    columnNames = columnNames ?? new IntervalHierarchyNames
    {
        Expedition = "Exp",
        Site = "Site",
        Hole = "Hole",
        Core = "Core",
        Type = "Type",
        Section = "Sect",
        ParentTextID = "Text ID of section",
        ArchiveTextID = "Text ID of archive half",
        WorkingTextID = "Text ID of working half"
    };
    try
    {
        sectionCollection.ParseSectionInfoFromDataTable(SectionInfoCollection.ImportAllSections(fileName), columnNames);
    }
    catch (Exception ex)
    {
        // FIX: `ex` was caught but unused (CS0168); pass it as the inner exception so the
        // original stack trace and message are not lost.
        throw new Exception($"{fileName}: Could not parse section info from datatable", ex);
    }
    return sectionCollection.Sections;
}
/// <summary>
/// Validates and augments a single description file, then exports it.
/// Files that fail validation are logged and exported to the error directory instead.
/// </summary>
/// <param name="filePath">Path of the description file to process.</param>
/// <param name="exportFilePath">Destination for successfully processed files.</param>
/// <param name="errorExportFilePath">Destination for files that failed processing.</param>
/// <exception cref="Exception">The (possibly error-redirected) export itself failed.</exception>
private static void CleanupDescriptionFile(string filePath, string exportFilePath, string errorExportFilePath)
{
    var iodpDataTable = Importer.ImportDataTableFromFile(filePath);
    string currentFileName = Importer.GetFileName(filePath);
    try
    {
        Importer.CheckFile(iodpDataTable);
        AddMissingColumnsToDescriptionTable(iodpDataTable.DataTable);
        // FIX: reuse currentFileName instead of recomputing Importer.GetFileName(filePath).
        AddDataToHierarchyColumns(iodpDataTable, currentFileName, SectionInfoCollection.ImportAllSections(ConfigurationManager.AppSettings["AllSectionsFile"]));
        Log.Information($"{currentFileName}: Processed successfully");
    }
    catch (Exception ex)
    {
        // Best-effort: a failed file is still exported, just to the error directory.
        Log.Warning($"{currentFileName}: {ex.Message}");
        exportFilePath = errorExportFilePath;
    }
    try
    {
        Importer.ExportDataTableAsNewFile(exportFilePath, iodpDataTable.DataTable);
    }
    catch (Exception ex)
    {
        // FIX: include the inner exception so the export failure remains diagnosable.
        throw new Exception($"{currentFileName}: Error exporting to file", ex);
    }
}
/// <summary>
/// Imports all lithologic description files into a cache, optionally pushes them to the
/// databases, then processes each measurement file one at a time against that cache
/// (per-file processing keeps peak memory bounded for large measurement files).
/// </summary>
/// <param name="DescriptionFileCollection">Description files to import.</param>
/// <param name="MeasurementFileCollection">Measurement files to process.</param>
private static void ProcessData(FileCollection DescriptionFileCollection, FileCollection MeasurementFileCollection)
{
    #region ImportDescriptionData
    DescriptionFileCollection.Filenames.ForEach(fileName => Console.WriteLine(fileName.ToString()));

    var lithologyWorkflowHandler = new CSVLithologyWorkflowHandler
    {
        FileCollection = DescriptionFileCollection,
        ExportDirectory = DescriptionFileCollection.ExportDirectory
    };

    var sectionCollection = new SectionInfoCollection();
    var lithologyCache = lithologyWorkflowHandler.ImportCache(sectionCollection);

    if (ProgramSettings.SendDataToDESCDataBase)
    {
        DatabaseWorkflowHandler.SendLithologiesToDatabase(lithologyCache);
    }

    if (ProgramSettings.SendDataToLithologyDataBase)
    {
        //Need to specify which columns to keep, ex is for x375
        var acceptableColumns = new List<string>
        {
            "LITHOLOGY PREFIX",
            "Lithology principal name",
            "Lithology SUFFIX"
        };
        LithologyDatabaseWorkflowHandler.SendLithologiesToDataBase(lithologyCache, acceptableColumns);
    }

    var descriptionSearchCache = CacheReconfigurer.CreateDescriptionSearchHierarchy(lithologyCache);
    #endregion

    #region ImportMeasurementData
    if (!ProgramSettings.ProcessMeaurements)
    {
        Console.WriteLine("Finished processing files at: " + DateTime.Now.ToString());
        return;
    }

    var measurementWorkFlowHandler = new CSVMeasurementWorkFlowHandler
    {
        FileCollection = new FileCollection()
    };

    foreach (string path in MeasurementFileCollection.Filenames)
    {
        Console.WriteLine("Processing measurement file: " + path);

        // One file at a time: reset the handler's file list to just this path.
        measurementWorkFlowHandler.FileCollection.RemoveFiles();
        measurementWorkFlowHandler.FileCollection.Filenames.Add(path);

        var measurementCache = measurementWorkFlowHandler.ImportCache(sectionCollection);
        Console.WriteLine(string.Format(CultureInfo.CurrentCulture, "Processing {0} measurements", measurementCache.Count.ToString(CultureInfo.CurrentCulture)));

        measurementWorkFlowHandler.UpdateMeasurementCacheWithLithologicDescriptions(measurementCache, descriptionSearchCache);

        if (ProgramSettings.SendDataToDESCDataBase)
        {
            DatabaseWorkflowHandler.SendMeasurementsToDatabase(measurementCache);
        }

        if (ProgramSettings.ExportCachesToFiles)
        {
            measurementWorkFlowHandler.ExportDirectory = MeasurementFileCollection.ExportDirectory;
            measurementWorkFlowHandler.ExportToFile(measurementCache);
        }

        // Drop the per-file cache eagerly before the next (potentially huge) file.
        measurementCache = null;
        GC.Collect();

        Console.WriteLine("The total section count is: " + sectionCollection.Sections.Count);
    }
    #endregion

    Console.WriteLine("Finished processing measurement files at: " + DateTime.Now.ToString());
}
/// <summary>
/// Reformats a drilling-disturbance file: descriptions made on a whole CORE are split into
/// per-SECTION records (using the all-sections table to find depth-overlapping sections),
/// while descriptions already on a section half (sample ID ending in A/W) pass through
/// unchanged. The corrected table is exported to <paramref name="exportFilename"/>.
/// </summary>
/// <param name="filename">Path of the drilling-disturbance CSV to read.</param>
/// <param name="exportFilename">Path the corrected table is written to.</param>
/// <returns>The final corrected disturbance records; empty if the import fails.</returns>
public static List<DrillingDisturbanceRecord> FormatDrillingDisturbanceFile(string filename, string exportFilename)
{
    Log.Information("--------Parsing a new file--------");
    DataTable sections = SectionInfoCollection.ImportAllSections(ConfigurationManager.AppSettings["AllSectionsFile"]);
    ICollection<DrillingDisturbanceRecord> descriptions = new HashSet<DrillingDisturbanceRecord>();
    List<DrillingDisturbanceRecord> FinalDescriptionsToAdd = new List<DrillingDisturbanceRecord>();

    #region ImportDrillingDisturbances
    var dataTableReader = new CSVReader();
    dataTableReader.ReadPath = filename;
    DataTable drillingDisturbances = dataTableReader.Read();

    //Correct Column Names (normalize the several header variants seen in the wild):
    ChangeColumn(drillingDisturbances, "Drilling disturbance intensity [rank]", "Drilling disturbance intensity rank");
    ChangeColumn(drillingDisturbances, "Drilling disturbance intensity rank(read only)", "Drilling disturbance intensity rank");
    ChangeColumn(drillingDisturbances, "Drilling disturbance intensity rank (read only)", "Drilling disturbance intensity rank");
    ChangeColumn(drillingDisturbances, "Label ID", "Sample");
    ChangeColumn(drillingDisturbances, "Top depth [m]", "Top Depth [m]");
    ChangeColumn(drillingDisturbances, "Bottom depth [m]", "Bottom Depth [m]");
    ChangeColumn(drillingDisturbances, "Disturbance [name]", "Disturbance");
    ChangeColumn(drillingDisturbances, "File data", "File Data");

    //Add columns some files omit so the row reads below never miss.
    if (!drillingDisturbances.Columns.Contains("Drilling disturbance comment")) { drillingDisturbances.Columns.Add("Drilling disturbance comment"); }
    if (!drillingDisturbances.Columns.Contains("Drilling disturbance type")) { drillingDisturbances.Columns.Add("Drilling disturbance type"); }
    if (!drillingDisturbances.Columns.Contains("Drilling disturbance intensity")) { drillingDisturbances.Columns.Add("Drilling disturbance intensity"); }
    if (!drillingDisturbances.Columns.Contains("Drilling disturbance intensity rank")) { drillingDisturbances.Columns.Add("Drilling disturbance intensity rank"); }

    try
    {
        //Collection of all drilling disturbances
        foreach (DataRow row in drillingDisturbances.Rows)
        {
            DrillingDisturbanceRecord record = new DrillingDisturbanceRecord()
            {
                Column1 = row["Column1"].ToString(),
                SampleID = row["Sample"].ToString(),
                Top_cm = row["Top [cm]"].ToString(),
                Bottom_cm = row["Bottom [cm]"].ToString(),
                TopDepth_m = row["Top Depth [m]"].ToString(),
                BottomDepth_m = row["Bottom Depth [m]"].ToString(),
                DrillingDisturbanceType = row["Drilling disturbance type"].ToString(),
                DrillingDisturbanceIntensity = row["Drilling disturbance intensity"].ToString(),
                DrillingDisturbanceIntensityRank = row["Drilling disturbance intensity rank"].ToString(),
                DrillingDisturbanceComment = row["Drilling disturbance comment"].ToString(),
                // "Ship File Links" / "Shore File Links" are intentionally not read here —
                // those columns are absent from some files and would throw.
                FileData = row["File Data"].ToString()
            };
            descriptions.Add(record);
        }
    }
    catch (Exception ex)
    {
        Log.Warning(ex.Message);
        // FIX: log the actual file instead of the "(unknown)" placeholder.
        Log.Warning($"Could not create disturbance records from {filename}");
        return FinalDescriptionsToAdd;
    }
    #endregion

    #region GetTheSectionsInCoreDescription
    foreach (var description in descriptions)
    {
        //Rows whose sample ID ends in A or W are already section-half descriptions —
        //pass them through unchanged.
        if (description.SampleID.EndsWith("A") || description.SampleID.EndsWith("W"))
        {
            FinalDescriptionsToAdd.Add(description);
            continue;
        }

        Log.Information($"{description.SampleID} is a description on the Core");
        Log.Information($"{description.SampleID} TOP OFFSET: {description.Top_cm} BOTTOM OFFSET: {description.Bottom_cm} TOPDEPTH: {description.TopDepth_m} BOTTOM DEPTH: {description.BottomDepth_m}");

        //At this point the description should be of the entire Core;
        //Parse Core information from the Sample:
        SectionInfo coreInfo = new SectionInfo(description.SampleID);

        //Find all the sections within the AllSectionsTable which overlap with the
        //Top/Bottom depths of the Core description.
        var constituentSections = sections.AsEnumerable()
            .Where(x => x.Field<string>("Exp") == coreInfo.Expedition)
            .Where(x => x.Field<string>("Site") == coreInfo.Site)
            .Where(x => x.Field<string>("Hole") == coreInfo.Hole)
            .Where(x => x.Field<string>("Core") == coreInfo.Core)
            .Where(x => x.Field<string>("Type") == coreInfo.Type)
            .Where(x => (x.Field<string>("Top depth CSF-A (m)").ToDouble() >= description.TopDepth_m.ToDouble() &&
                         x.Field<string>("Top depth CSF-A (m)").ToDouble() < description.BottomDepth_m.ToDouble()) ||
                        (x.Field<string>("Bottom depth CSF-A (m)").ToDouble() > description.TopDepth_m.ToDouble() &&
                         x.Field<string>("Bottom depth CSF-A (m)").ToDouble() <= description.BottomDepth_m.ToDouble()))
            .ToHashSet();

        //Create new drilling disturbance records with section information.
        HashSet<DrillingDisturbanceRecord> newDrillingRecords = new HashSet<DrillingDisturbanceRecord>();
        foreach (var section in constituentSections)
        {
            //Create new sampleID, All of them will be on the Archive half
            string newSampleID = string.Format("{0}-{1}{2}-{3}{4}-{5}-A", section["Exp"], section["Site"], section["Hole"], section["Core"], section["Type"], section["Sect"]);

            var record = new DrillingDisturbanceRecord
            {
                Column1 = description.Column1,
                SampleID = newSampleID,
                Top_cm = "0", //section["Top Offset (cm)"].ToString(),
                Bottom_cm = Math.Round((section["Curated length (m)"].ToString().ToDouble() * 100), 2).ToString(),
                TopDepth_m = section["Top depth CSF-A (m)"].ToString(),
                BottomDepth_m = section["Bottom depth CSF-A (m)"].ToString(),
                DrillingDisturbanceType = description.DrillingDisturbanceType,
                // NOTE(review): intensity is copied from the RANK field — confirm intentional.
                DrillingDisturbanceIntensity = description.DrillingDisturbanceIntensityRank,
                DrillingDisturbanceComment = description.DrillingDisturbanceComment,
                DrillingDisturbanceIntensityRank = description.DrillingDisturbanceIntensityRank,
                ShipFileLinks = description.ShipFileLinks,
                ShoreFileLinks = description.ShoreFileLinks,
                FileData = description.FileData
            };
            newDrillingRecords.Add(record);
            Log.Information($"{newSampleID}: Section Added: TOP OFFSET: {record.Top_cm} BOTTOM OFFSET: {record.Bottom_cm} TOP DEPTH: {record.TopDepth_m} BOTTOM DEPTH: {record.BottomDepth_m}");
        }
        #endregion

        //FIX (robustness): nothing overlapped this core description — skip it instead of
        //crashing on First() below.
        if (newDrillingRecords.Count == 0)
        {
            Log.Warning($"{description.SampleID}: No sections overlap this core description");
            continue;
        }

        #region SetTheOffsetsForBorderingSections
        //Set the TOP/BOTTOM offsets for core description's first and last sections to be
        //equal to the core description's TOP/BOTTOM offsets.
        //FIX: order by numeric depth — ordering the raw strings is lexicographic ("10.0" < "9.5").
        var topSection = newDrillingRecords.OrderBy(x => x.TopDepth_m.ToDouble()).First();
        var topCorrection = topSection.Top_cm.ToDouble() + (description.TopDepth_m.ToDouble() - topSection.TopDepth_m.ToDouble()) * 100;
        topCorrection = Math.Round(topCorrection, 2);
        topSection.Top_cm = topCorrection.ToString(); //Need to calculate offsets based on depths
        topSection.TopDepth_m = description.TopDepth_m;
        Log.Information($"{topSection.SampleID}: Changed TopDepth to {topSection.TopDepth_m} and TopOffset to {topSection.Top_cm}");

        var bottomSection = newDrillingRecords.OrderBy(x => x.BottomDepth_m.ToDouble()).Last();
        var bottomCorrection = bottomSection.Bottom_cm.ToDouble() - (bottomSection.BottomDepth_m.ToDouble() - description.BottomDepth_m.ToDouble()) * 100;
        bottomCorrection = Math.Round(bottomCorrection, 2);
        bottomSection.Bottom_cm = bottomCorrection.ToString();
        bottomSection.BottomDepth_m = description.BottomDepth_m;
        #endregion
        Log.Information($"{bottomSection.SampleID}: Changed BottomDepth to {bottomSection.BottomDepth_m} and BottomOffset to {bottomSection.Bottom_cm}");

        foreach (var newSectionDescription in newDrillingRecords)
        {
            //Find all descriptions the core describers made for this SECTION (sample IDs match).
            var describedIntervalsOnSection = descriptions.Where(x => x.SampleID == newSectionDescription.SampleID).ToHashSet();

            //If they described any intervals, pass those intervals into the algo to correctly
            //process gap intervals.
            if (describedIntervalsOnSection.Any())
            {
                ICollection<DrillingDisturbanceRecord> finalRecords = CoreToSectionAlgo(newSectionDescription, describedIntervalsOnSection);
                FinalDescriptionsToAdd.AddRange(finalRecords);
                foreach (var record in finalRecords)
                {
                    Log.Information($"{newSectionDescription.SampleID}: Adding to final descriptions");
                }
            }
            else
            {
                FinalDescriptionsToAdd.Add(newSectionDescription);
                Log.Information($"{newSectionDescription.SampleID}: Adding to final descriptions");
            }
        }
    }

    //Sanity check: offset span (cm) should equal depth span (m * 100) for every record.
    foreach (var record in FinalDescriptionsToAdd)
    {
        var offsetDifference = Math.Round(record.Bottom_cm.ToDouble() - record.Top_cm.ToDouble(), 2);
        var depthDifference = Math.Round((record.BottomDepth_m.ToDouble() - record.TopDepth_m.ToDouble()) * 100, 2);
        if (offsetDifference != depthDifference)
        {
            Log.Warning($"Error in Offsets: {record.SampleID}: TOP OFFSET: {record.Top_cm} BOTTOMOFFSET: {record.Bottom_cm} TOPDEPTH: {record.TopDepth_m} BOTTOMDEPTH: {record.BottomDepth_m}");
        }
        else
        {
            Log.Information($"{record.SampleID}: TOP OFFSET: {record.Top_cm} BOTTOMOFFSET: {record.Bottom_cm} TOPDEPTH: {record.TopDepth_m} BOTTOMDEPTH: {record.BottomDepth_m}");
        }
    }

    //Rebuild an output table with the same schema as the input.
    DataTable dt = new DataTable();
    for (int i = 0; i < drillingDisturbances.Columns.Count; i++)
    {
        dt.Columns.Add(drillingDisturbances.Columns[i].ColumnName, drillingDisturbances.Columns[i].DataType);
    }

    //Add in new corrected drilling disturbances
    foreach (var item in FinalDescriptionsToAdd)
    {
        DataRow row = dt.NewRow();
        row["Column1"] = item.Column1;
        row["Sample"] = item.SampleID;
        row["Top [cm]"] = item.Top_cm;
        row["Bottom [cm]"] = item.Bottom_cm;
        row["Top Depth [m]"] = item.TopDepth_m;
        row["Bottom Depth [m]"] = item.BottomDepth_m;
        row["Drilling disturbance type"] = item.DrillingDisturbanceType;
        row["Drilling disturbance intensity"] = item.DrillingDisturbanceIntensity;
        row["Drilling disturbance intensity rank"] = item.DrillingDisturbanceIntensityRank;
        row["Drilling disturbance comment"] = item.DrillingDisturbanceComment;
        row["File Data"] = item.FileData;
        dt.Rows.Add(row);
    }

    Importer.ExportDataTableAsNewFile(exportFilename, dt);
    return FinalDescriptionsToAdd;
}
/// <summary>
/// Converts an IODPDataTable object into a collection of Lithologic Descriptions keyed by
/// their generated LithologicID. As a side effect, a "LithologicID_VP" column is added to
/// the source table and stamped on each processed row.
/// </summary>
/// <param name="dataTable">The datatable to convert; a null table yields an empty cache.</param>
/// <param name="SectionCollection">Canonical section collection used to dedupe section info.</param>
/// <returns>Descriptions keyed by LithologicID; may be partial (see NOTE in body).</returns>
public static Dictionary<string, LithologicDescription> ConvertDatatableToDictionary(IODPDataTable dataTable, SectionInfoCollection SectionCollection)
{
    _ = SectionCollection ?? throw new ArgumentNullException(nameof(SectionCollection));
    var LithologyCache = new Dictionary<string, LithologicDescription>();
    if (dataTable == null) { return(LithologyCache); }
    //Add a column in the datatable to ensure consistency between files with and without descriptions:
    //(Columns.Add throws if the column already exists — assumes each table is converted
    //at most once; TODO confirm.)
    dataTable.DataTable.Columns.Add("LithologicID_VP", typeof(string)).SetOrdinal(0);
    foreach (DataRow dataTableRow in dataTable.DataTable.Rows)
    {
        //Sentinel: row not (yet) assigned a lithologic ID.
        dataTableRow["LithologicID_VP"] = "-1";
        //NOTE(review): each validation failure below returns the PARTIAL cache built so far,
        //abandoning all remaining rows — confirm this is intended rather than `continue`.
        if (!Importer.DataRowContainsDescription(dataTableRow, dataTable)) { return(LithologyCache); }
        if (!Importer.DataRowContainsSampleIDColumn(dataTableRow, dataTable)) { return(LithologyCache); }
        LithologicDescription description = new LithologicDescription(dataTableRow[dataTable.SampleIDColumn].ToString());
        //Swap in the shared SectionInfo instance held by the collection.
        description.SectionInfo = SectionCollection.GetExistingElseAddAndGetCurrentSection(description.SectionInfo);
        if (!Importer.DescriptionContainsSectionInfo(description)) { return(LithologyCache); }
        description.DataRow = dataTableRow;
        double parsedOffset = 0;
        if (!Importer.DataRowContainsOffsetColumns(dataTableRow, dataTable)) { return(LithologyCache); }
        if (!Importer.StartOffsetValuesAreValid(dataTableRow, dataTable, ref parsedOffset)) { return(LithologyCache); }
        description.StartOffset = parsedOffset;
        if (!Importer.EndOffsetValuesAreValid(dataTableRow, dataTable, ref parsedOffset)) { return(LithologyCache); }
        description.EndOffset = parsedOffset;
        LithologicIDGenerator IDGenerator = new LithologicIDGenerator();
        IDGenerator.GenerateID(description);
        if (description.OffsetsSet()) { description.GenerateSubintervals(); }
        description.DataRow["LithologicID_VP"] = description.LithologicID;
        //Some descriptions are split in two rows. It's very uncommon, but throws an error
        //Only selecting the first row, despite the loss of data
        if (!LithologyCache.ContainsKey(description.LithologicID)) { LithologyCache.Add(description.LithologicID, description); }
    }
    return(LithologyCache);
}