/// <summary>
/// Gets a collection of LithologicDescriptions from a corrected file.
/// </summary>
/// <param name="filename">The corrected file location</param>
/// <param name="columnIdentifiers">Optional parameter which specifies the file's column names</param>
/// <returns>A collection of lithologic descriptions</returns>
public static async Task<ICollection<LithologicDescription>> GetDescriptionsFromFileAsync(string filename, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    columnIdentifiers = columnIdentifiers ?? new IntervalHierarchyNames()
    {
        Expedition = "Expedition_VP",
        Site = "Site_VP",
        Hole = "Hole_VP",
        Core = "Core_VP",
        Type = "Type_VP",
        Section = "Section_VP",
        Half = "SectionHalf_VP",
        TopOffset = "TopOffset_VP",
        BottomOffset = "BottomOffset_VP",
        ArchiveTextID = "ArchiveSectionTextID_VP",
        WorkingTextID = "WorkingSectionTextID_VP",
        ParentTextID = "ParentSectionTextID_VP"
    };

    IODPDataTable iODPDataTable = Importer.ImportDataTableFromFile(filename, columnIdentifiers);
    ICollection<LithologicDescription> descriptions = new HashSet<LithologicDescription>();

    using (DescDBContext dbContext = new DescDBContext())
    {
        try
        {
            foreach (DataRow row in iODPDataTable.DataTable.Rows)
            {
                SectionInfo section = new SectionInfo(Importer.GetHierarchyValuesFromDataRow(row, columnIdentifiers));

                LithologicDescription description = new LithologicDescription();
                description.SectionInfo = await DatabaseWorkflowHandler.GetSectionInfoFromDatabaseForIntervalAsync(dbContext, section).ConfigureAwait(true);
                description.LithologicID = row["LithologicID_VP"].ToString();
                description.DataRow = row;
                description.DescriptionReport = row["Filename_VP"].ToString();
                description.StartOffset = double.TryParse(row[columnIdentifiers.TopOffset].ToString(), out double startOffset) ? startOffset : -1;
                description.EndOffset = double.TryParse(row[columnIdentifiers.BottomOffset].ToString(), out double endOffset) ? endOffset : -1;

                descriptions.Add(description);
            }
        }
        catch (Exception ex)
        {
            // Preserve the original exception so the failing row can still be diagnosed.
            throw new Exception("Error creating lithologic description from data row", ex);
        }
    }

    return descriptions;
}
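// Example (sketch, not part of the original workflow): a minimal caller for
// GetDescriptionsFromFileAsync, relying on the default "_VP" column names shown above.
// The file path is hypothetical; the printed fields are those assigned in the method.
public static async Task PrintDescriptionsExampleAsync()
{
    var descriptions = await DescriptionHandler.GetDescriptionsFromFileAsync(@"C:\data\corrected_descriptions.csv");
    foreach (var description in descriptions)
    {
        Console.WriteLine($"{description.LithologicID}: {description.StartOffset} - {description.EndOffset} cm");
    }
}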
/// <summary>
/// Parses a file into measurements, checks the database for existing records, and uploads any new measurements.
/// </summary>
/// <param name="filename">The .csv file location</param>
/// <param name="columnIdentifiers">Optional parameter which specifies the file's column names</param>
/// <returns>True if new measurements were uploaded; otherwise false</returns>
private static async Task<bool> UploadMeasurementsFromFileToDatabaseAsync(string filename, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    var measurements = await MeasurementHandler.GetMeasurementsFromFileAsync(filename, columnIdentifiers).ConfigureAwait(true);

    // Collect measurements that already exist in the database so they are not uploaded twice.
    ICollection<Measurement> measurementsToRemove = new HashSet<Measurement>();
    using (DescDBContext dBContext = new DescDBContext())
    {
        foreach (var measurement in measurements)
        {
            var measurementExists = await DatabaseWorkflowHandler.FindMeasurementInDatabase(dBContext, measurement).ConfigureAwait(true);
            if (measurementExists)
            {
                measurementsToRemove.Add(measurement);
            }
        }
    }

    foreach (var measurement in measurementsToRemove)
    {
        measurements.Remove(measurement);
    }

    if (measurements.Count == 0)
    {
        return false;
    }

    foreach (var measurement in measurements)
    {
        measurement.MeasurementData.Clear(); // Not uploading measurement report data at the moment
    }

    bool isDataUploaded;
    using (DescDBContext dbContext = new DescDBContext())
    {
        isDataUploaded = await DatabaseWorkflowHandler.AddMeasurementsToDataBaseAsync(dbContext, measurements).ConfigureAwait(true);
    }

    return isDataUploaded;
}
/// <summary>
/// Parses a file into LithologicDescriptions, checks the database for existing records, and uploads any new descriptions.
/// </summary>
/// <param name="filename">The corrected file location</param>
/// <param name="columnIdentifiers">Optional parameter which specifies the file's column names</param>
/// <returns>True if new descriptions were uploaded; otherwise false</returns>
private static async Task<bool> UploadDescriptionsFromFileToDatabaseAsync(string filename, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    var descriptions = await DescriptionHandler.GetDescriptionsFromFileAsync(filename, columnIdentifiers).ConfigureAwait(true);

    // Collect existing descriptions first; removing them inside the foreach would modify
    // the collection while it is being enumerated.
    ICollection<LithologicDescription> descriptionsToRemove = new HashSet<LithologicDescription>();
    using (DescDBContext dBContext = new DescDBContext())
    {
        foreach (var description in descriptions)
        {
            if (await DatabaseWorkflowHandler.CheckForDescriptionAsync(dBContext, description).ConfigureAwait(true))
            {
                descriptionsToRemove.Add(description);
            }
        }
    }

    foreach (var description in descriptionsToRemove)
    {
        descriptions.Remove(description);
    }

    if (descriptions.Count == 0)
    {
        return false;
    }

    bool isDataUploaded;
    using (DescDBContext dbContext = new DescDBContext())
    {
        isDataUploaded = await DatabaseWorkflowHandler.AddDescriptionsToDataBaseAsync(dbContext, descriptions).ConfigureAwait(true);
    }

    return isDataUploaded;
}
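// Example (sketch, assuming this method and UploadMeasurementsFromFileToDatabaseAsync live in the
// same class): uploading descriptions and measurements from two files. The paths are hypothetical;
// both methods return false when every record already exists in the database.
private static async Task UploadFilesExampleAsync()
{
    bool descriptionsUploaded = await UploadDescriptionsFromFileToDatabaseAsync(@"C:\data\corrected_descriptions.csv");
    bool measurementsUploaded = await UploadMeasurementsFromFileToDatabaseAsync(@"C:\data\CARB.csv");

    Console.WriteLine($"New descriptions uploaded: {descriptionsUploaded}");
    Console.WriteLine($"New measurements uploaded: {measurementsUploaded}");
}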
/// <summary>
/// Asynchronously gets a collection of IODP measurements from a .csv file.
/// </summary>
/// <param name="filename">The .csv file location</param>
/// <param name="columnIdentifiers">Optional parameter which specifies the file's column names</param>
/// <returns>A collection of measurements</returns>
public static async Task<ICollection<Measurement>> GetMeasurementsFromFileAsync(string filename, [Optional] IntervalHierarchyNames columnIdentifiers)
{
    columnIdentifiers = columnIdentifiers ?? new IntervalHierarchyNames()
    {
        Expedition = "Exp",
        Site = "Site",
        Hole = "Hole",
        Core = "Core",
        Type = "Type",
        Section = "Sect",
        Half = "A/W",
        TopOffset = "Offset (cm)",
        BottomOffset = "Offset (cm)",
        ArchiveTextID = "ArchiveSectionTextID_VP",
        WorkingTextID = "WorkingSectionTextID_VP",
        ParentTextID = "ParentSectionTextID_VP",
        SampleID = "Sample",
        TextID = "Text ID",
        TestNumber = "Test No.",
    };

    //TODO: need to get this some other way
    string instrumentSystem = Importer.GetFileNameWithoutExtension(filename);
    if (instrumentSystem == "CARB" || instrumentSystem == "ICP")
    {
        columnIdentifiers.TopOffset = "Top offset on section (cm)";
        columnIdentifiers.BottomOffset = "Bot offset on section (cm)";
    }

    IODPDataTable iODPDataTable = Importer.ImportDataTableFromFile(filename, columnIdentifiers);
    ICollection<Measurement> measurements = new HashSet<Measurement>();

    try
    {
        foreach (DataRow row in iODPDataTable.DataTable.Rows)
        {
            IntervalHierarchyValues parsedValues = Importer.GetHierarchyValuesFromDataRow(row, columnIdentifiers);

            Measurement measurement = new Measurement();
            // Creating a SectionInfo here that will be used to find the one stored in the DB.
            measurement.SectionInfo = new SectionInfo(parsedValues);
            measurement.DataRow = row;
            measurement.InstrumentReport = "";
            measurement.InstrumentSystem = instrumentSystem;
            measurement.TextID = parsedValues.TextID;
            measurement.TestNumber = parsedValues.TestNumber;
            measurement.StartOffset = double.TryParse(row[columnIdentifiers.TopOffset].ToString(), out double startOffset) ? startOffset : -1;
            measurement.EndOffset = double.TryParse(row[columnIdentifiers.BottomOffset].ToString(), out double endOffset) ? endOffset : -1;

            measurements.Add(measurement);
        }
    }
    catch (Exception ex)
    {
        throw new Exception("Error creating measurement from data row", ex);
    }

    using (DescDBContext dbContext = new DescDBContext())
    {
        string[] expeditions = measurements.Select(x => x.SectionInfo.Expedition).Distinct().ToArray();

        ICollection<SectionInfo> sections;
        try
        {
            sections = await DatabaseWorkflowHandler.GetAllSectionsFromDatabaseForExpeditionAsync(dbContext, expeditions).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            throw new Exception("Could not get sections from the database", ex);
        }

        // Replace each parsed SectionInfo with the matching section record from the database.
        foreach (var measurement in measurements)
        {
            measurement.SectionInfo = DatabaseWorkflowHandler.GetSectionInfoFromCollection(sections, measurement.SectionInfo);
        }

        return measurements;
    }
}
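// Example (sketch): passing explicit column identifiers instead of relying on the filename-based
// defaults above. The file path is hypothetical, and only illustrative identifiers are shown;
// a real file may need the remaining IntervalHierarchyNames members set as well.
public static async Task<ICollection<Measurement>> ImportWithExplicitColumnsExampleAsync()
{
    var columnIdentifiers = new IntervalHierarchyNames()
    {
        Expedition = "Exp",
        Site = "Site",
        Hole = "Hole",
        Core = "Core",
        Type = "Type",
        Section = "Sect",
        Half = "A/W",
        TopOffset = "Top offset on section (cm)",
        BottomOffset = "Bot offset on section (cm)",
        TextID = "Text ID",
        TestNumber = "Test No."
    };

    return await MeasurementHandler.GetMeasurementsFromFileAsync(@"C:\data\geochemistry.csv", columnIdentifiers);
}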
/// <summary>
/// Looks up the database ID for each measurement in a file and exports the data with a MeasurementID column prepended.
/// </summary>
/// <param name="file">The measurement file location</param>
/// <param name="exportFilePath">The export file location</param>
/// <returns>True if the file was exported; otherwise false</returns>
public static async Task<bool> GetMeasurementIDForMeasurementFile(string file, string exportFilePath)
{
    ICollection<Measurement> measurements = await MeasurementHandler.GetMeasurementsFromFileAsync(file).ConfigureAwait(true);

    if (measurements.Count == 0)
    {
        return false;
    }

    using (DescDBContext dBContext = new DescDBContext())
    {
        foreach (var measurement in measurements)
        {
            measurement.ID = await DatabaseWorkflowHandler.GetMeasurementIDAsync(dBContext, measurement).ConfigureAwait(true);
        }
    }

    // Column names
    HashSet<string> columns = measurements.First().MeasurementData.Select(x => x.ColumnName).ToHashSet();

    // Check that all measurements have the same columns
    foreach (var measurement in measurements)
    {
        var compareColumns = measurement.MeasurementData.Select(x => x.ColumnName).ToHashSet();
        if (!columns.SetEquals(compareColumns))
        {
            return false;
        }
    }

    // Construct a new DataTable with a MeasurementID column prepended
    using (DataTable dataTable = new DataTable())
    {
        foreach (var column in columns)
        {
            dataTable.Columns.Add(column);
        }
        dataTable.Columns.Add("MeasurementID").SetOrdinal(0);

        int currentRow = 0;
        foreach (var measurement in measurements)
        {
            dataTable.ImportRow(measurement.DataRow);
            var row = dataTable.Rows[currentRow];
            row.BeginEdit();
            row["MeasurementID"] = measurement.ID;
            row.EndEdit();
            currentRow++;
        }

        // Export measurements to file
        Importer.ExportDataTableAsNewFile(exportFilePath, dataTable);
    }

    return true;
}
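// Example (sketch): exporting a measurement file with database IDs attached. Both paths are
// hypothetical; the method returns false if the measurements do not all share the same columns.
public static async Task ExportMeasurementIdsExampleAsync()
{
    bool exported = await GetMeasurementIDForMeasurementFile(@"C:\data\CARB.csv", @"C:\exports\CARB_withIDs.csv");
    if (!exported)
    {
        Console.WriteLine("Measurement columns were inconsistent; no file was exported.");
    }
}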
private static void ProcessData(FileCollection DescriptionFileCollection, FileCollection MeasurementFileCollection)
{
    #region ImportDescriptionData
    DescriptionFileCollection.Filenames.ForEach(fileName => Console.WriteLine(fileName.ToString()));

    var lithologyWorkflowHandler = new CSVLithologyWorkflowHandler();
    lithologyWorkflowHandler.FileCollection = DescriptionFileCollection;
    lithologyWorkflowHandler.ExportDirectory = DescriptionFileCollection.ExportDirectory;

    var SectionCollection = new SectionInfoCollection();
    var lithologyCache = lithologyWorkflowHandler.ImportCache(SectionCollection);

    if (ProgramSettings.SendDataToDESCDataBase)
    {
        DatabaseWorkflowHandler.SendLithologiesToDatabase(lithologyCache);
    }

    if (ProgramSettings.SendDataToLithologyDataBase)
    {
        //Need to specify which columns to keep, ex is for x375
        List<string> acceptableColumns = new List<string>()
        {
            "LITHOLOGY PREFIX",
            "Lithology principal name",
            "Lithology SUFFIX"
        };
        LithologyDatabaseWorkflowHandler.SendLithologiesToDataBase(lithologyCache, acceptableColumns);
    }

    var LithCache = CacheReconfigurer.CreateDescriptionSearchHierarchy(lithologyCache);
    #endregion

    #region ImportMeasurementData
    if (ProgramSettings.ProcessMeaurements == false)
    {
        Console.WriteLine("Finished processing files at: " + DateTime.Now.ToString());
        return;
    }

    var measurementWorkFlowHandler = new CSVMeasurementWorkFlowHandler();
    measurementWorkFlowHandler.FileCollection = new FileCollection();

    foreach (string path in MeasurementFileCollection.Filenames)
    {
        Console.WriteLine("Processing measurement file: " + path);
        measurementWorkFlowHandler.FileCollection.RemoveFiles();
        measurementWorkFlowHandler.FileCollection.Filenames.Add(path);

        var measurementCache = measurementWorkFlowHandler.ImportCache(SectionCollection);
        Console.WriteLine(string.Format(CultureInfo.CurrentCulture, "Processing {0} measurements", measurementCache.Count.ToString(CultureInfo.CurrentCulture)));

        measurementWorkFlowHandler.UpdateMeasurementCacheWithLithologicDescriptions(measurementCache, LithCache);

        if (ProgramSettings.SendDataToDESCDataBase)
        {
            DatabaseWorkflowHandler.SendMeasurementsToDatabase(measurementCache);
        }

        if (ProgramSettings.ExportCachesToFiles)
        {
            measurementWorkFlowHandler.ExportDirectory = MeasurementFileCollection.ExportDirectory;
            measurementWorkFlowHandler.ExportToFile(measurementCache);
        }

        measurementCache = null;
        GC.Collect();

        Console.WriteLine("The total section count is: " + SectionCollection.Sections.Count);
    }
    #endregion

    Console.WriteLine("Finished processing measurement files at: " + DateTime.Now.ToString());
}
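// Example (sketch): how ProcessData might be invoked. The directory and file paths are
// hypothetical, and FileCollection is assumed to expose a settable ExportDirectory alongside
// the Filenames list used above; adjust to the actual FileCollection API.
private static void ProcessDataExample()
{
    var descriptionFiles = new FileCollection();
    descriptionFiles.Filenames.Add(@"C:\data\descriptions\corrected_descriptions.csv");
    descriptionFiles.ExportDirectory = @"C:\exports\descriptions";

    var measurementFiles = new FileCollection();
    measurementFiles.Filenames.Add(@"C:\data\measurements\CARB.csv");
    measurementFiles.ExportDirectory = @"C:\exports\measurements";

    ProcessData(descriptionFiles, measurementFiles);
}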