/// <summary>
/// Carry out the block model import: resolve the centroid columns from the import map,
/// derive the model origin from the column minima, and hand off to
/// <c>BaseImportTools.PerformBMImport</c>.
/// </summary>
/// <param name="SelectedBMFile">Path of the block model data file to import.</param>
/// <param name="SelectedFormatBMFile">Path of the format definition file (currently unused here).</param>
/// <param name="importMap">Column mapping used to locate CentroidX/Y/Z.</param>
/// <param name="rawFileReader">Reader already primed with the file's delimiter; supplies column stats.</param>
/// <param name="NKDProjectID">Project identifier the model is imported into.</param>
/// <param name="modelAlias">Display alias for the new model.</param>
/// <returns>Always true; detailed status is reported through the ModelImportStatus instance.</returns>
internal bool DoBMImport(string SelectedBMFile, string SelectedFormatBMFile, ImportDataMap importMap, RawFileReader rawFileReader, string NKDProjectID, string modelAlias)
{
    BaseImportTools bit = new BaseImportTools();

    // Resolve which source columns carry the block centroid coordinates.
    int cxColumnID = importMap.GetColumnIDMappedTo("CentroidX");
    int cyColumnID = importMap.GetColumnIDMappedTo("CentroidY");
    int czColumnID = importMap.GetColumnIDMappedTo("CentroidZ");

    ColumnStats xOrigin = rawFileReader.GetDimensions(cxColumnID);
    ColumnStats yOrigin = rawFileReader.GetDimensions(cyColumnID);
    ColumnStats zOrigin = rawFileReader.GetDimensions(czColumnID);

    // Row count of the X column doubles as an estimate of total data lines (used for progress).
    int approxNumLines = xOrigin.count;

    ModelImportStatus mos = new ModelImportStatus();
    Guid newModelGuid = Guid.NewGuid();
    // NOTE(review): new Guid() is Guid.Empty — presumably a placeholder author id; confirm intended.
    Guid authorGuid = new Guid();

    // FIX: dispose the file stream when the import completes (previously leaked).
    // ReadWrite share lets the user keep the file open in another tool during import.
    using (Stream bmFileStream = new FileStream(SelectedBMFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        // Return value (status messages) was previously captured into an unused local; call kept for side effects.
        bit.PerformBMImport(mos, newModelGuid, bmFileStream, null, importMap, xOrigin.min, yOrigin.min, zOrigin.min, backgroundWorker, approxNumLines, NKDProjectID, modelAlias, authorGuid, ConnectionString);
    }
    return true;
}
/// <summary>
/// Import drill-hole collar records for a project, skipping or overwriting holes that
/// already exist depending on <paramref name="overwrite"/>.
/// </summary>
/// <param name="SelectedFile">Path of the collar data file to import.</param>
/// <param name="SelectedFormatBMFile">Path of the format definition file (currently unused here).</param>
/// <param name="importMap">Column mapping for the collar table.</param>
/// <param name="rawFileReader">Reader primed with the file's delimiter (currently unused here).</param>
/// <param name="NKDProjectID">Project the collars belong to.</param>
/// <param name="overwrite">True to update existing holes; false to leave them untouched.</param>
/// <returns>Status object describing lines read, records added/updated and any errors.</returns>
internal ModelImportStatus DoCollarImport(string SelectedFile, string SelectedFormatBMFile, ImportDataMap importMap, RawFileReader rawFileReader, Guid NKDProjectID, bool overwrite)
{
    BaseImportTools bit = new BaseImportTools();

    // Collect the names of collars already in this project so the importer can
    // detect duplicates and honour the overwrite flag.
    List<CollarInfo> existingHoles = this.GetHolesForProject(NKDProjectID);
    List<string> existingHoleNames = new List<string>();
    foreach (CollarInfo ci in existingHoles)
    {
        existingHoleNames.Add(ci.Name);
    }

    ModelImportStatus mos = new ModelImportStatus();

    // FIX: dispose the file stream when the import completes (previously leaked).
    using (Stream fileStream = new FileStream(SelectedFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        bit.PerformCollarImport(mos, fileStream, null, importMap, this.backgroundWorker, NKDProjectID, ConnectionString, existingHoleNames, overwrite);
    }
    return mos;
}
/// <summary>
/// BackgroundWorker entry point for the coal-quality import. The import map is passed
/// in via <c>DoWorkEventArgs.Argument</c>; the resulting status is stored in
/// <c>latestImportUpdateStatus</c> for the UI to display.
/// </summary>
private void bw_DoCoalQualityImportWork(object sender, DoWorkEventArgs e)
{
    var importMap = (ImportDataMap)e.Argument;
    commandDirector.SetCurrentWorkerThread(workerCoalQualityDataImport);

    // Pick the delimiter from the file name: comma for .csv, otherwise tab.
    char delimiter = (SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t';
    var reader = new RawFileReader(delimiter);

    latestImportUpdateStatus = commandDirector.DoCoalQualityImport(SelectedFile, SelectedFormatFile, importMap, reader, NKDProjectID, doDuplicateCheck, doImportOverwrite);
}
/// <summary>
/// Import lithology records for a project.
/// </summary>
/// <param name="SelectedFile">Path of the lithology data file to import.</param>
/// <param name="SelectedFormatFile">Path of the format definition file (currently unused here).</param>
/// <param name="importMap">Column mapping for the lithology table.</param>
/// <param name="rawFileReader">Reader primed with the file's delimiter (currently unused here).</param>
/// <param name="NKDProjectID">Project the records belong to.</param>
/// <param name="doOverwrite">True to replace existing records.</param>
/// <param name="checkForDuplicates">True to scan for duplicate rows during import.</param>
/// <returns>Status object describing lines read, records added/updated and any errors.</returns>
internal ModelImportStatus DoLithoImport(string SelectedFile, string SelectedFormatFile, ImportDataMap importMap, RawFileReader rawFileReader, Guid NKDProjectID, bool doOverwrite, bool checkForDuplicates)
{
    BaseImportTools bit = new BaseImportTools();
    ModelImportStatus mos = new ModelImportStatus();

    // Pre-scan the file for its line count so the importer can report progress.
    GeneralFileInfo gfi = new GeneralFileInfo();
    gfi.GeneralFileStats(SelectedFile);
    int numLines = gfi.numLines;

    // FIX: dispose the file stream when the import completes (previously leaked).
    using (Stream fileStream = new FileStream(SelectedFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        bit.PerformLithoImport(mos, fileStream, null, importMap, this.backgroundWorker, NKDProjectID, ConnectionString, numLines, doOverwrite, checkForDuplicates);
    }
    return mos;
}
/// <summary>
/// Command handler for the collar import: validates that a project is selected, builds
/// the import map from the preview grid, runs the import synchronously, and shows the
/// resulting status in a dialog.
/// </summary>
private void CollarImportExecuted(object sender, ExecutedRoutedEventArgs e)
{
    // Guard: a target project must be chosen before any import can run.
    if (ComboBoxProjectList.SelectedValue == null)
    {
        ComboBoxProjectList.BorderBrush = Brushes.Red;
        MessageBox.Show("You must select a project before importing");
        return;
    }
    Guid NKDProjectID = (Guid)ComboBoxProjectList.SelectedValue;

    // FIX: IsChecked is bool? — a direct (bool) cast throws when the box is in the
    // indeterminate (null) state; treat null as "do not overwrite".
    bool overwrite = checkBoxOverwrite.IsChecked == true;

    ImportDataMap importMap = MapConfigTable.GetImportDataMap(SelectedFile, MapConfigTable.collarPrimaryTableName, (SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t', ImportDataPreview.MaxColumns);

    // Add into the map details of which columns are foreign keys.
    if (collarDBFields != null)
    {
        importMap.UpdateWithFKInof(collarDBFields);
    }

    var rawFileReader = new RawFileReader((SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t');
    ModelImportStatus status = commandDirector.DoCollarImport(SelectedFile, SelectedFormatFile, importMap, rawFileReader, NKDProjectID, overwrite);
    latestImportUpdateStatus = status;

    // Report the outcome (lines read, added, updated, errors) to the user.
    if (latestImportUpdateStatus != null)
    {
        ImportStatusWindow ii = new ImportStatusWindow();
        ii.SetData(latestImportUpdateStatus);
        ii.ShowDialog();
    }
    e.Handled = true;
}
/// <summary>
/// Load a delimited text file into the preview grid configured for geophysics ("MODEL")
/// data, with the database-defined geophysics fields as mandatory mapping targets.
/// </summary>
/// <param name="inputFilename">Path of the file to preview.</param>
private void LoadLASTextDataForPreview(string inputFilename)
{
    var ares = new IOResults();

    // Mandatory targets come from the NKD geophysics field list; there are no optional ones.
    var mandatoryColumns = new List<ColumnMetaInfo>();
    var optionalColumns = new List<ColumnMetaInfo>();
    foreach (ColumnMetaInfo field in GetGeophysicsFieldsFromNKD())
    {
        mandatoryColumns.Add(field);
    }

    ImportDataPreview.SetMandatoryMappingColumns(mandatoryColumns);
    ImportDataPreview.SetOptionalMappingColumns(optionalColumns);
    ImportDataPreview.SetPreviewType("MODEL");

    const bool firstLineIsHeader = true;
    char delimiter = (inputFilename.ToLower().IndexOf(".csv") > -1) ? ',' : '\t';
    var reader = new RawFileReader(delimiter);
    List<RawDataRow> rows = reader.LoadRawDataForPreview(inputFilename, ares);
    ImportDataPreview.ResetTable(rows, firstLineIsHeader);
}
/// <summary>
/// BackgroundWorker entry point for the block model import. The import map is passed
/// in via <c>DoWorkEventArgs.Argument</c>.
/// </summary>
private void bw_DoBMImportWork(object sender, DoWorkEventArgs e)
{
    var importMap = (ImportDataMap)e.Argument;
    commandDirector.SetCurrentWorkerThread(workerBMDataImport);

    // Comma for .csv files, otherwise tab.
    char delimiter = (SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t';
    var reader = new RawFileReader(delimiter);

    bool ok = commandDirector.DoBMImport(SelectedFile, SelectedFormatFile, importMap, reader, NKDProjectID.ToString(), blockModellName);
}
/// <summary>
/// Derive default physical block dimensions from the mapped columns — origin from the
/// minimum of each centroid column, block width from the maximum of each length column —
/// and push them into the block dimensions control. Columns that are not mapped
/// (id &lt;= -1) are simply skipped.
/// </summary>
/// <param name="impMap">Import map used to find the centroid and length columns.</param>
private void PresetDimensionData(ImportDataMap impMap)
{
    char delimiter = (SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t';
    var reader = new RawFileReader(delimiter);

    int cxColumnID = impMap.GetColumnIDMappedTo("CentroidX");
    int cyColumnID = impMap.GetColumnIDMappedTo("CentroidY");
    int czColumnID = impMap.GetColumnIDMappedTo("CentroidZ");
    // NOTE(review): "LegthX" looks like a typo for "LengthX" (cf. LengthY/LengthZ below),
    // but the mapping key may be misspelled consistently elsewhere — confirm before changing.
    int xincColumnID = impMap.GetColumnIDMappedTo("LegthX");
    int yincColumnID = impMap.GetColumnIDMappedTo("LengthY");
    int zincColumnID = impMap.GetColumnIDMappedTo("LengthZ");

    var pd = new PhysicalDimensions();

    if (cxColumnID > -1)
    {
        pd.originX = reader.GetDimensions(cxColumnID).min;
    }
    if (cyColumnID > -1)
    {
        pd.originY = reader.GetDimensions(cyColumnID).min;
    }
    if (czColumnID > -1)
    {
        pd.originZ = reader.GetDimensions(czColumnID).min;
    }
    if (xincColumnID > -1)
    {
        pd.blockXWidth = reader.GetDimensions(xincColumnID).max;
    }
    if (yincColumnID > -1)
    {
        pd.blockYWidth = reader.GetDimensions(yincColumnID).max;
    }
    if (zincColumnID > -1)
    {
        pd.blockZWidth = reader.GetDimensions(zincColumnID).max;
    }

    BlockDimensionsControl.SetBlockDimensions(pd);
}
/// <summary>
/// Load a file for preview and run the full column analysis pass: raw preview rows,
/// per-column load with progress reporting, column statistics, and data-type detection.
/// The resulting column definitions are attached to the reader.
/// </summary>
/// <param name="fileToLoad">Path of the file to analyse.</param>
private void LoadFileForPreview(string fileToLoad)
{
    IOResults ares = new IOResults();
    bool firstLineIsHeader = true; // (bool)dataEntryForm.checkBoxModelFirstRowHeader.IsChecked;

    var rawFileReader = new RawFileReader((fileToLoad.ToLower().IndexOf(".csv") > -1) ? ',' : '\t');
    List<RawDataRow> dt = rawFileReader.LoadRawDataForPreview(fileToLoad, ares);
    rawFileReader.PerformColumnLoad(fileToLoad, ares, rawFileReader.MaxCols, firstLineIsHeader, workerLoadData);

    // Results of these were previously captured into unused locals; the calls are kept
    // in case they have side effects on the reader's internal state — TODO confirm.
    rawFileReader.GetColumnStats();
    rawFileReader.DetermineColumnDataTypes();

    columnDefs = new ModelColumnDefinitions(); // collect column assignments here
    rawFileReader.SetColumnDefinitions(columnDefs);
}
/// <summary>
/// Load geophysics data into the preview grid. LAS files are parsed with the LAS reader
/// and flattened into rows (depth first, then each measured value); any other file is
/// treated as comma-delimited text.
/// </summary>
/// <param name="inputFilename">Path of the file to preview.</param>
private void LoadGeophysiscsTextDataForPreview(string inputFilename)
{
    IOResults ares = new IOResults();

    // Mandatory mapping targets come from the NKD geophysics field list.
    List<ColumnMetaInfo> dbFields = GetGeophysicsFieldsFromNKD();
    ImportDataPreview.SetMandatoryMappingColumns(dbFields);
    ImportDataPreview.SetPreviewType("GEOPHYISCS");
    bool firstLineIsHeader = true;

    if (inputFilename.ToLower().EndsWith("las"))
    {
        LASFileReader lfr = new LASFileReader();
        int errCode = 0;
        // NOTE(review): errCode is never checked — a failed parse presumably leaves fl
        // null/empty; confirm ReadLASFile's error contract.
        LASFile fl = lfr.ReadLASFile(inputFilename, 0, out errCode);

        List<RawDataRow> dt = new List<RawDataRow>();

        // Synthesised header row: "Depth" followed by the LAS column headers.
        RawDataRow rdh = new RawDataRow();
        rdh.dataItems = new List<string>();
        rdh.dataItems.Add("Depth");
        foreach (string ss in fl.columnHeaders)
        {
            rdh.dataItems.Add(ss);
        }
        dt.Add(rdh);

        foreach (LASDataRow ldr in fl.dataRows)
        {
            RawDataRow rd = new RawDataRow();
            // FIX: dataItems was never initialized for data rows (only for the header
            // row above), so the Add below threw NullReferenceException.
            rd.dataItems = new List<string>();
            rd.dataItems.Add("" + ldr.depth);
            foreach (double d in ldr.rowData)
            {
                rd.dataItems.Add("" + d);
            }
            dt.Add(rd);
        }
        ImportDataPreview.ResetTable(dt, true);
    }
    else
    {
        var rawFileReader = new RawFileReader(',');
        List<RawDataRow> dt = rawFileReader.LoadRawDataForPreview(inputFilename, ares);
        ImportDataPreview.ResetTable(dt, firstLineIsHeader);
    }
}
/// <summary>
/// Load a delimited text file into the preview grid, choosing the mandatory mapping
/// columns according to the currently selected import type.
/// </summary>
/// <param name="inputFilename">Path of the file to preview.</param>
private void LoadTextDataForPreview(string inputFilename)
{
    IOResults ares = new IOResults();

    // Pick the database field set matching the import type; unknown types get none.
    List<ColumnMetaInfo> dbFields = null;
    if (SelectedImportType == GeneralParameters.BLOCKMODEL)
    {
        dbFields = bmDBFields;
    }
    else if (SelectedImportType == GeneralParameters.COLLAR)
    {
        dbFields = collarDBFields;
    }
    else if (SelectedImportType == GeneralParameters.ASSAY)
    {
        dbFields = assayDBFields;
    }
    else if (SelectedImportType == GeneralParameters.COAL_QUALITY)
    {
        dbFields = coalQualityDBFields;
    }
    else if (SelectedImportType == GeneralParameters.SURVEY)
    {
        dbFields = surveyDBFields;
    }
    else if (SelectedImportType == GeneralParameters.LITHO)
    {
        dbFields = lithoDBFields;
    }

    ImportDataPreview.SetMandatoryMappingColumns(dbFields);
    ImportDataPreview.SetPreviewType("MODEL");

    // NOTE(review): the delimiter is derived from SelectedFile while the data is loaded
    // from inputFilename — presumably these are always the same file; confirm.
    char delimiter = (SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t';
    var reader = new RawFileReader(delimiter);
    List<RawDataRow> rows = reader.LoadRawDataForPreview(inputFilename, ares);

    // LAS files carry no header row in this path; plain text files do.
    bool firstLineIsHeader = !inputFilename.ToLower().EndsWith("las");
    ImportDataPreview.ResetTable(rows, firstLineIsHeader);
}