/// <summary>
/// Imports litho data from the supplied stream by delegating to the import
/// utility library. Progress is reported back through UpdateStatus.
/// </summary>
/// <param name="mos">Collector for status/error messages produced during the import.</param>
/// <param name="bmFileStream">Stream containing the litho data to load.</param>
/// <param name="ffFileStream">Format-file stream (not used by this path).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="backgroundWorker">Worker used for progress callbacks; stored on this instance.</param>
/// <param name="NKDProjectID">Project the data belongs to.</param>
/// <param name="connectionString">Database connection string.</param>
/// <param name="numLines">Line count used for progress scaling.</param>
/// <param name="doOverwrite">Whether existing records may be overwritten.</param>
/// <param name="checkForDuplicates">Whether to check for duplicate records.</param>
public void PerformLithoImport(ModelImportStatus mos, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, System.ComponentModel.BackgroundWorker backgroundWorker, Guid NKDProjectID, string connectionString, int numLines, bool doOverwrite, bool checkForDuplicates)
{
    this.currentWorker = backgroundWorker;
    // Hand the heavy lifting to the import library; rows are committed in batches.
    DateTime startTime = DateTime.Now;
    int batchSize = 100;
    var lithoImporter = new ImportUtils.LithoImport();
    lithoImporter.AddLithoData(mos, bmFileStream, importMap, batchSize, UpdateStatus, numLines, connectionString, NKDProjectID, doOverwrite, checkForDuplicates);
}
/// <summary>
/// Populates the status window from the given import status: generates the
/// message list and summary text, then binds them to the UI controls.
/// </summary>
/// <param name="_mos">Import status to display; stored on this instance.</param>
public void SetData(ModelImportStatus _mos)
{
    mos = _mos;
    string summaryText = "";
    var errorList = new ObservableCollection<ErrorMessages>();
    GenerateMessages(out summaryText, out errorList, true);
    // Refresh each message's display text before binding the collection.
    foreach (ErrorMessages errorMessage in errorList)
    {
        errorMessage.SetStatusTextInfo();
    }
    DataGridMessageList.Items.Clear();
    DataGridMessageList.ItemsSource = errorList;
    MessageText.Text = summaryText;
}
/// <summary>
/// Imports collar data from the supplied stream by delegating to the import
/// utility library. A ProjectID column is forced into the map so every row is
/// tagged with the target project.
/// </summary>
/// <param name="mos">Collector for status/error messages produced during the import.</param>
/// <param name="bmFileStream">Stream containing the collar data to load.</param>
/// <param name="ffFileStream">Format-file stream (not used by this path).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="backgroundWorker">Worker used for progress callbacks.</param>
/// <param name="NKDProjectID">Project the collars belong to.</param>
/// <param name="connString">Database connection string.</param>
/// <param name="existingHoleNames">Hole names already present, for collision handling.</param>
/// <param name="overwrite">Whether existing records may be overwritten.</param>
public void PerformCollarImport(ModelImportStatus mos, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, System.ComponentModel.BackgroundWorker backgroundWorker, Guid NKDProjectID, string connString, List<string> existingHoleNames, bool overwrite)
{
    // FIX: the worker passed in was previously discarded (currentWorker = null),
    // so progress updates never reached the caller. Assign it, mirroring
    // PerformLithoImport.
    this.currentWorker = backgroundWorker;
    DateTime startTime = DateTime.Now;
    int batchSize = 1000;
    ImportUtils.CollarImport collImp = new ImportUtils.CollarImport();
    int approxNumLines = 100; // rough estimate used only for progress scaling — TODO confirm
    // Add the project ID as a fixed header column so it is always written.
    importMap.columnMap.Add(new ColumnMap("", -1, "Header", "ProjectID", ImportDataMap.TEXTDATATYPE, NKDProjectID.ToString(), NKDProjectID.ToString(), ImportDataMap.UNIT_NONE));
    collImp.AddCollarData(mos, bmFileStream, importMap, batchSize, UpdateStatus, approxNumLines, connString, existingHoleNames, NKDProjectID, overwrite);
}
/// <summary>
/// Carry out the block model import: locates the centroid columns, derives the
/// model origin from their minima, and hands the file to the import tools.
/// </summary>
/// <param name="SelectedBMFile">Path of the block model data file.</param>
/// <param name="SelectedFormatBMFile">Path of the format file (not used here).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="rawFileReader">Reader already primed with the file's column stats.</param>
/// <param name="NKDProjectID">Project GUID (string form) the model belongs to.</param>
/// <param name="modelAlias">Display alias for the new model.</param>
/// <returns>Always true; detailed status is collected in the local ModelImportStatus.</returns>
internal bool DoBMImport(string SelectedBMFile, string SelectedFormatBMFile, ImportDataMap importMap, RawFileReader rawFileReader, string NKDProjectID, string modelAlias)
{
    BaseImportTools bit = new BaseImportTools();
    int cxColumnID = importMap.GetColumnIDMappedTo("CentroidX");
    int cyColumnID = importMap.GetColumnIDMappedTo("CentroidY");
    int czColumnID = importMap.GetColumnIDMappedTo("CentroidZ");
    ColumnStats xOrigin = rawFileReader.GetDimensions(cxColumnID);
    ColumnStats yOrigin = rawFileReader.GetDimensions(cyColumnID);
    ColumnStats zOrigin = rawFileReader.GetDimensions(czColumnID);
    int approxNumLines = xOrigin.count;
    ModelImportStatus mos = new ModelImportStatus();
    Guid newModelGuid = Guid.NewGuid();
    Guid authorGuid = new Guid(); // Guid.Empty — author applied server-side only; TODO confirm
    // FIX: wrap the stream in a using block so the file handle is released even
    // if the import throws (previously the stream was never disposed).
    using (Stream bmFileStream = new FileStream(SelectedBMFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        // FIX: dropped the unused 'status' local that captured the return value.
        bit.PerformBMImport(mos, newModelGuid, bmFileStream, null, importMap, xOrigin.min, yOrigin.min, zOrigin.min, backgroundWorker, approxNumLines, NKDProjectID, modelAlias, authorGuid, ConnectionString);
    }
    return true;
}
/// <summary>
/// Imports a batch of LAS files, pushing each file's data to the database as it
/// is processed (to bound memory use), and returns one aggregated status.
/// </summary>
/// <param name="filePaths">Paths of the LAS files to import.</param>
/// <param name="currentProjectID">Project the data belongs to.</param>
/// <returns>Aggregated status: total lines read, records added, and all warnings/errors.</returns>
internal ModelImportStatus BatchImportLasFiles(string[] filePaths, Guid currentProjectID)
{
    ModelImportStatus finalStatus = new ModelImportStatus();
    LASBatchImportTools ll = new LASBatchImportTools();
    int importCount = 0;
    int failCount = 0;
    Dictionary<string, ModelImportStatus> mosList = new Dictionary<string, ModelImportStatus>();
    // Only report progress when running under a worker (i.e. the UI client).
    bool reportStatus = this.backgroundWorker != null;
    int fileCount = filePaths.Length;
    int thisFileNum = 0;
    var dataDict = new Dictionary<string, List<object>>();
    foreach (string file in filePaths)
    {
        double pct = ((double)thisFileNum / (double)fileCount) * 100.0;
        thisFileNum++;
        if (reportStatus)
        {
            backgroundWorker.ReportProgress((int)pct, "Processing las file " + thisFileNum + " of " + fileCount + ", " + file);
        }
        ModelImportStatus mis = new ModelImportStatus();
        NKD.Import.Client.Processing.LASImport li = new NKD.Import.Client.Processing.LASImport();
        LASFile lf = li.GetLASFile(file, mis);
        if (lf == null)
        {
            mis.errorMessages.Add("Failed to load LAS file " + file);
            mosList.Add(file, mis);
            failCount++; // FIX: load failures are now counted as failures
            continue;
        }
        List<object> data = ll.ProcessLASFile(lf, file, mis, currentProjectID, this.backgroundWorker);
        // FIX: the original tested `string msg = ""; if (msg != null)`, which is
        // always true — every file was counted as failed and importCount never
        // incremented. Use the per-file status code instead.
        if (mis.finalErrorCode != ModelImportStatus.OK)
        {
            failCount++;
        }
        else
        {
            importCount++;
        }
        mosList.Add(file, mis);
        dataDict.Add(file, data);
        // Flush this file's data to the DB immediately to avoid memory issues
        // (FIXME in original: should be driven by memory pressure, not per-file).
        PushToDB(dataDict);
        dataDict = new Dictionary<string, List<object>>();
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
    // Aggregate the per-file results into a single status object.
    int totRecordsAddedCount = 0;
    int totLinesReadCount = 0;
    foreach (KeyValuePair<string, ModelImportStatus> kvp in mosList)
    {
        ModelImportStatus ms = kvp.Value;
        totRecordsAddedCount += ms.recordsAdded;
        totLinesReadCount += ms.linesReadFromSource;
        if (ms.finalErrorCode != ModelImportStatus.OK)
        {
            finalStatus.finalErrorCode = ModelImportStatus.GENERAL_LOAD_ERROR;
        }
        foreach (string m in ms.warningMessages)
        {
            finalStatus.warningMessages.Add(m);
        }
        foreach (string m in ms.errorMessages)
        {
            finalStatus.errorMessages.Add(m);
        }
    }
    finalStatus.linesReadFromSource = totLinesReadCount;
    finalStatus.recordsAdded = totRecordsAddedCount;
    return finalStatus;
}
/// <summary>
/// Runs a litho import for the selected file: counts its lines for progress
/// reporting, then streams it to the import tools.
/// </summary>
/// <param name="SelectedFile">Path of the litho data file.</param>
/// <param name="SelectedFormatFile">Path of the format file (not used here).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="rawFileReader">Reader for the raw file (not used by this path).</param>
/// <param name="NKDProjectID">Project the data belongs to.</param>
/// <param name="doOverwrite">Whether existing records may be overwritten.</param>
/// <param name="checkForDuplicates">Whether to check for duplicate records.</param>
/// <returns>Status object describing the outcome of the import.</returns>
internal ModelImportStatus DoLithoImport(string SelectedFile, string SelectedFormatFile, ImportDataMap importMap, RawFileReader rawFileReader, Guid NKDProjectID, bool doOverwrite, bool checkForDuplicates)
{
    BaseImportTools bit = new BaseImportTools();
    ModelImportStatus mos = new ModelImportStatus();
    GeneralFileInfo gfi = new GeneralFileInfo();
    gfi.GeneralFileStats(SelectedFile);
    int numLines = gfi.numLines;
    // FIX: dispose the stream when the import completes (previously the file
    // handle was leaked).
    using (Stream fileStream = new FileStream(SelectedFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        bit.PerformLithoImport(mos, fileStream, null, importMap, this.backgroundWorker, NKDProjectID, ConnectionString, numLines, doOverwrite, checkForDuplicates);
    }
    return mos;
}
/// <summary>
/// Imports a single LAS file and returns the extracted data rows.
/// </summary>
/// <param name="lasFile">Parsed LAS file to import.</param>
/// <param name="origFilename">Original file name (hole ID is derived from it downstream — TODO confirm).</param>
/// <param name="mos">Collector for status/error messages.</param>
/// <param name="currentProjectID">Project the data belongs to.</param>
/// <param name="backgroundWorker">Worker used for progress callbacks; stored on this instance.</param>
/// <returns>Data rows produced by the LAS import utilities.</returns>
public List<object> ImportLasFile(NKD.Import.LAS.LASFile lasFile, string origFilename, ModelImportStatus mos, Guid currentProjectID, System.ComponentModel.BackgroundWorker backgroundWorker)
{
    this.currentWorker = backgroundWorker;
    // All parsing/import work happens inside the LAS import utilities.
    var lasUtils = new LasImportUtils();
    return lasUtils.ImportLASFile(lasFile, origFilename, mos, currentProjectID, UpdateStatus);
}
/// <summary>
/// Imports collar data from the supplied stream by delegating to the import
/// utility library. A ProjectID column is forced into the map so every row is
/// tagged with the target project.
/// </summary>
/// <param name="mos">Collector for status/error messages produced during the import.</param>
/// <param name="bmFileStream">Stream containing the collar data to load.</param>
/// <param name="ffFileStream">Format-file stream (not used by this path).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="backgroundWorker">Worker used for progress callbacks.</param>
/// <param name="NKDProjectID">Project the collars belong to.</param>
/// <param name="connString">Database connection string.</param>
/// <param name="existingHoleNames">Hole names already present, for collision handling.</param>
/// <param name="overwrite">Whether existing records may be overwritten.</param>
public void PerformCollarImport(ModelImportStatus mos, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, System.ComponentModel.BackgroundWorker backgroundWorker, Guid NKDProjectID, string connString, List<string> existingHoleNames, bool overwrite)
{
    // FIX: the worker passed in was previously discarded (currentWorker = null),
    // so progress updates never reached the caller. Assign it, mirroring
    // PerformLithoImport.
    this.currentWorker = backgroundWorker;
    DateTime startTime = DateTime.Now;
    int batchSize = 1000;
    ImportUtils.CollarImport collImp = new ImportUtils.CollarImport();
    int approxNumLines = 100; // rough estimate used only for progress scaling — TODO confirm
    // Add the project ID as a fixed header column so it is always written.
    importMap.columnMap.Add(new ColumnMap("", -1, "Header", "ProjectID", ImportDataMap.TEXTDATATYPE, NKDProjectID.ToString(), NKDProjectID.ToString(), ImportDataMap.UNIT_NONE));
    collImp.AddCollarData(mos, bmFileStream, importMap, batchSize, UpdateStatus, approxNumLines, connString, existingHoleNames, NKDProjectID, overwrite);
}
/// <summary>
/// Creates a new block model record in the database, attaches its column
/// metadata, and bulk-loads the individual block rows from the input stream.
/// </summary>
/// <param name="mos">Collector for status/error messages produced during the import.</param>
/// <param name="blockModelGUID">ID assigned to the newly created block model.</param>
/// <param name="bmFileStream">Stream containing the block data to load.</param>
/// <param name="ffFileStream">Format-file stream (not read in this method).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="xOrigin">Model origin X.</param>
/// <param name="yOrigin">Model origin Y.</param>
/// <param name="zOrigin">Model origin Z.</param>
/// <param name="worker">Background worker; a null worker marks a server-side run.</param>
/// <param name="approxNumLines">Estimated line count, used for progress reporting.</param>
/// <param name="NKDProjectID">Project GUID (string form) the model belongs to.</param>
/// <param name="alias">Display alias for the model.</param>
/// <param name="authorGuid">Author/responsible contact, applied only on server runs.</param>
/// <param name="connString">Database connection string.</param>
/// <returns>Domain values discovered while adding block data (empty if setup failed).</returns>
public List<string> PerformBMImport(ModelImportStatus mos, Guid blockModelGUID, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, double xOrigin, double yOrigin, double zOrigin, System.ComponentModel.BackgroundWorker worker, int approxNumLines, string NKDProjectID, string alias, Guid authorGuid, string connString)
{
    this.currentWorker = worker;
    using (var entityObj = new NKDC(connString, null))
    {
        // talk to the import lib to do the import
        DateTime startTime = DateTime.Now;
        int batchSize = 1000;
        //UpdateStatus("Creating new NKD block model", 20.0);
        ImportUtils.BlockImport dbIm = null;
        try
        {
            dbIm = new ImportUtils.BlockImport();
            //ImportDataMap importMapLoaded = FormatSpecificationIO.ImportMapIO.LoadImportMap(ffFileStream);
            // Build and persist the block model header record.
            BlockModel xAdd = new BlockModel();
            xAdd.OriginX = (Decimal)xOrigin; // TO-DO
            xAdd.OriginY = (Decimal)yOrigin; // TO-DO
            xAdd.OriginZ = (Decimal)zOrigin; // TO-DO
            xAdd.Alias = alias;
            // when on server, automatically pick up the author GUID and apply it to the model.
            if (currentWorker == null)
            {
                xAdd.AuthorContactID = authorGuid;
                xAdd.ResponsibleContactID = authorGuid;
            }
            xAdd.VersionUpdated = DateTime.UtcNow;
            xAdd.BlockModelID = blockModelGUID;
            xAdd.ProjectID = new Guid(NKDProjectID);
            // TODO - allow user to pick size
            entityObj.BlockModels.AddObject(xAdd);
            entityObj.SaveChanges();
            UpdateStatus("Setting model meta data", 25.0);
            // add the meta data to identify all of the columns etc.
        }
        catch (Exception ex)
        {
            // NOTE(review): this message literal spans a raw line break in the
            // original source — preserved as-is; probably meant to be "\n".
            mos.AddErrorMessage("Error setting block model defintion data. 
" + ex.ToString());
        }
        List<string> domains = new List<string>();
        if (dbIm != null)
        {
            try
            {
                List<BlockModelMetadata> blockColumnMetaData = dbIm.SetBlockModelMetaData(blockModelGUID, importMap, connString);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error setting block model meta data:\n" + ex.ToString());
            }
            try
            {
                // add the new BM guid to the column map as a default so that it is always entered
                importMap.columnMap.Add(new ColumnMap("", -1, "BlockModelBlock", "BlockModelID", ImportDataMap.TEXTDATATYPE, blockModelGUID.ToString(), blockModelGUID.ToString(), ImportDataMap.UNIT_NONE));
                // add the individual blocks
                domains = dbIm.AddBlockData(mos, bmFileStream, importMap, blockModelGUID, batchSize, UpdateStatus, approxNumLines, connString);
                // run this only if in the windows client (determined by the status of the worker thread at this stage)
                if (currentWorker != null)
                {
                    List<Tuple<string, string>> doms = new List<Tuple<string, string>>();
                    string domainColumnName = "Domain";
                    foreach (string ss in domains)
                    {
                        doms.Add(new Tuple<string, string>(domainColumnName, ss));
                    }
                    dbIm.UpdateDomains(doms, blockModelGUID);
                }
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error adding block data:\n" + ex.ToString());
            }
        }
        return domains;
    }
}
/// <summary>
/// Background-worker entry point for the coal quality import: unpacks the
/// import map from the event argument and runs the import.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Carries the ImportDataMap as its Argument.</param>
private void bw_DoCoalQualityImportWork(object sender, DoWorkEventArgs e)
{
    ImportDataMap importMap = (ImportDataMap)e.Argument;
    commandDirector.SetCurrentWorkerThread(workerCoalQualityDataImport);
    // Delimiter is picked by extension: comma for .csv, tab for anything else.
    char delimiter = (SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t';
    var rawFileReader = new RawFileReader(delimiter);
    latestImportUpdateStatus = commandDirector.DoCoalQualityImport(SelectedFile, SelectedFormatFile, importMap, rawFileReader, NKDProjectID, doDuplicateCheck, doImportOverwrite);
}
/// <summary>
/// Resets the import UI back to its initial "no file loaded" state: clears the
/// last status, the loaded-file label, the mapping table, and the data preview.
/// </summary>
public void ResetUI()
{
    // FIX: removed an unused ModelColumnDefinitions instance that was
    // constructed and immediately discarded.
    latestImportUpdateStatus = null;
    SelectedFormatFile = "";
    LabelLoadedFile.Content = "no file loaded";
    //currentDBFieldsMetaInfo = new List<ColumnMetaInfo>();
    MapConfigTable.ResetView();
    ImportDataPreview.ResetData();
    this.SelectedImportType = -1;
    ReSetRibbonEnabledStatus(true);
}
/// <summary>
/// Routed-command handler for the collar import: validates that a project is
/// selected, builds the import map for the chosen file, runs the import, and
/// shows a modal status window with the results.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Routed event args; marked handled on exit.</param>
private void CollarImportExecuted(object sender, ExecutedRoutedEventArgs e)
{
    // A target project is mandatory; highlight the combo box and bail out if none chosen.
    if (ComboBoxProjectList.SelectedValue == null)
    {
        ComboBoxProjectList.BorderBrush = Brushes.Red;
        MessageBox.Show("You must select a project before importing");
        return;
    }
    Guid NKDProjectID = (Guid)ComboBoxProjectList.SelectedValue;
    // NOTE(review): IsChecked is bool? — this cast throws if the checkbox is ever
    // tri-state null; confirm it is two-state.
    bool overwrite = (bool)checkBoxOverwrite.IsChecked;
    // Delimiter is picked by extension: comma for .csv, tab for anything else.
    ImportDataMap importMap = MapConfigTable.GetImportDataMap(SelectedFile, MapConfigTable.collarPrimaryTableName, (SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t', ImportDataPreview.MaxColumns);
    // add into map details of which columns are foreign keys
    if (collarDBFields != null)
    {
        importMap.UpdateWithFKInof(collarDBFields);
    }
    // get the selected project ID
    var rawFileReader = new RawFileReader((SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t');
    ModelImportStatus status = commandDirector.DoCollarImport(SelectedFile, SelectedFormatFile, importMap, rawFileReader, NKDProjectID, overwrite);
    latestImportUpdateStatus = status;
    //if(status.finalErrorCode != ModelImportStatus.OK){
    //    string ss = status.GenerateStringMessage(true);
    //    lastestImportUpdateStatus.SaveReportData();
    //    if (status.finalErrorCode == ModelImportStatus.DATA_CONSISTENCY_ERROR)
    //    {
    //        string headline = "Import complete. " + status.linesReadFromSource + " data lines read, " + status.recordsAdded + " new records added \n" + status.recordsUpdated + " existing records updated.";
    //        MessageBox.Show("Warnings issued during import.\n\n" + headline + ".\n\n" + ss);
    //    }
    //    else
    //    {
    //        MessageBox.Show("Import failed. " + ss);
    //    }
    //}else{
    //    MessageBox.Show("Import complete. 
"+status.linesReadFromSource+" data lines read, "+status.recordsAdded+" new records added \n"+status.recordsUpdated+" existing records updated.");
    //}
    // Show the outcome (messages, counts) in a modal status window.
    if (latestImportUpdateStatus != null)
    {
        ImportStatusWindow ii = new ImportStatusWindow();
        ii.SetData(latestImportUpdateStatus);
        ii.ShowDialog();
    }
    //workerBMDataImport = new BackgroundWorker();
    //workerBMDataImport.WorkerReportsProgress = true;
    //workerBMDataImport.WorkerSupportsCancellation = false;
    //workerBMDataImport.DoWork += bw_DoCollarImportWork;
    //// Method to call when Progress has changed
    //workerBMDataImport.ProgressChanged += bw_BMImportProgressChanged;
    //// Method to run after BackgroundWorker has completed?
    //workerBMDataImport.RunWorkerCompleted += bw_BMImportRunWorkerCompleted;
    //workerBMDataImport.RunWorkerAsync(importMap);
    e.Handled = true;
}
/// <summary>
/// Background-worker entry point for the LAS batch import: unpacks the file
/// list from the event argument and runs the batch.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Carries the string[] of file paths as its Argument.</param>
private void bw_DoLASBatchImportWork(object sender, DoWorkEventArgs e)
{
    string[] lasFilePaths = (string[])e.Argument;
    commandDirector.SetCurrentWorkerThread(workerLASBatchDataImport);
    latestImportUpdateStatus = commandDirector.BatchImportLasFiles(lasFilePaths, NKDProjectID);
    // Final progress tick so the UI knows the batch has finished.
    workerLASBatchDataImport.ReportProgress((int)0, "");
}
/// <summary>
/// Imports a single LAS file, returning the parsed data rows.
/// </summary>
/// <param name="lasFile">Parsed LAS file to import.</param>
/// <param name="origFilename">Original file name (hole ID is derived from it downstream — TODO confirm).</param>
/// <param name="mos">Collector for status/error messages.</param>
/// <param name="currentProjectID">Project the data belongs to.</param>
/// <param name="backgroundWorker">Worker used for progress callbacks; stored on this instance.</param>
/// <returns>Data rows produced by the LAS import utilities.</returns>
public List<object> ImportLasFile(NKD.Import.LAS.LASFile lasFile, string origFilename, ModelImportStatus mos, Guid currentProjectID, System.ComponentModel.BackgroundWorker backgroundWorker)
{
    this.currentWorker = backgroundWorker;
    // All parsing/import work happens inside the LAS import utilities.
    LasImportUtils importUtils = new LasImportUtils();
    List<object> rows = importUtils.ImportLASFile(lasFile, origFilename, mos, currentProjectID, UpdateStatus);
    return rows;
}
/// <summary>
/// Runs a collar import for the selected file into the given project,
/// supplying the set of hole names already present so the importer can handle
/// collisions/overwrites.
/// </summary>
/// <param name="SelectedFile">Path of the collar data file.</param>
/// <param name="SelectedFormatBMFile">Path of the format file (not used here).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="rawFileReader">Reader for the raw file (not used by this path).</param>
/// <param name="NKDProjectID">Project the collars belong to.</param>
/// <param name="overwrite">Whether existing records may be overwritten.</param>
/// <returns>Status object describing the outcome of the import.</returns>
internal ModelImportStatus DoCollarImport(string SelectedFile, string SelectedFormatBMFile, ImportDataMap importMap, RawFileReader rawFileReader, Guid NKDProjectID, bool overwrite)
{
    BaseImportTools bit = new BaseImportTools();
    // Collect the hole names already in this project.
    List<CollarInfo> existingHoles = this.GetHolesForProject(NKDProjectID);
    List<string> existingHoleNames = new List<string>();
    foreach (CollarInfo ci in existingHoles)
    {
        existingHoleNames.Add(ci.Name);
    }
    ModelImportStatus mos = new ModelImportStatus();
    // FIX: dispose the stream when the import completes (previously the file
    // handle was leaked).
    using (Stream fileStream = new FileStream(SelectedFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        bit.PerformCollarImport(mos, fileStream, null, importMap, this.backgroundWorker, NKDProjectID, ConnectionString, existingHoleNames, overwrite);
    }
    return mos;
}
/// <summary>
/// Creates a new block model record in the database, attaches its column
/// metadata, and bulk-loads the individual block rows from the input stream.
/// </summary>
/// <param name="mos">Collector for status/error messages produced during the import.</param>
/// <param name="blockModelGUID">ID assigned to the newly created block model.</param>
/// <param name="bmFileStream">Stream containing the block data to load.</param>
/// <param name="ffFileStream">Format-file stream (not read in this method).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="xOrigin">Model origin X.</param>
/// <param name="yOrigin">Model origin Y.</param>
/// <param name="zOrigin">Model origin Z.</param>
/// <param name="worker">Background worker; a null worker marks a server-side run.</param>
/// <param name="approxNumLines">Estimated line count, used for progress reporting.</param>
/// <param name="NKDProjectID">Project GUID (string form) the model belongs to.</param>
/// <param name="alias">Display alias for the model.</param>
/// <param name="authorGuid">Author/responsible contact, applied only on server runs.</param>
/// <param name="connString">Database connection string.</param>
/// <returns>Domain values discovered while adding block data (empty if setup failed).</returns>
public List <string> PerformBMImport(ModelImportStatus mos, Guid blockModelGUID, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, double xOrigin, double yOrigin, double zOrigin, System.ComponentModel.BackgroundWorker worker, int approxNumLines, string NKDProjectID, string alias, Guid authorGuid, string connString)
{
    this.currentWorker = worker;
    using (var entityObj = new NKDC(connString, null))
    {
        // talk to the import lib to do the import
        DateTime startTime = DateTime.Now;
        int batchSize = 1000;
        //UpdateStatus("Creating new NKD block model", 20.0);
        ImportUtils.BlockImport dbIm = null;
        try
        {
            dbIm = new ImportUtils.BlockImport();
            //ImportDataMap importMapLoaded = FormatSpecificationIO.ImportMapIO.LoadImportMap(ffFileStream);
            // Build and persist the block model header record.
            BlockModel xAdd = new BlockModel();
            xAdd.OriginX = (Decimal)xOrigin; // TO-DO
            xAdd.OriginY = (Decimal)yOrigin; // TO-DO
            xAdd.OriginZ = (Decimal)zOrigin; // TO-DO
            xAdd.Alias = alias;
            // when on server, automatically pick up the author GUID and apply it to the model.
            if (currentWorker == null)
            {
                xAdd.AuthorContactID = authorGuid;
                xAdd.ResponsibleContactID = authorGuid;
            }
            xAdd.VersionUpdated = DateTime.UtcNow;
            xAdd.BlockModelID = blockModelGUID;
            xAdd.ProjectID = new Guid(NKDProjectID);
            // TODO - allow user to pick size
            entityObj.BlockModels.AddObject(xAdd);
            entityObj.SaveChanges();
            UpdateStatus("Setting model meta data", 25.0);
            // add the meta data to identify all of the columns etc.
        }
        catch (Exception ex)
        {
            // NOTE(review): this message literal spans a raw line break in the
            // original source — preserved as-is; probably meant to be "\n".
            mos.AddErrorMessage("Error setting block model defintion data. 
" + ex.ToString());
        }
        List <string> domains = new List <string>();
        if (dbIm != null)
        {
            try
            {
                List <BlockModelMetadata> blockColumnMetaData = dbIm.SetBlockModelMetaData(blockModelGUID, importMap, connString);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error setting block model meta data:\n" + ex.ToString());
            }
            try
            {
                // add the new BM guid to the column map as a default so that it is always entered
                importMap.columnMap.Add(new ColumnMap("", -1, "BlockModelBlock", "BlockModelID", ImportDataMap.TEXTDATATYPE, blockModelGUID.ToString(), blockModelGUID.ToString(), ImportDataMap.UNIT_NONE));
                // add the individual blocks
                domains = dbIm.AddBlockData(mos, bmFileStream, importMap, blockModelGUID, batchSize, UpdateStatus, approxNumLines, connString);
                // run this only if in the windows client (determined by the status of the worker thread at this stage)
                if (currentWorker != null)
                {
                    List <Tuple <string, string> > doms = new List <Tuple <string, string> >();
                    string domainColumnName = "Domain";
                    foreach (string ss in domains)
                    {
                        doms.Add(new Tuple <string, string>(domainColumnName, ss));
                    }
                    dbIm.UpdateDomains(doms, blockModelGUID);
                }
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error adding block data:\n" + ex.ToString());
            }
        }
        return(domains);
    }
}