/// <summary>
/// Serializes the supplied import map to an XML file at the given path.
/// Does nothing when either the map or the filename is null.
/// </summary>
/// <param name="importDataMap">The map definition to persist; may be null.</param>
/// <param name="filename">Destination path for the XML document; may be null.</param>
public static void SaveImportMap(ImportDataMap importDataMap, string filename)
{
    // Guard clause: silently skip when there is nothing to save or nowhere to save it.
    if (importDataMap == null || filename == null)
    {
        return;
    }

    var mapSerializer = new XmlSerializer(importDataMap.GetType());
    using (var xmlWriter = XmlWriter.Create(filename))
    {
        mapSerializer.Serialize(xmlWriter, importDataMap);
    }
}
/// <summary>
/// Carry out the block model import: derives the model origin from the minima of the
/// mapped centroid columns, then streams the file into the database via
/// <c>BaseImportTools.PerformBMImport</c>.
/// </summary>
/// <param name="SelectedBMFile">Path of the block model data file to import.</param>
/// <param name="SelectedFormatBMFile">Path of the format definition file (not used directly here).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="blockRawFileReader">Reader already primed with the file, used for column statistics.</param>
/// <param name="XODBProjectID">Identifier of the target project.</param>
/// <param name="modelAlias">Display alias for the new model.</param>
/// <returns>Always true; import status is reported through the background worker.</returns>
internal bool DoBMImport(string SelectedBMFile, string SelectedFormatBMFile, ImportDataMap importMap, RawFileReader blockRawFileReader, string XODBProjectID, string modelAlias)
{
    BaseImportTools bit = new BaseImportTools();

    // Locate the centroid columns; their minima become the model origin and
    // the X column's row count approximates the number of input lines.
    int cxColumnID = importMap.GetColumnIDMappedTo("CentroidX");
    int cyColumnID = importMap.GetColumnIDMappedTo("CentroidY");
    int czColumnID = importMap.GetColumnIDMappedTo("CentroidZ");
    ColumnStats xOrigin = blockRawFileReader.GetDimensions(cxColumnID);
    ColumnStats yOrigin = blockRawFileReader.GetDimensions(cyColumnID);
    ColumnStats zOrigin = blockRawFileReader.GetDimensions(czColumnID);
    int approxNumLines = xOrigin.count;

    ModelImportStatus mos = new ModelImportStatus();
    Guid newModelGuid = Guid.NewGuid();
    // NOTE(review): new Guid() is Guid.Empty, not a fresh identifier — confirm an
    // empty author id is intended here (contrast with Guid.NewGuid() above).
    Guid authorGuid = new Guid();

    // FIX: the FileStream was previously never disposed, leaking the file handle
    // for the lifetime of the process. ReadWrite sharing is kept so other viewers
    // of the file are not blocked during the import.
    using (Stream bmFileStream = new FileStream(SelectedBMFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        bit.PerformBMImport(mos, newModelGuid, bmFileStream, null, importMap, xOrigin.min, yOrigin.min, zOrigin.min, backgroundWorker, approxNumLines, XODBProjectID, modelAlias, authorGuid, ConnectionString);
    }
    return true;
}
/// <summary>
/// Pre-populates the block dimensions control from the mapped centroid and length
/// columns of the import map: origin comes from each centroid column's minimum,
/// block width from each length column's maximum. Unmapped columns (id of -1)
/// leave the corresponding dimension untouched.
/// </summary>
/// <param name="impMap">Column mapping for the currently loaded file.</param>
private void PresetDimensionData(ImportDataMap impMap)
{
    int cxColumnID = impMap.GetColumnIDMappedTo("CentroidX");
    int cyColumnID = impMap.GetColumnIDMappedTo("CentroidY");
    int czColumnID = impMap.GetColumnIDMappedTo("CentroidZ");
    // FIX: was "LegthX" (typo), which never matched and left blockXWidth unset
    // while Y and Z were populated from "LengthY"/"LengthZ".
    int xincColumnID = impMap.GetColumnIDMappedTo("LengthX");
    int yincColumnID = impMap.GetColumnIDMappedTo("LengthY");
    int zincColumnID = impMap.GetColumnIDMappedTo("LengthZ");

    PhysicalDimensions pd = new PhysicalDimensions();
    if (cxColumnID > -1)
    {
        ColumnStats xOrigin = blockRawFileReader.GetDimensions(cxColumnID);
        pd.originX = xOrigin.min;
    }
    if (cyColumnID > -1)
    {
        ColumnStats yOrigin = blockRawFileReader.GetDimensions(cyColumnID);
        pd.originY = yOrigin.min;
    }
    if (czColumnID > -1)
    {
        ColumnStats zOrigin = blockRawFileReader.GetDimensions(czColumnID);
        pd.originZ = zOrigin.min;
    }
    if (xincColumnID > -1)
    {
        ColumnStats xInc = blockRawFileReader.GetDimensions(xincColumnID);
        pd.blockXWidth = xInc.max;
    }
    if (yincColumnID > -1)
    {
        ColumnStats yInc = blockRawFileReader.GetDimensions(yincColumnID);
        pd.blockYWidth = yInc.max;
    }
    if (zincColumnID > -1)
    {
        ColumnStats zInc = blockRawFileReader.GetDimensions(zincColumnID);
        pd.blockZWidth = zInc.max;
    }
    BlockDimensionsControl.SetBlockDimensions(pd);
}
/// <summary>
/// Overwrites existing collar records with the data from <paramref name="rejectedLines"/>
/// (lines previously rejected as duplicates), issuing one UPDATE per line keyed on the
/// hole's header GUID and project id, committed in batches of 10 inside SQL transactions.
/// </summary>
/// <param name="mos">Import status accumulator; receives the updated-record count and any errors.</param>
/// <param name="rejectedLines">Parsed data lines (one list of column values per line) to write.</param>
/// <param name="importMap">Column mapping; must contain a "HoleName" target column.</param>
/// <param name="connectionString">Database connection string (two connections are opened from it).</param>
/// <param name="XODBProjectID">Project the holes belong to; used in the WHERE clause.</param>
/// <param name="UpdateStatus">Progress callback (message, percent complete).</param>
/// <param name="holeWarningMessages">Per-hole warning map; an "overwritten" message is recorded for each hole.</param>
private void OverwriteCollarRecord(ModelImportStatus mos, List<List<string>> rejectedLines, ImportDataMap importMap, string connectionString, Guid XODBProjectID, Action<string, double> UpdateStatus, Dictionary<string, string> holeWarningMessages)
{
    SqlConnection connection = null;
    // Secondary connection used for foreign-key lookups while the primary holds the transaction.
    SqlConnection secondaryConnection = null;
    try
    {
        connection = new SqlConnection(connectionString);
        connection.Open();
        secondaryConnection = new SqlConnection(connectionString);
        secondaryConnection.Open();
        int numCommits = 0;
        SqlTransaction trans;
        // NOTE(review): the transaction is never rolled back in the catch path, so a
        // partial batch is left pending until the connection closes — confirm intended.
        trans = connection.BeginTransaction();
        List<SqlCommand> commands = new List<SqlCommand>();
        int tb = 0;
        int transactionBatchLimit = 10;
        float bct = 1;
        int repCount = 0;
        //int reportOnBlock = 1000;
        float fNumLines = (float)rejectedLines.Count();
        // get the column containing the hole name
        ColumnMap cmapHeader = importMap.FindItemsByTargetName("HoleName");
        cmapHeader.importDataType = ImportDataMap.TEXTDATATYPE;
        int headerIDX = cmapHeader.sourceColumnNumber;
        int numberOfHolesAdded = 0;
        int linesRead = 0;
        int ct = 1;
        // Get all of the header GUIDs in one go before we try the updates.
        Dictionary<string, Guid> holeIDLookups = CollarQueries.FindHeaderGuidsForProject(XODBProjectID);
        // Force every FK-related column to text type so its GUID string can be quoted into the query.
        foreach (ColumnMap cmap in importMap.columnMap)
        {
            bool isFKColumn = cmap.hasFKRelation;
            if (isFKColumn)
            {
                cmap.importDataType = ImportDataMap.TEXTDATATYPE;
            }
        }
        foreach (List<string> columnData in rejectedLines)
        {
            linesRead++;
            repCount++;
            bct++;
            string statementPart1 = "UPDATE " + importMap.mapTargetPrimaryTable + " ";
            string clauseValues = "";
            // Using the column map, pick out the hole name field and look up its header GUID.
            string headerNameItem = columnData[headerIDX];
            string headerGUID = "";
            bool lv = holeIDLookups.ContainsKey(headerNameItem);
            if (!lv)
            {
                // No hole ID with this name — should not happen, since these lines were
                // rejected as duplicates of existing holes. headerGUID stays "" and the
                // UPDATE below will match no rows.
            }
            else
            {
                Guid holeGuid = new Guid();
                holeIDLookups.TryGetValue(headerNameItem, out holeGuid);
                headerGUID = holeGuid.ToString();
            }
            #region mappsearch
            // Build the SET clause from all mapped columns.
            foreach (ColumnMap cmap in importMap.columnMap)
            {
                bool isFKColumn = cmap.hasFKRelation;
                int colID = cmap.sourceColumnNumber;
                string columnValue = cmap.defaultValue;
                if (colID >= 0)
                {
                    columnValue = columnData[colID];
                }
                string targetCol = cmap.targetColumnName;
                // Ignore mapped hole name and project ID columns — they form the WHERE clause instead.
                if (targetCol.Trim().Equals("HoleName") || targetCol.Trim().Equals("ProjectID"))
                {
                    continue;
                }
                string targetTable = cmap.targetColumnTable;
                clauseValues += "" + targetTable + "." + targetCol + "=";
                if (isFKColumn)
                {
                    // Resolve the raw value to its foreign-key GUID via the secondary connection.
                    string newValue = ForeignKeyUtils.FindFKValueInDictionary(columnValue, cmap, secondaryConnection, true);
                    if (newValue == null)
                    {
                        clauseValues += "NULL,";
                    }
                    else
                    {
                        clauseValues += "\'" + newValue + "\',";
                    }
                }
                else
                {
                    if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                    {
                        // "-" and blank are treated as missing: fall back to the default value or NULL.
                        if (columnValue.Equals("-") || columnValue.Trim().Length == 0)
                        {
                            if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                            {
                                columnValue = cmap.defaultValue;
                            }
                            else
                            {
                                columnValue = "NULL";
                            }
                        }
                    }
                    else if (cmap.importDataType.Equals(ImportDataMap.TIMESTAMPDATATYPE))
                    {
                        DateTime dtr = new DateTime();
                        bool parsed = DateTime.TryParse(columnValue, out dtr);
                        if (parsed)
                        {
                            // 12-hour clock with AM/PM designator ("hh ... tt").
                            columnValue = "\'" + dtr.ToString("yyyy-MM-dd hh:mm:ss tt") + "\'";
                        }
                        else
                        {
                            columnValue = "NULL";
                        }
                    }
                    else
                    {
                        // NOTE(review): values are concatenated into the SQL text rather than
                        // parameterized — injection/quoting risk if input contains quotes.
                        columnValue = "\'" + columnValue + "\'";
                    }
                    clauseValues += columnValue + ",";
                }
            }
            #endregion
            // Trim the trailing comma left by the loop above.
            clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
            string commandText = statementPart1 + "SET " + clauseValues + " WHERE HeaderID=\'" + headerGUID + "\' AND ProjectID=\'" + XODBProjectID.ToString() + "\';";
            SqlCommand sqc = new SqlCommand(commandText, connection, trans);
            string msg = "";
            //holeWarningMessages.TryGetValue(headerNameItem, out msg);
            msg = "Hole "+headerNameItem + " was overwritten with new data";
            holeWarningMessages[headerNameItem] = msg;
            numberOfHolesAdded++;
            // commitToDB is a field declared elsewhere in this class — presumably a
            // dry-run switch; TODO confirm where it is set.
            if (commitToDB)
            {
                sqc.ExecuteNonQuery();
            }
            tb++;
            if (tb == transactionBatchLimit)
            {
                // Commit the batch, then renew the transaction.
                if (commitToDB)
                {
                    trans.Commit();
                    numCommits++;
                    // trans = null;
                    trans = connection.BeginTransaction();
                }
                // reset counter
                tb = 0;
            }
            ct++;
        }
        // Commit any final partial batch.
        if (tb > 0)
        {
            if (commitToDB)
            {
                trans.Commit();
            }
            numCommits++;
        }
        mos.recordsUpdated = numberOfHolesAdded;
        UpdateStatus("Finished writing collars to database ", 100.0);
    }
    catch (Exception ex)
    {
        UpdateStatus("Error writing collars to database ", 0);
        mos.AddErrorMessage("Error writing collar data at line " + rejectedLines.Count + ":\n" + ex.ToString());
        mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
    }
    finally
    {
        try
        {
            connection.Close();
            secondaryConnection.Close();
        }
        catch (Exception ex)
        {
            mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
    }
}
/// <summary>
/// Imports collar (drill-hole header) data from a file into the given project,
/// passing the names of holes already in the project so the importer can detect
/// duplicates and honor the overwrite flag.
/// </summary>
/// <param name="SelectedFile">Path of the collar data file.</param>
/// <param name="SelectedFormatBMFile">Path of the format definition file (not used directly here).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="blockRawFileReader">Raw file reader (not used directly here).</param>
/// <param name="XODBProjectID">Target project identifier.</param>
/// <param name="overwrite">When true, existing holes with matching names are overwritten.</param>
/// <returns>The status object populated by the import.</returns>
internal ModelImportStatus DoCollarImport(string SelectedFile, string SelectedFormatBMFile, ImportDataMap importMap, RawFileReader blockRawFileReader, Guid XODBProjectID, bool overwrite)
{
    BaseImportTools bit = new BaseImportTools();

    // Collect the names of collars already in this project for duplicate detection.
    List<CollarInfo> existingHoles = this.GetHolesForProject(XODBProjectID);
    List<string> existingHoleNames = new List<string>();
    foreach (CollarInfo ci in existingHoles)
    {
        existingHoleNames.Add(ci.Name);
    }

    ModelImportStatus mos = new ModelImportStatus();
    // FIX: the FileStream was previously never disposed, leaking the file handle.
    using (Stream fileStream = new FileStream(SelectedFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        bit.PerformCollarImport(mos, fileStream, null, importMap, this.backgroundWorker, XODBProjectID, ConnectionString, existingHoleNames, overwrite);
    }
    return mos;
}
/// <summary>
/// Imports lithology data from a file into the given project. The file is pre-scanned
/// for its line count so the importer can report progress.
/// </summary>
/// <param name="SelectedFile">Path of the lithology data file.</param>
/// <param name="SelectedFormatFile">Path of the format definition file (not used directly here).</param>
/// <param name="importMap">Column mapping describing the input file layout.</param>
/// <param name="blockRawFileReader">Raw file reader (not used directly here).</param>
/// <param name="XODBProjectID">Target project identifier.</param>
/// <param name="doOverwrite">When true, existing records are overwritten.</param>
/// <param name="checkForDuplicates">When true, the importer checks for duplicate records.</param>
/// <returns>The status object populated by the import.</returns>
internal ModelImportStatus DoLithoImport(string SelectedFile, string SelectedFormatFile, ImportDataMap importMap, RawFileReader blockRawFileReader, Guid XODBProjectID, bool doOverwrite, bool checkForDuplicates)
{
    BaseImportTools bit = new BaseImportTools();
    ModelImportStatus mos = new ModelImportStatus();

    // Pre-scan the file for its line count (used for progress reporting).
    GeneralFileInfo gfi = new GeneralFileInfo();
    gfi.GeneralFileStats(SelectedFile);
    int numLines = gfi.numLines;

    // FIX: the FileStream was previously never disposed, leaking the file handle.
    using (Stream fileStream = new FileStream(SelectedFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        bit.PerformLithoImport(mos, fileStream, null, importMap, this.backgroundWorker, XODBProjectID, ConnectionString, numLines, doOverwrite, checkForDuplicates);
    }
    return mos;
}
/// <summary>
/// Applies a saved import map to the preview grid by selecting, in each column's
/// header ComboBox, the entry whose target name (the text between "->" and "(")
/// matches the map's target column name. Items at index 0 and 1 are skipped
/// (non-mapping entries). Reports per-column warnings and an overall mapping status.
/// </summary>
/// <param name="idm">The saved map to apply; may be null.</param>
/// <returns>Status describing how completely the map was applied.</returns>
internal FormatLoadStatus SetMappingFromImportDataMap(ImportDataMap idm)
{
    FormatLoadStatus fms = new FormatLoadStatus();
    // FIX: previously a null map was detected but execution fell through to
    // idm.columnMap and threw NullReferenceException; now we bail out early.
    if (idm == null)
    {
        return fms;
    }
    fms.LoadStatus = FormatLoadStatus.LOAD_OK;

    int mapCount = 0;
    foreach (ColumnMap cm in idm.columnMap)
    {
        string sourceColName = cm.sourceColumnName;
        string targetMappingName = cm.targetColumnName;
        int sourceColNum = cm.sourceColumnNumber;
        DataGridColumn dgc = PreviewGrid.Columns[sourceColNum];
        ComboBox tmp = (ComboBox)dgc.Header;
        int i = 0;
        bool foundMatch = false;
        foreach (string ss in tmp.Items)
        {
            // The first two combo entries are not mapping targets; skip them.
            if (i > 1)
            {
                // Entry format is "->TargetName(SourceName)": extract the target name.
                int idx = ss.IndexOf('(');
                string s1a = ss.Substring(2, idx - 2);
                if (s1a.Equals(targetMappingName))
                {
                    tmp.SelectedIndex = i;
                    mapCount++;
                    foundMatch = true;
                    break;
                }
            }
            i++;
        }
        if (foundMatch == false)
        {
            fms.WarningMessages.Add("No match for column " + sourceColName + " mapped to " + targetMappingName);
        }
    }

    if (mapCount != idm.columnMap.Count)
    {
        fms.MappingStatus = FormatLoadStatus.MAPPING_ASSIGNEMNT_WARNING;
        // FIX: corrected typo "currentluy" in the user-visible message.
        fms.MappingMessage = "Failed to apply all items from saved map to currently loaded data file.";
    }
    else
    {
        fms.MappingStatus = FormatLoadStatus.MAPPING_ASSIGNEMNT_OK;
    }
    // Trigger the selection-changed handling once after all assignments.
    dropDown_SelectionChanged(null, null);
    return fms;
}
/// <summary>
/// Builds an ImportDataMap from the preview grid's header ComboBox selections.
/// A selected value of the form "->TargetName(SourceName)" maps that grid column
/// to the target column; bracketed targets ("[...]") are suffixed with an
/// incrementing index to keep them unique. Also refreshes the mapping labels
/// shown in <c>gpl</c> (red for mandatory fields, orange for optional).
/// </summary>
/// <returns>The assembled column map.</returns>
private ImportDataMap GetColumnDefs()
{
    ImportDataMap impMap = new ImportDataMap();
    int colNum = 0;
    IList<DataGridColumn> columns = PreviewGrid.Columns;
    // Maps resolved target-column name -> source column index, for the label display below.
    Dictionary<string, int> defs = new Dictionary<string, int>();
    int incrementor1 = 1;
    foreach (DataGridColumn dgc in columns)
    {
        ComboBox tmp = (ComboBox)dgc.Header;
        string ss = (string)tmp.SelectedValue;
        if (ss != null && ss.StartsWith("->"))
        {
            // Parse "->TargetName(SourceName)".
            int idx = ss.IndexOf('(');
            string s1a = ss.Substring(2, idx - 2);
            string s1 = ss.Substring(0, idx);
            int idx2 = ss.IndexOf(')');
            string s2 = ss.Substring(idx + 1, (idx2 - idx) - 1);
            try
            {
                string colVal = s1.Substring(2).Trim();
                // Bracketed targets get a unique numeric suffix, e.g. "[Grade" -> "[Grade 1]".
                if (colVal.StartsWith("["))
                {
                    int lv = colVal.Trim().Length - 1;
                    string sx1 = colVal.Substring(0, lv);
                    sx1 += " " + incrementor1 + "]";
                    incrementor1++;
                    colVal = sx1;
                }
                defs.Add(colVal, colNum);
                // Get the specified type from the DB for this column.
                string dbType = LookupColumnDBType(targetMainDataType, s1a, bmRequiredFields);
                impMap.columnMap.Add(new ColumnMap(s2, colNum, targetMainDataType, s1a, dbType, "", "", ImportDataMap.UNIT_NONE));
            }
            catch (Exception)
            {
                // Deliberate best-effort: defs.Add throws on a duplicate target name
                // and the duplicate column is simply skipped.
            }
        }
        colNum++;
    }

    // Refresh the label panel showing which required/optional fields are mapped.
    gpl.Children.Clear();
    int ct = 0;
    foreach (ColumnMetaInfo rf in bmRequiredFields)
    {
        int col = GetValFromDict(defs, rf.columnName);
        SolidColorBrush scb = Brushes.Red;
        if (rf.isMandatory)
        {
            GenerateLabel(ct, rf.columnName, col, scb);
            ct++;
        }
    }
    foreach (ColumnMetaInfo rf in bmOptionalFields)
    {
        int col = GetValFromDict(defs, rf.columnName);
        SolidColorBrush scb = Brushes.Orange;
        GenerateLabel(ct, rf.columnName, col, scb);
        ct++;
    }
    return impMap;
}
/// <summary>
/// Builds an ImportDataMap from the rows currently shown in the column-map grid,
/// keeping only rows that have a non-blank source column name, and stamps it with
/// defaults (data starts at line 2, comma delimiter, current date).
/// </summary>
/// <param name="mapTargetPrimaryTable">Primary table the map targets.</param>
/// <returns>The assembled map.</returns>
private ImportDataMap GenerateImportDataMap(string mapTargetPrimaryTable)
{
    ObservableCollection<ColumnMap> inMapCols = (ObservableCollection<ColumnMap>)DataGridColumnMap.ItemsSource;
    ImportDataMap idm = new ImportDataMap();
    // Keep only rows with a real source column; blank rows are grid placeholders.
    foreach (ColumnMap cm in inMapCols)
    {
        if (cm.sourceColumnName != null && cm.sourceColumnName.Trim().Length > 0)
        {
            idm.columnMap.Add(cm);
        }
    }
    idm.dataStartLine = 2;
    idm.mapDate = System.DateTime.Now;
    idm.mapName = "";
    idm.mapTargetPrimaryTable = mapTargetPrimaryTable;
    idm.inputDelimiter = ',';
    return idm;
}
/// <summary>
/// Looks up the default value previously configured for the given column in an
/// earlier map, matching on trimmed source column name. Returns "" when there is
/// no previous map or no matching column.
/// </summary>
/// <param name="oldMap">The previously generated map; may be null.</param>
/// <param name="mi">The column whose earlier default is wanted.</param>
/// <returns>The matching column's default value, or "".</returns>
private string GetPreviousDefaultFromMap(ImportDataMap oldMap, ColumnMap mi)
{
    if (oldMap == null)
    {
        return "";
    }
    string wantedName = mi.sourceColumnName.Trim();
    foreach (ColumnMap candidate in oldMap.columnMap)
    {
        if (candidate.sourceColumnName.Trim().Equals(wantedName))
        {
            return candidate.defaultValue;
        }
    }
    return "";
}
/// <summary>
/// Looks up the import data type previously configured for the given column in an
/// earlier map, matching on trimmed source column name. Falls back to the column's
/// own current type when there is no previous map or no matching column.
/// </summary>
/// <param name="oldMap">The previously generated map; may be null.</param>
/// <param name="mi">The column whose earlier type is wanted.</param>
/// <returns>The matching column's import data type, or the column's current type.</returns>
private string GetPreviousTypeFromMap(ImportDataMap oldMap, ColumnMap mi)
{
    if (oldMap == null)
    {
        return mi.importDataType;
    }
    string wantedName = mi.sourceColumnName.Trim();
    foreach (ColumnMap candidate in oldMap.columnMap)
    {
        if (candidate.sourceColumnName.Trim().Equals(wantedName))
        {
            return candidate.importDataType;
        }
    }
    return mi.importDataType;
}
/// <summary>
/// Binds the given import map to the column-map grid, carrying forward the data
/// types and default values the user chose in the previously bound map (if any),
/// and (re)builds the data-type and unit-type choice lists.
/// </summary>
/// <param name="impMap">The map to display and edit.</param>
internal void SetMap(ImportDataMap impMap)
{
    // Snapshot the currently bound map (if any) so earlier user choices can be carried forward.
    ImportDataMap oldMap = null;
    if (DataGridColumnMap.ItemsSource != null)
    {
        oldMap = GenerateImportDataMap(impMap.mapTargetPrimaryTable);
    }
    // Detach the grid before mutating the backing collections.
    DataGridColumnMap.ItemsSource = null;
    sourceColumns = new ObservableCollection<string>();
    targetColumns = new ObservableCollection<string>();
    foreach (ColumnMap mi in impMap.columnMap)
    {
        sourceColumns.Add(mi.sourceColumnName);
        targetColumns.Add(mi.targetColumnName);
        if (mi.importDataType == null)
        {
            // No type set at all: default to numeric.
            mi.importDataType = ImportDataMap.NUMERICDATATYPE;
        }else{
            // try and get last used type
            if (oldMap != null && oldMap.columnMap.Count > 0)
            {
                mi.importDataType = GetPreviousTypeFromMap(oldMap, mi);
            }
        }
        // Carry forward the previous default value when this column has none.
        if (mi.defaultValue == null || mi.defaultValue.Trim().Length == 0)
        {
            mi.defaultValue = GetPreviousDefaultFromMap(oldMap, mi);
        }
    }
    // Rebuild the data-type dropdown choices.
    dataTypeList = new ObservableCollection<string>();
    dataTypeList.Add(ImportDataMap.NUMERICDATATYPE);
    dataTypeList.Add(ImportDataMap.TEXTDATATYPE);
    dataTypeList.Add(ImportDataMap.TIMESTAMPDATATYPE);
    DataGridColumnMap.ItemsSource = null;
    DataGridColumnMap.Items.Clear();
    // Rebuild the unit-type dropdown choices ("" = no unit).
    unitTypesList = new ObservableCollection<string>();
    unitTypesList.Add("");
    unitTypesList.Add(ImportDataMap.UNIT_PCT);
    unitTypesList.Add(ImportDataMap.UNIT_PPM);
    // Rebind the grid to an observable copy of the map's columns.
    cmaps = new ObservableCollection<ColumnMap>();
    foreach (ColumnMap cm in impMap.columnMap)
    {
        cmaps.Add(cm);
    }
    DataGridColumnMap.ItemsSource = cmaps;
    DataGridColumnMap.UpdateLayout();
}
/// <summary>
/// Records one BlockModelMetadata row (plus its Parameter row) per mapped column
/// for the given block model, de-duplicating source column names that already
/// exist for the model by appending a GUID suffix.
/// </summary>
/// <param name="blockModelGUID">The model the metadata belongs to.</param>
/// <param name="testMap">Column mapping whose columns are recorded.</param>
/// <param name="connString">Database connection string for the entity context.</param>
/// <returns>The metadata entities that were created and saved.</returns>
internal List<BlockModelMetadata> SetBlockModelMetaData(Guid blockModelGUID, ImportDataMap testMap, string connString)
{
    using (var entityObj = new XODBC(connString, null))
    {
        List<BlockModelMetadata> metaDataItems = new List<BlockModelMetadata>();
        foreach (ColumnMap cmap in testMap.columnMap)
        {
            BlockModelMetadata metaData = new BlockModelMetadata();
            metaData.BlockModelID = blockModelGUID;
            metaData.BlockModelMetadataID = Guid.NewGuid();
            metaData.IsColumnData = true;
            string colName = cmap.sourceColumnName;
            string columnValue = cmap.defaultValue;
            Parameter param1 = new Parameter();
            param1.ParameterName = cmap.targetColumnName; // source column
            param1.ParameterType = "FieldName";
            // If a parameter with this source name already exists for the model,
            // uniquify it. NOTE(review): the chained assignment below also mutates
            // cmap.sourceColumnName on the caller's map — confirm that side effect is intended.
            if (entityObj.BlockModelMetadatas.Where(f => f.BlockModelID == blockModelGUID && f.Parameter.Description == cmap.sourceColumnName).Any())
                param1.Description = cmap.sourceColumnName = string.Format("{0}_{1}", cmap.sourceColumnName, Guid.NewGuid());
            else
                param1.Description = cmap.sourceColumnName; // target column
            param1.ParameterID = Guid.NewGuid();
            // Infer the unit from the source column name suffix ("ppm"/"pct").
            if (cmap.sourceColumnName != null && cmap.sourceColumnName.ToLower().Contains("ppm"))
            {
                param1.UnitID = new Guid("E91773A4-2762-4EDE-8510-38F78FAF981D");// TODO: HACK - get the proper guid for the current unit type by querying database
            }
            else if (cmap.sourceColumnName != null && cmap.sourceColumnName.ToLower().Contains("pct"))
            {
                param1.UnitID = new Guid("AEDBBE0A-6A94-419F-8B43-A98CE942669A");// TODO: HACK - get the proper guid for the current unit type by querying database
            }
            metaData.BlockModelMetadataText = cmap.targetColumnName;
            metaData.ParameterID = param1.ParameterID;
            entityObj.Parameters.AddObject(param1);
            entityObj.BlockModelMetadatas.AddObject(metaData);
            metaDataItems.Add(metaData);
            // NOTE(review): SaveChanges is called once per column (one round trip each)
            // rather than once after the loop — confirm whether per-row saves are required.
            entityObj.SaveChanges();
        }
        return metaDataItems;
    }
}
/// <summary>
/// Add block model data: reads the text input file line by line (starting at the map's
/// data start line), builds one INSERT per block from the column map, and executes them
/// inside transactions committed every <paramref name="batchSize"/> rows. Progress is
/// reported every 1000 blocks; any failure is reported via <paramref name="UpdateStatus"/>.
/// </summary>
/// <param name="textInputDataFile">Path of the block data file.</param>
/// <param name="testMap">Column mapping describing the input file layout.</param>
/// <param name="blockModelGUID">Identifier of the target model (not used directly here).</param>
/// <param name="batchSize">Number of inserts per transaction commit.</param>
/// <param name="UpdateStatus">Progress callback (message, percent complete).</param>
/// <param name="numLines">Approximate number of lines, for percentage calculation.</param>
/// <param name="connString">Database connection string.</param>
internal void AddBlockData(string textInputDataFile, ImportDataMap testMap, Guid blockModelGUID, int batchSize, Action<string, double> UpdateStatus, int numLines, string connString)
{
    // iterate through the data lines
    int ct = 1;
    SqlConnection connection = null;
    try
    {
        connection = new SqlConnection(connString);
        connection.Open();
        int numCommits = 0;
        SqlTransaction trans = connection.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted);
        int tb = 0;
        int transactionBatchLimit = batchSize;
        float pct = 0;
        float bct = 1;
        // report every X blocks
        int repCount = 0;
        int reportOnBlock = 1000;
        float fNumLines = (float)numLines;
        string line = null;

        // FIX: the StreamReader/FileStream were previously never disposed (handle leak),
        // and a failed open was caught only to rethrow with "throw ex", destroying the
        // stack trace. Opening inside the outer try preserves the old error path: any
        // open failure is caught below and reported via UpdateStatus.
        using (FileStream fs = new FileStream(textInputDataFile, FileMode.Open, FileAccess.Read, FileShare.Read))
        using (StreamReader sr = new StreamReader(fs))
        {
            while ((line = sr.ReadLine()) != null)
            {
                repCount++;
                if (repCount == reportOnBlock)
                {
                    repCount = 0;
                    // now report status
                    pct = (bct / fNumLines) * 100.0f;
                    UpdateStatus("Writing block " + bct + " to database", pct);
                }
                bct++;
                // Skip header lines before the map's configured data start line.
                if (ct >= testMap.dataStartLine)
                {
                    string statementPart1 = "INSERT INTO " + testMap.mapTargetPrimaryTable + " ";
                    string clauseValues = "";
                    string clauseParameters = "";
                    List<string> items = parseTestLine(line, testMap.inputDelimiter);
                    // Build the column list and value list from the mapped columns.
                    foreach (ColumnMap cmap in testMap.columnMap)
                    {
                        int colID = cmap.sourceColumnNumber;
                        string columnValue = cmap.defaultValue;
                        if (colID >= 0)
                        {
                            columnValue = items[colID];
                        }
                        string targetCol = cmap.targetColumnName;
                        string targetTable = cmap.targetColumnTable;
                        clauseValues += "" + targetTable + "." + targetCol + ",";
                        if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                        {
                            // "-" is treated as missing: fall back to the default value or NULL.
                            if (columnValue.Equals("-"))
                            {
                                if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                {
                                    columnValue = cmap.defaultValue;
                                }
                                else
                                {
                                    columnValue = "NULL";
                                }
                            }
                            clauseParameters += columnValue + ",";
                        }
                        else
                        {
                            // NOTE(review): values are concatenated into the SQL text rather
                            // than parameterized — quoting/injection risk for text columns.
                            clauseParameters += "\'" + columnValue + "\',";
                        }
                    }
                    // Trim the trailing commas left by the loop above.
                    clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                    clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
                    string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                    SqlCommand sqc = new SqlCommand(commandText, connection, trans);
                    sqc.ExecuteNonQuery();
                    tb++;
                    if (tb == transactionBatchLimit)
                    {
                        // commit batch, then renew the transaction
                        trans.Commit();
                        numCommits++;
                        trans = connection.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted);
                        // reset counter
                        tb = 0;
                    }
                }
                ct++;
            }
        }
        // Commit any final partial batch.
        if (tb > 0)
        {
            trans.Commit();
            numCommits++;
        }
        UpdateStatus("Finished writing blocks to database ", 100.0);
    }
    catch (Exception ex)
    {
        UpdateStatus("Error writing blocks to database\n\n " + ex.ToString(), 0);
    }
    finally
    {
        try
        {
            connection.Close();
        }
        catch { }
    }
}
/// <summary>
/// Add block data to the model, provided by the supplied file stream and format definition.
/// Builds one INSERT per block line from the column map, executes them inside transactions
/// committed every <paramref name="batchSize"/> rows, and collects the distinct values seen
/// in the column mapped to "Domain".
/// </summary>
/// <param name="mos">Import status accumulator; receives counts and any errors.</param>
/// <param name="bmFileStream">Open stream over the block model data.</param>
/// <param name="importMap">Column mapping describing the input layout.</param>
/// <param name="blockModelGUID">Identifier of the target model (not used directly here).</param>
/// <param name="batchSize">Number of inserts per transaction commit.</param>
/// <param name="UpdateStatus">Progress callback (message, percent complete).</param>
/// <param name="numLines">Approximate number of lines, for percentage calculation.</param>
/// <param name="connString">Database connection string.</param>
/// <returns>The distinct domain values encountered in the data.</returns>
internal List<string> AddBlockData(ModelImportStatus mos, Stream bmFileStream, ImportDataMap importMap, Guid blockModelGUID, int batchSize, Action<string, double> UpdateStatus, int numLines, string connString)
{
    // iterate through the data lines
    int ct = 1;
    int linesRead = 0;
    int total = 0;
    SqlConnection connection = null;
    List<string> uniqueDomains = new List<string>();
    // get a connection to the database
    try
    {
        int domainColIDX = -1;
        // find the column ID for the specified Domain field, as we need to capture this list.
        foreach (ColumnMap cm in importMap.columnMap)
        {
            if (cm.targetColumnName.Trim().Equals("Domain"))
            {
                domainColIDX = cm.sourceColumnNumber;
            }
        }
        connection = new SqlConnection(connString);
        connection.Open();
        int numCommits = 0;
        SqlTransaction trans;
        // NOTE(review): the transaction is never rolled back in the catch path — a partial
        // batch is left pending until the connection closes; confirm intended.
        trans = connection.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted);
        List<SqlCommand> commands = new List<SqlCommand>();
        int tb = 0;
        int transactionBatchLimit = batchSize;
        // wrap the supplied stream in a reader
        StreamReader sr = null;
        try
        {
            sr = new StreamReader(bmFileStream);
        }
        catch (Exception ex)
        {
            mos.AddErrorMessage("Error getting data stream for input model:\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
        }
        string line = null;
        float pct = 0;
        float bct = 1;
        // report every X blocks
        int repCount = 0;
        int reportOnBlock = 500;
        float fNumLines = (float)numLines;
        // Dry-run switch: when false, commands are built but never executed or committed.
        bool commitToDB = true;
        if (sr != null)
        {
            while ((line = sr.ReadLine()) != null)
            {
                linesRead++;
                repCount++;
                if (repCount == reportOnBlock)
                {
                    repCount = 0;
                    // now report status
                    pct = (bct / fNumLines) * 100.0f;
                    UpdateStatus("Writing block " + bct + " to database", pct);
                }
                bct++;
                // Skip header lines before the map's configured data start line.
                if (ct >= importMap.dataStartLine)
                {
                    string statementPart1 = "INSERT INTO " + importMap.mapTargetPrimaryTable + " ";
                    string clauseValues = "";
                    string clauseParameters = "";
                    List<string> items = parseTestLine(line, importMap.inputDelimiter);
                    // now pick out all the mapped values
                    foreach (ColumnMap cmap in importMap.columnMap)
                    {
                        int colID = cmap.sourceColumnNumber;
                        string columnValue = cmap.defaultValue;
                        if (colID >= 0)
                        {
                            columnValue = items[colID];
                        }
                        // Capture the distinct domain values as they stream past.
                        // (Membership is tested on the trimmed value but the untrimmed
                        // value is stored — so variants differing only in whitespace
                        // may not all be recorded.)
                        if (cmap.sourceColumnNumber == domainColIDX)
                        {
                            if (!uniqueDomains.Contains(columnValue.Trim()))
                            {
                                uniqueDomains.Add(columnValue);
                            }
                        }
                        string targetCol = cmap.targetColumnName;
                        string targetTable = cmap.targetColumnTable;
                        clauseValues += "" + targetTable + "." + targetCol + ",";
                        if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                        {
                            // "-" is treated as missing: fall back to the default value or NULL.
                            if (columnValue.Equals("-"))
                            {
                                if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                {
                                    columnValue = cmap.defaultValue;
                                }
                                else
                                {
                                    columnValue = "NULL";
                                }
                            }
                            clauseParameters += columnValue + ",";
                        }
                        else
                        {
                            // NOTE(review): values are concatenated into the SQL text rather
                            // than parameterized — quoting/injection risk for text columns.
                            clauseParameters += "\'" + columnValue + "\',";
                        }
                    }
                    // Trim the trailing commas left by the loop above.
                    clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                    clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
                    string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                    SqlCommand sqc = new SqlCommand(commandText, connection, trans);
                    if (commitToDB)
                    {
                        sqc.ExecuteNonQuery();
                    }
                    tb++;
                    if (tb == transactionBatchLimit)
                    {
                        // commit batch, then renew the transaction
                        if (commitToDB)
                        {
                            total += tb;
                            trans.Commit();
                            numCommits++;
                            // trans = null;
                            trans = connection.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted);
                        }
                        // reset counter
                        tb = 0;
                    }
                }
                ct++;
            }
        }
        // Commit any final partial batch.
        if (tb > 0)
        {
            if (commitToDB)
            {
                total += tb;
                trans.Commit();
            }
            numCommits++;
        }
        UpdateStatus("Finished writing blocks to database ", 100.0);
    }
    catch (Exception ex)
    {
        UpdateStatus("Error writing blocks to database ", 0);
        mos.AddErrorMessage("Error writing block data at line "+linesRead+":\n" + ex.ToString());
        mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
    }
    finally
    {
        try
        {
            connection.Close();
        }
        catch (Exception ex)
        {
            mos.AddErrorMessage("Error closing conenction to database:\n"+ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
    }
    mos.RecordsImported = total;
    mos.linesReadFromSource = linesRead;
    return uniqueDomains;
}