Example #1
 public static void PopulateCMapShortcut(FormatSpecification.ImportDataMap importMap, Dictionary <string, int> columnIDX)
 {
     foreach (ColumnMap cm in importMap.columnMap)
     {
         if (cm.targetColumnName[0] != '[')
         {
             columnIDX.Add(cm.targetColumnName.Trim(), cm.sourceColumnNumber);
         }
     }
 }
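A minimal usage sketch for the method above; the map and dictionary instances here are illustrative and assume the caller sits in the same class (they are not taken from the examples):

     // Hypothetical caller: build a quick name-to-index lookup from an already loaded map.
     Dictionary<string, int> columnIndex = new Dictionary<string, int>();
     PopulateCMapShortcut(importMap, columnIndex);

     int xCol;
     if (columnIndex.TryGetValue("CentroidX", out xCol))
     {
         // xCol now holds the source column number mapped to CentroidX
     }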
Example #2
        public List <string> GetImportFileColumnsAsList(Guid guid, string bmFileName, string modelAlias)
        {
            List <string> columnNames = new List <string>();

            BaseImportTools bit = new BaseImportTools();

            string targetFolder;
            bool   attmemptModelLoad;
            string originalName = bmFileName;

            bmFileName = ExtractBlockModelFromZip(bmFileName, out targetFolder, out attmemptModelLoad);
            IStorageFile bmFile       = _storageProvider.GetFile(bmFileName);
            Stream       bmFileStream = bmFile.OpenRead();

            NKD.Import.FormatSpecification.ImportDataMap idm = null;
            double _originX = -1;
            double _originY = -1;
            double _originZ = -1;

            try
            {
                StreamReader sr            = new StreamReader(bmFileStream);
                string       headerLine    = "";
                string       firstDataLine = "";
                if (sr != null)
                {
                    headerLine    = sr.ReadLine();
                    firstDataLine = sr.ReadLine();

                    bit.ParseDataLinesForOrigins(headerLine, firstDataLine, ',', out _originX, out _originY, out _originZ);
                    // auto-generate a format definition based on Goldfields' typical input column data
                    idm = bit.AutoGenerateFormatDefinition(headerLine, ',');
                }
                sr.Close();
            }
            catch
            {
            }

            if (idm != null)
            {
                foreach (ColumnMap cm in idm.columnMap)
                {
                    columnNames.Add(cm.sourceColumnName);
                }
            }
            return(columnNames);
        }
Example #3
        public static void SaveImportMap(ImportDataMap importDataMap, string filename)
        {

            // check the map is not null
            if (importDataMap != null && filename != null)
            {
                Type importDatMapType = importDataMap.GetType();

                var serializer = new XmlSerializer(importDatMapType);
                using (var definitionWriter = XmlWriter.Create(filename))
                {
                    serializer.Serialize(definitionWriter, importDataMap);
                }

            }

        }
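For completeness, the reverse operation can be sketched with the same XmlSerializer. The real library appears to expose its own loader (ImportMapIO.LoadImportMap is referenced, commented out, in Example #21), so treat this as an illustrative stand-in only:

        public static ImportDataMap LoadImportMapSketch(string filename)
        {
            // Assumption: the map was written by SaveImportMap above, so plain
            // XmlSerializer deserialization of ImportDataMap is sufficient.
            var serializer = new XmlSerializer(typeof(ImportDataMap));
            using (var reader = XmlReader.Create(filename))
            {
                return (ImportDataMap)serializer.Deserialize(reader);
            }
        }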
Example #4
        /// <summary>
        /// Carry out the block model import
        /// </summary>
        /// <param name="SelectedBMFile"></param>
        /// <param name="SelectedFormatBMFile"></param>
        /// <param name="importMap"></param>
        /// <param name="rawFileReader"></param>
        /// <returns></returns>
        internal bool DoBMImport(string SelectedBMFile, string SelectedFormatBMFile, ImportDataMap importMap, RawFileReader rawFileReader, string NKDProjectID, string modelAlias)
        {
            BaseImportTools bit = new BaseImportTools();
            int cxColumnID = importMap.GetColumnIDMappedTo("CentroidX");
            int cyColumnID = importMap.GetColumnIDMappedTo("CentroidY");
            int czColumnID = importMap.GetColumnIDMappedTo("CentroidZ");
            
            ColumnStats xOrigin = rawFileReader.GetDimensions(cxColumnID);
            ColumnStats yOrigin = rawFileReader.GetDimensions(cyColumnID);
            ColumnStats zOrigin = rawFileReader.GetDimensions(czColumnID);

            int approxNumLines = xOrigin.count;


            Stream bmFileStream = new FileStream(SelectedBMFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
           // Stream bmFileStream = new FileStream(SelectedBMFile, FileMode.Open);
            ModelImportStatus mos = new ModelImportStatus();
            Guid newModelGuid = Guid.NewGuid();
            
            Guid authorGuid = new Guid();
            List<string> status = bit.PerformBMImport(mos, newModelGuid, bmFileStream, null, importMap, xOrigin.min, yOrigin.min, zOrigin.min, backgroundWorker, approxNumLines, NKDProjectID, modelAlias, authorGuid, ConnectionString);
            return true;
        }
Example #5
        public ModelImportStatus PerformBMAppend(System.IO.Stream bmStream, Guid bmGuid, string alias, string columnNameToImport, int columnIndexToImport, string connString, char delimiter)
        {
            // TODO: read stream and write updates to database

            // get the next column to write to - search meta data to get the list of occupied columns
            using (var entityObj = new NKDC(connString, null))
            {
                List<BlockModelMetadata> d = new List<BlockModelMetadata>();
                var o = entityObj.BlockModelMetadatas.Where(f => f.BlockModelID == bmGuid && f.IsColumnData == true).Select(f => (string)f.BlockModelMetadataText).ToArray();
                // yuk, ugly hack to get the next column to update into.  In the long run, use normalised data as it will be much easier
                int lastIndex = 0;
                foreach (string s in o)
                {
                    if (s.StartsWith("Numeric"))
                    {
                        string endBit = s.Substring(7);
                        int ival = -1;
                        bool parsed = int.TryParse(endBit, out ival);
                        if (parsed)
                        {
                            lastIndex = Math.Max(ival, lastIndex);
                        }

                    }
                }
                string colToInsertTo = "Numeric" + (lastIndex + 1);
                //TODO: add this new meta data item into the database

                //TODO: update the data within the database itself
                ImportUtils.BlockImport dbIm = new ImportUtils.BlockImport();
                ImportDataMap idm = new ImportDataMap();
                idm.columnMap = new List<ColumnMap>();
                idm.inputDelimiter = delimiter;
                idm.columnMap.Add(new ColumnMap(columnNameToImport, columnIndexToImport, "BlockModelBlock", colToInsertTo, ImportDataMap.NUMERICDATATYPE, null, null, null));
                dbIm.SetBlockModelMetaData(bmGuid, idm, connString);

                return dbIm.UpdateBlockData(bmStream, bmGuid, colToInsertTo, connString, delimiter);
            }

        }
Example #6
 private static int AutoGenColMap(string[] headerItems, ImportDataMap idm, string sourceName, string targetName, string dbArea, Dictionary<string, bool> autoMap)
 {
     int idx = FindItemInLine(headerItems, sourceName);
     if(idx > -1){
         
         idm.columnMap.Add(new ColumnMap(sourceName, idx, dbArea, targetName, ImportDataMap.NUMERICDATATYPE, null, null, null));
         if (autoMap != null) { autoMap[sourceName] = true; }
     }
     return idx;
 }
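FindItemInLine is not shown in these examples; a plausible stand-in is sketched below (an assumption — the real helper may handle trimming or case differently):

 private static int FindItemInLine(string[] headerItems, string name)
 {
     // Assumed behaviour: return the index of the header item matching 'name', or -1 if absent.
     for (int i = 0; i < headerItems.Length; i++)
     {
         if (string.Equals(headerItems[i].Trim(), name, StringComparison.OrdinalIgnoreCase))
         {
             return i;
         }
     }
     return -1;
 }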
Example #7
        // attempt to use the block model file header to automatically create a definition based on Goldfields' typical model formats
        public ImportDataMap AutoGenerateFormatDefinition(string headerLine, char delimeter)
        {
                       
            Dictionary<string, bool> autoMap = new Dictionary<string, bool>();
            string[] headerItems = headerLine.Split(new char[] { delimeter }, StringSplitOptions.None);
            // iterate through each item in the header and assign it to a target column. 
            foreach(string ss in headerItems){
                autoMap.Add(ss, false);
            }

            // manually here get the core BM fields.
            // DB field = [Domain]
            ImportDataMap idm = new ImportDataMap();
            idm.columnMap = new List<ColumnMap>();
            idm.MaxColumns = headerItems.Length;
            idm.inputDelimiter = delimeter;
            idm.mapTargetPrimaryTable = "BlockModelBlock";
            idm.dataStartLine = 2;

            List<string> dmFields = new List<string>();

            int idx = -1;   
            
            string dbArea = "BlockModel";

            idx = AutoGenColMap(headerItems, idm,  "DOMAIN", "Domain", dbArea, autoMap);
            idx = AutoGenColMap(headerItems, idm, "XC", "CentroidX", dbArea, autoMap);     
            idx = AutoGenColMap(headerItems, idm, "YC", "CentroidY", dbArea, autoMap);
            idx = AutoGenColMap(headerItems, idm, "ZC", "CentroidZ", dbArea, autoMap);
            idx = AutoGenColMap(headerItems, idm, "XINC", "LengthX", dbArea, autoMap);
            idx = AutoGenColMap(headerItems, idm, "YINC", "LengthY", dbArea, autoMap);
            idx = AutoGenColMap(headerItems, idm, "ZINC", "LengthZ", dbArea, autoMap);
            idx = AutoGenColMap(headerItems, idm, "DENSITY", "Density", dbArea, autoMap);
            idx = AutoGenColMap(headerItems, idm, "RESCAT", "ResourceCategory", dbArea, autoMap);
            int num = 1;
            // auto-map all other fields into Numeric 1 to n
            foreach (KeyValuePair<string, bool> kvp in autoMap) {
                if ((bool)kvp.Value == false) {
                    string nm = kvp.Key;
                    // make a column map for this straight into a numeric column, and keep track of the numeric index
                    string targetFieldName = "Numeric" + num;
                    AutoGenColMap(headerItems, idm, nm, targetFieldName, dbArea, null);
                    num++;
                }
            }

            return idm;

        }
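To illustrate the auto-mapping on a typical header: the header string below is invented, but the recognised names (XC, YC, ZC, XINC, YINC, ZINC, DENSITY, DOMAIN, RESCAT) come from the code above:

            // Illustrative call only.
            string headerLine = "XC,YC,ZC,XINC,YINC,ZINC,DENSITY,DOMAIN,AU";
            ImportDataMap map = new BaseImportTools().AutoGenerateFormatDefinition(headerLine, ',');
            // XC..DOMAIN map to CentroidX..Domain; the unrecognised AU column falls through to Numeric1.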
Example #8
 public void PerformLithoImport(ModelImportStatus mos, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, System.ComponentModel.BackgroundWorker backgroundWorker, Guid NKDProjectID, string connectionString, int numLines, bool doOverwrite, bool checkForDuplicates)
 {
     this.currentWorker = backgroundWorker;
     // talk to the import lib to do the import
     DateTime startTime = DateTime.Now;
     int batchSize = 100;
     //UpdateStatus("Creating new NKD block model", 20.0);
     ImportUtils.LithoImport lithImp = null;
     lithImp = new ImportUtils.LithoImport();
     lithImp.AddLithoData(mos, bmFileStream, importMap, batchSize, UpdateStatus, numLines, connectionString, NKDProjectID, doOverwrite, checkForDuplicates);
 }
Example #9
        private ImportDataMap GetColumnDefs()
        {
            ImportDataMap impMap = new ImportDataMap();
            
            // get the selected headers
            string res = "";
            int colNum = 0;
            IList<DataGridColumn> columns = PreviewGrid.Columns;
            Dictionary<string, int> defs = new Dictionary<string, int>();
            int incrementor1 = 1;
            foreach (DataGridColumn dgc in columns)
            {
                ComboBox tmp = (ComboBox)dgc.Header;
                string ss = (string)tmp.SelectedValue;
                if (ss != null && ss.StartsWith("->") == true)
                {
                    int idx = ss.IndexOf('(');
                    string s1a = ss.Substring(2, idx-2);
                    string s1 = ss.Substring(0, idx);

                    int idx2 = ss.LastIndexOf(')');
                    string s2 = ss.Substring(idx + 1, (idx2 - idx) - 1);
                    res += "\nMap column " + colNum + " \'" + s2 + "\' to " + s1;
                    try
                    {
                        string colVal = s1.Substring(2).Trim();
                        if (colVal.StartsWith("[")) {
                            int lv = colVal.Trim().Length - 1;
                            string sx1 = colVal.Substring(0, lv);
                            sx1 += " " + incrementor1 + "]";
                            incrementor1++;
                            colVal = sx1;
                        }
                        defs.Add(colVal, colNum);
                        // get the specified type from the DB for this column
                        string dbType = LookupColumnDBType(targetMainDataType, s1a, bmRequiredFields);

                        impMap.columnMap.Add(new ColumnMap(s2, colNum, targetMainDataType, s1a, dbType, "", "", ImportDataMap.UNIT_NONE));
                    }
                    catch (Exception ex)
                    {
                        // duplicate key added; skip this column
                    }
                }
                colNum++;
            }


            // now search through the definitions and see which items are mapped

            gpl.Children.Clear();
            int ct = 0;
            foreach (ColumnMetaInfo rf in bmRequiredFields)
            {
                
                int col = GetValFromDict(defs, rf.columnName);
                SolidColorBrush scb = Brushes.Red;

                //if (rf.hasFK) {
                //    scb = Brushes.Gold;
                //}
                if (rf.isMandatory)
                {

                    GenerateLabel(ct, rf.columnName, col, scb);
                    ct++;
                }
            }

            foreach (ColumnMetaInfo rf in bmOptionalFields)
            {
                
                int col = GetValFromDict(defs, rf.columnName);
                SolidColorBrush scb = Brushes.Orange;

                //if (rf.hasFK)
                //{
                //    scb = Brushes.Gold;
                //}
                GenerateLabel(ct, rf.columnName, col, scb);
                ct++;
            }

            // now search the list to perform assignments
           
                //columnDefs.bmX = GetValFromDict(defs, "X");
                //columnDefs.bmY = GetValFromDict(defs, "Y");
                //columnDefs.bmZ = GetValFromDict(defs, "Z");
                //columnDefs.bmXINC = GetValFromDict(defs, "X width");
                //columnDefs.bmYINC = GetValFromDict(defs, "Y width");
                //columnDefs.bmZINC = GetValFromDict(defs, "Z width");
                //columnDefs.bmZone = GetValFromDict(defs, "Domain");
                //columnDefs.bmDensity = GetValFromDict(defs, "Density");
                //columnDefs.bmGradeAttributes = new int[1];
                //columnDefs.bmGradeAttributes[0] = GetValFromDict(defs, "Variable");

                //int ct = 0;
                //foreach (string rf in bmRequiredFields)
                //{
                //    GenerateLabel(ct, rf, -1);
                //    ct++;
                //}
                //GenerateLabel(0, "X",columnDefs.bmX);
                //GenerateLabel(1, "Y", columnDefs.bmY);
                //GenerateLabel(2, "Z", columnDefs.bmZ);
                
                //GenerateLabel(3, "X width", columnDefs.bmXINC);
                //GenerateLabel(4, "Y width", columnDefs.bmYINC);
                //GenerateLabel(5, "Z width", columnDefs.bmZINC);
                //GenerateLabel(6, "Domain", columnDefs.bmZone);
                //GenerateLabel(7, "Variable", columnDefs.bmGradeAttributes[0]);

                //GenerateLabel(8, "Density", columnDefs.bmDensity);

            
            return impMap;

        }
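The header combo items parsed above are expected to look like "->Target(Source)"; a small standalone illustration of that parse follows (the sample string is invented):

            // Illustrative only: mirrors the IndexOf/Substring logic in GetColumnDefs.
            string ss = "->CentroidX(XC)";
            int idx = ss.IndexOf('(');
            int idx2 = ss.LastIndexOf(')');
            string target = ss.Substring(2, idx - 2);                 // "CentroidX"
            string source = ss.Substring(idx + 1, (idx2 - idx) - 1);  // "XC"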
Example #10
        /// <summary>
        /// Import the block model. This is ultimately carried out through the import library, which must be
        /// passed the streams for the import file (and possibly the format definition).
        /// </summary>
        /// <param name="bmFileName"></param>
        /// <param name="formatFileName"></param>
        /// <param name="projectID"></param>
        /// <param name="alias"></param>
        /// <param name="authorGuid"></param>
        /// <param name="res2"></param>
        /// <param name="domains"></param>
        /// <param name="targetFolder"></param>
        /// <param name="attmemptModelLoad"></param>
        /// <param name="notes"></param>
        /// <param name="stage"></param>
        /// <returns></returns>
        private ModelImportStatus  DoNewModelImport(string bmFileName, string formatFileName, string projectID, string alias, Guid authorGuid, ref List <string> domains, string targetFolder, bool attmemptModelLoad, string notes, string stage, Guid stageMetaID)
        {
            ModelImportStatus mos = new ModelImportStatus();

            mos.finalErrorCode = ModelImportStatus.OK;
            double          _originX = -1;
            double          _originY = -1;
            double          _originZ = -1;
            BaseImportTools bit      = new BaseImportTools();

            if (attmemptModelLoad)
            {
                //IStorageFile formatFile = _storageProvider.GetFile(formatFileName);
                IStorageFile bmFile           = _storageProvider.GetFile(bmFileName);
                Stream       bmFileStream     = bmFile.OpenRead();
                Stream       bmTempFileStream = bmFile.OpenRead();
                // try and automatically detect the origin coordinates from the file itself - a datamine file
                // always carries the origin coords in XMORIG, YMORIG, ZMORIG

                NKD.Import.FormatSpecification.ImportDataMap idm = null;
                try
                {
                    StreamReader sr            = new StreamReader(bmTempFileStream);
                    string       headerLine    = "";
                    string       firstDataLine = "";
                    if (sr != null)
                    {
                        headerLine    = sr.ReadLine();
                        firstDataLine = sr.ReadLine();
                        bit.ParseDataLinesForOrigins(headerLine, firstDataLine, ',', out _originX, out _originY, out _originZ);
                        // auto-generate a format definition based on Goldfields' typical input column data
                        idm = bit.AutoGenerateFormatDefinition(headerLine, ',');
                    }
                    sr.Close();
                }
                catch (Exception ex) {
                    mos.AddWarningMessage("Unable to auto detect origin and other format information from the file\n\n" + ex.ToString());
                }
                var opts = new TransactionOptions();
                opts.IsolationLevel = System.Transactions.IsolationLevel.ReadUncommitted;
                using (new TransactionScope(TransactionScopeOption.Suppress, opts))
                {
                    try
                    {
                        Guid blockModelGUID = Guid.NewGuid();
                        mos.modelID = blockModelGUID;
                        domains     = bit.PerformBMImport(mos, blockModelGUID, bmFileStream, null, idm, _originX, _originY, _originZ, null, 1000, projectID, alias, authorGuid, _users.ApplicationConnectionString);
                        List <Tuple <string, string> > doms = new List <Tuple <string, string> >();
                        string domainColumnName             = "Domain";
                        foreach (string ss in domains)
                        {
                            doms.Add(new Tuple <string, string>(domainColumnName, ss));
                        }
                        UpdateDomains(doms, blockModelGUID);
                        AddModelNotes(notes, blockModelGUID);
                        UpdateStage(blockModelGUID, stageMetaID, stage);
                    }
                    catch (Exception ex)
                    {
                        mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
                        mos.AddErrorMessage("Error importing block model:\n" + ex.ToString());
                    }
                }

                //TODO call into import library with the stream object for the import

                bmFileStream.Close();
                _storageProvider.DeleteFile(bmFileName);
                _storageProvider.DeleteFolder(targetFolder);
            }
            return(mos);
        }
Example #11
        /// <summary>
        /// Add block model data
        /// </summary>
        /// <param name="textInputDataFile"></param>
        /// <param name="testMap"></param>
        /// <param name="blockModelGUID"></param>
        /// <param name="batchSize"></param>
        /// <param name="UpdateStatus"></param>
        /// <param name="numLines"></param>
        internal void AddBlockData(string textInputDataFile, ImportDataMap testMap, Guid blockModelGUID, int batchSize, Action<string, double> UpdateStatus, int numLines, string connString)
        {

            // iterate through the data lines
            int ct = 1;
            SqlConnection connection = null;
            // get a connection to the database
            try
            {
                connection = new SqlConnection(connString);
                connection.Open();
                int numCommits = 0;
                SqlTransaction trans;
                trans = connection.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted);
                List<SqlCommand> commands = new List<SqlCommand>();
                int tb = 0;
                int transactionBatchLimit = batchSize;

                // open the filestream and read the first line
                StreamReader sr = null;
                FileStream fs = null;
                try
                {
                    fs = new FileStream(textInputDataFile, FileMode.Open, FileAccess.Read, FileShare.Read);
                    sr = new StreamReader(fs);
                }
                catch (FileNotFoundException)
                {
                    // rethrow with the original stack trace intact
                    throw;
                }
                catch (Exception)
                {
                    throw;
                }
                string line = null;
                float pct = 0;
                float bct = 1;

                // report every X blocks
                int repCount = 0;
                int reportOnBlock = 1000;
                float fNumLines = (float)numLines;
                if (sr != null)
                {
                    while ((line = sr.ReadLine()) != null)
                    {
                        repCount++;
                        
                        if (repCount == reportOnBlock) {
                            repCount = 0;
                            // now report status
                            pct = ( bct / fNumLines) * 100.0f;
                            UpdateStatus("Writing block " + bct + " to database" , pct);
                        }
                        bct++;

                        if (ct >= testMap.dataStartLine)
                        {
                            string statementPart1 = "INSERT INTO " + testMap.mapTargetPrimaryTable + " ";
                            string clauseValues = "";
                            string clauseParameters = "";

                            List<string> items = parseTestLine(line, testMap.inputDelimiter);
                            // now pick out all the mapped values
                            foreach (ColumnMap cmap in testMap.columnMap)
                            {
                                int colID = cmap.sourceColumnNumber;
                                string columnValue = cmap.defaultValue;
                                if (colID >= 0)
                                {
                                    columnValue = items[colID];
                                }
                                string targetCol = cmap.targetColumnName;
                                string targetTable = cmap.targetColumnTable;
                                clauseValues += "" + targetTable + "." + targetCol + ",";


                                if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                                {
                                    if (columnValue.Equals("-")) {
                                        if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                        {
                                            columnValue = cmap.defaultValue;
                                        }
                                        else
                                        {
                                            columnValue = "NULL";
                                        }
                                    }
                                    clauseParameters += columnValue + ",";
                                }
                                else
                                {
                                    clauseParameters += "\'" + columnValue + "\',";
                                }
                            }
                            // now just a hack to remove the final comma from the query
                            clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                            clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);

                            string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                            SqlCommand sqc = new SqlCommand(commandText, connection, trans);

                            //commands.Add(sqc);
                            sqc.ExecuteNonQuery();
                            tb++;
                            if (tb == transactionBatchLimit)
                            {
                                // commit batch, then renew the transaction
                                trans.Commit();
                                numCommits++;
                                //   trans = null;
                                trans = connection.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted);
                                // reset counter
                                tb = 0;
                            }
                        }

                        ct++;

                    }
                }
                if (tb > 0)
                {
                    trans.Commit();
                    numCommits++;
                }
                UpdateStatus("Finished writing blocks to database ", 100.0);

            }
            catch (Exception ex)
            {
                UpdateStatus("Error writing blocks to database\n\n " + ex.ToString(), 0);
            }
            finally {
                try { connection.Close(); }
                catch { }
            }
           
            
            

        }
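The per-row INSERT above is assembled by string concatenation; a parameterized alternative is sketched below. This is not the library's code — just an illustrative helper taking the same pieces the loop already has in hand (it assumes the usual System.Linq, System.Data.SqlClient and System.Collections.Generic usings):

        // Sketch only: values travel as SqlParameters instead of quoted literals;
        // table and column names still come from the ImportDataMap, mirroring the loop above.
        private static void InsertRowParameterized(SqlConnection connection, SqlTransaction trans,
            ImportDataMap map, List<string> items)
        {
            var colNames = new List<string>();
            var cmd = new SqlCommand { Connection = connection, Transaction = trans };
            int p = 0;
            foreach (ColumnMap cmap in map.columnMap)
            {
                string val = cmap.sourceColumnNumber >= 0 ? items[cmap.sourceColumnNumber] : cmap.defaultValue;
                // keep the original "-" convention for numeric columns: default value, else NULL
                if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE) && val == "-")
                {
                    val = string.IsNullOrEmpty(cmap.defaultValue) ? null : cmap.defaultValue;
                }
                colNames.Add(cmap.targetColumnTable + "." + cmap.targetColumnName);
                cmd.Parameters.AddWithValue("@p" + p, (object)val ?? DBNull.Value);
                p++;
            }
            cmd.CommandText = "INSERT INTO " + map.mapTargetPrimaryTable +
                " (" + string.Join(",", colNames) + ") VALUES (" +
                string.Join(",", Enumerable.Range(0, p).Select(i => "@p" + i)) + ")";
            cmd.ExecuteNonQuery();
        }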
Example #12
        private void PresetDimensionData(ImportDataMap impMap)
        {
            var rawFileReader = new RawFileReader((SelectedFile.ToLower().IndexOf(".csv") > -1) ? ',' : '\t');
            int cxColumnID = impMap.GetColumnIDMappedTo("CentroidX");
            int cyColumnID = impMap.GetColumnIDMappedTo("CentroidY");
            int czColumnID = impMap.GetColumnIDMappedTo("CentroidZ");

            int xincColumnID = impMap.GetColumnIDMappedTo("LengthX");
            int yincColumnID = impMap.GetColumnIDMappedTo("LengthY");
            int zincColumnID = impMap.GetColumnIDMappedTo("LengthZ");
            PhysicalDimensions pd = new PhysicalDimensions();
            if (cxColumnID > -1)
            {
                ColumnStats xOrigin = rawFileReader.GetDimensions(cxColumnID);
                pd.originX = xOrigin.min;
            }
            if (cyColumnID > -1)
            {
                ColumnStats yOrigin = rawFileReader.GetDimensions(cyColumnID);
                pd.originY = yOrigin.min;
            }
            if (czColumnID > -1)
            {
                ColumnStats zOrigin = rawFileReader.GetDimensions(czColumnID);
                pd.originZ = zOrigin.min;
            }
            if (xincColumnID > -1)
            {
                ColumnStats xInc = rawFileReader.GetDimensions(xincColumnID);
                pd.blockXWidth = xInc.max;
            }
            if (yincColumnID > -1)
            {
                ColumnStats yInc = rawFileReader.GetDimensions(yincColumnID);
                pd.blockYWidth = yInc.max;
            }
            if (zincColumnID > -1)
            {
                ColumnStats zInc = rawFileReader.GetDimensions(zincColumnID);
                pd.blockZWidth = zInc.max;
            }
            BlockDimensionsControl.SetBlockDimensions(pd);
        }
Example #13
        private string GetPreviousDefaultFromMap(ImportDataMap oldMap, ColumnMap mi)
        {

            string defaultVal = "";
            if (oldMap != null)
            {
                foreach (ColumnMap cm in oldMap.columnMap)
                {
                    if (cm.sourceColumnName.Trim().Equals(mi.sourceColumnName.Trim()))
                    {
                        defaultVal = cm.defaultValue;
                        break;
                    }
                }
            }

            return defaultVal;
        }
Example #14
        private ImportDataMap GenerateImportDataMap(string fileName, string mapTargetPrimaryTable, char delimeter, int maxCols)
        {
            ObservableCollection<ColumnMap> inMapCols = (ObservableCollection<ColumnMap>)DataGridColumnMap.ItemsSource;

            string res = "";
            res += inMapCols.Count;

            ImportDataMap idm = new ImportDataMap();
            foreach (ColumnMap cm in inMapCols)
            {
                if (cm.sourceColumnName != null && cm.sourceColumnName.Trim().Length > 0)
                {
                    idm.columnMap.Add(cm);
                }
            }
            idm.dataStartLine = 2;
            idm.mapDate = System.DateTime.Now;
            idm.mapName = "";
            idm.mapTargetPrimaryTable = mapTargetPrimaryTable;
            idm.inputDelimiter = delimeter;
            idm.mapOriginalDataFile = fileName;
            idm.MaxColumns = maxCols;

            return idm;
        }
Example #15
        private string GetPreviousTypeFromMap(ImportDataMap oldMap, ColumnMap mi)
        {

            string importDataType = mi.importDataType;
            if (oldMap != null)
            {
                foreach (ColumnMap cm in oldMap.columnMap)
                {
                    if (cm.sourceColumnName.Trim().Equals(mi.sourceColumnName.Trim()))
                    {
                        importDataType = cm.importDataType;
                        break;
                    }
                }
            }

            return importDataType;
        }
Example #16
        internal void SetMap(ImportDataMap impMap)
        {
            ImportDataMap oldMap = null;
            if (DataGridColumnMap.ItemsSource != null)
            {
                oldMap = GenerateImportDataMap(impMap.mapOriginalDataFile, impMap.mapTargetPrimaryTable, impMap.inputDelimiter, impMap.MaxColumns);
            }


            DataGridColumnMap.ItemsSource = null;
            
            sourceColumns = new ObservableCollection<string>();
            targetColumns = new ObservableCollection<string>();
            foreach (ColumnMap mi in impMap.columnMap)
            {
                sourceColumns.Add(mi.sourceColumnName);
                targetColumns.Add(mi.targetColumnName);
                
                if (mi.importDataType == null) {
                    mi.importDataType = ImportDataMap.NUMERICDATATYPE;
                }else{
                    // try and get last used type
                    if (oldMap != null && oldMap.columnMap.Count > 0)
                    {
                        mi.importDataType = GetPreviousTypeFromMap(oldMap, mi);
                    }
                }
                 
                
                if (mi.defaultValue == null || mi.defaultValue.Trim().Length == 0) {
                    mi.defaultValue = GetPreviousDefaultFromMap(oldMap, mi);
                }
            }
            dataTypeList = new ObservableCollection<string>();
            dataTypeList.Add(ImportDataMap.NUMERICDATATYPE);
            dataTypeList.Add(ImportDataMap.TEXTDATATYPE);
            dataTypeList.Add(ImportDataMap.TIMESTAMPDATATYPE);

            DataGridColumnMap.ItemsSource = null;
            DataGridColumnMap.Items.Clear();

            unitTypesList = new ObservableCollection<string>();
            unitTypesList.Add("");
            unitTypesList.Add(ImportDataMap.UNIT_PCT);
            unitTypesList.Add(ImportDataMap.UNIT_PPM);
            


            cmaps = new ObservableCollection<ColumnMap>();
            foreach (ColumnMap cm in impMap.columnMap)
            {
                cmaps.Add(cm);

            }
            DataGridColumnMap.ItemsSource = cmaps;

            DataGridColumnMap.UpdateLayout();


        }
Example #17
        internal FormatLoadStatus SetMappingFromImportDataMap(ImportDataMap idm)
        {
            FormatLoadStatus fms = new FormatLoadStatus();
            if (idm != null) {
                fms.LoadStatus = FormatLoadStatus.LOAD_OK;
            }
            int mapCount = 0;
            foreach(ColumnMap cm in idm.columnMap){
                string sourceColName = cm.sourceColumnName;
                string targetMappingName = cm.targetColumnName;
                int sourceColNum = cm.sourceColumnNumber;
                DataGridColumn dgc = PreviewGrid.Columns[sourceColNum];
                ComboBox tmp = (ComboBox)dgc.Header;
                int i = 0;
                bool foundMatch = false;
                foreach (string ss in tmp.Items) {
                    if (i > 1)
                    {
                        int idx = ss.IndexOf('(');
                        string s1a = ss.Substring(2, idx - 2);
                        string s1 = ss.Substring(0, idx);

                        int idx2 = ss.IndexOf(')');
                        string s2 = ss.Substring(idx + 1, (idx2 - idx) - 1);

                        if (s1a.Equals(targetMappingName))
                        {
                            tmp.SelectedIndex = i;
                            mapCount++;
                            foundMatch = true;
                            break;
                        }
                    }
                    i++;
                }
                if(foundMatch == false){
                    fms.WarningMessages.Add("No match for column "+sourceColName+" mapped to "+targetMappingName);
                }

            }

            if (mapCount != idm.columnMap.Count)
            {
                fms.MappingStatus = FormatLoadStatus.MAPPING_ASSIGNEMNT_WARNING;
                fms.MappingMessage = "Failed to apply all items from saved map to currently loaded data file.";
            }
            else
            {
                fms.MappingStatus = FormatLoadStatus.MAPPING_ASSIGNEMNT_OK;
            }
           
            dropDown_SelectionChanged(null, null);
            return fms;
        }
Example #18
        /// <summary>
        /// 
        /// </summary>
        /// <param name="bmDataFile"></param>
        /// <param name="selectedFormatBMFile"></param>
        /// <param name="importMap"></param>
        /// <param name="xOrigin"></param>
        /// <param name="yOrigin"></param>
        /// <param name="zOrigin"></param>
        /// <param name="worker"></param>
        /// <param name="approxNumLines"></param>
        /// <param name="NKDProjectID"></param>
        /// <param name="units"></param>
        /// <param name="connString"></param>
        /// <returns></returns>
        public string PerformBMImport(string bmDataFile, string selectedFormatBMFile, ImportDataMap importMap, double xOrigin, double yOrigin, double zOrigin, System.ComponentModel.BackgroundWorker worker, int approxNumLines, string NKDProjectID, string units, string connString)
        {
            this.currentWorker = worker;
            UpdateStatus("Connecting to NKD", 10.0);
            using (var entityObj = new NKDC(connString, null))
            {
                // talk to the import lib to do the import                
                var query = from BlockModel in entityObj.BlockModels select new { BlockModel.BlockModelID, BlockModel.OriginX, BlockModel.OriginY, BlockModel.OriginZ, BlockModel.ProjectID };

                List<string> cn = new List<string>();
                //For each field in the database (or property in Linq object)
                BlockModel ob = new BlockModel();
                foreach (PropertyInfo pi in ob.GetType().GetProperties())
                {
                    Type ty = pi.GetType();
                    String name = pi.Name;
                    cn.Add(name);
                }



                DateTime startTime = DateTime.Now;
                int batchSize = 100;
                UpdateStatus("Creating new NKD block model", 20.0);
                ImportUtils.BlockImport dbIm = new ImportUtils.BlockImport();

                Guid blockModelGUID = Guid.NewGuid();

                BlockModel xAdd = new BlockModel();
                xAdd.OriginX = (Decimal)xOrigin;                                   // TO-DO
                xAdd.OriginY = (Decimal)yOrigin;                                   // TO-DO
                xAdd.OriginZ = (Decimal)zOrigin;                                   // TO-DO


                xAdd.BlockModelID = blockModelGUID;
                xAdd.ProjectID = new Guid(NKDProjectID);       // TODO - allow user to pick size
                entityObj.BlockModels.AddObject(xAdd);
                entityObj.SaveChanges();
                UpdateStatus("Setting model meta data", 25.0);
                // add the meta data to identify all of the columns etc.
                List<BlockModelMetadata> blockColumnMetaData = dbIm.SetBlockModelMetaData(blockModelGUID, importMap, connString);

                // add the new BM guid to the column map as a default so that it is always entered
                importMap.columnMap.Add(new ColumnMap("", -1, "BlockModelBlock", "BlockModelID", ImportDataMap.TEXTDATATYPE, blockModelGUID.ToString(), null, units));

                // add the individual blocks
                dbIm.AddBlockData(bmDataFile, importMap, blockModelGUID, batchSize, UpdateStatus, approxNumLines, connString);
                //dbIm.AddBlockDataNorm(bmDataFile, importMap, blockModelGUID, batchSize, UpdateStatus, approxNumLines, blockColumnMetaData);

                DateTime endTime = DateTime.Now;
                long compVal = (endTime.Ticks - startTime.Ticks) / 1000;
                string message = "Started: " + startTime.ToShortTimeString() + " Ended: " + endTime.ToShortTimeString();

                long xval = compVal;

                return "";
            }
        }
Example #19
        /// <summary>
        /// Add block data to the model, provided via the supplied file stream and format definition
        /// </summary>
        /// <param name="bmFileStream"></param>
        /// <param name="importMap"></param>
        /// <param name="blockModelGUID"></param>
        /// <param name="batchSize"></param>
        /// <param name="UpdateStatus"></param>
        /// <param name="approxNumLines"></param>
        internal List<string> AddBlockData(ModelImportStatus mos, Stream bmFileStream, ImportDataMap importMap, Guid blockModelGUID, int batchSize, Action<string, double> UpdateStatus, int numLines, string connString)
        {
            // iterate through the data lines
            int ct = 1;
            int linesRead = 0;
            int total = 0;
            SqlConnection connection = null;
            List<string> uniqueDomains = new List<string>();
            // get a connection to the database
            try
            {
                int domainColIDX = -1;
                // find the column ID for the specified Domain field, as we need to capture this list.
                foreach (ColumnMap cm in importMap.columnMap) {
                    if (cm.targetColumnName.Trim().Equals("Domain")) {
                        domainColIDX = cm.sourceColumnNumber;
                    }
                }


                connection = new SqlConnection(connString);
                connection.Open();
                
                int numCommits = 0;
                SqlTransaction trans;
                trans = connection.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted);
                List<SqlCommand> commands = new List<SqlCommand>();
                int tb = 0;
                int transactionBatchLimit = batchSize;

                // open the filestream and read the first line
                StreamReader sr = null;
                //FileStream fs = null;
                try
                {
                    //fs = new FileStream(textInputDataFile, FileMode.Open, FileAccess.Read, FileShare.Read);
                    sr = new StreamReader(bmFileStream);
                }               
                catch (Exception ex)
                {
                    mos.AddErrorMessage("Error getting data stream for input model:\n" + ex.ToString());
                    mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
                }
                string line = null;
                float pct = 0;
                float bct = 1;

                // report every X blocks
                int repCount = 0;
                int reportOnBlock = 500;
                float fNumLines = (float)numLines;
                bool commitToDB = true;
                if (sr != null)
                {
                    while ((line = sr.ReadLine()) != null)
                    {                        
                        linesRead++;
                        repCount++;

                        if (repCount == reportOnBlock)
                        {
                            repCount = 0;
                            // now report status
                            pct = (bct / fNumLines) * 100.0f;
                            UpdateStatus("Writing block " + bct + " to database", pct);
                        }
                        bct++;

                        if (ct >= importMap.dataStartLine)
                        {
                            string statementPart1 = "INSERT INTO " + importMap.mapTargetPrimaryTable + " ";
                            string clauseValues = "";
                            string clauseParameters = "";

                            List<string> items = parseTestLine(line, importMap.inputDelimiter);
                            // now pick out all the mapped values
                            foreach (ColumnMap cmap in importMap.columnMap)
                            {
                                int colID = cmap.sourceColumnNumber;
                                string columnValue = cmap.defaultValue;
                                if (colID >= 0)
                                {
                                    columnValue = items[colID];
                                }
                                if (cmap.sourceColumnNumber == domainColIDX) {
                                    if (!uniqueDomains.Contains(columnValue.Trim())) {
                                        uniqueDomains.Add(columnValue.Trim());
                                    }
                                }
                                string targetCol = cmap.targetColumnName;
                                string targetTable = cmap.targetColumnTable;
                                clauseValues += "" + targetTable + "." + targetCol + ",";


                                if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                                {
                                    if (columnValue.Equals("-"))
                                    {
                                        if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                        {
                                            columnValue = cmap.defaultValue;
                                        }
                                        else
                                        {
                                            columnValue = "NULL";
                                        }
                                    }
                                    clauseParameters += columnValue + ",";
                                }
                                else
                                {
                                    clauseParameters += "\'" + columnValue + "\',";
                                }
                            }
                            // now just a hack to remove the final comma from the query
                            clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                            clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);

                            string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                            SqlCommand sqc = new SqlCommand(commandText, connection, trans);
                            if (commitToDB)
                            {
                                sqc.ExecuteNonQuery();
                            }
                            tb++;
                            if (tb == transactionBatchLimit)
                            {
                                // commit batch, then renew the transaction
                                if (commitToDB)
                                {
                                    total += tb;
                                    trans.Commit();
                                    numCommits++;
                                    //   trans = null;
                                    trans = connection.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted);
                                }
                                // reset counter
                                tb = 0;
                            }
                        }
                        ct++;
                    }
                }
                if (tb > 0)
                {
                    if (commitToDB)
                    {
                        total += tb;
                        trans.Commit();
                    }
                    numCommits++;
                }
                UpdateStatus("Finished writing blocks to database ", 100.0);
            }
            catch (Exception ex)
            {
                UpdateStatus("Error writing blocks to database ", 0);
                mos.AddErrorMessage("Error writing block data at line "+linesRead+":\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
            finally
            {
                try { connection.Close(); }
                catch (Exception ex) {
                    mos.AddErrorMessage("Error closing conenction to database:\n"+ex.ToString());
                    mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
                }
            }

            mos.RecordsImported = total;
            mos.linesReadFromSource = linesRead;

            return uniqueDomains;   
        }
Example #20
        internal ModelImportStatus DoLithoImport(string SelectedFile, string SelectedFormatFile, ImportDataMap importMap, RawFileReader rawFileReader, Guid NKDProjectID, bool doOverwrite, bool checkForDuplicates)
        {
            BaseImportTools bit = new BaseImportTools();
            ModelImportStatus mos = new ModelImportStatus();

            GeneralFileInfo gfi = new GeneralFileInfo();
            gfi.GeneralFileStats(SelectedFile);
            int numLines = gfi.numLines;


            //Stream fileStream = new FileStream(SelectedFile, FileMode.Open);
            Stream fileStream = new FileStream(SelectedFile, FileMode.Open, FileAccess.Read , FileShare.ReadWrite);
            bit.PerformLithoImport(mos, fileStream, null, importMap, this.backgroundWorker, NKDProjectID, ConnectionString, numLines, doOverwrite, checkForDuplicates);
            return mos;
        }
Example #21
        public List<string> PerformBMImport(ModelImportStatus mos, Guid blockModelGUID, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, double xOrigin, double yOrigin, double zOrigin, System.ComponentModel.BackgroundWorker worker, int approxNumLines, string NKDProjectID, string alias, Guid authorGuid, string connString)
        {
            this.currentWorker = worker;
            using (var entityObj = new NKDC(connString, null))
            {
                // talk to the import lib to do the import

                DateTime startTime = DateTime.Now;
                int batchSize = 1000;
                //UpdateStatus("Creating new NKD block model", 20.0);
                ImportUtils.BlockImport dbIm = null;
                try
                {
                    dbIm = new ImportUtils.BlockImport();
                    //ImportDataMap importMapLoaded = FormatSpecificationIO.ImportMapIO.LoadImportMap(ffFileStream);
                    BlockModel xAdd = new BlockModel();
                    xAdd.OriginX = (Decimal)xOrigin;                                   // TO-DO
                    xAdd.OriginY = (Decimal)yOrigin;                                   // TO-DO
                    xAdd.OriginZ = (Decimal)zOrigin;                                   // TO-DO
                    xAdd.Alias = alias;
                    // when on server, automatically pick up the author GUID and apply it to the model.
                    if (currentWorker == null)
                    {
                        xAdd.AuthorContactID = authorGuid;
                        xAdd.ResponsibleContactID = authorGuid;
                    }
                    xAdd.VersionUpdated = DateTime.UtcNow;

                    xAdd.BlockModelID = blockModelGUID;
                    xAdd.ProjectID = new Guid(NKDProjectID);       // TODO - allow user to pick size
                    entityObj.BlockModels.AddObject(xAdd);
                    entityObj.SaveChanges();
                    UpdateStatus("Setting model meta data", 25.0);
                    // add the meta data to identify all of the columns etc.
                }
                catch (Exception ex)
                {
                    mos.AddErrorMessage("Error setting block model defintion data. " + ex.ToString());
                }
                List<string> domains = new List<string>();
                if (dbIm != null)
                {
                    try
                    {
                        List<BlockModelMetadata> blockColumnMetaData = dbIm.SetBlockModelMetaData(blockModelGUID, importMap, connString);
                    }
                    catch (Exception ex)
                    {
                        mos.AddErrorMessage("Error setting block model meta data:\n" + ex.ToString());
                    }
                    try
                    {
                        // add the new BM guid to the column map as a default so that it is always entered
                        importMap.columnMap.Add(new ColumnMap("", -1, "BlockModelBlock", "BlockModelID", ImportDataMap.TEXTDATATYPE, blockModelGUID.ToString(), blockModelGUID.ToString(), ImportDataMap.UNIT_NONE));
                        // add the individual blocks
                        domains = dbIm.AddBlockData(mos, bmFileStream, importMap, blockModelGUID, batchSize, UpdateStatus, approxNumLines, connString);
                        // run this only in the Windows client (determined by the status of the worker thread at this stage)
                        if (currentWorker != null)
                        {
                            List<Tuple<string, string>> doms = new List<Tuple<string, string>>();
                            string domainColumnName = "Domain";
                            foreach (string ss in domains)
                            {
                                doms.Add(new Tuple<string, string>(domainColumnName, ss));
                            }
                            dbIm.UpdateDomains(doms, blockModelGUID);
                        }
                    }
                    catch (Exception ex)
                    {
                        mos.AddErrorMessage("Error adding block data:\n" + ex.ToString());
                    }

                }
                return domains;
            }
        }
Example #22
        internal ModelImportStatus DoCollarImport(string SelectedFile, string SelectedFormatBMFile, ImportDataMap importMap, RawFileReader rawFileReader, Guid NKDProjectID, bool overwrite)
        {
          
            
            BaseImportTools bit = new BaseImportTools();
            // get the current collar names in this project
            List<CollarInfo> existingHoles = this.GetHolesForProject(NKDProjectID);


            List<string> existingHoleNames = new List<string>();
            foreach (CollarInfo ci in existingHoles)
            {
                existingHoleNames.Add(ci.Name);
            }

            ModelImportStatus mos = new ModelImportStatus();
            Stream fileStream = new FileStream(SelectedFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); 
            //Stream fileStream = new FileStream(SelectedFile, FileMode.Open);
            bit.PerformCollarImport(mos, fileStream, null, importMap, this.backgroundWorker, NKDProjectID, ConnectionString, existingHoleNames, overwrite);
            return mos;
            
        }
Example #23
        public void PerformCollarImport(ModelImportStatus mos, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, System.ComponentModel.BackgroundWorker backgroundWorker, Guid NKDProjectID, string connString, List<string> existingHoleNames, bool overwrite)
        {
            this.currentWorker = null;

            // talk to the import lib to do the import

            DateTime startTime = DateTime.Now;
            int batchSize = 1000;
            //UpdateStatus("Creating new NKD block model", 20.0);
            ImportUtils.CollarImport collImp = null;

            collImp = new ImportUtils.CollarImport();
            int approxNumLines = 100;

            importMap.columnMap.Add(new ColumnMap("", -1, "Header", "ProjectID", ImportDataMap.TEXTDATATYPE, NKDProjectID.ToString(), NKDProjectID.ToString(), ImportDataMap.UNIT_NONE));
            collImp.AddCollarData(mos, bmFileStream, importMap, batchSize, UpdateStatus, approxNumLines, connString, existingHoleNames, NKDProjectID, overwrite);

        }
Example #24
        private void OverwriteSurveyRecord(ModelImportStatus mos, Dictionary<Guid, List<string>> rejectedLines, ImportDataMap importMap, string connectionString, Guid NKDProjectID, Action<string, double> UpdateStatus, Dictionary<string, string> holeWarningMessages)
        {
            SqlConnection connection = null;
            SqlConnection secondaryConnection = null;
            try
            {
                connection = new SqlConnection(connectionString);
                connection.Open();
                secondaryConnection = new SqlConnection(connectionString);
                secondaryConnection.Open();
                int numCommits = 0;
                SqlTransaction trans;
                trans = connection.BeginTransaction();
                List<SqlCommand> commands = new List<SqlCommand>();
                int tb = 0;
                int transactionBatchLimit = 10;
                // open the filestream and read the first line              
                float bct = 1;
                // report every X blocks
                int repCount = 0;
                //int reportOnBlock = 1000;
                float fNumLines = (float)rejectedLines.Count();


                // get the column containing the hole name 
                ColumnMap cmapHeader = importMap.FindItemsByTargetName("HeaderID");

                int headerIDX = cmapHeader.sourceColumnNumber;
                int numberOfHolesAdded = 0;
                int linesRead = 0;
                int ct = 1;

                // get all of the header IDs in one go before we try the insert

                Dictionary<string, Guid> holeIDLookups = CollarQueries.FindHeaderGuidsForProject(NKDProjectID);


                foreach (KeyValuePair<Guid,List<string>> kvp in  rejectedLines){
                    Guid surveyGUID = kvp.Key;
                    List<string> columnData = kvp.Value;


                    linesRead++;
                    repCount++;
                    bct++;

                    string statementPart1 = "UPDATE " + importMap.mapTargetPrimaryTable + " ";
                    string clauseValues = "";


                    // using the column map, pick out the hole name field and see if it is in the database already
                    string headerNameItem = columnData[headerIDX];
                    string headerGUID = "";
                    bool lv = holeIDLookups.ContainsKey(headerNameItem);
                    if (!lv)
                    {
                        // oops - no hole ID with this name - should not happen though!!
                    }
                    else
                    {
                        Guid holeGuid = new Guid();
                        holeIDLookups.TryGetValue(headerNameItem, out holeGuid);
                        headerGUID = holeGuid.ToString();
                    }

                    #region mappsearch
                    // now pick out all the mapped values
                    foreach (ColumnMap cmap in importMap.columnMap)
                    {
                        bool isFKColumn = cmap.hasFKRelation;
                        int colID = cmap.sourceColumnNumber;
                        string columnValue = cmap.defaultValue;
                        if (colID >= 0)
                        {
                            columnValue = columnData[colID];
                        }

                        string targetCol = cmap.targetColumnName;
                        // skip the mapped hole name (HeaderID) column - it is used only in the WHERE clause
                        if (targetCol.Trim().Equals("HeaderID"))
                        {
                            continue;
                        }
                        string targetTable = cmap.targetColumnTable;

                        clauseValues += "" + targetTable + "." + targetCol + "=";


                        if (isFKColumn)
                        {
                            // go and search for the appropriate value from the foreign key table
                            string newValue = ForeignKeyUtils.FindFKValueInDictionary(columnValue, cmap, secondaryConnection, true);
                            columnValue = newValue;
                            if (newValue != null && newValue.Trim().Length > 0)
                            {
                                clauseValues += "\'" + columnValue + "\',";
                            }
                            else
                            {
                                clauseValues += "NULL,";
                            }
                        }
                        else
                        {
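                            // numeric columns: substitute the mapped default (or NULL) when the source value is missing ('-' or blank)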
                            if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                            {
                                if (columnValue.Equals("-") || columnValue.Trim().Length == 0)
                                {
                                    if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                    {
                                        columnValue = cmap.defaultValue;
                                    }
                                    else
                                    {
                                        columnValue = "NULL";
                                    }
                                }

                            }
                            else
                            {
                                // text values must be quoted in the SQL statement
                                columnValue = "\'" + columnValue + "\'";
                            }

                            clauseValues += columnValue + ",";
                        }

                    }
                    #endregion
                    // trim the trailing comma from the SET clause
                    clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);

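                    // resulting statement shape (hypothetical table/column names): UPDATE Survey SET Survey.Azimuth=270,Survey.Dip=-60 WHERE SurveyID='...';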
                    string commandText = statementPart1 + "SET " + clauseValues + " WHERE SurveyID=\'" + surveyGUID + "\';";
                    SqlCommand sqc = new SqlCommand(commandText, connection, trans);
                    string msg = "";
                    //holeWarningMessages.TryGetValue(headerNameItem, out msg);
                    msg = "Survey for hole " + headerNameItem + " (" + clauseValues + ") was overwritten with new data";
                    holeWarningMessages[headerNameItem] = msg;

                    numberOfHolesAdded++;
                    if (commitToDB)
                    {
                        sqc.ExecuteNonQuery();
                    }
                    tb++;
                    if (tb == transactionBatchLimit)
                    {
                        // commit batch, then renew the transaction
                        if (commitToDB)
                        {
                            trans.Commit();
                            numCommits++;
                            //   trans = null;
                            trans = connection.BeginTransaction();
                        }
                        // reset counter
                        tb = 0;
                    }

                    ct++;
                }

                if (tb > 0)
                {
                    if (commitToDB)
                    {
                        trans.Commit();
                    }
                    numCommits++;
                }
                mos.recordsUpdated = numberOfHolesAdded;
                UpdateStatus("Finished writing collars to database ", 100.0);
            }

            catch (Exception ex)
            {
                UpdateStatus("Error writing collars to database ", 0);
                mos.AddErrorMessage("Error writing collar data at line " + rejectedLines.Count + ":\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
            finally
            {
                try
                {
                    connection.Close();
                    secondaryConnection.Close();


                }
                catch (Exception ex)
                {
                    mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
                    mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
                }
            }

        }
Ejemplo n.º 25
0
        internal List<BlockModelMetadata> SetBlockModelMetaData(Guid blockModelGUID, ImportDataMap testMap, string connString)
        {
            using (var entityObj = new NKDC(connString, null))
            {
                List<BlockModelMetadata> metaDataItems = new List<BlockModelMetadata>();

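                // create a Parameter plus BlockModelMetadata record for each mapped column so the imported fields can be identified later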
                foreach (ColumnMap cmap in testMap.columnMap)
                {
                    BlockModelMetadata metaData = new BlockModelMetadata();
                    metaData.BlockModelID = blockModelGUID;
                    metaData.BlockModelMetadataID = Guid.NewGuid();
                    metaData.IsColumnData = true;
                    Parameter param1 = new Parameter();
                    param1.ParameterName = cmap.targetColumnName;                   // target column name
                    param1.ParameterType = "FieldName";
                    if (entityObj.BlockModelMetadatas.Where(f => f.BlockModelID == blockModelGUID && f.Parameter.Description == cmap.sourceColumnName).Any())
                        param1.Description = cmap.sourceColumnName = string.Format("{0}_{1}", cmap.sourceColumnName, Guid.NewGuid());
                    else
                        param1.Description = cmap.sourceColumnName;                                   // source column name
                    param1.ParameterID = Guid.NewGuid();

                    if (cmap.sourceColumnName != null && cmap.sourceColumnName.ToLower().Contains("ppm"))
                    {
                        param1.UnitID = new Guid("E91773A4-2762-4EDE-8510-38F78FAF981D");// TODO: HACK - get the proper guid for the current unit type by querying database
                    }
                    else if (cmap.sourceColumnName != null && cmap.sourceColumnName.ToLower().Contains("pct"))
                    {
                        param1.UnitID = new Guid("AEDBBE0A-6A94-419F-8B43-A98CE942669A");// TODO: HACK - get the proper guid for the current unit type by querying database
                    }

                    metaData.BlockModelMetadataText = cmap.targetColumnName;
                    metaData.ParameterID = param1.ParameterID;
                    entityObj.Parameters.AddObject(param1);
                    entityObj.BlockModelMetadatas.AddObject(metaData);
                    metaDataItems.Add(metaData);
                    entityObj.SaveChanges();   // saving inside the loop persists each column's Parameter and metadata record immediately
                }
                return metaDataItems;
            }
        }