Example #1
 private void get_log_file_values(DataRow row, ge_log_file file)
 {
     file.Id     = (Guid)row["Id"];
     file.dataId = (Guid)row["dataId"];
     if (row["ReadingAggregates"] != DBNull.Value)
     {
         file.readingAggregates = (String)row["ReadingAggregates"];
     }
     if (row["FieldHeader"] != DBNull.Value)
     {
         file.fieldHeader = (String)row["FieldHeader"];
     }
     if (row["FileHeader"] != DBNull.Value)
     {
         file.fileHeader = (String)row["FileHeader"];
     }
     if (row["Comments"] != DBNull.Value)
     {
         file.Comments = (String)row["Comments"];
     }
     if (row["channel"] != DBNull.Value)
     {
         file.channel = (String)row["channel"];
     }
     if (row["SearchTemplate"] != DBNull.Value)
     {
         file.SearchTemplate = (String)row["SearchTemplate"];
     }
     if (row["templateId"] != DBNull.Value)
     {
         file.templateId = (Guid)row["templateId"];
     }
 }
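The repeated DBNull checks above could be collapsed into one helper. A minimal sketch, assuming the same System.Data column types as the example; GetValueOrDefault is a hypothetical name, not part of the project code.

 // Sketch: return default(T) when the column holds DBNull, otherwise cast.
 private static T GetValueOrDefault<T>(DataRow row, string column)
 {
     object value = row[column];
     return value == DBNull.Value ? default(T) : (T)value;
 }
 // e.g. file.Comments = GetValueOrDefault<String>(row, "Comments");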
Example #2
        private async Task <List <ge_log_file> > GetFiles(Guid dataId,
                                                          Boolean IncludeReadings = true)
        {
            if (dataId == Guid.Empty)
            {
                return(null);
            }


            var _logger = await _context.ge_data
                          .Include(d => d.project)
                          .AsNoTracking()
                          .SingleOrDefaultAsync(m => m.Id == dataId);

            dbConnectDetails cd = await getConnectDetails(_logger.projectId, logTables.DB_DATA_TYPE);

            if (cd == null)
            {
                return(null);
            }

            string dbConnectStr = cd.AsConnectionString();

            var dt_file2 = await Task.Run(() =>
            {
                using (SqlConnection cnn = new SqlConnection(dbConnectStr))
                {
                    cnn.Open();
                    dsTable <ge_log_file> ds_files = new logTables().file;
                    ds_files.setConnection(cnn);
                    ds_files.sqlWhere("dataId='" + dataId.ToString() + "'");

                    ds_files.getDataSet();
                    DataTable dt_file = ds_files.getDataTable();
                    return(dt_file);
                }
            });

            List <ge_log_file> local_log_files = new List <ge_log_file>();

            foreach (DataRow row in dt_file2.Rows)
            {
                ge_log_file file = new ge_log_file();
                get_log_file_values(row, file);

                if (IncludeReadings == true)
                {
                    file = await GetFile(file.dataId, file.channel, true);
                }

                local_log_files.Add(file);
            }


            return(local_log_files);
        }
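The dataId filter in GetFiles is built by concatenating strings through the custom dsTable API. As a general alternative, the same filter can be expressed as a parameterised query with plain ADO.NET; this is a sketch only, and the table name log_file is an assumption rather than the project's actual schema.

        // Sketch: parameterised WHERE clause via SqlCommand/SqlDataAdapter.
        // "log_file" is an assumed table name; the example above goes through dsTable instead.
        using (SqlConnection cnn = new SqlConnection(dbConnectStr))
        using (SqlCommand cmd = new SqlCommand("SELECT * FROM log_file WHERE dataId = @dataId", cnn))
        {
            cmd.Parameters.Add("@dataId", SqlDbType.UniqueIdentifier).Value = dataId;
            DataTable dt_file = new DataTable();
            new SqlDataAdapter(cmd).Fill(dt_file);   // Fill opens and closes the connection as needed
        }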
Example #3
 private void set_log_file_values(ge_log_file file, DataRow row)
 {
     row["Id"]                = file.Id;
     row["dataId"]            = file.dataId;
     row["fieldHeader"]       = file.fieldHeader;
     row["ReadingAggregates"] = file.readingAggregates;
     row["FileHeader"]        = file.fileHeader;
     row["Comments"]          = file.Comments;
     row["channel"]           = file.channel;
     row["templateId"]        = file.templateId;
     row["SearchTemplate"]    = file.SearchTemplate;
 }
        private async Task <int> saveFileLogger()
        {
            ge_log_file exist = await _logService.GetByDataId(Id, table);

            if (exist != null)
            {
                log_file.Id = exist.Id;
                return(await _logService.UpdateFile(log_file, true));
            }
            else
            {
                return(await _logService.CreateFile(log_file));
            }
        }
        public async Task <IActionResult> Post(string log_file, string origin_data, string format)
        {
            ge_log_file  f = null;
            ge_data      d = null;
            ge_data_file b = new ge_data_file();

            if (format == "json")
            {
                d             = JsonConvert.DeserializeObject <ge_data>(origin_data);
                f             = JsonConvert.DeserializeObject <ge_log_file>(log_file);
                d.filetype    = "text/json";
                d.fileext     = ".json";
                d.encoding    = "utf-8";
                b.data_string = log_file;
            }

            if (format == "xml")
            {
                d          = (ge_data)origin_data.DeserializeFromXmlString <ge_data>();
                f          = (ge_log_file)log_file.DeserializeFromXmlString <ge_log_file>();
                d.filetype = "text/xml";
                d.fileext  = ".xml";
                d.encoding = "utf-8";
                b.data_xml = log_file;
            }

            // Guard: without this, d and f stay null (and throw below) when
            // format is neither "json" nor "xml".
            if (d == null || f == null)
            {
                return BadRequest($"Unsupported format: {format}");
            }
            var user = await GetUserAsync();

            d.Id = f.Id;
            string filename = d.filename.Substring(0, d.filename.IndexOf(".")) + ' ' + f.channel + ".xml";

            d.filename  = filename;
            d.filesize  = log_file.Length;
            d.createdDT = DateTime.Now;
            d.editedDT  = DateTime.Now;
            d.editedId  = user.Id;
            d.createdId = user.Id;

            string s1 = d.SerializeToXmlString <ge_data>();
            string s2 = b.SerializeToXmlString <ge_data_file>();

            var resp_post = await  new ge_dataController(_context,
                                                         _authorizationService,
                                                         _userManager,
                                                         _env,
                                                         _ge_config).Post(s1, s2, "xml");

            return(resp_post);
        }
Example #6
        public ge_data NewData(Guid projectId, string UserId, ge_log_file log_file, string saveAsFormat)
        {
            ge_data _data = NewData(projectId, UserId);

            ge_data_file _file = new ge_data_file();

            if (saveAsFormat == "text/xml")
            {
                //  _file.data_xml = SerializeToXmlString<ge_log_file>();
            }

            if (saveAsFormat == "json")
            {
                _file.data_string = JsonConvert.SerializeObject(log_file);
            }

            _data.file = _file;

            return(_data);
        }
Example #7
        public async Task <IActionResult> Put(string s1, Boolean IncludeReadings, string format)
        {
            ge_log_file log_file = null;

            if (format == "json")
            {
                log_file = JsonConvert.DeserializeObject <ge_log_file>(s1);
            }

            if (format == "xml")
            {
                log_file = s1.DeserializeFromXmlString <ge_log_file>();
            }

            var resp = await UpdateFile(log_file, IncludeReadings);

            if (resp == -1)
            {
                return BadRequest($"Unable to update log_file Id:{log_file.Id} dataId:{log_file.dataId} table:{log_file.channel}");
            }

            return(Ok(resp));
        }
Example #8
        public async Task <int> UpdateFile(ge_log_file data, Boolean includereadings)
        {
            var existing = await _unitOfWork.LoggerFile.FindByIdAsync(data.Id);

            if (existing == null)
            {
                return(-1);
            }

            var ret = await _unitOfWork.LoggerFile.UpdateAsync(data, includereadings);

            if (ret == 0 && includereadings == false)
            {
                return(await _unitOfWork.CommitAsync());
            }

            if (ret == 0 && includereadings == true)
            {
                return(await _unitOfWork.CommitBulkAsync());
            }

            return(-1);
        }
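A minimal caller sketch for the service method above, mirroring how saveFileLogger in Example #3 uses it; _logService and log_file are assumed to be available in the calling class.

        // Sketch: -1 means the file Id was not found or nothing was committed.
        int result = await _logService.UpdateFile(log_file, false);
        if (result == -1)
        {
            // handle the failed update here
        }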
Example #9
        public async Task <IActionResult> Post(string s1, string format)
        {
            ge_log_file log_file = null;

            if (format == "json")
            {
                log_file = JsonConvert.DeserializeObject <ge_log_file>(s1);
            }

            if (format == "xml")
            {
                log_file = s1.DeserializeFromXmlString <ge_log_file>();
            }

            var resp = await AddNewFile(log_file);

            if (resp == -1)
            {
                return BadRequest("Unable to add new log file");
            }

            return(Ok($"log file id:{log_file.Id} created"));
        }
Example #10
        private async Task <int> UpdateFile(ge_log_file file, Boolean IncludeReadings)
        {
            int NOT_OK = -1;
            int ret    = 0;

            if (file == null || file.dataId == Guid.Empty)
            {
                return(NOT_OK);
            }

            file.packFieldHeaders();
            file.packFileHeader();

            var _logger = await _context.ge_data
                          .Include(d => d.project)
                          .SingleOrDefaultAsync(m => m.Id == file.dataId);

            dbConnectDetails cd = await getConnectDetails(_logger.projectId, logTables.DB_DATA_TYPE);

            if (cd == null)
            {
                return(NOT_OK);
            }

            string dbConnectStr = cd.AsConnectionString();

            return(await Task.Run(() =>

            {
                using (SqlConnection cnn = new SqlConnection(dbConnectStr))
                {
                    dsTable <ge_log_reading> ds_readings = new logTables().reading;
                    dsTable <ge_log_file> ds_file = new logTables().file;
                    cnn.Open();
                    ds_file.setConnection(cnn);
                    ds_file.getDataTable();
                    ds_readings.setConnection(cnn);
                    ds_readings.getDataTable();
                    ds_file.sqlWhere("Id='" + file.Id + "'");
                    ds_file.getDataSet();
                    DataTable dt_file = ds_file.getDataTable();

                    if (dt_file == null)
                    {
                        return NOT_OK;
                    }

                    if (dt_file.Rows.Count == 0)
                    {
                        return NOT_OK;
                    }

                    DataRow file_row = dt_file.Rows[0];
                    set_log_file_values(file, file_row);
                    ret = ds_file.Update();

                    if (IncludeReadings)
                    {
                        ds_readings.sqlWhere("FileId='" + file.Id.ToString() + "'");
                        ds_readings.getDataSet();
                        DataTable dt_readings = ds_readings.getDataTable();
                        Boolean checkExisting = false;

                        if (dt_readings.Rows.Count > 0)
                        {
                            checkExisting = true;
                        }

                        foreach (ge_log_reading reading in file.readings)
                        {
                            DataRow row = null;
                            if (checkExisting == true)
                            {
                                if (reading.Id != Guid.Empty)
                                {
                                    row = dt_readings.Select($"Id='{reading.Id}'").SingleOrDefault();
                                }

                                if (row == null)
                                {
                                    row = dt_readings.Select($"ReadingDateTime='{String.Format("{0:yyyy-MM-dd HH:mm:ss.ffff}",reading.ReadingDateTime)}'").SingleOrDefault();
                                }
                            }

                            if (row == null)
                            {
                                row = ds_readings.NewRow();
                                reading.Id = Guid.NewGuid();
                                reading.fileId = file.Id;
                                ds_readings.addRow(row);
                            }
                            else
                            {
                                reading.Id = (Guid)row["Id"];
                                reading.fileId = file.Id;
                            }

                            set_log_reading_values(reading, row);
                        }

                        // dt_readings may still contain rows from a previous version of the ge_log_file:
                        // mark every row that was neither added nor modified above for deletion.
                        if (file.readings.Count() < dt_readings.Rows.Count)
                        {
                            foreach (DataRow row in dt_readings.Rows)
                            {
                                if (row.RowState != DataRowState.Added &&
                                    row.RowState != DataRowState.Modified)
                                {
                                    row.Delete();
                                }
                            }
                        }


                        ret = ret + ds_readings.Update();
                        return ret;
                    }
                    return ret;
                }
            }));
        }
Example #11
        private async Task <ge_log_file> GetFile(Guid dataId,
                                                 string table            = "data_pressure",
                                                 Boolean IncludeReadings = true)
        {
            if (dataId == Guid.Empty)
            {
                return(null);
            }


            var _logger = await _context.ge_data
                          .Include(d => d.project)
                          .AsNoTracking()
                          .SingleOrDefaultAsync(m => m.Id == dataId);

            dbConnectDetails cd = await getConnectDetails(_logger.projectId, logTables.DB_DATA_TYPE);

            if (cd == null)
            {
                return(null);
            }

            string dbConnectStr = cd.AsConnectionString();

            return(await Task.Run(() =>

            {
                using (SqlConnection cnn = new SqlConnection(dbConnectStr))
                {
                    cnn.Open();
                    dsTable <ge_log_reading> ds_readings = new logTables().reading;
                    dsTable <ge_log_file> ds_file = new logTables().file;
                    ds_file.setConnection(cnn);
                    ds_readings.setConnection(cnn);

                    // Multichannel transducers have up to 8 tables, which all share the same dataId

                    if (string.IsNullOrEmpty(table))
                    {
                        ds_file.sqlWhere("dataId='" + dataId.ToString() + "' and (channel is null or channel='')");
                    }
                    else
                    {
                        ds_file.sqlWhere("dataId='" + dataId.ToString() + "' and channel='" + table + "'");
                    }

                    ds_file.getDataSet();
                    DataTable dt_file = ds_file.getDataTable();

                    if (dt_file == null)
                    {
                        return null;
                    }

                    if (dt_file.Rows.Count == 0)
                    {
                        return null;
                    }

                    ge_log_file file = new ge_log_file();

                    DataRow row = dt_file.Rows[0];
                    get_log_file_values(row, file);


                    if (IncludeReadings)
                    {
                        ds_readings.sqlWhere("FileId='" + file.Id.ToString() + "'");
                        ds_readings.getDataSet();
                        DataTable dt_readings = ds_readings.getDataTable();
                        file.readings = new List <ge_log_reading>();

                        foreach (DataRow rrow in dt_readings.Rows)
                        {
                            ge_log_reading r = new ge_log_reading();
                            get_log_reading_values(rrow, r);
                            file.readings.Add(r);
                        }
                        file.OrderReadings();
                    }

                    file.unpack_exist_file();

                    return file;
                }
            }));
        }
Example #12
        public async Task <int> CreateFile(ge_log_file newData)
        {
            await _unitOfWork.LoggerFile.AddAsync(newData);

            return(await _unitOfWork.CommitBulkAsync());
        }
Example #13
        private async Task <int> AddNewFile(ge_log_file file)
        {
            int NOT_OK = -1;
            int ret    = 0;

            if (file == null)
            {
                return(NOT_OK);
            }

            file.packFieldHeaders();
            file.packFileHeader();

            var _data = await _context.ge_data
                        .Include(d => d.project)
                        .SingleOrDefaultAsync(m => m.Id == file.dataId);

            dbConnectDetails cd = await getConnectDetails(_data.projectId, logTables.DB_DATA_TYPE);

            if (cd == null)
            {
                return(NOT_OK);
            }

            string dbConnectStr = cd.AsConnectionString();

            return(await Task.Run(() =>
            {
                using (SqlConnection cnn = new SqlConnection(dbConnectStr))
                {
                    dsTable <ge_log_reading> ds_readings = new logTables().reading;
                    dsTable <ge_log_file> ds_file = new logTables().file;
                    cnn.Open();
                    ds_file.setConnection(cnn);
                    ds_file.Reset();

                    ds_readings.setConnection(cnn);
                    ds_readings.Reset();

                    DataTable dt_file = ds_file.getDataTable();
                    DataRow file_row = dt_file.NewRow();

                    file.Id = Guid.NewGuid();
                    set_log_file_values(file, file_row);
                    ds_file.addRow(file_row);

                    ret = ds_file.Update();

                    DataTable dt_readings = ds_readings.getDataTable();

                    foreach (ge_log_reading reading in file.readings)
                    {
                        DataRow row = dt_readings.NewRow();
                        reading.Id = Guid.NewGuid();
                        reading.fileId = file.Id;
                        set_log_reading_values(reading, row);
                        ds_readings.addRow(row);
                    }

                    ret = ret + ds_readings.Update();
                    return ret;
                }
            }));
        }
Example #14
 public async Task <int> DeleteFile(ge_log_file dataToBeDeleted)
 {
     _unitOfWork.LoggerFile.Remove(dataToBeDeleted);
     return(await _unitOfWork.CommitAsync());
 }
Example #15
        private async Task <List <MOND> > createMOND(ge_log_file log_file,
                                                     int page_size,
                                                     int page,
                                                     string round_ref,
                                                     DateTime? fromDT,
                                                     DateTime? toDT,
                                                     string ge_source = "ge_flow",
                                                     Boolean addWLEV  = true)
        {
            // Find borehole in point table of gint database

            string holeId = log_file.getBoreHoleId();

            if (holeId == "")
            {
                return(null); // BadRequest ($"Borehole ref not provided");
            }

            POINT pt = await GetPointByHoleId(holeId);

            if (pt == null)
            {
                return(null);//         return BadRequest ($"Borehole ref {holeId} not found in {project.name}");
            }

            List <MONG> mgs = await GetParentsByHoleId(holeId);


            // Find monitoring point in mong table of gint database
            float probe_depth = log_file.getProbeDepth();

            if (probe_depth == 0)
            {
                return(null); // return BadRequest ($"No probe depth provided for borehole ref {holeId} not found in {project.name}");
            }


            MONG mg = null;

            string formatMATCH = "{0:00.0}";

            if (mgs.Count == 1)
            {
                mg = mgs.FirstOrDefault();
            }
            else
            {
                foreach (MONG m in mgs)
                {
                    if (m.MONG_DIS != null)
                    {
                        if (String.Format(formatMATCH, m.MONG_DIS.Value) == String.Format(formatMATCH, probe_depth))
                        {
                            mg = m;
                            break;
                        }
                    }
                }
            }

            if (mg == null)
            {
                return(null); // return BadRequest ($"No installations in borehole ref {holeId} have a probe depth of {probe_depth} in {project.name}");
            }

            // Add all readings to new items in List<MOND>
            List <MOND> MOND = new List <MOND>();

            string device_name = log_file.getDeviceName();

            float?gl = null;

            if (pt.Elevation != null)
            {
                gl = Convert.ToSingle(pt.Elevation.Value);
            }

            if (gl == null && pt.LOCA_GL != null)
            {
                gl = Convert.ToSingle(pt.LOCA_GL.Value);
            }

            // int round_no = getInt32(round_ref);

            string mond_rem_suffix = "";
            string mond_ref        = "";

            if (ge_source == "ge_flow")
            {
                mond_rem_suffix = " flow meter reading";
            }

            if (ge_source == "ge_logger")
            {
                mond_rem_suffix = " datalogger reading";
            }

            List <ge_log_reading> readings2 = log_file.getIncludeReadingsPage(fromDT, toDT, page_size, page);

            foreach (ge_log_reading reading in readings2)
            {
                foreach (value_header vh in log_file.field_headers)
                {
                    if (ge_source == "ge_flow")
                    {
                        mond_ref = String.Format("Round {0} Seconds {1:00}", round_ref, reading.Duration);
                    }

                    if (vh.id == "WDEPTH" && vh.units == "m")
                    {
                        // Add MOND WDEP record

                        MOND md = NewMOND(mg, reading, device_name, round_ref, "WDEP", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Water Depth", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }

                        if (gl != null && addWLEV == true)
                        {
                            // Add MOND WLEV record
                            MOND md2 = NewMOND(mg, reading, device_name, round_ref, "WLEV", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Water Level", vh.units, vh.format, gl, ge_source);
                            if (md2 != null)
                            {
                                MOND.Add(md2);
                            }
                        }
                    }

                    if (vh.id == "PH")
                    {
                        // Add MOND Potential Hydrogen
                        MOND md = NewMOND(mg, reading, device_name, round_ref, "PH", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }
                    }

                    if (vh.id == "DO" && vh.units == "mg/l")
                    {
                        // Add MOND Dissolved Oxygen record
                        MOND md = NewMOND(mg, reading, device_name, round_ref, "DO", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Dissolved Oxygen", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }
                    }

                    // if ((vh.id == "AEC" && vh.units == "μS/cm") |
                    //     (vh.id == "AEC" & vh.units == "mS/cm")) {
                    //     // Add MOND Electrical Conductivity
                    //     MOND md = NewMOND (mg, reading, device_name, round_ref, "AEC", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Actual Electrical Conductivity", vh.units, vh.format, null, ge_source);
                    //     if (md!=null) MOND.Add (md);
                    // }

                    //if (vh.id == "AEC" && (vh.units == "μS/cm" || vh.units == "mS/cm")) {
                    if (vh.id == "AEC")
                    {
                        // Add MOND Electrical Conductivity
                        MOND md = NewMOND(mg, reading, device_name, round_ref, "AEC", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Actual Electrical Conductivity", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }
                    }

                    //  if ((vh.id == "EC" && vh.units == "μS/cm")) {
                    if ((vh.id == "EC"))
                    {
                        // Add MOND Electrical Conductivity
                        MOND md = NewMOND(mg, reading, device_name, round_ref, "EC", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Electrical Conductivity", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }
                    }

                    if (vh.id == "SAL" && vh.units == "g/cm3")
                    {
                        // Add MOND Salinity record
                        MOND md = NewMOND(mg, reading, device_name, round_ref, "SAL", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Salinity", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }
                    }

                    if (vh.id == "TEMP" && vh.units == "Deg C")
                    {
                        // Add MOND downhole temperature record
                        MOND md = NewMOND(mg, reading, device_name, round_ref, "DOWNTEMP", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Downhole Temperature", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }
                    }

                    if (vh.id == "RDX" && vh.units == "mV")
                    {
                        // Add MOND Redox Potential record
                        MOND md = NewMOND(mg, reading, device_name, round_ref, "RDX", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Redox Potential", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }
                    }

                    if (vh.id == "TURB" && vh.units == "NTU")
                    {
                        // Add MOND Turbidity record
                        MOND md = NewMOND(mg, reading, device_name, round_ref, "TURB", mg.MONG_TYPE + mond_rem_suffix, mond_ref, vh.db_name, "Turbidity", vh.units, vh.format, null, ge_source);
                        if (md != null)
                        {
                            MOND.Add(md);
                        }
                    }
                }
            }

            return(MOND);
        }
Example #16
        public async Task <List <MOND> > CreateMOND(ge_log_file log_file,
                                                    string table,
                                                    string round_ref,
                                                    DateTime?fromDT,
                                                    DateTime?toDT,
                                                    Boolean save_MOND
                                                    )
        {
            string ge_source = "";

            if (table.Contains("waterquality") ||
                table.Contains("wq"))
            {
                ge_source = "ge_flow";
            }

            if (table.Contains("depth") ||
                table.Contains("head") ||
                table.Contains("pressure") ||
                table.Contains("channel") ||
                table.Contains("r0") ||
                table.Contains("r1")
                )
            {
                ge_source = "ge_logger";
            }

            int page_size   = 1000;
            int row_count   = log_file.getIncludeReadings(fromDT, toDT).Count();
            int total_pages = Convert.ToInt32(row_count / page_size) + 1;

            List <MOND> ordered = new List <MOND>();

            for (int page = 1; page <= total_pages; page++)
            {
                List <MOND> batch = await createMOND(log_file,
                                                     page_size,
                                                     page,
                                                     round_ref,
                                                     fromDT,
                                                     toDT,
                                                     ge_source,
                                                     true);

                if (batch == null)
                {
                    continue;
                }

                if (batch.Count == 0)
                {
                    continue;
                }

                if (save_MOND == true)
                {
                    string where2 = $"ge_source='{ge_source}'";
                    await UpdateRange(batch, where2);
                }

                ordered.AddRange(batch.OrderBy(e => e.DateTime).ToList());
            }

            return(ordered);
        }
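Note that the page count above uses integer division plus one, which produces a trailing empty page whenever row_count is an exact multiple of page_size; that is harmless here because empty batches are skipped. A ceiling-division sketch that avoids the extra iteration:

        // Sketch: 2000 rows with page_size 1000 gives exactly 2 pages.
        int total_pages = (row_count + page_size - 1) / page_size;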
Example #17
        public ge_log_file NewFile(ge_search dic,
                                   string[] lines,
                                   Guid dataId,
                                   Guid templateId)
        {
            ge_log_file file = new ge_log_file();

            file.dataId     = dataId;
            file.templateId = templateId;

            file.search_template = dic;

            file.file_headers = dic.search_items;
            file.file_array   = dic.array_items;

            search_table st = dic.search_tables.FirstOrDefault();

            file.search_table  = st;
            file.field_headers = st.headers;
            file.channel       = st.name;

            value_header DateTimeReading = dic.getHeader(ge_log_constants.READINGDATETIME);

            int intReadTime = NOT_FOUND;

            if (DateTimeReading != null)
            {
                intReadTime = DateTimeReading.found;
            }

            value_header Duration    = dic.getHeader(ge_log_constants.DURATION);
            int          intDuration = NOT_FOUND;

            if (Duration != null)
            {
                intDuration = Duration.found;
            }

            value_header Header1   = dic.getHeader(ge_log_constants.VALUE1);
            int          intValue1 = NOT_FOUND;

            if (Header1 != null)
            {
                intValue1 = Header1.found;
            }

            value_header Header2   = dic.getHeader(ge_log_constants.VALUE2);
            int          intValue2 = NOT_FOUND;

            if (Header2 != null)
            {
                intValue2 = Header2.found;
            }

            value_header Header3   = dic.getHeader(ge_log_constants.VALUE3);
            int          intValue3 = NOT_FOUND;

            if (Header3 != null)
            {
                intValue3 = Header3.found;
            }

            value_header Header4   = dic.getHeader(ge_log_constants.VALUE4);
            int          intValue4 = NOT_FOUND;

            if (Header4 != null)
            {
                intValue4 = Header4.found;
            }

            value_header Header5   = dic.getHeader(ge_log_constants.VALUE5);
            int          intValue5 = NOT_FOUND;

            if (Header5 != null)
            {
                intValue5 = Header5.found;
            }

            value_header Header6   = dic.getHeader(ge_log_constants.VALUE6);
            int          intValue6 = NOT_FOUND;

            if (Header6 != null)
            {
                intValue6 = Header6.found;
            }

            value_header Header7   = dic.getHeader(ge_log_constants.VALUE7);
            int          intValue7 = NOT_FOUND;

            if (Header7 != null)
            {
                intValue7 = Header7.found;
            }

            value_header Header8   = dic.getHeader(ge_log_constants.VALUE8);
            int          intValue8 = NOT_FOUND;

            if (Header8 != null)
            {
                intValue8 = Header8.found;
            }

            value_header Header9   = dic.getHeader(ge_log_constants.VALUE9);
            int          intValue9 = NOT_FOUND;

            if (Header9 != null)
            {
                intValue9 = Header9.found;
            }

            value_header Header10   = dic.getHeader(ge_log_constants.VALUE10);
            int          intValue10 = NOT_FOUND;

            if (Header10 != null)
            {
                intValue10 = Header10.found;
            }

            value_header Header11   = dic.getHeader(ge_log_constants.VALUE11);
            int          intValue11 = NOT_FOUND;

            if (Header11 != null)
            {
                intValue11 = Header11.found;
            }

            value_header Header12   = dic.getHeader(ge_log_constants.VALUE12);
            int          intValue12 = NOT_FOUND;

            if (Header12 != null)
            {
                intValue12 = Header12.found;
            }

            value_header Header13   = dic.getHeader(ge_log_constants.VALUE13);
            int          intValue13 = NOT_FOUND;

            if (Header13 != null)
            {
                intValue13 = Header13.found;
            }

            value_header Header14   = dic.getHeader(ge_log_constants.VALUE14);
            int          intValue14 = NOT_FOUND;

            if (Header14 != null)
            {
                intValue14 = Header14.found;
            }

            value_header Header15   = dic.getHeader(ge_log_constants.VALUE15);
            int          intValue15 = NOT_FOUND;

            if (Header15 != null)
            {
                intValue15 = Header15.found;
            }

            value_header Header16   = dic.getHeader(ge_log_constants.VALUE16);
            int          intValue16 = NOT_FOUND;

            if (Header16 != null)
            {
                intValue16 = Header16.found;
            }

            value_header Header17   = dic.getHeader(ge_log_constants.VALUE17);
            int          intValue17 = NOT_FOUND;

            if (Header17 != null)
            {
                intValue17 = Header17.found;
            }

            value_header Header18   = dic.getHeader(ge_log_constants.VALUE18);
            int          intValue18 = NOT_FOUND;

            if (Header18 != null)
            {
                intValue18 = Header18.found;
            }
            value_header HeaderRemark = dic.getHeader(ge_log_constants.REMARK);
            int          intRemark    = NOT_FOUND;

            if (HeaderRemark != null)
            {
                intRemark = HeaderRemark.found;
            }

            value_header log_wdepthM         = file.getHeaderByIdUnits(ge_log_constants.WDEPTH, "m");
            int          intCheckValueForDry = NOT_FOUND;

            if (log_wdepthM != null)
            {
                intCheckValueForDry = log_wdepthM.found;
            }

            file.readings = new List <ge_log_reading>();

            int line_start = dic.data_start_row(NOT_FOUND);

            int line_end = dic.data_end_row(lines.Count());


            int readlines = addReadingsAny(file.readings,
                                           lines,
                                           line_start,
                                           line_end,
                                           intReadTime,
                                           intDuration,
                                           intValue1,
                                           intValue2,
                                           intValue3,
                                           intValue4,
                                           intValue5,
                                           intValue6,
                                           intValue7,
                                           intValue8,
                                           intValue9,
                                           intValue10,
                                           intValue11,
                                           intValue12,
                                           intValue13,
                                           intValue14,
                                           intValue15,
                                           intValue16,
                                           intValue17,
                                           intValue18,
                                           intRemark,
                                           intCheckValueForDry,
                                           DateTimeReading != null ? DateTimeReading.format : null
                                           );

            if (readlines <= 0)
            {
                return(null);
            }

            file.init_new_file();
            file.calcReadingAggregates();

            return(file);
        }
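The eighteen header lookups in NewFile all repeat the same null-to-NOT_FOUND pattern. A small helper (hypothetical, not part of the original class) would shorten the method considerably:

        // Sketch: collapse "header != null ? header.found : NOT_FOUND" into one call.
        private int FoundOrNotFound(value_header header)
        {
            return header != null ? header.found : NOT_FOUND;
        }
        // e.g. int intValue1 = FoundOrNotFound(dic.getHeader(ge_log_constants.VALUE1));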