/// <summary>
/// Loads all ge_log_file records attached to the given data record, optionally
/// hydrating each file's readings via <c>GetFile</c> (one call per channel).
/// </summary>
/// <param name="dataId">Id of the ge_data record owning the log files.</param>
/// <param name="IncludeReadings">When true, each file is re-fetched with its readings.</param>
/// <returns>The list of files, or null when the id is empty or the data record / connection details cannot be resolved.</returns>
private async Task<List<ge_log_file>> GetFiles(Guid dataId, Boolean IncludeReadings = true)
{
    // BUG FIX: Guid is a non-nullable value type, so the original
    // "dataId == null" check was always false (CS0472). Compare to Guid.Empty.
    if (dataId == Guid.Empty)
    {
        return null;
    }

    var _logger = await _context.ge_data
                                .Include(d => d.project)
                                .AsNoTracking()
                                .SingleOrDefaultAsync(m => m.Id == dataId);

    // BUG FIX: the original dereferenced _logger.projectId without checking for
    // a missing record; SingleOrDefaultAsync returns null when no row matches.
    if (_logger == null)
    {
        return null;
    }

    dbConnectDetails cd = await getConnectDetails(_logger.projectId, logTables.DB_DATA_TYPE);

    if (cd == null)
    {
        return null;
    }

    string dbConnectStr = cd.AsConnectionString();

    // Fetch the file rows on a worker thread (the dsTable helper is synchronous).
    var dt_file2 = await Task.Run(() =>
    {
        using (SqlConnection cnn = new SqlConnection(dbConnectStr))
        {
            cnn.Open();

            dsTable<ge_log_file> ds_files = new logTables().file;
            ds_files.setConnection(cnn);
            // Guid.ToString() cannot contain quotes, so this concatenation is not injectable.
            ds_files.sqlWhere("dataId='" + dataId.ToString() + "'");
            ds_files.getDataSet();

            return ds_files.getDataTable();
        }
    });

    List<ge_log_file> local_log_files = new List<ge_log_file>();

    foreach (DataRow row in dt_file2.Rows)
    {
        ge_log_file file = new ge_log_file();
        get_log_file_values(row, file);

        if (IncludeReadings == true)
        {
            // Re-fetch the same file with its readings populated.
            file = await GetFile(file.dataId, file.channel, true);
        }

        local_log_files.Add(file);
    }

    return local_log_files;
}
/// <summary>
/// Updates the reading rows identified by <paramref name="Id"/> for the given data record.
/// NOTE(review): the per-field assignments are commented out in the original, so this
/// currently only re-saves the selected rows; <paramref name="header"/> and
/// <paramref name="values"/> are unused until those assignments are restored.
/// </summary>
/// <param name="Id">Ids of the ge_log_reading rows to update.</param>
/// <param name="dataId">Id of the owning ge_data record (used to resolve the connection).</param>
/// <param name="header">Serialized value header (currently unused).</param>
/// <param name="values">New channel values (currently unused).</param>
/// <returns>Number of rows affected, or -1 on any failure.</returns>
private async Task<int> UpdateChannel(Guid[] Id, Guid dataId, string header, float[] values)
{
    //value_header vh = Json.Convert<value_header>(header);
    int NOT_OK = -1;
    int ret = 0;

    // BUG FIX: Guid is a value type, "dataId == null" was always false.
    // Also guard against an empty id list, which previously led to an
    // IndexOutOfRangeException at dt_readings.Rows[0].
    if (dataId == Guid.Empty || Id == null || Id.Length == 0)
    {
        return NOT_OK;
    }

    var _logger = await _context.ge_data
                                .Include(d => d.project)
                                .SingleOrDefaultAsync(m => m.Id == dataId);

    if (_logger == null)
    {
        return NOT_OK;
    }

    dbConnectDetails cd = await getConnectDetails(_logger.projectId, logTables.DB_DATA_TYPE);

    if (cd == null)
    {
        return NOT_OK;
    }

    string dbConnectStr = cd.AsConnectionString();

    return await Task.Run(() =>
    {
        using (SqlConnection cnn = new SqlConnection(dbConnectStr))
        {
            dsTable<ge_log_reading> ds_readings = new logTables().reading;

            cnn.Open();
            ds_readings.setConnection(cnn);
            ds_readings.getDataTable();

            // BUG FIX: the original called Id.ToString() on the Guid ARRAY, which
            // yields the literal "System.Guid[]" — the WHERE clause never matched
            // and Rows[0] threw on the empty result. Build a proper IN list.
            // Guid.ToString() cannot contain quotes, so this is not injectable.
            string idList = string.Join(",", Id.Select(g => "'" + g.ToString() + "'"));
            ds_readings.sqlWhere("Id in (" + idList + ")");
            ds_readings.getDataSet();

            DataTable dt_readings = ds_readings.getDataTable();

            if (dt_readings == null || dt_readings.Rows.Count == 0)
            {
                return NOT_OK;
            }

            // Intended field updates, preserved from the original for when the
            // corresponding parameters are reinstated:
            // foreach (DataRow row in dt_readings.Rows)
            // {
            //     if (Valid!=null) row["Valid"] = Valid;
            //     if (Include!=null) row["Include"] = Include;
            //     if (pflag!=null) row["pflag"] = pflag;
            //     if (NotDry!=null) row["NotDry"] = NotDry;
            //     if (Remark!=null) row["Remark"] = Remark;
            // }

            ret = ds_readings.Update();
            return ret;
        }
    });
}
/// <summary>
/// Persists a ge_log_file (and optionally its readings) to the project's log database.
/// Existing readings are matched by Id, then by ReadingDateTime; unmatched incoming
/// readings are inserted, and stale rows from a previous version of the file are deleted.
/// </summary>
/// <param name="file">The file to save; its headers are packed before writing.</param>
/// <param name="IncludeReadings">When true, the readings table is synchronized as well.</param>
/// <returns>Total rows affected across the file and readings tables, or -1 on failure.</returns>
private async Task<int> UpdateFile(ge_log_file file, Boolean IncludeReadings)
{
    int NOT_OK = -1;
    int ret = 0;

    // BUG FIX: Guid is a value type, so the original "file.dataId == null"
    // check was always false. Also guard a null file reference.
    if (file == null || file.dataId == Guid.Empty)
    {
        return NOT_OK;
    }

    file.packFieldHeaders();
    file.packFileHeader();

    var _logger = await _context.ge_data
                                .Include(d => d.project)
                                .SingleOrDefaultAsync(m => m.Id == file.dataId);

    // BUG FIX: original dereferenced _logger.projectId without a null check.
    if (_logger == null)
    {
        return NOT_OK;
    }

    dbConnectDetails cd = await getConnectDetails(_logger.projectId, logTables.DB_DATA_TYPE);

    if (cd == null)
    {
        return NOT_OK;
    }

    string dbConnectStr = cd.AsConnectionString();

    return await Task.Run(() =>
    {
        using (SqlConnection cnn = new SqlConnection(dbConnectStr))
        {
            dsTable<ge_log_reading> ds_readings = new logTables().reading;
            dsTable<ge_log_file> ds_file = new logTables().file;

            cnn.Open();

            ds_file.setConnection(cnn);
            ds_file.getDataTable();
            ds_readings.setConnection(cnn);
            ds_readings.getDataTable();

            // Guid values cannot contain quotes, so these concatenations are not injectable.
            ds_file.sqlWhere("Id='" + file.Id + "'");
            ds_file.getDataSet();

            DataTable dt_file = ds_file.getDataTable();

            if (dt_file == null || dt_file.Rows.Count == 0)
            {
                return NOT_OK;
            }

            DataRow file_row = dt_file.Rows[0];
            set_log_file_values(file, file_row);
            ret = ds_file.Update();

            if (!IncludeReadings)
            {
                return ret;
            }

            ds_readings.sqlWhere("FileId='" + file.Id.ToString() + "'");
            ds_readings.getDataSet();
            DataTable dt_readings = ds_readings.getDataTable();

            // Only attempt to match existing rows when the table already has some.
            Boolean checkExisting = dt_readings.Rows.Count > 0;

            foreach (ge_log_reading reading in file.readings)
            {
                DataRow row = null;

                if (checkExisting == true)
                {
                    // Match by primary key first, then fall back to the timestamp.
                    if (reading.Id != Guid.Empty)
                    {
                        row = dt_readings.Select($"Id='{reading.Id}'").SingleOrDefault();
                    }

                    if (row == null)
                    {
                        row = dt_readings.Select($"ReadingDateTime='{String.Format("{0:yyyy-MM-dd HH:mm:ss.ffff}", reading.ReadingDateTime)}'").SingleOrDefault();
                    }
                }

                if (row == null)
                {
                    // No existing row: insert a new one with a fresh key.
                    row = ds_readings.NewRow();
                    reading.Id = Guid.NewGuid();
                    reading.fileId = file.Id;
                    ds_readings.addRow(row);
                }
                else
                {
                    // Existing row: keep its key so the update targets it.
                    reading.Id = (Guid)row["Id"];
                    reading.fileId = file.Id;
                }

                set_log_reading_values(reading, row);
            }

            // Delete rows left over from a previous version of the ge_log_file:
            // anything that was neither added nor modified above is stale.
            // BUG FIX: the original condition
            //   row.RowState == DataRowState.Added | row.RowState != DataRowState.Modified
            // deleted newly ADDED rows and every unmodified row — the opposite of
            // the stated intent ("mark for deletion all records not 'new' or 'updated'").
            if (file.readings.Count() < dt_readings.Rows.Count)
            {
                foreach (DataRow row in dt_readings.Rows)
                {
                    if (row.RowState != DataRowState.Added && row.RowState != DataRowState.Modified)
                    {
                        row.Delete();
                    }
                }
            }

            ret = ret + ds_readings.Update();
            return ret;
        }
    });
}
/// <summary>
/// Loads a single ge_log_file for a data record and channel from the project's
/// log database, optionally including its readings (ordered on load).
/// </summary>
/// <param name="dataId">Id of the owning ge_data record.</param>
/// <param name="table">Channel/table name; multichannel transducers have up to 8 tables
/// sharing one dataId. Empty selects the row with no channel set.</param>
/// <param name="IncludeReadings">When true, readings are fetched and ordered.</param>
/// <returns>The file, or null when the id is empty or nothing matches.</returns>
private async Task<ge_log_file> GetFile(Guid dataId, string table = "data_pressure", Boolean IncludeReadings = true)
{
    // BUG FIX: Guid is a value type, so the original "dataId == null"
    // check was always false (CS0472). Compare to Guid.Empty.
    if (dataId == Guid.Empty)
    {
        return null;
    }

    var _logger = await _context.ge_data
                                .Include(d => d.project)
                                .AsNoTracking()
                                .SingleOrDefaultAsync(m => m.Id == dataId);

    // BUG FIX: original dereferenced _logger.projectId without a null check.
    if (_logger == null)
    {
        return null;
    }

    dbConnectDetails cd = await getConnectDetails(_logger.projectId, logTables.DB_DATA_TYPE);

    if (cd == null)
    {
        return null;
    }

    string dbConnectStr = cd.AsConnectionString();

    return await Task.Run(() =>
    {
        using (SqlConnection cnn = new SqlConnection(dbConnectStr))
        {
            cnn.Open();

            dsTable<ge_log_reading> ds_readings = new logTables().reading;
            dsTable<ge_log_file> ds_file = new logTables().file;
            ds_file.setConnection(cnn);
            ds_readings.setConnection(cnn);

            // Multichannel transducers have up to 8 tables which all share the same dataId.
            if (string.IsNullOrEmpty(table))
            {
                ds_file.sqlWhere("dataId='" + dataId.ToString() + "' and (channel is null or channel='')");
            }
            else
            {
                // SECURITY: escape embedded quotes so the channel name cannot break
                // out of the literal. TODO(review): the dsTable helper should be
                // extended to support parameterized queries instead.
                ds_file.sqlWhere("dataId='" + dataId.ToString() + "' and channel='" + table.Replace("'", "''") + "'");
            }

            ds_file.getDataSet();

            DataTable dt_file = ds_file.getDataTable();

            if (dt_file == null || dt_file.Rows.Count == 0)
            {
                return null;
            }

            ge_log_file file = new ge_log_file();
            DataRow row = dt_file.Rows[0];
            get_log_file_values(row, file);

            if (IncludeReadings)
            {
                ds_readings.sqlWhere("FileId='" + file.Id.ToString() + "'");
                ds_readings.getDataSet();
                DataTable dt_readings = ds_readings.getDataTable();

                file.readings = new List<ge_log_reading>();

                foreach (DataRow rrow in dt_readings.Rows)
                {
                    ge_log_reading r = new ge_log_reading();
                    get_log_reading_values(rrow, r);
                    file.readings.Add(r);
                }

                file.OrderReadings();
            }

            file.unpack_exist_file();
            return file;
        }
    });
}