/// <summary>
/// Updates an existing <c>ge_log_file</c> row — and, when <paramref name="IncludeReadings"/>
/// is true, synchronizes its readings — in the project-specific log database resolved
/// from the file's <c>dataId</c>.
/// </summary>
/// <param name="file">The log file to persist; must have a non-null <c>dataId</c>.</param>
/// <param name="IncludeReadings">When true, insert/update the file's readings and mark
/// readings left over from a previous version of the file for deletion.</param>
/// <returns>The number of rows affected, or -1 on failure (missing dataId, unresolved
/// data record, missing connection details, or file row not found).</returns>
private async Task<int> UpdateFile(ge_log_file file, Boolean IncludeReadings)
{
    int NOT_OK = -1;
    int ret = 0;

    if (file.dataId == null)
    {
        return NOT_OK;
    }

    file.packFieldHeaders();
    file.packFileHeader();

    // Resolve the owning data record (and its project) to locate the target database.
    // NOTE(review): was previously named `_logger`, which misstated what it holds;
    // renamed to match the sibling AddNewFile.
    var data = await _context.ge_data
                             .Include(d => d.project)
                             .SingleOrDefaultAsync(m => m.Id == file.dataId);

    if (data == null)
    {
        // dataId did not resolve to a record — previously this fell through to an NRE.
        return NOT_OK;
    }

    dbConnectDetails cd = await getConnectDetails(data.projectId, logTables.DB_DATA_TYPE);

    if (cd == null)
    {
        return NOT_OK;
    }

    string dbConnectStr = cd.AsConnectionString();

    return await Task.Run(() =>
    {
        using (SqlConnection cnn = new SqlConnection(dbConnectStr))
        {
            dsTable<ge_log_reading> ds_readings = new logTables().reading;
            dsTable<ge_log_file> ds_file = new logTables().file;

            cnn.Open();

            ds_file.setConnection(cnn);
            ds_file.getDataTable();
            ds_readings.setConnection(cnn);
            ds_readings.getDataTable();

            // Id is a Guid, so string-building the WHERE clause cannot inject,
            // but a parameterized filter would still be preferable if dsTable supports it.
            ds_file.sqlWhere("Id='" + file.Id + "'");
            ds_file.getDataSet();

            DataTable dt_file = ds_file.getDataTable();

            if (dt_file == null || dt_file.Rows.Count == 0)
            {
                return NOT_OK;
            }

            DataRow file_row = dt_file.Rows[0];
            set_log_file_values(file, file_row);
            ret = ds_file.Update();

            if (!IncludeReadings)
            {
                return ret;
            }

            ds_readings.sqlWhere("FileId='" + file.Id.ToString() + "'");
            ds_readings.getDataSet();
            DataTable dt_readings = ds_readings.getDataTable();

            // Only probe for existing rows when a previous version left any behind.
            Boolean checkExisting = dt_readings.Rows.Count > 0;

            foreach (ge_log_reading reading in file.readings)
            {
                DataRow row = null;

                if (checkExisting)
                {
                    // Match by Id first; fall back to the reading timestamp for rows
                    // imported before Ids were assigned.
                    if (reading.Id != Guid.Empty)
                    {
                        row = dt_readings.Select($"Id='{reading.Id}'").SingleOrDefault();
                    }

                    if (row == null)
                    {
                        row = dt_readings.Select($"ReadingDateTime='{String.Format("{0:yyyy-MM-dd HH:mm:ss.ffff}", reading.ReadingDateTime)}'").SingleOrDefault();
                    }
                }

                if (row == null)
                {
                    row = ds_readings.NewRow();
                    reading.Id = Guid.NewGuid();
                    reading.fileId = file.Id;
                    ds_readings.addRow(row);
                }
                else
                {
                    reading.Id = (Guid)row["Id"];
                    reading.fileId = file.Id;
                }

                set_log_reading_values(reading, row);
            }

            // Readings present in the table but neither added nor updated above belong
            // to a previous version of the file: mark them for deletion.
            // BUG FIX: the old condition `== Added | != Modified` deleted every
            // freshly-added row (Added != Modified) — the opposite of the intent.
            if (file.readings.Count() < dt_readings.Rows.Count)
            {
                foreach (DataRow row in dt_readings.Rows)
                {
                    if (row.RowState != DataRowState.Added && row.RowState != DataRowState.Modified)
                    {
                        // Delete() only marks an attached, unchanged row as Deleted,
                        // so enumerating the collection here is safe.
                        row.Delete();
                    }
                }
            }

            ret = ret + ds_readings.Update();
            return ret;
        }
    });
}
private async Task <int> AddNewFile(ge_log_file file) { int NOT_OK = -1; int ret = 0; if (file == null) { return(NOT_OK); } file.packFieldHeaders(); file.packFileHeader(); var _data = await _context.ge_data .Include(d => d.project) .SingleOrDefaultAsync(m => m.Id == file.dataId); dbConnectDetails cd = await getConnectDetails(_data.projectId, logTables.DB_DATA_TYPE); if (cd == null) { return(NOT_OK); } string dbConnectStr = cd.AsConnectionString(); return(await Task.Run(() => { using (SqlConnection cnn = new SqlConnection(dbConnectStr)) { dsTable <ge_log_reading> ds_readings = new logTables().reading; dsTable <ge_log_file> ds_file = new logTables().file; cnn.Open(); ds_file.setConnection(cnn); ds_file.Reset(); ds_readings.setConnection(cnn); ds_readings.Reset(); DataTable dt_file = ds_file.getDataTable(); DataRow file_row = dt_file.NewRow(); file.Id = Guid.NewGuid(); set_log_file_values(file, file_row); ds_file.addRow(file_row); ret = ds_file.Update(); DataTable dt_readings = ds_readings.getDataTable(); foreach (ge_log_reading reading in file.readings) { DataRow row = dt_readings.NewRow(); reading.Id = Guid.NewGuid(); reading.fileId = file.Id; set_log_reading_values(reading, row); ds_readings.addRow(row); } ret = ret + ds_readings.Update(); return ret; } })); }