/// <summary>
/// Imports a data-dictionary spreadsheet into def.fld for the measure given in
/// the "mID" query-string parameter. Each imported row is tagged with the
/// measure's tblpk and a default field-extraction mode so the measure can be
/// imported later.
/// </summary>
/// <param name="filename">Path of the spreadsheet to load (first sheet is used).</param>
/// <returns>The BulkInsert result message, or "" when mID is invalid or no tblpk exists.</returns>
protected string ProcessDictionary(string filename)
{
    string insertresults = "";

    // Validate mID before building SQL - the raw query-string value was
    // previously concatenated directly into the statement (SQL injection risk).
    int mID;
    if (!int.TryParse(Request.QueryString["mID"], out mID))
    {
        return insertresults;
    }

    SQL_utils sql = new SQL_utils("data");

    // Check for tblpk; coalesce(-1) distinguishes "no row" from a real key
    int tblpk = sql.IntScalar_from_SQLstring("select coalesce(tblpk,-1) from def.tbl where measureID=" + mID.ToString());
    if (tblpk > 0)
    {
        DataSet ds = SpreadsheetGearUtils.GetDataSet(filename, false);
        DataTable dt = ds.Tables[0];

        // Add tblpk so every imported field row links back to its parent table.
        // DefaultValue applies to rows added afterwards; BulkInsert is assumed
        // to honor it for the existing rows - TODO confirm in SQL_utils.
        DataColumn col = new DataColumn("tblpk", typeof(int));
        col.DefaultValue = tblpk;

        // Add fldextractionmode - and thus allow this measure to be imported
        DataColumn col2 = new DataColumn("fldextractionmode", typeof(int));
        int mode = (int)FieldExtractionMode.matchFldname;
        col2.DefaultValue = mode;

        dt.Columns.AddRange(new DataColumn[2] { col, col2 });
        insertresults = sql.BulkInsert(dt, "fld", "def");
    }
    sql.Close();
    return insertresults;
}
/// <summary>
/// Copies the rows of _dtRC flagged isready=="ready" into the destination
/// table (tblname), adding a verified=0 column and matching column data types
/// to the destination schema first. Progress and errors go to resultslog.
/// </summary>
private void InsertREDCapData()
{
    // CopyToDataTable throws InvalidOperationException when the source has no
    // rows, so bail out early instead of crashing on an empty batch.
    var readyrows = _dtRC.AsEnumerable().Where(f => f.Field<string>("isready") == "ready");
    if (!readyrows.Any())
    {
        resultslog.Log("INFO: No rows marked 'ready'; nothing to insert.");
        return;
    }
    DataTable dtready = readyrows.CopyToDataTable();

    // Imported rows start out unverified
    DataColumn col = new DataColumn("verified", typeof(int));
    col.DefaultValue = 0;
    dtready.Columns.Add(col);

    SQL_utils sql = new SQL_utils("data");
    resultslog.Log("INFO: Attempting to match data types");

    // "where 1=2" returns the destination schema with zero rows.
    DataTable dt_dest = sql.DataTable_from_SQLstring(String.Format("select * from {0} where 1=2", tblname));
    dtready.MatchColumnDataTypes(dt_dest);

    try
    {
        string insertresults = sql.BulkInsert(dtready, tblname);
        resultslog.Log(insertresults);
        // Only update tracking for rows that actually made it into the DB
        if (!insertresults.Contains("Error"))
        {
            UpdateTrackingDB(dtready);
        }
    }
    catch (Exception ex)
    {
        // Previously the caught exception was discarded; include its message.
        resultslog.Log(String.Format("FAILED to insert data into {0} (# rows = {1}): {2}", tblname, dtready.Rows.Count, ex.Message));
    }
    sql.Close();
}
/// <summary>
/// Replaces the def.REDCap_fields metadata for the token found in dt_metadata:
/// deletes any existing rows for that tokenid, then bulk-inserts the current
/// metadata.
/// </summary>
/// <returns>A message combining the delete and insert results (empty parts when nothing was done).</returns>
public string SaveFieldsToDB()
{
    string result = "";
    string result2 = "";
    if (dt_metadata.HasRows())
    {
        SQL_utils sql = new SQL_utils("data");
        // tokenid of the first row identifies the whole batch
        string first_tokenid = dt_metadata.AsEnumerable().Select(f => f.Field<int>("tokenid")).First().ToString();
        if (first_tokenid != "")
        {
            string sql_n = String.Format("select count(*) from def.REDCap_fields where tokenid = {0}", first_tokenid);
            int nflds = sql.IntScalar_from_SQLstring(sql_n);
            if (nflds > 0)
            {
                // Remove the previous metadata for this token before re-inserting
                string sql_del = String.Format("delete from def.REDCap_fields where tokenid = {0}", first_tokenid);
                sql.NonQuery_from_SQLstring(sql_del);
                result = String.Format("{0} records deleted from [redcap_fields]. ", nflds);
            }
            result2 = sql.BulkInsert(dt_metadata, "redcap_fields", "def");
        }
        sql.Close(); // was missing - connection leaked (every other method here closes)
    }
    return String.Format("{0} {1}", result, result2);
}
/// <summary>
/// Bulk-inserts the REDCap events table (dt_redcapevents) into def.redcap_form.
/// NOTE(review): events are written to "redcap_form" - confirm that table name
/// is intentional.
/// </summary>
/// <returns>The BulkInsert result message, or "" when there are no rows.</returns>
public string SaveEventsToDB()
{
    string result = "";
    if (dt_redcapevents.HasRows())
    {
        SQL_utils sql = new SQL_utils("data");
        result = sql.BulkInsert(dt_redcapevents, "redcap_form", "def");
        sql.Close(); // was missing - connection leaked
    }
    return result;
}
/// <summary>
/// Replaces def.REDCap_formevent rows for the token found in dt_formevents:
/// deletes existing rows for that tokenid, then bulk-inserts the current set.
/// </summary>
/// <returns>The BulkInsert result message, or "" when there is nothing to save.</returns>
public string SaveFormEventsToDB()
{
    string result = "";
    if (dt_formevents.HasRows())
    {
        SQL_utils sql = new SQL_utils("data");
        // tokenid of the first row identifies the whole batch
        string first_tokenid = dt_formevents.AsEnumerable().Select(f => f.Field<int>("tokenid")).First().ToString();
        if (first_tokenid != "")
        {
            // Build the delete only when we actually have a token to delete for
            string sql_del = String.Format("delete from def.REDCap_formevent where tokenid = {0}", first_tokenid);
            sql.NonQuery_from_SQLstring(sql_del);
            result = sql.BulkInsert(dt_formevents, "redcap_formevent", "def");
        }
        sql.Close(); // was missing - connection leaked
    }
    return result;
}
/// <summary>
/// Copies survey records from the avh view for the given def.tbl key into the
/// destination table named in def.tbl, restricted to study-1110 participant IDs.
/// </summary>
/// <param name="tblpk">Primary key of the def.tbl row describing the destination table.</param>
protected void BulkInsert(int tblpk)
{
    SQL_utils sql = new SQL_utils("data");
    string pk = tblpk.ToString();

    // Destination table for this tblpk
    string tblname = sql.StringScalar_from_SQLstring("select tblname from def.tbl where tblpk=" + pk);

    // Study-measure lookup for study 1110.
    // NOTE(review): studymeasID and dt_surveyflds below are fetched but not
    // used further in this method.
    int studymeasID = sql.IntScalar_from_SQLstring(
        "select studymeasID from uwautism_research_backend..tblstudymeas where studyID=1110 and " +
        "measureID = (select measureID from def.tbl where tblpk=" + pk + ")");

    // Field metadata for the survey questions mapped onto this table
    DataTable dt_surveyflds = sql.DataTable_from_SQLstring(
        "select q_id, b.tblpk, b.fldpk, fldname, stem, fieldlabel from avh.questions_def_flds a join def.fld b ON a.tblpk=b.tblpk and a.fldpk=b.fldpk where a.tblpk=" + pk);

    // Source rows: the avh view for this table, limited to enrolled IDs
    string ids = "select id from uwautism_research_backend..vwmasterstatus_S where studyID=1110";
    DataTable dt_source = sql.DataTable_from_SQLstring(String.Format("select * from avh.vw{0} where id in ({1})", tblpk, ids));

    Debug.WriteLine(String.Format("{0} numrecs: {1} going to {2}", tblpk, dt_source.Rows.Count, tblname));
    sql.BulkInsert(dt_source, tblname);
    sql.Close();
}
/// <summary>
/// Pulls data for one REDCap form, verifies every record's ID exists in the
/// DB, and - when all rows resolve to a single measure - bulk-inserts the data
/// into that measure's table. Progress and errors are written to _import_results.
/// </summary>
/// <param name="form">REDCap form name to import.</param>
/// <param name="studyid">Study whose ID roster is used for verification.</param>
public void SaveFormDataToDB(string form, int studyid)
{
    SQL_utils sql = new SQL_utils("data");
    _import_results.Log("Processing <b>" + form + "</b>");

    DataTable dt_redcap = DataFromForm(form);
    dt_redcap = AddStudymeasToREDCapFormData(dt_redcap);

    string idfld = REDCap_id_fldname_for_DB(sql, form, studyid);
    _import_results.Log(String.Format("ID contained in REDCap field '{0}'", idfld));

    // VerifyID flags unknown IDs via a non-null "not_id" value
    dt_redcap = dataops.VerifyID(dt_redcap, idfld, studyid);
    int num_notid = dt_redcap.AsEnumerable().Where(f => f.Field<string>("not_id") != null).Count();
    if (num_notid > 0)
    {
        List<string> not_ids = dt_redcap.AsEnumerable().Where(f => f.Field<string>("not_id") != null).Select(f => f.Field<string>("not_id")).ToList();
        _import_results.Log(String.Format("ERROR. Data contains {0} records with ID's not in the DB: {1}", num_notid, String.Join(",", not_ids)));
    }
    else
    {
        _import_results.Log("All ID's in REDCap data are present in the DB.");
        dt_redcap = dataops.CopyColumntoID(dt_redcap, idfld);
        dt_redcap.AddConstantInt("verified", 0); // imported rows start unverified

        // Normalize any REDCap "*_timestamp" column name to "redcap_timestamp".
        // RenameColumn only changes ColumnName, so enumerating Columns is safe.
        string timestamp_column = "";
        foreach (DataColumn col in dt_redcap.Columns)
        {
            if (col.ColumnName.EndsWith("_timestamp"))
            {
                timestamp_column = col.ColumnName;
                dt_redcap.RenameColumn(timestamp_column, "redcap_timestamp");
            }
        }

        // All rows must belong to one measure: compare the measures resolved
        // from the min and max studymeasid in the batch.
        int studymeasid1 = dt_redcap.AsEnumerable().Select(f => f.Field<int>("studymeasid")).Min();
        int studymeasid2 = dt_redcap.AsEnumerable().Select(f => f.Field<int>("studymeasid")).Max();
        int measureid1 = sql.IntScalar_from_SQLstring("select measureid from uwautism_research_backend..tblstudymeas where studymeasid=" + studymeasid1.ToString());
        int measureid2 = sql.IntScalar_from_SQLstring("select measureid from uwautism_research_backend..tblstudymeas where studymeasid=" + studymeasid2.ToString());
        if (measureid1 == measureid2)
        {
            string tblname = sql.StringScalar_from_SQLstring(String.Format("select tblname from def.tbl where measureid={0}", measureid1));
            string result;
            try
            {
                // Empty strings would fail typed destination columns
                DataTable dt_redcap2 = dt_redcap.ConvertEmptyStringToDBNull();
                result = sql.BulkInsert(dt_redcap2, tblname);
            }
            catch (Exception ex)
            {
                result = "ERROR: " + ex.Message;
            }
            _import_results.Log(result);
        }
    }
    sql.Close(); // was missing - connection leaked on every call
}
/// <summary>
/// Parses an actigraph export file into marker-delimited chunks and imports
/// each chunk into its measure's table, returning an HTML-formatted log of the
/// run (including daylight info and elapsed time).
/// NOTE(review): smID is hard-coded to 0 below (the intended value,
/// markers[i].studymeasID, is commented out), so the per-chunk import body is
/// currently skipped for every chunk - only the daylight logging and timing
/// run. chunk_studymeasID is built but never read.
/// </summary>
/// <param name="id">Participant/subject identifier passed to the importer.</param>
/// <param name="filepath">Directory containing the export file.</param>
/// <param name="filename">Name of the export file.</param>
/// <param name="studymeasID">Study measure used for marker indexing and daylight logging.</param>
/// <returns>HTML log of the processing run.</returns>
public static string ProcessActigraph(string id, string filepath, string filename, int studymeasID)
{
    DateTime starttime = DateTime.Now;
    string results = String.Format("<br/>Begin processing. {0}", System.DateTime.Now.ToString());
    List <string> lines = DataImporter.ReadLinesFromFiles(filepath, filename);
    //TOFIX - now hardcoded for Sleep Pilot study
    List <int> chunk_studymeasID = new List <int> { 8015, 8015, 8015, 6998, -1, 8014 }; // -1 because we are not yet processing Marker/Score List
    //combine subject, actiwatch, and analysis inputs into the same table with 3 fields: parameter, value, units
    ActigraphChunkMarkers markers = new ActigraphChunkMarkers(studymeasID);
    markers.PopulateIndices(lines);
    for (int i = 0; i < markers.Count; i++)
    {
        int smID = 0; // markers[i].studymeasID;
        if (smID > 0) // never true while smID is hard-coded to 0 (see summary)
        {
            bool resetDaynum = false;
            string fldDaynum = "";
            DataImportSettings act_settings = new DataImportSettings(id, smID);
            act_settings.rowstoprocess = markers[i].linenumber_end - markers[i].linenumber_start;
            act_settings.markerstring = markers[i].text.Replace("-", "").Replace(" Properties", "");
            //Num rows to skip varies by section
            if (markers[i].text.Contains("Statistics")) { act_settings.skipstartingrows = 3; }
            else if (markers[i].text.Contains("Epoch")) { act_settings.skipstartingrows = 18; }
            else
            {
                act_settings.skipstartingrows = 1; //Because we are sending just the text to parse we just skip the header
            }
            //Set the constString for fields with mode = FieldExtractionMode.useMarkerString
            // (non-short-circuit '|' here; both sides are side-effect free)
            if (markers[i].text.Contains("Subject") | markers[i].text.Contains("Actiwatch") | markers[i].text.Contains("Analysis"))
            {
                act_settings.markerstring = markers[i].text.Replace("-", "").Replace(" Properties", "");
            }
            foreach (Importfield fld in act_settings.fields)
            {
                if (fld.mode == FieldExtractionMode.useMarkerString) { fld.constString = act_settings.markerstring; }
                // calcDayNum fields trigger the day-number rebase after parsing
                if (fld.mode == FieldExtractionMode.calcDayNum) { resetDaynum = true; fldDaynum = fld.field; }
            }
            Debug.WriteLine(String.Format(" >>>> 
{0} {1} {2}", act_settings.skipstartingrows, act_settings.rowstoprocess, markers[i].text));
            string text_to_parse;
            if (act_settings.measureID == 4853) //Actigraph Epoch
            {
                Debug.WriteLine(" ************** lines.Count = " + lines.Count.ToString());
                // Epoch data: drop rows flagged EXCLUDED before parsing
                List <string> lines_no_excluded = lines.GetRange(markers[i].linenumber_start - 1, (markers[i].linenumber_end - markers[i].linenumber_start));
                lines_no_excluded = lines_no_excluded.Where(f => !f.Contains("EXCLUDED")).ToList();
                Debug.WriteLine(" ************** lines_no_excluded.Count = " + lines_no_excluded.Count.ToString());
                text_to_parse = String.Join(Environment.NewLine, lines_no_excluded);
            }
            else
            {
                text_to_parse = String.Join(Environment.NewLine, lines.GetRange(markers[i].linenumber_start - 1, (markers[i].linenumber_end - markers[i].linenumber_start)));
            }
            DataImporter importer = new DataImporter(id, smID);
            //DataTable dt = DataImporter.GetDataTableFromText(text_to_parse, act_settings);
            DataTable dt = importer.GetDataTableFromText(text_to_parse, act_settings);
            results += "</br>" + markers[i].text + " >> Found " + dt.Rows.Count.ToString() + " records. >> ";
            if (dt.HasRows())
            {
                //reset Daynum to begin at 1. Needed because the actigraph starts a few days earlier before actual data is obtained
                if (resetDaynum & fldDaynum != "")
                {
                    int minDaynum = dt.AsEnumerable().Select(f => f.Field <int>(fldDaynum)).Min();
                    foreach (DataRow row in dt.Rows)
                    {
                        int tmp = Convert.ToInt32(row[fldDaynum]);
                        // shift so the smallest day number becomes 1
                        row[fldDaynum] = tmp + (-1 * (minDaynum - 1));
                    }
                }
                SQL_utils sql = new SQL_utils("data");
                results += sql.BulkInsert(dt, act_settings.tblname);
                sql.Close();
            }
            else { results += "</br> No Rows!!"; }
        }
    }
    double timeelapsed = Math.Round((DateTime.Now - starttime).TotalSeconds, 2);
    string daylightinfo_results = LogDaylightInfo(id, studymeasID, 0);
    results += String.Format("<br/>{0}", daylightinfo_results);
    results += String.Format("<br/>End processing. {0} {1} seconds.", System.DateTime.Now.ToString(), timeelapsed);
    return(results);
}
/// <summary>
/// Looks up historical weather for the lat/long entered on the page, shows the
/// hourly results in the grid, and bulk-inserts them into ALL_WeatherInfo.
/// Does nothing when either coordinate box is empty; refuses to call the API
/// once more than 950 calls have been made today (free-tier limit is 1000).
/// </summary>
public async Task GetWeather()
{
    string slat = txtlat.Text;
    string slon = txtlong.Text;

    if (slat != "" && slon != "") // was non-short-circuit '&'
    {
        // SECURITY: API key is hardcoded in source - move it to configuration
        // or a secret store and rotate the exposed key.
        var client = new DarkSkyService("9a9c19a4d9a62d812bf17136c99cc6cf");
        var numcalls = client.ApiCallsMade;

        //Don't go above 1000 for now to keep it all free
        if (numcalls > 950)
        {
            lblResults.Text = "You've made more than 950 requests today.";
        }
        else
        {
            DateTime d1 = Convert.ToDateTime(date1.Value);
            d1 = DateTime.SpecifyKind(d1, DateTimeKind.Local);
            DateTimeOffset date = d1;
            double lat = Convert.ToDouble(slat);
            double lon = Convert.ToDouble(slon);
            try
            {
                Forecast result = await client.GetTimeMachineWeatherAsync(lat, lon, date);
                DataTable dt = HourlyInfo(result);
                if (dt.Rows.Count > 0)
                {
                    grid.DataSource = dt;
                    grid.DataBind();
                    grid.Visible = true;

                    SQL_utils sql = new SQL_utils("data");
                    string bulkinsertresult = sql.BulkInsert(dt, "ALL_WeatherInfo");
                    sql.Close();
                    lblResults.Text = bulkinsertresult;
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine("** ERROR in TimeMachineWeatherAsync");
                Debug.WriteLine(ex.Message);
                // Log the request context that was previously built but discarded
                Debug.WriteLine(String.Format("Lat:{0} Long:{1} Date:{2}", lat, lon, date.ToString()));
                lblResults.Text += ex.Message;
            }
        }
    }
}