/// <summary>
/// Runs every error-check configuration whose schedule says it is due:
/// skips configs that ran recently, are outside their scheduled hour, or
/// outside their day of month. Stamps RunDT before processing so a second
/// pass within the same hour is a no-op. Per-config failures are logged and
/// emailed without stopping the remaining configs.
/// </summary>
static void ErrorCheckRun()
{
    lasttimeRun = DateTime.Now;
    try
    {
        using (var cd = new ConfigData())
        {
            cd.PopulateErrorList();
            foreach (var config in cd.ErrorListConfig)
            {
                // Compare scheduled hour / day-of-month and last run time to
                // decide whether this config is due.
                var error = new Error(config);
                if (hasRunWithin(config))
                {
                    continue;
                }
                if (isNotTheHour(config)) // wrong hour -> skip this config
                {
                    continue;
                }
                if (isNotDayOfMonth(config))
                {
                    continue;
                }
                // It's time to run: update RunDT first so we don't run again
                // within the same hour.
                cd.updateErrorRunDT(config);
                try
                {
                    var result = error.Process(config);
                    if (result.Length > 0)
                    {
                        Log($"ErrorCheck listed: {config.Name} time: {DateTime.Now:hh:mm:ss}");
                    }
                }
                catch (Exception ex)
                {
                    // FIX: these messages previously said "Export ..." -- a
                    // copy/paste from ExportRun -- which misattributed
                    // failures to the wrong stage in logs and emails.
                    var n = new LogError();
                    n.InsertError($"ErrorCheck Error on {config.Name} Exception: {ex.Message}", "not set yet", emailAddress, true);
                    Log($"ErrorCheck: {config.Name} Exception{ex.Message} time: {DateTime.Now:hh:mm:ss}");
                }
            }
        }
    }
    catch (Exception ex)
    {
        var n = new LogError();
        n.InsertError(ex.Message, "not set yet", emailAddress);
        Log(ex.Message + $" time: {DateTime.Now:hh:mm:ss}");
    }
}
/// <summary>
/// Returns the full path of the next file in <c>folder</c> (matching
/// <c>startFileName*</c>) that has not yet been processed. Files already in
/// the run list are archived on the way past -- copied to the archive
/// location and deleted, or just deleted if the archive copy already exists
/// -- and skipped. Returns an empty string when nothing is left to process.
/// </summary>
private string getFile()
{
    List<string> processed = getRunFiles();
    foreach (var path in Directory.GetFiles(folder, startFileName + "*"))
    {
        string nameOnly = path.Substring(path.LastIndexOf("\\") + 1);
        if (!processed.Any(f => f.Equals(nameOnly)))
        {
            // First not-yet-run file wins.
            return (path);
        }
        // Already processed: move it out of the inbox so it isn't seen again.
        try
        {
            if (config.archiveLocation != null && config.archiveLocation.Length > 2)
            {
                string target = config.archiveLocation + "\\" + nameOnly;
                if (File.Exists(target))
                {
                    FileDelete(path);
                }
                else
                {
                    File.Copy(path, target);
                    FileDelete(path);
                }
            }
        }
        catch (Exception ex)
        {
            var n = new LogError();
            n.InsertError("Copy to archive failed." + ex.Message, nameOnly, config.Email);
        }
    }
    return ("");
}
/// <summary>
/// Opens the next unprocessed file (via getFile) and positions the CSV parser
/// on its first record. Returns false when there is no file to process or the
/// file cannot be opened/parsed; failures are logged and emailed.
/// </summary>
public bool Read()
{
    filename = getFile();
    if (filename.Length == 0)
    {
        return (false);
    }
    StreamReader streamReader = null;
    try
    {
        streamReader = new StreamReader(File.OpenRead(filename));
        // On success the parser wraps the reader; the parser is released
        // later (parser.Dispose() in ImportData).
        parser = new CsvParser(streamReader);
        parser.Read();
    }
    catch (Exception ex)
    {
        // FIX: dispose the reader on failure -- previously the open file
        // handle leaked, which could block a later delete/archive of the file.
        streamReader?.Dispose();
        var le = new LogError();
        le.InsertError(ex.Message, "Reading File " + filename, config.Email);
        Console.WriteLine(ex.Message);
        return (false);
    }
    return (true);
}
// Ordered SQL type name for each CSV header column, in header order
// (filled by CheckFields, consumed by ImportData when formatting values).
public List <string> tt; //ordered type from csv

/// <summary>
/// Validates the CSV header (already read by <paramref name="r"/>'s parser)
/// against the destination table schema (config.Desto). Side effects: fills
/// <c>cil</c> (db columns, identity columns excluded), <c>rr</c> (cleaned CSV
/// header fields) and <c>tt</c> (SQL type per CSV column). Returns false when
/// a CSV field has no matching db column.
/// NOTE(review): on a schema-query exception the error is logged and the
/// method still returns true -- confirm a failed schema check is really meant
/// to let the import proceed.
/// </summary>
public bool CheckFields(ReadCsv r)
{
    rcsv = r;
    var csvColumns = new List <string>();
    int csvFieldCount = r.parser.FieldCount;
    using (IDbConnection dbConnection = ConnectionID)
    {
        //string sql = $"SELECT * FROM sys.columns WHERE object_id = OBJECT_ID('{config.Desto}') ";
        try
        {
            tt = new List <string>();
            // Pull column name/type metadata for the destination table from
            // the SQL Server system catalog.
            string sql = "SELECT QUOTENAME(SCHEMA_NAME(tb.[schema_id])) AS 'Schema' " +
                ",QUOTENAME(OBJECT_NAME(tb.[OBJECT_ID])) AS 'Table' " +
                ",C.NAME as 'Column'" +
                ",T.name AS 'Type',C.max_length ,C.is_nullable , C.is_identity as [identity] " +
                "FROM SYS.COLUMNS C INNER JOIN SYS.TABLES tb ON tb.[object_id] = C.[object_id] " +
                "INNER JOIN SYS.TYPES T ON C.system_type_id = T.user_type_id " +
                $"WHERE tb.[is_ms_shipped] = 0 and tb.[object_id] = OBJECT_ID('{config.Desto}')";
            dbConnection.Open();
            int tc = 0;
            string sTempRC;
            var data = dbConnection.Query(sql).ToList();
            cil = new List <ColumnInfo>();
            foreach (dynamic row in data)
            {
                tc++;
                // Identity columns are never imported from the CSV.
                if (row.identity)
                {
                    continue;
                }
                // NOTE(review): both branches assign the same value -- the
                // space-in-name case was presumably meant to clean the name
                // (see the disabled Replace); currently a no-op.
                if (row.Column.IndexOf(" ") > 0)
                {
                    sTempRC = row.Column; //.Replace("\"","");
                }
                else
                {
                    sTempRC = row.Column;
                }
                cil.Add(new ColumnInfo { name = sTempRC, type = row.Type });
            }
            // Split the raw header record. NOTE(review): a plain comma split
            // does not handle quoted, comma-containing column names.
            rr = r.parser.RawRecord.Split(',');
            for (int icnt = 0; icnt < rr.Length; icnt++)
            {
                tc++;
                // Clean up carriage return / line feed on the column names
                // (the last one may carry it) and strip embedded quotes.
                if (rr[icnt].IndexOf("\n") > 0) { rr[icnt] = rr[icnt].Replace("\n", ""); }
                if (rr[icnt].IndexOf("\r") > 0) { rr[icnt] = rr[icnt].Replace("\r", ""); }
                while (rr[icnt].IndexOf("\"") >= 0) { rr[icnt] = rr[icnt].Replace("\"", ""); }
                // Match the header field to a db column (case-insensitive).
                var lu = lookup(rr[icnt].ToLower());
                if (lu == null)
                {
                    var le = new LogError();
                    le.InsertError($"error field {rr[icnt]} not exist in schema {config.Name}", r.filename, config.Email, false);
                    Program.Log($"error field {rr[icnt]} not exist in schema {config.Name} filename - {r.filename}");
                    return(false);
                }
                Debug.WriteLine(" column num:" + tc);
                // Record the SQL type in the same order as the CSV columns.
                tt.Add(lu.type);
            }
            foreach (string cs in rr)
            {
                var rcomp = cs;
                csvColumns.Add(rcomp.ToLower());
            }
            // NOTE(review): this pass re-validates the same header fields via
            // checkColumn; appears redundant with the lookup() loop above.
            foreach (string col in csvColumns)
            {
                var test = checkColumn(col.ToLower());
                if (!test) //checks column exist in db schema
                {
                    var le = new LogError();
                    le.InsertError($"error field {col} not exist in schema " + config.Name, r.filename, config.Email, false);
                    return(false);
                }
            }
            return(true);
        }
        catch (Exception ex)
        {
            var le = new LogError();
            le.InsertError(ex.Message, r.filename, config.Email, false);
            Program.Log(ex.Message + $" filename {r.filename} time:{DateTime.Now:hh:mm:ss}");
        }
    }
    // Reached only after a logged exception above.
    return(true);
}
/// <summary>
/// Convenience wrapper: records an error message plus context string against
/// the configured email address.
/// </summary>
private void error(string e, string second)
{
    new LogError().InsertError(e, second, config.Email);
}
/// <summary>
/// Streams the remaining CSV records (the header was consumed by CheckFields)
/// into the staging table config.Desto, one INSERT per row, appending
/// sourcefilename and DateImported columns. Writes a summary row to
/// dbo.ImportSourceLog, then optionally executes the config.MoveProc stored
/// procedure.
/// SECURITY NOTE(review): SQL is built by string concatenation. Char values
/// only escape single quotes; non-char/non-date values and the file name are
/// concatenated raw, so malformed or malicious CSV content can break -- or
/// inject into -- the statement. Parameterized commands would be safer.
/// </summary>
public void ImportData(Config config)
{
    //because this is a staging table, lets first delete the records by truncating table.
    //using (IDbConnection dbConnection = ConnectionID)
    //{
    //    //string sqlT = $"truncate table {config.Desto}";
    //    //dbConnection.Execute(sqlT);
    //}
    var datastr = new StringBuilder();
    // Import timestamp shared by every row of this file.
    // NOTE(review): "hh" is a 12-hour clock with no AM/PM designator --
    // confirm "HH" was not intended.
    string dt = DateTime.Now.ToString("yyyy-MM-dd hh:mm:ss");
    //string qry = "SELECT top 1 * FROM ImportData." + config.Desto;
    // Build the INSERT column list from the CSV header (rr); names containing
    // a space are bracket-quoted.
    string parms = null;
    string fieldName;
    foreach (var s in rr)
    {
        if (s.IndexOf(" ") > 0)
        {
            fieldName = "[" + s + "]";
        }
        else
        {
            fieldName = s;
        }
        parms += fieldName + ",";
    }
    parms += "sourcefilename,DateImported";
    //parms = parms.Remove(parms.Length - 1);
    int reccount = 0;
    using (IDbConnection dbConnection = ConnectionID)
    {
        dynamic csvr = rcsv.parser.Read();
        while (csvr != null && csvr.Length > 0)
        {
            int i = 0;
            // Format each field according to the SQL type captured in tt
            // (filled by CheckFields, same column order as the CSV).
            foreach (string d in csvr)
            {
                if (tt[i].Contains("char"))
                {
                    // char/varchar types: quote, doubling embedded single
                    // quotes; blank becomes NULL.
                    if (d.Trim().Length != 0)
                    {
                        datastr.Append("'" + d.Replace("'", "''") + "',");
                    }
                    else
                    {
                        datastr.Append("null,");
                    }
                }
                else if (tt[i].Contains("date"))
                {
                    // date types: NULL for blanks, unparseable values, or
                    // years before 1901 (treated as placeholder dates).
                    if (d.Trim().Length < 2)
                    {
                        datastr.Append("null,");
                    }
                    else
                    {
                        DateTime ddd;
                        if (DateTime.TryParse(d, out ddd))
                        {
                            //DateTime ddd = Convert.ToDateTime(d);
                            if (ddd.Year < 1901)
                            {
                                datastr.Append("null,");
                            }
                            else
                            {
                                datastr.Append("'" + d + "',");
                            }
                        }
                        else
                        {
                            datastr.Append("null,");
                        }
                    }
                }
                else
                {
                    // Everything else (numerics etc.) is appended unquoted and
                    // unescaped -- see SECURITY NOTE above.
                    if (d.Trim().Length != 0)
                    {
                        datastr.Append(d + ",");
                    }
                    else
                    {
                        datastr.Append("null,");
                    }
                }
                i++;
            }
            datastr.Append("'" + rcsv.filename + "','" + dt + "'");
            //data = data.Remove(data.Length - 1);
            string sql = $"insert into {config.Desto} ({parms}) values ({datastr})";
            datastr.Length = 0; // reset the builder for the next row
            try
            {
                dbConnection.Execute(sql);
            }
            catch (Exception e)
            {
                // A failed row is logged and skipped; the import continues.
                var le = new LogError();
                le.InsertError("insert error: " + e.Message, rcsv.filename, config.Email);
                Program.Log($"error at csv line: {reccount}");
                Program.Log($"Sql Execute Error: {sql}");
                Program.Log($"error: {e.Message} file: {rcsv.filename}");
            }
            reccount++;
            csvr = rcsv.parser.Read();
        }
        // Record a summary entry for this file, then release the parser.
        string message = $"'{reccount} Records added'";
        string fnOnly = rcsv.filename.Substring(rcsv.filename.LastIndexOf("\\") + 1);
        string tempFn = $"'{fnOnly}'";
        string sqlLog = $"insert into dbo.ImportSourceLog (Daterun,Message,Filename) values (getdate(),{message},{tempFn})";
        dbConnection.Execute(sqlLog);
        Program.Log($"Completed inserting data for {rcsv.filename}, record count: {reccount}");
        rcsv.parser.Dispose();
    }
    using (IDbConnection dbConnectionService = Connection)
    {
        try
        {
            // Optional post-import stored procedure (config.MoveProc).
            if (config.MoveProc != null && config.MoveProc.Length > 2)
            {
                dbConnectionService.Execute(config.MoveProc, commandType: CommandType.StoredProcedure, commandTimeout: 500);
            }
        }
        catch (Exception ex)
        {
            Program.Log($"error at Stored proc {config.MoveProc} Exception:{ex.Message}");
        }
    }
}
/// <summary>
/// Runs every export configuration that is due according to its schedule:
/// skips configs that ran recently, are outside their scheduled hour, or
/// outside their day of month. Stamps RunDT before processing so a second
/// pass within the same hour is a no-op. Per-config failures are logged and
/// emailed without stopping the remaining configs.
/// (A commented-out FTP upload of the exported file used to live here.)
/// </summary>
static void ExportRun()
{
    lasttimeRun = DateTime.Now;
    Log($"Export Run: {lasttimeRun:hh:mm:ss}");
    // Read the ImportSource table for export parameters.
    try
    {
        using (var cd = new ConfigData())
        {
            cd.PopulateExportList();
            foreach (var config in cd.ExportListConfig)
            {
                var export = new Export(config);
                // Not due yet? Skip (short-circuit keeps the original
                // check order: ran-recently, then hour, then day of month).
                if (hasRunWithin(config) || isNotTheHour(config) || isNotDayOfMonth(config))
                {
                    continue;
                }
                // Update RunDT first so we don't run again within the hour.
                cd.updateExportRunDT(config);
                try
                {
                    export.Process(config);
                }
                catch (Exception ex)
                {
                    var n = new LogError();
                    n.InsertError($"Export Error on {config.Name} Exception: {ex.Message}", "not set yet", emailAddress, true);
                    Log($"Export: {config.Name} Exception{ex.Message} time: {DateTime.Now:hh:mm:ss}");
                }
            }
        }
    }
    catch (Exception ex)
    {
        var n = new LogError();
        n.InsertError(ex.Message, "not set yet", emailAddress);
        Log(ex.Message + $" time: {DateTime.Now:hh:mm:ss}");
    }
}
/// <summary>
/// Main import pass: for each import configuration, processes every pending
/// CSV file -- validate the header, import the rows, then archive and delete
/// the file. A file that can be neither archived nor deleted aborts the run
/// (otherwise Read() would return it forever). Passing "testemail" as the
/// first argument just sends a test error email and returns.
/// </summary>
static void ImportRun(string[] args)
{
    lasttimeRun = DateTime.Now;
    Log($"Main: {lasttimeRun:hh:mm:ss}");
    // Read the ImportSource table for parameters.
    try
    {
        if (args.Length > 0 && args[0] == "testemail")
        {
            var te = new LogError();
            te.InsertError("test email", "na", emailAddress, true);
            return;
        }
        // FIX: ConfigData was never disposed here, unlike ErrorCheckRun and
        // ExportRun which both wrap it in using.
        using (var cd = new ConfigData())
        {
            cd.PopulateList();
            foreach (var config in cd.ImportListConfig)
            {
                var rcsv = new ReadCsv(config);
                // Read() returns false once no unprocessed file remains.
                while (rcsv.Read())
                {
                    try
                    {
                        config.Email = emailAddress;
                        var import = new Import(config);
                        if (import.CheckFields(rcsv))
                        {
                            import.ImportData(config);
                        }
                        import.Dispose();
                    }
                    catch (Exception ex)
                    {
                        var n = new LogError();
                        n.InsertError(ex.Message, rcsv.filename, emailAddress);
                    }
                    try
                    {
                        if (config.archiveLocation != null && config.archiveLocation.Length > 2)
                        {
                            // Archive the processed file (skip the copy when
                            // it already exists), then remove it.
                            string target = config.archiveLocation + "\\" + Path.GetFileName(rcsv.filename);
                            if (!File.Exists(target))
                            {
                                File.Copy(rcsv.filename, target);
                            }
                            ReadCsv.FileDelete(rcsv.filename);
                        }
                    }
                    catch (Exception ex)
                    {
                        var n = new LogError();
                        try
                        {
                            // Archive failed: still delete the file to
                            // prevent an endless re-processing loop.
                            ReadCsv.FileDelete(rcsv.filename);
                            Log($"delete file {rcsv.filename} time: {DateTime.Now:hh:mm:ss}");
                        }
                        catch (Exception ex2)
                        {
                            n.InsertError("Delete file failed.", rcsv.filename, emailAddress);
                            Log($"delete failed {rcsv.filename} exception: {ex2.Message} time: {DateTime.Now:hh:mm:ss}");
                            // FIX: throw a specific exception type rather than
                            // the base Exception; the outer handler catches it
                            // and ends the run.
                            throw new InvalidOperationException("ERROR! Get out of loop");
                        }
                        n.InsertError("Copy to archive failed.", rcsv.filename, emailAddress);
                        Log($"copy failed {rcsv.filename} exception: {ex.Message} time: {DateTime.Now:hh:mm:ss}");
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        var n = new LogError();
        n.InsertError(ex.Message, "not set yet", emailAddress);
        Log(ex.Message + $" time: {DateTime.Now:hh:mm:ss}");
    }
}