/// <summary>
/// Called when the caller wants to parse one row at a time. Initiates a new job based on the input parameters.
/// Either dataSourceId must contain a valid data source id or dataSourceName must be a valid data source name.
/// </summary>
/// <param name="dataSourceId">A valid data source id. If this is filled, then dataSourceName is optional.</param>
/// <param name="dataSourceName">A valid data source name. If this is filled, then dataSourceId is optional.</param>
/// <param name="processingBy">User name of the person processing. We could get this from context, but for scenarios like 'a126042, Deb'jyoti Das', we let the caller decide the user name.</param>
/// <returns>Returns the process identifier as a GUID string, or string.Empty on failure.</returns>
public string InitializeJob(int dataSourceId, string dataSourceName, string processingBy)
{
    if ((dataSourceId <= 0) && (string.IsNullOrEmpty(dataSourceName)))
    {
        ExtensionMethods.TraceInformation("Job could not be initialized, data source is not defined. Id = {0}, name = '{1}'", dataSourceId, dataSourceName);
        return string.Empty; //fail response
    }

    try
    {
        Job newJob = new Job(dataSourceId, dataSourceName, processingBy);
        if (!newJob.IsValid)
        {
            ExtensionMethods.TraceInformation("Job could not be initialized, data source is not defined. Id = {0}, name = '{1}'", dataSourceId, dataSourceName);
            return string.Empty; //fail response
        }

        Registry.Instance.Entries.Add(newJob.JobIdentifier, newJob);
        Trace.Flush();
        return newJob.JobIdentifier;
    }
    catch (Exception ex)
    {
        string errorMessage = ex.ToString() + (ex.InnerException == null ? string.Empty : ex.InnerException.Message);
        ExtensionMethods.TraceError(errorMessage);
        Trace.Flush();
        throw; //rethrow without resetting the stack trace
    }
}
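// A minimal caller sketch for InitializeJob. The engine handle ("engine"), and how rows are
// subsequently pushed, are assumptions for illustration only; the source above only defines
// the initialize-and-return-GUID contract.
void StartRowByRowProcessingSketch(dynamic engine)
{
    // Initialize by data source id; the name can be empty when the id is valid.
    string jobId = engine.InitializeJob(42, string.Empty, "a126042, Deb'jyoti Das");
    if (string.IsNullOrEmpty(jobId))
    {
        // string.Empty is the documented fail condition.
        Console.WriteLine("Job could not be initialized; check the data source id/name.");
        return;
    }
    Console.WriteLine("Job initialized with identifier {0}", jobId);
    // The returned GUID is the handle for subsequent per-row parse calls (not shown in the source).
}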
public override IdpeMessage Parse(bool onlyConstraints)
{
    try
    {
        if (!onlyConstraints)
        {
            _IsParsed = true;
            ExtensionMethods.TraceInformation("Validating 'Not Referenced' type value '{0}', executing query '{1}' with connection '{2}'", Value, Formula, ConnectionString);
            if (SqlClientManager.CheckReferenceKey(ConnectionString, DatabaseType, Formula, Value))
            {
                this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_NOT_REFERENCED_TYPE_DATA_RESULT_FOUND);
                this._ParseResult.Message = string.Format(this._ParseResult.Message, PrintRowColPosition(), Value, ColumnName);
                return this._ParseResult;
            }
            else
            {
                this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_NOT_REFERENCED_TYPE_DATA_RESULT_NOT_FOUND);
                return this._ParseResult;
            }
        }
    }
    catch (Exception ex)
    {
        ExtensionMethods.TraceError(ex.ToString());
    }
    return this._ParseResult;
}
public override IdpeMessage Parse(bool onlyConstraints) { try { this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_SUCCESS); if (base.Value.ToUpper() != "NULL") { if (ValueDateTime == null) { TryExtractingSpecificType(); if (ValueDateTime == null) //still null, then throw { this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_DATE_TYPE_DATA_VALIDATION_FAILED); this._ParseResult.Message = string.Format(this._ParseResult.Message, PrintRowColPosition(), Type, base.Value); return(this._ParseResult); } } CheckConstraints(); } } catch (Exception ex) { ExtensionMethods.TraceError(ex.ToString()); } _IsParsed = true; return(this._ParseResult); }
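// TryExtractingSpecificType is not shown above; a minimal sketch of what such a fallback
// typically looks like, assuming a fixed list of accepted formats. The format list and the
// nullable return (mirroring ValueDateTime) are illustrative assumptions, not taken from the
// source. Requires System and System.Globalization.
static DateTime? TryParseWithKnownFormatsSketch(string raw)
{
    string[] formats = { "yyyy-MM-dd", "dd/MM/yyyy", "MM/dd/yyyy HH:mm:ss", "yyyyMMdd" };
    DateTime parsed;
    if (DateTime.TryParseExact(raw, formats, System.Globalization.CultureInfo.InvariantCulture,
                               System.Globalization.DateTimeStyles.None, out parsed))
    {
        return parsed; // matched one of the accepted formats
    }
    return null;       // still null: the caller raises IDPE_DATE_TYPE_DATA_VALIDATION_FAILED
}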
void FileCopy(string fromFileName, string toFileName, bool move) { try { DateTime fileReceived = DateTime.Now; File.Copy(fromFileName, toFileName); while (true) { if (FileDownloadCompleted(toFileName)) { if (move) { File.Delete(fromFileName); } return; } // Calculate the elapsed time and stop if the maximum retry // period has been reached. TimeSpan timeElapsed = DateTime.Now - fileReceived; if (timeElapsed.TotalMinutes > IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.RetryTimeOut) { ExtensionMethods.TraceError("The file \"{0}\" could not be copied.", fromFileName); return; } Thread.Sleep(300); } } catch (Exception ex) { ExtensionMethods.TraceError("An unknown error occurred!. {0}. {1} This error needs immediate attention", ex.ToString(), Environment.NewLine + Environment.NewLine); Trace.Flush(); } }
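// FileDownloadCompleted is not shown above; a common implementation is to try opening the file
// exclusively and treat an IOException as "still being written/locked". A self-contained sketch
// of that wait-until-stable pattern (the 300 ms poll mirrors the loop above; the timeout
// parameter stands in for the LocalFileWatcher.RetryTimeOut setting and everything else is an
// assumption for illustration). Requires System, System.IO and System.Threading.
static bool WaitUntilFileIsReleasedSketch(string path, double timeoutMinutes)
{
    DateTime started = DateTime.Now;
    while ((DateTime.Now - started).TotalMinutes <= timeoutMinutes)
    {
        try
        {
            // Exclusive open succeeds only when no other process holds a handle on the file.
            using (File.Open(path, FileMode.Open, FileAccess.Read, FileShare.None))
            {
                return true;
            }
        }
        catch (IOException)
        {
            Thread.Sleep(300); // still locked, poll again
        }
    }
    return false; // timed out; the caller logs "could not be copied"
}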
public override IdpeMessage Parse(bool onlyConstraints) { try { if (_IsHavingSqlQuery && !onlyConstraints) { ExtensionMethods.TraceInformation("Generating 'Generated' type '{0}', executing query '{1}'", ColumnName, Formula); bool isErrored = false; _Value = SqlClientManager.ExecuteQuery(ConnectionString, DatabaseType, Formula, ref isErrored); if (isErrored) { this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_GENERATED_TYPE_DATA_VALIDATION_FAILED); this._ParseResult.Message = string.Format(this._ParseResult.Message, PrintRowColPosition(), string.Empty, ColumnName); } } CheckConstraints(); } catch (Exception ex) { this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_GENERATED_TYPE_DATA_VALIDATION_FAILED); this._ParseResult.Message = string.Format(this._ParseResult.Message, PrintRowColPosition(), Formula, ColumnName); ExtensionMethods.TraceError(ex.ToString()); } _IsParsed = true; return(this._ParseResult); }
UnhandledExceptionAction WorkerManagerUnhandledException(WorkflowApplicationUnhandledExceptionEventArgs e) { ExtensionMethods.TraceError("OnUnhandledException in WorkerManager {0}\n{1}", e.InstanceId, e.UnhandledException.Message); Trace.Flush(); return(UnhandledExceptionAction.Terminate); }
void WorkerManagerAborted(WorkflowApplicationAbortedEventArgs e) { ExtensionMethods.TraceError("WorkerManager {0} Aborted.", e.InstanceId); ExtensionMethods.TraceError("Exception: {0}\n{1}", e.Reason.GetType().FullName, e.Reason.Message); Trace.Flush(); eventDone.Set(); }
public void FileCopy(string fromFileName, string toFileName, bool move)
{
    try
    {
        DateTime fileReceived = DateTime.Now;
        if (File.Exists(fromFileName))
        {
            File.Copy(fromFileName, toFileName);
            while (true)
            {
                if ((CurrentFile != null) && (!File.Exists(CurrentFile)))
                {
                    if (File.Exists(fromFileName))
                    {
                        new FileUtility().Delete(fromFileName);
                    }
                    break; //multi-instance scenario - must already have been taken care of by another instance
                }

                if (FileDownloadCompleted(toFileName))
                {
                    if (move)
                    {
                        new FileUtility().Delete(fromFileName);
                    }
                    return;
                }

                // Calculate the elapsed time and stop if the maximum retry
                // period has been reached.
                TimeSpan timeElapsed = DateTime.Now - fileReceived;
                if (timeElapsed.TotalMinutes > IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.RetryTimeOut)
                {
                    ExtensionMethods.TraceError("The file \"{0}\" could not be copied.", fromFileName);
                    return;
                }

                Thread.Sleep(300);
            }
        }
    }
    catch (IOException ioe)
    {
        //We can eat this exception: it is mostly observed in multi-instance scenarios, where an access
        //violation occurs because the file has already been taken/processed by another instance.
        Trace.TraceError(ioe.Message);
        Trace.Flush();
    }
    catch (Exception ex)
    {
        ExtensionMethods.TraceError(ex.ToString());
        Trace.Flush();
    }
}
public void Process(string fileFullName) { string fileName = Path.GetFileName(fileFullName); //move file string moveTo = Path.Combine(LocalFileSystemFolderArchive, fileName); Directory.CreateDirectory(Path.GetDirectoryName(moveTo)); string renamedToIdentifier = Guid.NewGuid().ToString(); if (File.Exists(moveTo)) { string moveToBUName = Path.Combine(LocalFileSystemFolderArchive, string.Format("{0}_{1}", renamedToIdentifier, fileName)); new FileUtility().FileCopy(moveTo, moveToBUName, true); //backup existing } new FileUtility().FileCopy(fileFullName, moveTo, true); //move file if (this.DataSourceParameters == null) { this.DataSourceParameters = new Dictionary <string, object>(); } DataSourceParameters.Clear(); DataSourceParameters.Add("DataSourceId", this.DataSourceId); FileSystemWatcherEventArgs e = new FileSystemWatcherEventArgs(DataSourceParameters, moveTo, renamedToIdentifier); try { //if (File.Exists(moveTo)) //{ // using (StreamReader sr = new StreamReader(moveTo)) // { // e.FileContent = sr.ReadToEnd(); // sr.Close(); // } // InvokeFileDownloaded(e); //} InvokeFileDownloaded(e); } catch (BusinessException ex) { ExtensionMethods.TraceError(ex.ToString()); Trace.Flush(); } catch (Exception ex) { ExtensionMethods.TraceError("An unknown error occurred!. {0}. {1} This error needs immediate attention", ex.ToString(), Environment.NewLine + Environment.NewLine); Trace.Flush(); } }
private void RetrieveFromDB() { try { Dictionary <string, string> thisParams = _SQLClientManager.GenerateParameters(this._DataSourceId); foreach (KeyValuePair <string, string> entry in thisParams) { this._AttributeValuePair.Add(entry.Key, entry.Value); } } catch (Exception ex) { ExtensionMethods.TraceError("Error while parsing generated parameters " + Environment.NewLine + ex.Message + Environment.NewLine + ex.StackTrace); } }
public override IdpeMessage Parse(bool onlyConstraints) { try { this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_SUCCESS); CheckConstraints(); } catch (Exception ex) { this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_STRING_TYPE_DATA_VALIDATION_FAILED); this._ParseResult.Message = string.Format(this._ParseResult.Message, PrintRowColPosition(), Type, Value); ExtensionMethods.TraceError(ex.ToString()); } _IsParsed = true; return(this._ParseResult); }
public Attribute this[string columnName] { get { Attribute foundAttribute = (from a in this where a.Name.Equals(columnName, StringComparison.OrdinalIgnoreCase) select a).SingleOrDefault(); if (foundAttribute == null) { ExtensionMethods.TraceError("No attribute found with name {0}.{1}{2}", columnName, Environment.NewLine, new StackTrace().ToString()); } return(foundAttribute); } }
/// <summary>
/// This mainly handles instantiation of the custom trace listener (if possible). An incorrect file path or a
/// permission issue may cause instantiation of a custom trace listener to fail when it is configured via the
/// web.config system.diagnostics settings. Most methods would then throw errors, because they contain Trace
/// statements, and no operation would be performed. This is the place to initialize the trace listener silently,
/// or else eat the exception and keep quiet.
/// </summary>
static Idpe()
{
    try
    {
#if WEB
        AppDomain.CurrentDomain.SetData("SQLServerCompactEditionUnderWebHosting", true);
#endif
        Init();
        Trace.Flush();
    }
    catch (Exception ex)
    {
        string errorMessage = ex.ToString() + (ex.InnerException == null ? string.Empty : ex.InnerException.Message);
        ExtensionMethods.TraceError(errorMessage);
        Trace.Flush();
    }
}
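// Init() is not shown above; a minimal sketch of the kind of work it presumably does —
// registering a file-based trace listener programmatically so that a bad path or missing
// write permission surfaces as a catchable exception instead of breaking every traced method.
// The log path and listener name are assumptions for illustration. Requires System.Diagnostics
// and System.IO.
static void InitTraceListenerSketch()
{
    string logPath = Path.Combine(Path.GetTempPath(), "idpe.log"); // assumed location
    var listener = new TextWriterTraceListener(logPath, "IdpeFileListener");
    Trace.Listeners.Add(listener);
    Trace.AutoFlush = true;
    Trace.TraceInformation("Trace listener initialized at {0}", logPath);
}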
public GlobalEventsOnComplete this[string name] { get { GlobalEventsOnComplete gec = (from a in this where a.Name.ToLower().Equals(name.ToLower()) select a).SingleOrDefault(); if (gec == null) { ExtensionMethods.TraceError("GEC: No gec found with name {0}.{1}{2}", name, Environment.NewLine, new StackTrace().ToString()); } return(gec); } }
internal void WorkerAborted(WorkflowApplicationAbortedEventArgs e)
{
    //CompleteJobIfAllWorkersCompleted();
    if (!OneAbortMessageLogged)
    {
        //because we don't want to send the same message for every worker
        StringBuilder sb = new StringBuilder();
        sb.AppendLine(string.Format("Fatal Error: A worker {0} was aborted. Forcefully aborting the job!", e.InstanceId));
        sb.AppendLine();
        sb.Append(string.Format("Reason was: {0}\n{1}", e.Reason.GetType().FullName, e.Reason.ToString()));
        ExtensionMethods.TraceError(sb.ToString());
        Trace.Flush();
        OneAbortMessageLogged = true;
    }
    AbortJob();
}
void ExecuteQuery(DataSource dataSource, IdpeKey connectionStringKey, string updateQuery) { //IdpeKey connectionStringKey = dataSource.Keys.GetKey(connectionStringKeyName); //if (connectionStringKey == null) // throw new KeyNotFoundException(string.Format("The connection string '{0}' was not defined!", connectionStringKeyName)); DatabaseTypes databaseType = connectionStringKey.GetDatabaseType(); string actualConnectionString = connectionStringKey.Value; IDal myDal = new DataAccessLayer(databaseType).Instance; IDbConnection conn = myDal.CreateConnection(actualConnectionString); conn.Open(); IDbTransaction transaction = myDal.CreateTransaction(conn); IDbCommand commandUpdate = myDal.CreateCommand(); commandUpdate.Connection = conn; commandUpdate.Transaction = transaction; commandUpdate.CommandText = new CommandParser(dataSource).Parse(updateQuery); try { commandUpdate.ExecuteNonQuery(); transaction.Commit(); } catch (Exception ex) { transaction.Rollback(); ExtensionMethods.TraceError(ex.ToString()); throw new Exception(ex.Message, ex); } finally { if (conn.State == ConnectionState.Open) { conn.Close(); } conn.Dispose(); commandUpdate.Dispose(); } }
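// The same transactional update, sketched with using blocks so the connection, transaction and
// command are disposed even on the failure path (the method above closes the connection in
// finally but never disposes the transaction). Written against the plain ADO.NET interfaces;
// the IDal/CommandParser plumbing is kept as in the source, so this is a pattern sketch, not a
// drop-in replacement. Requires System.Data.
void ExecuteQuerySketch(IDal myDal, string connectionString, string parsedUpdateQuery)
{
    using (IDbConnection conn = myDal.CreateConnection(connectionString))
    {
        conn.Open();
        using (IDbTransaction transaction = myDal.CreateTransaction(conn))
        using (IDbCommand commandUpdate = myDal.CreateCommand())
        {
            commandUpdate.Connection = conn;
            commandUpdate.Transaction = transaction;
            commandUpdate.CommandText = parsedUpdateQuery;
            try
            {
                commandUpdate.ExecuteNonQuery();
                transaction.Commit();
            }
            catch
            {
                transaction.Rollback(); // undo the partial update
                throw;                  // preserve the original stack trace
            }
        }
    }
}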
List<string> DirSearch(string sDir)
{
    List<string> existingFiles = new List<string>();
    try
    {
        foreach (string d in Directory.GetDirectories(sDir))
        {
            foreach (string f in Directory.GetFiles(d))
            {
                existingFiles.Add(f);
            }
            //recurse into subdirectories and keep their files as well
            existingFiles.AddRange(DirSearch(d));
        }
    }
    catch (Exception ex)
    {
        ExtensionMethods.TraceError(ex.ToString());
    }
    return existingFiles;
}
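// An equivalent recursive listing in a single BCL call, for comparison. Note that DirSearch
// above only returns files inside subdirectories of sDir, while this sketch returns every file
// under the root including sDir itself, so it is a slightly broader sketch rather than a
// behavioural match. Requires System.Collections.Generic and System.IO.
static List<string> DirSearchFlatSketch(string sDir)
{
    try
    {
        // "*" pattern with AllDirectories: all files in sDir and every subdirectory.
        return new List<string>(Directory.GetFiles(sDir, "*", SearchOption.AllDirectories));
    }
    catch (Exception ex)
    {
        ExtensionMethods.TraceError(ex.ToString());
        return new List<string>();
    }
}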
private string[] GetFilesList(string ftpFolderPath, string userName, string password, string dataSourceId) { try { FtpWebRequest Request; FtpWebResponse Response; Request = (FtpWebRequest)FtpWebRequest.Create(new Uri(ftpFolderPath)); Request.Credentials = new NetworkCredential(userName, password); Request.Proxy = null; Request.Method = WebRequestMethods.Ftp.ListDirectory; Request.UseBinary = true; Response = (FtpWebResponse)Request.GetResponse(); StreamReader reader = new StreamReader(Response.GetResponseStream()); string Data = reader.ReadToEnd(); return(Data.Split('\n')); } catch (Exception ex) { string errorMessage = string.Format("Error while pulling data from FTP! DataSource Id= {0} URL = {1}, User Name= {2}, Password = ******", dataSourceId, ftpFolderPath, userName); ExtensionMethods.TraceError(errorMessage + Environment.NewLine + ex.ToString()); return(new List <string>().ToArray()); } }
public static DataTable ReadFile(string fileName, string delimiter = ",", bool firstRowIsHeader = true) { if (!File.Exists(fileName)) { return(new DataTable()); } TextFieldParser parser = new TextFieldParser(fileName); parser.TextFieldType = FieldType.Delimited; parser.SetDelimiters(delimiter); bool columnInformationTaken = false; DataTable table = new DataTable(); bool failed = false; while (!parser.EndOfData) { string[] fields = parser.ReadFields(); if (!columnInformationTaken) { if (firstRowIsHeader) { foreach (string field in fields) { table.Columns.Add(field); } } else { for (int c = 1; c <= fields.Length; c++) { table.Columns.Add("Column" + c.ToString()); } table.Rows.Add(fields); } columnInformationTaken = true; } else { if (table.Columns.Count != fields.Length) { ExtensionMethods.TraceError("Data source attribute count and data fed is not equal, could not parse data!"); failed = true; break; } table.Rows.Add(fields); } } parser.Close(); if (failed) { parser.Dispose(); parser = null; table.Dispose(); table = null; return(new DataTable()); } else { return(table); } }
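// A usage sketch for ReadFile: load a delimited file and walk the resulting DataTable. The file
// path is an illustrative assumption; an empty table is the documented result for a missing
// file or a column-count mismatch. Requires System and System.Data.
static void PrintCsvSketch()
{
    DataTable table = ReadFile(@"C:\temp\feed.csv", ",", firstRowIsHeader: true); // assumed path
    if (table.Rows.Count == 0)
    {
        Console.WriteLine("Nothing parsed: file missing, empty, or field count did not match the header.");
        return;
    }
    foreach (DataRow row in table.Rows)
    {
        // Columns are named from the header row (or Column1..ColumnN when there is no header).
        Console.WriteLine(row[0]);
    }
}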
/// <summary> /// Processes a file by (1) moving file to archive folder, and then (2) invoking FileDownloaded event /// </summary> /// <param name="fileFullName"></param> /// <param name="fileName"></param> /// <param name="dataSourceId"></param> /// <param name="handleArchive"></param> public void Process(string fileFullName, string fileName, int dataSourceId, bool handleArchive = true) { if (fileName.Contains("\\")) { fileName = fileName.Substring(fileName.LastIndexOf("\\") + 1); } if (dataSourceId == 0) { int tempInt = 0; int.TryParse(Directory.GetParent(fileFullName).Name, out tempInt); if (tempInt == 0) { throw new Exception(string.Format("File '{0}' dropped on wrong location!", fileFullName)); } dataSourceId = tempInt; } lock (_lock) { string moveTo = fileFullName; string renamedToIdentifier = Guid.NewGuid().ToString(); if (handleArchive) { string archiveLoc = ArchiveLocation + "\\" + dataSourceId + "\\" + DateTime.Now.ToString("yyyyMMdd"); //move file moveTo = Path.Combine(archiveLoc, fileName); Directory.CreateDirectory(Path.GetDirectoryName(moveTo)); if (File.Exists(moveTo)) { string moveToBUName = Path.Combine(archiveLoc, string.Format("{0}_{1}", renamedToIdentifier, fileName)); FileCopy(moveTo, moveToBUName, true); //backup existing } FileCopy(fileFullName, moveTo, true); //move file } if (this.DataSourceParameters == null) { this.DataSourceParameters = new Dictionary <string, object>(); } DataSourceParameters.Clear(); DataSourceParameters.Add("DataSourceId", dataSourceId); FileSystemWatcherEventArgs e = new FileSystemWatcherEventArgs(DataSourceParameters, moveTo, renamedToIdentifier); try { InvokeFileDownloaded(e); } catch (BusinessException ex) { ExtensionMethods.TraceError(ex.ToString()); Trace.Flush(); } catch (Exception ex) { ExtensionMethods.TraceError("An unknown error occurred!. {0}. {1} This error needs immediate attention", ex.ToString(), Environment.NewLine + Environment.NewLine); Trace.Flush(); } } }
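// The archive step above in isolation: move a processed file into
// <archiveRoot>\<dataSourceId>\<yyyyMMdd>, backing up any same-named file already there by
// prefixing it with a fresh GUID. A standalone sketch using File.Move instead of the FileCopy
// helper; the archiveRoot parameter stands in for ArchiveLocation. Requires System and System.IO.
static string ArchiveSketch(string fileFullName, string archiveRoot, int dataSourceId)
{
    string fileName = Path.GetFileName(fileFullName);
    string archiveFolder = Path.Combine(archiveRoot, dataSourceId.ToString(), DateTime.Now.ToString("yyyyMMdd"));
    Directory.CreateDirectory(archiveFolder);

    string moveTo = Path.Combine(archiveFolder, fileName);
    if (File.Exists(moveTo))
    {
        // Same file name archived earlier today: keep it, renamed with a unique prefix.
        string backupName = Path.Combine(archiveFolder, string.Format("{0}_{1}", Guid.NewGuid(), fileName));
        File.Move(moveTo, backupName);
    }

    File.Move(fileFullName, moveTo);
    return moveTo; // the path handed to FileSystemWatcherEventArgs / InvokeFileDownloaded
}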
public override IdpeMessage Parse(bool onlyConstraints) { try { string code = ParseFormulaGetCode(Formula); _ReferenceKey = string.Empty; string sqlErrorMessage = string.Empty; if (Registry.Instance.CodeSets != null) { CodeSet thisCodeSet = (from cs in Registry.Instance.CodeSets where ((cs.Code.Equals(code, StringComparison.OrdinalIgnoreCase)) && (cs.Value.Equals(Value, StringComparison.OrdinalIgnoreCase))) select cs).SingleOrDefault(); if (thisCodeSet != null) { _Value = thisCodeSet.Value; _ValueEnumCode = thisCodeSet.EnumCode; _ReferenceKey = thisCodeSet.ReferenceKey; } else { //check one more time with EnumCode, as IDPE supports Value OR EnumCode int enumCode = 0; if (int.TryParse(Value, out enumCode)) { thisCodeSet = (from cs in Registry.Instance.CodeSets where ((cs.Code.Equals(code, StringComparison.OrdinalIgnoreCase)) && (cs.EnumCode == enumCode)) select cs).SingleOrDefault(); if (thisCodeSet != null) { _Value = thisCodeSet.Value; _ValueEnumCode = thisCodeSet.EnumCode; _ReferenceKey = thisCodeSet.ReferenceKey; } else { _ValueEnumCode = -1; } } else { _ValueEnumCode = -1; } } } if ((_ValueEnumCode == -1) || (sqlErrorMessage != string.Empty)) { this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_CODESET_TYPE_DATA_VALIDATION_FAILED); this._ParseResult.Message = string.Format(_ParseResult.Message, PrintRowColPosition(), Value); } else { _ValueEnumValue = _Value; return(new IdpeMessage(IdpeMessageCodes.IDPE_SUCCESS)); //when got value, return success } } catch (Exception ex) { this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_CODESET_TYPE_DATA_VALIDATION_FAILED); this._ParseResult.Message = string.Format(_ParseResult.Message, PrintRowColPosition(), Value); ExtensionMethods.TraceError(ex.ToString()); } _IsParsed = true; return(this._ParseResult); }
void WorkerManagerIdle(WorkflowApplicationIdleEventArgs e) { ExtensionMethods.TraceError("WorkerManager {0} Idle.", e.InstanceId); Trace.Flush(); }
PersistableIdleAction WorkerManagerPersistableIdle(WorkflowApplicationIdleEventArgs e) { ExtensionMethods.TraceError("WorkerManager {0} Idle.", e.InstanceId); Trace.Flush(); return(PersistableIdleAction.Unload); }
internal void InvokeFileProcessed(int datasourceId, string jobId, List <IdpeKey> appKeys, string fileName, string outputFolder, string zipUniqueId) { Trace.TraceInformation("Invoke"); Trace.Flush(); Trace.TraceInformation("datasourceId:{0}, JobId:{1},appkeys={2},filename={3},outputfolder={4},zipuniqueid={5}", datasourceId, jobId, appKeys.Count, fileName, outputFolder, zipUniqueId); Trace.Flush(); Job currentJob = null; if ((!(string.IsNullOrEmpty(jobId))) && (Registry.Instance.Entries.ContainsKey(jobId))) { currentJob = Registry.Instance.Entries[jobId] as Job; } #region Handling ZipFile ZipFileInformation zipInfo = null; if (!string.IsNullOrEmpty(zipUniqueId)) { zipInfo = Registry.Instance.ZipFiles[zipUniqueId]; if (zipInfo.TotalFiles == zipInfo.TotalProcessedFiles) { return; } else { zipInfo.TotalProcessedFiles = zipInfo.TotalProcessedFiles + 1; } } #endregion Handling ZipFile #region Handling Pusher if ((currentJob != null) && (!string.IsNullOrEmpty(currentJob.DataSource.PusherTypeFullName))) { ExtensionMethods.TraceInformation("Pullers - Initializing '{0}' Pusher '{1}'.", currentJob.DataSource.Name, currentJob.DataSource.PusherTypeFullName); object objPusher = null; if (currentJob.DataSource.PusherType == PusherTypes.Ftp) { objPusher = new PusherFtp(); } else if (currentJob.DataSource.PusherType == PusherTypes.DosCommands) { objPusher = new PusherDosCommands(); } else if (currentJob.DataSource.PusherType == PusherTypes.SqlQuery) { objPusher = new PusherSqlQuery(); } else if (currentJob.DataSource.PusherType == PusherTypes.Custom) { objPusher = Activator.CreateInstance(Type.GetType(currentJob.DataSource.PusherTypeFullName)); } if (objPusher != null) { if ((currentJob.Errors.Count == 0) && (currentJob.DataSource.OutputWriter.IsErrored == false)) { ((Pushers)objPusher).FileProcessed(new PullersEventArgs(datasourceId, jobId, appKeys, fileName, outputFolder, zipUniqueId, zipInfo)); ExtensionMethods.TraceInformation("Pullers - Pusher called!"); } else if ((currentJob.Errors.Count > 0) && (currentJob.DataSource.DataFeederType == DataFeederTypes.PullSql)) { new SqlWatcherHelper(currentJob).ExecuteRecoveryScript(); } else { if (currentJob.DataSource.AllowPartial()) { ((Pushers)objPusher).FileProcessed(new PullersEventArgs(datasourceId, jobId, appKeys, fileName, outputFolder, zipUniqueId, zipInfo)); ExtensionMethods.TraceInformation("Pullers - Pusher called!"); } else { string message = "Pullers - There were error(s) while processing, the pusher was not called. Please study the error(s) and do the needful."; ExtensionMethods.TraceError(message); } } } } #endregion Handling Pusher #region Logging History if (currentJob != null) { string subFileName = null; if (!string.IsNullOrEmpty(zipUniqueId)) { subFileName = Path.GetFileName(fileName); //removing output extension if (subFileName.Contains(".")) { subFileName = subFileName.Substring(0, subFileName.LastIndexOf(".")); } } new Manager().SaveLog(currentJob.FileName, subFileName, datasourceId, currentJob.TotalRowsToBeProcessed, currentJob.TotalValid, currentJob.StartedAt, DateTime.Now, SreEnvironmentDetails()); } #endregion Logging History #region Sending Email if (currentJob != null) { //send email in positive scenario. If would have failed, an error email would have automatically sent. 
ExtensionMethods.TraceInformation("Pullers - A job processed, total rows to be processed = {0}, total valid rows = {1}", currentJob.TotalRowsToBeProcessed, currentJob.TotalValid); Trace.Flush(); string strEmailAfterFileProcessed = currentJob.DataSource.Keys.GetKeyValue(IdpeKeyTypes.EmailAfterFileProcessed); string strEmailAfterFileProcessedAttachInputFile = currentJob.DataSource.Keys.GetKeyValue(IdpeKeyTypes.EmailAfterFileProcessedAttachInputFile); string strEmailAfterFileProcessedAttachOutputFile = currentJob.DataSource.Keys.GetKeyValue(IdpeKeyTypes.EmailAfterFileProcessedAttachOutputFile); if (strEmailAfterFileProcessed.ParseBool()) { string strEmailAfterFileProcessedAttachOtherFiles = currentJob.DataSource.Keys.GetKeyValue(IdpeKeyTypes.EmailAfterFileProcessedAttachOtherFiles); string message = string.Format("Pullers - A file from '{0}' was just processed with {1} record(s)!", currentJob.DataSource.Name, currentJob.TotalRowsProcessed); if (string.IsNullOrEmpty(strEmailAfterFileProcessedAttachOtherFiles)) { List <string> outFile = new List <string>(); if (strEmailAfterFileProcessedAttachOutputFile.ParseBool()) { outFile.Add(fileName); new PostMan(currentJob, false).Send(message, "File Processed", !strEmailAfterFileProcessedAttachInputFile.ParseBool(), outFile); } else { new PostMan(currentJob, false).Send(message, "File Processed", !strEmailAfterFileProcessedAttachInputFile.ParseBool(), outFile); } } else { List <string> otherFiles = new List <string>(strEmailAfterFileProcessedAttachOtherFiles.Split(",".ToCharArray())); new PostMan(currentJob, false).Send(message, "File Processed", !strEmailAfterFileProcessedAttachInputFile.ParseBool(), otherFiles); } } } #endregion Sending Email #region Handling Global Events if ((currentJob != null) && (currentJob.ErroredByPusher == false)) { PullersEventArgs e = new PullersEventArgs(datasourceId, jobId, appKeys, fileName, outputFolder, zipUniqueId, zipInfo); Registry.Instance.GlobalEventsOnCompletes.Complete(datasourceId, e); } #endregion Handling Global Events if (currentJob != null) { currentJob.PerformanceCounter.PrintTrace(jobId); } }
private void OnCreated(object source, FileSystemEventArgs e) { if (e.Name == "New Folder") { return; } Registry.Instance.LocalFileWatcher.EnableRaisingEvents = false; bool fileCopied = false; lock (_lock) { if (IsItLastFewRecentFile(e.FullPath)) { return; } DateTime fileReceived = DateTime.Now; CurrentFile = e.FullPath; while (true) { if (FileDownloadCompleted(CurrentFile)) { fileCopied = true; AddToLastFewRecentFiles(e.FullPath); break; } else { // Calculate the elapsed time and stop if the maximum retry // period has been reached. TimeSpan timeElapsed = DateTime.Now - fileReceived; if (timeElapsed.TotalMinutes > IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.RetryTimeOut) { Registry.Instance.LocalFileWatcher.EnableRaisingEvents = true; ExtensionMethods.TraceError("The file \"{0}\" could not be processed. Time elapsed = '{1}', LocalFileWatcherMaximumRetryPeriod = '{2}'", CurrentFile, timeElapsed.TotalMinutes, IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.RetryTimeOut); break; } Thread.Sleep(300); if (string.IsNullOrEmpty(CurrentFile)) { break; } } Trace.Flush(); } } if (fileCopied) { Registry.Instance.LocalFileWatcher.EnableRaisingEvents = true; this.Process(CurrentFile); lock (_lock) { CurrentFile = string.Empty; } } HandleExistingFiles(); //this is important (when more than 1 file is dropped) Trace.Flush(); }
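// OnCreated above assumes a FileSystemWatcher wired up elsewhere; a minimal sketch of that
// wiring (the watch path and filter are assumptions). EnableRaisingEvents is toggled off inside
// the handler, exactly as the code above does, to avoid re-entrant Created events while a file
// is still being copied. Requires System.IO.
static FileSystemWatcher CreateWatcherSketch(string watchPath, FileSystemEventHandler onCreated)
{
    var watcher = new FileSystemWatcher(watchPath)
    {
        Filter = "*.*",
        IncludeSubdirectories = true,
        NotifyFilter = NotifyFilters.FileName | NotifyFilters.LastWrite
    };
    watcher.Created += onCreated;      // e.g. the OnCreated handler above
    watcher.EnableRaisingEvents = true;
    return watcher;
}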
protected override void Execute(CodeActivityContext context)
{
    MaxThreads = EyediaCoreConfigurationSection.CurrentConfig.MaxThreads;
    job = context.GetValue(this.Job);
    NumberOfWorkerRemains = job.JobSlices.Count;
    job.TraceInformation("Initializing workers. Job slices:{0}. Max threads:{1}", job.JobSlices.Count, MaxThreads);
    Trace.Flush();
    Stopwatch sw = Stopwatch.StartNew();

    for (int i = 0; i < job.JobSlices.Count; i++)
    {
        Dictionary<string, object> inputs = new Dictionary<string, object>();
        WorkerData data = new WorkerData(job, i);
        inputs.Add("Data", data);
        WorkflowApplication workflowApp = new WorkflowApplication(new Worker(), inputs);
        workflowApp.Completed = WorkerCompleted;
        workflowApp.Aborted = WorkerAborted;
        workflowApp.Idle = WorkerIdle;
        workflowApp.PersistableIdle = WorkerPersistableIdle;
        workflowApp.Unloaded = WorkerUnloaded;
        workflowApp.OnUnhandledException = WorkerUnhandledException;

        try
        {
            workflowApp.Run(new TimeSpan(0, IdpeConfigurationSection.CurrentConfig.WorkerTimeOut, 0));
        }
        catch (TimeoutException timeoutException)
        {
            ExtensionMethods.TraceError("Worker has timed out! {0}{1}", Environment.NewLine, timeoutException.ToString());
            Trace.Flush();
            CompleteJobIfAllWorkersCompleted();
            if (NumberOfWorkerRemains < MaxThreads)
            {
                AllThreadsAreBusy.Set();
            }
        }

        job.JobSlices[i].Status = JobSlice.JobSliceStatus.Processing;
        job.JobSlices[i].WorkflowInstanceId = workflowApp.Id;
        job.TraceInformation("Initializing worker '{0}' with {1}", i + 1, job.JobSlices[i].WorkflowInstanceId);

        if (job.NumberOfSlicesProcessing >= MaxThreads)
        {
            ExtensionMethods.TraceInformation("All threads are busy, waiting... Threads: [Max allowed:{0}, Remaining:{1}, Running:{2}]", MaxThreads, NumberOfWorkerRemains, job.NumberOfSlicesProcessing);
            Trace.Flush();
            if (!AllThreadsAreBusy.WaitOne(new TimeSpan(0, IdpeConfigurationSection.CurrentConfig.TimeOut, 0))) //configured timeout is in minutes
            {
                //timed out
                job.AbortRequested = true;
                job.AbortReason = Services.Job.AbortReasons.TimedOut;
                string errorMsg = "All threads have been busy for too long; the whole process has timed out! Timeout (in minutes) is configured as " + IdpeConfigurationSection.CurrentConfig.TimeOut + Environment.NewLine;
                errorMsg += "Job Id:" + job.JobIdentifier + Environment.NewLine;
                errorMsg += "File Name:" + job.FileName + Environment.NewLine;
                errorMsg += "TotalRowsToBeProcessed:" + job.TotalRowsToBeProcessed + Environment.NewLine;
                errorMsg += "TotalRowsProcessed:" + job.TotalRowsProcessed + Environment.NewLine;
                job.TraceError(errorMsg);
                Trace.Flush();
                AbortWorkers();
                AllThreadsAreBusy.Set();
            }
        }
        Trace.Flush();
    }

    job.TraceInformation("Waiting for all workers to finish...");
    Trace.Flush();
    TheJobCompleted.WaitOne();
    DoWithJob(job.IsErrored);

    if (!job.AbortRequested)
    {
        ExtensionMethods.TraceInformation("Job '{0}' finished at '{1}'. Workers elapsed time:{2}", job.JobIdentifier, job.FinishedAt, sw.Elapsed.ToString());
    }
    else
    {
        ExtensionMethods.TraceInformation("Job '{0}' was aborted at '{1}'. Workers elapsed time:{2}", job.JobIdentifier, job.FinishedAt, sw.Elapsed.ToString());
    }
}
public static DataTable ReadExcelFile(string fileName, bool isFirstRowHeader, int spreadSheetNumber, ref int columnCount)
{
    try
    {
        ExcelDataReaderInstantiator excelDataReaderInstantiator = null;
        try
        {
            excelDataReaderInstantiator = new ExcelDataReaderInstantiator(fileName);
        }
        catch (IOException)
        {
            //We can eat this exception: in a multi-instance scenario the file might already have been processed by another instance.
        }

        DataSet result = null;
        if ((excelDataReaderInstantiator != null) && (excelDataReaderInstantiator.ExcelReader != null))
        {
            excelDataReaderInstantiator.ExcelReader.IsFirstRowAsColumnNames = isFirstRowHeader;
            result = excelDataReaderInstantiator.ExcelReader.AsDataSet(true);
            excelDataReaderInstantiator.ExcelReader.Close();
        }

        if (result == null)
        {
            return new DataTable();
        }
        else if (result.Tables.Count > 0)
        {
            if (spreadSheetNumber < result.Tables.Count)
            {
                columnCount = result.Tables[spreadSheetNumber].Columns.Count;
                return result.Tables[spreadSheetNumber];
            }
            else
            {
                return new DataTable();
            }
        }
        else
        {
            //Nothing could be retrieved using the Excel library.
            //Let's try the standard OLE mechanism instead.
            if (IdpeConfigurationSection.CurrentConfig.MicrosoftExcelOLEDataReader.Enabled)
            {
                ExtensionMethods.TraceInformation("Could not read using the default Excel library, trying to read using OLE");
                DataTable table = ReadUsingOLE(fileName, isFirstRowHeader);
                if (table.Rows.Count == 0)
                {
                    ExtensionMethods.TraceError("Could not read any record from '{0}'.", Path.GetFileName(fileName));
                }
                columnCount = table.Columns.Count;
                return table;
            }
        }
        return new DataTable();
    }
    catch (FileNotFoundException)
    {
        //This catch is needed when multiple instances of IDPE run on the same machine.
        return new DataTable();
    }
}
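// ReadUsingOLE is not shown above; a minimal sketch of the OLE fallback it refers to, reading
// the first worksheet through the ACE OLE DB provider. The provider string, the fixed sheet
// name "Sheet1" and the HDR flag are assumptions for illustration. Requires System.Data and
// System.Data.OleDb, and the ACE provider installed on the machine.
static DataTable ReadExcelViaOleSketch(string fileName, bool isFirstRowHeader)
{
    string connectionString = string.Format(
        "Provider=Microsoft.ACE.OLEDB.12.0;Data Source={0};Extended Properties=\"Excel 12.0 Xml;HDR={1}\"",
        fileName, isFirstRowHeader ? "YES" : "NO");

    DataTable table = new DataTable();
    using (var connection = new System.Data.OleDb.OleDbConnection(connectionString))
    using (var adapter = new System.Data.OleDb.OleDbDataAdapter("SELECT * FROM [Sheet1$]", connection))
    {
        adapter.Fill(table); // Fill opens and closes the connection as needed
    }
    return table;
}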
/// <summary>
/// Parses attributes.
/// </summary>
/// <param name="attributes">Master attribute list</param>
/// <param name="column">A column (attributes) to be parsed</param>
/// <param name="isSystemRow">true if processing a system row</param>
/// <param name="rowPosition">Row position, used only in error information to identify the row. 0 (zero) in case of system attributes.</param>
/// <param name="doNotWriteErrorInTraceFile">To avoid confusion, if this is true, it won't write SQL query formatting errors into the trace file.
/// (On the first attempt, values may not yet be ready to substitute into queries, which is a perfectly valid scenario.)</param>
internal void Parse(List<IdpeAttribute> attributes, Column column, bool isSystemRow, int rowPosition, bool doNotWriteErrorInTraceFile)
{
    try
    {
        List<SreType> sqlQueryTypes = new List<SreType>();
        for (int a = 0; a < attributes.Count; a++)
        {
            string value = string.Empty;
            if (column[attributes[a].Name].IsNull == true)
            {
                value = "NULL";
                column[a].Error = new IdpeMessage(IdpeMessageCodes.IDPE_SUCCESS);
            }
            else
            {
                value = column[attributes[a].Name].Value;
            }

            //if (attributes[a].Name == "AssetId")
            //    Debugger.Break();

            SreType SREType = SreTypeFactory.GetInstance(attributes[a].Name, value, attributes[a].Type, attributes[a].Formula, attributes[a].Minimum, attributes[a].Maximum, isSystemRow, rowPosition, this._SQLClientManager, this._DataSourceKeys);
            column[a].Type = SREType;
            if (column[attributes[a].Name].IgnoreParsing)
            {
                column[a].Error = new IdpeMessage(IdpeMessageCodes.IDPE_SUCCESS);
                continue;
            }

            if ((SREType.Type != AttributeTypes.Referenced) && (SREType.Type != AttributeTypes.NotReferenced) && (SREType.Type != AttributeTypes.Generated))
            {
                //parse right away... we don't have SQL queries to fire
                //we can override errors for all except 'IsValid'... because if a rule fails, we manually override its error messages,
                //and if the IsValid error message is already populated, we would lose it...
                if (attributes[a].Name == "IsValid")
                {
                    IdpeMessage errMsg = SREType.Parse(false);
                    if (column[a].Error == null)
                    {
                        column[a].Error = errMsg;
                    }
                    else
                    {
                        column[a].Error.Message += errMsg.Message;
                    }
                }
                else
                {
                    IdpeMessage thisResult = SREType.Parse(false);
                    if (column[a].Error == null)
                    {
                        column[a].Error = thisResult;
                    }
                    else
                    {
                        if (thisResult.Code != IdpeMessageCodes.IDPE_SUCCESS)
                        {
                            string oldErrorMsg = column[a].Error.Message;
                            column[a].Error = thisResult;
                            column[a].Error.Message = string.Format("{0},{1}", column[a].Error.Message, oldErrorMsg);
                        }
                    }
                }

                //if codeset type, store enum value and code as well
                if (SREType.Type == AttributeTypes.Codeset)
                {
                    SreCodeset sreCodeset = SREType as SreCodeset;
                    //if (string.IsNullOrEmpty(column[attributes[a].Name].ValueEnumValue))
                    //    column[attributes[a].Name].ValueEnumValue = column[attributes[a].Name].Value;
                    column[attributes[a].Name].ValueEnumCode = sreCodeset.ValueEnumCode;
                    column[attributes[a].Name].ValueEnumValue = sreCodeset.ValueEnumValue;
                    column[attributes[a].Name].ValueEnumReferenceKey = sreCodeset.ReferenceKey;
                }
                else
                {
                    column[a].Value = SREType.Value; //in case anything was updated after parse (at this moment, a formatted datetime)
                }
            }
            else
            {
                //to be parsed once all others are parsed
                sqlQueryTypes.Add(SREType);
            }
            column[a].IsNull = SREType.IsNull;
        }

        //We have parsed all values except those with SQL queries; let's parse those now.
        foreach (SreType item in sqlQueryTypes)
        {
            //if (item.ColumnName == "OldInvoiceNumber")
            //    Debugger.Break();
            Attribute currentAttribute = column[item.ColumnName]; //efficient: avoids calling the string indexer multiple times
            if ((item.Type == AttributeTypes.Generated) && (!item.IsHavingSqlQuery))
            {
                if (string.IsNullOrEmpty(currentAttribute.Value))
                {
                    currentAttribute.Value = GetFormulaResult(item.Formula);
                }
                IdpeMessage thisResult = item.Parse(false);
                if (currentAttribute.Error == null)
                {
                    currentAttribute.Error = thisResult;
                }
                else
                {
                    if (thisResult.Code != IdpeMessageCodes.IDPE_SUCCESS)
                    {
                        string oldErrorMsg = currentAttribute.Error.Message;
                        currentAttribute.Error = thisResult;
                        currentAttribute.Error.Message = string.Format("{0},{1}", currentAttribute.Error.Message, oldErrorMsg);
                    }
                }
            }
            else if ((item.Type == AttributeTypes.NotReferenced) && (string.IsNullOrEmpty(currentAttribute.Value)))
            {
                //NotReferenced cannot be empty.
                currentAttribute.Error = new IdpeMessage(IdpeMessageCodes.IDPE_REFERENCED_TYPE_DATA_CAN_NOT_BE_NULL);
                currentAttribute.Error.Message = string.Format(currentAttribute.Error.Message, PrintRowColPosition(item.RecordPosition, item.ColumnName, isSystemRow), currentAttribute.Value, item.ColumnName);
                continue;
            }
            else
            {
                string errorMessage = string.Empty;
                string value = currentAttribute.Value;
                SqlCommandTypes sqlCommandTypes = SqlCommandTypes.Unknown;
                string connectionStringKeyName = string.Empty;
                string SQLQuery = FormatSQLFormula(item.Formula, ref sqlCommandTypes, ref connectionStringKeyName, ref errorMessage);
                string FormattedSQLQuery = string.Empty;

                if (errorMessage == string.Empty)
                {
                    if (connectionStringKeyName != Constants.DefaultConnectionStringKeyName)
                    {
                        item.ConnectionStringKeyName = connectionStringKeyName;
                        //string k_n_type = _ApplicationKeys.GetKeyValue(connectionStringKeyName);
                        IdpeKey key = _DataSourceKeys.GetKey(connectionStringKeyName);
                        item.ConnectionString = key.Value;
                        item.DatabaseType = (IdpeKeyTypes)key.Type;
                    }
                    if (sqlCommandTypes == SqlCommandTypes.SqlCommand)
                    {
                        FormattedSQLQuery = FormatSQLParameters(SQLQuery, value, column, ref errorMessage);
                    }
                    else if (sqlCommandTypes == SqlCommandTypes.StoreProcedure)
                    {
                        //todo
                        //FormattedSQLQuery =
                    }
                }

                if (errorMessage != string.Empty)
                {
                    //Prepare the return error message (and not the technical exception)
                    IdpeMessage returnMessage = new IdpeMessage(IdpeMessageCodes.IDPE_TYPE_DATA_VALIDATION_FAILED_GENERIC);
                    //Write the error into the trace with a map id.
                    Guid detailsMapId = Guid.NewGuid();
                    //Do not write to trace when it is not actually an error (first attempt)
                    if (!doNotWriteErrorInTraceFile)
                    {
                        ExtensionMethods.TraceError(string.Format("{0}:{1}", detailsMapId.ToString(), errorMessage));
                    }
                    //set the map id in the client error message
                    returnMessage.Message = string.Format("{0}. Map Id:{1}", returnMessage.Message, detailsMapId.ToString());
                    if (currentAttribute.Error == null)
                    {
                        //send the generic error with the map id
                        currentAttribute.Error = returnMessage;
                    }
                    else
                    {
                        if (currentAttribute.Error.Code != IdpeMessageCodes.IDPE_SUCCESS)
                        {
                            string oldErrorMsg = currentAttribute.Error.Message;
                            currentAttribute.Error = returnMessage;
                            currentAttribute.Error.Message = string.Format("{0},{1}", currentAttribute.Error.Message, oldErrorMsg);
                        }
                    }
                }
                else
                {
                    item.OverrideFormula(FormattedSQLQuery);
                    currentAttribute.Error = item.Parse(false); //SQL query results are always overridden
                    currentAttribute.Value = item.Value;
                }
            }
        }

        //Parsing done. As we have more data (SQL parameters) now, let's format all queries one more time; there might be a few more parameters in context.
        FormatAllSQLQueries(column);
    }
    catch (Exception ex)
    {
        Trace.Write(string.Format("Attribute parsing error, row position = {0}{1}{2}", rowPosition, Environment.NewLine, ex.ToString()));
    }
}