/// <summary>
/// Parses every attribute of a row: non-query types are parsed immediately;
/// Referenced/NotReferenced/Generated types are deferred and parsed afterwards
/// (their SQL formulas may need values produced by the first pass). After all
/// parsing, re-formats all SQL queries with the now-available parameter values.
/// </summary>
/// <param name="attributes">Master attribute list</param>
/// <param name="column">A column(Attributes) to be parsed</param>
/// <param name="isSystemRow">true if processing system row</param>
/// <param name="rowPosition">Row position, used only in error information, to identify row. 0(Zero) in case of system attributes.</param>
/// <param name="doNotWriteErrorInTraceFile">To avoid confusion, if this is true, it wont write sql query formatting errors into trace file.
/// (in case of first attempt, values may not be ready to replace in queries, which is absolutely a valid scenario)</param>
internal void Parse(List<IdpeAttribute> attributes, Column column, bool isSystemRow, int rowPosition, bool doNotWriteErrorInTraceFile)
{
    try
    {
        List<SreType> sqlQueryTypes = new List<SreType>();
        for (int a = 0; a < attributes.Count; a++)
        {
            string value = string.Empty;
            if (column[attributes[a].Name].IsNull == true)
            {
                value = "NULL";
                column[a].Error = new IdpeMessage(IdpeMessageCodes.IDPE_SUCCESS);
            }
            else
            {
                value = column[attributes[a].Name].Value;
            }
            SreType SREType = SreTypeFactory.GetInstance(attributes[a].Name, value, attributes[a].Type, attributes[a].Formula,
                attributes[a].Minimum, attributes[a].Maximum, isSystemRow, rowPosition, this._SQLClientManager, this._DataSourceKeys);
            column[a].Type = SREType;
            if (column[attributes[a].Name].IgnoreParsing)
            {
                column[a].Error = new IdpeMessage(IdpeMessageCodes.IDPE_SUCCESS);
                continue;
            }
            if ((SREType.Type != AttributeTypes.Referenced) && (SREType.Type != AttributeTypes.NotReferenced) && (SREType.Type != AttributeTypes.Generated))
            {
                //parse rightway...We dont have SQL Queries to fire
                //we can override errors for all except 'IsValid'...because if rule fails, we manually override its error msgs
                //and if IsValid.err msg already populated, we will lose that...
                if (attributes[a].Name == "IsValid")
                {
                    IdpeMessage errMsg = SREType.Parse(false);
                    if (column[a].Error == null)
                    {
                        column[a].Error = errMsg;
                    }
                    else
                    {
                        // BUGFIX: previously appended the existing message to itself
                        // (column[a].Error.Message += column[a].Error.Message;), which doubled the
                        // old text and dropped the new parse result. Append the new message instead,
                        // preserving the already-populated IsValid error as the comment above intends.
                        column[a].Error.Message += errMsg.Message;
                    }
                }
                else
                {
                    IdpeMessage thisResult = SREType.Parse(false);
                    if (column[a].Error == null)
                    {
                        column[a].Error = thisResult;
                    }
                    else
                    {
                        if (thisResult.Code != IdpeMessageCodes.IDPE_SUCCESS)
                        {
                            // Prepend the new error, keeping the previous message after a comma.
                            string oldErrorMsg = column[a].Error.Message;
                            column[a].Error = thisResult;
                            column[a].Error.Message = string.Format("{0},{1}", column[a].Error.Message, oldErrorMsg);
                        }
                    }
                }
                //if codeset type, store enum value and code as well
                if (SREType.Type == AttributeTypes.Codeset)
                {
                    SreCodeset sreCodeset = SREType as SreCodeset;
                    column[attributes[a].Name].ValueEnumCode = sreCodeset.ValueEnumCode;
                    column[attributes[a].Name].ValueEnumValue = sreCodeset.ValueEnumValue;
                    column[attributes[a].Name].ValueEnumReferenceKey = sreCodeset.ReferenceKey;
                }
                else
                {
                    column[a].Value = SREType.Value; //in case anything updated after parse(at this moment, formatted datetime)
                }
            }
            else
            {
                //to be parsed once all others are parsed.
                sqlQueryTypes.Add(SREType);
            }
            column[a].IsNull = SREType.IsNull;
        }

        //we have parsed all other values except with sql queries, lets parse those.
        foreach (SreType item in sqlQueryTypes)
        {
            Attribute currentAttribute = column[item.ColumnName]; //efficient, instead of calling string indxr multiple times.
            if ((item.Type == AttributeTypes.Generated) && (!item.IsHavingSqlQuery))
            {
                // Generated without a SQL query: evaluate the formula locally when empty.
                if (string.IsNullOrEmpty(currentAttribute.Value))
                {
                    currentAttribute.Value = GetFormulaResult(item.Formula);
                }
                IdpeMessage thisResult = item.Parse(false);
                if (currentAttribute.Error == null)
                {
                    currentAttribute.Error = thisResult;
                }
                else
                {
                    if (thisResult.Code != IdpeMessageCodes.IDPE_SUCCESS)
                    {
                        string oldErrorMsg = currentAttribute.Error.Message;
                        currentAttribute.Error = thisResult;
                        currentAttribute.Error.Message = string.Format("{0},{1}", currentAttribute.Error.Message, oldErrorMsg);
                    }
                }
            }
            else if ((item.Type == AttributeTypes.NotReferenced) && (string.IsNullOrEmpty(currentAttribute.Value)))
            {
                //NotReferenced can not be empty at least.
                currentAttribute.Error = new IdpeMessage(IdpeMessageCodes.IDPE_REFERENCED_TYPE_DATA_CAN_NOT_BE_NULL);
                currentAttribute.Error.Message = string.Format(currentAttribute.Error.Message,
                    PrintRowColPosition(item.RecordPosition, item.ColumnName, isSystemRow), currentAttribute.Value, item.ColumnName);
                continue;
            }
            else
            {
                // SQL-backed type: format the query, optionally resolve a non-default
                // connection string key, substitute parameters, then execute via Parse.
                string errorMessage = string.Empty;
                string value = currentAttribute.Value;
                SqlCommandTypes sqlCommandTypes = SqlCommandTypes.Unknown;
                string connectionStringKeyName = string.Empty;
                string SQLQuery = FormatSQLFormula(item.Formula, ref sqlCommandTypes, ref connectionStringKeyName, ref errorMessage);
                string FormattedSQLQuery = string.Empty;
                if (errorMessage == string.Empty)
                {
                    if (connectionStringKeyName != Constants.DefaultConnectionStringKeyName)
                    {
                        item.ConnectionStringKeyName = connectionStringKeyName;
                        IdpeKey key = _DataSourceKeys.GetKey(connectionStringKeyName);
                        item.ConnectionString = key.Value;
                        item.DatabaseType = (IdpeKeyTypes)key.Type;
                    }
                    if (sqlCommandTypes == SqlCommandTypes.SqlCommand)
                    {
                        FormattedSQLQuery = FormatSQLParameters(SQLQuery, value, column, ref errorMessage);
                    }
                    else if (sqlCommandTypes == SqlCommandTypes.StoreProcedure)
                    {
                        //todo: stored procedure formatting not implemented
                    }
                }
                if (errorMessage != string.Empty)
                {
                    //Prepare return error message (and not the technical exception)
                    IdpeMessage returnMessage = new IdpeMessage(IdpeMessageCodes.IDPE_TYPE_DATA_VALIDATION_FAILED_GENERIC);
                    //Write the error into trace with mapId.
                    Guid detailesMapId = Guid.NewGuid();
                    //Do not write in trace, as it is actually not an error, tried first attempt
                    if (!doNotWriteErrorInTraceFile)
                    {
                        ExtensionMethods.TraceError(string.Format("{0}:{1}", detailesMapId.ToString(), errorMessage));
                    }
                    //set mapId in client error msg
                    returnMessage.Message = string.Format("{0}. Map Id:{1}", returnMessage.Message, detailesMapId.ToString());
                    if (currentAttribute.Error == null)
                    {
                        //send the generic error with mapId
                        currentAttribute.Error = returnMessage;
                    }
                    else
                    {
                        if (currentAttribute.Error.Code != IdpeMessageCodes.IDPE_SUCCESS)
                        {
                            string oldErrorMsg = currentAttribute.Error.Message;
                            currentAttribute.Error = returnMessage;
                            currentAttribute.Error.Message = string.Format("{0},{1}", currentAttribute.Error.Message, oldErrorMsg);
                        }
                    }
                }
                else
                {
                    item.OverrideFormula(FormattedSQLQuery);
                    currentAttribute.Error = item.Parse(false);
                    //SQL Query results are always overridden.
                    currentAttribute.Value = item.Value;
                }
            }
        }
        //Parsing done, as we have more data (SQL Parameters) now, lets just format all queries one more time, there might be few more parameters in context.
        FormatAllSQLQueries(column);
    }
    catch (Exception ex)
    {
        Trace.Write(string.Format("Attribute parsing error, row position = {0}{1}{2}", rowPosition, Environment.NewLine, ex.ToString()));
    }
}
/// <summary>
/// Unzips the incoming archive into a per-zip temp folder, validates it (via the
/// data-source-specific watcher when one is configured, otherwise the default
/// Prepare), then routes each extracted file to the appropriate handler.
/// </summary>
/// <returns>The unique id assigned to this zip (ZipUniqueId).</returns>
internal string Handle()
{
    ExtensionMethods.TraceInformation("Handling a zip file '{0}', processing by '{1}' of data source '{2}'.", ZipFileName, ProcessingBy, DataSourceId);
    // Extraction target: <TempDirectory>\<IdpeBaseFolderName>\TempZip\<ZipUniqueId>
    string unzipLocation = Path.Combine(EyediaCoreConfigurationSection.CurrentConfig.TempDirectory, Constants.IdpeBaseFolderName);
    unzipLocation = Path.Combine(unzipLocation, "TempZip");
    unzipLocation = Path.Combine(unzipLocation, ZipUniqueId);
    UnZippedFileNames = ZipFileHandler.UnZip(ZipFileName, unzipLocation).ToArray();
    // Group files by extension before handling. //todo
    Array.Sort(UnZippedFileNames, (f1, f2) => Path.GetExtension(f1).CompareTo(Path.GetExtension(f2)));
    if (UnZippedFileNames.Length == 0)
    {
        //2nd attempt. Reason - Sometimes unzip returns 0 files.
        Thread.Sleep(500);
        UnZippedFileNames = ZipFileHandler.UnZip(ZipFileName, unzipLocation).ToArray();
    }
    // NOTE(review): 'beeen' typo below is in a runtime string and is preserved as-is.
    string validationErrorMessage = string.Format("The zip file '{0}' processing has beeen aborted as validation process failed", ZipFileName);
    if (DataSourceSpecificZipFileWatcher != null)
    {
        // Hand the specific watcher full zip context before asking it to validate.
        DataSourceSpecificZipFileWatcher.UnZippedFileNames = UnZippedFileNames;
        DataSourceSpecificZipFileWatcher.ZipUniqueId = ZipUniqueId;
        DataSourceSpecificZipFileWatcher.ZipFileName = ZipFileName;
        if (!DataSourceSpecificZipFileWatcher.Prepare(ref validationErrorMessage))
        {
            throw new BusinessException(validationErrorMessage);
        }
    }
    else
    {
        if (!Prepare(ref validationErrorMessage))
        {
            throw new BusinessException(validationErrorMessage);
        }
    }
    UnZippedFileNames = AddUniqueIdToFiles(UnZippedFileNames, ZipUniqueId);
    SetZipFileInfo(this.ZipUniqueId, UnZippedFileNames.Length);
    ExtensionMethods.TraceInformation("There are {0} files.", UnZippedFileNames.Length);
    foreach (string unZippedFileName in UnZippedFileNames)
    {
        string onlyFileName = Path.GetFileName(unZippedFileName);
        if (DataSourceSpecificZipFileWatcher != null)
        {
            ExtensionMethods.TraceInformation("'{0}' will be processed with specific handler '{1}'", onlyFileName, DataSourceSpecificZipFileWatcher.ToString());
            DataSourceSpecificZipFileWatcher.Handle(unZippedFileName, onlyFileName, Path.GetExtension(unZippedFileName), FileStatus.Process);
        }
        else
        {
            ExtensionMethods.TraceInformation("'{0}' will be processed with default handler", onlyFileName);
            string fileExt = Path.GetExtension(unZippedFileName).ToLower();
            // Default to Process; downgrade per the ignore lists.
            FileStatus fileProposedStatus = FileStatus.Process;
            if (IsInIgnoreList(fileExt))
            {
                fileProposedStatus = FileStatus.Ignore;
            }
            if (IsInIgnoreListButCopy(fileExt))
            {
                fileProposedStatus = FileStatus.IgnoreMoveToOutput;
            }
            Handle(unZippedFileName, Path.GetFileName(unZippedFileName), fileExt, fileProposedStatus);
        }
        ExtensionMethods.TraceInformation("'{0}' handled.", onlyFileName);
    }
    ExtensionMethods.TraceInformation("All files handled.");
    Trace.Flush();
    return (ZipUniqueId);
}
/// <summary>
/// Callback invoked when a worker finishes a slice: aborts the job when an abort
/// was requested, otherwise (under a lock) marks the slice processed and merges
/// the worker's rows/errors/warnings into the job.
/// </summary>
/// <param name="data">The finished worker's payload (slice position, rows, errors, trace log).</param>
private void DoWithData(WorkerData data)
{
    if ((data.Job.AbortRequested) && (data.Job.IsFinished == false))
    {
        ExtensionMethods.TraceInformation("Abort was requested. Trying to abort job...");
        Trace.Flush();
        AbortJob();
    }
    else
    {
        lock (_lock) // serialize merges coming from concurrent workers
        {
            JobSlice processedJobSlice = null;
            try
            {
                if (job.JobSlices.Count > 0)
                {
                    processedJobSlice = job.JobSlices[data.SlicePosition];
                }
                else
                {
                    //job was already disposed for some reason. Errors should get already taken care.
                }
            }
            catch (ArgumentException ex)
            {
                // SlicePosition out of range - dump diagnostics instead of crashing the callback.
                StringBuilder sb = new StringBuilder("Dumping ArgumentException:" + Environment.NewLine + ex.ToString());
                sb.AppendLine(string.Format("Trying to access job.JobSlices[{0}]. Total job slices were {1}", data.SlicePosition, job.JobSlices.Count));
                job.TraceError(sb.ToString());
            }
            catch (Exception ex)
            {
                StringBuilder sb = new StringBuilder("Dumping Exception:" + Environment.NewLine + ex.ToString());
                sb.AppendLine(string.Format("Trying to access job.JobSlices[{0}]. Total job slices were {1}", data.SlicePosition, job.JobSlices.Count));
                job.TraceError(sb.ToString());
            }
            if (processedJobSlice == null)
            {
                // Could not resolve the slice - abort the whole job.
                AbortJob();
                return;
            }
            processedJobSlice.Status = JobSlice.JobSliceStatus.Processed;
            if (!job.AbortRequested)
            {
                ExtensionMethods.TraceInformation(Environment.NewLine);
                job.TraceInformation("JS - '{0}' - processed '{1}' records. {2}{3}", processedJobSlice.JobSliceId, data.Rows.Count, Environment.NewLine, data.TraceLog.ToString());
                Trace.Flush();
            }
            // NOTE(review): progress is counted from the slice's InputData rows while the merged
            // output comes from data.Rows - these can differ; confirm that is intentional.
            job.TotalRowsProcessed += processedJobSlice.InputData.Rows.Count;
            job.Rows.AddRange(data.Rows);
            job.Errors.AddRange(data.Errors);
            job.Warnings.AddRange(data.Warnings);
            job.BadDataInCsvFormat.AddRange(data.BadDataInCsvFormat);
            data.Dispose();
        }
    }
}
/// <summary>
/// Starts one Worker workflow per job slice, throttling concurrency to MaxThreads,
/// then blocks until the entire job completes (or is aborted / timed out).
/// </summary>
/// <param name="context">Activity context carrying the Job in-argument.</param>
protected override void Execute(CodeActivityContext context)
{
    MaxThreads = EyediaCoreConfigurationSection.CurrentConfig.MaxThreads;
    job = context.GetValue(this.Job);
    NumberOfWorkerRemains = job.JobSlices.Count;
    job.TraceInformation("Initializing workers. Job slices:{0}. Max threads:{1}", job.JobSlices.Count, MaxThreads);
    Trace.Flush();
    Stopwatch sw = Stopwatch.StartNew();
    for (int i = 0; i < job.JobSlices.Count; i++)
    {
        Dictionary<string, object> inputs = new Dictionary<string, object>();
        WorkerData data = new WorkerData(job, i);
        inputs.Add("Data", data);
        WorkflowApplication workflowApp = new WorkflowApplication(new Worker(), inputs);
        // Wire up all workflow lifecycle callbacks before running.
        workflowApp.Completed = WorkerCompleted;
        workflowApp.Aborted = WorkerAborted;
        workflowApp.Idle = WorkerIdle;
        workflowApp.PersistableIdle = WorkerPersistableIdle;
        workflowApp.Unloaded = WorkerUnloaded;
        workflowApp.OnUnhandledException = WorkerUnhandledException;
        try
        {
            // WorkerTimeOut is used as the minutes component of the TimeSpan.
            workflowApp.Run(new TimeSpan(0, IdpeConfigurationSection.CurrentConfig.WorkerTimeOut, 0));
        }
        catch (TimeoutException timeoutException)
        {
            ExtensionMethods.TraceError("Worker has been timed out! {0}{1}", Environment.NewLine, timeoutException.ToString());
            Trace.Flush();
            CompleteJobIfAllWorkersCompleted();
            if (NumberOfWorkerRemains < MaxThreads)
            {
                AllThreadsAreBusy.Set();
            }
        }
        job.JobSlices[i].Status = JobSlice.JobSliceStatus.Processing;
        job.JobSlices[i].WorkflowInstanceId = workflowApp.Id;
        job.TraceInformation("Initializing worker '{0}' with {1}", i + 1, job.JobSlices[i].WorkflowInstanceId);
        if (job.NumberOfSlicesProcessing >= MaxThreads)
        {
            ExtensionMethods.TraceInformation("All threads are busy, waiting...Threads: [Max allowed:{0}, Completed:{1}, Running:{2}", MaxThreads, NumberOfWorkerRemains, job.NumberOfSlicesProcessing);
            Trace.Flush();
            // NOTE(review): the comment says seconds but new TimeSpan(0, x, 0) treats
            // TimeOut as minutes (and the error text below says minutes) - confirm unit.
            if (!AllThreadsAreBusy.WaitOne(new TimeSpan(0, IdpeConfigurationSection.CurrentConfig.TimeOut, 0))) //config timeout in seconds
            {
                //timed out
                job.AbortRequested = true;
                job.AbortReason = Services.Job.AbortReasons.TimedOut;
                string errorMsg = "All threads are busy since long, the complete process has been timed out! Time out (in Minutes) configured as " + IdpeConfigurationSection.CurrentConfig.TimeOut + Environment.NewLine;
                errorMsg += "Job Id:" + job.JobIdentifier + Environment.NewLine;
                errorMsg += "File Name:" + job.FileName + Environment.NewLine;
                errorMsg += "TotalRowsToBeProcessed:" + job.TotalRowsToBeProcessed + Environment.NewLine;
                errorMsg += "TotalRowsProcessed:" + job.TotalRowsProcessed + Environment.NewLine;
                job.TraceError(errorMsg);
                Trace.Flush();
                AbortWorkers();
                AllThreadsAreBusy.Set();
            }
        }
        Trace.Flush();
    }
    job.TraceInformation("Waiting to get finished...");
    Trace.Flush();
    TheJobCompleted.WaitOne();
    DoWithJob(job.IsErrored);
    if (!job.AbortRequested)
    {
        ExtensionMethods.TraceInformation("Job '{0}' is finished at '{1}'. Workers elapsed time:{2}", job.JobIdentifier, job.FinishedAt, sw.Elapsed.ToString());
    }
    else
    {
        ExtensionMethods.TraceInformation("Job '{0}' is aborted at '{1}'. Workers elapsed time:{2}", job.JobIdentifier, job.FinishedAt, sw.Elapsed.ToString());
    }
}
/// <summary>
/// Instantiate new job with a data source (id OR name to be passed).
/// Sets up collections, output writer, plug-ins, optional header/footer
/// extraction and the SQL client manager. If neither id nor name is supplied,
/// or the data source is invalid, the constructor returns early leaving an
/// invalid (dummy) job.
/// </summary>
/// <param name="dataSourceId">The data source id (id OR name is required)</param>
/// <param name="dataSourceName">The data source name (id OR name is required)</param>
/// <param name="processingBy">Processing by user name</param>
/// <param name="fileName">The file name</param>
public Job(int dataSourceId, string dataSourceName, string processingBy, string fileName = null)
{
    this.ProcessingBy = processingBy;
    this.FileName = fileName;
    this.Rows = new List<Row>(); //these list are in this case thread OK, because we add from individual splitted job return items
    this.Errors = new List<string>();
    this.Warnings = new List<string>();
    this.BadDataInCsvFormat = new List<string>();
    this.StartedAt = DateTime.Now;
    this.JobIdentifier = ShortGuid.NewGuid();
    this.ProcessVariables = new ConcurrentDictionary<string, object>();
    this.CsvRows = new List<string>();
    this.PerformanceCounter = new PerformanceCounter();
    PerformanceCounter.StartNew(JobIdentifier);
    if ((dataSourceId == 0) && (string.IsNullOrEmpty(dataSourceName)))
    {
        return;//Invalid dummy job;
    }
    this.DataSource = new DataSource(dataSourceId, dataSourceName);
    this.DataSource.ClearAdditionalAttachments();
    if (!this.DataSource.IsValid)
    {
        string errorMessage = string.Format("Could not create job as data source was not valid. Data source id was {0} and name was {1}"
            , dataSourceId, string.IsNullOrEmpty(dataSourceName) ? "<Unknown>" : dataSourceName);
        this.DataSource.TraceError(errorMessage);
        return; //we need not to do anything here as job is automatically invalid
    }
    #region OutputWriter
    // Choose the output writer implementation from the configured output type;
    // unknown/unresolvable types fall back to the generic writer.
    object objOutputWriter = null;
    if (DataSource.OutputType == OutputTypes.Xml)
    {
        //set default
        objOutputWriter = new OutputWriterGeneric(this);
    }
    else if (DataSource.OutputType == OutputTypes.Delimited)
    {
        objOutputWriter = new OutputWriterDelimited(this);
    }
    else if (DataSource.OutputType == OutputTypes.FixedLength)
    {
        objOutputWriter = new OutputWriterFixedLength(this);
    }
    else if (DataSource.OutputType == OutputTypes.CSharpCode)
    {
        objOutputWriter = new OutputWriterCSharpCode(this);
    }
    else if (DataSource.OutputType == OutputTypes.Database)
    {
        objOutputWriter = new OutputWriterDatabase(this);
    }
    else if (DataSource.OutputType == OutputTypes.Custom)
    {
        // Custom writer resolved via reflection from its assembly-qualified name.
        if ((!string.IsNullOrEmpty(this.DataSource.OutputWriterTypeFullName)) && (Type.GetType(this.DataSource.OutputWriterTypeFullName) != null))
        {
            objOutputWriter = Activator.CreateInstance(Type.GetType(this.DataSource.OutputWriterTypeFullName), this);
        }
        else
        {
            objOutputWriter = new OutputWriterGeneric(this);
        }
    }
    else
    {
        objOutputWriter = new OutputWriterGeneric(this);
    }
    this.DataSource.OutputWriter = (OutputWriter)objOutputWriter;
    #endregion OutputWriter
    #region PlugIns
    // Custom plug-ins via reflection; otherwise the built-in generic plug-ins.
    if ((!string.IsNullOrEmpty(this.DataSource.PlugInsType)) && (Type.GetType(this.DataSource.PlugInsType) != null))
    {
        object objPlugIns = Activator.CreateInstance(Type.GetType(this.DataSource.PlugInsType), this);
        this.DataSource.PlugIns = (PlugIns)objPlugIns;
    }
    else
    {
        //set default
        object objPlugIns = Activator.CreateInstance(Type.GetType("Eyedia.IDPE.Services.PlugInsGeneric"), this);
        this.DataSource.PlugIns = (PlugIns)objPlugIns;
    }
    #endregion PlugIns
    IdpeKey key = DataSource.Key(IdpeKeyTypes.IsFirstRowHeader);
    if (key != null)
    {
        DataSource.IsFirstRowHeader = key.Value.ParseBool();
    }
    // Header/footer extraction only applies to line-oriented formats.
    if ((DataSource.DataFormatType == DataFormatTypes.Delimited) || (DataSource.DataFormatType == DataFormatTypes.FixedLength))
    {
        ExtractHeaderFooter();
    }
    this.SqlClientManager = new SqlClientManager(this.DefaultConnectionString, this.DefaultConnectionType);
    this.Parameters = new Parameters(this.DataSource.Id, this.DataSource.Name, this.DataSource.Key(IdpeKeyTypes.GenerateParametersFromDatabase), this.SqlClientManager);
    ExtensionMethods.TraceInformation(Environment.NewLine);
    this.TraceInformation("New job '{0}' created.", this.JobIdentifier);
}
/// <summary>
/// Pulls up to 6 configured header lines (from the top) and up to 6 footer lines
/// (from the bottom) out of FileContent, publishes each into ProcessVariables
/// keyed by the configured attribute value, and writes the remaining body lines
/// to a new file (FileNameWithoutHeaderAndOrFooter). No-op when neither
/// HeaderLine1Attribute nor FooterLine1Attribute is configured.
/// </summary>
private void ExtractHeaderFooter()
{
    IdpeKey headerAttribute = DataSource.Key(IdpeKeyTypes.HeaderLine1Attribute);
    IdpeKey footerAttribute = DataSource.Key(IdpeKeyTypes.FooterLine1Attribute);
    if ((headerAttribute == null) && (footerAttribute == null))
    {
        // Nothing configured - file is consumed as-is.
        return;
    }
    headerAttribute = null;
    footerAttribute = null;
    try
    {
        string[] allLines = FileContent.ToString().Split(new string[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
        string line = string.Empty;
        int howManyFound = 0;
        // Headers: HeaderLine1..6Attribute; stops at the first unconfigured slot (break).
        for (int i = 1; i <= 6; i++)
        {
            IdpeKeyTypes keyType = (IdpeKeyTypes)Enum.Parse(typeof(IdpeKeyTypes), "HeaderLine" + i + "Attribute");
            headerAttribute = DataSource.Key(keyType);
            if (headerAttribute != null)
            {
                line = allLines[i - 1];
                ProcessVariables.AddOrUpdate(headerAttribute.Value, line, (key, oldValue) => line);
            }
            else
            {
                break;
            }
            howManyFound++;
        }
        // Drop the consumed header lines from the top.
        allLines = allLines.SubArray(howManyFound, allLines.Length - howManyFound);
        howManyFound = 0;
        // Footers: FooterLine6..1Attribute, lines taken from the bottom of the file.
        // NOTE(review): unlike the header loop this uses 'continue' on an unconfigured
        // slot, so gaps in footer keys are tolerated - confirm that is intentional.
        for (int i = 6; i >= 1; i--)
        {
            IdpeKeyTypes keyType = (IdpeKeyTypes)Enum.Parse(typeof(IdpeKeyTypes), "FooterLine" + i + "Attribute");
            footerAttribute = DataSource.Key(keyType);
            if (footerAttribute == null)
            {
                continue;
            }
            else
            {
                line = allLines[allLines.Length - (howManyFound + 1)];
                ProcessVariables.AddOrUpdate(footerAttribute.Value, line, (key, oldValue) => line);
            }
            howManyFound++;
        }
        // Drop the consumed footer lines from the bottom.
        allLines = allLines.SubArray(0, allLines.Length - howManyFound);
        FileNameWithoutHeaderAndOrFooter = FileName + ShortGuid.NewGuid().ToString();
        using (StreamWriter sw = new StreamWriter(FileNameWithoutHeaderAndOrFooter))
        {
            for (int i = 0; i < allLines.Length; i++)
            {
                sw.WriteLine(allLines[i]);
            }
        }
        return;
    }
    catch (Exception ex)
    {
        // Any slip (e.g. fewer lines than configured headers/footers) is reported as
        // invalid file content rather than crashing job creation.
        ExtensionMethods.TraceInformation(ex.ToString());
        this.Errors.Add("File content is invalid! Please check that file has required header(s) and footer(s) and at least 1 valid record!");
    }
}
/// <summary>
/// Resolves the Message in-argument from the activity context, writes it to the
/// trace listeners, and flushes immediately so the message is persisted.
/// </summary>
/// <param name="context">The activity execution context carrying the Message argument.</param>
protected override void Execute(CodeActivityContext context)
{
    var resolvedMessage = context.GetValue(this.Message);
    ExtensionMethods.TraceInformation(resolvedMessage);
    Trace.Flush();
}
/// <summary>
/// Validates the current Value against the code set named in the Formula.
/// Matches first on the code-set Value (case-insensitive), then falls back to a
/// numeric EnumCode match, since IDPE accepts either form. On success caches the
/// canonical value, enum code and reference key.
/// </summary>
/// <param name="onlyConstraints">Present for the base signature; not used in this override.</param>
/// <returns>IDPE_SUCCESS on a match; IDPE_CODESET_TYPE_DATA_VALIDATION_FAILED otherwise.</returns>
public override IdpeMessage Parse(bool onlyConstraints)
{
    try
    {
        string code = ParseFormulaGetCode(Formula);
        _ReferenceKey = string.Empty;
        // NOTE(review): sqlErrorMessage is never assigned after this, so the failure
        // check below can only ever see it empty - looks like dead code.
        string sqlErrorMessage = string.Empty;
        if (Registry.Instance.CodeSets != null)
        {
            // First attempt: case-insensitive match on code + value.
            CodeSet thisCodeSet = (from cs in Registry.Instance.CodeSets
                                   where ((cs.Code.Equals(code, StringComparison.OrdinalIgnoreCase))
                                   && (cs.Value.Equals(Value, StringComparison.OrdinalIgnoreCase)))
                                   select cs).SingleOrDefault();
            if (thisCodeSet != null)
            {
                _Value = thisCodeSet.Value;
                _ValueEnumCode = thisCodeSet.EnumCode;
                _ReferenceKey = thisCodeSet.ReferenceKey;
            }
            else
            {
                //check one more time with EnumCode, as IDPE supports Value OR EnumCode
                int enumCode = 0;
                if (int.TryParse(Value, out enumCode))
                {
                    thisCodeSet = (from cs in Registry.Instance.CodeSets
                                   where ((cs.Code.Equals(code, StringComparison.OrdinalIgnoreCase))
                                   && (cs.EnumCode == enumCode))
                                   select cs).SingleOrDefault();
                    if (thisCodeSet != null)
                    {
                        _Value = thisCodeSet.Value;
                        _ValueEnumCode = thisCodeSet.EnumCode;
                        _ReferenceKey = thisCodeSet.ReferenceKey;
                    }
                    else
                    {
                        _ValueEnumCode = -1; // not found by enum code either
                    }
                }
                else
                {
                    _ValueEnumCode = -1; // value is neither a known code-set value nor numeric
                }
            }
        }
        if ((_ValueEnumCode == -1) || (sqlErrorMessage != string.Empty))
        {
            this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_CODESET_TYPE_DATA_VALIDATION_FAILED);
            this._ParseResult.Message = string.Format(_ParseResult.Message, PrintRowColPosition(), Value);
        }
        else
        {
            _ValueEnumValue = _Value;
            // NOTE(review): this early return skips the '_IsParsed = true' below,
            // so _IsParsed is only set on failure paths - confirm intended.
            return (new IdpeMessage(IdpeMessageCodes.IDPE_SUCCESS)); //when got value, return success
        }
    }
    catch (Exception ex)
    {
        this._ParseResult = new IdpeMessage(IdpeMessageCodes.IDPE_CODESET_TYPE_DATA_VALIDATION_FAILED);
        this._ParseResult.Message = string.Format(_ParseResult.Message, PrintRowColPosition(), Value);
        ExtensionMethods.TraceError(ex.ToString());
    }
    _IsParsed = true;
    return (this._ParseResult);
}
/// <summary>
/// Bootstraps all pullers: ensures pull folders exist (one per data source),
/// starts the local file-system watcher, then for each data source initializes
/// an FTP puller, an overridden local file-system watcher, or a SQL watcher
/// depending on its configured feeder type.
/// </summary>
public void Start()
{
    SqlWatchers = new Dictionary<int, SqlWatcher>();
    try
    {
        if (!Directory.Exists(IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.DirectoryPull))
        {
            Directory.CreateDirectory(IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.DirectoryPull);
        }
        WindowsUtility.SetFolderPermission(IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.DirectoryPull);
    }
    catch (Exception ex)
    {
        // Folder creation / permission failures are logged but not treated as fatal here.
        Trace.TraceInformation(ex.Message);
        Trace.Flush();
    }
    DateTime stTm = DateTime.Now;
    if (EyediaCoreConfigurationSection.CurrentConfig.TempDirectory.Length > Constants.MaxTempFolderPath)
    {
        throw new ConfigurationErrorsException(string.Format("'{0}' can not have more than '{1}' length", EyediaCoreConfigurationSection.CurrentConfig.TempDirectory, Constants.MaxTempFolderPath));
    }
    _Manager = new Manager();
    // One pull sub-folder per data source id.
    List<int> allDataSourceIds = _Manager.GetAllDataSourceIds(false);
    foreach (int dataSourceId in allDataSourceIds)
    {
        string dir = Path.Combine(IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.DirectoryPull, dataSourceId.ToString());
        if (!Directory.Exists(dir))
        {
            Directory.CreateDirectory(dir);
        }
    }
    // "100" appears to be a reserved/system data source folder - TODO confirm the magic id.
    string sysDir = Path.Combine(IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.DirectoryPull, "100");
    if (!Directory.Exists(sysDir))
    {
        Directory.CreateDirectory(sysDir);
    }
    StartLocalFileSystemWatcher();
    List<IdpeDataSource> dataSources = _Manager.GetDataSources();
    foreach (IdpeDataSource dataSource in dataSources)
    {
        // Keys may be cached; fall back to loading them for the data source.
        List<IdpeKey> keys = Cache.Instance.Bag[dataSource.Id + ".keys"] as List<IdpeKey>;
        if (keys == null)
        {
            keys = DataSource.LoadKeys(dataSource.Id);
        }
        if ((dataSource.DataFeederType != null) && ((DataFeederTypes)dataSource.DataFeederType == DataFeederTypes.PullFtp))
        {
            #region Initializing FtpPullers
            string ftpRemoteLocation = FindSREKeyUsingType(keys, IdpeKeyTypes.FtpRemoteLocation);
            string ftpUserName = FindSREKeyUsingType(keys, IdpeKeyTypes.FtpUserName);
            string ftpPassword = FindSREKeyUsingType(keys, IdpeKeyTypes.FtpPassword);
            string strinterval = FindSREKeyUsingType(keys, IdpeKeyTypes.FtpWatchInterval);
            int ftpWatchIntervalInMinutes = 0;
            int.TryParse(strinterval, out ftpWatchIntervalInMinutes);
            if (ftpWatchIntervalInMinutes == 0)
            {
                ftpWatchIntervalInMinutes = 1; // default to 1 minute when unset or unparseable
            }
            string ftpLocalLocation = Path.Combine(IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.DirectoryPull, dataSource.Id.ToString());
            string appOutputFolder = Path.Combine(IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.DirectoryOutput, dataSource.Id.ToString(), DateTime.Now.ToString("yyyyMMdd"));
            Dictionary<string, object> datasourceParameters = new Dictionary<string, object>();
            // NOTE(review): this lock guards population of a method-local dictionary,
            // which no other thread can see - likely unnecessary; confirm before removing.
            lock (_lock)
            {
                datasourceParameters.Add("DataSourceId", dataSource.Id);
                datasourceParameters.Add("OutputFolder", appOutputFolder);
                datasourceParameters.Add("ProcessingBy", dataSource.ProcessingBy);
            }
            FtpFileSystemWatcher ftpWatcher = new FtpFileSystemWatcher(datasourceParameters, ftpRemoteLocation, ftpLocalLocation, ftpWatchIntervalInMinutes, ftpUserName, ftpPassword, false, true);
            ExtensionMethods.TraceInformation("Pullers - Initialized 'PullFtp'; '{0}' [ftpRemoteLocation = {1}, ftpLocalLocation = {2}, ftpUserName = {3},ftpPassword = {4}, interval = {5}]", dataSource.Name, ftpRemoteLocation, ftpLocalLocation, ftpUserName, ftpPassword, ftpWatchIntervalInMinutes);
            Registry.Instance.FtpWatchers.Add(ftpWatcher);
            #endregion Initializing FtpPullers
        }
        else if ((dataSource.DataFeederType != null) && ((DataFeederTypes)dataSource.DataFeederType == DataFeederTypes.PullLocalFileSystem))
        {
            if (IsLocalFileSystemFoldersOverriden(keys))
            {
                SreFileSystemWatcher sreFileSystemWatcher = new SreFileSystemWatcher(dataSource.Id, keys);
                // Unsubscribe first so re-initialization never double-subscribes the handler.
                Watchers.FileSystemWatcherEventHandler handler = new Watchers.FileSystemWatcherEventHandler(FileDownloaded);
                sreFileSystemWatcher.FileDownloaded -= handler;
                sreFileSystemWatcher.FileDownloaded += handler;
                sreFileSystemWatcher.StartWatching();
                Registry.Instance.DataSourceFileWatcher.Add(dataSource.Id, sreFileSystemWatcher);
            }
            //we dont have to do anything if it is generic
        }
        else if ((dataSource.DataFeederType != null) && ((DataFeederTypes)dataSource.DataFeederType == DataFeederTypes.PullSql))
        {
            #region Initializing SQL Pullers
            SqlWatcher sqlWatcher = new SqlWatcher(dataSource);
            sqlWatcher.StartDownloading();
            SqlWatchers.Add(dataSource.Id, sqlWatcher);
            #endregion Initializing SQL Pullers
        }
        Trace.Flush();
    }
    ExtensionMethods.TraceInformation("Pullers - Initialization finished.{0}", DateTime.Now - stTm);
}
/// <summary>
/// Executes this rule set's workflow activity: job-level rule types
/// (SqlPullInit / PreValidate / PostValidate) receive the Job as the "Job"
/// in-argument, all other (row-level) types receive the WorkerData as "Data".
/// BusinessExceptions are treated as warnings and mailed; any other exception
/// marks the job errored and is rethrown.
/// </summary>
/// <param name="job">The owning job; required for job-level rule types.</param>
/// <param name="data">The worker data; required for row-level rule types.</param>
/// <returns>The workflow's output arguments, or null if no activity was invoked.</returns>
public IDictionary<string, object> Execute(Job job, WorkerData data)
{
    IDictionary<string, object> outArgs = null;
    Dictionary<string, object> inArgs = new Dictionary<string, object>();
    string preFix = string.Empty;
    try
    {
        // Job-level rules get "Job"; everything else gets "Data".
        if ((this.RuleSetType == RuleSetTypes.SqlPullInit) || (this.RuleSetType == RuleSetTypes.PreValidate) || (this.RuleSetType == RuleSetTypes.PostValidate))
        {
            job.ThrowErrorIfNull(this.Activity.DisplayName);
            inArgs.Add("Job", job);
            preFix = ExtensionMethods.GetTracePrefix(job);
        }
        else
        {
            data.ThrowErrorIfNull(this.Activity.DisplayName);
            inArgs.Add("Data", data);
            preFix = ExtensionMethods.GetTracePrefix(data);
        }
        // Defensive check: one argument is always added above, so this should never fire.
        if (inArgs.Keys.Count == 0)
        {
            throw new ArgumentNullException(string.Format("{0}All Pre/PostValidate rules should have Job argument and all Row Preparing/Prepared/Validate should have Data as argument!", preFix));
        }
        if (this.Activity != null)
        {
            if (string.IsNullOrEmpty(this.Activity.Name))
            {
                // A nameless rule indicates corrupt persisted rule data.
                string datasourceName = string.Empty;
                if (job != null)
                {
                    datasourceName = job.DataSource.Name;
                }
                else if (data != null)
                {
                    datasourceName = data.Job.DataSource.Name;
                }
                throw new Exception(string.Format("{0}Corrupt rule found while executing a '{1} type rule, associated with '{2}' datasource.", preFix, this.RuleSetType.ToString(), datasourceName));
            }
            string traceInfo = string.Format("{0}BRE - '{1}' - '{2}'", preFix, this.RuleSetType.ToString(), this.Activity.Name);
            if (data != null)
            {
                data.CurrentRow.TraceLog.WriteLine(traceInfo);
            }
            else
            {
                ExtensionMethods.TraceInformation(traceInfo); //pre/postvalidate
            }
            WorkflowInvoker invoker = new WorkflowInvoker(this.Activity);
            outArgs = invoker.Invoke(inArgs);
        }
    }
    catch (BusinessException ex)
    {
        ExtensionMethods.TraceInformation(ex.Message); //It is actually not an exception or error
        new PostMan(job, false).Send(PostMan.__warningStartTag + ex.Message + PostMan.__warningEndTag);
    }
    catch (Exception ex)
    {
        //There was some issue with the job and it was aborted/errored out, in that case we dont have log multiple errors
        if (!job.IsErrored)
        {
            if (!job.AbortRequested)
            {
                job.IsErrored = true;
                job.TraceError("{0}.Error. Job = '{1}', Rule Name = '{2}' Rule Type = '{3}'. {4}{5}", preFix, job.JobIdentifier, this.Activity.Name, this.RuleSetType, Environment.NewLine, ex.ToString());
                Trace.Flush();
                // BUGFIX: was 'throw ex;' which resets the stack trace; 'throw;'
                // rethrows the original exception with its trace intact.
                throw;
            }
        }
    }
    return (outArgs);
}
/// <summary>
/// Post-processing hook fired after a file has been processed: updates zip
/// bookkeeping, invokes the configured pusher (or the SQL recovery script on
/// errors), logs processing history, sends the "file processed" email when
/// configured, and raises global completion events.
/// </summary>
/// <param name="datasourceId">Id of the data source the file belongs to.</param>
/// <param name="jobId">Job identifier used to look the job up in the registry.</param>
/// <param name="appKeys">Data source keys forwarded to pusher/global events.</param>
/// <param name="fileName">The processed (output) file name.</param>
/// <param name="outputFolder">Folder containing the output.</param>
/// <param name="zipUniqueId">Zip id when the file came from an archive; otherwise empty.</param>
internal void InvokeFileProcessed(int datasourceId, string jobId, List<IdpeKey> appKeys, string fileName, string outputFolder, string zipUniqueId)
{
    Trace.TraceInformation("Invoke");
    Trace.Flush();
    Trace.TraceInformation("datasourceId:{0}, JobId:{1},appkeys={2},filename={3},outputfolder={4},zipuniqueid={5}", datasourceId, jobId, appKeys.Count, fileName, outputFolder, zipUniqueId);
    Trace.Flush();
    Job currentJob = null;
    if ((!(string.IsNullOrEmpty(jobId))) && (Registry.Instance.Entries.ContainsKey(jobId)))
    {
        currentJob = Registry.Instance.Entries[jobId] as Job;
    }
    #region Handling ZipFile
    ZipFileInformation zipInfo = null;
    if (!string.IsNullOrEmpty(zipUniqueId))
    {
        zipInfo = Registry.Instance.ZipFiles[zipUniqueId];
        // NOTE(review): returns when all files are already counted, otherwise counts this
        // one and continues - so the rest of this method runs per file rather than only
        // once at the end. Confirm that is the intended semantics.
        if (zipInfo.TotalFiles == zipInfo.TotalProcessedFiles)
        {
            return;
        }
        else
        {
            zipInfo.TotalProcessedFiles = zipInfo.TotalProcessedFiles + 1;
        }
    }
    #endregion Handling ZipFile
    #region Handling Pusher
    if ((currentJob != null) && (!string.IsNullOrEmpty(currentJob.DataSource.PusherTypeFullName)))
    {
        ExtensionMethods.TraceInformation("Pullers - Initializing '{0}' Pusher '{1}'.", currentJob.DataSource.Name, currentJob.DataSource.PusherTypeFullName);
        object objPusher = null;
        if (currentJob.DataSource.PusherType == PusherTypes.Ftp)
        {
            objPusher = new PusherFtp();
        }
        else if (currentJob.DataSource.PusherType == PusherTypes.DosCommands)
        {
            objPusher = new PusherDosCommands();
        }
        else if (currentJob.DataSource.PusherType == PusherTypes.SqlQuery)
        {
            objPusher = new PusherSqlQuery();
        }
        else if (currentJob.DataSource.PusherType == PusherTypes.Custom)
        {
            // Custom pusher resolved via reflection from its assembly-qualified name.
            objPusher = Activator.CreateInstance(Type.GetType(currentJob.DataSource.PusherTypeFullName));
        }
        if (objPusher != null)
        {
            if ((currentJob.Errors.Count == 0) && (currentJob.DataSource.OutputWriter.IsErrored == false))
            {
                // Clean run - hand the output to the pusher.
                ((Pushers)objPusher).FileProcessed(new PullersEventArgs(datasourceId, jobId, appKeys, fileName, outputFolder, zipUniqueId, zipInfo));
                ExtensionMethods.TraceInformation("Pullers - Pusher called!");
            }
            else if ((currentJob.Errors.Count > 0) && (currentJob.DataSource.DataFeederType == DataFeederTypes.PullSql))
            {
                // Errored SQL-pull job - run the recovery script instead of pushing.
                new SqlWatcherHelper(currentJob).ExecuteRecoveryScript();
            }
            else
            {
                if (currentJob.DataSource.AllowPartial())
                {
                    // Errors exist but partial output is allowed - push anyway.
                    ((Pushers)objPusher).FileProcessed(new PullersEventArgs(datasourceId, jobId, appKeys, fileName, outputFolder, zipUniqueId, zipInfo));
                    ExtensionMethods.TraceInformation("Pullers - Pusher called!");
                }
                else
                {
                    string message = "Pullers - There were error(s) while processing, the pusher was not called. Please study the error(s) and do the needful.";
                    ExtensionMethods.TraceError(message);
                }
            }
        }
    }
    #endregion Handling Pusher
    #region Logging History
    if (currentJob != null)
    {
        string subFileName = null;
        if (!string.IsNullOrEmpty(zipUniqueId))
        {
            subFileName = Path.GetFileName(fileName);
            //removing output extension
            if (subFileName.Contains("."))
            {
                subFileName = subFileName.Substring(0, subFileName.LastIndexOf("."));
            }
        }
        new Manager().SaveLog(currentJob.FileName, subFileName, datasourceId, currentJob.TotalRowsToBeProcessed, currentJob.TotalValid, currentJob.StartedAt, DateTime.Now, SreEnvironmentDetails());
    }
    #endregion Logging History
    #region Sending Email
    if (currentJob != null)
    {
        //send email in positive scenario. If would have failed, an error email would have automatically sent.
        ExtensionMethods.TraceInformation("Pullers - A job processed, total rows to be processed = {0}, total valid rows = {1}", currentJob.TotalRowsToBeProcessed, currentJob.TotalValid);
        Trace.Flush();
        string strEmailAfterFileProcessed = currentJob.DataSource.Keys.GetKeyValue(IdpeKeyTypes.EmailAfterFileProcessed);
        string strEmailAfterFileProcessedAttachInputFile = currentJob.DataSource.Keys.GetKeyValue(IdpeKeyTypes.EmailAfterFileProcessedAttachInputFile);
        string strEmailAfterFileProcessedAttachOutputFile = currentJob.DataSource.Keys.GetKeyValue(IdpeKeyTypes.EmailAfterFileProcessedAttachOutputFile);
        if (strEmailAfterFileProcessed.ParseBool())
        {
            string strEmailAfterFileProcessedAttachOtherFiles = currentJob.DataSource.Keys.GetKeyValue(IdpeKeyTypes.EmailAfterFileProcessedAttachOtherFiles);
            string message = string.Format("Pullers - A file from '{0}' was just processed with {1} record(s)!", currentJob.DataSource.Name, currentJob.TotalRowsProcessed);
            if (string.IsNullOrEmpty(strEmailAfterFileProcessedAttachOtherFiles))
            {
                List<string> outFile = new List<string>();
                // NOTE(review): both branches below send the same email; the only
                // difference is whether the output file is added to the attachment list.
                if (strEmailAfterFileProcessedAttachOutputFile.ParseBool())
                {
                    outFile.Add(fileName);
                    new PostMan(currentJob, false).Send(message, "File Processed", !strEmailAfterFileProcessedAttachInputFile.ParseBool(), outFile);
                }
                else
                {
                    new PostMan(currentJob, false).Send(message, "File Processed", !strEmailAfterFileProcessedAttachInputFile.ParseBool(), outFile);
                }
            }
            else
            {
                List<string> otherFiles = new List<string>(strEmailAfterFileProcessedAttachOtherFiles.Split(",".ToCharArray()));
                new PostMan(currentJob, false).Send(message, "File Processed", !strEmailAfterFileProcessedAttachInputFile.ParseBool(), otherFiles);
            }
        }
    }
    #endregion Sending Email
    #region Handling Global Events
    if ((currentJob != null) && (currentJob.ErroredByPusher == false))
    {
        PullersEventArgs e = new PullersEventArgs(datasourceId, jobId, appKeys, fileName, outputFolder, zipUniqueId, zipInfo);
        Registry.Instance.GlobalEventsOnCompletes.Complete(datasourceId, e);
    }
    #endregion Handling Global Events
    if (currentJob != null)
    {
        currentJob.PerformanceCounter.PrintTrace(jobId);
    }
}
/// <summary>
/// Handles a newly arrived input (either a dropped file or a WCF request): prepares the
/// input via JobProcessorFileHandler, dispatches it to the appropriate processor based on
/// the file extension (.zip/.rar/.tar, .xls/.xlsx, .edi, or plain file), writes the output
/// via PrepareOutput and raises the 'file processed' notification.
/// </summary>
/// <param name="e">Watcher event describing the downloaded file / incoming request.</param>
internal void FileDownloaded(FileSystemWatcherEventArgs e)
{
    JobProcessorFileHandler jobProcessorFile = new JobProcessorFileHandler(e);
    jobProcessorFile.PrepareInput();
    if (!jobProcessorFile.IsRequestFromWCF)
    {
        ExtensionMethods.TraceInformation("Pullers - Got a new file for {0} - '{1}'", jobProcessorFile.DataSourceId, e.FileName);
    }
    else
    {
        ExtensionMethods.TraceInformation("Pullers - Got a new WCF request for {0}, JobId = '{1}'", jobProcessorFile.DataSourceId, jobProcessorFile.JobId);
    }
    // Disabled data sources are skipped outright.
    if (DataSourceIsDisabled(jobProcessorFile.DataSourceId, e))
    {
        return;
    }
    StringBuilder result = new StringBuilder();
    JobProcessor jobProcessor = new JobProcessor();
    if (jobProcessorFile.IsRequestFromWCF)
    {
        // WCF request: process by job id / input file name rather than by watched file path.
        jobProcessor.IsWCFRequest = true;
        result = jobProcessor.ProcessJob(jobProcessorFile.DataSourceId, string.Empty, jobProcessorFile.ProcessingBy, jobProcessorFile.InputFileNameOnly, jobProcessorFile.JobId, jobProcessorFile.WithError, jobProcessorFile.WithWarning);
    }
    else if ((jobProcessorFile.InputFileExtension.ToLower() == ".zip") || (jobProcessorFile.InputFileExtension.ToLower() == ".rar") || (jobProcessorFile.InputFileExtension.ToLower() == ".tar"))
    {
        // Archive input: PrepareInputZip handles extraction; the result written out is the zip batch id.
        jobProcessorFile.PrepareInputZip();
        result.AppendLine(jobProcessorFile.ZipUniuqeId);
    }
    else if ((jobProcessorFile.InputFileExtension.ToLower() == ".xls") || (jobProcessorFile.InputFileExtension.ToLower() == ".xlsx"))
    {
        result = jobProcessor.ProcessSpreadSheet(jobProcessorFile.DataSourceId, string.Empty, jobProcessorFile.ProcessingBy, e.FileName);
    }
    else if (jobProcessorFile.InputFileExtension.ToLower() == ".edi")
    {
        // EDI input is delegated to EDIX12FileWatcher; this branch notifies and returns early,
        // bypassing the PrepareOutput call at the bottom of the method.
        new EDIX12FileWatcher(jobProcessorFile.DataSourceId, e.FileName).Process();
        ExtensionMethods.TraceInformation("Pullers - {0} successfully processed. 
Output file was {1}", e.FileName, jobProcessorFile.OutputFileName);
        if (File.Exists(jobProcessorFile.OutputFileName))
        {
            InvokeFileProcessed(jobProcessorFile.DataSourceId, jobProcessor.JobId, jobProcessorFile.Keys, jobProcessorFile.OutputFileName, jobProcessorFile.ActualOutputFolder, jobProcessorFile.ZipUniuqeId);
        }
        jobProcessor.Dispose();
        return;
    }
    else
    {
        // Default: treat the input as a plain data file.
        result = jobProcessor.ProcessJob(jobProcessorFile.DataSourceId, string.Empty, jobProcessorFile.ProcessingBy, e.FileName, string.Empty, jobProcessorFile.WithError, jobProcessorFile.WithWarning);
    }
    #region Commented2
    // (previously commented-out legacy output-writing code removed for readability;
    // recover from source control history if ever needed)
    #endregion Commented2
    // PrepareOutput persists the result; when it succeeds, raise the 'file processed' notification.
    if (jobProcessorFile.PrepareOutput(result))
    {
        InvokeFileProcessed(jobProcessorFile.DataSourceId, jobProcessor.JobId, jobProcessorFile.Keys, jobProcessorFile.OutputFileName, jobProcessorFile.ActualOutputFolder, jobProcessorFile.ZipUniuqeId);
    }
    jobProcessor.Dispose();
}
/// <summary>
/// Processes a file by (1) moving file to archive folder, and then (2) invoking FileDownloaded event
/// </summary>
/// <param name="fileFullName">Full path of the dropped file.</param>
/// <param name="fileName">File name (a path-like value is reduced to its bare name).</param>
/// <param name="dataSourceId">Data source id; 0 means 'derive from the numeric drop-folder name'.</param>
/// <param name="handleArchive">When true, the file is archived (with backup of any clash) before processing.</param>
public void Process(string fileFullName, string fileName, int dataSourceId, bool handleArchive = true)
{
    // Reduce a path-like name to its bare file name.
    int lastSlash = fileName.LastIndexOf("\\");
    if (lastSlash >= 0)
    {
        fileName = fileName.Substring(lastSlash + 1);
    }

    // A zero data source id means the id is encoded as the parent folder name.
    if (dataSourceId == 0)
    {
        int parsedId = 0;
        int.TryParse(Directory.GetParent(fileFullName).Name, out parsedId);
        if (parsedId == 0)
        {
            throw new Exception(string.Format("File '{0}' dropped on wrong location!", fileFullName));
        }
        dataSourceId = parsedId;
    }

    lock (_lock)
    {
        string processedFile = fileFullName;
        string uniqueIdentifier = Guid.NewGuid().ToString();

        if (handleArchive)
        {
            // Archive under <ArchiveLocation>\<dataSourceId>\<yyyyMMdd>.
            string archiveFolder = ArchiveLocation + "\\" + dataSourceId + "\\" + DateTime.Now.ToString("yyyyMMdd");
            processedFile = Path.Combine(archiveFolder, fileName);
            Directory.CreateDirectory(Path.GetDirectoryName(processedFile));
            if (File.Exists(processedFile))
            {
                // Back up an already-archived file of the same name before overwriting it.
                string backupName = Path.Combine(archiveFolder, string.Format("{0}_{1}", uniqueIdentifier, fileName));
                FileCopy(processedFile, backupName, true); //backup existing
            }
            FileCopy(fileFullName, processedFile, true); //move file
        }

        // Reset the shared parameter bag so downstream handlers see only this file's context.
        if (this.DataSourceParameters == null)
        {
            this.DataSourceParameters = new Dictionary<string, object>();
        }
        DataSourceParameters.Clear();
        DataSourceParameters.Add("DataSourceId", dataSourceId);

        FileSystemWatcherEventArgs args = new FileSystemWatcherEventArgs(DataSourceParameters, processedFile, uniqueIdentifier);
        try
        {
            InvokeFileDownloaded(args);
        }
        catch (BusinessException ex)
        {
            // Known business failure: trace and continue.
            ExtensionMethods.TraceError(ex.ToString());
            Trace.Flush();
        }
        catch (Exception ex)
        {
            // Unknown failure: trace loudly and continue.
            ExtensionMethods.TraceError("An unknown error occurred!. {0}. {1} This error needs immediate attention", ex.ToString(), Environment.NewLine + Environment.NewLine);
            Trace.Flush();
        }
    }
}
/// <summary>
/// Writes the collected performance counters for the given job to the trace listeners:
/// job-level pre-validate tasks, one comma-separated line of durations per processed row
/// (with a trailing per-row total), job-level post-validate tasks, and the overall elapsed time.
/// No-op unless performance-counter tracking is enabled in configuration.
/// </summary>
/// <param name="jobId">Job identifier used to look up the job's PerformanceTask in WorkSheet.</param>
public void PrintTrace(string jobId)
{
    // Tracking must be explicitly enabled in configuration; otherwise do nothing.
    if (EyediaCoreConfigurationSection.CurrentConfig.Tracking.PerformanceCounter)
    {
        ExtensionMethods.TraceInformation("Printing performance counters.");
    }
    else
    {
        return;
    }

    if ((!(WorkSheet.ContainsKey(jobId))) || (IsEverythingOK == false))
    {
        ExtensionMethods.TraceInformation("Can not print! Performance counter could not calculate due to some issues. Reset it again for next process.");
        return;
    }

    PerformanceTask thePerformanceTask = WorkSheet[jobId] as PerformanceTask;
    if (thePerformanceTask == null)
    {
        // FIX: the 'as' cast result was previously dereferenced without a null check (NullReferenceException risk).
        ExtensionMethods.TraceInformation("Can not print! Performance counter could not calculate due to some issues. Reset it again for next process.");
        return;
    }

    // Ensure an end timestamp exists before computing elapsed time.
    if (thePerformanceTask.EndedAt == DateTime.MinValue)
    {
        thePerformanceTask.Stop();
    }

    StringBuilder sb = new StringBuilder();
    sb.AppendLine(string.Format("Started at:{0}", thePerformanceTask.StartedAt));

    #region PreValidate
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.JobInit), sb);
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.FeedData), sb);
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.SliceData), sb);
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.WorkerInit), sb);
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.PreValidate), sb);
    #endregion PreValidate

    #region Rows
    List<KeyValuePair<int, List<PerformanceTaskNameFormatted>>> aRowList = thePerformanceTask.GetRowLevelPerformanceTaskList();
    sb.AppendLine(GetColumnNames());
    foreach (KeyValuePair<int, List<PerformanceTaskNameFormatted>> item in aRowList)
    {
        List<PerformanceTaskNameFormatted> lst = item.Value.OrderBy(i => i.Position).ToList();
        string row = string.Empty;
        TimeSpan tsTotalForThisRow = TimeSpan.Zero;
        // One comma-separated duration per RowPerformanceTaskNames member; "NULL" when that task did not run.
        // (Enum.GetValues replaces the previous Enum.GetNames + Enum.Parse round-trip; same value order.)
        foreach (RowPerformanceTaskNames thisPerformanceTaskName in Enum.GetValues(typeof(RowPerformanceTaskNames)))
        {
            PerformanceTaskNameFormatted tnf = lst.Where(l => l.PerformanceTaskName == thisPerformanceTaskName).SingleOrDefault();
            row += string.Format("{0},", tnf == null ? "NULL" : tnf.Duration.ToString());
            if (tnf != null)
            {
                tsTotalForThisRow += tnf.Duration;
            }
        }
        row += tsTotalForThisRow.ToString(); // last column: total duration for this row
        sb.AppendLine(row);
    }
    #endregion Rows

    #region PostValidate
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.PostValidate), sb);
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.WorkerFinalize), sb);
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.ExecuteWorkerManager), sb);
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.ResultToDataTable), sb);
    FormatJobPerformanceTasks(GetPerformanceTaskSubPerformanceTask(thePerformanceTask, JobPerformanceTaskNames.OutputWriter), sb);
    #endregion PostValidate

    TimeSpan ts = thePerformanceTask.EndedAt - thePerformanceTask.StartedAt;
    // FIX: corrected "Miliseconds"/"perfromance" typos in the emitted text; removed an unused 'counter' local.
    string formattedTs = string.Format("{0} Days,{1} Hours,{2} Minutes,{3} Seconds, {4} Milliseconds", ts.Days, ts.Hours, ts.Minutes, ts.Seconds, ts.Milliseconds);
    sb.AppendLine(string.Format("Finished at:{0}. Elapsed time:{1} ({2}).", thePerformanceTask.EndedAt, formattedTs, ts.TotalMilliseconds));

    Trace.WriteLine(Environment.NewLine);
    Trace.Write(sb.ToString());
    Trace.WriteLine(Environment.NewLine);
    ExtensionMethods.TraceInformation("Printing performance counters. Done!");
    Trace.Flush();
}
/// <summary>
/// FileSystemWatcher 'Created' handler. Suspends the watcher, waits (with retries) for the
/// file copy to complete, then hands the file to <see cref="Process"/> and re-scans for any
/// files that arrived while the watcher was suspended.
/// </summary>
/// <param name="source">Event source (unused).</param>
/// <param name="e">Event describing the created file.</param>
private void OnCreated(object source, FileSystemEventArgs e)
{
    if (e.Name == "New Folder")
    {
        return; // ignore Windows Explorer folder-creation noise
    }

    // Suspend the watcher while we work; it is re-enabled on every exit path below.
    Registry.Instance.LocalFileWatcher.EnableRaisingEvents = false;
    bool fileCopied = false;
    lock (_lock)
    {
        if (IsItLastFewRecentFile(e.FullPath))
        {
            // FIX: duplicate event for a file we just handled previously returned
            // without re-enabling the watcher, leaving it permanently disabled.
            Registry.Instance.LocalFileWatcher.EnableRaisingEvents = true;
            return;
        }
        DateTime fileReceived = DateTime.Now;
        CurrentFile = e.FullPath;
        while (true)
        {
            if (FileDownloadCompleted(CurrentFile))
            {
                fileCopied = true;
                AddToLastFewRecentFiles(e.FullPath);
                break;
            }
            else
            {
                // Calculate the elapsed time and stop if the maximum retry
                // period has been reached.
                TimeSpan timeElapsed = DateTime.Now - fileReceived;
                if (timeElapsed.TotalMinutes > IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.RetryTimeOut)
                {
                    ExtensionMethods.TraceError("The file \"{0}\" could not be processed. Time elapsed = '{1}', LocalFileWatcherMaximumRetryPeriod = '{2}'",
                        CurrentFile, timeElapsed.TotalMinutes, IdpeConfigurationSection.CurrentConfig.LocalFileWatcher.RetryTimeOut);
                    break;
                }
                Thread.Sleep(300);
                if (string.IsNullOrEmpty(CurrentFile))
                {
                    break;
                }
            }
            Trace.Flush();
        }
    }

    // FIX: re-enable unconditionally. Previously the 'CurrentFile cleared' loop exit
    // left the watcher disabled (only the timeout and success paths re-enabled it).
    Registry.Instance.LocalFileWatcher.EnableRaisingEvents = true;
    if (fileCopied)
    {
        this.Process(CurrentFile);
        lock (_lock)
        {
            CurrentFile = string.Empty;
        }
    }
    HandleExistingFiles(); //this is important (when more than 1 file is dropped)
    Trace.Flush();
}
/// <summary>
/// Reads one worksheet of an Excel file into a DataTable. Falls back to OLE reading
/// (when enabled in configuration) if the Excel library returns no tables.
/// Returns an empty DataTable when the file cannot be read or the sheet index is out of range.
/// </summary>
/// <param name="fileName">Full path of the Excel file.</param>
/// <param name="isFirstRowHeader">True to treat the first row as column names.</param>
/// <param name="spreadSheetNumber">Zero-based worksheet index to return.</param>
/// <param name="columnCount">Set to the returned table's column count (unchanged on failure).</param>
/// <returns>The requested worksheet as a DataTable, or an empty DataTable.</returns>
public static DataTable ReadExcelFile(string fileName, bool isFirstRowHeader, int spreadSheetNumber, ref int columnCount)
{
    try
    {
        ExcelDataReaderInstantiator excelDataReaderInstantiator = null;
        try
        {
            excelDataReaderInstantiator = new ExcelDataReaderInstantiator(fileName);
        }
        catch (IOException)
        {
            // Deliberate swallow: during multi-instance scenario the file might have
            // already been processed (and locked/removed) by another instance.
        }

        DataSet result = null;
        if ((excelDataReaderInstantiator != null) && (excelDataReaderInstantiator.ExcelReader != null))
        {
            excelDataReaderInstantiator.ExcelReader.IsFirstRowAsColumnNames = isFirstRowHeader;
            result = excelDataReaderInstantiator.ExcelReader.AsDataSet(true);
            excelDataReaderInstantiator.ExcelReader.Close();
        }

        if (result == null)
        {
            // FIX: was a discarded `new DataTable();` statement (missing return);
            // behavior only worked via accidental fall-through to the final return.
            return new DataTable();
        }

        if (result.Tables.Count > 0)
        {
            if (spreadSheetNumber < result.Tables.Count)
            {
                columnCount = result.Tables[spreadSheetNumber].Columns.Count;
                return result.Tables[spreadSheetNumber];
            }
            // FIX: same discarded `new DataTable();` defect as above.
            return new DataTable();
        }

        // Nothing could be retrieved using the Excel library.
        // Let's try using the standard OLE mechanism, when enabled.
        if (IdpeConfigurationSection.CurrentConfig.MicrosoftExcelOLEDataReader.Enabled)
        {
            ExtensionMethods.TraceInformation("Could not read using default excel library, trying to read using OLE");
            DataTable table = ReadUsingOLE(fileName, isFirstRowHeader);
            if (table.Rows.Count == 0)
            {
                ExtensionMethods.TraceError("Could not read any record from '{0}'.", Path.GetFileName(fileName));
            }
            columnCount = table.Columns.Count;
            return table;
        }

        return new DataTable();
    }
    catch (FileNotFoundException)
    {
        // This try/catch is placed for when multiple instances of IDPE run on the same machine.
        return new DataTable();
    }
}
/// <summary>
/// Reads a delimited text file into a DataTable. Column names come from the first row when
/// <paramref name="firstRowIsHeader"/> is true, otherwise synthetic names "Column1..N" are used
/// and the first row is kept as data. Returns an empty DataTable when the file does not exist
/// or a row's field count does not match the column count.
/// </summary>
/// <param name="fileName">Full path of the delimited file.</param>
/// <param name="delimiter">Field delimiter (defaults to comma).</param>
/// <param name="firstRowIsHeader">True to treat the first row as column names.</param>
/// <returns>The parsed table, or an empty DataTable on failure.</returns>
public static DataTable ReadFile(string fileName, string delimiter = ",", bool firstRowIsHeader = true)
{
    if (!File.Exists(fileName))
    {
        return new DataTable();
    }

    DataTable table = new DataTable();
    // FIX: parser is now disposed on every path (the original leaked the TextFieldParser
    // if an exception escaped mid-parse, since Close() was only reached on normal exit).
    using (TextFieldParser parser = new TextFieldParser(fileName))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(delimiter);
        bool columnInformationTaken = false;
        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();
            if (!columnInformationTaken)
            {
                if (firstRowIsHeader)
                {
                    foreach (string field in fields)
                    {
                        table.Columns.Add(field);
                    }
                }
                else
                {
                    // No header row: synthesize names and keep this row as data.
                    for (int c = 1; c <= fields.Length; c++)
                    {
                        table.Columns.Add("Column" + c.ToString());
                    }
                    table.Rows.Add(fields);
                }
                columnInformationTaken = true;
            }
            else
            {
                if (table.Columns.Count != fields.Length)
                {
                    // Ragged row: same outcome as before (trace + empty result), without
                    // the intermediate 'failed' flag and null-assignment dance.
                    ExtensionMethods.TraceError("Data source attribute count and data fed is not equal, could not parse data!");
                    table.Dispose();
                    return new DataTable();
                }
                table.Rows.Add(fields);
            }
        }
    }
    return table;
}