Beispiel #1
0
        /// <summary>
        /// Feeds data into the job from a list of objects.
        /// </summary>
        /// <param name="inputData">Objects to be converted into the job's input table.</param>
        /// <param name="overridenMapping">Comma separated mapping overrides. For example, to map "EmpId" from the object to the "EmployeeId" attribute, pass "EmpId=EmployeeId,Ename=EmployeeName".</param>
        /// <returns>Errors reported while converting the objects into the data table.</returns>
        public List <string> Feed(List <Object> inputData, string overridenMapping)
        {
            this.DataFedFrom = inputData;

            // Convert the raw objects into this.InputData / this.CsvRows via the generic feeder.
            AnyToDataTable anyToDataTable = new AnyToDataTable(this);
            List <string>  errors         = anyToDataTable.Feed(inputData, overridenMapping);

            this.TotalRowsToBeProcessed = this.InputData.Rows.Count;

            // When the first CSV row is a header it must not be assigned to any slice.
            int skipHeaderInCsvRows = this.DataSource.IsFirstRowHeader ? 1 : 0;
            int recordsPerThread    = GetRecordsPerThread();

            List <DataTable> chunks = InputData.Split(recordsPerThread);

            // Build one JobSlice per chunk, pairing each chunk with its matching window of CSV rows.
            _JobSlices = new List <JobSlice>();
            for (int index = 0; index < chunks.Count; index++)
            {
                List <string> sliceCsvRows = this.CsvRows
                                                 .Skip(skipHeaderInCsvRows + (index * recordsPerThread))
                                                 .Take(recordsPerThread)
                                                 .ToList();
                _JobSlices.Add(new JobSlice(this.JobIdentifier, index, chunks[index], sliceCsvRows));
            }

            return(errors);
        }
Beispiel #2
0
        /// <summary>
        /// Worker-completion callback: merges the results of one processed job slice back into
        /// the parent job, or aborts the job when an abort was requested or the slice cannot be found.
        /// </summary>
        /// <param name="data">Per-worker result payload carrying the slice position, output rows,
        /// errors/warnings, bad CSV data and a trace log. Disposed here after merging.</param>
        private void DoWithData(WorkerData data)
        {
            if ((data.Job.AbortRequested) &&
                (data.Job.IsFinished == false))
            {
                // Abort requested and the job is still running: stop instead of merging results.
                ExtensionMethods.TraceInformation("Abort was requested. Trying to abort job...");
                Trace.Flush();
                AbortJob();
            }
            else
            {
                // Serialize all merging into the shared job state; workers complete concurrently.
                lock (_lock)
                {
                    JobSlice processedJobSlice = null;
                    try
                    {
                        if (job.JobSlices.Count > 0)
                        {
                            // Look up the slice this worker processed by its position.
                            processedJobSlice = job.JobSlices[data.SlicePosition];
                        }
                        else
                        {
                            //job was already disposed for some reason. Errors should get already taken care.
                        }
                    }
                    catch (ArgumentException ex)
                    {
                        // SlicePosition out of range — dump the index and slice count for diagnosis.
                        StringBuilder sb = new StringBuilder("Dumping ArgumentException:" + Environment.NewLine + ex.ToString());
                        sb.AppendLine(string.Format("Trying to access job.JobSlices[{0}]. Total job slices were {1}", data.SlicePosition, job.JobSlices.Count));
                        job.TraceError(sb.ToString());
                    }
                    catch (Exception ex)
                    {
                        // Any other failure while looking up the slice is logged the same way.
                        StringBuilder sb = new StringBuilder("Dumping Exception:" + Environment.NewLine + ex.ToString());
                        sb.AppendLine(string.Format("Trying to access job.JobSlices[{0}]. Total job slices were {1}", data.SlicePosition, job.JobSlices.Count));
                        job.TraceError(sb.ToString());
                    }
                    // No slice found (empty list or lookup failure): abort rather than merge partial state.
                    if (processedJobSlice == null)
                    {
                        AbortJob();
                        return;
                    }

                    processedJobSlice.Status = JobSlice.JobSliceStatus.Processed;

                    if (!job.AbortRequested)
                    {
                        // Emit the worker's accumulated trace log alongside the slice summary.
                        ExtensionMethods.TraceInformation(Environment.NewLine);
                        job.TraceInformation("JS - '{0}' - processed '{1}' records. {2}{3}",
                                             processedJobSlice.JobSliceId, data.Rows.Count, Environment.NewLine, data.TraceLog.ToString());
                        Trace.Flush();
                    }

                    // Merge the worker's output into the job's aggregate collections.
                    // NOTE(review): row count is taken from the slice's input table, not data.Rows —
                    // presumably counting attempted rows rather than produced rows; confirm intent.
                    job.TotalRowsProcessed += processedJobSlice.InputData.Rows.Count;
                    job.Rows.AddRange(data.Rows);
                    job.Errors.AddRange(data.Errors);
                    job.Warnings.AddRange(data.Warnings);
                    job.BadDataInCsvFormat.AddRange(data.BadDataInCsvFormat);

                    // The worker payload is owned here once merged.
                    data.Dispose();
                }
            }
        }
Beispiel #3
0
        /// <summary>
        /// Feeds data into the job from an xml or CSV or FL or file name (in case of Excel or
        /// custom feeder), or directly from an already-parsed <see cref="DataTable"/>.
        /// The fed data is then split into job slices, one per worker chunk.
        /// </summary>
        /// <param name="inputData">The parsed data table; when null, data is pulled from the
        /// configured data source via <c>AnyToDataTable</c>.</param>
        /// <returns>Errors reported while feeding; an empty list when there is nothing to process.</returns>
        public List <string> Feed(DataTable inputData = null)
        {
            List <string> errors = new List <string>();

            if (inputData == null)
            {
                // No table supplied: let the generic feeder read the configured source.
                PerformanceCounter.Start(JobIdentifier, JobPerformanceTaskNames.FeedData);
                AnyToDataTable anyToDataTable = new AnyToDataTable(this);
                errors = anyToDataTable.Feed();
                PerformanceCounter.Stop(JobIdentifier, JobPerformanceTaskNames.FeedData);
            }
            else
            {
                // Table supplied directly: adopt it and rebuild the CSV row mirror from it.
                InputData = inputData;
                CsvRows   = new List <string>();
                if (DataSource.IsFirstRowHeader)
                {
                    var columnNames = inputData.Columns.Cast <DataColumn>().Select(column => column.ColumnName).ToArray();
                    CsvRows.Add(string.Join(",", columnNames));
                }
                foreach (DataRow row in inputData.Rows)
                {
                    // Quote every field so embedded commas survive the CSV round trip.
                    var fields = row.ItemArray.Select(field => field.ToString()).ToArray();
                    CsvRows.Add("\"" + string.Join("\",\"", fields) + "\"");
                }


                if ((DataSource.IsFirstRowHeader) && (CsvRows.Count > 0))
                {
                    BadDataInCsvFormat.Add(CsvRows[0]); //storing header information
                }
                ColumnCount = inputData.Columns.Count;
            }

            // Nothing to slice — return before starting the SliceData counter.
            if (this.InputData.Rows.Count == 0)
            {
                return(new List <string>());
            }

            PerformanceCounter.Start(JobIdentifier, JobPerformanceTaskNames.SliceData);
            if (!AddAdditionalColumns())
            {
                // BUG FIX: the SliceData counter was started above but never stopped on this
                // early-return path, leaving an unbalanced Start/Stop pair.
                PerformanceCounter.Stop(JobIdentifier, JobPerformanceTaskNames.SliceData);
                return(new List <string>());
            }

            this.TotalRowsToBeProcessed = this.InputData.Rows.Count;
            // When the first CSV row is a header it must not be assigned to any slice.
            int skipHeaderInCsvRows = this.DataSource.IsFirstRowHeader ? 1 : 0;

            int recordsPerThread = GetRecordsPerThread();

            this.TraceInformation("Slicing jobs with '{0}' records per thread.", recordsPerThread);
            Trace.Flush();
            List <DataTable> chunks = InputData.Split(recordsPerThread);

            this.TraceInformation("Job sliced. Total = {0}.", chunks.Count);
            Trace.Flush();
            // Build one JobSlice per chunk, pairing each chunk with its matching window of CSV rows.
            _JobSlices = new List <JobSlice>();
            int counter = 0;

            foreach (DataTable table in chunks)
            {
                JobSlice jobSlice = new JobSlice(this.JobIdentifier, counter, table, this.CsvRows.Skip(skipHeaderInCsvRows).Skip(counter * recordsPerThread).Take(recordsPerThread).ToList());
                _JobSlices.Add(jobSlice);
                counter++;
            }
            this.TraceInformation("All slices are initialized.");
            Trace.Flush();
            PerformanceCounter.Stop(JobIdentifier, JobPerformanceTaskNames.SliceData);
            return(errors);
        }