Example #1
0
        /// <summary>
        /// Feeds data into the job from a list of objects, then slices the resulting
        /// input table into per-thread job slices.
        /// </summary>
        /// <param name="inputData">List of objects to be converted into the job's input table.</param>
        /// <param name="overridenMapping">Comma separated additional mapping override information. For example if "EmpId" from object to be mapped with "EmployeeId" of attribute, then "EmpId=EmployeeId,Ename=EmployeeName"</param>
        /// <returns>List of error messages produced while feeding; empty when the feed succeeded.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="inputData"/> is null.</exception>
        public List <string> Feed(List <Object> inputData, string overridenMapping)
        {
            if (inputData == null)
            {
                // Fail fast at the boundary instead of with an NRE deep inside the feeder.
                throw new ArgumentNullException(nameof(inputData));
            }

            this.DataFedFrom = inputData;
            AnyToDataTable anyToDataTable = new AnyToDataTable(this);
            List <string>  errors         = anyToDataTable.Feed(inputData, overridenMapping);

            if (this.InputData.Rows.Count == 0)
            {
                // Nothing to slice; return the feed errors (if any) rather than
                // slicing an empty table. Mirrors the guard in Feed(DataTable).
                return(errors);
            }

            this.TotalRowsToBeProcessed = this.InputData.Rows.Count;
            int skipHeaderInCsvRows = this.DataSource.IsFirstRowHeader ? 1 : 0;

            int recordsPerThread = GetRecordsPerThread();

            List <DataTable> chunks = InputData.Split(recordsPerThread);

            _JobSlices = new List <JobSlice>();
            int counter = 0;

            // Each slice carries its DataTable chunk plus the matching raw CSV rows
            // (header skipped) so every slice can be processed independently.
            foreach (DataTable table in chunks)
            {
                JobSlice jobSlice = new JobSlice(this.JobIdentifier, counter, table, this.CsvRows.Skip(skipHeaderInCsvRows).Skip(counter * recordsPerThread).Take(recordsPerThread).ToList());
                _JobSlices.Add(jobSlice);
                counter++;
            }
            return(errors);
        }
Example #2
0
        /// <summary>
        /// Feeds data into the job from an xml or CSV or FL or file name (in case of Excel or custom feeder),
        /// or directly from an already parsed table, then slices the input into per-thread job slices.
        /// </summary>
        /// <param name="inputData">The parsed data table; when null, the configured data source is parsed instead.</param>
        /// <returns>List of error messages produced while feeding; empty when the feed succeeded.</returns>
        public List <string> Feed(DataTable inputData = null)
        {
            List <string> errors = new List <string>();

            if (inputData == null)
            {
                // No pre-parsed table supplied: parse from the configured data source.
                PerformanceCounter.Start(JobIdentifier, JobPerformanceTaskNames.FeedData);
                AnyToDataTable anyToDataTable = new AnyToDataTable(this);
                errors = anyToDataTable.Feed();
                PerformanceCounter.Stop(JobIdentifier, JobPerformanceTaskNames.FeedData);
            }
            else
            {
                // Caller supplied the table; rebuild the raw CSV row cache from it so
                // slices can carry their original row text.
                InputData = inputData;
                CsvRows   = new List <string>();
                if (DataSource.IsFirstRowHeader)
                {
                    var columnNames = inputData.Columns.Cast <DataColumn>().Select(column => column.ColumnName).ToArray();
                    CsvRows.Add(string.Join(",", columnNames));
                }
                foreach (DataRow row in inputData.Rows)
                {
                    var fields = row.ItemArray.Select(field => field.ToString()).ToArray();
                    CsvRows.Add("\"" + string.Join("\",\"", fields) + "\"");
                }

                if ((DataSource.IsFirstRowHeader) && (CsvRows.Count > 0))
                {
                    BadDataInCsvFormat.Add(CsvRows[0]); //storing header information
                }
                ColumnCount = inputData.Columns.Count;
            }

            if (this.InputData.Rows.Count == 0)
            {
                // Nothing to slice; return the feed errors (if any) instead of
                // discarding them behind a fresh empty list.
                return(errors);
            }

            PerformanceCounter.Start(JobIdentifier, JobPerformanceTaskNames.SliceData);
            if (!AddAdditionalColumns())
            {
                // Balance the Start() above so the SliceData counter is not left
                // running, and preserve any feed errors for the caller.
                PerformanceCounter.Stop(JobIdentifier, JobPerformanceTaskNames.SliceData);
                return(errors);
            }

            this.TotalRowsToBeProcessed = this.InputData.Rows.Count;
            int skipHeaderInCsvRows = this.DataSource.IsFirstRowHeader ? 1 : 0;

            int recordsPerThread = GetRecordsPerThread();

            this.TraceInformation("Slicing jobs with '{0}' records per thread.", recordsPerThread);
            Trace.Flush();
            List <DataTable> chunks = InputData.Split(recordsPerThread);

            this.TraceInformation("Job sliced. Total = {0}.", chunks.Count);
            Trace.Flush();
            _JobSlices = new List <JobSlice>();
            int counter = 0;

            // Each slice carries its DataTable chunk plus the matching raw CSV rows
            // (header skipped) so every slice can be processed independently.
            foreach (DataTable table in chunks)
            {
                JobSlice jobSlice = new JobSlice(this.JobIdentifier, counter, table, this.CsvRows.Skip(skipHeaderInCsvRows).Skip(counter * recordsPerThread).Take(recordsPerThread).ToList());
                _JobSlices.Add(jobSlice);
                counter++;
            }
            this.TraceInformation("All slices are initialized.");
            Trace.Flush();
            PerformanceCounter.Stop(JobIdentifier, JobPerformanceTaskNames.SliceData);
            return(errors);
        }