/// <summary>
///     Validates every data row of the uploaded spreadsheet. Rows that pass
///     validation are parsed and queued for upload; failures are recorded
///     on the batch upload instead.
/// </summary>
private void ValidateSpreadsheetRows(DataTable batchData, AllowedData allowedData, BatchUpload batchUpload)
        {
            batchUpload.ColumnsCount = batchData.Columns.Count;
            batchUpload.TotalDataRows = batchData.Rows.Count;

            //TODO FIN-841 check columns are all present

            for (var rowIndex = 0; rowIndex < batchData.Rows.Count; rowIndex++)
            {
                DataRow dataRow = batchData.Rows[rowIndex];
                List<UploadValidationFailure> failures = ValidateUploadedRow(dataRow, rowIndex, allowedData);

                if (failures.Count > 0)
                {
                    // Record every failure for this row and move on
                    foreach (UploadValidationFailure failure in failures)
                    {
                        batchUpload.UploadValidationFailures.Add(failure);
                    }
                    continue;
                }

                var uploadModel = new BatchRowParser(dataRow).GetUploadDataModel();
                // +2 converts the 0-based index to the 1-based spreadsheet row,
                // skipping the header row
                uploadModel.RowNumber = rowIndex + 2;
                batchUpload.DataToUpload.Add(uploadModel);
            }
        }
        /// <summary>
        ///     Exercises the core-data archive round trip: insert a core data row,
        ///     archive the duplicates (which removes them from coredataset), then
        ///     delete the archive rows and verify the archive is empty again.
        /// </summary>
        public void TestArchiveCoreData()
        {
            var batchUpload = new BatchUpload();
            batchUpload.DuplicateRowInDatabaseErrors.Add(new DuplicateRowInDatabaseError
            {
                AgeId = AgeIds.AllAges,
                AreaCode = AreaCodes.CountyUa_Cambridgeshire,
                DbValue = 123.456,
                ErrorMessage = "TestRowIsValidIfCategoryColumnsAreNotPresent Error Message",
                ExcelValue = 543.21,
                IndicatorId = IndicatorIds.ObesityYear6,
                RowNumber = 123,
                SexId = SexIds.Male,
                Uid = 999999999
            });

            _coreDataRepository.InsertCoreData(GetRecordToInsert(), batchId);

            var records = ArchivedRecords(batchUpload);
            Assert.IsTrue(records.Any());

            // Only the first 10 duplicates are archived
            var duplicateRows = records.Select(x => new DuplicateRowInDatabaseError { Uid = x.Uid }).Take(10).ToList();

            //Insert the duplicates to the CoreDataset Archive table and delete the coredataset rows
            // in question (All in one transaction)
            _coreDataRepository.InsertCoreDataArchive(duplicateRows, batchId);

            //Check that the rows have been inserted into the core dataset archive
            Assert.IsTrue(ArchivedRecords(batchUpload).Any());

            //Finally, delete the CoreDataSetArchive record
            _coreDataRepository.DeleteCoreDataArchive(batchId);

            //Check that it has been deleted: the archive should now be empty.
            // (The previous assertion here was inverted - it asserted that the
            // records still existed after the delete.)
            Assert.IsFalse(ArchivedRecords(batchUpload).Any());
        }
        /// <summary>
        ///     Validates the uploaded spreadsheet: per-row validation first, then
        ///     duplicate detection within the spreadsheet and against the database.
        /// </summary>
        public BatchUpload Validate(DataTable indicators, BatchUpload batchUpload)
        {
            var allowedData = new AllowedData(_profilesReader);

            // Validate spreadsheet data
            ValidateSpreadsheetRows(indicators, allowedData, batchUpload);

            if (batchUpload.DataToUpload.Any())
            {
                // Does the spreadsheet duplicate any rows within itself?
                batchUpload.DuplicateUploadErrorsExist = ValidateSpreadsheetForDuplicatedRows(batchUpload);

                // Do any rows already exist in the database?
                var duplicateChecker = new CoreDataSetDuplicateChecker();
                batchUpload.DuplicateRowInDatabaseErrors = duplicateChecker.GetDuplicates(
                    batchUpload.DataToUpload, _coreDataRepository, UploadJobType.Batch);
            }

            GetDistinctIndicators(indicators);

            return batchUpload;
        }
 /// <summary>
 ///     Creates a fresh BatchUpload for the given job with all of its
 ///     collection properties initialised to empty lists.
 /// </summary>
 private BatchUpload ToBatchUpload(UploadJob job)
 {
     // Both file name properties start out as the job's raw filename
     return new BatchUpload
     {
         ShortFileName = job.Filename,
         FileName = job.Filename,
         DataToUpload = new List<UploadDataModel>(),
         DuplicateRowInDatabaseErrors = new List<DuplicateRowInDatabaseError>(),
         DuplicateRowInSpreadsheetErrors = new List<DuplicateRowInSpreadsheetError>(),
         ExcelDataSheets = new List<UploadExcelSheet>(),
         UploadValidationFailures = new List<UploadValidationFailure>()
     };
 }
 /// <summary>
 ///     If duplicate rows were detected in the spreadsheet, replaces the batch
 ///     data table with a de-duplicated copy and logs both facts against the job.
 /// </summary>
 private void CheckDuplicateRowsInWorksheet(UploadJob job, BatchUpload batchUpload, ref DataTable batchDataTable)
 {
     if (batchUpload.DuplicateRowInSpreadsheetErrors.Count > 0)
     {
         var dataWithoutDuplicates = new FileDuplicationHandler().RemoveDuplicatesInBatch(batchDataTable);
         batchDataTable = dataWithoutDuplicates;
         _logger.Info("Job# {0}, There are duplicate rows in spreadsheet", job.Guid);
         // Fixed typo in log message: "Dupllicate" -> "Duplicate"
         _logger.Info("Job# {0}, Duplicate rows removed", job.Guid);
     }
 }
        /// <summary>
        ///     Returns true when the upload has no validation failures; otherwise
        ///     marks the job as failed, logs the conversion error and returns false.
        /// </summary>
        private bool CheckValidationFailures(UploadJob job, BatchUpload batchUpload)
        {
            var failures = batchUpload.UploadValidationFailures;
            if (failures.Count == 0)
            {
                return true;
            }

            jobStatus.FailedValidation(job);

            _jobErrorRepository.Log(ErrorBuilder.GetConversionError(job.Guid, failures));
            _logger.Info("Job# {0}, Data type conversion errors occurred ", job.Guid);
            return false;
        }
        /// <summary>
        ///     Returns true when rows duplicating existing database data were found;
        ///     in that case the job is set to await confirmation and the duplicates
        ///     are logged. Returns false when there are no database duplicates.
        /// </summary>
        private bool CheckDuplicateRowsInDatabase(UploadJob job, BatchUpload batchUpload)
        {
            var duplicates = batchUpload.DuplicateRowInDatabaseErrors;
            if (duplicates.Count == 0)
            {
                return false;
            }

            jobStatus.ConfirmationAwaited(job);

            var error = ErrorBuilder.GetDuplicateRowInDatabaseError(job.Guid, duplicates);
            _jobErrorRepository.Log(error);
            _logger.Info("Job# {0}, There are duplicate rows in database", job.Guid);

            return true;
        }
 /// <summary>
 ///     Fetches the core data archive rows for every indicator that appears in
 ///     the upload's database-duplicate errors.
 /// </summary>
 public IList<CoreDataSetArchive> ArchivedRecords(BatchUpload batchUpload)
 {
     var indicatorIds = batchUpload.DuplicateRowInDatabaseErrors
         .Select(error => error.IndicatorId)
         .ToList();
     return ReaderFactory.GetProfilesReader().GetCoreDataArchiveForIndicatorIds(indicatorIds);
 }
        /// <summary>
        ///     Parses each spreadsheet row, inserts it as core data under the job's
        ///     batch id, writes an upload audit record and returns the populated
        ///     BatchUpload describing what was uploaded.
        /// </summary>
        public BatchUpload UploadData(DataTable indicators, UploadJob job)
        {
            var batchUpload = new BatchUpload();

            foreach (DataRow row in indicators.Rows)
            {
                var upload = new BatchRowParser(row).GetUploadDataModel();
                _coreDataRepository.InsertCoreData(upload.ToCoreDataSet(), job.Guid);
                batchUpload.DataToUpload.Add(upload);
            }

            var rowCount = batchUpload.DataToUpload.Count;

            var uploadId = _loggingRepository.InsertUploadAudit(job.Guid, job.Username, rowCount, job.Filename,
                WorksheetNames.BatchPholio);

            batchUpload.ShortFileName = Path.GetFileName(job.Filename);
            batchUpload.TotalDataRows = rowCount;
            batchUpload.UploadBatchId = job.Guid;
            batchUpload.Id = uploadId;

            return batchUpload;
        }
        /// <summary>
        ///     Checks the spreadsheet for duplicate rows within itself. Rows are
        ///     duplicates when they share indicator, time period, demographic and
        ///     category fields. An error is recorded for every duplicated row.
        /// </summary>
        /// <returns>true when at least one duplicated row was found.</returns>
        private static bool ValidateSpreadsheetForDuplicatedRows(BatchUpload spreadsheet)
        {
            // Count occurrences of each logical row key in a single pass.
            // The previous implementation re-scanned the whole list for every
            // row (Count inside Where), which was O(n^2) for large uploads.
            var occurrenceCounts = spreadsheet.DataToUpload
                .GroupBy(row => new
                {
                    row.IndicatorId,
                    row.Year,
                    row.YearRange,
                    row.Quarter,
                    row.Month,
                    row.AgeId,
                    row.SexId,
                    row.AreaCode,
                    row.CategoryTypeId,
                    row.CategoryId
                })
                .ToDictionary(group => group.Key, group => group.Count());

            var duplicatesFound = false;

            // Second pass preserves the original row order when reporting errors.
            foreach (UploadDataModel row in spreadsheet.DataToUpload)
            {
                // Anonymous types with the same members in the same order share
                // a type and compare structurally, so this key matches the one
                // built by GroupBy above.
                var key = new
                {
                    row.IndicatorId,
                    row.Year,
                    row.YearRange,
                    row.Quarter,
                    row.Month,
                    row.AgeId,
                    row.SexId,
                    row.AreaCode,
                    row.CategoryTypeId,
                    row.CategoryId
                };

                if (occurrenceCounts[key] > 1)
                {
                    spreadsheet.DuplicateRowInSpreadsheetErrors.Add(new DuplicateRowInSpreadsheetError
                    {
                        RowNumber = row.RowNumber,
                        DuplicateRowMessage = "Indicator " + row.IndicatorId + " is duplicated. "
                    });
                    duplicatesFound = true;
                }
            }

            return duplicatesFound;
        }