/// <summary>
/// Persists a new submission object and attaches the mapped entity for each incoming row.
/// </summary>
/// <param name="submission">DTO describing the submission and the rows it carries.</param>
/// <returns>The persisted submission entity with its row collection populated.</returns>
public async Task<HmrSubmissionObject> CreateSubmissionObjectAsync(SubmissionObjectCreateDto submission)
{
    var entity = await AddAsync(submission);

    // Map each row DTO onto the entity's row collection so the rows are
    // saved together with their parent submission object.
    foreach (var rowDto in submission.SubmissionRows)
    {
        entity.HmrSubmissionRows.Add(Mapper.Map<HmrSubmissionRow>(rowDto));
    }

    return entity;
}
/// <summary>
/// Flags rows of the incoming submission that duplicate previously stored rows
/// by setting their status to the "row duplicate" status.
/// </summary>
/// <param name="submission">Submission whose rows are checked for duplicates.</param>
private async Task MarkDuplicateRowAsync(SubmissionObjectCreateDto submission)
{
    // When rows have an identifier, duplicates only need to be checked against the
    // latest records of the contract term; without one, all historical records are scanned.
    var duplicateRows = HasRowIdentifier
        ? _rowRepo.FindDuplicateFromLatestRecordsAsync(submission.SubmissionStreamId, (decimal)submission.ContractTermId, submission.SubmissionRows)
        : _rowRepo.FindDuplicateFromAllRecordsAsync(submission.SubmissionStreamId, submission.SubmissionRows);

    await foreach (var duplicate in duplicateRows)
    {
        duplicate.RowStatusId = _statusService.RowDuplicate;
    }
}
/// <summary>
/// Validates the uploaded file's size against the stream's limit and, when acceptable,
/// records the raw bytes and their SHA-256 hash on the submission.
/// </summary>
/// <param name="submission">Submission being built; receives the hash and byte content on success.</param>
/// <param name="reportType">Submission stream definition supplying an optional size limit.</param>
/// <param name="stream">Uploaded file stream.</param>
/// <param name="errors">Error collector keyed by field name.</param>
/// <returns><c>true</c> when the file exceeds the size limit (an error was recorded); otherwise <c>false</c>.</returns>
private bool CheckFileContents(SubmissionObjectCreateDto submission, SubmissionStreamDto reportType, Stream stream, Dictionary<string, List<string>> errors)
{
    var contents = stream.ToBytes();

    // Fall back to the application-wide cap when the stream defines no explicit limit.
    var sizeLimit = reportType.FileSizeLimit ?? Constants.MaxFileSize;

    if (contents.Length > sizeLimit)
    {
        errors.AddItem("FileSize", $"The file size exceeds the maximum size {sizeLimit / 1024 / 1024}MB.");
        return true;
    }

    submission.FileHash = contents.GetSha256Hash();
    submission.DigitalRepresentation = contents;
    return false;
}
/// <summary>
/// Determines whether the uploaded file is identical (by hash) to the most recent
/// submission for the same stream and service area.
/// </summary>
/// <remarks>
/// Only the single latest submission is inspected, regardless of its outcome, to cover:
/// S1: file #1 succeeds, file #2 succeeds, file #1 is submitted again —
///     the resubmission should succeed rather than be rejected as a duplicate.
/// S2: file #1 fails during processing, file #2 succeeds, file #1 is submitted again —
///     the resubmission should surface its processing error rather than a duplicate-file error.
/// </remarks>
/// <param name="submission">Candidate submission whose hash is compared.</param>
/// <returns><c>true</c> when the latest prior submission has the same file hash.</returns>
public async Task<bool> IsDuplicateFileAsync(SubmissionObjectCreateDto submission)
{
    var previous = await DbSet
        .Include(x => x.SubmissionStatus)
        .Where(x => x.SubmissionStreamId == submission.SubmissionStreamId && x.ServiceAreaNumber == submission.ServiceAreaNumber)
        .OrderByDescending(x => x.SubmissionObjectId)
        .FirstOrDefaultAsync();

    // No prior submission, or the last attempt died with an internal error —
    // in either case the user must be allowed to upload the same file again.
    if (previous == null || previous.SubmissionStatus.StatusCode == FileStatus.FileUnexpectedError)
    {
        return false;
    }

    return previous.FileHash == submission.FileHash;
}
/// <summary>
/// Parses the wildlife-report CSV into <paramref name="submission"/>.SubmissionRows,
/// validating the header, normalizing service-area values, and collecting per-field errors.
/// </summary>
/// <param name="submission">Submission being built; receives one SubmissionRowDto per CSV record.</param>
/// <param name="textReader">Reader positioned at the start of the CSV content.</param>
/// <param name="errors">Error collector keyed by field name.</param>
/// <returns><c>true</c> when parsing and validation produced no errors.</returns>
protected override async Task<bool> ParseRowsAsync(SubmissionObjectCreateDto submission, TextReader textReader, Dictionary<string, List<string>> errors)
{
    using var csv = new CsvReader(textReader, CultureInfo.InvariantCulture);
    CsvHelperUtils.Config(errors, csv, false);
    csv.Configuration.RegisterClassMap<WildlifeRptInitCsvDtoMap>();

    // Acceptable service-area strings for this submission, plus the service-area
    // lookup used to normalize each row's value to a canonical form.
    var serviceAreastrings = ConvertServiceAreaToStrings(submission.ServiceAreaNumber);
    var serviceArea = await _saService.GetServiceAreaByServiceAreaNumberAsyc(submission.ServiceAreaNumber);

    // Header is validated lazily on the first record, since CsvHelper only
    // exposes the header after the first Read/GetRecord.
    var headerValidated = false;
    var rows = new List<WildlifeRptInitCsvDto>();

    while (csv.Read())
    {
        WildlifeRptInitCsvDto row = null;
        try
        {
            row = csv.GetRecord<WildlifeRptInitCsvDto>();
            if (!headerValidated)
            {
                if (!CheckCommonMandatoryFields(csv.Context.HeaderRecord, WildlifeReportHeaders.MandatoryFields, errors))
                {
                    // Mandatory header fields missing — abort immediately.
                    return false;
                }
                else
                {
                    headerValidated = true;
                }
            }
            row.ServiceArea = serviceArea.ConvertToServiceAreaString(row.ServiceArea);
            rows.Add(row);
        }
        catch (TypeConverterException ex)
        {
            // A field value failed conversion; record it against the member name and stop parsing.
            errors.AddItem(ex.MemberMapData.Member.Name, ex.Message);
            break;
        }
        catch (CsvHelper.MissingFieldException)
        {
            break; //handled in CsvHelperUtils
        }
        catch (CsvHelper.ReaderException ex)
        {
            // Structural read failure — most likely the wrong report type was selected.
            _logger.LogWarning(ex.Message);
            errors.AddItem("Report Type", "Please make sure the report type selected is correct.");
            return false;
        }
        catch (CsvHelperException ex)
        {
            // Unexpected CsvHelper failure: log and let the caller's error handling deal with it.
            _logger.LogInformation(ex.ToString());
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex.ToString());
            throw;
        }

        // Reject rows that reference a service area outside this submission's scope.
        if (!serviceAreastrings.Contains(row.ServiceArea))
        {
            errors.AddItem("ServiceArea", $"The file contains service area which is not {serviceAreastrings[0]}.");
            return false;
        }

        // Store the raw record (line breaks stripped) and its hash for duplicate detection.
        var line = csv.Context.RawRecord.RemoveLineBreak();
        submission.SubmissionRows.Add(new SubmissionRowDto
        {
            RecordNumber = null,
            RowValue = line,
            RowHash = line.GetSha256Hash(),
            RowStatusId = _statusService.RowReceived,
            // Missing accident date falls back to the sentinel minimum date.
            EndDate = row.AccidentDate ?? Constants.MinDate,
            RowNum = csv.Context.Row
        });
    }

    // Domain-level validation only runs when parsing itself was clean.
    if (errors.Count == 0)
    {
        Validate(rows, Entities.WildlifeReportInit, errors);
    }
    return errors.Count == 0;
}
/// <summary>
/// Parses CSV rows into the submission. This base implementation is a placeholder;
/// report-specific derived classes must override it with their own parsing logic.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown by the base implementation.</exception>
protected virtual Task<bool> ParseRowsAsync(SubmissionObjectCreateDto submission, TextReader textReader, Dictionary<string, List<string>> errors)
    => throw new NotImplementedException();
/// <summary>
/// Runs the full upload pipeline for one file: basic file checks, size/hash capture,
/// duplicate-file check, CSV parsing, contract-term resolution, and duplicate-row marking.
/// Returns early with the collected errors at the first failed stage.
/// </summary>
/// <param name="upload">The incoming upload (service area + file).</param>
/// <returns>The accumulated errors (empty on success) and the populated submission DTO.</returns>
/// <exception cref="Exception">Thrown when no submission stream is configured for <c>TableName</c>.</exception>
private async Task<(Dictionary<string, List<string>> errors, SubmissionObjectCreateDto submission)> ValidateAndParseUploadFileAsync(FileUploadDto upload)
{
    var errors = new Dictionary<string, List<string>>();

    // A missing stream definition is a configuration error, not a user error — fail hard.
    var reportType = await _streamService.GetSubmissionStreamByTableNameAsync(TableName);
    if (reportType == null)
    {
        throw new Exception($"The submission stream for {TableName} is not defined.");
    }

    // Start pessimistic: status is FileError until every stage below passes.
    var submission = new SubmissionObjectCreateDto();
    submission.MimeTypeId = 1; // NOTE(review): magic constant — presumably the CSV mime type id; confirm against the MimeType table.
    submission.ServiceAreaNumber = upload.ServiceAreaNumber;
    submission.SubmissionStreamId = reportType.SubmissionStreamId;
    submission.SubmissionStatusId = _statusService.FileError;
    submission.FileName = "";

    if (upload.ReportFile == null)
    {
        errors.AddItem("File", $"The file is null or empty.");
        return (errors, submission);
    }
    if (!upload.ReportFile.FileName.IsCsvFile())
    {
        errors.AddItem("FileName", "The file is not a CSV file.");
        return (errors, submission);
    }

    // Sanitize and re-append the extension; names over 100 chars are truncated
    // but still reported as an error so the user can fix the filename.
    submission.FileName = Path.GetFileName(upload.ReportFile.FileName).SanitizeFileName() + ".csv";
    if (submission.FileName.Length > 100)
    {
        submission.FileName = submission.FileName.Substring(0, 90) + ".csv";
        errors.AddItem("File", "the filename needs to be shorter than 100 characters");
        submission.SubmissionStatusId = _statusService.FileError;
        return (errors, submission);
    }

    using var stream = upload.ReportFile.OpenReadStream();
    using TextReader textReader = new StreamReader(stream, Encoding.UTF8);

    // CheckFileContents consumes the stream via ToBytes() before textReader is used below;
    // NOTE(review): this assumes ToBytes() rewinds the stream (or OpenReadStream is re-readable) — confirm.
    if (CheckFileContents(submission, reportType, stream, errors))
    {
        return (errors, submission);
    }
    if (await _submissionRepo.IsDuplicateFileAsync(submission))
    {
        errors.AddItem("File", "Duplicate file exists");
        submission.SubmissionStatusId = _statusService.FileDuplicate;
        return (errors, submission);
    }
    if (!await ParseRowsAsync(submission, textReader, errors))
    {
        return (errors, submission);
    }
    if (submission.SubmissionRows.Count == 0)
    {
        errors.AddItem("File", "The file contains no rows");
        return (errors, submission);
    }
    if (HasDuplicateInFile(submission.SubmissionRows, errors))
    {
        return (errors, submission);
    }

    // Resolve the contract term from the latest row date in the file.
    var contract = await _contractRepo.GetContractTerm(submission.ServiceAreaNumber, submission.SubmissionRows.Max(x => x.EndDate));
    if (contract == null)
    {
        // Clearing the hash means the duplicate-file check won't block a retry:
        // it's an error outside of the file; user can submit the same file again and the system should be able to accept it.
        submission.FileHash = null;
        errors.AddItem(DateFieldName, $"Cannot find the contract term for this file");
        return (errors, submission);
    }
    if (contract.PartyId == null)
    {
        // Same reasoning as above — error is in reference data, not the file itself.
        submission.FileHash = null;
        errors.AddItem(DateFieldName, $"Cannot find the party ID for this file");
        return (errors, submission);
    }

    submission.PartyId = contract.PartyId;
    submission.ContractTermId = contract.ContractTermId;

    await MarkDuplicateRowAsync(submission);

    //set IsResubmitted
    // The empty body is intentional: enumerating the async stream drives the
    // per-row IsResubmit updates; the yielded record numbers are not needed here.
    await foreach (var resubmittedRecordNumber in _rowRepo.UpdateIsResubmitAsync(submission.SubmissionStreamId, (decimal)submission.ContractTermId, submission.SubmissionRows)) { }

    // All stages passed — promote the status from FileError to FileReceived.
    submission.SubmissionStatusId = _statusService.FileReceived;
    return (errors, submission);
}