/// <summary>
/// Synchronizes the stored import mappings of a bank with the supplied set:
/// existing mappings (matched on column index) are updated, mappings no longer
/// present are deleted, and previously unknown column indexes are inserted.
/// </summary>
/// <param name="unitOfWork">Unit of work used for repository access</param>
/// <param name="importBank">Bank whose mappings are synchronized</param>
/// <param name="importMappings">Desired set of mappings for the bank</param>
private async Task InsertOrUpdateMappingsForBank(IUnitOfWork unitOfWork, ImportBankEntity importBank, List<ImportMappingEntity> importMappings)
{
    // Tracked entities: field assignments below are picked up by change tracking.
    var existingImportMappings = await unitOfWork.ImportMappingRepository.FindAllTracked(x => x.ImportBankId == importBank.Id);

    foreach (var existingImportMapping in existingImportMappings)
    {
        var replacement = importMappings.SingleOrDefault(x => x.ColumnIndex == existingImportMapping.ColumnIndex);

        if (replacement == null)
        {
            // Column index is no longer mapped: remove the stored mapping.
            unitOfWork.ImportMappingRepository.Delete(existingImportMapping);
        }
        else
        {
            // Column index still mapped: copy the new values onto the tracked entity.
            existingImportMapping.ColumnName = replacement.ColumnName;
            existingImportMapping.ColumnTypeId = replacement.ColumnTypeId;
            existingImportMapping.FormatValue = replacement.FormatValue;
        }
    }

    // Anything whose column index was not stored yet is a brand-new mapping.
    foreach (var newImportMapping in importMappings.Where(x => !existingImportMappings.Any(y => y.ColumnIndex == x.ColumnIndex)))
    {
        newImportMapping.ImportBankId = importBank.Id;
        unitOfWork.ImportMappingRepository.Insert(newImportMapping);
    }
}
/// <summary>
/// Converts an <see cref="ImportBankEntity"/> to its DTO representation.
/// </summary>
/// <param name="entity">Entity to convert</param>
/// <returns>A new <see cref="ImportBankModel"/> carrying the entity's id and name</returns>
public static ImportBankModel ToDto(this ImportBankEntity entity) =>
    new ImportBankModel
    {
        Id = entity.Id,
        Name = entity.Name
    };
/// <summary>
/// Processes an import file and converts it to a list of import rows, flagging
/// rows that already exist as transactions and applying category mappings.
/// </summary>
/// <param name="unitOfWork">Unit of work used for repository access</param>
/// <param name="fileInputStream">File input stream containing the raw csv data</param>
/// <param name="userId">Id of the current user</param>
/// <param name="importMappings">Import column mappings</param>
/// <param name="importBank">Import bank entity</param>
/// <param name="bankAccountId">Bank account id to use for duplicate detection</param>
/// <returns>Created list of import rows</returns>
public static async Task<IList<ImportRow>> CreateImportRowsFromFile(IUnitOfWork unitOfWork, Stream fileInputStream, int userId, IList<ImportMappingEntity> importMappings, ImportBankEntity importBank, int bankAccountId)
{
    // ConvertFileToImportRows wraps the stream in its own reader (and disposes it),
    // so pass the stream straight through instead of wrapping it in a redundant
    // StreamReader only to hand over reader.BaseStream.
    var importRows = ConvertFileToImportRows(fileInputStream, importBank, importMappings);

    if (importRows != null && importRows.Any())
    {
        // Restrict the duplicate scan to the date window covered by the import,
        // so we do not load the user's entire transaction history.
        var firstDate = importRows.Min(item => item.Transaction.Date);
        var lastDate = importRows.Max(item => item.Transaction.Date);

        var existingTransactions = await unitOfWork.TransactionRepository
            .FindAll(transaction => transaction.UserId == userId &&
                                    transaction.Date >= firstDate &&
                                    transaction.Date <= lastDate);

        foreach (var importRow in importRows)
        {
            // A row counts as a duplicate when an existing transaction matches on
            // amount, name (case-insensitive, stored name trimmed), date and account.
            var transactionExists = existingTransactions.Any(transaction =>
                importRow.Transaction.Amount == transaction.Amount &&
                importRow.Transaction.Name.Equals(transaction.Name.Trim(), StringComparison.CurrentCultureIgnoreCase) &&
                importRow.Transaction.Date == transaction.Date &&
                transaction.BankAccountId == bankAccountId);

            importRow.ExistsInDatabase = transactionExists;
            importRow.Import = !transactionExists;
        }

        // Assign categories to the freshly parsed transactions using the user's mappings.
        var categoryMappings = await unitOfWork.CategoryMappingRepository.FindAll(item => item.Category.UserId == userId, "Category");
        CategoryHandler.SetTransactionCategories(transactions: importRows.Select(item => item.Transaction), categoryMappings: categoryMappings);
    }

    return importRows;
}
/// <summary>
/// Converts a csv file to a list of import rows.
/// </summary>
/// <param name="fileStream">Filestream to use (disposed when reading completes)</param>
/// <param name="importBank">Import bank entity with settings (delimiter, header flag)</param>
/// <param name="importMappings">Import column mappings to use for mapping values</param>
/// <returns>A created import row list</returns>
private static IList<ImportRow> ConvertFileToImportRows(Stream fileStream, ImportBankEntity importBank, IList<ImportMappingEntity> importMappings)
{
    IList<ImportRow> importRows = new List<ImportRow>();

    // Index the mappings by column once, instead of running SingleOrDefault over
    // the list for every field of every row. Single() keeps the original
    // "duplicate column index is an error" behavior.
    var mappingsByColumn = importMappings
        .GroupBy(item => item.ColumnIndex)
        .ToDictionary(group => group.Key, group => group.Single());

    using (var streamReader = new StreamReader(fileStream))
    {
        using var csv = new CsvReader(streamReader, new Configuration { Delimiter = importBank.Delimiter });

        // If the import contains a header first read this so we skip it in the loop
        if (importBank.ImportContainsHeader)
        {
            csv.Read();
        }

        var importRowId = 0;

        // Keep going until we are done
        while (csv.Read())
        {
            // Create a new import row
            var importRow = new ImportRow
            {
                Transaction = new TransactionModel()
            };

            var amountIsNegative = false;

            // Walk the fields left to right until the row runs out of columns.
            for (var i = 0; csv.TryGetField<string>(i, out var rawValue); i++)
            {
                // Only columns with a mapping are converted; others are ignored.
                if (mappingsByColumn.TryGetValue(i, out var importMapping))
                {
                    if (importMapping.ColumnTypeId == ColumnType.AddSubtract)
                    {
                        // This column carries only the sign of the amount; the
                        // amount value itself comes from another mapped column.
                        amountIsNegative = ConvertRawToIsNegative(rawValue, importMapping.FormatValue);
                    }
                    else
                    {
                        ConvertRawColumn(rawValue, importMapping, importRow.Transaction);
                    }
                }
            }

            // Apply the sign after the whole row was read, since the sign column
            // may appear before or after the amount column.
            if (amountIsNegative && importRow.Transaction.Amount > 0)
            {
                importRow.Transaction.Amount *= -1;
            }

            importRow.ImportRowId = importRowId++;
            importRows.Add(importRow);
        }
    }

    return importRows;
}
/// <summary>
/// Inserts the given import bank, or merges its settings into the standard bank
/// that already exists under the same name.
/// </summary>
/// <param name="unitOfWork">Unit of work used for repository access</param>
/// <param name="importBankEntity">Import bank to insert or merge</param>
/// <returns>The inserted entity, or the tracked existing entity with updated settings</returns>
private async Task<ImportBankEntity> InsertOrUpdateImportBank(IUnitOfWork unitOfWork, ImportBankEntity importBankEntity)
{
    // Standard banks are matched by name; a hit means merge instead of insert.
    var existingImportBank = await unitOfWork.ImportBankRepository.FindSingleTracked(x => x.Name == importBankEntity.Name && x.IsStandard);

    if (existingImportBank != null)
    {
        // Copy the configurable settings onto the tracked entity.
        // NOTE(review): no SaveAsync here, unlike the insert branch — presumably
        // the tracked changes are persisted by the caller; verify against callers.
        existingImportBank.ImportContainsHeader = importBankEntity.ImportContainsHeader;
        existingImportBank.Delimiter = importBankEntity.Delimiter;
        return existingImportBank;
    }

    // New bank: insert and persist immediately.
    unitOfWork.ImportBankRepository.Insert(importBankEntity);
    await unitOfWork.SaveAsync();
    return importBankEntity;
}