/// <summary>
/// Imports every file matched by <paramref name="query"/>, highest-priority report types first.
/// </summary>
/// <param name="query">Query over files that are ready for import.</param>
private async Task ProcessImportFileQuery(IQueryable<ImportedFile> query)
{
    // Order is important for file processing: non-ECA account-status reports first,
    // then ECA account-status reports, then Sytoss client info; newest first within each group.
    var importFiles = query
        .OrderByDescending(f => f.OriginalFileName.StartsWith(_fileNameMatcher.AcctStatusReportNonEcaSqlLike))
        .ThenByDescending(f => f.OriginalFileName.StartsWith(_fileNameMatcher.AcctStatusReportSqlLike))
        .ThenByDescending(f => f.OriginalFileName.Contains(_fileNameMatcher.SytossClientInfoSqlLike))
        .ThenByDescending(f => f.FileCreateDate)
        .ToList();

    if (importFiles.Count == 0)
    {
        GlobalLogger.LogInfo("Job has not found new files for importing. Waiting...", GetType().Name, true);
        return;
    }

    foreach (var file in importFiles)
    {
        try
        {
            await TryImportFile(file);
        }
        catch (Exception ex)
        {
            // Safety net: TryImportFile handles its own errors internally, but if anything
            // still escapes, the file is marked as a failed import so it is not retried blindly.
            file.FileState = FileState.Imported;
            file.FileStatus = FileStatus.Failed;
            _importJobRepository.UpdateImportedFile(file);
            _importJobRepository.SaveChanges();
            GlobalLogger.LogError($"Error while importing file {file?.OriginalFileName}.", ex, GetType().Name, true);
        }
    }
}
/// <summary>
/// Copies one registered file from its FTP source to local storage and persists
/// the resulting state regardless of outcome.
/// </summary>
/// <param name="file">The file to copy; its state/status fields are mutated here.</param>
private async Task TryLoadFileAsync(ImportedFile file)
{
    try
    {
        // Success state is assigned up front; a failure below downgrades FileStatus only.
        // NOTE(review): FileState stays Loaded on failure — presumably that is what the
        // failed-file retry pass (LoadedFailedFileList) keys on; confirm before changing.
        file.FileState = FileState.Loaded;
        file.FileStatus = FileStatus.Success;
        file.CopiedDate = DateTime.UtcNow;

        await _systemManager.SaveFileAsync(file);

        GlobalLogger.LogInfo($"File copied: {file.OriginalFileName} from ftp: [{file.FtpCredential.FtpName}].", GetType().Name, true);
    }
    catch (Exception ex)
    {
        file.FileStatus = FileStatus.Failed;
        GlobalLogger.LogError($"Error while copying file: {file.OriginalFileName} from ftp: [{file.FtpCredential.FtpName}].", ex, GetType().Name, true);
    }
    finally
    {
        // Persist the outcome on both the success and the failure path.
        _copyJobRepository.UpdateImportedFile(file);
        _copyJobRepository.SaveChanges();
    }
}
/// <summary>
/// Uploads all pending files to the ZOHO API, recording per-file send state as it goes.
/// </summary>
protected override async Task RunInternal()
{
    SyncerInfoService.Log(GetType().Name, JobStatus.Started.ToString());

    using (_deliveryJobRepo.BeginOperation())
    {
        try
        {
            var filesToSend = _deliveryJobRepo.GetFilesToSendList();
            if (!filesToSend.Any())
            {
                GlobalLogger.LogInfo("Job has not found new files for uploading. Waiting...", GetType().Name, true);
            }
            else
            {
                var totalCount = 0;
                foreach (var fileUpload in filesToSend)
                {
                    totalCount++;
                    var originalFileName = _deliveryJobRepo.GetOriginalFileName(fileUpload.Id);
                    // FIX: the extracted stream was never disposed — one leaked stream per
                    // uploaded file per run. The using block guarantees disposal once the
                    // upload attempt (success, skip, or failure) completes.
                    using (var fileStream = _extractFileService.ExtractFile(originalFileName))
                    {
                        try
                        {
                            var response = await _restApiService.GetResponseOfUploadAsync(originalFileName, fileStream);
                            GlobalLogger.LogInfo($@"Response upload file: {originalFileName} to ZOHO api. | Status: {response.StatusCode} | ReasonPhrase: {response.ReasonPhrase} | RequestUri: {response.RequestMessage.RequestUri}", GetType().Name, true);
                            fileUpload.IsSent = response.IsSuccessStatusCode;
                            if (!response.IsSuccessStatusCode)
                            {
                                GlobalLogger.LogInfo($"File: {originalFileName} unsuccessfully uploaded | Status Code from ZOHO api: {response.StatusCode}", GetType().Name, true);
                                // Skips the per-file progress log below, same as the original flow;
                                // the finally block still persists the IsSent = false state.
                                continue;
                            }
                            fileUpload.SentDate = DateTime.UtcNow;
                            GlobalLogger.LogInfo($"File: {originalFileName} successfully uploaded to ZOHO api.", GetType().Name, true);
                        }
                        catch (Exception ex)
                        {
                            GlobalLogger.LogError($"Error while upload file: {originalFileName} to ZOHO api.", ex, GetType().Name, true);
                        }
                        finally
                        {
                            _deliveryJobRepo.UpdateFileUpload(fileUpload);
                            await _deliveryJobRepo.SaveChangesAsync();
                        }
                    }
                    GlobalLogger.LogInfo($"Amount uploaded: [{totalCount}] of [{filesToSend.Count()}].", GetType().Name, true);
                }
            }
        }
        catch (Exception ex)
        {
            // FIX: dropped the pointless `$` prefix on a non-interpolated string.
            GlobalLogger.LogError("Error while upload files to ZOHO api.", ex, GetType().Name, true);
        }
    }

    SyncerInfoService.Log(GetType().Name, JobStatus.Stopped.ToString());
}
/// <summary>
/// Parses an account-status XML report and adds new / updates existing TradeAccount records.
/// </summary>
/// <param name="file">The imported file the accounts originate from.</param>
/// <param name="nameBaseNode">Root XML element name (callers pass "AccountStatusReport" or "ECAAccountStatusReport").</param>
/// <param name="sqlLikeExpression">Unused in this method; kept for signature compatibility with callers.</param>
/// <param name="stream">Raw report content; null or empty yields Failed.</param>
/// <returns>FileStatus.Success when all DB writes succeed; otherwise FileStatus.Failed.</returns>
private async Task<FileStatus> ProcessAcctStatusReportFileReport(ImportedFile file, string nameBaseNode, string sqlLikeExpression, Stream stream)
{
    // Nothing to parse.
    // FIX: replaced the redundant `return (await Task.FromResult(x))` pattern with a
    // plain return throughout — an async method wraps the value in a Task itself.
    if (stream == null || stream.Length == 0)
    {
        return FileStatus.Failed;
    }

    var tradeAccountsForAdding = new List<TradeAccount>();
    var tradeAccountsForUpdating = new List<TradeAccount>();

    // XDocument.Load reads from the current position; rewind if the stream was consumed.
    if (stream.Position > 0)
    {
        stream.Position = 0;
    }

    var document = XDocument.Load(stream);
    var elements = document.Element(nameBaseNode)?.Element("Accounts")?.Elements("Account").ToList();
    if (elements != null && elements.Any())
    {
        elements.ForEach(e =>
        {
            try
            {
                var tradeAccount = new TradeAccount
                {
                    AccountName = _fileNameMatcher.GetCorrectAccountId(e.Attribute(XName.Get("id"))?.Value),
                    MasterAccountId = file.MasterAccountId,
                    AccountAlias = e.Attribute(XName.Get("alias"))?.Value,
                    DateOpened = DateHelper.ParseDateTime(e.Attribute(XName.Get("date_opened"))?.Value),
                    DateClosed = DateHelper.ParseDateTime(e.Attribute(XName.Get("date_closed"))?.Value),
                    DateFunded = DateHelper.ParseDateTime(e.Attribute(XName.Get("date_funded"))?.Value),
                    TradeStatus = e.Attribute(XName.Get("status"))?.Value,
                    Mobile = e.Attribute(XName.Get("mobile"))?.Value
                };
                if (_tradeAccountModel.Contains(tradeAccount.AccountName))
                {
                    // Known account: carry over the existing id so it is updated, not duplicated.
                    tradeAccount.Id = _tradeAccountModel.GetById(tradeAccount.AccountName).Id;
                    tradeAccount.ImportedFileId = file.Id;
                    tradeAccountsForUpdating.Add(tradeAccount);
                }
                else
                {
                    tradeAccount.ImportedFile = file;
                    tradeAccountsForAdding.Add(tradeAccount);
                }
            }
            catch (Exception ex)
            {
                // FIX: message previously said "TradeCash" — copy/paste from another parser;
                // this method parses TradeAccount records.
                GlobalLogger.LogError($"Error while parsing TradeAccount record. \nRow: {e}", ex, GetType().Name, true);
            }
        });
    }

    try
    {
        _importJobRepository.AddRangeTradeAccounts(tradeAccountsForAdding);
        _importJobRepository.SaveChanges();
    }
    catch (Exception ex)
    {
        GlobalLogger.LogError("Error while adding TradeAccount records to DB.", ex, GetType().Name, true);
        return FileStatus.Failed;
    }

    foreach (var tradeAccount in tradeAccountsForUpdating)
    {
        try
        {
            _importJobRepository.AddOrUpdateTradeAccount(tradeAccount);
        }
        catch (Exception ex)
        {
            GlobalLogger.LogError("Error while updating TradeAccount record to DB.", ex, GetType().Name, true);
            return FileStatus.Failed;
        }
    }
    _importJobRepository.SaveChanges();

    GlobalLogger.LogInfo($"Trade Accounts: [{tradeAccountsForAdding.Count}] added.", GetType().Name, true);
    GlobalLogger.LogInfo($"Trade Accounts: [{tradeAccountsForUpdating.Count}] updated.", GetType().Name, true);

    RefillTradeAccounts();
    // FIX: removed the explicit GC.Collect() — forcing a full blocking collection after
    // every file hurts throughput; the runtime reclaims the parsed document on its own.
    return FileStatus.Success;
}
/// <summary>
/// Scans every configured FTP source for new report files and registers them in the DB.
/// </summary>
protected override async Task RunInternal()
{
    SyncerInfoService.Log(GetType().Name, JobStatus.Started.ToString());

    using (_ftpJobRepository.BeginOperation())
    {
        var ftpCredentials = _ftpJobRepository.GetFtpCredentialList();
        if (!ftpCredentials.Any())
        {
            GlobalLogger.LogInfo("Job has not found any ftp credentials. Waiting...", GetType().Name, true);
        }
        else
        {
            var existingFiles = _ftpJobRepository.ImportedFileList();
            // FIX: this was a deferred Select re-enumerated by .Contains for every candidate
            // file (accidental O(n*m)); materialize once into a set for O(1) membership checks.
            var existingFileNames = new HashSet<string>(existingFiles.Select(ef => ef.OriginalFileName));

            foreach (var cred in ftpCredentials)
            {
                var totalCount = 0;
                List<(string fileName, string accountName, DateTime creationDate)> newFiles;
                var accountNameList = cred.MasterAccounts.Select(m => m.AccountName).ToList();
                try
                {
                    newFiles = (await _ftpLoader.LoadFilesAsync(cred))
                        .Where(f => f.creationDate >= _fromDate
                                    && accountNameList.Contains(f.accountName)
                                    && !existingFileNames.Contains(f.fileName))
                        // order is important for file processing
                        .OrderByDescending(f => _fileNameMatcher.IsAcctStatusReportNonEca(f.fileName))
                        .ThenByDescending(f => _fileNameMatcher.IsAcctStatusReport(f.fileName))
                        .ThenByDescending(f => f.creationDate)
                        .ToList();
                }
                catch (Exception ex)
                {
                    // NOTE(review): preserved original behavior — a load failure aborts the whole
                    // job (remaining credentials are skipped and no Stopped entry is written).
                    GlobalLogger.LogError($"Error while trying to load files from ftp: [{cred.FtpName}]", ex, GetType().Name, true);
                    return;
                }

                FilterFilesByMaxCreateDate(newFiles, existingFiles, _fileNameMatcher.AcctStatusReportNonEcaSqlLike, cred.MasterAccounts);
                FilterFilesByMaxCreateDate(newFiles, existingFiles, _fileNameMatcher.AcctStatusReportSqlLike, cred.MasterAccounts);
                FilterFilesByMaxCreateDate(newFiles, existingFiles, _fileNameMatcher.SytossClientInfoSqlLike, cred.MasterAccounts);

                if (!newFiles.Any())
                {
                    GlobalLogger.LogInfo($"Job has not found new files in [{cred.FtpName}]. \nWaiting...", GetType().Name, true);
                }
                else
                {
                    GlobalLogger.LogInfo($"Job has found new: [{newFiles.Count()}] files on ftp: [{cred.FtpName}].", GetType().Name, true);
                    var transitFiles = _ftpJobRepository.GetTransitFileList();
                    foreach (var file in newFiles)
                    {
                        // Same guard as the original `!existingFiles.All(f => f.OriginalFileName != file.fileName)`
                        // double negative, expressed via the materialized name set.
                        if (existingFileNames.Contains(file.fileName))
                        {
                            continue;
                        }
                        var newFile = new ImportedFile()
                        {
                            CreatedDate = DateTime.UtcNow,
                            ModifiedDate = DateTime.UtcNow,
                            OriginalFileName = file.fileName,
                            FileCreateDate = file.creationDate,
                            MasterAccount = cred.MasterAccounts.FirstOrDefault(a => a.AccountName == file.accountName),
                            FtpCredential = cred,
                            FileState = FileState.Registered,
                            FileStatus = FileStatus.Success,
                            RegisteredDate = DateTime.UtcNow
                        };
                        // Files referenced by a transit record get a FileUpload stub —
                        // presumably so the delivery job later uploads them; confirm against DeliveryJob.
                        if (transitFiles.Any(f => f.AccountName == file.accountName && file.fileName.Contains(f.OriginalFileName)))
                        {
                            newFile.FileUpload = new FileUpload();
                        }
                        _ftpJobRepository.AddImportedFile(newFile);
                        totalCount++;
                        GlobalLogger.LogInfo($"New file: {newFile.OriginalFileName} registered from ftp: [{cred.FtpName}].", GetType().Name, true);
                    }

                    try
                    {
                        _ftpJobRepository.SaveChanges();
                        GlobalLogger.LogInfo($"Amount registered: [{totalCount}] files in DB from ftp: [{cred.FtpName}].", GetType().Name, true);
                    }
                    catch (Exception ex)
                    {
                        GlobalLogger.LogError($"Error while trying to save registered files from ftp: [{cred.FtpName}]", ex, GetType().Name, true);
                        return;
                    }
                }
            }
        }
    }

    SyncerInfoService.Log(GetType().Name, JobStatus.Stopped.ToString());
}
/// <summary>
/// Downloads all registered files from FTP (priority order) and retries previously
/// failed downloads, persisting state per file via TryLoadFileAsync.
/// </summary>
// FIX: modifier order normalized to the conventional `protected override async`
// (was `protected async override`).
protected override async Task RunInternal()
{
    SyncerInfoService.Log(GetType().Name, JobStatus.Started.ToString());

    using (_copyJobRepository.BeginOperation())
    {
        try
        {
            var registeredFiles = _copyJobRepository
                .RegisteredFilesQuery()
                // order is important for file processing
                .OrderByDescending(f => f.OriginalFileName.Contains(_fileNameMatcher.AcctStatusReportNonEcaSqlLike))
                .ThenByDescending(f => f.OriginalFileName.Contains(_fileNameMatcher.AcctStatusReportSqlLike))
                .ThenByDescending(f => f.OriginalFileName.Contains(_fileNameMatcher.SytossClientInfoSqlLike))
                .ThenByDescending(f => f.FileCreateDate)
                .ToList();

            if (registeredFiles.Count == 0)
            {
                GlobalLogger.LogInfo("Job has not found new files for downloading. Waiting...", GetType().Name, true);
            }
            else
            {
                // FIX: use the List<T>.Count property instead of repeated Count() extension calls.
                GlobalLogger.LogInfo($"Job has found new files: [{registeredFiles.Count}] for downloading.", GetType().Name, true);
                var totalCount = 0;
                foreach (var file in registeredFiles)
                {
                    totalCount++;
                    await TryLoadFileAsync(file);
                    GlobalLogger.LogInfo($"Amount copied: [{totalCount}] of [{registeredFiles.Count}].", GetType().Name, true);
                }
            }

            var loadedFailedFiles = _copyJobRepository.LoadedFailedFileList();
            if (!loadedFailedFiles.Any())
            {
                GlobalLogger.LogInfo("Job has not found LOAD FAILED FILES. Waiting...", GetType().Name, true);
            }
            else
            {
                // FIX: hoist the total out of the loop — Count() was re-evaluated on every
                // iteration (re-enumerating the sequence if the repository returns it deferred).
                var failedTotal = loadedFailedFiles.Count();
                GlobalLogger.LogInfo($"Job has found LOAD FAILED FILES: [{failedTotal}].", GetType().Name, true);
                var failedCount = 0;
                foreach (var file in loadedFailedFiles)
                {
                    failedCount++;
                    await TryLoadFileAsync(file);
                    GlobalLogger.LogInfo($"Amount of recopied (failed files): [{failedCount}] of [{failedTotal}].", GetType().Name, true);
                }
            }
        }
        catch (Exception ex)
        {
            // FIX: dropped the pointless `$` prefix on a non-interpolated string.
            GlobalLogger.LogError("Error while copying files from ftp.", ex, GetType().Name, true);
        }
    }

    SyncerInfoService.Log(GetType().Name, JobStatus.Stopped.ToString());
}
/// <summary>
/// Dispatches a single imported file to the report processor matching its name,
/// then persists the resulting FileState/FileStatus in the finally block.
/// </summary>
/// <param name="currentFile">The file to import; may be null (logged as "no new files").</param>
private async Task TryImportFile(ImportedFile currentFile)
{
    var newFileStatus = FileStatus.Success;
    try
    {
        if (currentFile != null)
        {
            GlobalLogger.LogInfo($"Start importing file: {currentFile.OriginalFileName} | Status: {currentFile.FileStatus}", GetType().Name, true);
            // FIX: the extracted stream was never disposed; dispose it once processing completes.
            using (var streamFile = _extractFileService.ExtractFile(currentFile.OriginalFileName))
            {
                // Matcher order matters: the non-ECA account-status check runs before the generic
                // one (presumably the generic matcher would also match non-ECA names — confirm).
                if (_fileNameMatcher.IsFeesDataFile(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessFeesFileReport(currentFile, streamFile);
                }
                else if (_fileNameMatcher.IsAcctStatusReportNonEca(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessAcctStatusReportFileReport(currentFile, "AccountStatusReport", _fileNameMatcher.AcctStatusReportNonEcaSqlLike, streamFile);
                }
                else if (_fileNameMatcher.IsNavRegex(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessNavFileReport(currentFile, streamFile);
                }
                else if (_fileNameMatcher.IsAcctStatusReport(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessAcctStatusReportFileReport(currentFile, "ECAAccountStatusReport", _fileNameMatcher.AcctStatusReportSqlLike, streamFile);
                }
                else if (_fileNameMatcher.IsCashReport(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessCashReport(currentFile, streamFile);
                }
                else if (_fileNameMatcher.IsTradeAsReport(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessTradesAsReport(currentFile, streamFile);
                }
                else if (_fileNameMatcher.IsSytossClientInfo(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessSytossClientInfoReport(currentFile, streamFile);
                }
                else if (_fileNameMatcher.IsSytossOpenPositions(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessSytossOpenPositions(currentFile, streamFile);
                }
                else if (_fileNameMatcher.IsTradeExeReport(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessTradesExeReport(currentFile, streamFile);
                }
                else if (_fileNameMatcher.IsInterestAccruaReport(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessTradeInterestAccruaReport(currentFile, streamFile);
                }
                else if (_fileNameMatcher.IsTradeCommissionsDetReport(currentFile.OriginalFileName))
                {
                    newFileStatus = await ProcessTradeCommissionsReport(currentFile, streamFile);
                }
            }

            // FIX: this log was outside the null guard and dereferenced currentFile without
            // the null-conditional, throwing a NullReferenceException when no file was
            // supplied (which was then mislogged as an import error).
            GlobalLogger.LogInfo($"File: {currentFile.OriginalFileName} has been imported.", GetType().Name, true);
        }
    }
    catch (Exception ex)
    {
        newFileStatus = FileStatus.Failed;
        GlobalLogger.LogError($"Error while importing file {currentFile?.OriginalFileName}.", ex, GetType().Name, true);
    }
    finally
    {
        if (currentFile != null)
        {
            currentFile.FileState = FileState.Imported;
            currentFile.FileStatus = newFileStatus;
            if (newFileStatus == FileStatus.Success)
            {
                // ImportedDate is only stamped on a successful import.
                currentFile.ImportedDate = DateTime.UtcNow;
            }
            _importJobRepository.UpdateImportedFile(currentFile);
            _importJobRepository.SaveChanges();
        }
        else
        {
            GlobalLogger.LogInfo("Job has not found new files for importing. Waiting...", GetType().Name, true);
        }
    }
}