/// <summary>
/// Moves the payload of an imported file into the target document and removes the import record.
/// </summary>
private static void MakeInternal(IZetboxContext ctx, ImportedFile obj, File doc)
{
    // Clone the blob first so the original import (and its blob) can be deleted safely.
    var clonedBlobId = ctx.CreateBlob(ctx.GetFileInfo(obj.Blob.ID), obj.Blob.MimeType);
    doc.Blob = ctx.Find<Blob>(clonedBlobId);
    doc.Name = obj.Name;
    ctx.Delete(obj);
}
/// <summary>
/// Update an imported file in the Db.
/// </summary>
/// <param name="existing">The existing imported file event from the database</param>
/// <param name="fileDescriptor">Serialized descriptor of the file's location</param>
/// <param name="surveyedUtc">Surveyed date, for surveyed surfaces only</param>
/// <param name="minZoom">Minimum zoom level</param>
/// <param name="maxZoom">Maximum zoom level</param>
/// <param name="fileCreatedUtc">File creation timestamp (UTC)</param>
/// <param name="fileUpdatedUtc">File update timestamp (UTC)</param>
/// <param name="importedBy">Who imported the file</param>
/// <param name="log">Logger</param>
/// <param name="serviceExceptionHandler">Throws a service exception if the store fails</param>
/// <param name="projectRepo">Repository that persists the event</param>
/// <returns>The stored UpdateImportedFileEvent</returns>
public static async Task<UpdateImportedFileEvent> UpdateImportedFileInDb(ImportedFile existing,
    string fileDescriptor, DateTime? surveyedUtc, int minZoom, int maxZoom,
    DateTime fileCreatedUtc, DateTime fileUpdatedUtc, string importedBy,
    ILogger log, IServiceExceptionHandler serviceExceptionHandler, IProjectRepository projectRepo)
{
    var nowUtc = DateTime.UtcNow;
    // Start from the existing entity and overwrite only the mutable fields.
    var updateImportedFileEvent = AutoMapperUtility.Automapper.Map<UpdateImportedFileEvent>(existing);
    updateImportedFileEvent.FileDescriptor = fileDescriptor;
    updateImportedFileEvent.SurveyedUtc = surveyedUtc;
    updateImportedFileEvent.MinZoomLevel = minZoom;
    updateImportedFileEvent.MaxZoomLevel = maxZoom;
    updateImportedFileEvent.FileCreatedUtc = fileCreatedUtc; // as per Barret 19th June 2017
    updateImportedFileEvent.FileUpdatedUtc = fileUpdatedUtc;
    updateImportedFileEvent.ImportedBy = importedBy;
    updateImportedFileEvent.ActionUTC = nowUtc;
    log.LogInformation(
        $"UpdateImportedFileInDb. UpdateImportedFileEvent: {JsonConvert.SerializeObject(updateImportedFileEvent)}");
    // StoreEvent returns the affected row count; exactly 1 means success.
    if (await projectRepo.StoreEvent(updateImportedFileEvent).ConfigureAwait(false) == 1)
    {
        return(updateImportedFileEvent);
    }
    // NOTE(review): ThrowServiceException presumably throws, making the final
    // return unreachable; it exists to satisfy the compiler — confirm.
    serviceExceptionHandler.ThrowServiceException(HttpStatusCode.BadRequest, 52);
    return(updateImportedFileEvent);
}
/// <summary>
/// Finds a project's imported file matching the given identity, or null if none exists.
/// Reference surfaces match on parent UID + offset; all other types match on
/// case-insensitive name and type (plus surveyed date for surveyed surfaces).
/// </summary>
public static async Task<ImportedFile> GetImportedFileForProject(string projectUid, string fileName,
    ImportedFileType importedFileType, DateTime? surveyedUtc, ILogger log, IProjectRepository projectRepo,
    double? offset, Guid? parentUid)
{
    var importedFiles = await ImportedFileRequestDatabaseHelper.GetImportedFiles(projectUid, log, projectRepo).ConfigureAwait(false);
    ImportedFile existing = null;
    if (importedFiles.Count > 0)
    {
        if (importedFileType == ImportedFileType.ReferenceSurface)
        {
            // NOTE(review): parentUid.ToString() on a null Guid? yields "" — assumes callers
            // always supply parentUid for reference surfaces; confirm against callers.
            existing = importedFiles.FirstOrDefault(f => f.ImportedFileType == ImportedFileType.ReferenceSurface &&
                f.ParentUid == parentUid.ToString() &&
                f.Offset.EqualsToNearestMillimeter(offset));
        }
        else
        {
            // Surveyed surfaces additionally require an exact SurveyedUtc match.
            existing = importedFiles.FirstOrDefault(
                f => string.Equals(f.Name, fileName, StringComparison.OrdinalIgnoreCase) &&
                     f.ImportedFileType == importedFileType &&
                     (importedFileType == ImportedFileType.SurveyedSurface && f.SurveyedUtc == surveyedUtc ||
                      importedFileType != ImportedFileType.SurveyedSurface));
        }
    }
    return(existing);
}
/// <summary>
/// Converts an imported file into a StaticFile and returns it via the event args.
/// </summary>
public static void MakeStaticFile(ImportedFile obj, MethodReturnEventArgs<StaticFile> e)
{
    var context = obj.Context;
    var staticFile = context.Create<StaticFile>();
    // MakeInternal copies the blob/name and deletes the import record.
    MakeInternal(context, obj, staticFile);
    e.Result = staticFile;
}
/// <summary>
/// Transfers an imported file's blob and name onto <paramref name="doc"/>, then deletes the import.
/// </summary>
private static void MakeInternal(IZetboxContext ctx, ImportedFile obj, File doc)
{
    // Copy the blob so deleting the import record cannot take the data with it.
    var sourceBlob = obj.Blob;
    doc.Blob = ctx.Find<Blob>(ctx.CreateBlob(ctx.GetFileInfo(sourceBlob.ID), sourceBlob.MimeType));
    doc.Name = obj.Name;
    ctx.Delete(obj);
}
/// <summary>
/// Persists an uploaded file and creates an ImportedFile entity for it in the DB.
/// </summary>
/// <param name="file">The uploaded file</param>
/// <param name="userIdentityid">Identity id of the uploading user</param>
/// <param name="loadType">Kind of import</param>
/// <returns>The ImportedFile entity stored in the DB</returns>
public async Task<ImportedFile> SaveImportedFile(HttpPostedFileBase file, int userIdentityid, EImportType loadType)
{
    if (file == null)
    {
        // Specific argument exception instead of a bare Exception; callers that
        // catch Exception still work since ArgumentNullException derives from it.
        throw new ArgumentNullException(nameof(file), "Входной файл не определен");
    }
    var exchangeFileId = await SaveFile(file.InputStream, file.FileName, userIdentityid);
    using (var context = new ApplicationDbContext())
    {
        // Create the imported-file entity; resolve the organization from the uploading
        // user — first as an employee, falling back to an abonent.
        var importedFile = new ImportedFile
        {
            LoadType = loadType,
            LoadStatus = ELoadStatus.InProcess,
            ExchangeFile = await context.ExchangeFiles.FindAsync(exchangeFileId),
            Organization = context.Employee.Where(x => x.ApplicationUser.Id == userIdentityid)
                               .Select(x => x.Organization)
                               .FirstOrDefault()
                           ?? context.Abonent.Where(x => x.ApplicationUser.Id == userIdentityid)
                               .Select(x => x.Organization)
                               .FirstOrDefault()
        };
        context.ImportedFiles.Add(importedFile);
        await context.SaveChangesAsync();
        return(importedFile);
    }
}
/// <summary>
/// Rolls back a DeleteImportedFile by clearing the row's IsDeleted flag.
/// </summary>
/// <returns>The number of rows updated (0 if no matching row exists)</returns>
private async Task<int> UndeleteImportedFile(ImportedFile importedFile, ImportedFile existing)
{
    // this is an interfaces extension model used solely by ProjectMDM to allow a rollback of a DeleteImportedFile
    Log.LogDebug(
        $"ProjectRepository/UndeleteImportedFile: undeleting importedFile: {JsonConvert.SerializeObject(importedFile)}.");
    var upsertedCount = 0;
    if (existing != null)
    {
        Log.LogDebug($"ProjectRepository/UndeleteImportedFile: undeleting importedFile {importedFile.ImportedFileUid}");
        const string update = @"UPDATE ImportedFile SET IsDeleted = 0 WHERE ImportedFileUID = @ImportedFileUid";
        upsertedCount = await ExecuteWithAsyncPolicy(update, importedFile);
        Log.LogDebug(
            $"ProjectRepository/UndeleteImportedFile: upserted {upsertedCount} rows for: projectUid:{importedFile.ProjectUid} importedFileUid: {importedFile.ImportedFileUid}");
        return(upsertedCount);
    }
    // No existing row: nothing to undelete; log and return 0.
    Log.LogDebug(
        $"ProjectRepository/UndeleteImportedFile: can't undelete as none existing ignored importedFile={importedFile.ImportedFileUid}.");
    return(upsertedCount);
}
/// <summary>
/// Produces a StaticFile from the imported file, handing it back through the event args.
/// </summary>
public static void MakeStaticFile(ImportedFile obj, MethodReturnEventArgs<StaticFile> e)
{
    var ctx = obj.Context;
    // Create the target, then let MakeInternal move blob/name and drop the import.
    var result = ctx.Create<StaticFile>();
    MakeInternal(ctx, obj, result);
    e.Result = result;
}
/// <summary>
/// Converts an imported file into a read-only File and returns it via the event args.
/// </summary>
public static void MakeReadonlyFile(ImportedFile obj, MethodReturnEventArgs<File> e)
{
    var context = obj.Context;
    var readonlyFile = context.Create<File>();
    MakeInternal(context, obj, readonlyFile);
    // Mark the produced file read-only after the content transfer.
    readonlyFile.IsFileReadonly = true;
    e.Result = readonlyFile;
}
/// <summary>
/// Property-change hook for ImportedFile.Blob: the initial assignment is allowed,
/// but replacing an already-set blob is rejected.
/// </summary>
public static void HandleBlobChange(ImportedFile obj, MethodReturnEventArgs<Zetbox.App.Base.Blob> e, Zetbox.App.Base.Blob oldBlob, Zetbox.App.Base.Blob newBlob)
{
    var blobAlreadySet = oldBlob != null;
    if (blobAlreadySet && newBlob != oldBlob)
    {
        throw new InvalidOperationException("Changing blob on imported files is not allowed");
    }
    e.Result = newBlob;
}
/// <summary>
/// Guards ImportedFile.Blob against modification: once set, the blob may not change.
/// </summary>
public static void HandleBlobChange(ImportedFile obj, MethodReturnEventArgs<Zetbox.App.Base.Blob> e, Zetbox.App.Base.Blob oldBlob, Zetbox.App.Base.Blob newBlob)
{
    // Setting the blob for the first time (oldBlob == null) or re-assigning the
    // identical blob is fine; anything else is an error.
    if (oldBlob != null && newBlob != oldBlob)
    {
        throw new InvalidOperationException("Changing blob on imported files is not allowed");
    }
    e.Result = newBlob;
}
/// <summary>
/// Creates an AccountStatement backed by a read-only file made from the import.
/// </summary>
public static void MakeAccountStatement(ImportedFile obj, MethodReturnEventArgs<AccountStatement> e)
{
    var context = obj.Context;
    var statement = context.Create<AccountStatement>();
    // Attach the converted file to the statement and link back.
    statement.File = obj.MakeReadonlyFile();
    statement.File.AttachedTo.SetObject(statement);
    e.Result = statement;
}
/// <summary>
/// Happy-path test: updating a reference-surface imported file through the TRex
/// executor succeeds and echoes the project/name/offset back in the result.
/// </summary>
public async Task UpdateImportedFile_TRexHappyPath_ReferenceSurface()
{
    var customHeaders = new HeaderDictionary();
    var importedFileUid = Guid.NewGuid();
    var parentUid = Guid.NewGuid();
    // Offset is unchanged in this scenario (old == new).
    var oldOffset = 1.5;
    var newOffset = 1.5;
    var importedFileId = 9999;
    var TCCFilePath = "/BC Data/Sites/Chch Test Site";
    var fileName = "MoundRoadlinework.dxf";
    var fileDescriptor = FileDescriptor.CreateFileDescriptor(_fileSpaceId, TCCFilePath, fileName);
    // The file already present in the (mocked) repository.
    var existingImportedFile = new ImportedFile
    {
        ProjectUid = _projectUid.ToString(),
        ImportedFileUid = importedFileUid.ToString(),
        LegacyImportedFileId = 200000,
        ImportedFileType = ImportedFileType.ReferenceSurface,
        Name = fileName,
        FileDescriptor = JsonConvert.SerializeObject(fileDescriptor),
        Offset = oldOffset,
        ParentUid = parentUid.ToString()
    };
    var importedFilesList = new List<ImportedFile> { existingImportedFile };
    var updateImportedFile = new UpdateImportedFile(
        _projectUid, _shortRaptorProjectId, ImportedFileType.ReferenceSurface,
        null, DxfUnitsType.Meters,
        DateTime.UtcNow.AddHours(-45), DateTime.UtcNow.AddHours(-44),
        fileDescriptor, importedFileUid, importedFileId,
        "some folder", newOffset, "some file");
    var logger = ServiceProvider.GetRequiredService<ILoggerFactory>();
    var serviceExceptionHandler = ServiceProvider.GetRequiredService<IServiceExceptionHandler>();
    // TRex proxy and project repository are mocked to report success everywhere.
    var tRexImportFileProxy = new Mock<ITRexImportFileProxy>();
    tRexImportFileProxy.Setup(tr => tr.UpdateFile(It.IsAny<DesignRequest>(), It.IsAny<HeaderDictionary>())).ReturnsAsync(new ContractExecutionResult());
    var projectRepo = new Mock<IProjectRepository>();
    projectRepo.Setup(pr => pr.StoreEvent(It.IsAny<UpdateImportedFileEvent>())).ReturnsAsync(1);
    projectRepo.Setup(pr => pr.GetImportedFile(It.IsAny<string>())).ReturnsAsync(existingImportedFile);
    projectRepo.Setup(pr => pr.GetImportedFiles(It.IsAny<string>())).ReturnsAsync(importedFilesList);
    var executor = RequestExecutorContainerFactory
        .Build<UpdateImportedFileExecutor>(logger, null, serviceExceptionHandler,
            _customerUid.ToString(), _userUid.ToString(), _userEmailAddress, customHeaders,
            tRexImportFileProxy: tRexImportFileProxy.Object, projectRepo: projectRepo.Object);
    var result = await executor.ProcessAsync(updateImportedFile).ConfigureAwait(false) as ImportedFileDescriptorSingleResult;
    Assert.Equal(0, result.Code);
    Assert.NotNull(result.ImportedFileDescriptor);
    Assert.Equal(_projectUid.ToString(), result.ImportedFileDescriptor.ProjectUid);
    Assert.Equal(fileDescriptor.FileName, result.ImportedFileDescriptor.Name);
    Assert.Equal(newOffset, result.ImportedFileDescriptor.Offset);
}
/// <summary>
/// Downloads the imported file into the local folder, replacing any existing copy.
/// </summary>
public async Task SaveFileAsync(ImportedFile file)
{
    var destination = Path.Combine(FolderName, file.OriginalFileName);
    // Remove any stale copy before downloading a fresh one.
    if (File.Exists(destination))
    {
        File.Delete(destination);
    }
    await DownloadFileAsync(destination, file);
}
/// <summary>
/// Streams the imported file from FTP into a newly created local file at <paramref name="path"/>.
/// </summary>
private void DownloadFile(string path, ImportedFile file)
{
    using (var destination = File.Create(path))
    using (var source = _ftpLoader.LoadFileFromFtp(file))
    {
        source.CopyTo(destination);
        // Push buffered bytes to disk before the streams are disposed.
        destination.Flush();
    }
}
/// <summary>
/// Asynchronously streams the imported file from FTP into a new local file at <paramref name="path"/>.
/// </summary>
private async Task DownloadFileAsync(string path, ImportedFile file)
{
    using (var destination = File.Create(path))
    {
        using (var source = await _ftpLoader.LoadFileFromFtpAsync(file))
        {
            await source.CopyToAsync(destination);
        }
        // Flush after the source stream is closed, mirroring the synchronous variant.
        await destination.FlushAsync();
    }
}
/// <summary>
/// Deletes an imported file: permanently (row removed) when <paramref name="isDeletePermanently"/>
/// is set, otherwise a soft delete (IsDeleted flag). Older events than the stored row are ignored.
/// </summary>
/// <returns>The number of rows affected (0 when skipped or no existing row)</returns>
private async Task<int> DeleteImportedFile(ImportedFile importedFile, ImportedFile existing, bool isDeletePermanently)
{
    Log.LogDebug(
        $"ProjectRepository/DeleteImportedFile: deleting importedFile: {JsonConvert.SerializeObject(importedFile)} permanent flag:{isDeletePermanently}");
    var upsertedCount = 0;
    if (existing != null)
    {
        // Only act on events at least as recent as the stored row (out-of-order protection).
        if (importedFile.LastActionedUtc >= existing.LastActionedUtc)
        {
            if (isDeletePermanently)
            {
                Log.LogDebug(
                    $"ProjectRepository/DeleteImportedFile: deleting importedFile permanently: {importedFile.ImportedFileUid}");
                const string delete = @"DELETE FROM ImportedFile WHERE ImportedFileUID = @ImportedFileUid";
                upsertedCount = await ExecuteWithAsyncPolicy(delete, importedFile);
                Log.LogDebug(
                    $"ProjectRepository/DeleteImportedFile: deleted {upsertedCount} rows for: projectUid:{importedFile.ProjectUid} importedFileUid: {importedFile.ImportedFileUid}");
                return(upsertedCount);
            }
            else
            {
                // Soft delete: flag the row and advance its LastActionedUTC.
                Log.LogDebug($"ProjectRepository/DeleteImportedFile: deleting importedFile {importedFile.ImportedFileUid}");
                const string update = @"UPDATE ImportedFile SET IsDeleted = 1, LastActionedUTC = @LastActionedUtc WHERE ImportedFileUID = @ImportedFileUid";
                upsertedCount = await ExecuteWithAsyncPolicy(update, importedFile);
                Log.LogDebug(
                    $"ProjectRepository/DeleteImportedFile: upserted {upsertedCount} rows for: projectUid:{importedFile.ProjectUid} importedFileUid: {importedFile.ImportedFileUid}");
                return(upsertedCount);
            }
        }
    }
    else
    {
        Log.LogDebug(
            $"ProjectRepository/DeleteImportedFile: can't delete as none existing, ignored. importedFile={importedFile.ImportedFileUid}. Can't create one as don't have enough info e.g.customerUID / type.");
    }
    return(upsertedCount);
}
/// <summary>
/// Applies an update event to an existing ImportedFile row and records the
/// create/update timestamps in the history table. Older events are ignored.
/// </summary>
/// <returns>The number of rows updated (0 when skipped or no existing row)</returns>
private async Task<int> UpdateImportedFile(ImportedFile importedFile, ImportedFile existing)
{
    // The only thing which can be updated is a) the file content, and the LastActionedUtc. A file cannot be moved between projects/customers.
    // We don't store (a), and leave actionUTC as the more recent.
    var upsertedCount = 0;
    if (existing != null)
    {
        // Out-of-order protection: only events at least as recent as the stored row apply.
        if (importedFile.LastActionedUtc >= existing.LastActionedUtc)
        {
            const string update = @"UPDATE ImportedFile SET FileDescriptor = @FileDescriptor, FileCreatedUTC = @FileCreatedUtc, FileUpdatedUTC = @FileUpdatedUtc, ImportedBy = @ImportedBy, SurveyedUTC = @SurveyedUtc, MinZoomLevel = @MinZoomLevel, MaxZoomLevel = @MaxZoomLevel, Offset = @Offset, LastActionedUTC = @LastActionedUtc WHERE ImportedFileUID = @ImportedFileUid";
            upsertedCount = await ExecuteWithAsyncPolicy(update, importedFile);
            Log.LogDebug(
                $"ProjectRepository/UpdateImportedFile: updated {upsertedCount} rows for: projectUid:{importedFile.ProjectUid} importedFileUid: {importedFile.ImportedFileUid}");
            // don't really care if this didn't pass as may already exist for create/update utc
            if (upsertedCount > 0)
            {
                await UpsertImportedFileHistory(importedFile);
            }
        }
        else
        {
            Log.LogDebug(
                $"ProjectRepository/UpdateImportedFile: old update event ignored importedFile {importedFile.ImportedFileUid}");
        }
    }
    else
    {
        // can't create as don't know fk_ImportedFileTypeID, fk_DXFUnitsTypeID or customerUID
        Log.LogDebug(
            $"ProjectRepository/UpdateImportedFile: No ImportedFile exists {importedFile.ImportedFileUid}. Can't create one as don't have enough info e.g. customerUID/type");
    }
    return(upsertedCount);
}
/// <summary>
/// Downloads the imported file into the local folder, replacing any existing copy.
/// </summary>
public void SaveFile(ImportedFile file)
{
    var filePath = Path.Combine(FolderName, file.OriginalFileName);
    // Both branches of the original ended in the same DownloadFile call;
    // delete-if-exists keeps behavior identical and matches SaveFileAsync.
    if (File.Exists(filePath))
    {
        File.Delete(filePath);
    }
    DownloadFile(filePath, file);
}
/// <summary>
/// Opens an FTP download stream for the given imported file.
/// </summary>
/// <returns>The response stream; the caller is responsible for disposing it.</returns>
public Stream LoadFileFromFtp(ImportedFile file)
{
    var ftpCred = file.FtpCredential;
    // URL is the credential's base URL with the original file name appended.
    var request = (FtpWebRequest)WebRequest.Create(ftpCred.Url + file.OriginalFileName);
    request.Method = WebRequestMethods.Ftp.DownloadFile;
    request.Credentials = new NetworkCredential(ftpCred.UserName, ftpCred.UserPassword);
    var response = (FtpWebResponse)request.GetResponse();
    // NOTE(review): the FtpWebResponse itself is never disposed; disposing the returned
    // stream is presumed to release the connection — confirm no handle leak.
    var responseStream = response.GetResponseStream();
    return(responseStream);
}
/// <summary>
/// Asynchronously opens an FTP download stream for the given imported file.
/// </summary>
/// <returns>The response stream; the caller is responsible for disposing it.</returns>
public async Task<Stream> LoadFileFromFtpAsync(ImportedFile file)
{
    var ftpCred = file.FtpCredential;
    var request = (FtpWebRequest)WebRequest.Create($"{ftpCred.Url}{file.OriginalFileName}");
    request.Method = WebRequestMethods.Ftp.DownloadFile;
    request.Credentials = new NetworkCredential(ftpCred.UserName, ftpCred.UserPassword);
    var response = (FtpWebResponse)await request.GetResponseAsync();
    // NOTE(review): the FtpWebResponse is never disposed, same as the sync variant;
    // presumed to be released via the returned stream — confirm.
    var responseStream = response.GetResponseStream();
    return(responseStream);
}
/// <summary>
/// Copies the imported file from FTP into an Azure blob container named after FolderName.
/// </summary>
public void SaveFile(ImportedFile file)
{
    using (var responseStream = _ftpLoader.LoadFileFromFtp(file))
    {
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(ConfigurationManager.AppSettings["AzureStorageConnectionString"]);
        CloudBlobClient blobServiceClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer containerClient = blobServiceClient.GetContainerReference(FolderName);
        // Idempotent: the container is created on first use.
        containerClient.CreateIfNotExists();
        var blockBlob = containerClient.GetBlockBlobReference(file.OriginalFileName);
        // Content type is hard-coded — presumably all imported files are XML; confirm.
        blockBlob.Properties.ContentType = "text/xml";
        blockBlob.UploadFromStream(responseStream);
    }
}
/// <summary>
/// Lets the user pick a PurchaseQuote via a selection dialog and attaches a file
/// made from the import to it. e.Result stays unset if the dialog is cancelled.
/// </summary>
public static void MakeQuote(ImportedFile obj, MethodReturnEventArgs<PurchaseQuote> e)
{
    var ctx = obj.Context;
    var dlg = _factory.CreateViewModel<DataObjectSelectionTaskViewModel.Factory>().Invoke(ctx, null, typeof(PurchaseQuote).GetObjectClass(_frozenCtx), null, (sel) =>
    {
        // Callback runs when the user confirms a selection; sel is null on cancel.
        if (sel != null)
        {
            var quote = (PurchaseQuote)sel.First().Object;
            quote.Document = obj.MakeFile();
            quote.Document.AttachedTo.SetObject(quote);
            e.Result = quote;
        }
    }, null);
    _factory.ShowDialog(dlg);
}
/// <summary>
/// Lets the user pick an OtherExpenseReceipt via a selection dialog and attaches a
/// read-only file made from the import. e.Result stays unset if the dialog is cancelled.
/// </summary>
public static void MakeOtherExpense(ImportedFile obj, MethodReturnEventArgs<OtherExpenseReceipt> e)
{
    var ctx = obj.Context;
    var dlg = _factory.CreateViewModel<DataObjectSelectionTaskViewModel.Factory>().Invoke(ctx, null, typeof(OtherExpenseReceipt).GetObjectClass(_frozenCtx), null, (sel) =>
    {
        // sel is null when the user cancels the dialog.
        if (sel != null)
        {
            var receipt = (OtherExpenseReceipt)sel.First().Object;
            receipt.Document = obj.MakeReadonlyFile();
            receipt.Document.AttachedTo.SetObject(receipt);
            e.Result = receipt;
        }
    }, null);
    _factory.ShowDialog(dlg);
}
/// <summary>
/// Lets the user pick a Party via a selection dialog and attaches a file made from
/// the import to that party. e.Result stays unset if the dialog is cancelled.
/// </summary>
public static void AddToParty(ImportedFile obj, MethodReturnEventArgs<at.dasz.DocumentManagement.File> e)
{
    var ctx = obj.Context;
    var dlg = _factory.CreateViewModel<DataObjectSelectionTaskViewModel.Factory>().Invoke(ctx, null, typeof(Party).GetObjectClass(_frozenCtx), null, (sel) =>
    {
        // sel is null when the user cancels the dialog.
        if (sel != null)
        {
            var party = (Party)sel.First().Object;
            var file = obj.MakeFile();
            // Link both directions: file -> party and party.Files collection.
            file.AttachedTo.SetObject(party);
            party.Files.Add(file);
            e.Result = file;
        }
    }, null);
    _factory.ShowDialog(dlg);
}
/// <summary>
/// Inserts a history row for the file's created/updated timestamps unless an
/// equivalent row (to one-second precision) already exists.
/// </summary>
/// <returns>The number of rows inserted (0 when a matching history row exists)</returns>
private async Task<int> UpsertImportedFileHistory(ImportedFile importedFile)
{
    var insertedCount = 0;
    var importedFileHistoryExisting = (await QueryWithAsyncPolicy<ImportedFileHistoryItem>
        (@"SELECT fk_ImportedFileUID AS ImportedFileUid, FileCreatedUTC, FileUpdatedUTC, ImportedBy FROM ImportedFileHistory WHERE fk_ImportedFileUID = @ImportedFileUid",
        new { importedFile.ImportedFileUid }
        )).ToList();
    bool alreadyExists = false;
    // comparing sql dateTimes to c# doesn't work
    if (importedFileHistoryExisting.Any())
    {
        // Round both sides to whole seconds so SQL/CLR precision differences don't matter.
        var newCreatedUtcRounded = RoundDateTimeToSeconds(importedFile.FileCreatedUtc);
        var newUpdatedUtcRounded = RoundDateTimeToSeconds(importedFile.FileUpdatedUtc);
        alreadyExists = importedFileHistoryExisting
            .Any(h => RoundDateTimeToSeconds(h.FileCreatedUtc) == newCreatedUtcRounded &&
                      RoundDateTimeToSeconds(h.FileUpdatedUtc) == newUpdatedUtcRounded);
    }
    if (!alreadyExists)
    {
        const string insert = @"INSERT ImportedFileHistory (fk_ImportedFileUID, FileCreatedUtc, FileUpdatedUtc, ImportedBy) VALUES (@ImportedFileUid, @FileCreatedUtc, @FileUpdatedUtc, @ImportedBy)";
        insertedCount = await ExecuteWithAsyncPolicy(insert, importedFile);
        Log.LogDebug(
            $"ProjectRepository/UpsertImportedFileHistory: inserted {insertedCount} rows for: ImportedFileUid:{importedFile.ImportedFileUid} FileCreatedUTC: {importedFile.FileCreatedUtc} FileUpdatedUTC: {importedFile.FileUpdatedUtc}");
    }
    else
    {
        Log.LogDebug(
            $"ProjectRepository/UpsertImportedFileHistory: History already exists ImportedFileUid:{importedFile.ImportedFileUid} FileCreatedUTC: {importedFile.FileCreatedUtc} FileUpdatedUTC: {importedFile.FileUpdatedUtc}");
    }
    return(insertedCount);
}
/// <summary>
/// Deletes an imported file for a project: validates the project, locates the file,
/// runs the delete executor and notifies subscribers of the project change.
/// </summary>
/// <param name="projectUid">Project owning the file</param>
/// <param name="importedFileUid">UID of the 3dpm imported file to delete</param>
/// <param name="pegasusClient">Injected Pegasus client</param>
/// <param name="webClient">Injected web client wrapper</param>
public async Task<ContractExecutionResult> DeleteImportedFileV6(
    [FromQuery] Guid projectUid,
    [FromQuery] Guid? importedFileUid, // for 3dpm imported files
    [FromServices] IPegasusClient pegasusClient,
    [FromServices] IWebClientWrapper webClient)
{
    Logger.LogInformation($"{nameof(DeleteImportedFileV6)}: projectUid {projectUid} importedFileUid: {importedFileUid}");
    await ValidateProjectId(projectUid.ToString());
    var importedFiles = await ImportedFileRequestDatabaseHelper.GetImportedFiles(projectUid.ToString(), Logger, ProjectRepo).ConfigureAwait(false);
    ImportedFile existing = null;
    if (importedFiles.Count > 0)
    {
        existing = importedFiles.FirstOrDefault(f => f.ImportedFileUid == importedFileUid.ToString());
    }
    if (existing == null)
    {
        // Unknown file: throw a 400 service exception (ThrowServiceException throws;
        // the return below only satisfies the compiler).
        ServiceExceptionHandler.ThrowServiceException(HttpStatusCode.BadRequest, 56);
        return(new ContractExecutionResult(ContractExecutionStatesEnum.InternalProcessingError, "shouldn't get here")); // to keep compiler happy
    }
    var deleteImportedFile = new DeleteImportedFile(
        projectUid, existing.ImportedFileType,
        JsonConvert.DeserializeObject<FileDescriptor>(existing.FileDescriptor),
        Guid.Parse(existing.ImportedFileUid), existing.ImportedFileId,
        existing.LegacyImportedFileId, DataOceanRootFolderId, existing.SurveyedUtc);
    var result = await WithServiceExceptionTryExecuteAsync(() =>
        RequestExecutorContainerFactory
        .Build<DeleteImportedFileExecutor>(
            LoggerFactory, ConfigStore, ServiceExceptionHandler,
            CustomerUid, UserId, UserEmailAddress, customHeaders,
            persistantTransferProxyFactory : persistantTransferProxyFactory,
            filterServiceProxy : filterServiceProxy,
            tRexImportFileProxy : tRexImportFileProxy,
            projectRepo : ProjectRepo,
            dataOceanClient : DataOceanClient,
            authn : Authorization,
            pegasusClient : pegasusClient,
            cwsProjectClient : CwsProjectClient)
        .ProcessAsync(deleteImportedFile)
    );
    // Fan out a change notification so dependent services can refresh.
    await NotificationHubClient.Notify(new ProjectChangedNotification(projectUid));
    Logger.LogInformation(
        $"{nameof(DeleteImportedFileV6)}: Completed successfully. 
projectUid {projectUid} importedFileUid: {importedFileUid}");
    return(result);
}
/// <summary>
/// Verifies the AutoMapper profile maps an ImportedFile repo entity onto an
/// ImportedFileDescriptor response, field by field, plus a repo->repo copy.
/// </summary>
public void MapImportedFileRepoToResponse()
{
    var request = new ImportedFile
    {
        ProjectUid = Guid.NewGuid().ToString(),
        ImportedFileUid = Guid.NewGuid().ToString(),
        CustomerUid = Guid.NewGuid().ToString(),
        ImportedFileType = ImportedFileType.Alignment,
        Name = "this is the filename.svl",
        FileDescriptor = JsonConvert.SerializeObject(FileDescriptor.CreateFileDescriptor(Guid.NewGuid().ToString(), "/customerUID/projectUID", "this is the filename.svl")),
        FileCreatedUtc = DateTime.UtcNow.AddDays(-2),
        FileUpdatedUtc = DateTime.UtcNow.AddDays(-1),
        ImportedBy = "*****@*****.**",
        SurveyedUtc = null,
        ParentUid = null,
        Offset = 0,
        IsDeleted = false,
        LastActionedUtc = DateTime.UtcNow
    };
    var importedFileDescriptor = AutoMapperUtility.Automapper.Map<ImportedFileDescriptor>(request);
    Assert.Equal(request.ProjectUid, importedFileDescriptor.ProjectUid);
    Assert.Equal(request.ImportedFileUid, importedFileDescriptor.ImportedFileUid);
    Assert.Equal(request.CustomerUid, importedFileDescriptor.CustomerUid);
    Assert.Equal(request.ImportedFileType, importedFileDescriptor.ImportedFileType);
    Assert.Equal(request.Name, importedFileDescriptor.Name);
    Assert.Equal(request.FileCreatedUtc, importedFileDescriptor.FileCreatedUtc);
    Assert.Equal(request.FileUpdatedUtc, importedFileDescriptor.FileUpdatedUtc);
    Assert.Equal(request.ImportedBy, importedFileDescriptor.ImportedBy);
    Assert.Equal(request.SurveyedUtc, importedFileDescriptor.SurveyedUtc);
    // NOTE(review): request.ParentUid is null, so this effectively asserts the mapped
    // ParentUid has no value — "Fail assertion" is only compared when it unexpectedly does.
    Assert.Equal(request.ParentUid, importedFileDescriptor.ParentUid.HasValue ? "Fail assertion" : null);
    Assert.Equal(request.Offset, importedFileDescriptor.Offset);
    Assert.Equal(request.LastActionedUtc, importedFileDescriptor.ImportedUtc);
    Assert.True(importedFileDescriptor.IsActivated);
    // just make a copy file descriptor is only in the source file, not the destination
    var copyOfRequest = AutoMapperUtility.Automapper.Map<ImportedFile>(request);
    Assert.Equal(request.ProjectUid, copyOfRequest.ProjectUid);
    Assert.Equal(request.FileDescriptor, copyOfRequest.FileDescriptor);
}
/// <summary>
/// Attempts to download and persist one imported file, recording success or failure
/// on the entity; the entity is always updated in the repository afterwards.
/// </summary>
private async Task TryLoadFileAsync(ImportedFile file)
{
    try
    {
        // NOTE(review): state/status are set to Loaded/Success *before* the save; on
        // failure only FileStatus is reset while FileState stays Loaded — confirm intended.
        file.FileState = FileState.Loaded;
        file.FileStatus = FileStatus.Success;
        file.CopiedDate = DateTime.UtcNow;
        await _systemManager.SaveFileAsync(file);
        GlobalLogger.LogInfo($"File copied: {file.OriginalFileName} from ftp: [{file.FtpCredential.FtpName}].", GetType().Name, true);
    }
    catch (Exception ex)
    {
        file.FileStatus = FileStatus.Failed;
        GlobalLogger.LogError($"Error while copying file: {file.OriginalFileName} from ftp: [{file.FtpCredential.FtpName}].", ex, GetType().Name, true);
    }
    finally
    {
        // Persist the outcome regardless of success or failure.
        _copyJobRepository.UpdateImportedFile(file);
        _copyJobRepository.SaveChanges();
    }
}
/// <summary>
/// Looks up any existing row for the file's UID, then dispatches the event to the
/// matching create/update/delete/undelete handler.
/// </summary>
/// <returns>The affected row count from the handler, or 0 for an unknown event type.</returns>
private async Task<int> UpsertImportedFile(ImportedFile importedFile, string eventType, bool isDeletePermanently = false)
{
    var existing = (await QueryWithAsyncPolicy<ImportedFile>
        (@"SELECT fk_ProjectUID as ProjectUID, ImportedFileUID, ImportedFileID, fk_CustomerUID as CustomerUID, fk_ImportedFileTypeID as ImportedFileType, Name, FileDescriptor, FileCreatedUTC, FileUpdatedUTC, ImportedBy, SurveyedUTC, fk_DXFUnitsTypeID as DxfUnitsType, MinZoomLevel, MaxZoomLevel, Offset, fk_ReferenceImportedFileUID as ParentUID, IsDeleted, LastActionedUTC FROM ImportedFile WHERE ImportedFileUID = @ImportedFileUid",
        new { importedFile.ImportedFileUid }
        )).FirstOrDefault();

    // The event types are mutually exclusive, so a switch dispatch is equivalent
    // to the original if-chain; unknown types yield 0.
    switch (eventType)
    {
        case "CreateImportedFileEvent":
            return await CreateImportedFile(importedFile, existing);
        case "UpdateImportedFileEvent":
            return await UpdateImportedFile(importedFile, existing);
        case "DeleteImportedFileEvent":
            return await DeleteImportedFile(importedFile, existing, isDeletePermanently);
        case "UndeleteImportedFileEvent":
            return await UndeleteImportedFile(importedFile, existing);
        default:
            return 0;
    }
}
/// <summary>
/// Verifies the AutoMapper profile maps an ImportedFile repo entity onto an
/// UpdateImportedFileEvent, field by field, plus a repo->repo copy.
/// </summary>
public void MapImportedFileRepoToUpdateEvent()
{
    var request = new ImportedFile
    {
        ProjectUid = Guid.NewGuid().ToString(),
        ImportedFileUid = Guid.NewGuid().ToString(),
        CustomerUid = Guid.NewGuid().ToString(),
        ImportedFileType = ImportedFileType.Alignment,
        Name = "this is the filename.svl",
        FileDescriptor = JsonConvert.SerializeObject(FileDescriptor.CreateFileDescriptor(Guid.NewGuid().ToString(), "/customerUID/projectUID", "this is the filename.svl")),
        FileCreatedUtc = DateTime.UtcNow.AddDays(-2),
        FileUpdatedUtc = DateTime.UtcNow.AddDays(-1),
        ImportedBy = "*****@*****.**",
        SurveyedUtc = null,
        ParentUid = null,
        Offset = 0,
        IsDeleted = false,
        LastActionedUtc = DateTime.UtcNow
    };
    var updateImportedFileEvent = AutoMapperUtility.Automapper.Map<UpdateImportedFileEvent>(request);
    // ActionUTC is mapped from the repo entity's LastActionedUtc.
    Assert.Equal(request.LastActionedUtc, updateImportedFileEvent.ActionUTC);
    Assert.Equal(request.FileCreatedUtc, updateImportedFileEvent.FileCreatedUtc);
    Assert.Equal(request.FileDescriptor, updateImportedFileEvent.FileDescriptor);
    Assert.Equal(request.FileUpdatedUtc, updateImportedFileEvent.FileUpdatedUtc);
    Assert.Equal(request.ImportedBy, updateImportedFileEvent.ImportedBy);
    Assert.Equal(request.ImportedFileUid, updateImportedFileEvent.ImportedFileUID.ToString());
    Assert.Equal(request.ProjectUid, updateImportedFileEvent.ProjectUID.ToString());
    Assert.Equal(request.SurveyedUtc, updateImportedFileEvent.SurveyedUtc);
    // just make a copy file descriptor is only in the source file, not the destination
    var copyOfRequest = AutoMapperUtility.Automapper.Map<ImportedFile>(request);
    Assert.Equal(request.ProjectUid, copyOfRequest.ProjectUid);
    Assert.Equal(request.FileDescriptor, copyOfRequest.FileDescriptor);
}
/// <summary>
/// Parses "AssetSummary" rows out of a line-oriented XML-ish report stream and stores
/// them as TradeTradesAs records. Per-row parse errors are logged and skipped;
/// a failed DB save fails the whole file.
/// </summary>
/// <returns>FileStatus.Success, or FileStatus.Failed for an empty stream / DB error.</returns>
private async Task<FileStatus> ProcessTradesAsReport(ImportedFile file, Stream stream)
{
    if (stream == null || stream.Length == 0)
    {
        return(await Task.FromResult(FileStatus.Failed));
    }
    var newTradeAsList = new List<TradeTradesAs>();
    using (var reader = new StreamReader(stream))
    {
        while (!reader.EndOfStream)
        {
            var row = await reader.ReadLineAsync();
            // Only lines that open an AssetSummary element are of interest.
            if (!row.Trim().StartsWith("<AssetSummary "))
            {
                continue;
            }
            try
            {
                // Each matching line is parsed as a standalone XML fragment.
                var doc = XDocument.Load(new MemoryStream(Encoding.UTF8.GetBytes(row)));
                if (!doc.Elements().Any())
                {
                    continue;
                }
                var e = doc.Elements().First();
                var tradeAccId = GetOrCreateTradeAccountId(new TradeAccount
                {
                    AccountName = _fileNameMatcher.GetCorrectAccountId(e.Attribute(XName.Get("accountId"))?.Value),
                    AccountAlias = e.Attribute(XName.Get("acctAlias"))?.Value,
                    MasterAccountId = file.MasterAccountId,
                    ImportedFile = file
                });
                var newTradeAs = new TradeTradesAs
                {
                    TradeAccountId = tradeAccId,
                    ReportDate = file.FileCreateDate,
                    Quantity = ParseDecimal(e.Attribute(XName.Get("quantity"))?.Value),
                    AssetCategory = e.Attribute(XName.Get("assetCategory"))?.Value,
                    ImportedFile = file
                };
                newTradeAsList.Add(newTradeAs);
            }
            catch (Exception ex)
            {
                // A bad row should not abort the rest of the file.
                GlobalLogger.LogError($"Error while parsing TradeAs record. \nRow: {row}", ex, GetType().Name, true);
            }
        }
    }
    try
    {
        _importJobRepository.AddRangeTradeAs(newTradeAsList);
        _importJobRepository.SaveChanges();
    }
    catch (Exception ex)
    {
        GlobalLogger.LogError($"Error while adding TradeAs records to DB.", ex, GetType().Name, true);
        return(await Task.FromResult(FileStatus.Failed));
    }
    return(await Task.FromResult(FileStatus.Success));
}
/// <summary>
/// Parses "EquitySummaryByReportDateInBase" rows out of a line-oriented report stream
/// and stores them as TradeNav records. Per-row parse errors are logged and skipped;
/// a failed DB save fails the whole file.
/// </summary>
/// <returns>FileStatus.Success, or FileStatus.Failed for an empty stream / DB error.</returns>
private async Task<FileStatus> ProcessNavFileReport(ImportedFile file, Stream stream)
{
    if (stream == null || stream.Length == 0)
    {
        return(await Task.FromResult(FileStatus.Failed));
    }
    var newTradeNavList = new List<TradeNav>();
    using (var reader = new StreamReader(stream))
    {
        while (!reader.EndOfStream)
        {
            var row = await reader.ReadLineAsync();
            if (!row.Trim().StartsWith("<EquitySummaryByReportDateInBase "))
            {
                continue;
            }
            try
            {
                // Each matching line is parsed as a standalone XML fragment.
                var doc = XDocument.Load(new MemoryStream(Encoding.UTF8.GetBytes(row)));
                if (!doc.Elements().Any())
                {
                    continue;
                }
                var e = doc.Elements().First();
                // Rows with an unparseable report date are skipped.
                var reportDate = DateHelper.ParseDate(e.Attribute(XName.Get("reportDate"))?.Value);
                if (reportDate == null)
                {
                    continue;
                }
                var tradeAccId = GetOrCreateTradeAccountId(new TradeAccount
                {
                    AccountName = _fileNameMatcher.GetCorrectAccountId(e.Attribute(XName.Get("accountId"))?.Value),
                    AccountAlias = e.Attribute(XName.Get("acctAlias"))?.Value,
                    MasterAccountId = file.MasterAccountId,
                    ImportedFile = file
                });
                var newTradeNav = new TradeNav
                {
                    TradeAccountId = tradeAccId,
                    ReportDate = reportDate.Value,
                    Total = ParseDecimal(e.Attribute(XName.Get("total"))?.Value),
                    Cash = ParseDecimal(e.Attribute(XName.Get("cash"))?.Value),
                    Stock = ParseDecimal(e.Attribute(XName.Get("stock"))?.Value),
                    Options = ParseDecimal(e.Attribute(XName.Get("options"))?.Value),
                    Commodities = ParseDecimal(e.Attribute(XName.Get("commodities"))?.Value),
                    InterestAccruals = ParseDecimal(e.Attribute(XName.Get("interestAccruals"))?.Value),
                    TotalLong = ParseDecimal(e.Attribute(XName.Get("totalLong"))?.Value),
                    CashLong = ParseDecimal(e.Attribute(XName.Get("cashLong"))?.Value),
                    StockLong = ParseDecimal(e.Attribute(XName.Get("stockLong"))?.Value),
                    OptionsLong = ParseDecimal(e.Attribute(XName.Get("optionsLong"))?.Value),
                    CommoditiesLong = ParseDecimal(e.Attribute(XName.Get("commoditiesLong"))?.Value),
                    InterestAccrualsLong = ParseDecimal(e.Attribute(XName.Get("interestAccrualsLong"))?.Value),
                    TotalShort = ParseDecimal(e.Attribute(XName.Get("totalShort"))?.Value),
                    CashShort = ParseDecimal(e.Attribute(XName.Get("cashShort"))?.Value),
                    StockShort = ParseDecimal(e.Attribute(XName.Get("stockShort"))?.Value),
                    OptionsShort = ParseDecimal(e.Attribute(XName.Get("optionsShort"))?.Value),
                    CommoditiesShort = ParseDecimal(e.Attribute(XName.Get("commoditiesShort"))?.Value),
                    InterestAccrualsShort = ParseDecimal(e.Attribute(XName.Get("interestAccrualsShort"))?.Value),
                    ImportedFile = file
                };
                newTradeNavList.Add(newTradeNav);
            }
            catch (Exception ex)
            {
                // A bad row should not abort the rest of the file.
                GlobalLogger.LogError($"Error while parsing TradeNav record. \nRow: {row}", ex, GetType().Name, true);
            }
        }
    }
    try
    {
        _importJobRepository.AddRangeTradeNav(newTradeNavList);
        _importJobRepository.SaveChanges();
    }
    catch (Exception ex)
    {
        GlobalLogger.LogError($"Error while adding TradeNav records to DB.", ex, GetType().Name, true);
        return(await Task.FromResult(FileStatus.Failed));
    }
    return(await Task.FromResult(FileStatus.Success));
}
/// <summary>
/// Older variant: parses "EquitySummaryByReportDateInBase" rows into TradeNav records
/// (date/account/total only) and inserts them one by one with progress logging.
/// </summary>
/// <returns>FileStatus.Success, or FileStatus.Failed for an empty stream.</returns>
private async Task<FileStatus> ProcessNavFileReport(ImportedFile file, Stream stream)
{
    if (stream == null || stream.Length == 0)
    {
        return(await Task.FromResult(FileStatus.Failed));
    }
    var newTradeNavList = new List<TradeNav>();
    using (var reader = new StreamReader(stream))
    {
        while (!reader.EndOfStream)
        {
            var row = await reader.ReadLineAsync();
            if (!row.Trim().StartsWith("<EquitySummaryByReportDateInBase "))
            {
                continue;
            }
            try
            {
                var doc = XDocument.Load(new MemoryStream(Encoding.UTF8.GetBytes(row)));
                if (!doc.Elements().Any())
                {
                    continue;
                }
                var e = doc.Elements().First();
                // Rows with an unparseable report date are skipped.
                var reportDate = DateHelper.ParseDate(e.Attribute(XName.Get("reportDate"))?.Value);
                if (reportDate == null)
                {
                    continue;
                }
                var tradeAccId = GetOrCreateTradeAccountId(new TradeAccount
                {
                    AccountName = _fileNameMatcher.GetCorrectAccountId(e.Attribute(XName.Get("accountId"))?.Value),
                    AccountAlias = e.Attribute(XName.Get("acctAlias"))?.Value,
                    MasterAccountId = file.MasterAccountId
                });
                var newTradeNav = new TradeNav
                {
                    TradeAccountId = tradeAccId,
                    ReportDate = reportDate.Value,
                    Total = ParseDecimal(e.Attribute(XName.Get("total"))?.Value)
                };
                newTradeNavList.Add(newTradeNav);
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Error while parsing TradeNav record. \nRow: {row} \nException: {ex}");
                loggerException.Error(this.GetErrorLogMessage(ex));
            }
        }
    }
    {
        var counter = 0;
        foreach (var tradeNav in newTradeNavList)
        {
            try
            {
                _importJobRepository.AddTradeNav(tradeNav);
                // Progress is logged every 100 records; note this also fires on the
                // very first iteration (counter == 0), logging "Handled 0 of ...".
                if (counter % 100 == 0)
                {
                    Console.WriteLine($"Handled {counter} of {newTradeNavList.Count} trade nav records");
                    logger.Log(LogLevel.Info, $"import$ Handled {counter} of {newTradeNavList.Count} trade nav records");
                }
            }
            catch (Exception ex)
            {
                // A failed insert is logged and the remaining records are still attempted.
                Console.WriteLine($"Error while adding TradeNav record to DB. Exception: {ex}");
                loggerException.Error(this.GetErrorLogMessage(ex));
            }
            counter++;
        }
    }
    _importJobRepository.SaveChanges();
    return(await Task.FromResult(FileStatus.Success));
}