public void DataOceanFileNameShouldContainAGuid(string fileName)
{
  // Arrange: a fresh uid that the generated name must embed.
  var uid = Guid.NewGuid();

  // Act: build the DataOcean name (not surveyed/geotiff, no surveyed date).
  var generatedName = DataOceanFileUtil.DataOceanFileName(fileName, false, uid, null);

  // Assert: the generated name starts with the file uid.
  Assert.StartsWith(uid.ToString(), generatedName);
}
/// <summary>
/// Stitches the individual DataOcean-generated tiles that cover the requested
/// extent into <paramref name="tileBitmap"/>, drawing each fetched tile at its
/// pixel offset relative to the top-left tile.
/// </summary>
/// <param name="dxfFile">File whose generated tiles are fetched from DataOcean.</param>
/// <param name="tileTopLeft">Top-left tile coordinates (inclusive) of the extent.</param>
/// <param name="tileBottomRight">Bottom-right tile coordinates (inclusive) of the extent.</param>
/// <param name="tileBitmap">Target bitmap mutated in place with each tile drawn onto it.</param>
/// <param name="zoomLevel">Zoom level the tiles were generated at.</param>
private async Task JoinDataOceanTiles(FileData dxfFile, MasterDataModels.Point tileTopLeft, MasterDataModels.Point tileBottomRight, Image<Rgba32> tileBitmap, int zoomLevel)
{
  // Build the generated-tile base name the same way the import pipeline does:
  // uid-based DataOcean name, then the "generated" variant for the file type.
  var fileName = DataOceanFileUtil.DataOceanFileName(dxfFile.Name,
    dxfFile.ImportedFileType == ImportedFileType.SurveyedSurface || dxfFile.ImportedFileType == ImportedFileType.GeoTiff,
    Guid.Parse(dxfFile.ImportedFileUid), dxfFile.SurveyedUtc);
  fileName = DataOceanFileUtil.GeneratedFileName(fileName, dxfFile.ImportedFileType);

  var dataOceanFileUtil = new DataOceanFileUtil($"{DataOceanUtil.PathSeparator}{dataOceanRootFolder}{dxfFile.Path}{DataOceanUtil.PathSeparator}{fileName}");
  log.LogDebug($"{nameof(JoinDataOceanTiles)}: fileName: {fileName} dataOceanFileUtil.FullFileName {dataOceanFileUtil.FullFileName}");

  for (int yTile = (int)tileTopLeft.y; yTile <= (int)tileBottomRight.y; yTile++)
  {
    for (int xTile = (int)tileTopLeft.x; xTile <= (int)tileBottomRight.x; xTile++)
    {
      var targetFile = dataOceanFileUtil.GetTileFileName(zoomLevel, yTile, xTile);
      log.LogDebug($"JoinDxfTiles: getting tile {targetFile}");
      var file = await dataOceanClient.GetFile(targetFile, authn.CustomHeaders());
      // Missing tiles (no data for that cell) are simply skipped.
      if (file != null)
      {
        // FIX: ImageSharp Image is IDisposable; the loaded tile was previously
        // never disposed, leaking pixel buffers for every tile joined.
        using (Image<Rgba32> tile = Image.Load<Rgba32>(file))
        {
          // Pixel offset of this tile within the stitched bitmap.
          Point offset = new Point(
            (xTile - (int)tileTopLeft.x) * MasterDataModels.WebMercatorProjection.TILE_SIZE,
            (yTile - (int)tileTopLeft.y) * MasterDataModels.WebMercatorProjection.TILE_SIZE);
          tileBitmap.Mutate(ctx => ctx.DrawImage(tile, PixelBlenderMode.Normal, 1f, offset));
        }
      }
    }
  }
}
public void DataOceanFileNameForGeotiffShouldContainSurveyedUtc()
{
  // Arrange: geotiff files carry a surveyed date in their DataOcean name.
  var uid = Guid.NewGuid();
  var surveyedUtc = DateTime.UtcNow;

  // Act.
  var generatedName = DataOceanFileUtil.DataOceanFileName("some name.tif", true, uid, surveyedUtc);

  // Assert: name is prefixed with the uid...
  Assert.StartsWith(uid.ToString(), generatedName);

  // ...and whatever follows the uid (before the extension) is the date part.
  var uidLength = uid.ToString().Length;
  var datePart = Path.GetFileNameWithoutExtension(generatedName).Substring(uidLength);
  Assert.False(string.IsNullOrEmpty(datePart));
}
public void DataOceanReplaceTileNameWithGuid()
{
  // Mirrors the name construction done by TileService.JoinDataOceanTiles():
  // the original file name must be replaced by the imported-file uid.
  var rootFolder = "rootFolder";
  var dxfFilePath = "/e72bd187-0679-11e4-a8c5-005056835dd5/14e5df3c-b090-4d50-878a-7dc7be49c7dc";
  var dxfFileName = "The LineworkFileName IsHere–.dxf";
  var dxfFileImportedFileUid = Guid.NewGuid().ToString();
  var dxfFileImportedFileType = ImportedFileType.Linework;
  DateTime? dxfFileSurveyedUtc = null;
  var expectedDataOceanPathAndFileName = $"/{rootFolder}/e72bd187-0679-11e4-a8c5-005056835dd5/14e5df3c-b090-4d50-878a-7dc7be49c7dc/{dxfFileImportedFileUid}.dxf";

  // Linework is neither a surveyed surface nor a geotiff, so no surveyed date.
  var includeSurveyedUtc = dxfFileImportedFileType == ImportedFileType.SurveyedSurface ||
                           dxfFileImportedFileType == ImportedFileType.GeoTiff;
  var generatedName = DataOceanFileUtil.DataOceanFileName(dxfFileName, includeSurveyedUtc,
    Guid.Parse(dxfFileImportedFileUid), dxfFileSurveyedUtc);
  generatedName = DataOceanFileUtil.GeneratedFileName(generatedName, dxfFileImportedFileType);

  var builtDataOceanUtil = new DataOceanFileUtil($"{DataOceanUtil.PathSeparator}{rootFolder}{dxfFilePath}{DataOceanUtil.PathSeparator}{generatedName}");

  Assert.Equal(expectedDataOceanPathAndFileName, builtDataOceanUtil.FullFileName);
}
/// <summary>
/// Passes the coordinate system file to TRex and saves a copy in DataOcean
/// for DXF tile generation.
/// </summary>
/// <param name="projectUid">Project the coordinate system belongs to; also used as the DataOcean file uid.</param>
/// <param name="coordinateSystemFileName">Original coordinate system file name.</param>
/// <param name="coordinateSystemFileContent">Raw file bytes to upload.</param>
private async Task SaveCoordinateSystem(Guid projectUid, string coordinateSystemFileName, byte[] coordinateSystemFileContent)
{
  // Save to DataOcean for DXF tile generation.
  var rootFolder = configStore.GetValueString("DATA_OCEAN_ROOT_FOLDER_ID");
  if (string.IsNullOrEmpty(rootFolder))
  {
    serviceExceptionHandler.ThrowServiceException(HttpStatusCode.InternalServerError, 115);
  }

  using (var ms = new MemoryStream(coordinateSystemFileContent))
  {
    await DataOceanHelper.WriteFileToDataOcean(
      ms, rootFolder, customerUid, projectUid.ToString(),
      DataOceanFileUtil.DataOceanFileName(coordinateSystemFileName, false, projectUid, null),
      log, serviceExceptionHandler, dataOceanClient, authn, projectUid, configStore);
  }

  // Save in TRex.
  CoordinateSystemSettingsResult coordinateSystemSettingsResult = await productivity3dV1ProxyCoord
    .CoordinateSystemPost(projectUid, coordinateSystemFileContent, coordinateSystemFileName, customHeaders);

  // FIX: the original mixed string.Format with an interpolated string
  // ($"...{0}...{1}..."), so {0}/{1} were substituted as the literal constants
  // 0 and 1 and the real code/message arguments were silently ignored.
  var message = $"Sending coordinate system to TRex returned code: {coordinateSystemSettingsResult?.Code ?? -1} Message {coordinateSystemSettingsResult?.Message ?? "coordinateSystemSettingsResult == null"}.";
  log.LogDebug(message);

  // A null result or non-zero code means TRex did not accept the file;
  // log critically but do not fail the overall save (best-effort, as before).
  if (coordinateSystemSettingsResult == null ||
      coordinateSystemSettingsResult.Code != 0 /* TASNodeErrorStatus.asneOK */)
  {
    log.LogCritical($"Failed to save coordinate system file in TRex for project {projectUid}");
  }
}
/// <summary>
/// Deletes an imported file: removes it from the DB first, then from TRex/S3
/// (design file types) and/or DataOcean plus its generated tiles
/// (Linework/GeoTiff). If the DataOcean delete fails the DB delete is rolled
/// back (undelete) and a service exception is thrown.
/// </summary>
protected override async Task<ContractExecutionResult> ProcessAsyncEx<T>(T item)
{
  var deleteImportedFile = CastRequestObjectTo<DeleteImportedFile>(item, errorCode: 68);

  await CheckIfUsedInFilter(deleteImportedFile);
  await CheckIfHasReferenceSurfacesAsync(deleteImportedFile);

  // DB change must be made before productivity3dV2ProxyNotification.DeleteFile
  // is called, as it calls back here to get the list of active files.
  // (Original note: this ordering constraint may no longer apply.)
  await ImportedFileRequestDatabaseHelper.DeleteImportedFileInDb(deleteImportedFile.ProjectUid,
    deleteImportedFile.ImportedFileUid, serviceExceptionHandler, projectRepo);

  ImportedFileInternalResult importedFileInternalResult = null;

  if (deleteImportedFile.IsTRexDesignFileType)
  {
    // Now delete in TRex...
    await ImportedFileRequestHelper.NotifyTRexDeleteFile(deleteImportedFile.ProjectUid,
      deleteImportedFile.ImportedFileType, deleteImportedFile.FileDescriptor.FileName,
      deleteImportedFile.ImportedFileUid, deleteImportedFile.SurveyedUtc,
      log, customHeaders, serviceExceptionHandler, tRexImportFileProxy);

    // ...and from the S3 bucket.
    ProjectRequestHelper.DeleteFileFromS3Repository(
      deleteImportedFile.ProjectUid.ToString(), deleteImportedFile.FileDescriptor.FileName,
      deleteImportedFile.ImportedFileType == ImportedFileType.SurveyedSurface,
      deleteImportedFile.SurveyedUtc, log, serviceExceptionHandler,
      persistantTransferProxyFactory.NewProxy(TransferProxyType.DesignImport));
  }

  if (deleteImportedFile.ImportedFileType == ImportedFileType.Linework ||
      deleteImportedFile.ImportedFileType == ImportedFileType.GeoTiff)
  {
    var dataOceanFileName = DataOceanFileUtil.DataOceanFileName(deleteImportedFile.FileDescriptor.FileName,
      deleteImportedFile.ImportedFileType == ImportedFileType.SurveyedSurface || deleteImportedFile.ImportedFileType == ImportedFileType.GeoTiff,
      deleteImportedFile.ImportedFileUid, deleteImportedFile.SurveyedUtc);

    importedFileInternalResult = await DataOceanHelper.DeleteFileFromDataOcean(
      dataOceanFileName, deleteImportedFile.DataOceanRootFolder, customerUid,
      deleteImportedFile.ProjectUid, deleteImportedFile.ImportedFileUid,
      log, dataOceanClient, authn, configStore);

    var tasks = new List<Task>();

    // Delete generated DXF tiles.
    var dxfFileName = DataOceanFileUtil.GeneratedFileName(dataOceanFileName, deleteImportedFile.ImportedFileType);
    var dataOceanPath = DataOceanFileUtil.DataOceanPath(deleteImportedFile.DataOceanRootFolder, customerUid, deleteImportedFile.ProjectUid.ToString());
    // FIX: use the DataOcean path separator, not Path.DirectorySeparatorChar;
    // the latter is '\' on Windows, producing a broken DataOcean path. Every
    // other DataOcean path join in this codebase uses DataOceanUtil.PathSeparator.
    var fullFileName = $"{dataOceanPath}{DataOceanUtil.PathSeparator}{dxfFileName}";
    tasks.Add(pegasusClient.DeleteTiles(fullFileName, DataOceanHelper.CustomHeaders(authn)));

    await Task.WhenAll(tasks);
  }

  if (importedFileInternalResult != null)
  {
    // DataOcean delete failed: restore the DB row, then surface the failure.
    await ImportedFileRequestDatabaseHelper.UndeleteImportedFile(deleteImportedFile.ProjectUid,
      deleteImportedFile.ImportedFileUid, serviceExceptionHandler, projectRepo);
    serviceExceptionHandler.ThrowServiceException(importedFileInternalResult.StatusCode,
      importedFileInternalResult.ErrorNumber, importedFileInternalResult.ResultCode,
      importedFileInternalResult.ErrorMessage1);
  }

  return new ContractExecutionResult();
}
/// <summary>
/// Adds an imported file: writes it to the DB first (TRex needs the
/// ImportedFileUid), then notifies TRex for design file types, and schedules
/// raster tile generation for Linework/GeoTiff files. Returns the file's
/// descriptor as re-read from the DB.
/// </summary>
protected override async Task <ContractExecutionResult> ProcessAsyncEx <T>(T item)
{
  var importedFile = CastRequestObjectTo <CreateImportedFile>(item, errorCode: 68);

  // Reference surfaces require their parent design to exist.
  await ImportedFileRequestDatabaseHelper.CheckIfParentSurfaceExistsAsync(importedFile.ImportedFileType, importedFile.ParentUid, serviceExceptionHandler, projectRepo);

  // need to write to Db prior to notifying TRex as Trex needs the ImportedFileUid
  var createImportedFileEvent = await ImportedFileRequestDatabaseHelper.CreateImportedFileinDb(
    Guid.Parse(customerUid), importedFile.ProjectUid, importedFile.ImportedFileType, importedFile.DxfUnitsType,
    importedFile.FileName, importedFile.SurveyedUtc, JsonConvert.SerializeObject(importedFile.FileDescriptor),
    importedFile.FileCreatedUtc, importedFile.FileUpdatedUtc, userEmailAddress,
    log, serviceExceptionHandler, projectRepo,
    importedFile.ParentUid, importedFile.Offset, importedFile.ImportedFileUid);

  if (importedFile.IsTRexDesignFileType)
  {
    await ImportedFileRequestHelper.NotifyTRexAddFile(importedFile.ProjectUid,
      importedFile.ImportedFileType, importedFile.FileName, createImportedFileEvent.ImportedFileUID,
      importedFile.SurveyedUtc, log, customHeaders, serviceExceptionHandler,
      tRexImportFileProxy, projectRepo);
  }

  if (importedFile.ImportedFileType == ImportedFileType.Linework || importedFile.ImportedFileType == ImportedFileType.GeoTiff)
  {
    // Start both lookups, then await them together; the results are consumed
    // via .Result below, which is safe only because of the WhenAll.
    var project = ProjectRequestHelper.GetProject(importedFile.ProjectUid, new Guid(customerUid), new Guid(userId), log, serviceExceptionHandler, cwsProjectClient, customHeaders);
    var existing = projectRepo.GetImportedFile(createImportedFileEvent.ImportedFileUID.ToString());
    await Task.WhenAll(project, existing);

    // GeoTiff tile generation needs no coordinate system file; Linework does.
    var dcFileName = importedFile.ImportedFileType == ImportedFileType.GeoTiff ?
      null : DataOceanFileUtil.DataOceanFileName(project.Result.CoordinateSystemFileName, false, importedFile.ProjectUid, null);

    //Generate raster tiles
    var jobRequest = TileGenerationRequestHelper.CreateRequest(
      importedFile.ImportedFileType, customerUid, importedFile.ProjectUid.ToString(),
      existing.Result.ImportedFileUid, importedFile.DataOceanRootFolder, importedFile.DataOceanFileName,
      dcFileName, importedFile.DxfUnitsType, importedFile.SurveyedUtc);
    await schedulerProxy.ScheduleVSSJob(jobRequest, customHeaders);
  }

  // Re-read from the DB so the returned descriptor reflects persisted state.
  var fileDescriptor = new ImportedFileDescriptorSingleResult(
    (await ImportedFileRequestDatabaseHelper
      .GetImportedFileList(importedFile.ProjectUid.ToString(), log, userId, projectRepo))
    .ToImmutableList()
    .First(f => f.ImportedFileUid == createImportedFileEvent.ImportedFileUID.ToString())
  );

  log.LogInformation(
    $"CreateImportedFileV4. completed successfully. Response: {JsonConvert.SerializeObject(fileDescriptor)}");
  return (fileDescriptor);
}
/// <summary>
/// Common file processing method used by all importedFile endpoints.
/// Looks up the file in the DB to decide between create and update, writes the
/// content to S3 (TRex design types) and/or DataOcean (Linework/GeoTiff), then
/// dispatches to the Create or Update executor and notifies project listeners.
/// </summary>
/// <param name="filename">Original file name as uploaded.</param>
/// <param name="fileStream">File content stream, written to S3/DataOcean as needed.</param>
/// <param name="projectUid">Target project.</param>
/// <param name="importedFileType">Type of the imported file; drives the storage/tiling path taken.</param>
/// <param name="dxfUnitsType">Units for DXF files.</param>
/// <param name="fileCreatedUtc">Client-reported file creation time.</param>
/// <param name="fileUpdatedUtc">Client-reported file update time.</param>
/// <param name="surveyedUtc">Surveyed date (surveyed surfaces / geotiffs only).</param>
/// <param name="schedulerProxy">Scheduler used by the executors for tile-generation jobs.</param>
/// <param name="parentUid">Parent design uid (reference surfaces only).</param>
/// <param name="offset">Offset from parent design (reference surfaces only).</param>
protected async Task <ImportedFileDescriptorSingleResult> UpsertFileInternal(
  string filename, Stream fileStream, Guid projectUid, ImportedFileType importedFileType,
  DxfUnitsType dxfUnitsType, DateTime fileCreatedUtc, DateTime fileUpdatedUtc,
  DateTime? surveyedUtc, ISchedulerProxy schedulerProxy, Guid? parentUid = null, double? offset = null)
{
  ImportedFileDescriptorSingleResult importedFile = null;

  // An existing row with the same identifying attributes means update, not create.
  var existing = await ImportedFileRequestDatabaseHelper
    .GetImportedFileForProject(projectUid.ToString(), filename, importedFileType, surveyedUtc, Logger, ProjectRepo, offset, parentUid)
    .ConfigureAwait(false);
  var creating = existing == null;

  Logger.LogInformation(
    creating ?
      $"{nameof(UpsertFileInternal)}. file doesn't exist already in DB: (unknown) projectUid {projectUid} ImportedFileType: {importedFileType} surveyedUtc {(surveyedUtc == null ? "N/A" : surveyedUtc.ToString())} parentUid {parentUid} offset: {offset}" :
      $"{nameof(UpsertFileInternal)}. file exists already in DB. Will be updated: {JsonConvert.SerializeObject(existing)}");

  FileDescriptor fileDescriptor = null;

  // On create, mint the uid here so the DataOcean name can embed it up front.
  var importedFileUid = creating ? Guid.NewGuid() : Guid.Parse(existing.ImportedFileUid);
  var dataOceanFileName = DataOceanFileUtil.DataOceanFileName(filename,
    importedFileType == ImportedFileType.SurveyedSurface || importedFileType == ImportedFileType.GeoTiff,
    importedFileUid, surveyedUtc);

  if (importedFileType == ImportedFileType.ReferenceSurface)
  {
    //FileDescriptor not used for reference surface but validation requires values
    fileDescriptor = FileDescriptor.CreateFileDescriptor("Not applicable", "Not applicable", filename);
  }
  else
  {
    if (IsTRexDesignFileType(importedFileType))
    {
      fileDescriptor = ProjectRequestHelper.WriteFileToS3Repository(
        fileStream, projectUid.ToString(), filename,
        importedFileType == ImportedFileType.SurveyedSurface, surveyedUtc,
        Logger, ServiceExceptionHandler, persistantTransferProxyFactory.NewProxy(TransferProxyType.DesignImport));
    }

    // This is needed for ATs. NOTE(review): this deliberately overwrites the
    // S3 descriptor assigned above; the S3 write still happened.
    fileDescriptor = FileDescriptor.CreateFileDescriptor(
      FileSpaceId, $"/{CustomerUid}/{projectUid}", filename);

    if (importedFileType == ImportedFileType.Linework || importedFileType == ImportedFileType.GeoTiff)
    {
      //save copy to DataOcean
      await DataOceanHelper.WriteFileToDataOcean(
        fileStream, DataOceanRootFolderId, CustomerUid, projectUid.ToString(), dataOceanFileName,
        Logger, ServiceExceptionHandler, DataOceanClient, Authorization, importedFileUid, ConfigStore);
    }
  }

  if (creating)
  {
    var createImportedFile = new CreateImportedFile(
      projectUid, filename, fileDescriptor, importedFileType, surveyedUtc, dxfUnitsType,
      fileCreatedUtc, fileUpdatedUtc, DataOceanRootFolderId, parentUid, offset,
      importedFileUid, dataOceanFileName);

    importedFile = await WithServiceExceptionTryExecuteAsync(() =>
      RequestExecutorContainerFactory
        .Build<CreateImportedFileExecutor>(
          LoggerFactory, ConfigStore, ServiceExceptionHandler, CustomerUid, UserId, UserEmailAddress, customHeaders,
          productivity3dV2ProxyCompaction: Productivity3dV2ProxyCompaction,
          persistantTransferProxyFactory: persistantTransferProxyFactory,
          tRexImportFileProxy: tRexImportFileProxy,
          projectRepo: ProjectRepo, dataOceanClient: DataOceanClient, authn: Authorization,
          schedulerProxy: schedulerProxy, cwsProjectClient: CwsProjectClient)
        .ProcessAsync(createImportedFile)
    ) as ImportedFileDescriptorSingleResult;

    Logger.LogInformation(
      $"{nameof(UpsertFileInternal)}: Create completed successfully. Response: {JsonConvert.SerializeObject(importedFile)}");
  }
  else
  {
    // this also validates that this customer has access to the projectUid
    var project = await ProjectRequestHelper.GetProject(projectUid, new Guid(CustomerUid), new Guid(UserId), Logger, ServiceExceptionHandler, CwsProjectClient, customHeaders);

    var importedFileUpsertEvent = new UpdateImportedFile(
      projectUid, project.ShortRaptorProjectId, importedFileType,
      // Only surveyed surfaces / geotiffs carry a surveyed date forward.
      (importedFileType == ImportedFileType.SurveyedSurface || importedFileType == ImportedFileType.GeoTiff) ? surveyedUtc : null,
      dxfUnitsType, fileCreatedUtc, fileUpdatedUtc, fileDescriptor,
      Guid.Parse(existing?.ImportedFileUid), existing.ImportedFileId,
      DataOceanRootFolderId, offset, dataOceanFileName);

    importedFile = await WithServiceExceptionTryExecuteAsync(() =>
      RequestExecutorContainerFactory
        .Build<UpdateImportedFileExecutor>(
          LoggerFactory, ConfigStore, ServiceExceptionHandler, CustomerUid, UserId, UserEmailAddress, customHeaders,
          productivity3dV2ProxyCompaction: Productivity3dV2ProxyCompaction,
          tRexImportFileProxy: tRexImportFileProxy,
          projectRepo: ProjectRepo, dataOceanClient: DataOceanClient, authn: Authorization,
          schedulerProxy: schedulerProxy, cwsProjectClient: CwsProjectClient)
        .ProcessAsync(importedFileUpsertEvent)
    ) as ImportedFileDescriptorSingleResult;

    Logger.LogInformation(
      $"{nameof(UpsertFileInternal)}: Update completed successfully. Response: {JsonConvert.SerializeObject(importedFile)}");
  }

  // Tell listeners the project's file set changed.
  await NotificationHubClient.Notify(new ProjectChangedNotification(projectUid));
  return (importedFile);
}
/// <summary>
/// Produces a single overlay tile by fetching the pre-generated DataOcean
/// tiles for each Linework/GeoTiff file in the request (scaling from a higher
/// zoom when necessary) and overlaying them.
/// </summary>
protected override async Task<ContractExecutionResult> ProcessAsyncEx<T>(T item)
{
  List<FileData> files = null;
  int zoomLevel = 0;
  Point topLeftTile = null;
  int numTiles = 0;

  if (item is DxfTileRequest request)
  {
    files = request.files?.ToList();
    //Calculate zoom level from the bounding box extents.
    zoomLevel = TileServiceUtils.CalculateZoomLevel(request.bbox.TopRightLat - request.bbox.BottomLeftLat,
      request.bbox.TopRightLon - request.bbox.BottomLeftLon);
    log.LogDebug("DxfTileExecutor: BBOX differences {0} {1} {2}",
      request.bbox.TopRightLat - request.bbox.BottomLeftLat,
      request.bbox.TopRightLon - request.bbox.BottomLeftLon,
      zoomLevel);
    numTiles = TileServiceUtils.NumberOfTiles(zoomLevel);
    Point topLeftLatLng = new Point(request.bbox.TopRightLat.LatRadiansToDegrees(),
      request.bbox.BottomLeftLon.LonRadiansToDegrees());
    topLeftTile = WebMercatorProjection.LatLngToTile(topLeftLatLng, numTiles);
    log.LogDebug($"DxfTileExecutor: zoomLevel={zoomLevel}, numTiles={numTiles}, xtile={topLeftTile.x}, ytile={topLeftTile.y}");
  }
  else if (item is DxfTile3dRequest request3d)
  {
    // 3d requests supply the tile address directly.
    files = request3d.files?.ToList();
    zoomLevel = request3d.zoomLevel;
    numTiles = TileServiceUtils.NumberOfTiles(zoomLevel);
    topLeftTile = new Point { x = request3d.xTile, y = request3d.yTile };
  }
  else
  {
    ThrowRequestTypeCastException<DxfTileRequest>();
  }

  log.LogDebug($"DxfTileExecutor: {files?.Count ?? 0} files");

  //Short circuit overlaying if there no files to overlay as ForAll is an expensive operation
  if (files == null || !files.Any())
  {
    byte[] emptyOverlayData;
    using (var bitmap = new Image<Rgba32>(WebMercatorProjection.TILE_SIZE, WebMercatorProjection.TILE_SIZE))
    {
      emptyOverlayData = bitmap.BitmapToByteArray();
    }
    return new TileResult(emptyOverlayData);
  }

  log.LogDebug(string.Join(",", files.Select(f => f.Name).ToList()));

  const string DATA_OCEAN_ROOT_FOLDER_ID_KEY = "DATA_OCEAN_ROOT_FOLDER_ID";
  var dataOceanRootFolder = configStore.GetValueString(DATA_OCEAN_ROOT_FOLDER_ID_KEY);
  if (string.IsNullOrEmpty(dataOceanRootFolder))
  {
    throw new ArgumentException($"Missing environment variable {DATA_OCEAN_ROOT_FOLDER_ID_KEY}");
  }

  //For GeoTIFF files, use the latest version of a file
  var geoTiffFiles = files.Where(x => x.ImportedFileType == ImportedFileType.GeoTiff).ToList();
  if (geoTiffFiles.Any())
  {
    //Find any with multiple versions and remove old ones from the list
    var latestFiles = geoTiffFiles.GroupBy(g => g.Name).Select(g => g.OrderBy(o => o.SurveyedUtc).Last()).ToList();
    foreach (var geoTiffFile in geoTiffFiles)
    {
      if (!latestFiles.Contains(geoTiffFile))
      {
        files.Remove(geoTiffFile);
      }
    }
  }

  // FIX: the original added each tile to a shared List<byte[]> from inside
  // concurrently-running async tasks; List<T> is not thread-safe, so tiles
  // could be lost or the list corrupted. Each task now returns its tile bytes
  // (or null) and the results are collected after Task.WhenAll, which also
  // makes the overlay order deterministic (request file order).
  var fileTasks = files.Select(async file =>
  {
    //Check file type to see if it has tiles
    if (file.ImportedFileType == ImportedFileType.Linework || file.ImportedFileType == ImportedFileType.GeoTiff)
    {
      var fullPath = DataOceanFileUtil.DataOceanPath(dataOceanRootFolder, file.CustomerUid, file.ProjectUid);
      var fileName = DataOceanFileUtil.DataOceanFileName(file.Name,
        file.ImportedFileType == ImportedFileType.SurveyedSurface || file.ImportedFileType == ImportedFileType.GeoTiff,
        Guid.Parse(file.ImportedFileUid), file.SurveyedUtc);
      fileName = DataOceanFileUtil.GeneratedFileName(fileName, file.ImportedFileType);

      if (zoomLevel >= file.MinZoomLevel)
      {
        if (zoomLevel <= file.MaxZoomLevel || file.MaxZoomLevel == 0) //0 means not calculated
        {
          return await GetTileAtRequestedZoom(topLeftTile, zoomLevel, fullPath, fileName);
        }
        if (zoomLevel - file.MaxZoomLevel <= 5) //Don't try to scale if the difference is too excessive
        {
          return await GetTileAtHigherZoom(topLeftTile, zoomLevel, fullPath, fileName, file.MaxZoomLevel, numTiles);
        }
        log.LogDebug(
          "DxfTileExecutor: difference between requested and maximum zooms too large; not even going to try to scale tile");
      }
    }
    return null;
  });

  var fetchedTiles = await Task.WhenAll(fileTasks);
  var tileList = fetchedTiles.Where(t => t != null && t.Length > 0).ToList();

  log.LogDebug($"DxfTileExecutor: Overlaying {tileList.Count} tiles");
  byte[] overlayData = TileOverlay.OverlayTiles(tileList);
  return new TileResult(overlayData);
}
/// <summary>
/// Processes the Upsert: updates an existing imported file. Notifies TRex for
/// design file types, re-schedules tile generation for Linework/GeoTiff, then
/// updates the DB row and returns the file's descriptor re-read from the DB.
/// Throws 122 (BadRequest) if the file does not exist.
/// </summary>
protected override async Task <ContractExecutionResult> ProcessAsyncEx <T>(T item)
{
  var importedFile = CastRequestObjectTo <UpdateImportedFile>(item, errorCode: 68);

  var existingImportedFile = await projectRepo.GetImportedFile(importedFile.ImportedFileUid.ToString());
  if (existingImportedFile == null)
  {
    // Cannot update a file that was never imported.
    throw new ServiceException(HttpStatusCode.BadRequest,
      new ContractExecutionResult(ProjectErrorCodesProvider.GetErrorNumberwithOffset(122),
        ProjectErrorCodesProvider.FirstNameWithOffset(122)));
  }

  if (importedFile.IsTRexDesignFileType)
  {
    await ImportedFileRequestHelper.NotifyTRexUpdateFile(importedFile.ProjectUid,
      importedFile.ImportedFileType, importedFile.FileDescriptor.FileName, importedFile.ImportedFileUid,
      importedFile.SurveyedUtc, log, customHeaders, serviceExceptionHandler, tRexImportFileProxy);
  }

  if (importedFile.ImportedFileType == ImportedFileType.Linework || importedFile.ImportedFileType == ImportedFileType.GeoTiff)
  {
    // Linework tile generation needs the project's coordinate system file;
    // GeoTiff does not (dcFileName stays null).
    string dcFileName = null;
    if (importedFile.ImportedFileType == ImportedFileType.Linework)
    {
      var project = await ProjectRequestHelper.GetProject(importedFile.ProjectUid, new Guid(customerUid), new Guid(userId), log, serviceExceptionHandler, cwsProjectClient, customHeaders);
      dcFileName = DataOceanFileUtil.DataOceanFileName(project.CoordinateSystemFileName, false, importedFile.ProjectUid, null);
    }

    // Regenerate raster tiles for the updated file.
    var jobRequest = TileGenerationRequestHelper.CreateRequest(
      importedFile.ImportedFileType, customerUid, importedFile.ProjectUid.ToString(),
      existingImportedFile.ImportedFileUid, importedFile.DataOceanRootFolder, importedFile.DataOceanFileName,
      dcFileName, importedFile.DxfUnitsTypeId, importedFile.SurveyedUtc);
    await schedulerProxy.ScheduleVSSJob(jobRequest, customHeaders);
  }

  // if all succeeds, update Db and put update to kafka que
  var updateImportedFileEvent = await ImportedFileRequestDatabaseHelper.UpdateImportedFileInDb(existingImportedFile,
    existingImportedFile.FileDescriptor, importedFile.SurveyedUtc,
    existingImportedFile.MinZoomLevel, existingImportedFile.MaxZoomLevel,
    importedFile.FileCreatedUtc, importedFile.FileUpdatedUtc, userEmailAddress,
    log, serviceExceptionHandler, projectRepo);

  // Re-read from the DB so the returned descriptor reflects persisted state.
  var fileDescriptor = new ImportedFileDescriptorSingleResult(
    (await ImportedFileRequestDatabaseHelper.GetImportedFileList(importedFile.ProjectUid.ToString(), log, userId, projectRepo))
    .ToImmutableList()
    .FirstOrDefault(f => f.ImportedFileUid == importedFile.ImportedFileUid.ToString())
  );

  log.LogInformation(
    $"UpdateImportedFileExecutor. entry {(fileDescriptor.ImportedFileDescriptor == null ? "not " : "")}retrieved from DB : {JsonConvert.SerializeObject(fileDescriptor)}");
  return (fileDescriptor);
}