/// <summary>
/// Stitches the pre-generated DataOcean raster tiles for the given DXF/GeoTIFF file into
/// the supplied bitmap, which covers the tile rectangle tileTopLeft..tileBottomRight
/// (inclusive) at the given zoom level. Tiles missing from DataOcean (null stream) are
/// skipped, leaving that region of the bitmap untouched.
/// </summary>
/// <param name="dxfFile">The imported file whose generated tiles are being joined</param>
/// <param name="tileTopLeft">Top-left tile coordinate of the rectangle to fill</param>
/// <param name="tileBottomRight">Bottom-right tile coordinate of the rectangle to fill</param>
/// <param name="tileBitmap">The destination bitmap, mutated in place</param>
/// <param name="zoomLevel">The zoom level of the tiles to fetch</param>
private async Task JoinDataOceanTiles(FileData dxfFile, MasterDataModels.Point tileTopLeft, MasterDataModels.Point tileBottomRight, Image<Rgba32> tileBitmap, int zoomLevel)
{
  var fileName = DataOceanFileUtil.DataOceanFileName(dxfFile.Name,
    dxfFile.ImportedFileType == ImportedFileType.SurveyedSurface || dxfFile.ImportedFileType == ImportedFileType.GeoTiff,
    Guid.Parse(dxfFile.ImportedFileUid), dxfFile.SurveyedUtc);
  fileName = DataOceanFileUtil.GeneratedFileName(fileName, dxfFile.ImportedFileType);
  var dataOceanFileUtil = new DataOceanFileUtil($"{DataOceanUtil.PathSeparator}{dataOceanRootFolder}{dxfFile.Path}{DataOceanUtil.PathSeparator}{fileName}");
  log.LogDebug($"{nameof(JoinDataOceanTiles)}: fileName: {fileName} dataOceanFileUtil.FullFileName {dataOceanFileUtil.FullFileName}");

  for (int yTile = (int)tileTopLeft.y; yTile <= (int)tileBottomRight.y; yTile++)
  {
    for (int xTile = (int)tileTopLeft.x; xTile <= (int)tileBottomRight.x; xTile++)
    {
      var targetFile = dataOceanFileUtil.GetTileFileName(zoomLevel, yTile, xTile);
      log.LogDebug($"JoinDxfTiles: getting tile {targetFile}");
      var file = await dataOceanClient.GetFile(targetFile, authn.CustomHeaders());
      if (file != null)
      {
        //BUG FIX: both the source stream and the decoded Image<Rgba32> are IDisposable
        //and were previously leaked for every tile drawn; dispose them once drawn.
        using (file)
        using (Image<Rgba32> tile = Image.Load<Rgba32>(file))
        {
          //Offset of this tile within the destination bitmap, in pixels
          Point offset = new Point(
            (xTile - (int)tileTopLeft.x) * MasterDataModels.WebMercatorProjection.TILE_SIZE,
            (yTile - (int)tileTopLeft.y) * MasterDataModels.WebMercatorProjection.TILE_SIZE);
          tileBitmap.Mutate(ctx => ctx.DrawImage(tile, PixelBlenderMode.Normal, 1f, offset));
        }
      }
    }
  }
}
/// <summary>
/// Verifies that GeoTIFF tile generation reports a failure when the Pegasus
/// create-execution call returns no execution.
/// </summary>
public async Task CanGenerateGeoTiffTilesFailToCreateExecution()
{
  //DataOcean mocks: resolve the top-level folder and file ids and allow the
  //generated-tiles sub folder to be created.
  var topFolder = new DataOceanDirectory { Id = Guid.NewGuid(), Name = topLevelFolderName };
  var geoTiffFile = new DataOceanFile { Id = Guid.NewGuid(), Name = geoTiffFileName, ParentId = topFolder.Id };
  var tilesSubFolderPath = new DataOceanFileUtil(geoTiffFullName).GeneratedTilesFolder;

  var dataOceanMock = new Mock<IDataOceanClient>();
  dataOceanMock.Setup(d => d.GetFileId(geoTiffFullName, null)).ReturnsAsync(geoTiffFile.Id);
  dataOceanMock.Setup(d => d.MakeFolder(tilesSubFolderPath, null)).ReturnsAsync(true);
  dataOceanMock.Setup(d => d.GetFolderId($"{DataOceanUtil.PathSeparator}{topLevelFolderName}", null)).ReturnsAsync(topFolder.Id);

  //Pegasus mock: the create-execution POST yields null, i.e. creation failed.
  var config = serviceProvider.GetRequiredService<Common.Abstractions.Configuration.IConfigurationStore>();
  var pegasusBaseUrl = config.GetValueString("PEGASUS_URL");
  var baseRoute = "/api/executions";
  var createExecutionUrl = $"{pegasusBaseUrl}{baseRoute}";

  var gracefulMock = new Mock<IWebRequest>();
  gracefulMock
    .Setup(g => g.ExecuteRequest<PegasusExecutionResult>(createExecutionUrl, It.IsAny<MemoryStream>(), null, HttpMethod.Post, null, 0, false))
    .ReturnsAsync((PegasusExecutionResult)null);

  await ProcessWithFailure(gracefulMock, dataOceanMock, $"Failed to create execution for {geoTiffFullName}", false);
}
/// <summary>
/// The generated DataOcean file name must be prefixed with the file's Guid.
/// </summary>
public void DataOceanFileNameShouldContainAGuid(string fileName)
{
  var uid = Guid.NewGuid();

  var result = DataOceanFileUtil.DataOceanFileName(fileName, false, uid, null);

  Assert.StartsWith(uid.ToString(), result);
}
/// <summary>
/// Builds DataOcean and Pegasus mocks for a DXF tile-generation run whose Pegasus
/// execution reports the given status, then runs the shared success-path helper.
/// </summary>
/// <param name="status">The execution status the mocked Pegasus polling call will report</param>
/// <returns>The tile metadata task produced by ProcessWithSuccess</returns>
private Task<TileMetadata> CanGenerateDxfTiles(string status)
{
  //Set up DataOcean stuff
  var expectedTopFolderResult = new DataOceanDirectory { Id = Guid.NewGuid(), Name = topLevelFolderName };
  var expectedDcFileResult = new DataOceanFile { Id = Guid.NewGuid(), Name = dcFileName, ParentId = expectedTopFolderResult.Id };
  var expectedDxfFileResult = new DataOceanFile { Id = Guid.NewGuid(), Name = dxfFileName, ParentId = expectedTopFolderResult.Id };
  var subFolderPath = new DataOceanFileUtil(dxfFullName).GeneratedTilesFolder;
  //The generated-tiles folder name is the last segment of its full path
  var parts = subFolderPath.Split(DataOceanUtil.PathSeparator);
  var subFolderName = parts[parts.Length - 1];
  var dataOceanMock = new Mock<IDataOceanClient>();
  dataOceanMock.Setup(d => d.GetFileId(dcFullName, null)).ReturnsAsync(expectedDcFileResult.Id);
  dataOceanMock.Setup(d => d.GetFileId(dxfFullName, null)).ReturnsAsync(expectedDxfFileResult.Id);
  dataOceanMock.Setup(d => d.MakeFolder(subFolderPath, null)).ReturnsAsync(true);
  dataOceanMock.Setup(d => d.GetFolderId($"{DataOceanUtil.PathSeparator}{topLevelFolderName}", null)).ReturnsAsync(expectedTopFolderResult.Id);
  //Set up Pegasus stuff
  var units = DxfUnitsType.UsSurveyFeet.ToString();
  var expectedExecution = NewDxfPegasusExecution(expectedDcFileResult, expectedDxfFileResult, subFolderName, units, status);
  var expectedExecutionResult = new PegasusExecutionResult { Execution = expectedExecution };
  var expectedExecutionAttemptResult = new PegasusExecutionAttemptResult { ExecutionAttempt = new PegasusExecutionAttempt { Id = Guid.NewGuid(), Status = ExecutionStatus.EXECUTING } };
  var config = serviceProvider.GetRequiredService<Common.Abstractions.Configuration.IConfigurationStore>();
  var pegasusBaseUrl = config.GetValueString("PEGASUS_URL");
  var baseRoute = "/api/executions";
  var createExecutionUrl = $"{pegasusBaseUrl}{baseRoute}";
  var startExecutionUrl = $"{pegasusBaseUrl}{baseRoute}/{expectedExecution.Id}/start";
  var executionStatusUrl = $"{pegasusBaseUrl}{baseRoute}/{expectedExecution.Id}";
  //Mock the three Pegasus calls: create execution, start it, and poll its status
  var gracefulMock = new Mock<IWebRequest>();
  gracefulMock
    .Setup(g => g.ExecuteRequest<PegasusExecutionResult>(createExecutionUrl, It.IsAny<MemoryStream>(), null, HttpMethod.Post, null, 0, false)).ReturnsAsync(expectedExecutionResult);
  gracefulMock
    .Setup(g => g.ExecuteRequest<PegasusExecutionAttemptResult>(startExecutionUrl, null, null, HttpMethod.Post, null, 0, false)).ReturnsAsync(expectedExecutionAttemptResult);
  gracefulMock
    .Setup(g => g.ExecuteRequest<PegasusExecutionResult>(executionStatusUrl, null, null, HttpMethod.Get, null, 0, false)).ReturnsAsync(expectedExecutionResult);
  return (ProcessWithSuccess(gracefulMock, dataOceanMock, subFolderPath, true));
}
/// <summary>
/// Deletes generated tiles for the given file
/// </summary>
/// <returns>True if successfully deleted otherwise false</returns>
public Task<bool> DeleteTiles(string fullFileName, IHeaderDictionary customHeaders)
{
  //In DataOcean the generated tiles "folder" is really a multi-file.
  //Skip the existence check and just attempt the delete, saving one traversal.
  var generatedTilesName = new DataOceanFileUtil(fullFileName).GeneratedTilesFolder;
  return dataOceanClient.DeleteFile(generatedTilesName, customHeaders);
}
/// <summary>
/// DataOceanPath must compose /root/customer/project with the DataOcean separator.
/// </summary>
public void DataOceanPath_is_constructed_correctly()
{
  const string rootFolder = "rootFolder";
  var customerUid = Guid.NewGuid().ToString();
  var projectUid = Guid.NewGuid().ToString();

  var actual = DataOceanFileUtil.DataOceanPath(rootFolder, customerUid, projectUid);

  var sep = DataOceanUtil.PathSeparator;
  var expected = $"{sep}{rootFolder}{sep}{customerUid}{sep}{projectUid}";
  Assert.Equal(expected, actual);
}
/// <summary>
/// For a GeoTIFF with a surveyed date, the generated name must carry both the
/// file Guid prefix and a non-empty date portion after it.
/// </summary>
public void DataOceanFileNameForGeotiffShouldContainSurveyedUtc()
{
  var fileUid = Guid.NewGuid();
  var surveyedUtc = DateTime.UtcNow;

  var result = DataOceanFileUtil.DataOceanFileName("some name.tif", true, fileUid, surveyedUtc);

  var guidPrefix = fileUid.ToString();
  Assert.StartsWith(guidPrefix, result);
  //Whatever follows the Guid (before the extension) must be the date part
  var datePart = Path.GetFileNameWithoutExtension(result).Substring(guidPrefix.Length);
  Assert.False(string.IsNullOrEmpty(datePart));
}
/// <summary>
/// The tiles metadata file name depends on the source extension:
/// "tiles.json" for DXF, "xyz.json" otherwise, inside the generated tiles folder.
/// </summary>
public void CanGetTileMetadataFileName(string extension)
{
  var sep = DataOceanUtil.PathSeparator;
  var pathAndName = $"{sep}dev{sep}folder-one{sep}folder-two{sep}dummy";
  var file = new DataOceanFileUtil($"{pathAndName}.{extension}");

  var metadataName = file.TilesMetadataFileName;

  var expectedName = string.Equals(extension, "dxf", StringComparison.OrdinalIgnoreCase) ? "tiles" : "xyz";
  var expectedMetadata = $"{pathAndName}{DataOceanFileUtil.GENERATED_TILE_FOLDER_SUFFIX}/tiles/{expectedName}.json";
  Assert.Equal(expectedMetadata, metadataName);
}
/// <summary>
/// Writes the importedFile to DataOcean as a create or update,
/// if it already exists, old version will be deleted first.
/// </summary>
/// <remarks>
/// No-op (apart from logging) when the ENABLE_DATA_OCEAN config flag is off.
/// The dataOceanFileName must start with the fileUid — that naming convention is enforced here.
/// </remarks>
public static async Task WriteFileToDataOcean(
  Stream fileContents, string rootFolder, string customerUid, string projectUid,
  string dataOceanFileName, ILogger log, IServiceExceptionHandler serviceExceptionHandler,
  IDataOceanClient dataOceanClient, ITPaaSApplicationAuthentication authn, Guid fileUid,
  IConfigurationStore configStore)
{
  var dataOceanEnabled = configStore.GetValueBool("ENABLE_DATA_OCEAN", false);
  if (dataOceanEnabled)
  {
    //Enforce the DataOcean naming convention: file names are prefixed with the file's Guid
    if (!dataOceanFileName.StartsWith(fileUid.ToString()))
    {
      throw new ServiceException(HttpStatusCode.InternalServerError,
        new ContractExecutionResult(ContractExecutionStatesEnum.InternalProcessingError,
          $"Invalid DataOcean file name {dataOceanFileName}"));
    }

    var customHeaders = authn.CustomHeaders();
    var dataOceanPath = DataOceanFileUtil.DataOceanPath(rootFolder, customerUid, projectUid);
    var ccPutFileResult = false;
    var folderAlreadyExists = false;
    try
    {
      log.LogInformation($"{nameof(WriteFileToDataOcean)}: dataOceanPath: '{dataOceanPath}', dataOceanFileName: '{dataOceanFileName}'");
      //Create the customer/project folder hierarchy only if it's not already there
      folderAlreadyExists = await dataOceanClient.FolderExists(dataOceanPath, customHeaders);
      if (!folderAlreadyExists)
      {
        await dataOceanClient.MakeFolder(dataOceanPath, customHeaders);
      }
      ccPutFileResult = await dataOceanClient.PutFile(dataOceanPath, dataOceanFileName, fileContents, customHeaders);
    }
    catch (Exception e)
    {
      //NOTE(review): ThrowServiceException presumably throws, so a PutFile exception
      //never reaches the !ccPutFileResult check below — confirm.
      serviceExceptionHandler.ThrowServiceException(HttpStatusCode.InternalServerError, 57, "dataOceanClient.PutFile", e.Message);
    }
    if (!ccPutFileResult)
    {
      serviceExceptionHandler.ThrowServiceException(HttpStatusCode.InternalServerError, 116);
    }
    log.LogInformation($"{nameof(WriteFileToDataOcean)}: dataOceanFileName '{dataOceanFileName}' written to DataOcean, folderAlreadyExists: {folderAlreadyExists}");
  }
  else
  {
    log.LogInformation($"{nameof(WriteFileToDataOcean)}: File not saved. DataOcean disabled");
  }
}
/// <summary>
/// DeleteTiles must forward the DataOcean client's delete result for the
/// generated tiles multi-file of the given source file.
/// </summary>
public void CanDeleteTiles(string fileName, bool success)
{
  var fullName = $"{DataOceanUtil.PathSeparator}{topLevelFolderName}{DataOceanUtil.PathSeparator}{fileName}";
  var webMock = new Mock<IWebRequest>();
  var oceanMock = new Mock<IDataOceanClient>();
  var generatedTilesName = new DataOceanFileUtil(fullName).GeneratedTilesFolder;
  oceanMock.Setup(d => d.DeleteFile(generatedTilesName, null)).ReturnsAsync(success);

  serviceCollection.AddTransient(g => webMock.Object);
  serviceCollection.AddTransient(g => oceanMock.Object);
  var provider = serviceCollection.BuildServiceProvider();
  var client = provider.GetRequiredService<IPegasusClient>();

  var result = client.DeleteTiles(fullName, null).Result;

  Assert.Equal(success, result);
}
/// <summary>
/// Deletes the importedFile from DataOcean
/// </summary>
/// <returns>Null on success (a failed but non-throwing delete is deliberately tolerated),
/// otherwise an internal error result</returns>
public static async Task<ImportedFileInternalResult> DeleteFileFromDataOcean(
  string fileName, string rootFolder, string customerUid, Guid projectUid, Guid importedFileUid, ILogger log,
  IDataOceanClient dataOceanClient, ITPaaSApplicationAuthentication authn, IConfigurationStore configStore)
{
  var dataOceanEnabled = configStore.GetValueBool("ENABLE_DATA_OCEAN", false);
  if (dataOceanEnabled)
  {
    var dataOceanPath = DataOceanFileUtil.DataOceanPath(rootFolder, customerUid, projectUid.ToString());
    //BUG FIX: build the full name with the DataOcean separator, not Path.DirectorySeparatorChar,
    //which is '\' on Windows and produced a path DataOcean would not recognise.
    //All other DataOcean paths in this codebase use DataOceanUtil.PathSeparator.
    var fullFileName = $"{dataOceanPath}{DataOceanUtil.PathSeparator}{fileName}";
    log.LogInformation($"{nameof(DeleteFileFromDataOcean)}: fullFileName {JsonConvert.SerializeObject(fullFileName)}");
    var customHeaders = authn.CustomHeaders();
    bool ccDeleteFileResult;
    try
    {
      ccDeleteFileResult = await dataOceanClient.DeleteFile(fullFileName, customHeaders);
    }
    catch (Exception e)
    {
      log.LogError(e, $"{nameof(DeleteFileFromDataOcean)}: failed for {fileName} (importedFileUid:{importedFileUid}) with exception {e.Message}");
      return (ImportedFileInternalResult.CreateImportedFileInternalResult(HttpStatusCode.InternalServerError, 57, "dataOceanClient.DeleteFile", e.Message));
    }
    if (!ccDeleteFileResult)
    {
      log.LogWarning(
        $"{nameof(DeleteFileFromDataOcean)}: failed to delete {fileName} (importedFileUid:{importedFileUid}).");
      //Not an error if it doesn't delete the file?
      //return ImportedFileInternalResult.CreateImportedFileInternalResult(HttpStatusCode.InternalServerError, 117);
    }
  }
  else
  {
    log.LogInformation($"{nameof(DeleteFileFromDataOcean)}: File not deleted. DataOcean disabled");
  }
  return (null);
}
/// <summary>
/// Mirrors TileService.JoinDataOceanTiles: a Linework file's display name must be
/// replaced by its ImportedFileUid (plus .dxf) when the DataOcean path is built.
/// </summary>
public void DataOceanReplaceTileNameWithGuid()
{
  // for TileService: JoinDataOceanTiles()
  var rootFolder = "rootFolder";
  var filePath = "/e72bd187-0679-11e4-a8c5-005056835dd5/14e5df3c-b090-4d50-878a-7dc7be49c7dc";
  var lineworkFileName = "The LineworkFileName IsHere–.dxf";
  var importedFileUid = Guid.NewGuid().ToString();
  var importedFileType = ImportedFileType.Linework;
  DateTime? surveyedUtc = null;
  var expected = $"/{rootFolder}/e72bd187-0679-11e4-a8c5-005056835dd5/14e5df3c-b090-4d50-878a-7dc7be49c7dc/{importedFileUid}.dxf";

  var generatedName = DataOceanFileUtil.DataOceanFileName(lineworkFileName,
    importedFileType == ImportedFileType.SurveyedSurface || importedFileType == ImportedFileType.GeoTiff,
    Guid.Parse(importedFileUid), surveyedUtc);
  generatedName = DataOceanFileUtil.GeneratedFileName(generatedName, importedFileType);
  var builtDataOceanUtil = new DataOceanFileUtil($"{DataOceanUtil.PathSeparator}{rootFolder}{filePath}{DataOceanUtil.PathSeparator}{generatedName}");

  Assert.Equal(expected, builtDataOceanUtil.FullFileName);
}
/// <summary>
/// Pass Coordinate System to TRex and save a copy in DataOcean for DXF tile generation.
/// </summary>
/// <param name="projectUid">The project the coordinate system belongs to (also used as the DataOcean file Guid)</param>
/// <param name="coordinateSystemFileName">The original coordinate system file name</param>
/// <param name="coordinateSystemFileContent">The raw file content</param>
private async Task SaveCoordinateSystem(Guid projectUid, string coordinateSystemFileName, byte[] coordinateSystemFileContent)
{
  //Save to DataOcean for DXF tile generation
  var rootFolder = configStore.GetValueString("DATA_OCEAN_ROOT_FOLDER_ID");
  if (string.IsNullOrEmpty(rootFolder))
  {
    serviceExceptionHandler.ThrowServiceException(HttpStatusCode.InternalServerError, 115);
  }
  using (var ms = new MemoryStream(coordinateSystemFileContent))
  {
    await DataOceanHelper.WriteFileToDataOcean(
      ms, rootFolder, customerUid, projectUid.ToString(),
      DataOceanFileUtil.DataOceanFileName(coordinateSystemFileName, false, projectUid, null),
      log, serviceExceptionHandler, dataOceanClient, authn, projectUid, configStore);
  }
  //Save in TRex
  CoordinateSystemSettingsResult coordinateSystemSettingsResult = await productivity3dV1ProxyCoord
    .CoordinateSystemPost(projectUid, coordinateSystemFileContent, coordinateSystemFileName, customHeaders);
  //BUG FIX: the original mixed string.Format with an interpolated ($) format string, so
  //{0}/{1} were interpolated as the literal constants 0 and 1 and the real code/message
  //were never logged. Interpolate the values directly instead.
  var message = $"Sending coordinate system to TRex returned code: {coordinateSystemSettingsResult?.Code ?? -1} Message {coordinateSystemSettingsResult?.Message ?? "coordinateSystemSettingsResult == null"}.";
  log.LogDebug(message);

  if (coordinateSystemSettingsResult == null ||
      coordinateSystemSettingsResult.Code != 0 /* TASNodeErrorStatus.asneOK */)
  {
    log.LogCritical($"Failed to save coordinate system file in TRex for project {projectUid}");
  }
}
/// <summary>
/// Adds file via Trex
/// </summary>
/// <remarks>
/// Persists the imported file to the database first (TRex needs the ImportedFileUid),
/// notifies TRex for design file types, schedules raster tile generation for
/// Linework/GeoTIFF files, and returns the stored file descriptor.
/// </remarks>
protected override async Task<ContractExecutionResult> ProcessAsyncEx<T>(T item)
{
  var importedFile = CastRequestObjectTo<CreateImportedFile>(item, errorCode: 68);
  await ImportedFileRequestDatabaseHelper.CheckIfParentSurfaceExistsAsync(importedFile.ImportedFileType, importedFile.ParentUid, serviceExceptionHandler, projectRepo);

  // need to write to Db prior to
  // notifying TRex as Trex needs the ImportedFileUid
  var createImportedFileEvent = await ImportedFileRequestDatabaseHelper.CreateImportedFileinDb(
    Guid.Parse(customerUid), importedFile.ProjectUid, importedFile.ImportedFileType, importedFile.DxfUnitsType,
    importedFile.FileName, importedFile.SurveyedUtc, JsonConvert.SerializeObject(importedFile.FileDescriptor),
    importedFile.FileCreatedUtc, importedFile.FileUpdatedUtc, userEmailAddress, log, serviceExceptionHandler,
    projectRepo, importedFile.ParentUid, importedFile.Offset, importedFile.ImportedFileUid);

  if (importedFile.IsTRexDesignFileType)
  {
    await ImportedFileRequestHelper.NotifyTRexAddFile(importedFile.ProjectUid,
      importedFile.ImportedFileType, importedFile.FileName, createImportedFileEvent.ImportedFileUID,
      importedFile.SurveyedUtc, log, customHeaders, serviceExceptionHandler, tRexImportFileProxy, projectRepo);
  }

  if (importedFile.ImportedFileType == ImportedFileType.Linework || importedFile.ImportedFileType == ImportedFileType.GeoTiff)
  {
    //Run the project lookup and the re-read of the just-created file record concurrently
    var project = ProjectRequestHelper.GetProject(importedFile.ProjectUid, new Guid(customerUid), new Guid(userId), log, serviceExceptionHandler, cwsProjectClient, customHeaders);
    var existing = projectRepo.GetImportedFile(createImportedFileEvent.ImportedFileUID.ToString());
    await Task.WhenAll(project, existing);
    //Per the ternary below, tile generation for GeoTIFF gets no coordinate system file name; DXF does
    var dcFileName = importedFile.ImportedFileType == ImportedFileType.GeoTiff ?
      null : DataOceanFileUtil.DataOceanFileName(project.Result.CoordinateSystemFileName, false, importedFile.ProjectUid, null);
    //Generate raster tiles
    var jobRequest = TileGenerationRequestHelper.CreateRequest(
      importedFile.ImportedFileType,
      customerUid,
      importedFile.ProjectUid.ToString(),
      existing.Result.ImportedFileUid,
      importedFile.DataOceanRootFolder,
      importedFile.DataOceanFileName,
      dcFileName,
      importedFile.DxfUnitsType,
      importedFile.SurveyedUtc);
    await schedulerProxy.ScheduleVSSJob(jobRequest, customHeaders);
  }

  //Return the freshly-stored descriptor as seen through the list endpoint
  var fileDescriptor = new ImportedFileDescriptorSingleResult(
    (await ImportedFileRequestDatabaseHelper
      .GetImportedFileList(importedFile.ProjectUid.ToString(), log, userId, projectRepo))
    .ToImmutableList()
    .First(f => f.ImportedFileUid == createImportedFileEvent.ImportedFileUID.ToString())
  );

  log.LogInformation(
    $"CreateImportedFileV4. completed successfully. Response: {JsonConvert.SerializeObject(fileDescriptor)}");
  return (fileDescriptor);
}
/// <summary>
/// Common file processing method used by all importedFile endpoints.
/// </summary>
/// <remarks>
/// Decides create-vs-update by looking for an existing DB record, writes the file to S3
/// (TRex design types) and/or DataOcean (Linework/GeoTIFF), dispatches to the create or
/// update executor, and notifies project subscribers.
/// </remarks>
protected async Task<ImportedFileDescriptorSingleResult> UpsertFileInternal(
  string filename,
  Stream fileStream,
  Guid projectUid,
  ImportedFileType importedFileType,
  DxfUnitsType dxfUnitsType,
  DateTime fileCreatedUtc,
  DateTime fileUpdatedUtc,
  DateTime? surveyedUtc,
  ISchedulerProxy schedulerProxy,
  Guid? parentUid = null,
  double? offset = null)
{
  ImportedFileDescriptorSingleResult importedFile = null;

  //An existing record means this is an update; null means create
  var existing = await ImportedFileRequestDatabaseHelper
    .GetImportedFileForProject
      (projectUid.ToString(), filename, importedFileType, surveyedUtc,
        Logger, ProjectRepo, offset, parentUid)
    .ConfigureAwait(false);
  var creating = existing == null;
  Logger.LogInformation(
    creating
      ? $"{nameof(UpsertFileInternal)}. file doesn't exist already in DB: (unknown) projectUid {projectUid} ImportedFileType: {importedFileType} surveyedUtc {(surveyedUtc == null ? "N/A" : surveyedUtc.ToString())} parentUid {parentUid} offset: {offset}"
      : $"{nameof(UpsertFileInternal)}. file exists already in DB. Will be updated: {JsonConvert.SerializeObject(existing)}");

  FileDescriptor fileDescriptor = null;
  //On update, keep the existing file's Uid so downstream systems stay linked to it
  var importedFileUid = creating ? Guid.NewGuid() : Guid.Parse(existing.ImportedFileUid);
  var dataOceanFileName = DataOceanFileUtil.DataOceanFileName(filename,
    importedFileType == ImportedFileType.SurveyedSurface || importedFileType == ImportedFileType.GeoTiff,
    importedFileUid, surveyedUtc);

  if (importedFileType == ImportedFileType.ReferenceSurface)
  {
    //FileDescriptor not used for reference surface but validation requires values
    fileDescriptor = FileDescriptor.CreateFileDescriptor("Not applicable", "Not applicable", filename);
  }
  else
  {
    if (IsTRexDesignFileType(importedFileType))
    {
      fileDescriptor = ProjectRequestHelper.WriteFileToS3Repository(
        fileStream, projectUid.ToString(), filename,
        importedFileType == ImportedFileType.SurveyedSurface, surveyedUtc,
        Logger, ServiceExceptionHandler, persistantTransferProxyFactory.NewProxy(TransferProxyType.DesignImport));
    }
    //This is needed for ATs.
    //NOTE(review): this unconditionally overwrites the S3 descriptor assigned just above — confirm intended.
    fileDescriptor = FileDescriptor.CreateFileDescriptor(
      FileSpaceId,
      $"/{CustomerUid}/{projectUid}",
      filename);

    if (importedFileType == ImportedFileType.Linework || importedFileType == ImportedFileType.GeoTiff)
    {
      //save copy to DataOcean
      //NOTE(review): fileStream may already have been consumed by WriteFileToS3Repository above;
      //assumes the stream is still readable/positioned here — verify.
      await DataOceanHelper.WriteFileToDataOcean(
        fileStream, DataOceanRootFolderId, CustomerUid, projectUid.ToString(), dataOceanFileName,
        Logger, ServiceExceptionHandler, DataOceanClient, Authorization, importedFileUid, ConfigStore);
    }
  }

  if (creating)
  {
    var createImportedFile = new CreateImportedFile(
      projectUid, filename, fileDescriptor, importedFileType, surveyedUtc, dxfUnitsType,
      fileCreatedUtc, fileUpdatedUtc, DataOceanRootFolderId, parentUid, offset,
      importedFileUid, dataOceanFileName);

    importedFile = await WithServiceExceptionTryExecuteAsync(() =>
      RequestExecutorContainerFactory
        .Build<CreateImportedFileExecutor>(
          LoggerFactory, ConfigStore, ServiceExceptionHandler, CustomerUid, UserId, UserEmailAddress, customHeaders,
          productivity3dV2ProxyCompaction: Productivity3dV2ProxyCompaction,
          persistantTransferProxyFactory: persistantTransferProxyFactory,
          tRexImportFileProxy: tRexImportFileProxy, projectRepo: ProjectRepo, dataOceanClient: DataOceanClient,
          authn: Authorization, schedulerProxy: schedulerProxy, cwsProjectClient: CwsProjectClient)
        .ProcessAsync(createImportedFile)
    ) as ImportedFileDescriptorSingleResult;

    Logger.LogInformation(
      $"{nameof(UpsertFileInternal)}: Create completed successfully. Response: {JsonConvert.SerializeObject(importedFile)}");
  }
  else
  {
    // this also validates that this customer has access to the projectUid
    var project = await ProjectRequestHelper.GetProject(projectUid, new Guid(CustomerUid), new Guid(UserId), Logger, ServiceExceptionHandler, CwsProjectClient, customHeaders);

    //Only surveyed surfaces and GeoTIFFs carry a surveyed date through the update
    var importedFileUpsertEvent = new UpdateImportedFile(
      projectUid, project.ShortRaptorProjectId, importedFileType,
      (importedFileType == ImportedFileType.SurveyedSurface || importedFileType == ImportedFileType.GeoTiff)
        ? surveyedUtc
        : null,
      dxfUnitsType, fileCreatedUtc, fileUpdatedUtc, fileDescriptor,
      Guid.Parse(existing?.ImportedFileUid), existing.ImportedFileId,
      DataOceanRootFolderId, offset, dataOceanFileName);

    importedFile = await WithServiceExceptionTryExecuteAsync(() =>
      RequestExecutorContainerFactory
        .Build<UpdateImportedFileExecutor>(
          LoggerFactory, ConfigStore, ServiceExceptionHandler, CustomerUid, UserId, UserEmailAddress, customHeaders,
          productivity3dV2ProxyCompaction: Productivity3dV2ProxyCompaction,
          tRexImportFileProxy: tRexImportFileProxy, projectRepo: ProjectRepo, dataOceanClient: DataOceanClient,
          authn: Authorization, schedulerProxy: schedulerProxy, cwsProjectClient: CwsProjectClient)
        .ProcessAsync(importedFileUpsertEvent)
    ) as ImportedFileDescriptorSingleResult;

    Logger.LogInformation(
      $"{nameof(UpsertFileInternal)}: Update completed successfully. Response: {JsonConvert.SerializeObject(importedFile)}");
  }

  await NotificationHubClient.Notify(new ProjectChangedNotification(projectUid));
  return (importedFile);
}
/// <summary>
/// Produces a single overlay tile by fetching the pre-generated raster tiles for every
/// DXF/GeoTIFF file in the request (in parallel) and overlaying them.
/// Accepts either a DxfTileRequest (bounding box, zoom derived) or a DxfTile3dRequest
/// (explicit tile coordinates and zoom).
/// </summary>
protected override async Task<ContractExecutionResult> ProcessAsyncEx<T>(T item)
{
  List<FileData> files = null;
  int zoomLevel = 0;
  Point topLeftTile = null;
  int numTiles = 0;
  if (item is DxfTileRequest request)
  {
    files = request.files?.ToList();
    //Calculate zoom level
    zoomLevel = TileServiceUtils.CalculateZoomLevel(request.bbox.TopRightLat - request.bbox.BottomLeftLat,
      request.bbox.TopRightLon - request.bbox.BottomLeftLon);
    log.LogDebug("DxfTileExecutor: BBOX differences {0} {1} {2}",
      request.bbox.TopRightLat - request.bbox.BottomLeftLat,
      request.bbox.TopRightLon - request.bbox.BottomLeftLon,
      zoomLevel);
    numTiles = TileServiceUtils.NumberOfTiles(zoomLevel);
    Point topLeftLatLng = new Point(request.bbox.TopRightLat.LatRadiansToDegrees(),
      request.bbox.BottomLeftLon.LonRadiansToDegrees());
    topLeftTile = WebMercatorProjection.LatLngToTile(topLeftLatLng, numTiles);
    log.LogDebug($"DxfTileExecutor: zoomLevel={zoomLevel}, numTiles={numTiles}, xtile={topLeftTile.x}, ytile={topLeftTile.y}");
  }
  else if (item is DxfTile3dRequest request3d)
  {
    files = request3d.files?.ToList();
    zoomLevel = request3d.zoomLevel;
    numTiles = TileServiceUtils.NumberOfTiles(zoomLevel);
    topLeftTile = new Point { x = request3d.xTile, y = request3d.yTile };
  }
  else
  {
    ThrowRequestTypeCastException<DxfTileRequest>();
  }
  log.LogDebug($"DxfTileExecutor: {files?.Count ?? 0} files");

  //Short circuit overlaying if there no files to overlay as ForAll is an expensive operation
  if (files == null || !files.Any())
  {
    byte[] emptyOverlayData;
    using (var bitmap = new Image<Rgba32>(WebMercatorProjection.TILE_SIZE, WebMercatorProjection.TILE_SIZE))
    {
      emptyOverlayData = bitmap.BitmapToByteArray();
    }
    return (new TileResult(emptyOverlayData));
  }
  log.LogDebug(string.Join(",", files.Select(f => f.Name).ToList()));

  const string DATA_OCEAN_ROOT_FOLDER_ID_KEY = "DATA_OCEAN_ROOT_FOLDER_ID";
  var dataOceanRootFolder = configStore.GetValueString(DATA_OCEAN_ROOT_FOLDER_ID_KEY);
  if (string.IsNullOrEmpty(dataOceanRootFolder))
  {
    throw new ArgumentException($"Missing environment variable {DATA_OCEAN_ROOT_FOLDER_ID_KEY}");
  }

  //For GeoTIFF files, use the latest version of a file
  var geoTiffFiles = files.Where(x => x.ImportedFileType == ImportedFileType.GeoTiff).ToList();
  if (geoTiffFiles.Any())
  {
    //Find any with multiple versions and remove old ones from the list
    var latestFiles = geoTiffFiles.GroupBy(g => g.Name).Select(g => g.OrderBy(o => o.SurveyedUtc).Last()).ToList();
    foreach (var geoTiffFile in geoTiffFiles)
    {
      if (!latestFiles.Contains(geoTiffFile))
      {
        files.Remove(geoTiffFile);
      }
    }
  }

  //BUG FIX: the concurrent tasks previously Add()ed into a shared List<byte[]>;
  //List<T> is not thread-safe, so tiles could be lost or the list corrupted under
  //Task.WhenAll. Each task now returns its tile data (or null) and the results are
  //collected after all tasks complete.
  var fileTasks = files.Select(async file =>
  {
    //Check file type to see if it has tiles
    if (file.ImportedFileType == ImportedFileType.Linework || file.ImportedFileType == ImportedFileType.GeoTiff)
    {
      var fullPath = DataOceanFileUtil.DataOceanPath(dataOceanRootFolder, file.CustomerUid, file.ProjectUid);
      var fileName = DataOceanFileUtil.DataOceanFileName(file.Name,
        file.ImportedFileType == ImportedFileType.SurveyedSurface || file.ImportedFileType == ImportedFileType.GeoTiff,
        Guid.Parse(file.ImportedFileUid), file.SurveyedUtc);
      fileName = DataOceanFileUtil.GeneratedFileName(fileName, file.ImportedFileType);
      if (zoomLevel >= file.MinZoomLevel)
      {
        if (zoomLevel <= file.MaxZoomLevel || file.MaxZoomLevel == 0) //0 means not calculated
        {
          return await GetTileAtRequestedZoom(topLeftTile, zoomLevel, fullPath, fileName);
        }
        if (zoomLevel - file.MaxZoomLevel <= 5) //Don't try to scale if the difference is too excessive
        {
          return await GetTileAtHigherZoom(topLeftTile, zoomLevel, fullPath, fileName, file.MaxZoomLevel, numTiles);
        }
        log.LogDebug(
          "DxfTileExecutor: difference between requested and maximum zooms too large; not even going to try to scale tile");
      }
    }
    return null;
  });
  var tileList = (await Task.WhenAll(fileTasks)).Where(t => t != null && t.Length > 0).ToList();

  //Overlay the tiles. Return an empty tile if none to overlay.
  log.LogDebug($"DxfTileExecutor: Overlaying {tileList.Count} tiles");
  byte[] overlayData = TileOverlay.OverlayTiles(tileList);
  return (new TileResult(overlayData));
}
/// <summary>
/// Generates raster tiles using the Pegasus API and stores them in the data ocean.
/// The source is either a DXF file or a GeoTIFF file.
/// </summary>
/// <param name="fileName">The path and file name of the source file</param>
/// <param name="createExecutionMessage">The details of tile generation for Pegasus</param>
/// <param name="customHeaders"></param>
/// <param name="setJobIdAction">Callback used to surface the Pegasus job id, status and failure events to the caller</param>
/// <returns>Metadata for the generated tiles including the zoom range</returns>
private async Task<TileMetadata> GenerateTiles(string fileName, CreateExecutionMessage createExecutionMessage, IHeaderDictionary customHeaders, Action<IHeaderDictionary> setJobIdAction)
{
  Log.LogDebug($"Pegasus execution: {JsonConvert.SerializeObject(createExecutionMessage)}");
  TileMetadata metadata = null;
  //Delete any old tiles. To avoid 2 traversals just try the delete anyway without checking for existence.
  await DeleteTiles(fileName, customHeaders);
  //In DataOcean this is actually a multifile not a folder
  string tileFolderFullName = new DataOceanFileUtil(fileName).GeneratedTilesFolder;
  //Get the parent folder id
  var parts = tileFolderFullName.Split(DataOceanUtil.PathSeparator);
  var tileFolderName = parts[parts.Length - 1];
  var parentPath = tileFolderFullName.Substring(0, tileFolderFullName.Length - tileFolderName.Length - 1);
  var parentId = await dataOceanClient.GetFolderId(parentPath, customHeaders);
  //Set common parameters
  createExecutionMessage.Execution.Parameters.ParentId = parentId;
  createExecutionMessage.Execution.Parameters.Name = tileFolderName;
  createExecutionMessage.Execution.Parameters.TileOrder = TILE_ORDER;
  createExecutionMessage.Execution.Parameters.MultiFile = "true";
  createExecutionMessage.Execution.Parameters.Public = "false";

  //1. Create the execution
  const string baseRoute = "/api/executions";
  var payload = JsonConvert.SerializeObject(createExecutionMessage);
  PegasusExecutionResult executionResult;
  using (var ms = new MemoryStream(Encoding.UTF8.GetBytes(payload)))
  {
    executionResult = await gracefulClient.ExecuteRequest<PegasusExecutionResult>($"{pegasusBaseUrl}{baseRoute}", ms, customHeaders, HttpMethod.Post);
  }
  if (executionResult == null)
  {
    throw new ServiceException(HttpStatusCode.InternalServerError,
      new ContractExecutionResult(ContractExecutionStatesEnum.InternalProcessingError,
        $"Failed to create execution for {fileName}"));
  }
  setJobIdAction?.Invoke(new HeaderDictionary { { PEGASUS_LOG_JOBID_KEY, executionResult.Execution.Id.ToString() } });

  //2. Start the execution
  Log.LogDebug($"Starting execution for {fileName}");
  var executionRoute = $"{baseRoute}/{executionResult.Execution.Id}";
  var startExecutionRoute = $"{executionRoute}/start";
  var startResult = await gracefulClient.ExecuteRequest<PegasusExecutionAttemptResult>($"{pegasusBaseUrl}{startExecutionRoute}", null, customHeaders, HttpMethod.Post);
  if (startResult == null)
  {
    throw new ServiceException(HttpStatusCode.InternalServerError,
      new ContractExecutionResult(ContractExecutionStatesEnum.InternalProcessingError,
        $"Failed to start execution for {fileName}"));
  }

  //3. Monitor status of execution until done
  Log.LogDebug($"Monitoring execution status for {fileName}");
  var endJob = DateTime.Now + TimeSpan.FromMinutes(executionTimeout);
  var done = false;
  var success = true;
  while (!done && DateTime.Now <= endJob)
  {
    if (executionWaitInterval > 0)
    {
      await Task.Delay(executionWaitInterval);
    }
    //Poll with up to 3 Polly retries (1s apart) around transient failures
    var policyResult = await Policy
      .Handle<Exception>()
      .WaitAndRetryAsync(
        3,
        attempt => TimeSpan.FromMilliseconds(1000),
        (exception, calculatedWaitDuration) =>
        {
          Log.LogError(exception, $"PollyAsync: Failed attempt to query Pegasus. Jobid {executionResult.Execution.Id.ToString()}");
        })
      .ExecuteAndCaptureAsync(async () =>
      {
        Log.LogDebug($"Executing monitoring request for {fileName} and jobid {executionResult.Execution.Id.ToString()}");
        executionResult = await gracefulClient.ExecuteRequest<PegasusExecutionResult>($"{pegasusBaseUrl}{executionRoute}", null, customHeaders, HttpMethod.Get);
        var status = executionResult.Execution.ExecutionStatus;
        //FINISHED or SUCCEEDED both count as success
        success = string.Compare(status, ExecutionStatus.FINISHED, StringComparison.OrdinalIgnoreCase) == 0 ||
                  string.Compare(status, ExecutionStatus.SUCCEEDED, StringComparison.OrdinalIgnoreCase) == 0;
        if (string.Compare(status, ExecutionStatus.FAILED, StringComparison.OrdinalIgnoreCase) == 0)
        {
          //Try to retrieve why it failed
          var jobEventsStream = await gracefulClient.ExecuteRequestAsStreamContent($"{pegasusBaseUrl}{executionRoute}/events", HttpMethod.Get, customHeaders);
          if (jobEventsStream != null)
          {
            var jobEvents = await jobEventsStream.ReadAsStringAsync();
            Log.LogError($"Pegasus job {executionResult.Execution.Id} failed to execute with the events: {jobEvents}");
            setJobIdAction?.Invoke(new HeaderDictionary { { PEGASUS_LOG_EVENTS_KEY, jobEvents } });
          }
          else
          {
            Log.LogDebug($"Unable to resolve jobEventsStream for execution id {executionResult.Execution.Id}");
          }
        }
        //Done when either succeeded or definitively failed; otherwise keep polling
        done = success || string.Compare(status, ExecutionStatus.FAILED, StringComparison.OrdinalIgnoreCase) == 0;
        setJobIdAction?.Invoke(new HeaderDictionary { { PEGASUS_LOG_RESULT_KEY, status } });
        Log.LogDebug($"Execution status {status} for {fileName} and jobid {executionResult.Execution.Id.ToString()}");
      });
    if (policyResult.FinalException != null)
    {
      Log.LogCritical(policyResult.FinalException, $"TileGeneration PollyAsync: {GetType().FullName} failed with exception for jobid {executionResult.Execution.Id.ToString()}: ");
      throw policyResult.FinalException;
    }
  }
  if (!done)
  {
    //Timed out: fall through and return null metadata (no exception)
    Log.LogInformation($"{nameof(GenerateTiles)} timed out: {fileName}");
  }
  else if (!success)
  {
    Log.LogInformation($"{nameof(GenerateTiles)} failed: {fileName}");
    throw new ServiceException(HttpStatusCode.InternalServerError,
      new ContractExecutionResult(ContractExecutionStatesEnum.InternalProcessingError,
        $"Failed to generate tiles for {fileName}"));
  }
  if (success)
  {
    /*
     * Can't delete as not mutable
     *
     * //4. Delete the execution
     * Log.LogDebug($"Deleting execution for {dxfFileName}");
     * await gracefulClient.ExecuteRequest($"{pegasusBaseUrl}{executionRoute}", null, customHeaders, HttpMethod.Delete, null, 0, false);
     */
    //5. Get the zoom range from the tile metadata file
    var metadataFileName = new DataOceanFileUtil(fileName).TilesMetadataFileName;
    Log.LogDebug($"Getting tiles metadata for {metadataFileName}");
    var stream = await dataOceanClient.GetFile(metadataFileName, customHeaders);
    using (var sr = new StreamReader(stream))
    using (var jtr = new JsonTextReader(sr))
    {
      metadata = new JsonSerializer().Deserialize<TileMetadata>(jtr);
    }
  }
  Log.LogInformation($"{nameof(GenerateTiles)}: returning {(metadata == null ? "null" : JsonConvert.SerializeObject(metadata))}");
  return (metadata);
}
/// <summary>
/// Deletes an imported file. Removes the record from the project DB first, then (for TRex
/// design file types) deletes the design from TRex and the original from the S3 bucket, and
/// (for Linework/GeoTIFF) deletes the file and its generated tiles from DataOcean.
/// If the DataOcean delete fails, the DB delete is rolled back and a service exception thrown.
/// </summary>
protected override async Task<ContractExecutionResult> ProcessAsyncEx<T>(T item)
{
  var deleteImportedFile = CastRequestObjectTo<DeleteImportedFile>(item, errorCode: 68);

  // A file still referenced by a filter or by reference surfaces must not be deleted.
  await CheckIfUsedInFilter(deleteImportedFile);
  await CheckIfHasReferenceSurfacesAsync(deleteImportedFile);

  // DB change must be made before productivity3dV2ProxyNotification.DeleteFile is called
  // as it calls back here to get the list of active files.
  // (Don't think the above comment applies any more!)
  await ImportedFileRequestDatabaseHelper.DeleteImportedFileInDb(
    deleteImportedFile.ProjectUid, deleteImportedFile.ImportedFileUid,
    serviceExceptionHandler, projectRepo);

  ImportedFileInternalResult importedFileInternalResult = null;

  if (deleteImportedFile.IsTRexDesignFileType)
  {
    // Delete the design in TRex...
    await ImportedFileRequestHelper.NotifyTRexDeleteFile(
      deleteImportedFile.ProjectUid, deleteImportedFile.ImportedFileType,
      deleteImportedFile.FileDescriptor.FileName, deleteImportedFile.ImportedFileUid,
      deleteImportedFile.SurveyedUtc,
      log, customHeaders, serviceExceptionHandler, tRexImportFileProxy);

    // ...and remove the original file from the S3 bucket.
    ProjectRequestHelper.DeleteFileFromS3Repository(
      deleteImportedFile.ProjectUid.ToString(), deleteImportedFile.FileDescriptor.FileName,
      deleteImportedFile.ImportedFileType == ImportedFileType.SurveyedSurface,
      deleteImportedFile.SurveyedUtc,
      log, serviceExceptionHandler,
      persistantTransferProxyFactory.NewProxy(TransferProxyType.DesignImport));
  }

  if (deleteImportedFile.ImportedFileType == ImportedFileType.Linework ||
      deleteImportedFile.ImportedFileType == ImportedFileType.GeoTiff)
  {
    var dataOceanFileName = DataOceanFileUtil.DataOceanFileName(
      deleteImportedFile.FileDescriptor.FileName,
      deleteImportedFile.ImportedFileType == ImportedFileType.SurveyedSurface ||
      deleteImportedFile.ImportedFileType == ImportedFileType.GeoTiff,
      deleteImportedFile.ImportedFileUid, deleteImportedFile.SurveyedUtc);

    importedFileInternalResult = await DataOceanHelper.DeleteFileFromDataOcean(
      dataOceanFileName, deleteImportedFile.DataOceanRootFolder, customerUid,
      deleteImportedFile.ProjectUid, deleteImportedFile.ImportedFileUid,
      log, dataOceanClient, authn, configStore);

    // Delete the generated DXF tiles as well (attempted even if the file delete above
    // failed, matching the original behavior).
    var dxfFileName = DataOceanFileUtil.GeneratedFileName(dataOceanFileName, deleteImportedFile.ImportedFileType);
    var dataOceanPath = DataOceanFileUtil.DataOceanPath(
      deleteImportedFile.DataOceanRootFolder, customerUid, deleteImportedFile.ProjectUid.ToString());
    // FIX: use the DataOcean path separator rather than Path.DirectorySeparatorChar —
    // the OS separator is '\' on Windows, which produces a malformed DataOcean path.
    // Every other DataOcean path in this file is built with DataOceanUtil.PathSeparator.
    var fullFileName = $"{dataOceanPath}{DataOceanUtil.PathSeparator}{dxfFileName}";
    await pegasusClient.DeleteTiles(fullFileName, DataOceanHelper.CustomHeaders(authn));
  }

  if (importedFileInternalResult != null)
  {
    // DataOcean delete failed: roll back the DB delete, then surface the failure.
    await ImportedFileRequestDatabaseHelper.UndeleteImportedFile(
      deleteImportedFile.ProjectUid, deleteImportedFile.ImportedFileUid,
      serviceExceptionHandler, projectRepo);
    serviceExceptionHandler.ThrowServiceException(
      importedFileInternalResult.StatusCode, importedFileInternalResult.ErrorNumber,
      importedFileInternalResult.ResultCode, importedFileInternalResult.ErrorMessage1);
  }

  return new ContractExecutionResult();
}
/// <summary>
/// Processes the upsert of an existing imported file: notifies TRex for design file types,
/// schedules tile regeneration for Linework/GeoTIFF files, then updates the DB record and
/// returns the refreshed file descriptor.
/// </summary>
protected override async Task<ContractExecutionResult> ProcessAsyncEx<T>(T item)
{
  var request = CastRequestObjectTo<UpdateImportedFile>(item, errorCode: 68);

  // Guard: the file must already exist; updates never create.
  var existing = await projectRepo.GetImportedFile(request.ImportedFileUid.ToString());
  if (existing == null)
  {
    throw new ServiceException(HttpStatusCode.BadRequest,
      new ContractExecutionResult(
        ProjectErrorCodesProvider.GetErrorNumberwithOffset(122),
        ProjectErrorCodesProvider.FirstNameWithOffset(122)));
  }

  if (request.IsTRexDesignFileType)
  {
    await ImportedFileRequestHelper.NotifyTRexUpdateFile(
      request.ProjectUid, request.ImportedFileType, request.FileDescriptor.FileName,
      request.ImportedFileUid, request.SurveyedUtc,
      log, customHeaders, serviceExceptionHandler, tRexImportFileProxy);
  }

  var needsTileRegeneration =
    request.ImportedFileType == ImportedFileType.Linework ||
    request.ImportedFileType == ImportedFileType.GeoTiff;

  if (needsTileRegeneration)
  {
    // Linework tile generation additionally needs the project's coordinate-system file name.
    string coordSysFileName = null;
    if (request.ImportedFileType == ImportedFileType.Linework)
    {
      var project = await ProjectRequestHelper.GetProject(
        request.ProjectUid, new Guid(customerUid), new Guid(userId),
        log, serviceExceptionHandler, cwsProjectClient, customHeaders);
      coordSysFileName = DataOceanFileUtil.DataOceanFileName(
        project.CoordinateSystemFileName, false, request.ProjectUid, null);
    }

    var tileJob = TileGenerationRequestHelper.CreateRequest(
      request.ImportedFileType, customerUid, request.ProjectUid.ToString(),
      existing.ImportedFileUid, request.DataOceanRootFolder, request.DataOceanFileName,
      coordSysFileName, request.DxfUnitsTypeId, request.SurveyedUtc);
    await schedulerProxy.ScheduleVSSJob(tileJob, customHeaders);
  }

  // if all succeeds, update Db and put update to kafka que
  await ImportedFileRequestDatabaseHelper.UpdateImportedFileInDb(
    existing, existing.FileDescriptor, request.SurveyedUtc,
    existing.MinZoomLevel, existing.MaxZoomLevel,
    request.FileCreatedUtc, request.FileUpdatedUtc, userEmailAddress,
    log, serviceExceptionHandler, projectRepo);

  // Re-read the file list so the caller gets the post-update descriptor.
  var allFiles = await ImportedFileRequestDatabaseHelper.GetImportedFileList(
    request.ProjectUid.ToString(), log, userId, projectRepo);
  var fileDescriptor = new ImportedFileDescriptorSingleResult(
    allFiles.ToImmutableList()
            .FirstOrDefault(f => f.ImportedFileUid == request.ImportedFileUid.ToString()));

  log.LogInformation(
    $"UpdateImportedFileExecutor. entry {(fileDescriptor.ImportedFileDescriptor == null ? "not " : "")}retrieved from DB : {JsonConvert.SerializeObject(fileDescriptor)}");

  return fileDescriptor;
}