private ContractExecutionResult ProcessTagFile(CompactionTagFileRequest request, string method)
{
  if (request == null)
  {
    _logger.LogWarning("Empty request passed");
    return ContractExecutionResult.ErrorResult("Empty Request");
  }

  request.Validate();

  _logger.LogInformation($"Received Tag File (via {method}) with filename: {request.FileName}. TCC Org: {request.OrgId}. Data Length: {request.Data.Length}");

  using (var data = new MemoryStream(request.Data))
  {
    _logger.LogInformation($"Uploading Tag File {request.FileName}");
    var path = GetS3Key(method, request.FileName, request.OrgId);
    // S3 needs a full path including the filename, but TCC needs the path and filename as two separate variables
    var s3FullPath = path + request.FileName;
    _transferProxyFactory.NewProxy(TransferProxyType.TagFileGatewayArchive).Upload(data, s3FullPath);
    _logger.LogInformation($"Successfully uploaded Tag File {request.FileName}");
  }

  return new ContractExecutionResult(0);
}
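The GetS3Key helper isn't shown in this listing. A minimal sketch of the shape it might take, assuming the archive prefix is composed of the delivery method and the TCC org id and ends with a separator so the caller can append the filename (the real implementation may derive further sub-folders, e.g. from the tag file name):

// Hypothetical sketch only; the real GetS3Key is not shown here.
// Assumes a "{method}/{orgId}/" prefix with a trailing '/' for the caller to append the filename.
private static string GetS3Key(string method, string tagFileName, string tccOrgId)
{
  return string.IsNullOrEmpty(tccOrgId)
    ? $"{method}/"
    : $"{method}/{tccOrgId}/";
}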
/// <summary>
/// Constructor with dependency injection
/// </summary>
public ExportJob(IApiClient apiClient, ITransferProxyFactory transferProxyFactory, ILoggerFactory logger)
{
  _log = logger.CreateLogger<ExportJob>();
  _apiClient = apiClient;
  _transferProxyFactory = transferProxyFactory;
  _transferProxy = _transferProxyFactory.NewProxy(TransferProxyType.Temporary);
}
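For context, a minimal composition-root sketch showing how these dependencies might be registered in the ASP.NET Core container. The concrete ApiClient and TransferProxyFactory type names are assumptions, not confirmed by this listing:

// Hypothetical sketch (Startup.ConfigureServices); concrete type names are assumed.
public void ConfigureServices(IServiceCollection services)
{
  services.AddLogging(); // provides the ILoggerFactory injected above
  services.AddSingleton<IApiClient, ApiClient>();
  services.AddSingleton<ITransferProxyFactory, TransferProxyFactory>();
  services.AddTransient<ExportJob>(); // all three constructor dependencies resolved from the container
}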
public async Task<ScheduleJobResult> BackgroundUpload(
  FlowFile file,
  [FromQuery] Guid projectUid,
  [FromQuery] ImportedFileType importedFileType,
  [FromQuery] DxfUnitsType dxfUnitsType,
  [FromQuery] DateTime fileCreatedUtc,
  [FromQuery] DateTime fileUpdatedUtc,
  [FromQuery] DateTime? surveyedUtc,
  [FromServices] ISchedulerProxy scheduler,
  [FromServices] ITransferProxyFactory transferProxyFactory)
{
  if (importedFileType == ImportedFileType.ReferenceSurface)
  {
    ServiceExceptionHandler.ThrowServiceException(HttpStatusCode.BadRequest, 122);
  }

  FlowJsFileImportDataValidator.ValidateUpsertImportedFileRequest(
    file, projectUid, importedFileType, dxfUnitsType, fileCreatedUtc, fileUpdatedUtc,
    UserEmailAddress, surveyedUtc, null, null);

  Logger.LogInformation(
    $"{nameof(BackgroundUpload)}: file: {file.flowFilename} path {file.path} projectUid {projectUid} ImportedFileType: {importedFileType} " +
    $"DxfUnitsType: {dxfUnitsType} surveyedUtc {(surveyedUtc == null ? "N/A" : surveyedUtc.ToString())}");

  if (string.Equals(Request.Method, HttpMethod.Post.ToString(), StringComparison.OrdinalIgnoreCase))
  {
    await ValidateFileDoesNotExist(projectUid.ToString(), file.flowFilename, importedFileType, surveyedUtc, null, null);
  }

  var s3Path = $"project/importedfile/{Guid.NewGuid()}.dat";
  var fileStream = System.IO.File.Open(file.path, FileMode.Open, FileAccess.Read);
  var transferProxy = transferProxyFactory.NewProxy(TransferProxyType.Temporary);
  transferProxy.Upload(fileStream, s3Path);

  var baseUrl = Request.Host.ToUriComponent();
  // The QueryString will already have values in it, so extra parameters are appended with '&' rather than starting a new query with '?'
  var callbackUrl = $"http://{baseUrl}/internal/v6/importedfile{Request.QueryString}";
  callbackUrl += $"&filename={WebUtility.UrlEncode(file.flowFilename)}&awsFilePath={WebUtility.UrlEncode(s3Path)}";
  Logger.LogInformation($"{nameof(BackgroundUpload)}: callbackUrl {callbackUrl}");

  var executionTimeout = ConfigStore.GetValueInt("PEGASUS_EXECUTION_TIMEOUT_MINS", 5) * 60000; // minutes converted to milliseconds

  var request = new ScheduleJobRequest
  {
    Filename = file.flowFilename,
    Method = "GET", // match the internal upload method
    Url = callbackUrl,
    Timeout = executionTimeout
  };
  request.SetStringPayload(string.Empty);

  var headers = Request.Headers.GetCustomHeaders();

  return await scheduler.ScheduleBackgroundJob(request, headers);
}
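To make the URL round-trip concrete, here is a small standalone demonstration of the callback-URL composition above. The host and query values are illustrative only; note how UrlEncode protects the S3 path's slashes so they survive as a single query parameter:

// Hypothetical, runnable demonstration of the callback-URL composition.
using System;
using System.Net;

class CallbackUrlDemo
{
  static void Main()
  {
    var queryString = "?projectUid=c8ae5c33-0000-0000-0000-000000000000&importedFileType=DesignSurface"; // assumed incoming query
    var s3Path = $"project/importedfile/{Guid.NewGuid()}.dat";
    var callbackUrl = $"http://localhost/internal/v6/importedfile{queryString}"
                    + $"&filename={WebUtility.UrlEncode("design.ttm")}&awsFilePath={WebUtility.UrlEncode(s3Path)}";
    // e.g. ...&filename=design.ttm&awsFilePath=project%2Fimportedfile%2F<guid>.dat
    Console.WriteLine(callbackUrl);
  }
}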
public FileStreamResult GetExportJobResult(string jobId)
{
  var status = GetJobStatus(jobId);

  if (string.IsNullOrEmpty(status.Key))
  {
    throw new ServiceException(HttpStatusCode.BadRequest,
      new ContractExecutionResult(ContractExecutionStatesEnum.ValidationError, $"Missing job download link for {jobId}"));
  }

  // Note: .Result blocks the calling thread until the download completes
  return transferProxyFactory.NewProxy(TransferProxyType.Export).Download(status.Key).Result;
}
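Since Download returns a Task, blocking on .Result can tie up request threads under load. A minimal async rewrite, assuming the surrounding controller permits an async action (the Async suffix on the name is a convention choice, not from the source):

// Hypothetical async variant: awaits the download instead of blocking on .Result.
public async Task<FileStreamResult> GetExportJobResultAsync(string jobId)
{
  var status = GetJobStatus(jobId);

  if (string.IsNullOrEmpty(status.Key))
  {
    throw new ServiceException(HttpStatusCode.BadRequest,
      new ContractExecutionResult(ContractExecutionStatesEnum.ValidationError, $"Missing job download link for {jobId}"));
  }

  return await transferProxyFactory.NewProxy(TransferProxyType.Export).Download(status.Key);
}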
public async Task<ImportedFileDescriptorSingleResult> InternalImportedFileV6(
  [FromQuery] string filename,
  [FromQuery] string awsFilePath,
  [FromQuery] Guid projectUid,
  [FromQuery] ImportedFileType importedFileType,
  [FromQuery] DxfUnitsType dxfUnitsType,
  [FromQuery] DateTime fileCreatedUtc,
  [FromQuery] DateTime fileUpdatedUtc,
  [FromQuery] DateTime? surveyedUtc,
  [FromServices] ITransferProxyFactory transferProxyFactory,
  [FromServices] ISchedulerProxy schedulerProxy)
{
  if (importedFileType == ImportedFileType.ReferenceSurface)
  {
    ServiceExceptionHandler.ThrowServiceException(HttpStatusCode.BadRequest, 122);
  }

  ImportedFileDescriptorSingleResult importedFileResult = null;
  var transferProxy = transferProxyFactory.NewProxy(TransferProxyType.Temporary);

  Logger.LogInformation(
    $"{nameof(InternalImportedFileV6)}: filename: {filename} awsFilePath {awsFilePath} projectUid {projectUid} ImportedFileType: {importedFileType} " +
    $"DxfUnitsType: {dxfUnitsType} surveyedUtc {(surveyedUtc == null ? "N/A" : surveyedUtc.ToString())}");

  // Retrieve the stored file from AWS
  var fileResult = await transferProxy.Download(awsFilePath);
  if (fileResult == null)
  {
    ServiceExceptionHandler.ThrowServiceException(HttpStatusCode.InternalServerError, 55);
  }

  using (var ms = new MemoryStream())
  {
    // Depending on the size of the file in S3, the returned stream may or may not support seeking.
    // TCC needs to know the length of the file, and the length can't be determined if the stream can't seek.
    // To work around this, the entire stream is downloaded and copied to memory here so TCC can upload the file.
    // Not the best solution for extra large files, but TCC doesn't support uploading without a file size AFAIK.
    fileResult.FileStream.CopyTo(ms);
    importedFileResult = await UpsertFileInternal(filename, ms, projectUid, importedFileType, dxfUnitsType,
      fileCreatedUtc, fileUpdatedUtc, surveyedUtc, schedulerProxy);
  }

  Logger.LogInformation(
    $"{nameof(InternalImportedFileV6)}: Completed successfully. Response: {JsonConvert.SerializeObject(importedFileResult)}");

  return importedFileResult;
}
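The buffering trick above generalizes. A minimal sketch of a reusable helper that only copies when it must, assuming nothing about the input beyond the standard Stream contract (the name EnsureSeekable is hypothetical):

// Hypothetical helper: returns a seekable stream with a known Length,
// buffering into memory only when the source can't seek.
private static Stream EnsureSeekable(Stream source)
{
  if (source.CanSeek)
    return source; // already seekable; Length is available without copying

  var buffered = new MemoryStream();
  source.CopyTo(buffered);
  buffered.Seek(0, SeekOrigin.Begin);
  return buffered;
}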
public async Task<FileResult> GetMapTileDataTtm(
  [FromQuery] Guid projectUid,
  [FromQuery] Guid? filterUid,
  [FromQuery] Guid? designUid,
  [FromQuery] DisplayMode mode,
  [FromServices] IPreferenceProxy prefProxy,
  [FromServices] ITRexCompactionDataProxy tRexCompactionDataProxy,
#if RAPTOR
  [FromServices] IASNodeClient raptorClient,
#endif
  [FromServices] IProductionDataRequestFactory requestFactory)
{
  const double SURFACE_EXPORT_TOLERANCE = 0.05;
  const byte COORDS_ARRAY_LENGTH = 3;

  var tins = new List<TrimbleTINModel>();

  var projectTask = ((RaptorPrincipal)User).GetProject(projectUid);
  var projectSettings = GetProjectSettingsTargets(projectUid);
  var userPreferences = prefProxy.GetUserPreferences(GetUserId(), CustomHeaders);
  var filter = GetCompactionFilter(projectUid, filterUid);
  var designTask = GetAndValidateDesignDescriptor(projectUid, designUid);

  await Task.WhenAll(projectTask, projectSettings, userPreferences, designTask);

  // Check the awaited result, not the task itself
  if (userPreferences.Result == null)
  {
    throw new ServiceException(HttpStatusCode.BadRequest,
      new ContractExecutionResult(ContractExecutionStatesEnum.FailedToGetResults,
        "Failed to retrieve preferences for current user"));
  }

  var project = projectTask.Result;
  var design = designTask.Result;

  // Get the terrain mesh
  var exportRequest = requestFactory.Create<ExportRequestHelper>(r => r
      .ProjectUid(projectUid)
      .ProjectId(project.ShortRaptorProjectId)
      .Headers(CustomHeaders)
      .ProjectSettings(projectSettings.Result)
      .Filter(filter.Result))
    .SetUserPreferences(userPreferences.Result)
#if RAPTOR
    .SetRaptorClient(raptorClient)
#endif
    .SetProjectDescriptor(project)
    .CreateExportRequest(
      null, // startUtc
      null, // endUtc
      CoordType.LatLon,
      ExportTypes.SurfaceExport,
      "test.zip",
      true,
      false,
      OutputTypes.VedaAllPasses,
      string.Empty,
      SURFACE_EXPORT_TOLERANCE);

  exportRequest.Validate();

  // First get the export of production data from Raptor; it comes in a zip file
  var result = await WithServiceExceptionTryExecuteAsync(() =>
    RequestExecutorContainerFactory.Build<CompactionExportExecutor>(LoggerFactory,
#if RAPTOR
        raptorClient,
#endif
        configStore: ConfigStore,
        trexCompactionDataProxy: tRexCompactionDataProxy,
        customHeaders: CustomHeaders,
        userId: GetUserId(),
        fileImportProxy: FileImportProxy)
      .ProcessAsync(exportRequest)) as CompactionExportResult;

  if (result != null)
  {
    var zipStream = (await transferProxyFactory.NewProxy(TransferProxyType.Temporary).Download(result.DownloadLink)).FileStream;

    // If we didn't get a valid file, then we failed to read the ttm from Raptor
    if (zipStream == null)
    {
      throw new ServiceException(HttpStatusCode.BadRequest,
        new ContractExecutionResult(ContractExecutionStatesEnum.FailedToGetResults, "Failed to retrieve data"));
    }

    using (var archive = new ZipArchive(zipStream))
    {
      // The zip file will have exactly one file in it
      if (archive.Entries.Count == 1)
      {
        try
        {
          var tin = new TrimbleTINModel();
          using (var stream = archive.Entries[0].Open() as DeflateStream)
          using (var ms = new MemoryStream())
          {
            // Unzip the file and copy it to memory, as the TIN model needs both the byte array and the stream
            stream.CopyTo(ms);
            ms.Seek(0, SeekOrigin.Begin);

            tin.LoadFromStream(ms, ms.GetBuffer());
            tins.Add(tin);
          }
        }
        catch (TTMFileReadException e)
        {
          // Not valid, continue
          Log.LogWarning(e, "Failed to parse ttm in zip file");
        }
      }
    }
  }

  // If we didn't get a valid file, then we failed to read the ttm from Raptor
  if (tins.Count == 0)
  {
    throw new ServiceException(HttpStatusCode.BadRequest,
      new ContractExecutionResult(ContractExecutionStatesEnum.FailedToGetResults, "Failed to retrieve data"));
  }

  // If we have a design request, get the ttm and add it for parsing
  if (design != null)
  {
    // TODO: This used to get the file from TCC. This code to get it from S3 needs testing.
    // Leave for now, as this endpoint is not currently supported.

    // Retrieve the stored file from AWS
    var s3FullPath = $"{projectUid}/{design.File.FileName}";
    var transferProxy = transferProxyFactory.NewProxy(TransferProxyType.Temporary);
    var fileResult = await transferProxy.Download(s3FullPath);

    if (fileResult?.FileStream != null)
    {
      using (var ms = new MemoryStream())
      {
        fileResult.FileStream.CopyTo(ms);
        ms.Seek(0, SeekOrigin.Begin);

        var tin = new TrimbleTINModel();
        tin.LoadFromStream(ms, ms.GetBuffer());
        tins.Add(tin);
      }
    }
  }

  // Calculate the bounding box for the model (including the design, if supplied)
  var minEasting = tins.Select(t => t.Header.MinimumEasting).Min();
  var maxEasting = tins.Select(t => t.Header.MaximumEasting).Max();
  var minNorthing = tins.Select(t => t.Header.MinimumNorthing).Min();
  var maxNorthing = tins.Select(t => t.Header.MaximumNorthing).Max();

  var centerEasting = (maxEasting + minEasting) / 2.0;
  var centerNorthing = (maxNorthing + minNorthing) / 2.0;

  TwoDConversionCoordinate[] convertedCoordinates;

#if RAPTOR
  if (UseTRexGateway("ENABLE_TREX_GATEWAY_TILES"))
  {
#endif
    var conversionCoordinates = new[]
    {
      new TwoDConversionCoordinate(minEasting, minNorthing),
      new TwoDConversionCoordinate(maxEasting, maxNorthing),
      new TwoDConversionCoordinate(centerEasting, centerNorthing)
    };

    var conversionRequest = new CoordinateConversionRequest(projectUid, TwoDCoordinateConversionType.NorthEastToLatLon, conversionCoordinates);
    var conversionResult = await trexCompactionDataProxy.SendDataPostRequest<CoordinateConversionResult, CoordinateConversionRequest>(conversionRequest, "/coordinateconversion", CustomHeaders);

    if (conversionResult.Code != 0 || conversionResult.ConversionCoordinates.Length != COORDS_ARRAY_LENGTH)
    {
      throw new ServiceException(HttpStatusCode.BadRequest,
        new ContractExecutionResult(ContractExecutionStatesEnum.FailedToGetResults, "Failed to retrieve long lat for boundary"));
    }

    convertedCoordinates = conversionResult.ConversionCoordinates;
#if RAPTOR
  }
  else
  {
    var points = new TWGS84FenceContainer
    {
      FencePoints = new[]
      {
        TWGS84Point.Point(minEasting, minNorthing),
        TWGS84Point.Point(maxEasting, maxNorthing),
        TWGS84Point.Point(centerEasting, centerNorthing),
      }
    };

    // Convert the northing/easting values to longitude/latitude values
    var res = raptorClient.GetGridCoordinates(project.LegacyProjectId, points, TCoordConversionType.ctNEEtoLLH, out var coordPointList);
    if (res != TCoordReturnCode.nercNoError)
    {
      throw new ServiceException(HttpStatusCode.BadRequest,
        new ContractExecutionResult(ContractExecutionStatesEnum.FailedToGetResults, "Failed to retrieve long lat for boundary"));
    }

    convertedCoordinates = coordPointList.Points.Coords.Select(c => new TwoDConversionCoordinate(c.X, c.Y)).ToArray();
  }
#endif

  // The values returned from Raptor/TRex are in radians, but we need degrees for the bbox
  var minLat = convertedCoordinates[0].Y * Coordinates.RADIANS_TO_DEGREES;
  var minLng = convertedCoordinates[0].X * Coordinates.RADIANS_TO_DEGREES;
  var maxLat = convertedCoordinates[1].Y * Coordinates.RADIANS_TO_DEGREES;
  var maxLng = convertedCoordinates[1].X * Coordinates.RADIANS_TO_DEGREES;
  var centerLat = convertedCoordinates[2].Y * Coordinates.RADIANS_TO_DEGREES;
  var centerLng = convertedCoordinates[2].X * Coordinates.RADIANS_TO_DEGREES;
  var bbox = $"{minLat},{minLng},{maxLat},{maxLng}";

  var outputStream = new MemoryStream();
  using (var zipArchive = new ZipArchive(outputStream, ZipArchiveMode.Create, true))
  {
    var textureZipEntry = zipArchive.CreateEntry("texture.png");
    using (var stream = textureZipEntry.Open())
    {
      // Write the texture to the zip
      var textureFileStream = await GetTexture(projectUid, designUid, projectSettings.Result, filter.Result, mode, bbox);
      textureFileStream.FileStream.CopyTo(stream);
    }

    // Write the model to the zip
    var modelZipEntry = zipArchive.CreateEntry("model.obj");
    using (var stream = modelZipEntry.Open())
    {
      var modelFileStream = ConvertMultipleToObj(tins, centerEasting, centerNorthing);
      modelFileStream.FileStream.CopyTo(stream);
    }

    // Add some metadata to help with positioning of the model
    var metaDataEntry = zipArchive.CreateEntry("metadata.json");
    using (var stream = metaDataEntry.Open())
    {
      var metaData = new
      {
        Minimum = new { Lat = minLat, Lng = minLng },
        Maximum = new { Lat = maxLat, Lng = maxLng },
        Center = new { Lat = centerLat, Lng = centerLng },
        HasDesign = design != null
      };
      var bytes = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(metaData));
      stream.Write(bytes, 0, bytes.Length);
    }
  }

  // Don't forget to seek back, or else the content length will be 0
  outputStream.Seek(0, SeekOrigin.Begin);

  return new FileStreamResult(outputStream, ContentTypeConstants.ApplicationZip);
}
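A hypothetical client-side sketch of unpacking the zip this endpoint returns. The entry names match those created above; responseStream is assumed to hold the downloaded response body (requires System.IO and System.IO.Compression):

// Hypothetical consumer sketch: reading the three entries from the returned zip.
using (var archive = new ZipArchive(responseStream, ZipArchiveMode.Read))
{
  var texture = archive.GetEntry("texture.png");    // map imagery for draping over the mesh
  var model = archive.GetEntry("model.obj");        // terrain (and design, if present) mesh
  var metadata = archive.GetEntry("metadata.json"); // Minimum/Maximum/Center lat-lng plus HasDesign

  using (var reader = new StreamReader(metadata.Open()))
  {
    // e.g. {"Minimum":{"Lat":...,"Lng":...},"Maximum":{...},"Center":{...},"HasDesign":false}
    var json = reader.ReadToEnd();
  }
}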
/// <summary>
/// Common file processing method used by all importedFile endpoints.
/// </summary>
protected async Task<ImportedFileDescriptorSingleResult> UpsertFileInternal(
  string filename,
  Stream fileStream,
  Guid projectUid,
  ImportedFileType importedFileType,
  DxfUnitsType dxfUnitsType,
  DateTime fileCreatedUtc,
  DateTime fileUpdatedUtc,
  DateTime? surveyedUtc,
  ISchedulerProxy schedulerProxy,
  Guid? parentUid = null,
  double? offset = null)
{
  ImportedFileDescriptorSingleResult importedFile = null;

  var existing = await ImportedFileRequestDatabaseHelper
    .GetImportedFileForProject(
      projectUid.ToString(), filename, importedFileType, surveyedUtc,
      Logger, ProjectRepo, offset, parentUid)
    .ConfigureAwait(false);

  var creating = existing == null;

  Logger.LogInformation(
    creating
      ? $"{nameof(UpsertFileInternal)}. File doesn't already exist in DB: {filename} projectUid {projectUid} ImportedFileType: {importedFileType} surveyedUtc {(surveyedUtc == null ? "N/A" : surveyedUtc.ToString())} parentUid {parentUid} offset: {offset}"
      : $"{nameof(UpsertFileInternal)}. File already exists in DB and will be updated: {JsonConvert.SerializeObject(existing)}");

  FileDescriptor fileDescriptor = null;

  var importedFileUid = creating ? Guid.NewGuid() : Guid.Parse(existing.ImportedFileUid);
  var dataOceanFileName = DataOceanFileUtil.DataOceanFileName(filename,
    importedFileType == ImportedFileType.SurveyedSurface || importedFileType == ImportedFileType.GeoTiff,
    importedFileUid, surveyedUtc);

  if (importedFileType == ImportedFileType.ReferenceSurface)
  {
    // FileDescriptor is not used for a reference surface, but validation requires values
    fileDescriptor = FileDescriptor.CreateFileDescriptor("Not applicable", "Not applicable", filename);
  }
  else
  {
    if (IsTRexDesignFileType(importedFileType))
    {
      fileDescriptor = ProjectRequestHelper.WriteFileToS3Repository(
        fileStream, projectUid.ToString(), filename,
        importedFileType == ImportedFileType.SurveyedSurface,
        surveyedUtc, Logger, ServiceExceptionHandler,
        persistantTransferProxyFactory.NewProxy(TransferProxyType.DesignImport));
    }

    // This is needed for ATs.
    fileDescriptor = FileDescriptor.CreateFileDescriptor(FileSpaceId, $"/{CustomerUid}/{projectUid}", filename);

    if (importedFileType == ImportedFileType.Linework || importedFileType == ImportedFileType.GeoTiff)
    {
      // Save a copy to DataOcean
      await DataOceanHelper.WriteFileToDataOcean(
        fileStream, DataOceanRootFolderId, CustomerUid, projectUid.ToString(), dataOceanFileName,
        Logger, ServiceExceptionHandler, DataOceanClient, Authorization, importedFileUid, ConfigStore);
    }
  }

  if (creating)
  {
    var createImportedFile = new CreateImportedFile(
      projectUid, filename, fileDescriptor, importedFileType, surveyedUtc, dxfUnitsType,
      fileCreatedUtc, fileUpdatedUtc, DataOceanRootFolderId, parentUid, offset,
      importedFileUid, dataOceanFileName);

    importedFile = await WithServiceExceptionTryExecuteAsync(() =>
      RequestExecutorContainerFactory
        .Build<CreateImportedFileExecutor>(
          LoggerFactory, ConfigStore, ServiceExceptionHandler, CustomerUid, UserId, UserEmailAddress, customHeaders,
          productivity3dV2ProxyCompaction: Productivity3dV2ProxyCompaction,
          persistantTransferProxyFactory: persistantTransferProxyFactory,
          tRexImportFileProxy: tRexImportFileProxy,
          projectRepo: ProjectRepo,
          dataOceanClient: DataOceanClient,
          authn: Authorization,
          schedulerProxy: schedulerProxy,
          cwsProjectClient: CwsProjectClient)
        .ProcessAsync(createImportedFile)
    ) as ImportedFileDescriptorSingleResult;

    Logger.LogInformation(
      $"{nameof(UpsertFileInternal)}: Create completed successfully. Response: {JsonConvert.SerializeObject(importedFile)}");
  }
  else
  {
    // This also validates that this customer has access to the projectUid
    var project = await ProjectRequestHelper.GetProject(projectUid, new Guid(CustomerUid), new Guid(UserId), Logger, ServiceExceptionHandler, CwsProjectClient, customHeaders);

    var importedFileUpsertEvent = new UpdateImportedFile(
      projectUid, project.ShortRaptorProjectId, importedFileType,
      (importedFileType == ImportedFileType.SurveyedSurface || importedFileType == ImportedFileType.GeoTiff)
        ? surveyedUtc
        : null,
      dxfUnitsType, fileCreatedUtc, fileUpdatedUtc, fileDescriptor,
      Guid.Parse(existing.ImportedFileUid), existing.ImportedFileId,
      DataOceanRootFolderId, offset, dataOceanFileName);

    importedFile = await WithServiceExceptionTryExecuteAsync(() =>
      RequestExecutorContainerFactory
        .Build<UpdateImportedFileExecutor>(
          LoggerFactory, ConfigStore, ServiceExceptionHandler, CustomerUid, UserId, UserEmailAddress, customHeaders,
          productivity3dV2ProxyCompaction: Productivity3dV2ProxyCompaction,
          tRexImportFileProxy: tRexImportFileProxy,
          projectRepo: ProjectRepo,
          dataOceanClient: DataOceanClient,
          authn: Authorization,
          schedulerProxy: schedulerProxy,
          cwsProjectClient: CwsProjectClient)
        .ProcessAsync(importedFileUpsertEvent)
    ) as ImportedFileDescriptorSingleResult;

    Logger.LogInformation(
      $"{nameof(UpsertFileInternal)}: Update completed successfully. Response: {JsonConvert.SerializeObject(importedFile)}");
  }

  await NotificationHubClient.Notify(new ProjectChangedNotification(projectUid));

  return importedFile;
}
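A hypothetical usage sketch exercising both branches of the upsert above. The stream, project, and timestamp variables are assumed, and the enum values are illustrative:

// First call: no matching row in the DB => creating == true => CreateImportedFileExecutor runs
// and a new ImportedFileUid is generated.
var first = await UpsertFileInternal("design.ttm", fileStream, projectUid,
  ImportedFileType.DesignSurface, DxfUnitsType.Meters, createdUtc, updatedUtc,
  null, schedulerProxy);

// Second call with the same filename/type/project: the existing row is found
// => creating == false => UpdateImportedFileExecutor runs and the original ImportedFileUid is kept.
var second = await UpsertFileInternal("design.ttm", fileStream, projectUid,
  ImportedFileType.DesignSurface, DxfUnitsType.Meters, createdUtc, laterUpdatedUtc,
  null, schedulerProxy);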