/// <summary>
/// Send a file-import request to the FileImportV4 controller and deserialize the result.
/// </summary>
/// <param name="uriRoot">Base URI of the FileImportV4 endpoint.</param>
/// <param name="fileDescr">Descriptor of the file being imported (project, type, timestamps).</param>
/// <param name="fullFileName">Absolute path of the file to upload.</param>
/// <param name="importOptions">Optional HTTP method and extra query parameters.</param>
/// <param name="uploadToTCC">Whether the service should also push the file to TCC.</param>
/// <returns>The deserialized <see cref="FileDataSingleResult"/>, or null when the upload returned no body or deserialization failed.</returns>
public FileDataSingleResult SendRequestToFileImportV4(string uriRoot, ImportedFileDescriptor fileDescr, string fullFileName, ImportOptions importOptions = new ImportOptions(), bool uploadToTCC = false)
{
  _log.LogInformation(Method.In());

  // Round-trip ("o") format keeps full precision and is unambiguous for the server to parse.
  var createdDt = fileDescr.FileCreatedUtc.ToUniversalTime().ToString("o");
  var updatedDt = fileDescr.FileUpdatedUtc.ToUniversalTime().ToString("o");

  var uri = $"{uriRoot}?projectUid={fileDescr.ProjectUid}&importedFileType={fileDescr.ImportedFileTypeName}" +
            $"&fileCreatedUtc={createdDt}&fileUpdatedUtc={updatedDt}&uploadToTcc={uploadToTCC}";

  // Some file types require extra type-specific query parameters.
  switch (fileDescr.ImportedFileType)
  {
    case ImportedFileType.SurveyedSurface:
      uri = $"{uri}&SurveyedUtc={fileDescr.SurveyedUtc:yyyy-MM-ddTHH:mm:ss.fffffff}";
      break;
    case ImportedFileType.Linework:
      uri = $"{uri}&DxfUnitsType={fileDescr.DxfUnitsType}";
      break;
  }

  if (importOptions.QueryParams != null)
  {
    foreach (var param in importOptions.QueryParams)
    {
      uri = $"{uri}&{param}";
    }
  }

  var response = UploadFileToWebApi(fullFileName, uri, fileDescr, importOptions.HttpMethod);

  try
  {
    if (response != null)
    {
      return JsonConvert.DeserializeObject<FileDataSingleResult>(response, new JsonSerializerSettings
      {
        DateTimeZoneHandling = DateTimeZoneHandling.Unspecified,
        NullValueHandling = NullValueHandling.Ignore
      });
    }
  }
  catch (Exception exception)
  {
    // BUG FIX: the response body was previously passed directly as the log message
    // template; any '{' or '}' in the JSON payload breaks structured-logging template
    // parsing. Use a proper template with the body as an argument instead.
    _log.LogError(exception, "Failed to deserialize FileImportV4 response: {Response}", response);
  }

  return null;
}
/// <summary>
/// Upload a single file to the web api, split into flow-file chunks.
/// </summary>
/// <param name="fullFileName">Absolute path of the file to upload.</param>
/// <param name="uri">Fully-formed request URI including query parameters.</param>
/// <param name="fileDescriptor">Descriptor used for logging and per-request metadata.</param>
/// <param name="httpMethod">HTTP method to use for each chunk request.</param>
/// <returns>The response body of the final chunk request, null when the file was skipped,
/// or the exception message when an unexpected error occurred (preserved legacy behavior).</returns>
private string UploadFileToWebApi(string fullFileName, string uri, ImportedFileDescriptor fileDescriptor, HttpMethod httpMethod)
{
  _log.LogInformation($"{Method.In()} | Filename: {fullFileName}, CustomerUid: {fileDescriptor.CustomerUid}");

  try
  {
    var name = new DirectoryInfo(fullFileName).Name;

    // BUG FIX: check the size limit BEFORE loading the whole file into memory;
    // previously File.ReadAllBytes ran first, defeating the point of the guard.
    var fileSize = (int)new FileInfo(fullFileName).Length;
    if (_maxFileSize > 0 && fileSize > _maxFileSize)
    {
      _log.LogWarning($"Skipping file {fullFileName}, exceeds MAX_FILE_SIZE of {_maxFileSize} bytes");
      return null;
    }

    var bytes = File.ReadAllBytes(fullFileName);

    // Number of chunks; the final chunk absorbs the remainder, so it may be up to
    // (2 * CHUNK_SIZE - 1) bytes long.
    var chunks = (int)Math.Max(Math.Floor((double)fileSize / CHUNK_SIZE), 1);
    string result = null;

    _log.LogInformation($"{Method.Info()} | {httpMethod.Method}, Uri: {uri}");

    for (var offset = 0; offset < chunks; offset++)
    {
      _log.LogInformation($"{Method.Info()} | {fileDescriptor.Name}: {(int)Math.Round((double)(100 * offset) / chunks)}% completed ");

      var startByte = offset * CHUNK_SIZE;
      var endByte = Math.Min(fileSize, (offset + 1) * CHUNK_SIZE);

      if (fileSize - endByte < CHUNK_SIZE)
      {
        // The last chunk will be bigger than the chunk size but less than 2*chunkSize
        endByte = fileSize;
      }

      var currentChunkSize = endByte - startByte;
      var boundaryIdentifier = Guid.NewGuid().ToString();
      var flowFileUpload = SetAllAttributesForFlowFile(fileSize, name, offset + 1, chunks, currentChunkSize);

      // PERF FIX: copy the chunk directly instead of bytes.Skip(...).Take(...).ToArray(),
      // which re-walks the array from index 0 on every iteration (O(n^2) overall).
      var currentBytes = new byte[currentChunkSize];
      Array.Copy(bytes, startByte, currentBytes, 0, currentChunkSize);

      var contentType = $"multipart/form-data; boundary={BOUNDARY_START}{boundaryIdentifier}";

      using (var content = new MemoryStream())
      {
        FormatTheContentDisposition(flowFileUpload, currentBytes, name, $"{BOUNDARY_START + BOUNDARY_BLOCK_DELIMITER}{boundaryIdentifier}", content);
        result = DoHttpRequest(uri, httpMethod, content.ToArray(), fileDescriptor, contentType);
      }
    }

    //The last chunk should have the result
    return result;
  }
  catch (Exception ex)
  {
    // NOTE(review): returning the exception message as the "response" means the caller
    // attempts (and fails) to deserialize it; preserved for backward compatibility.
    return ex.Message;
  }
}
/// <summary>
/// Creates the REST client wrapper: resolves the bearer and JWT tokens and
/// configures a single shared HttpClient instance.
/// </summary>
public RestClient(ILoggerFactory loggerFactory, ITPaaSApplicationAuthentication authentication, IEnvironmentHelper environmentHelper)
{
  // BUG FIX: the logger was created with the HttpClient category, which mislabeled
  // every log entry from this class; use this class as the category instead
  // (consistent with the other constructors in this file).
  _log = loggerFactory.CreateLogger<RestClient>();
  _log.LogInformation(Method.In());

  _bearerToken = authentication.GetApplicationBearerToken();
  _jwtToken = environmentHelper.GetVariable("JWT_TOKEN", 1);

  // One long-lived HttpClient for the lifetime of this object (avoids socket exhaustion).
  _httpClient = new HttpClient();
  _httpClient.DefaultRequestHeaders.Add("pragma", "no-cache");
}
/// <summary>
/// Fallback path: resolves a project's coordinate system via Raptor after the calibration
/// file could not be fetched from TCC, then writes the resulting DC file to disk.
/// </summary>
/// <param name="job">Migration job carrying the project being processed.</param>
/// <returns>True when a coordinate system file was resolved and saved; otherwise false.</returns>
private async Task <bool> ResolveCoordinateSystemFromRaptor(MigrationJob job)
{
  _log.LogInformation($"{Method.In()} Resolving project {job.Project.ProjectUID} CSIB from Raptor");

  // Record the TCC failure that led us here as a warning against the project.
  var logMessage = $"Failed to fetch coordinate system file '{job.Project.CustomerUID}/{job.Project.ProjectUID}/{job.Project.CoordinateSystemFileName}' from TCC.";
  _migrationDb.Insert(new MigrationMessage(job.Project.ProjectUID, logMessage), Table.Warnings);
  _log.LogWarning(logMessage);

  // Get the CSIB for the project from Raptor.
  var csibResponse = await _csibAgent.GetCSIBForProject(job.Project);
  var csib = csibResponse.CSIB;

  if (csibResponse.Code != 0)
  {
    const string errorMessage = "Failed to resolve CSIB from Raptor";
    // NOTE(review): csib (not errorMessage) is stored as the resolve message here —
    // confirm this is intended rather than a swapped argument.
    _migrationDb.SetResolveCSIBMessage(Table.Projects, job.Project.ProjectUID, csib);
    _migrationDb.Insert(new MigrationMessage(job.Project.ProjectUID, errorMessage), Table.Errors);
    _log.LogWarning(errorMessage);
    return(false);
  }

  _migrationDb.SetProjectCSIB(Table.Projects, job.Project.ProjectUID, csib);

  // Convert the CSIB to a coordinate system id, then fetch the DC file content for it.
  var coordSysInfo = await _csibAgent.GetCoordSysInfoFromCSIB64(job.Project, csib);
  var dcFileContent = await _csibAgent.GetCalibrationFileForCoordSysId(job.Project, coordSysInfo["coordinateSystem"]["id"].ToString());

  var coordSystemFileContent = Encoding.UTF8.GetBytes(dcFileContent);

  using (var stream = new MemoryStream(coordSystemFileContent))
  {
    if (SaveDCFileToDisk(job, stream))
    {
      return(true);
    }
  }

  _log.LogError("Failed to resolve coordinate system information from Raptor");
  return(false);
}
/// <summary>
/// Creates the calibration file agent, wiring up its collaborators and reading
/// migration settings from the environment and configuration store.
/// </summary>
public CalibrationFileAgent(ILoggerFactory loggerFactory, ILiteDbAgent liteDbAgent, IConfigurationStore configStore, IEnvironmentHelper environmentHelper, IFileRepository fileRepo, IWebApiUtils webApiUtils, ICSIBAgent csibAgent, IDataOceanAgent dataOceanAgent, IMemoryCache memoryCache)
{
  _log = loggerFactory.CreateLogger<CalibrationFileAgent>();
  _log.LogInformation(Method.In());

  // Collaborators supplied by the DI container.
  _migrationDb = liteDbAgent;
  _webApiUtils = webApiUtils;
  _csibAgent = csibAgent;
  _dataOceanAgent = dataOceanAgent;
  _fileRepo = fileRepo;

  // Seed the shared cache entry that tracks which projects already have calibration files.
  _cache = memoryCache;
  _cache.Set(COORDINATE_SYSTEM_FILES_KEY, new List<string>());

  _projectApiUrl = environmentHelper.GetVariable("PROJECT_API_URL", 1);
  _fileSpaceId = environmentHelper.GetVariable("TCCFILESPACEID", 48);

  // Per-environment scratch folder for downloaded calibration files.
  var tempRoot = environmentHelper.GetVariable("TEMPORARY_FOLDER", 2);
  var environmentName = environmentHelper.GetVariable("MIGRATION_ENVIRONMENT", 2);
  _tempFolder = Path.Combine(tempRoot, "DataOceanMigrationTmp", environmentName);

  _updateProjectCoordinateSystemFile = configStore.GetValueBool("UPDATE_PROJECT_COORDINATE_SYSTEM_FILE", defaultValue: false);
}
/// <summary>
/// Downloads the coordinate system file for a given project from TCC, trying the
/// project's own filename first and then two legacy-id fallbacks, and saves it to disk.
/// </summary>
/// <param name="job">Migration job for the project whose calibration file is wanted.</param>
/// <returns>True when a file was found and saved; otherwise false.</returns>
private async Task <bool> DownloadCoordinateSystemFileFromTCC(MigrationJob job)
{
  _log.LogInformation($"{Method.In()} Downloading coord system file '{job.Project.CoordinateSystemFileName}' from TCC");

  Stream fileStream = null;

  try
  {
    // Candidate filenames, tried in order of preference.
    var candidateNames = new[]
    {
      job.Project.CoordinateSystemFileName,
      job.Project.LegacyCustomerID + ".dc",
      job.Project.LegacyProjectID + ".dc"
    };

    foreach (var candidate in candidateNames)
    {
      fileStream = await DownloadFile(job, candidate);
      if (fileStream != null)
      {
        break;
      }
    }

    // No candidate resolved -> nothing to save.
    return fileStream != null && SaveDCFileToDisk(job, fileStream);
  }
  catch (Exception exception)
  {
    _log.LogError(exception, $"Unexpected error processing calibration file for project {job.Project.ProjectUID}");
    return false;
  }
  finally
  {
    fileStream?.Dispose();
  }
}
/// <summary>
/// Saves the DC file content to disk; for testing purposes only so we can eyeball the content.
/// Also records the coordinate system metadata (units, projection) against the project in the
/// migration database.
/// </summary>
/// <param name="job">Migration job for the project the calibration file belongs to.</param>
/// <param name="dcFileContent">Seekable stream holding the DC file bytes; consumed twice (metadata copy, then disk write).</param>
/// <returns>True when the file was written to disk successfully; otherwise false.</returns>
private bool SaveDCFileToDisk(MigrationJob job, Stream dcFileContent)
{
  _log.LogDebug($"{Method.In()} Writing coordinate system file for project {job.Project.ProjectUID}");

  if (dcFileContent == null || dcFileContent.Length <= 0)
  {
    _log.LogDebug($"{Method.Info()} Error: Null stream provided for dcFileContent for project '{job.Project.ProjectUID}'");
    return(false);
  }

  // Copy the stream into memory so we can extract metadata and persist it before writing to disk.
  using (var memoryStream = new MemoryStream())
  {
    dcFileContent.CopyTo(memoryStream);
    var dcFileArray = memoryStream.ToArray();
    var projectionType = GetProjectionTypeCode(job, dcFileArray);

    var coordinateSystemInfo = new MigrationCoordinateSystemInfo
    {
      ProjectUid = job.Project.ProjectUID,
      DxfUnitsType = GetDxfUnitsType(dcFileArray),
      ProjectionTypeCode = projectionType.id,
      ProjectionName = projectionType.name
    };

    job.CoordinateSystemFileBytes = dcFileArray;

    _migrationDb.Update(
      job.Project.LegacyProjectID,
      (MigrationProject x) =>
      {
        x.CoordinateSystemInfo = coordinateSystemInfo;
        x.CalibrationFile = new CalibrationFile { Content = Encoding.Default.GetString(dcFileArray) };
      },
      tableName: Table.Projects);

    _migrationDb.Insert(coordinateSystemInfo);
  }

  try
  {
    var dcFilePath = Path.Combine(_tempFolder, job.Project.CustomerUID, job.Project.ProjectUID);
    Directory.CreateDirectory(dcFilePath);

    // Fall back to a fixed filename when the project has no (or an invalid) calibration filename.
    var coordinateSystemFilename = job.Project.CoordinateSystemFileName;
    if (string.IsNullOrEmpty(coordinateSystemFilename) ||
        coordinateSystemFilename.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
    {
      coordinateSystemFilename = "ProjectCalibrationFile.dc";
    }

    var tempFileName = Path.Combine(dcFilePath, coordinateSystemFilename);

    using (var fileStream = File.Create(tempFileName))
    {
      // Rewind: the stream was consumed by the metadata copy above.
      dcFileContent.Seek(0, SeekOrigin.Begin);
      dcFileContent.CopyTo(fileStream);

      _log.LogInformation($"{Method.Info()} Completed writing DC file '{tempFileName}' for project {job.Project.ProjectUID}");
      return(true);
    }
  }
  catch (Exception exception)
  {
    _log.LogError(exception, $"{Method.Info()} Error writing DC file for project {job.Project.ProjectUID}");
  }

  return(false);
}
/// <summary>
/// Determines whether a calibration (.dc/.cal) file already exists in DataOcean for the
/// given project, resolving the customer and project folders (cached) and paging through
/// the project's files.
/// </summary>
/// <param name="job">Migration job carrying the project being processed.</param>
/// <returns>True when a matching calibration file exists in DataOcean; otherwise false.</returns>
private async Task <bool> ResolveCoordinateSystemFromDataOcean(MigrationJob job)
{
  _log.LogInformation($"{Method.In()} Resolving project {job.Project.ProjectUID} coordination file from DataOcean.");

  // Seeded in the constructor, so the cache lookup is expected to succeed.
  _cache.TryGetValue(COORDINATE_SYSTEM_FILES_KEY, out List <string> fileList);

  if (fileList.Contains(job.Project.ProjectUID))
  {
    _log.LogDebug($"Resolving DataOcean calibration file from cache for project {job.Project.ProjectUID}.");
    return true;
  }

  // Resolve the customer id from DataOcean using the .Name, our CustomerUid value.
  if (!_cache.TryGetValue(job.Project.CustomerUID, out DataOceanDirectory customer))
  {
    var directoryResponse = await _dataOceanAgent.GetCustomerByName(job.Project.CustomerUID);

    if (!directoryResponse.Directories.Any())
    {
      _log.LogWarning($"Unable to resolve DataOcean customer {job.Project.CustomerUID}, project {job.Project.ProjectUID}");
      return false;
    }

    // BUG FIX: assign the resolved directory BEFORE caching it. Previously the (null)
    // out-variable was cached, so subsequent cache hits returned null and crashed on
    // customer.Id below.
    customer = directoryResponse.Directories[0];
    _cache.Set(job.Project.CustomerUID, customer);
  }

  // Resolve the project folder's id from DataOcean using the .Name, our ProjectUid value.
  if (!_cache.TryGetValue(job.Project.ProjectUID, out DataOceanDirectory project))
  {
    var dirProjectResponse = await _dataOceanAgent.GetProjectForCustomerById(customer.Id.ToString(), job.Project.ProjectUID);

    if (!dirProjectResponse.Directories.Any())
    {
      _log.LogWarning($"Unable to resolve DataOcean project folder {job.Project.ProjectUID} for customer {job.Project.CustomerUID}.");
      return false;
    }

    // BUG FIX: same ordering problem as the customer lookup above — cache the resolved
    // value, not the unassigned out-variable.
    project = dirProjectResponse.Directories[0];
    _cache.Set(job.Project.ProjectUID, project);
  }

  // Iterate all files, 25 at a time, until we find a .DC or .CAL file.
  long metaKeyOffset = -1;

  do
  {
    var dirFilesResponse = await _dataOceanAgent.GetFilesForProjectById(project.Id.ToString(), metaKeyOffset);

    if (dirFilesResponse.Files.Length == 0)
    {
      return false;
    }

    foreach (var file in dirFilesResponse.Files)
    {
      if (!file.Path.EndsWith(job.Project.ProjectUID + ".dc", StringComparison.OrdinalIgnoreCase) &&
          !file.Path.EndsWith(job.Project.ProjectUID + ".cal", StringComparison.OrdinalIgnoreCase))
      {
        continue;
      }

      // Found a calibration file: remember it in the shared cache list and flag the project.
      fileList.Add(job.Project.ProjectUID);
      _migrationDb.Update(job.Project.LegacyProjectID, (MigrationProject x) => { x.HasValidDcFile = true; }, Table.Projects);

      return true;
    }

    // A short page means we've reached the end of the file listing.
    if (dirFilesResponse.Files.Length < 25)
    {
      return false;
    }

    // Setup the Key_Offset for the next DataOcean request; moving us to the next page of results.
    metaKeyOffset = dirFilesResponse.Meta.Key_Offset;
  } while (true);
}
/// <summary>
/// Downloads the file from TCC and if successful uploads it through the Project service.
/// </summary>
/// <param name="file">Descriptor of the imported file to migrate.</param>
/// <param name="project">Project the file belongs to (used for upload accounting).</param>
/// <returns>
/// (success, file): success=false only when the file is on the exclusion list; a missing
/// TCC file is recorded but still reported as success=true with a null result.
/// </returns>
private async Task <(bool success, FileDataSingleResult file)> MigrateFile(ImportedFileDescriptor file, Project project)
{
  // Skip files explicitly excluded from migration.
  if (_ignoredFiles != null && _ignoredFiles.Contains(file.ImportedFileUid))
  {
    Log.LogWarning($"{Method.Info()} Migrating file '{file.Name}', Uid: {file.ImportedFileUid} aborted, found in exclusion list.");
    return(success : false, file : null);
  }

  Log.LogInformation($"{Method.In()} Migrating file '{file.Name}', Uid: {file.ImportedFileUid}");

  string tempFileName;

  using (var fileContents = await FileRepo.GetFile(_fileSpaceId, $"{file.Path}/{file.Name}"))
  {
    _database.Update(file.LegacyFileId, (MigrationFile x) => x.MigrationState = MigrationState.InProgress, Table.Files);

    if (fileContents == null)
    {
      // File no longer exists in TCC: record it and continue (treated as non-fatal).
      var message = $"Failed to fetch file '{file.Name}' ({file.LegacyFileId}), not found";
      _database.Update(file.LegacyFileId, (MigrationFile x) => x.MigrationState = MigrationState.FileNotFound, Table.Files);
      _database.Insert(new MigrationMessage(file.ProjectUid, message));
      Log.LogWarning($"{Method.Out()} {message}");
      return(success : true, file : null);
    }

    var tempPath = Path.Combine(_tempFolder, file.CustomerUid, file.ProjectUid, file.ImportedFileUid);
    Directory.CreateDirectory(tempPath);

    tempFileName = Path.Combine(tempPath, file.Name);

    Log.LogInformation($"{Method.Info()} Creating temporary file '{tempFileName}' for file {file.ImportedFileUid}");

    if (_downloadProjectFiles)
    {
      using (var tempFile = new FileStream(tempFileName, FileMode.Create))
      {
        fileContents.CopyTo(tempFile);

        _database.Update(
          file.LegacyFileId,
          (MigrationFile x) =>
          {
            // ReSharper disable once AccessToDisposedClosure
            x.Length = tempFile.Length;
          },
          tableName: Table.Files);
      }
    }
  }

  var result = new FileDataSingleResult();

  // Upload only when both download and upload are enabled; otherwise record the skip.
  if (_downloadProjectFiles && _uploadProjectFiles)
  {
    Log.LogInformation($"{Method.Info()} Uploading file {file.ImportedFileUid}");

    result = ImportFile.SendRequestToFileImportV4(
      _uploadFileApiUrl,
      file,
      tempFileName,
      new ImportOptions(HttpMethod.Put));

    _database.Update(file.LegacyFileId, (MigrationFile x) => x.MigrationState = MigrationState.Completed, Table.Files);
    _database.Update(project.LegacyProjectID, (MigrationProject x) => x.UploadedFileCount += 1, Table.Projects);
  }
  else
  {
    var skippedMessage = $"Skipped because DOWNLOAD_PROJECT_FILES={_downloadProjectFiles} && UPLOAD_PROJECT_FILES={_uploadProjectFiles}";
    _database.Update(file.LegacyFileId, (MigrationFile x) => { x.MigrationState = MigrationState.Skipped; x.MigrationStateMessage = skippedMessage; }, Table.Files);
    Log.LogDebug($"{Method.Info("DEBUG")} {skippedMessage}");
  }

  Log.LogInformation($"{Method.Out()} File {file.ImportedFileUid} update result {result.Code} {result.Message}");

  return(success : true, file : result);
}
/// <summary>
/// Migrate all eligible files for a given project: resolves the coordinate system file,
/// fetches the imported-file list, migrates each eligible file (throttled), and records
/// the final migration state in the finally block regardless of outcome.
/// </summary>
/// <param name="job">Migration job carrying the project to migrate.</param>
/// <returns>True when every migrated file succeeded; false on any failure or early abort.</returns>
private async Task <bool> MigrateProject(MigrationJob job)
{
  var migrationResult = MigrationState.Unknown;
  var migrationStateMessage = "";

  try
  {
    Log.LogInformation($"{Method.In()} Migrating project {job.Project.ProjectUID}, Name: '{job.Project.Name}'");
    _database.SetMigrationState(job, MigrationState.InProgress, null);

    // Resolve coordinate system file first; all projects regardless of whether they have files need to have
    // their calibration file present in DataOcean post migration.
    var result = await DcFileAgent.ResolveProjectCoordinateSystemFile(job);
    if (!result)
    {
      migrationResult = MigrationState.Failed;
      migrationStateMessage = "Unable to resolve coordinate system file";
      Log.LogError($"{Method.Info()} {migrationStateMessage} for project {job.Project.ProjectUID}, aborting project migration");
      return(false);
    }

    // Resolve imported files for current project.
    var filesResult = await ImportFile.GetImportedFilesFromWebApi($"{_importedFileApiUrl}?projectUid={job.Project.ProjectUID}", job.Project);

    if (filesResult == null)
    {
      Log.LogInformation($"{Method.Info()} Failed to fetch imported files for project {job.Project.ProjectUID}, aborting project migration");
      _database.SetMigrationState(job, MigrationState.Failed, "Failed to fetch imported file list");
      migrationStateMessage = "Failed to fetch imported file list";
      return(false);
    }

    if (filesResult.ImportedFileDescriptors == null || filesResult.ImportedFileDescriptors.Count == 0)
    {
      // No files at all: project is complete once the coordinate system was resolved above.
      Log.LogInformation($"{Method.Info()} Project {job.Project.ProjectUID} contains no imported files, aborting project migration");
      _database.SetMigrationState(job, MigrationState.Skipped, "No imported files");
      _database.Update(MigrationInfoId, (MigrationInfo x) => x.ProjectsWithNoFiles += 1);
      migrationStateMessage = "Project contains no imported files";
      migrationResult = MigrationState.Completed;
    }
    else
    {
      // We have files, and a valid coordinate system, continue processing.
      var selectedFiles = filesResult.ImportedFileDescriptors
                                     .Where(f => MigrationFileTypes.Contains(f.ImportedFileType))
                                     .ToList();

      _database.Update(job.Project.LegacyProjectID, (MigrationProject x) =>
      {
        x.TotalFileCount = filesResult.ImportedFileDescriptors.Count;
        x.EligibleFileCount = selectedFiles.Count;
      }, Table.Projects);

      Log.LogInformation($"{Method.Info()} Found {selectedFiles.Count} eligible files out of {filesResult.ImportedFileDescriptors.Count} total to migrate for {job.Project.ProjectUID}");

      if (selectedFiles.Count == 0)
      {
        Log.LogInformation($"{Method.Info()} Project {job.Project.ProjectUID} contains no eligible files, skipping project migration");
        _database.Update(MigrationInfoId, (MigrationInfo x) => x.ProjectsWithNoEligibleFiles += 1);
        migrationStateMessage = "Project contains no eligible files";
        migrationResult = MigrationState.Completed;
      }

      var fileTasks = new List <Task <(bool, FileDataSingleResult)> >();

      foreach (var file in selectedFiles)
      {
        // Check to sort out bad data; found in development database.
        if (file.CustomerUid != job.Project.CustomerUID)
        {
          Log.LogError($"{Method.Info("ERROR")} CustomerUid ({file.CustomerUid}) for ImportedFile ({file.ImportedFileUid}) doesn't match associated project: {job.Project.ProjectUID}");
          continue;
        }

        var migrationFile = _database.Find <MigrationFile>(Table.Files, file.LegacyFileId);

        if (migrationFile == null)
        {
          _database.Insert(new MigrationFile(file), Table.Files);
        }
        else
        {
          // NOTE(review): && binds tighter than ||, so Completed files are ALWAYS skipped
          // while Skipped files are re-processed only when _reProcessSkippedFiles is set —
          // confirm this precedence matches the intent.
          if (migrationFile.MigrationState == MigrationState.Completed ||
              migrationFile.MigrationState == MigrationState.Skipped && !_reProcessSkippedFiles)
          {
            Log.LogInformation($"{Method.Info()} Skipping file {file.ImportedFileUid}, migrationState={Enum.GetName(typeof(MigrationState), migrationFile.MigrationState)?.ToUpper()} and REPROCESS_SKIPPED_FILES={_reProcessSkippedFiles}");
            continue;
          }
        }

        var migrationResultObj = MigrateFile(file, job.Project);
        fileTasks.Add(migrationResultObj);

        // Throttle: once the in-flight limit is reached, wait for one task to finish
        // before queueing the next.
        if (fileTasks.Count != THROTTLE_ASYNC_FILE_UPLOAD_JOBS)
        {
          continue;
        }

        var completed = await Task.WhenAny(fileTasks);
        fileTasks.Remove(completed);
      }

      await Task.WhenAll(fileTasks);

      var importedFilesResult = fileTasks.All(t => t.Result.Item1);

      migrationResult = importedFilesResult ? MigrationState.Completed : MigrationState.Failed;

      if (!_uploadProjectFiles)
      {
        // Uploads were disabled, so the outcome is indeterminate rather than completed/failed.
        migrationResult = MigrationState.Unknown;
      }

      Log.LogInformation($"{Method.Out()} Project '{job.Project.Name}' ({job.Project.ProjectUID}) {(importedFilesResult ? "succeeded" : "failed")}");

      migrationStateMessage = importedFilesResult ? "Success" : "failed";
      return(importedFilesResult);
    }
  }
  catch (Exception exception)
  {
    Log.LogError(exception, $"{Method.Info()} Error processing project {job.Project.ProjectUID}");
  }
  finally
  {
    // Persist the final state and bump the matching counter, whatever path we exited by.
    _database.SetMigrationState(job, migrationResult, migrationStateMessage);

    Action <MigrationInfo> migrationResultAction;

    switch (migrationResult)
    {
      case MigrationState.Skipped:
        migrationResultAction = x => x.ProjectsSkipped += 1;
        break;
      case MigrationState.Completed:
        migrationResultAction = x => x.ProjectsCompleted += 1;
        break;
      case MigrationState.Failed:
        migrationResultAction = x => x.ProjectsFailed += 1;
        break;
      default:
        if (!_uploadProjectFiles)
        {
          migrationResultAction = x => x.ProjectsSkipped += 1;
          break;
        }
        throw new Exception($"Invalid migrationResult state for project {job.Project.ProjectUID}");
    }

    _database.Update(MigrationInfoId, migrationResultAction);
  }

  return(false);
}
/// <summary>
/// Multi purpose HttpClient request wrapper: sets auth/customer headers, sends the
/// request, logs the outcome by status code, and deserializes the response by its
/// content type.
/// </summary>
/// <param name="uri">Target URI.</param>
/// <param name="method">HTTP method.</param>
/// <param name="acceptHeader">Accept header; also selects how the request body is built.</param>
/// <param name="contentType">Content type for JSON request bodies.</param>
/// <param name="customerUid">Optional customer UID header value.</param>
/// <param name="requestBodyJson">Optional JSON request body.</param>
/// <param name="payloadData">Optional binary payload (multipart is not implemented).</param>
/// <param name="setJWTHeader">Whether to attach the JWT assertion header.</param>
/// <returns>The deserialized response, or null when the request failed.</returns>
public async Task <TResponse> SendHttpClientRequest <TResponse>(
  string uri,
  HttpMethod method,
  string acceptHeader,
  string contentType,
  string customerUid = null,
  string requestBodyJson = null,
  byte[] payloadData = null,
  bool setJWTHeader = true) where TResponse : class
{
  _log.LogInformation($"{Method.In()} URI: {method} {uri}");

  var request = GetRequestMessage(method, uri);

  try
  {
    request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue(acceptHeader));

    if (!string.IsNullOrEmpty(customerUid))
    {
      request.Headers.Add("X-VisionLink-CustomerUid", customerUid);
    }

    request.Headers.Add("Authorization", $"Bearer {_bearerToken}");

    if (setJWTHeader)
    {
      request.Headers.Add("X-JWT-Assertion", _jwtToken);
    }

    if (requestBodyJson != null || payloadData != null)
    {
      switch (acceptHeader)
      {
        case MediaType.APPLICATION_JSON:
        {
          request.Content = new StringContent(requestBodyJson, Encoding.UTF8, contentType);
          break;
        }
        case MediaType.MULTIPART_FORM_DATA:
        {
          // Multipart uploads are handled elsewhere (see UploadFileToWebApi).
          // Removed the dead contentType assignment that preceded this throw.
          throw new NotImplementedException();
        }
        default:
        {
          throw new Exception($"Unsupported content type '{contentType}'");
        }
      }
    }

    var response = await _httpClient.SendAsync(request);

    // BUG FIX: previously used .Result (sync-over-async), risking thread-pool
    // starvation/deadlock; await the content stream instead.
    var receiveStream = await response.Content.ReadAsStreamAsync();

    string responseBody;

    using (var readStream = new StreamReader(receiveStream, Encoding.UTF8))
    {
      responseBody = readStream.ReadToEnd();
    }

    switch (response.StatusCode)
    {
      case HttpStatusCode.Unauthorized:
      {
        Debugger.Break();
        break;
      }
      case HttpStatusCode.OK:
      {
        _log.LogInformation($"{Method.Info()} Status [{response.StatusCode}] Body: '{responseBody}'");
        break;
      }
      case HttpStatusCode.InternalServerError:
      case HttpStatusCode.NotFound:
      {
        _log.LogError($"{Method.Info()} Status [{response.StatusCode}] Body: '{responseBody}'");
        Debugger.Break();
        break;
      }
      default:
      {
        _log.LogDebug($"{Method.Info()} Status [{response.StatusCode}] URI: '{request.RequestUri.AbsoluteUri}', Body: '{responseBody}'");
        break;
      }
    }

    switch (response.Content.Headers.ContentType.MediaType)
    {
      case MediaType.APPLICATION_JSON:
      {
        return JsonConvert.DeserializeObject <TResponse>(responseBody);
      }
      case MediaType.TEXT_PLAIN:
      case MediaType.APPLICATION_OCTET_STREAM:
      {
        // BUG FIX: calling ReadAsStringAsync() after the content stream was already
        // consumed above could return an empty string; return the body we already read.
        return responseBody as TResponse;
      }
      default:
      {
        throw new Exception($"Unsupported content type '{response.Content.Headers.ContentType.MediaType}'");
      }
    }
  }
  catch (Exception exception)
  {
    _log.LogError($"{Method.Info("ERROR")} {method} URI: '{request.RequestUri.AbsoluteUri}', Exception: {exception.GetBaseException()}");
  }
  finally
  {
    request.Dispose();
  }

  return null;
}