/// <summary>
/// Resolve the coordinate system file from either TCC or what we know of it from Raptor.
/// </summary>
public async Task<bool> ResolveProjectCoordinateSystemFile(MigrationJob job)
{
  if (await ResolveCoordinateSystemFromDataOcean(job)) { return true; }

  if (string.IsNullOrEmpty(job.Project.CoordinateSystemFileName))
  {
    _log.LogDebug($"Project '{job.Project.ProjectUID}' contains NULL CoordinateSystemFileName field.");

    if (!await ResolveCoordinateSystemFromRaptor(job)) { return false; }
  }
  else
  {
    var fileDownloadResult = await DownloadCoordinateSystemFileFromTCC(job);

    if (!fileDownloadResult)
    {
      if (!await ResolveCoordinateSystemFromRaptor(job)) { return false; }
    }
  }

  _log.LogInformation($"Successfully resolved coordinate system information for project {job.Project.ProjectUID}");

  // Push the file information back to the Project service and by proxy DataOcean.
  var projectUpdateResult = await UpdateProjectCoordinateSystemInfo(job);

  if (!projectUpdateResult)
  {
    _log.LogError($"Unable to update Project database with new coordinate system file data for project {job.Project.ProjectUID}.");
    return false;
  }

  _migrationDb.Update(
    job.Project.LegacyProjectID,
    (MigrationProject x) =>
    {
      x.DcFilename = job.Project.CoordinateSystemFileName;
      x.HasValidDcFile = !string.IsNullOrEmpty(job.Project.CoordinateSystemFileName);
    },
    tableName: Table.Projects);

  // Wait for the coordinate system file to be pushed to DataOcean, then recheck that it's present.
  await Task.Delay(2000);

  return await ResolveCoordinateSystemFromDataOcean(job);
}
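// The trailing Task.Delay(2000) assumes the Project service has pushed the file to DataOcean
// within two seconds. A minimal sketch of a bounded polling alternative follows; the helper name,
// attempt count and delay are hypothetical and not part of the original migrator. It reuses the
// existing ResolveCoordinateSystemFromDataOcean call.
private async Task<bool> PollDataOceanForCoordinateSystemFile(MigrationJob job, int maxAttempts = 5, int delayMs = 2000)
{
  for (var attempt = 1; attempt <= maxAttempts; attempt++)
  {
    // Stop as soon as DataOcean reports the coordinate system file as present.
    if (await ResolveCoordinateSystemFromDataOcean(job)) { return true; }

    _log.LogDebug($"Coordinate system file for project {job.Project.ProjectUID} not yet in DataOcean; attempt {attempt} of {maxAttempts}.");
    await Task.Delay(delayMs);
  }

  return false;
}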
public async Task MigrateFilesForAllActiveProjects()
{
  Log.LogInformation($"{Method.Info()} Fetching projects...");
  var projects = (await ProjectRepo.GetActiveProjects()).ToList();
  Log.LogInformation($"{Method.Info()} Found {projects.Count} projects");

  var inputProjects = _appSettings.GetSection("Projects").Get<string[]>();
  var ignoredProjects = _appSettings.GetSection("IgnoredProjects").Get<string[]>();
  _ignoredFiles = _appSettings.GetSection("IgnoredFiles").Get<string[]>();

  // Are we processing only a subset of projects from the appSettings::Projects array?
  if (inputProjects != null && inputProjects.Any())
  {
    Log.LogInformation($"{Method.Info()} Found {inputProjects.Length} input projects to process.");

    var tmpProjects = new List<Project>(inputProjects.Length);

    foreach (var projectUid in inputProjects)
    {
      var project = projects.Find(x => x.ProjectUID == projectUid);
      if (project != null)
      {
        Log.LogInformation($"{Method.Info()} Adding {project.ProjectUID}");
        tmpProjects.Add(project);
      }
    }

    if (!tmpProjects.Any())
    {
      Log.LogInformation($"{Method.Info()} Unable to resolve any projects to process, exiting.");
      return;
    }

    DropTables();
    projects = tmpProjects;
  }
  else
  {
    if (!_resumeMigration)
    {
      DropTables();

      if (Directory.Exists(_tempFolder))
      {
        Log.LogDebug($"{Method.Info()} Removing temporary files from {_tempFolder}");
        Directory.Delete(_tempFolder, recursive: true);
      }
    }
  }

  if (!_resumeMigration)
  {
    MigrationInfoId = _database.Insert(new MigrationInfo());
  }

  var projectCount = Math.Min(projects.Count, _capMigrationCount);
  var projectTasks = new List<Task<bool>>(projectCount);

  _database.Update(MigrationInfoId, (MigrationInfo x) => x.ProjectsTotal = projectCount);

  var projectsProcessed = 0;
  var processedProjects = new List<string>();

  foreach (var project in projects)
  {
    if (ignoredProjects != null && ignoredProjects.Contains(project.ProjectUID))
    {
      Log.LogInformation($"{Method.Info()} Ignoring project {project.ProjectUID}; found in IgnoredProjects list.");
      continue;
    }

    var projectRecord = _database.Find<MigrationProject>(Table.Projects, project.LegacyProjectID);

    if (projectRecord == null)
    {
      Log.LogInformation($"{Method.Info()} Creating new migration record for project {project.ProjectUID}");
      _database.Insert(new MigrationProject(project));
    }
    else
    {
      Log.LogInformation($"{Method.Info()} Found migration record for project {project.ProjectUID}");

      // TODO Check completed=true & eligibleFiles > 0 && uploadedFiles=0; should retry.
      if (projectRecord.MigrationState == MigrationState.Completed && !_reProcessSkippedFiles)
      {
        Log.LogInformation($"{Method.Info()} Skipping project {project.ProjectUID}, marked as COMPLETED");
        continue;
      }

      if (projectRecord.MigrationState != MigrationState.Completed)
      {
        if (!_reProcessFailedProjects)
        {
          Log.LogInformation($"{Method.Info()} Not reprocessing {Enum.GetName(typeof(MigrationState), projectRecord.MigrationState)?.ToUpper()} project {project.ProjectUID}");
          continue;
        }
      }

      Log.LogInformation($"{Method.Info()} Resuming migration for project {project.ProjectUID}, marked as {Enum.GetName(typeof(MigrationState), projectRecord.MigrationState)?.ToUpper()}");
    }

    var job = new MigrationJob { Project = project, IsRetryAttempt = projectRecord != null };

    if (projectsProcessed <= _capMigrationCount)
    {
      processedProjects.Add(job.Project.ProjectUID);
      projectTasks.Add(MigrateProject(job));
    }

    if (projectTasks.Count <= THROTTLE_ASYNC_PROJECT_JOBS && projectsProcessed < _capMigrationCount - 1)
    {
      continue;
    }

    var completed = await Task.WhenAny(projectTasks);
    projectTasks.Remove(completed);

    _database.IncrementProjectMigrationCounter(project);
    projectsProcessed += 1;

    Log.LogInformation("Migration Progress:");
    Log.LogInformation($" Processed: {projectsProcessed}");
    Log.LogInformation($" In Flight: {projectTasks.Count}");
    Log.LogInformation($" Remaining: {projectCount - projectsProcessed}");

    if (projectsProcessed >= _capMigrationCount)
    {
      Log.LogInformation($"{Method.Info()} Reached maximum number of projects to process, exiting.");
      break;
    }
  }

  await Task.WhenAll(projectTasks);

  // DIAGNOSTIC RUNTIME SWITCH
  if (_saveFailedProjects)
  {
    // Create a recovery file of project UIDs for re-processing.
    var failedProjectsLog = Path.Combine(_tempFolder, $"FailedProjects{DateTime.Now.Date.ToShortDateString().Replace('/', '-')}_{DateTime.Now.Hour}{DateTime.Now.Minute}{DateTime.Now.Second}.log");
    var completedProjectsLog = Path.Combine(_tempFolder, $"Completed{DateTime.Now.Date.ToShortDateString().Replace('/', '-')}_{DateTime.Now.Hour}{DateTime.Now.Minute}{DateTime.Now.Second}.log");

    if (!Directory.Exists(_tempFolder)) { Directory.CreateDirectory(_tempFolder); }

    var allProjects = _database.GetTable<MigrationProject>(Table.Projects).ToList();

    using (TextWriter streamWriterFailed = new StreamWriter(failedProjectsLog))
    using (TextWriter streamWriterCompleted = new StreamWriter(completedProjectsLog))
    {
      foreach (var project in processedProjects)
      {
        var migrationProject = allProjects.FirstOrDefault(x => x.ProjectUid == project);
        if (migrationProject == null) { continue; }

        if (migrationProject.MigrationState == MigrationState.Completed)
        {
          streamWriterCompleted.WriteLine($"{migrationProject.ProjectUid}");
          continue;
        }

        var message = string.IsNullOrEmpty(migrationProject.MigrationStateMessage)
          ? null
          : $" // {migrationProject.MigrationStateMessage}";

        streamWriterFailed.WriteLine($"{migrationProject.ProjectUid}{message}");
      }
    }
  }

  // Set the final summary figures.
  var completedCount = _database.Find<MigrationProject>(Table.Projects, x => x.MigrationState == MigrationState.Completed).Count();
  _database.Update(MigrationInfoId, (MigrationInfo x) => x.ProjectsSuccessful = completedCount);

  var failedCount = _database.Find<MigrationProject>(Table.Projects, x => x.MigrationState == MigrationState.Failed).Count();
  _database.Update(MigrationInfoId, (MigrationInfo x) => x.ProjectsFailed = failedCount);

  Log.LogInformation("Migration processing completed.");
}
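// The foreach loop above caps the number of concurrently running MigrateProject tasks by awaiting
// Task.WhenAny once THROTTLE_ASYNC_PROJECT_JOBS tasks are in flight. Separated from the progress
// bookkeeping, the same throttling pattern looks roughly like the sketch below; RunThrottledAsync
// is a hypothetical helper and is not part of the original migrator.
private static async Task RunThrottledAsync<T>(IEnumerable<T> items, Func<T, Task> action, int maxInFlight)
{
  var inFlight = new List<Task>();

  foreach (var item in items)
  {
    inFlight.Add(action(item));

    // Keep starting work until the in-flight count reaches the cap.
    if (inFlight.Count < maxInFlight) { continue; }

    // A slot frees up as soon as any in-flight task completes.
    var completed = await Task.WhenAny(inFlight);
    inFlight.Remove(completed);
  }

  // Drain whatever is still running.
  await Task.WhenAll(inFlight);
}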
/// <summary>
/// Send HTTP request for importing a file.
/// </summary>
private string DoHttpRequest(string resourceUri, HttpMethod httpMethod, byte[] payloadData, ImportedFileDescriptor fileDescriptor, string contentType)
{
  _database.Update(MigrationInfoId, (MigrationInfo x) => x.FilesTotal += 1);

  if (!(WebRequest.Create(resourceUri) is HttpWebRequest request)) { return string.Empty; }

  request.Method = httpMethod.Method;
  request.Headers.Add("Authorization", _bearerToken);
  request.Headers.Add("X-VisionLink-CustomerUid", fileDescriptor.CustomerUid);
  request.Headers.Add("X-JWT-Assertion", JWTFactory.CreateToken(_jwtToken, fileDescriptor.ImportedBy));

  if (payloadData != null)
  {
    request.ContentType = contentType;
    var writeStream = request.GetRequestStreamAsync().Result;
    writeStream.Write(payloadData, 0, payloadData.Length);
  }

  var responseString = string.Empty;

  try
  {
    using (var response = (HttpWebResponse)request.GetResponseAsync().Result)
    {
      if (response.StatusCode != HttpStatusCode.Accepted)
      {
        _log.LogInformation($"{nameof(DoHttpRequest)}: Response returned status code: {response.StatusCode}");
      }

      responseString = GetStringFromResponseStream(response);
      _log.LogTrace($"{nameof(DoHttpRequest)}: {responseString}");
    }
  }
  catch (AggregateException ex)
  {
    _log.LogError($"{nameof(DoHttpRequest)}: {ex.Message}");
    _log.LogError(JsonConvert.SerializeObject(request));

    foreach (var e in ex.InnerExceptions)
    {
      if (!(e is WebException webException)) { continue; }
      if (!(webException.Response is HttpWebResponse response)) { continue; }

      return GetStringFromResponseStream(response);
    }
  }

  _database.Update(MigrationInfoId, (MigrationInfo x) => x.FilesUploaded += 1);

  return responseString;
}
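// DoHttpRequest blocks on GetRequestStreamAsync().Result and GetResponseAsync().Result. For
// reference, a minimal non-blocking sketch of the same request using HttpClient follows; the
// method name is hypothetical, it reuses the existing _bearerToken, _jwtToken, _log and
// JWTFactory members, and it omits the migration counters and the WebException recovery path.
private async Task<string> DoHttpRequestAsync(string resourceUri, HttpMethod httpMethod, byte[] payloadData, ImportedFileDescriptor fileDescriptor, string contentType)
{
  using (var client = new HttpClient())
  using (var request = new HttpRequestMessage(httpMethod, resourceUri))
  {
    request.Headers.TryAddWithoutValidation("Authorization", _bearerToken);
    request.Headers.TryAddWithoutValidation("X-VisionLink-CustomerUid", fileDescriptor.CustomerUid);
    request.Headers.TryAddWithoutValidation("X-JWT-Assertion", JWTFactory.CreateToken(_jwtToken, fileDescriptor.ImportedBy));

    if (payloadData != null)
    {
      request.Content = new ByteArrayContent(payloadData);
      request.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse(contentType);
    }

    using (var response = await client.SendAsync(request))
    {
      if (response.StatusCode != HttpStatusCode.Accepted)
      {
        _log.LogInformation($"{nameof(DoHttpRequestAsync)}: Response returned status code: {response.StatusCode}");
      }

      return await response.Content.ReadAsStringAsync();
    }
  }
}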