public async Task CanGetExportDataSuccess(string message)
{
    var customHeaders = new HeaderDictionary();
    var scheduleRequest = new ScheduleJobRequest { Url = "some url", Filename = "dummy" };
    var context = GetMockHangfireContext(typeof(ExportJobTests), TestContext.TestName, message);

    var apiClient = new Mock<IApiClient>();
    apiClient.Setup(a => a.SendRequest<CompactionExportResult>(scheduleRequest, customHeaders))
             .ReturnsAsync(new CompactionExportResult());

    var transferProxy = new Mock<ITransferProxy>();
    transferProxy.Setup(t => t.Upload(It.IsAny<Stream>(), It.IsAny<string>())).Verifiable();

    var transferProxyFactory = new Mock<ITransferProxyFactory>();
    transferProxyFactory.Setup(x => x.NewProxy(It.IsAny<TransferProxyType>())).Returns(transferProxy.Object);

    var logger = new Mock<Logging.ILoggerFactory>();

    var exportJob = new ExportJob(apiClient.Object, transferProxyFactory.Object, logger.Object);
    // Await the job so a faulted task fails the test instead of going unobserved.
    var result = await exportJob.GetExportData(Guid.NewGuid(), customHeaders, context);
}
public ScheduleJobResult StartExport([FromBody] ScheduleJobRequest request)
{
    log.LogInformation($"StartExport: Url {request?.Url}");

    var jobRequest = new JobRequest
    {
        JobUid = Guid.Parse("c3cbb048-05c1-4961-a799-70434cb2f162"),
        SetupParameters = request,
        RunParameters = Request.Headers.GetCustomHeaders()
    };
    log.LogInformation($"{nameof(StartExport)}: {JsonConvert.SerializeObject(request)}");
    jobRequest.Validate();
    jobRequest.AttributeFilters = SpecialFilters.ExportFilter;

    string hangfireJobId;
    try
    {
        hangfireJobId = jobRunner.QueueHangfireJob(jobRequest);
    }
    catch (Exception e)
    {
        // Pass the exception as the first argument so the logger records the stack trace.
        log.LogError(e, $"Queue VSS job failed with exception {e.Message}");
        throw;
    }

    // Hangfire will substitute a PerformContext automatically.
    return new ScheduleJobResult { JobId = hangfireJobId };
}
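For reference, a minimal client-side sketch of calling this endpoint over HTTP. The route is taken from the integration test further below ("internal/v1/export"); the host name and payload values are placeholders, not from the source:

// Hypothetical caller of the StartExport endpoint; host and payload values are illustrative.
using var http = new HttpClient();
var body = new StringContent(
    JsonConvert.SerializeObject(new ScheduleJobRequest { Url = "some url", Filename = "dummy" }),
    Encoding.UTF8, "application/json");
var response = await http.PostAsync("http://scheduler-host/internal/v1/export", body);
var scheduled = JsonConvert.DeserializeObject<ScheduleJobResult>(await response.Content.ReadAsStringAsync());
Console.WriteLine($"Scheduled job id: {scheduled.JobId}");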
public ScheduleJobResponse ScheduleJob(ScheduleJobRequest request)
{
    Type jobType = Type.GetType(request.Type);
    if (jobType == null)
    {
        throw new ArgumentException($"JobType '{request.Type}' could not be resolved");
    }
    if (request.CalendarSchedule == null)
    {
        throw new ArgumentException("CalendarSchedule was not specified.");
    }

    return new ScheduleJobResponse
    {
        Job = new JobData(
            jobManager.JobStore.ScheduleJob(
                jobType,
                request.Data,
                request.MetaData,
                request.QueueId,
                request.CalendarSchedule.AsInternalSchedule(),
                request.Name,
                request.Description,
                request.Application,
                request.Group,
                request.AbsoluteTimeout,
                (Logic.DataModel.Jobs.JobStatus?)(int?)request.Status,
                request.CreatedDate,
                request.SuppressHistory,
                request.DeleteWhenDone)),
    };
}
public async Task CanGetExportDataFailure(string message)
{
    var customHeaders = new HeaderDictionary();
    var scheduleRequest = new ScheduleJobRequest { Url = "some url", Filename = "dummy" };
    // Dispose the stream at the end of the test even if an assertion throws.
    using var ms = new MemoryStream(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(scheduleRequest)));
    var fileStreamResult = new FileStreamResult(ms, ContentTypeConstants.ApplicationJson);
    var context = GetMockHangfireContext(typeof(ExportJobTests), TestContext.TestName, message);
    var exception = new Exception(message);

    var apiClient = new Mock<IApiClient>();
    apiClient.Setup(a => a.SendRequest<CompactionExportResult>(It.IsAny<ScheduleJobRequest>(), customHeaders))
             .Throws(exception);

    var transferProxy = new Mock<ITransferProxy>();
    transferProxy.Setup(t => t.Upload(It.IsAny<Stream>(), It.IsAny<string>())).Verifiable();
    transferProxy.Setup(t => t.Download(It.IsAny<string>())).Returns(() => Task.FromResult(fileStreamResult));

    var transferProxyFactory = new Mock<ITransferProxyFactory>();
    transferProxyFactory.Setup(x => x.NewProxy(It.IsAny<TransferProxyType>())).Returns(transferProxy.Object);

    var logger = new Mock<Logging.ILoggerFactory>();

    var exportJob = new ExportJob(apiClient.Object, transferProxyFactory.Object, logger.Object);
    await Assert.ThrowsExceptionAsync<Exception>(() => exportJob.GetExportData(Guid.NewGuid(), customHeaders, context));
}
/// <summary>
/// Send an HTTP request to the requested URL
/// </summary>
/// <param name="jobRequest">Details of the job request</param>
/// <param name="customHeaders">Custom HTTP headers for the HTTP request</param>
/// <returns>The result of the HTTP request as an instance of T</returns>
public async Task<T> SendRequest<T>(ScheduleJobRequest jobRequest, IHeaderDictionary customHeaders)
{
    var result = default(T);
    var method = new HttpMethod(jobRequest.Method ?? "GET");
    _log.LogDebug($"Job request is {JsonConvert.SerializeObject(jobRequest)}");
    try
    {
        // Merge the custom headers passed in with the HTTP request and the headers
        // requested by the scheduled job; on a clash, the web request value wins.
        foreach (var header in jobRequest.Headers)
        {
            if (!customHeaders.ContainsKey(header.Key))
            {
                customHeaders[header.Key] = header.Value;
            }
            else
            {
                _log.LogDebug($"HTTP header '{header.Key}' exists in both the web request and job request headers; using the web request value. Web request value: '{customHeaders[header.Key]}', job request value: '{header.Value}'");
            }
        }

        using var serviceScope = _scopeFactory.CreateScope();
        var webRequest = serviceScope.ServiceProvider.GetService<IWebRequest>();

        // The scheduled job request may contain encoded binary data or a standard string.
        // The two cases are handled separately, because converting binary data to a string could lose information.
        if (jobRequest.IsBinaryData)
        {
            using var ms = new MemoryStream(jobRequest.PayloadBytes);
            result = await webRequest.ExecuteRequest<T>(jobRequest.Url, ms, customHeaders, method, jobRequest.Timeout, 0);
        }
        else if (!string.IsNullOrEmpty(jobRequest.Payload))
        {
            using var ms = new MemoryStream(Encoding.UTF8.GetBytes(jobRequest.Payload));
            result = await webRequest.ExecuteRequest<T>(jobRequest.Url, ms, customHeaders, method, jobRequest.Timeout, 0);
        }
        else
        {
            // A null payload is allowed, so no stream is needed.
            result = await webRequest.ExecuteRequest<T>(jobRequest.Url, null, customHeaders, method, jobRequest.Timeout, 0);
        }

        _log.LogDebug($"Result of send request: {JsonConvert.SerializeObject(result)}");
    }
    catch (Exception ex)
    {
        var message = ex.Message;
        var stacktrace = ex.StackTrace;
        // 400 and 500 errors come through as an inner exception.
        if (ex.InnerException != null)
        {
            message = ex.InnerException.Message;
            stacktrace = ex.InnerException.StackTrace;
        }
        _log.LogWarning($"Error sending data: {message}");
        _log.LogWarning($"Stacktrace: {stacktrace}");
        throw;
    }
    return result;
}
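A minimal usage sketch for SendRequest, assuming the enclosing client is available as apiClient and that CompactionExportResult is the expected response type (as in the tests above). The URL, header name, and timeout are placeholders; Headers is set because SendRequest iterates it:

// Hypothetical call site; header name and URL are illustrative only.
var headers = new HeaderDictionary { { "X-Request-ID", Guid.NewGuid().ToString() } };
var jobRequest = new ScheduleJobRequest
{
    Url = "http://example.com/api/v2/export/machinepasses", // endpoint to call
    Method = "GET",                                          // defaults to GET when omitted
    Filename = "export",
    Timeout = 300000,                                        // milliseconds
    Headers = new HeaderDictionary()                         // merged into customHeaders above
};
var exportResult = await apiClient.SendRequest<CompactionExportResult>(jobRequest, headers);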
public async Task<ScheduleJobResult> BackgroundUpload(
    FlowFile file,
    [FromQuery] Guid projectUid,
    [FromQuery] ImportedFileType importedFileType,
    [FromQuery] DxfUnitsType dxfUnitsType,
    [FromQuery] DateTime fileCreatedUtc,
    [FromQuery] DateTime fileUpdatedUtc,
    [FromQuery] DateTime? surveyedUtc,
    [FromServices] ISchedulerProxy scheduler,
    [FromServices] ITransferProxyFactory transferProxyFactory)
{
    if (importedFileType == ImportedFileType.ReferenceSurface)
    {
        ServiceExceptionHandler.ThrowServiceException(HttpStatusCode.BadRequest, 122);
    }

    FlowJsFileImportDataValidator.ValidateUpsertImportedFileRequest(
        file, projectUid, importedFileType, dxfUnitsType, fileCreatedUtc, fileUpdatedUtc,
        UserEmailAddress, surveyedUtc, null, null);
    Logger.LogInformation(
        $"{nameof(BackgroundUpload)}: file: {file.flowFilename} path {file.path} projectUid {projectUid} ImportedFileType: {importedFileType} " +
        $"DxfUnitsType: {dxfUnitsType} surveyedUtc {(surveyedUtc == null ? "N/A" : surveyedUtc.ToString())}");

    if (string.Equals(Request.Method, HttpMethod.Post.ToString(), StringComparison.OrdinalIgnoreCase))
    {
        await ValidateFileDoesNotExist(projectUid.ToString(), file.flowFilename, importedFileType, surveyedUtc, null, null);
    }

    var s3Path = $"project/importedfile/{Guid.NewGuid()}.dat";
    using var fileStream = System.IO.File.Open(file.path, FileMode.Open, FileAccess.Read);
    var transferProxy = transferProxyFactory.NewProxy(TransferProxyType.Temporary);
    transferProxy.Upload(fileStream, s3Path);

    var baseUrl = Request.Host.ToUriComponent();
    // The QueryString already has values in it, so extra parameters are appended with '&' rather than starting a new '?'.
    var callbackUrl = $"http://{baseUrl}/internal/v6/importedfile{Request.QueryString}";
    callbackUrl += $"&filename={WebUtility.UrlEncode(file.flowFilename)}&awsFilePath={WebUtility.UrlEncode(s3Path)}";
    Logger.LogInformation($"{nameof(BackgroundUpload)}: callbackUrl {callbackUrl}");

    var executionTimeout = ConfigStore.GetValueInt("PEGASUS_EXECUTION_TIMEOUT_MINS", 5) * 60000; // minutes converted to milliseconds

    var request = new ScheduleJobRequest
    {
        Filename = file.flowFilename,
        Method = "GET", // match the internal upload method
        Url = callbackUrl,
        Timeout = executionTimeout
    };
    request.SetStringPayload(string.Empty);

    var headers = Request.Headers.GetCustomHeaders();
    return await scheduler.ScheduleBackgroundJob(request, headers);
}
public async Task Run(object o, object context)
{
    recipients = o.GetConvertedObject<string[]>();
    log.LogDebug($"Starting to process {customerProjects?.Count} projects");

    foreach (var project in customerProjects)
    {
        JobRequest jobRequest;
        try
        {
            log.LogInformation($"Processing project {project.Name}");

            // Create a relevant filter.
            var filter = await filters.CreateFilter(project.ProjectUID,
                new FilterRequest { FilterType = FilterType.Transient, FilterJson = FILTER_JSON }, headers);
            log.LogDebug($"Created filter {filter.FilterDescriptor.FilterUid}");

            // Generate the filename.
            var generatedFilename = $"{project.Name} {DateTime.UtcNow:yyyy-MM-ddTHH-mm-ss}";
            log.LogDebug($"Generated filename {generatedFilename}");

            // Generate the request URI.
            var baseUri = await serviceResolution.ResolveService("productivity3dinternal_service_public_v2");
            var requestUri = $"{baseUri.Endpoint}/api/v2/export/machinepasses?projectUid={project.ProjectUID}&filename={generatedFilename}&filterUid={filter.FilterDescriptor.FilterUid}&coordType=0&outputType=0&restrictOutput=False&rawDataOutput=False";
            log.LogDebug($"Export request url {requestUri}");

            var jobExportRequest = new ScheduleJobRequest
            {
                Url = requestUri,
                Timeout = 9000000,
                Filename = generatedFilename
            };
            jobRequest = new JobRequest
            {
                JobUid = Guid.Parse("c3cbb048-05c1-4961-a799-70434cb2f162"),
                SetupParameters = jobExportRequest,
                RunParameters = headers,
                AttributeFilters = SpecialFilters.ExportFilter
            };
        }
        catch (Exception e)
        {
            log.LogError(e, "Failed to prepare for exports");
            throw;
        }

        try
        {
            log.LogDebug($"Firing export job for project {project.Name}");
            var hangfireJobId = jobRunner.QueueHangfireJob(jobRequest, exportEmailGenerator);
            JobStorage.Current.GetConnection().SetJobParameter(hangfireJobId, Tags.PROJECTNAME_TAG, JsonConvert.SerializeObject(project.Name));
            JobStorage.Current.GetConnection().SetJobParameter(hangfireJobId, Tags.RECIPIENTS_TAG, JsonConvert.SerializeObject(recipients));
        }
        catch (Exception e)
        {
            log.LogError(e, $"Queue VSS job failed with exception {e.Message}");
            throw;
        }
    }
}
/// <summary>
/// Save the request in S3 for use in the background task, rather than in the database
/// </summary>
/// <param name="request">Request to be saved</param>
/// <returns>A Guid to be passed in to the background task</returns>
private Guid SaveRequest(ScheduleJobRequest request)
{
    var guid = Guid.NewGuid();
    var data = JsonConvert.SerializeObject(request, Formatting.None);
    var bytes = Encoding.UTF8.GetBytes(data);
    using (var ms = new MemoryStream(bytes))
    {
        _transferProxy.Upload(ms, $"{S3_SCHEDULE_SAVE_LOCATION}/{guid}");
    }
    return guid;
}
public JobRequestExtensionsTests()
{
    _scheduleJobRequest = new ScheduleJobRequest
    {
        Headers = new HeaderDictionary
        {
            { "custom-header", "some value" },
            { "Content-Type", "application/json" },
            { "Cache-Control", "none" }
        },
        Url = "url",
        Filename = "filename"
    };
    _serializedRequest = JsonConvert.SerializeObject(_scheduleJobRequest);
}
/// <summary>
/// Fetch the Schedule Job Request for a given Request ID
/// </summary>
/// <param name="requestId">Request ID returned from the SaveRequest method</param>
/// <returns>The original ScheduleJobRequest</returns>
private async Task<ScheduleJobRequest> DownloadRequest(Guid requestId)
{
    ScheduleJobRequest request;
    var fileStreamResult = await _transferProxy.Download($"{S3_SCHEDULE_SAVE_LOCATION}/{requestId}");
    using (var ms = new MemoryStream())
    {
        fileStreamResult.FileStream.CopyTo(ms);
        var data = Encoding.UTF8.GetString(ms.ToArray());
        request = JsonConvert.DeserializeObject<ScheduleJobRequest>(data);
    }
    return request;
}
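Together, SaveRequest and DownloadRequest give a simple S3-backed round trip for handing a request to a background task. A minimal sketch of the pattern, assuming _transferProxy is configured and both methods are in scope:

// Hypothetical round trip: persist the request, hand the id to the background job,
// then restore the same request on the other side.
var original = new ScheduleJobRequest { Url = "some url", Filename = "dummy" };
var requestId = SaveRequest(original);            // serializes to JSON and uploads to S3
var restored = await DownloadRequest(requestId);  // downloads and deserializes the same JSON
Console.WriteLine(restored.Url == original.Url);  // expected: True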
/// <summary>
/// Schedule an export job with the scheduler
/// </summary>
private ScheduleResult ScheduleJob(string exportDataUrl, string fileName, ISchedulerProxy scheduler, int? timeout = null)
{
    if (timeout == null)
    {
        var configStoreTimeout = ConfigStore.GetValueInt("SCHEDULED_JOB_TIMEOUT");
        timeout = configStoreTimeout > 0 ? configStoreTimeout : FIVE_MIN_SCHEDULER_TIMEOUT;
    }
    var request = new ScheduleJobRequest { Url = exportDataUrl, Filename = fileName, Timeout = timeout };

    return WithServiceExceptionTryExecute(() => new ScheduleResult
    {
        JobId = scheduler.ScheduleExportJob(request, Request.Headers.GetCustomHeaders()).Result?.JobId
    });
}
/// <inheritdoc />
public async Task<ScheduleJobResult> ScheduleBackgroundJob(ScheduleJobRequest request, IHeaderDictionary customHeaders)
{
    var jsonData = JsonConvert.SerializeObject(request);
    using (var payload = new MemoryStream(Encoding.UTF8.GetBytes(jsonData)))
    {
        // Resolves to "/internal/v1/background" via service discovery.
        var result = await SendMasterDataItemServiceDiscoveryNoCache<ScheduleJobResult>(
            "/background", customHeaders, HttpMethod.Post, payload: payload);
        if (result != null)
        {
            return result;
        }
    }

    log.LogDebug($"{nameof(ScheduleBackgroundJob)} Failed to schedule a background job");
    return null;
}
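Note that this proxy returns null on failure rather than throwing, so callers have to check the result. A sketch of a hypothetical call site, assuming the proxy is injected as scheduler:

// Hypothetical caller: surface a scheduling failure instead of silently dropping it.
var scheduled = await scheduler.ScheduleBackgroundJob(request, Request.Headers.GetCustomHeaders());
if (scheduled == null)
{
    throw new InvalidOperationException("Scheduler did not return a job id");
}
log.LogInformation($"Scheduled background job {scheduled.JobId}");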
public ScheduleJobResult StartMockExport([FromBody] ScheduleJobRequest request)
{
    var jobId = IN_PROGRESS_JOB_ID;
    if (request.Url.Contains("Test-success"))
    {
        jobId = SUCCESS_JOB_ID;
    }
    if (request.Url.Contains("Test-failed"))
    {
        jobId = FAILURE_JOB_ID;
    }
    if (request.Url.Contains("Test-timeout"))
    {
        jobId = TIMEOUT_JOB_ID;
    }
    return new ScheduleJobResult { JobId = jobId };
}
private string GetScheduledJobId(string filterUid, string filename, int timeoutMillisecs = 300000) // 5 minutes
{
    var url = $"{ts.tsCfg.vetaExportUrl}?projectUid={GOLDEN_DATA_DIMENSIONS_PROJECT_UID_1}&fileName={filename}&filterUid={filterUid}";
    var request = new ScheduleJobRequest { Url = url, Filename = filename, Timeout = timeoutMillisecs };
    Console.WriteLine($"Uri is {url}");

    var requestJson = JsonConvert.SerializeObject(request);
    var responseJson = ts.CallSchedulerWebApi("internal/v1/export", "POST", requestJson);
    Console.WriteLine($"Response from the mockApi is {responseJson}");

    var scheduleResult = JsonConvert.DeserializeObject<ScheduleJobResult>(responseJson,
        new JsonSerializerSettings { DateTimeZoneHandling = DateTimeZoneHandling.Unspecified });
    Assert.IsNotNull(scheduleResult, "Should get a schedule job response");
    Assert.IsTrue(!string.IsNullOrEmpty(scheduleResult.JobId), "Should get a job id");
    return scheduleResult.JobId;
}
public async Task<string> ExecuteExportProc(ScheduleJobRequest request, IHeaderDictionary customHeaders, PerformContext context)
{
    var result = await _apiClient.SendRequest<CompactionExportResult>(request, customHeaders);
    try
    {
        // Store the S3 key and download link as job parameters so the results can expose the final URL easily.
        if (context != null)
        {
            _log.LogInformation($"Setting export job {context.BackgroundJob.Id} downloadLink={result.DownloadLink}");
            JobStorage.Current.GetConnection().SetJobParameter(context.BackgroundJob.Id, S3_KEY_STATE_KEY, GetS3Key(context.BackgroundJob.Id, request.Filename));
            JobStorage.Current.GetConnection().SetJobParameter(context.BackgroundJob.Id, DOWNLOAD_LINK_STATE_KEY, result.DownloadLink);
        }
        return result.DownloadLink;
    }
    catch (Exception ex)
    {
        _log.LogError(ex, "Exception while setting export job parameters.");
        throw;
    }
}
private static void ScheduleJob(string name, string description, string data, string metaData, string jobType,
    TimeSpan? absoluteTimeout, byte queueId, string application, string group, bool suppressHistory,
    bool deleteWhenDone, SimpleSchedule schedule)
{
    if (!schedule.StartDailyAt.HasValue)
    {
        schedule.StartDailyAt = new TimeSpan();
    }

    var request = new ScheduleJobRequest
    {
        Application = application,
        DeleteWhenDone = deleteWhenDone,
        Description = description,
        Name = name,
        QueueId = queueId,
        Type = jobType,
        MetaData = metaData,
        Data = data,
        AbsoluteTimeout = absoluteTimeout,
        Group = group,
        CalendarSchedule = new CalendarSchedule
        {
            ScheduleType = typeof(global::BackgroundWorkerService.Logic.DataModel.Scheduling.CalendarSchedule).AssemblyQualifiedName,
            DaysOfWeek = schedule.DaysOfWeek.ToArray(),
            StartDailyAt = new TimeOfDay
            {
                Hour = schedule.StartDailyAt.Value.Hours,
                Minute = schedule.StartDailyAt.Value.Minutes,
                Second = schedule.StartDailyAt.Value.Seconds
            },
            RepeatInterval = schedule.RepeatInterval,
            EndDateTime = null,
            StartDateTime = DateTime.Now,
        },
    };

    using (var client = new AccessPointClient())
    {
        client.ScheduleJob(request);
    }
}
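A sketch of invoking the helper above. The SimpleSchedule shape is inferred only from how ScheduleJob reads it (StartDailyAt, DaysOfWeek, RepeatInterval), so the field types and all names and values here are illustrative assumptions:

// Hypothetical nightly schedule starting at 02:30; field types are inferred from usage above.
var schedule = new SimpleSchedule
{
    StartDailyAt = new TimeSpan(2, 30, 0),
    DaysOfWeek = new List<DayOfWeek> { DayOfWeek.Monday, DayOfWeek.Thursday }
};
ScheduleJob(
    name: "NightlyExport",
    description: "Nightly export of project data",
    data: "{}",
    metaData: null,
    jobType: "MyApp.Jobs.ExportJob, MyApp", // assembly-qualified name of the job to run
    absoluteTimeout: TimeSpan.FromMinutes(30),
    queueId: 0,
    application: "Scheduler",
    group: "Exports",
    suppressHistory: false,
    deleteWhenDone: true,
    schedule: schedule);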
/// <summary>
/// Queue a Scheduled Job to be run in the background
/// </summary>
/// <param name="request">Scheduled Job details</param>
/// <param name="customHeaders">Any custom headers to be passed with the Scheduled Job request</param>
/// <returns>A Job ID for the Background Job</returns>
public string QueueJob(ScheduleJobRequest request, IHeaderDictionary customHeaders)
{
    throw new NotImplementedException();
}