/// <summary>
/// Creates a new background job that will wait for a successful completion
/// of another background job to be enqueued.
/// </summary>
/// <param name="parentId">Identifier of a background job to wait completion for.</param>
/// <param name="jobType">Type of the job to instantiate and execute.</param>
/// <param name="context">The Execution context/parameters.</param>
/// <returns>Unique identifier of a created job.</returns>
protected virtual string ContinueWith(string parentId, Type jobType, object context = null)
{
    // The created instance is never used; presumably this call validates that
    // jobType is constructible so bad types fail at schedule time rather than
    // inside the worker — TODO confirm CreateJobInstance has no other side effects.
    CreateJobInstance(jobType);

    // Register the continuation; ExecuteJob resolves and runs the job when the
    // parent completes.
    var id = BackgroundJob.ContinueWith(parentId, () => ExecuteJob(jobType, context));
    return id;
}
/// <summary>
/// Creates execution records for a project's algorithms and chains them as
/// Hangfire jobs: the first runs immediately, each subsequent one runs after
/// its predecessor, and a finishing job closes out the project.
/// </summary>
/// <param name="projectAlg">Project plus the algorithms to execute.</param>
/// <param name="executedBy">User who triggered the execution.</param>
public void Run(ProjectAlgoListEntity projectAlg, string executedBy)
{
    // Nothing to do without a project or any algorithms.
    if (projectAlg == null || (projectAlg.ProjectId == 0 && projectAlg.Algos.Count == 0))
    {
        return;
    }

    // (Removed dead code: algoExecs was newed up and immediately overwritten.)
    List<ExecutionInfoEntity> algoExecs = ProjectsRepository.SetAlgoExecutions(projectAlg, executedBy);
    ExecutionInfoEntity firstAlgoExe = algoExecs.First();

    // Per-run output directory named "<projectId>_<executionId>".
    string resultPath = Path.Combine(
        ExecutionPath,
        string.Format("{0}_{1}", firstAlgoExe.ProjectId, firstAlgoExe.Id));
    Directory.CreateDirectory(resultPath);

    string backgroundJobID = BackgroundJob.Enqueue(
        () => StartExecution(firstAlgoExe, executedBy, resultPath, firstAlgoExe.Id));

    if (algoExecs.Count > 1)
    {
        for (int i = 1; i < algoExecs.Count; i++)
        {
            // Copy the element into a local so the serialized job expression
            // does not capture the loop variable.
            ExecutionInfoEntity exec = algoExecs[i];
            backgroundJobID = BackgroundJob.ContinueWith(
                backgroundJobID,
                () => StartExecution(exec, executedBy, resultPath, firstAlgoExe.Id));
        }

        // NOTE(review): the finishing job is only scheduled when there is more
        // than one algorithm — a single-algorithm project never gets
        // FinishProjectExecution. Verify whether that is intentional.
        BackgroundJob.ContinueWith(
            backgroundJobID,
            () => FinishProjectExecution(executedBy, firstAlgoExe));
    }
}
public static void MyAction()
{
    // Schedule a parent job four minutes out, then attach a continuation
    // that fires once the parent completes.
    var delayedJobId = BackgroundJob.Schedule(
        () => Console.WriteLine("!Delayed Job !!"),
        TimeSpan.FromMinutes(4));

    BackgroundJob.ContinueWith(delayedJobId, () => ProcessContinuationJobs());
}
/// <summary>
/// Buffers the request body into a DTO, queues a background transform for the
/// given template, and chains a callback job. Returns 201 Created immediately.
/// </summary>
/// <param name="template">Template file name for the transform.</param>
public HttpResponseMessage Transform(string template)
{
    try
    {
        var req = new FixedLengthDto() { FileName = template };

        using (var ms = new MemoryStream(2048))
        {
            // Synchronous copy instead of CopyToAsync(...).Wait(): blocking on
            // a Task risks deadlocks and wraps failures in AggregateException.
            Request.Body.CopyTo(ms);
            req.DataResult = ms.ToArray(); // returns base64 encoded string JSON result
        }

        // Transform in the background, then fire the callback continuation.
        var jobId = BackgroundJob.Enqueue <IFixedLengthCommand>(x => x.Transform(req));
        BackgroundJob.ContinueWith <IFixedLengthCommand>(jobId, x => x.CallBack(req));
    }
    catch (IOException)
    {
        throw new HttpRequestException(HttpStatusCode.InternalServerError.ToString());
    }

    HttpResponseMessage response = new HttpResponseMessage();
    response.StatusCode = HttpStatusCode.Created;
    return response;
}
/// <summary>
/// Sends an uploaded photo to every login in the model via the selected social
/// provider, chaining the sends as Hangfire continuations so they run one
/// after another. Returns the per-login validation results as JSON.
/// </summary>
public async Task <IActionResult> SendPhotoToUsersAsync([FromForm] FileToUsersModel model)
{
    if (!ModelState.IsValid)
    {
        return(BadRequest(ModelState));
    }

    UserCheckResultDto userCheckResult;
    List <UserCheckResultDto> userCheckResults = new List <UserCheckResultDto>();

    // Persist the upload and build the DTO shared by every send job.
    string path = await SaveFileAsync(model.File);
    ISocialProvider provider = _serviceAccessor(model.Provider);
    FileToUserDto fileToUser = new FileToUserDto
    {
        Name = model.File.FileName, SenderName = model.SenderName, Subject = model.Subject,
        Caption = model.Caption ?? "", Path = path, Priority = model.Priority
    };

    using (IEnumerator <string> enumer = model.Logins.GetEnumerator())
    {
        if (enumer.MoveNext())
        {
            // No usable parent job for this provider yet (nothing recorded, or
            // the recorded id is unknown to Hangfire storage): enqueue the
            // first send as a fresh root job.
            if (string.IsNullOrEmpty(parentIds[model.Provider]) || JobStorage.Current.GetMonitoringApi().JobDetails(parentIds[model.Provider]) == null)
            {
                userCheckResult = await provider.UserCheck(enumer.Current);
                userCheckResults.Add(userCheckResult);
                if (userCheckResult.IsValid)
                {
                    fileToUser.Login = enumer.Current;
                    parentIds[model.Provider] = BackgroundJob.Enqueue(() => provider.SendPhotoToUserAsync(fileToUser));
                }
                // Only one login supplied — done.
                if (!enumer.MoveNext())
                {
                    return(Ok(JsonConvert.SerializeObject(userCheckResults)));
                }
            }

            // Chain each remaining login as a continuation of the last job
            // recorded for this provider.
            // NOTE(review): fileToUser is one shared instance whose Login is
            // overwritten each iteration — this relies on Hangfire serializing
            // the argument at enqueue time; verify.
            do
            {
                userCheckResult = await provider.UserCheck(enumer.Current);
                userCheckResults.Add(userCheckResult);
                if (userCheckResult.IsValid)
                {
                    fileToUser.Login = enumer.Current;
                    parentIds[model.Provider] = BackgroundJob.ContinueWith(parentIds[model.Provider], () => provider.SendPhotoToUserAsync(fileToUser));
                }
            } while (enumer.MoveNext());
        }
    }

    return(Ok(JsonConvert.SerializeObject(userCheckResults)));
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
    // Point Hangfire at local SQL Server storage and host dashboard + server in-process.
    GlobalConfiguration.Configuration.UseSqlServerStorage("Server=(localdb)\\MSSQLLocalDB;Integrated Security=true;Initial Catalog=ReviewDB;");
    app.UseHangfireDashboard();
    app.UseHangfireServer();

    if (env.IsDevelopment())
    {
        app.UseDeveloperExceptionPage();
    }

    //BackgroundJob.Enqueue(() => new MovieJob().MovieJobInserted());
    BackgroundJob.Schedule(() => new MovieJob().MovieJobInserted(), TimeSpan.FromSeconds(5));
    RecurringJob.AddOrUpdate(() => Console.WriteLine("Minutely Job"), Cron.Minutely);

    // Simple continuation demo.
    var helloJobId = BackgroundJob.Enqueue(() => Console.WriteLine("Hello, "));
    BackgroundJob.ContinueWith(helloJobId, () => Console.WriteLine("world!"));

    // One of each job flavor: fire-and-forget, delayed + continuation, recurring.
    var fireForgetJobId = BackgroundJob.Enqueue(() => Console.WriteLine($"Fire and forget: {DateTime.Now}"));
    var delayedJobId = BackgroundJob.Schedule(() => Console.WriteLine($"Delayed: {DateTime.Now}"), TimeSpan.FromSeconds(30));
    BackgroundJob.ContinueWith(delayedJobId, () => Console.WriteLine($"Continuation: {DateTime.Now}"));
    RecurringJob.AddOrUpdate(() => Console.WriteLine($"Recurring: {DateTime.Now}"), Cron.Minutely);
}
public async Task <IHttpActionResult> Post()
{
    // Reject anything that is not multipart/form-data.
    if (!this.Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    var provider = new MultipartMemoryStreamProvider();
    await this.Request.Content.ReadAsMultipartAsync(provider);

    foreach (var file in provider.Contents)
    {
        var filename = file.Headers.ContentDisposition.FileName.Trim('\"');
        var buffer = await file.ReadAsByteArrayAsync();

        // Keep the untouched upload, resize in the background, and log
        // completion through a continuation.
        ImageHelper.SaveOriginal(filename, buffer);
        var resizeJobId = BackgroundJob.Enqueue(() => ImageHelper.StoreAndResize(filename));
        BackgroundJob.ContinueWith(resizeJobId, () => Debug.WriteLine("Resize complete", filename));

        // New code to enable getting current status
        nameToJobIdMapping.AddOrUpdate(filename, resizeJobId, (oldKey, oldValue) => resizeJobId);
    }

    return this.Ok();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory)
{
    if (env.IsDevelopment())
    {
        app.UseDeveloperExceptionPage();
    }
    else
    {
        app.UseExceptionHandler("/Home/Error");
        // The default HSTS value is 30 days. You may want to change this for production scenarios, see https://aka.ms/aspnetcore-hsts.
        app.UseHsts();
    }

    app.UseHangfireDashboard();
    app.UseHangfireServer();

    // Removed the unconditional UseDeveloperExceptionPage() call that used to
    // follow here: it re-enabled detailed error pages (stack traces) in
    // production, defeating the environment check above.
    app.UseDirectoryBrowser();
    app.UseStaticFiles();

    app.UseMvc(routes =>
    {
        routes.MapRoute(
            name: "default",
            template: "{controller=Home}/{action=Index}/{id?}");
    });

    // Hangfire samples: fire-and-forget, delayed, and a continuation chain.
    BackgroundJob.Enqueue(() => Console.WriteLine("Tarefa executada"));
    BackgroundJob.Schedule(() => Console.WriteLine("Atraso na execução da tarefa"), TimeSpan.FromMinutes(1));
    var id = BackgroundJob.Enqueue(() => Console.WriteLine("Hello, "));
    BackgroundJob.ContinueWith(id, () => Console.WriteLine("world!"));
}
public ContinuationsJob()
{
    // Hard-coded parent id: the continuation runs after the job with this
    // identifier completes.
    const string parentJobId = "1534353453133";

    BackgroundJob.ContinueWith(parentJobId, () => ProcessContinuationsJob());
}
static void Main(string[] args)
{
    //log.DebugFormat("{0}:我测试一下日志记录了没有!", "log4net");
    // Configure log4net logging and SQL Server storage; first run also
    // creates the Hangfire schema tables.
    GlobalConfiguration.Configuration
        .UseLog4NetLogProvider()
        .UseSqlServerStorage("Data Source=192.168.191.78;User Id=sa;Password=sa1994sa;Database=DataSample;Pooling=true;Max Pool Size=5000;Min Pool Size=0;");

    Console.WriteLine("Hangfire Server started. Press any key to exit...");
    var backgroundServer = new BackgroundJobServer();

    // Fire-and-forget: pushed onto a persistent queue so control returns to the caller immediately.
    var queuedJobId = BackgroundJob.Enqueue(() => Console.WriteLine("{0}===》这是队列任务!", DateTime.Now.ToString("HH:mm:ss")));

    // Delayed: runs once at a future point in time.
    BackgroundJob.Schedule(() => Console.WriteLine("{0}===》这是延时任务!", DateTime.Now.ToString("HH:mm:ss")), TimeSpan.FromSeconds(5));

    // Recurring: built-in schedules or CRON expressions (minimum unit: one minute).
    RecurringJob.AddOrUpdate(() => Console.WriteLine("{0}===》这是每分钟执行的任务!", DateTime.Now.ToString("HH:mm:ss")), Cron.Minutely);

    // Continuation: runs right after the parent job completes, like a .NET Task continuation.
    BackgroundJob.ContinueWith(queuedJobId, () => Console.WriteLine("{0}===》这是延续性任务!", DateTime.Now.ToString("HH:mm:ss")));

    Console.ReadKey();
}
// GET: Default
public ActionResult Index()
{
    /* SQL to clear the main HangFire data:
     * TRUNCATE TABLE HangFire.JobQueue
     * go
     * TRUNCATE TABLE HangFire.JobParameter
     * go
     * TRUNCATE TABLE HangFire.[State]
     * go
     * DELETE FROM HangFire.Job
     * go
     */
    log.DebugFormat("{0}:我测试一下日志记录了没有!", "log4net");

    #region HangFire任务
    // Fire-and-forget: pushed onto a persistent queue so control returns to the caller immediately.
    var queuedJobId = BackgroundJob.Enqueue(() => InsertData("队列任务"));
    // Delayed: runs once at a future point in time.
    BackgroundJob.Schedule(() => InsertData("延时任务"), TimeSpan.FromSeconds(10));
    // Recurring: built-in schedules or CRON expressions (minimum unit: one minute).
    RecurringJob.AddOrUpdate(() => InsertData("每分钟执行任务"), Cron.Minutely);
    // Continuation: runs right after the parent job completes, like a .NET Task continuation.
    BackgroundJob.ContinueWith(queuedJobId, () => InsertData("连续任务"));
    #endregion

    return Content("init job create ok!");
}
/// <summary>
/// Scrapes Baidu News search results for <paramref name="name"/>, inserts any
/// articles not yet in the BaiduNews table (deduplicated by URL), and
/// schedules a download continuation for each new item.
/// </summary>
public async Task Monitor(string name, PerformContext context)
{
    // Desktop Chrome user agent so Baidu serves the full result page.
    var web = new HtmlWeb() { UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36" };
    // Progress goes to the Hangfire dashboard console.
    context.WriteLine($"开始获取数据:{name}");
    // rn=50: fetch 50 results, sorted by time (clk=sortbytime).
    var doc = web.Load($"http://news.baidu.com/ns?word={name}&tn=news&sr=0&cl=2&rn=50&ct=0&clk=sortbytime");
    var elements = doc.DocumentNode.SelectNodes("//div[@class='result']");
    foreach (var element in elements)
    {
        // Headline text (line breaks stripped) and its link target.
        var title = element.SelectSingleNode("h3[@class='c-title']/a/text()").InnerText.Replace("\r", "").Replace("\n", "");
        var url = element.SelectSingleNode("h3[@class='c-title']/a").Attributes["href"].Value;
        //var author = element.SelectSingleNode(".//div/p[@class='c-author']/text()").InnerText;
        // The summary container appears under two different class layouts.
        var summary_element = element.SelectSingleNode(".//div[@class='c-summary c-row ']");
        if (summary_element == null)
        {
            summary_element = element.SelectSingleNode(".//div[@class='c-summary c-row c-gap-top-small']/div[@class='c-span18 c-span-last']");
        }
        // Remove the author/info child nodes so the remaining InnerText is
        // just the article summary.
        var author_element = summary_element.SelectSingleNode(".//p[@class='c-author']");
        var info_element = summary_element.SelectSingleNode(".//span[@class='c-info']");
        summary_element.RemoveChild(author_element);
        summary_element.RemoveChild(info_element);
        var author = author_element.InnerText;
        var summary = summary_element.InnerText.Replace("<em>", "").Replace("</em>", "").Replace(" ", "");
        var time = string.Empty;
        try
        {
            // Extracts the publish time from the author string by a fixed
            // offset past the first separator — assumes a fixed-width prefix
            // in Baidu's markup; TODO confirm this still holds.
            time = author.Substring(author.IndexOf(" ", StringComparison.Ordinal) + 12);
        }
        catch (Exception e)
        {
            context.WriteLine($"{e}");
        }
        context.WriteLine($"{title} -- {time}");
        // Skip articles already stored (deduplicate by URL).
        if (await _connection.ExecuteScalarAsync <int>(@"SELECT COUNT(1) FROM [dbo].[BaiduNews] WHERE [Url]=@Url", new { Url = url }) != 0)
        {
            continue;
        }
        await _connection.ExecuteAsync(
            @"INSERT INTO [dbo].[BaiduNews] (Keyword,Title,Url,Summary,Time) VALUES(@Keyword,@Title,@Url,@Summary,@Time)",
            new { Keyword = name, Title = title, Url = url, Summary = summary, Time = time });
        // After this Monitor job finishes, download the article body in a
        // continuation job.
        BackgroundJob.ContinueWith <NewsJob>(context.BackgroundJob.Id, job => job.Dowload(url, null));
    }
}
public ActionResult Index()
{
    // Enqueue a fire-and-forget job, then chain a continuation onto it.
    var queuedJobId = BackgroundJob.Enqueue(() => Console.WriteLine("Simple Job"));
    BackgroundJob.ContinueWith(queuedJobId, () => Console.WriteLine("world!"));

    return View();
}
public void Start()
{
    // Fire-and-forget user lookup.
    BackgroundJob.Enqueue <UserManager <User> >(m => m.FindByIdAsync("1"));

    // Delayed lookup; its id anchors the continuation below.
    string delayedJobId = BackgroundJob.Schedule <UserManager <User> >(m => m.FindByIdAsync("2"), TimeSpan.FromMinutes(2));
    BackgroundJob.ContinueWith <TestHangfireJob>(delayedJobId, m => m.GetUserCount());

    // Recurring count, every minute, in the server's local time zone.
    RecurringJob.AddOrUpdate <TestHangfireJob>(m => m.GetUserCount(), Cron.Minutely, TimeZoneInfo.Local);
}
public async Task <IActionResult> OnPostContinueJobAsync()
{
    // Capture the start time once so both mails report the same value.
    string started = DateTime.Now.ToString();

    var enqueuedJobId = BackgroundJob.Enqueue(() => MailAsync("Enqueue with", started));
    // The second mail only goes out if the first job succeeds.
    BackgroundJob.ContinueWith(enqueuedJobId, () => MailAsync("Continue", started), JobContinuationOptions.OnlyOnSucceededState);

    return Page();
}
public ActionResult FireDependent()
{
    // DoSomething(6) runs only after DoSomething(5) has finished.
    string parentJobId = BackgroundJob.Enqueue(() => DoSomething(5));
    BackgroundJob.ContinueWith(parentJobId, () => DoSomething(6));

    return View("Index");
}
public void CountTo10In5ThanRandom()
{
    // Create both job records up front.
    var countTo10Job = _calculationJobRepo.CreateJob(JobType.CountTo10);
    var countToRandomJob = _calculationJobRepo.CreateJob(JobType.CountToRandom);

    // Start the first count after a 5-second delay, then chain the second.
    var scheduledJobId = BackgroundJob.Schedule(() => CountTo(countTo10Job.Id), TimeSpan.FromSeconds(5));
    BackgroundJob.ContinueWith(scheduledJobId, () => CountTo(countToRandomJob.Id));
}
public static void ContinuationTest()
{
    // Chain: A -> B (only if A succeeded) -> C (whether B succeeded or failed).
    var firstJobId = BackgroundJob.Enqueue(() => ContinuationPartA());
    var secondJobId = BackgroundJob.ContinueWith(firstJobId, () => ContinuationPartB(), JobContinuationOptions.OnlyOnSucceededState);
    BackgroundJob.ContinueWith(secondJobId, () => ContinuationPartC(), JobContinuationOptions.OnAnyFinishedState);
}
public static void Setup(ChannelsData channelsData)
{
    // Set up the master channel first; every other channel runs strictly
    // after its predecessor completes.
    string lastJobId = BackgroundJob.Enqueue(() => SetupChannel(channelsData.MasterChannel));
    foreach (ChannelData channelData in channelsData.Channels)
    {
        lastJobId = BackgroundJob.ContinueWith(lastJobId, () => SetupChannel(channelData));
    }
}
public ContinuationsJob()
{
    // Parent: a delayed job that fires in four minutes.
    var delayedJobId = BackgroundJob.Schedule(() => Console.WriteLine("I am a Delayed Job !!"), TimeSpan.FromMinutes(4));

    // Child: runs once the parent has completed.
    BackgroundJob.ContinueWith(delayedJobId, () => ProcessContinuationsJob());
}
/// <inheritdoc />
public Task Invoke(PerformContext context)
{
    // Chain the scraper jobs off the current job in strict order:
    // relic rewards -> prime items -> blueprints.
    string relicJobId = BackgroundJob.ContinueWith <IRelicRewardsScraperJob>(context.BackgroundJob.Id, job => job.Invoke(null));
    string primeJobId = BackgroundJob.ContinueWith <IPrimeItemsScraperJob>(relicJobId, job => job.Invoke(null));
    BackgroundJob.ContinueWith <IBlueprintScraperJob>(primeJobId, job => job.Invoke(null));

    return Task.CompletedTask;
}
/// <summary>
/// Submits a build for background execution: splits the configuration into
/// target groups, enqueues one build job per group with a status-poll
/// continuation, records the targets, and marks the build Enqueued.
/// </summary>
public void SubmitBuildExecute(
    int buildId,
    Hashtable configurationData,
    string configurationPackageName,
    string configurationVersion,
    string certificateThumbprint)
{
    // Normalize the incoming configuration once. (The original called
    // ReparseJson twice in a row; the second call was redundant.)
    configurationData = this.ReparseJson(configurationData);

    var build = this.BuildRepository.Find(buildId);
    if (build == null)
    {
        throw new Exception("Could not find build with Id " + buildId + " in the database.");
    }

    var targets = this.SplitConfigurationData(configurationData, configurationPackageName, configurationVersion);

    foreach (var targetGroup in targets)
    {
        // One background execution per target group; the poll continuation
        // runs whether the build job succeeds or fails.
        var jobId = BackgroundJob.Enqueue(
            () => this.ExecuteBuild(
                JobCancellationToken.Null,
                targetGroup.ConfigurationData,
                build.Id,
                targetGroup.ConfigurationPackageName,
                targetGroup.ConfigurationPackageVersion,
                certificateThumbprint));

        BackgroundJob.ContinueWith(
            jobId,
            () => this.PollBuildStatus(JobCancellationToken.Null, build.Id),
            JobContinuationOptions.OnAnyFinishedState);

        foreach (var target in targetGroup.Targets)
        {
            // NOTE(review): Hangfire job ids are not guaranteed to be numeric;
            // Convert.ToInt32(jobId) throws for non-numeric ids — verify
            // against the storage provider in use.
            build.Targets.Add(
                new BuildTarget
            {
                Build = build,
                JobId = Convert.ToInt32(jobId),
                Status = BuildStatus.Enqueued,
                Target = target,
                ConfigurationPackageName = targetGroup.ConfigurationPackageName,
                ConfigurationPackageVersion = targetGroup.ConfigurationPackageVersion,
                CertificateThumbprint = certificateThumbprint
            });
        }
    }

    build.Status = BuildStatus.Enqueued;
    this.Context.SaveChanges();
    this.Logging.SubmittedBuildRequest(build);
}
/// <summary>
/// Converts a FeedingSource into a persisted Source: copies the file payload
/// and metadata, carries over authors and owning entities, links the two
/// records, then queues an OCR scan followed by an indexing continuation.
/// Returns the new Source, or null when the feeding source does not exist or
/// has no file data.
/// </summary>
public Source FeedSource(int feedingSourceId)
{
    FeedingSource fs = this.GetFeedingSource(feedingSourceId);

    if (fs != null && fs.FileData != null)
    {
        // Copy the file payload and descriptive metadata onto a new Source.
        Source s = new Source();
        s.SourceName = fs.Name;
        s.SourcePath = fs.Name;
        s.SourceDate = DateTime.Now;
        s.FileExtension = FileUtil.GetExtension(fs.Name);
        s.FileData = fs.FileData;
        s.IsRestricted = fs.Restricted;
        s.IsReadOnly = fs.IsReadOnly;
        s.Notes = "Fed from FeedingSourceID=" + fs.Id;
        if (!string.IsNullOrEmpty(fs.UploadNotes))
        {
            s.Notes += "\n\n" + fs.UploadNotes;
        }
        s.FileDateTimeStamp = fs.FileModifiedDateTime;

        // Carry over authorship and ownership associations.
        foreach (SourceAuthor a in fs.SourceAuthors)
        {
            s.AddSourceAuthor(a);
        }
        foreach (SourceOwningEntity e in fs.SourceOwningEntities)
        {
            s.AddSourceOwningEntity(e);
        }

        // persist Source
        s = this.sourceTasks.SaveSource(s);

        // link FeedingSource with Source
        fs.Source = s;
        s.AddFeedingSource(fs);

        // persist the join
        fs = this.SaveFeedingSource(fs);

        // queue ocr scan of new source
        var jobId = BackgroundJob.Enqueue <ISourceContentTasks>(x => x.OcrScanAndSetSourceQueueable(s.Id));

        // queue indexing of the new Source, to run after the OCR job.
        // NOTE(review): the argument expressions (author list, owning
        // entities, file size lookup) appear to be evaluated here at enqueue
        // time rather than when the job runs — verify.
        BackgroundJob.ContinueWith <ISourceTasks>(jobId, x => x.IndexSourceQueueable(s.Id,
            s.HasUploadedBy() ? s.GetUploadedBy().UserID : string.Empty,
            s.SourceAuthors.Select(y => y.Author).ToList(),
            s.SourceOwningEntities.Select(y => y.Name).ToList(),
            s.JhroCase != null ? s.JhroCase.CaseNumber : string.Empty,
            this.sourceTasks.GetSourceDTO(s.Id).FileSize)
        );

        return(s);
    }

    return(null);
}
public ActionResult Index()
{
    ViewBag.Title = "Home Page";

    // Continuation demo: "world!" is written only after "Hello, " has run.
    var helloJobId = BackgroundJob.Enqueue(() => Debug.WriteLine("Hello, "));
    BackgroundJob.ContinueWith(helloJobId, () => Debug.WriteLine("world!"));

    return View();
}
// GET: api/Books
public IEnumerable <Book> GetBooks()
{
    // Next free id, derived from the current maximum.
    long lastId = db.Books.Max(b => b.Id) + 1;

    // Insert one book in the background, then a second once the first is done.
    var firstInsertJobId = BackgroundJob.Enqueue(() => _unitOfWork.BooksRepository.Insert(BuildBookFom(lastId, "Teste_" + DateTime.Now, 1991, 10.00, "Comedy", null, 4)));
    BackgroundJob.ContinueWith(firstInsertJobId, () => _unitOfWork.BooksRepository.Insert(BuildBookFom(lastId + 1, "Teste_" + DateTime.Now, 1992, 11.00, "Comedy", null, 4)));

    return _unitOfWork.BooksRepository.GetAll().Include(b => b.Author).ToList();
}
private string StartTraining(int id, string lastJob)
{
    // Paths to the TensorFlow training script and this item's data directory.
    string scriptPath = Server.MapPath("~/tf_model/research/object_detection/train.py");
    string scriptArgs = "--logtostderr --train_dir=trainingoutput/ --pipeline_config_path=training/faster_rcnn_inception_v2_coco.config";
    string workingDir = Server.MapPath(String.Format("~/Storage/{0}/data", id));

    //run_python_cmd_async(cmd, args, brandName, workingDir);
    // Training starts only after the previous pipeline stage (lastJob) finishes.
    return BackgroundJob.ContinueWith(lastJob, () => TrainJob(scriptPath, scriptArgs, id, workingDir, JobCancellationToken.Null));
}
public void Set()
{
    // One of each job flavor: fire-and-forget, delayed, and daily recurring.
    BackgroundJob.Enqueue(() => CreateText(User.Identity.Name, DateTime.Now));
    BackgroundJob.Schedule(() => CreateText("Delayed", DateTime.Now), TimeSpan.FromDays(1));
    RecurringJob.AddOrUpdate(() => CreateText("Daily Job", DateTime.Now), Cron.Daily);

    // Continuation sample: "world!" only after "Hello, " completes.
    var helloJobId = BackgroundJob.Enqueue(() => CreateText("Hello, ", DateTime.Now));
    BackgroundJob.ContinueWith(helloJobId, () => CreateText("world!", DateTime.Now));
}
public string Get()
{
    Console.WriteLine($"Request: {DateTime.Now}");

    // One of each job flavor: fire-and-forget, delayed + continuation, recurring.
    var fireForgetJobId = BackgroundJob.Enqueue(() => Debug.WriteLine($"Fire and forget: {DateTime.Now}"));
    var delayedJobId = BackgroundJob.Schedule(() => Debug.WriteLine($"Delayed: {DateTime.Now}"), TimeSpan.FromSeconds(30));
    BackgroundJob.ContinueWith(delayedJobId, () => Debug.WriteLine($"Continuation: {DateTime.Now}"));
    RecurringJob.AddOrUpdate(() => Debug.WriteLine($"Recurring: {DateTime.Now}"), Cron.Minutely);

    return "Jobs criados com sucesso!";
}
public void Schedule(string jobId, Expression <Action> methodCall, DateTime delay)
{
    // Schedule the task, then remove our bookkeeping entry once it has run
    // successfully.
    var hangfireJobId = BackgroundJob.Schedule(methodCall, delay);
    BackgroundJob.ContinueWith(hangfireJobId, () => RemoveJob(jobId, false), JobContinuationOptions.OnlyOnSucceededState);

    // Map the caller's job id to Hangfire's internal id.
    AddJob(hangfireJobId, jobId);
}
/// <summary>
/// Queues export of the trained inference graph as a continuation of
/// <paramref name="lastJob"/> and returns the new Hangfire job id.
/// </summary>
/// <param name="id">Storage id whose data directory is the working directory.</param>
/// <param name="lastJob">Identifier of the job this export must run after.</param>
/// <param name="numSteps">Training checkpoint step to export. Generalized from
/// the previously hard-coded local constant; the default of 10000 preserves
/// the original behavior.</param>
private string ExportGraph(int id, string lastJob, int numSteps = 10000)
{
    string cmd = Server.MapPath("~/tf_model/research/object_detection/export_inference_graph.py");
    string workingDir = Server.MapPath(String.Format("~/Storage/{0}/data", id));
    string args = String.Format("--input_type image_tensor --pipeline_config_path training/faster_rcnn_inception_v2_coco.config --trained_checkpoint_prefix trainingoutput/model.ckpt-{0} --output_directory graph", numSteps.ToString());

    // Export runs only after the previous pipeline stage (lastJob) finishes.
    var jobId = BackgroundJob.ContinueWith(lastJob, () => ExportGraphJob(cmd, args, id, workingDir, JobCancellationToken.Null));
    return jobId;
}