public IEnumerable<string> GetDirectoriesToScan(JobDataMap mergedJobDataMap) { List<string> directoriesToScan = new List<string>(); string dirName = mergedJobDataMap.GetString(DirectoryScanJob.DirectoryName); string dirNames = mergedJobDataMap.GetString(DirectoryScanJob.DirectoryNames); if (dirName == null && dirNames == null) { throw new JobExecutionException($"The parameter '{DirectoryScanJob.DirectoryName}' or '{DirectoryScanJob.DirectoryNames}' " + "is required and was not found in merged JobDataMap"); } /* If the user supplied both DirectoryScanJob.DirectoryName and DirectoryScanJob.DirectoryNames, then just use both. The directory names will be 'distincted' by the caller. */ if (dirName != null) { directoriesToScan.Add(dirName); } if (dirNames != null) { directoriesToScan.AddRange( dirNames.Split(new[] {";"}, StringSplitOptions.RemoveEmptyEntries)); } return directoriesToScan; }
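The same splitting logic can be exercised from the configuration side. A minimal scheduling sketch, assuming the Quartz.NET DirectoryScanJob and the DirectoryName/DirectoryNames constants referenced above; the paths are placeholders, and several directories travel as one semicolon-separated value that GetDirectoriesToScan pulls apart:

using Quartz;

// DirectoryScanJob ships with Quartz.NET (its namespace/package varies by version).
IJobDetail scanJob = JobBuilder.Create<DirectoryScanJob>()
    .WithIdentity("directoryScan")
    // Hypothetical folders; a single path could equally be set via DirectoryScanJob.DirectoryName.
    .UsingJobData(DirectoryScanJob.DirectoryNames, @"C:\inbox;C:\staging")
    .Build();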
public void TestSetObjectPropsPrimitives() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap.Put("intValue", 1); jobDataMap.Put("longValue", 2L); jobDataMap.Put("floatValue", 3.0f); jobDataMap.Put("doubleValue", 4.0); jobDataMap.Put("booleanValue", true); jobDataMap.Put("shortValue", 5); jobDataMap.Put("charValue", 'a'); jobDataMap.Put("byteValue", 6); jobDataMap.Put("stringValue", "S1"); Dictionary<string, string> map = new Dictionary<string, string>(); map.Add("A", "B"); jobDataMap.Put("mapValue", map); TestObject myObject = new TestObject(); factory.SetObjectProperties(myObject, jobDataMap); Assert.AreEqual(1, myObject.IntValue); Assert.AreEqual(2, myObject.LongValue); Assert.AreEqual(3.0f, myObject.FloatValue); Assert.AreEqual(4.0, myObject.DoubleValue); Assert.IsTrue(myObject.BooleanValue); Assert.AreEqual(5, myObject.ShortValue); Assert.AreEqual('a', myObject.CharValue); Assert.AreEqual((byte) 6, myObject.ByteValue); Assert.AreEqual("S1", myObject.StringValue); Assert.IsTrue(myObject.MapValue.ContainsKey("A")); }
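On the read side, JobDataMap's typed getters convert the stored values back without manual casting. A small sketch using the same key names as the test above and only the standard accessors (GetInt, GetBoolean, GetString):

using Quartz;

JobDataMap map = new JobDataMap();
map.Put("intValue", 1);
map.Put("booleanValue", true);
map.Put("stringValue", "S1");

int intValue = map.GetInt("intValue");               // 1
bool booleanValue = map.GetBoolean("booleanValue");  // true
string stringValue = map.GetString("stringValue");   // "S1"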
public void TestSerializeJobData() { StdAdoDelegate del = new StdAdoDelegate(LogManager.GetLogger(GetType()), "QRTZ_", "TESTSCHED", "INSTANCE", new DbProvider("SqlServer-20", ""), new SimpleTypeLoadHelper()); JobDataMap jdm = new JobDataMap(); del.SerializeJobData(jdm); jdm.Clear(); jdm.Put("key", "value"); jdm.Put("key2", null); del.SerializeJobData(jdm); jdm.Clear(); jdm.Put("key1", "value"); jdm.Put("key2", null); jdm.Put("key3", new NonSerializableTestClass()); try { del.SerializeJobData(jdm); Assert.Fail(); } catch (SerializationException e) { Assert.IsTrue(e.Message.IndexOf("key3") >= 0); } }
public JobRunContextE(BatchTrigger trigger, BatchJobDetail jobDetail, JobDataMap jobDataMap, IWindsorContainer container) : base(trigger, jobDetail, jobDataMap, container) { this.trigger = trigger; this.jobDetail = jobDetail; this.jobDataMap = jobDataMap; this.container = container; }
public void TestSetObjectPropsUnknownProperty() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap.Put("bogusValue", 1); try { factory.SetObjectProperties(new TestObject(), jobDataMap); Assert.Fail(); } catch (SchedulerException) { } }
public void TestSetObjectPropsCharStringTooShort() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap.Put("charValue", ""); try { factory.SetObjectProperties(new TestObject(), jobDataMap); Assert.Fail(); } catch (SchedulerException) { } }
/// <summary> /// Get the object to serialize when generating serialized file for future /// tests, and against which to validate deserialized object. /// </summary> /// <returns></returns> protected override object GetTargetObject() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap.Put("A", "B"); SimpleTriggerImpl t = new SimpleTriggerImpl("SimpleTrigger", "SimpleGroup", "JobName", "JobGroup", StartTime, EndTime, 5, TimeSpan.FromSeconds(1)); t.CalendarName = "MyCalendar"; t.Description = "SimpleTriggerDesc"; t.JobDataMap = jobDataMap; t.MisfireInstruction = (MisfireInstruction.SimpleTrigger.RescheduleNextWithRemainingCount); return t; }
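The same trigger can also be assembled with the fluent TriggerBuilder API; a sketch under the assumption that a current Quartz.NET version is available, reusing the StartTime/EndTime values from the test fixture above:

using System;
using Quartz;

ITrigger trigger = TriggerBuilder.Create()
    .WithIdentity("SimpleTrigger", "SimpleGroup")
    .ForJob("JobName", "JobGroup")
    .WithDescription("SimpleTriggerDesc")
    .ModifiedByCalendar("MyCalendar")
    .StartAt(StartTime)
    .EndAt(EndTime)
    .WithSimpleSchedule(x => x
        .WithInterval(TimeSpan.FromSeconds(1))
        .WithRepeatCount(5)
        .WithMisfireHandlingInstructionNextWithRemainingCount())
    .UsingJobData("A", "B")   // ends up in the trigger's JobDataMap, like the map in GetTargetObject
    .Build();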
public void TestSerializeJobData() { var args = new DelegateInitializationArgs(); args.Logger = LogManager.GetLogger(GetType()); args.TablePrefix = "QRTZ_"; args.InstanceName = "TESTSCHED"; args.InstanceId = "INSTANCE"; args.DbProvider = new DbProvider("SqlServer-20", ""); args.TypeLoadHelper = new SimpleTypeLoadHelper(); args.ObjectSerializer = new DefaultObjectSerializer(); var del = new StdAdoDelegate(); del.Initialize(args); var jdm = new JobDataMap(); del.SerializeJobData(jdm); jdm.Clear(); jdm.Put("key", "value"); jdm.Put("key2", null); del.SerializeJobData(jdm); jdm.Clear(); jdm.Put("key1", "value"); jdm.Put("key2", null); jdm.Put("key3", new NonSerializableTestClass()); try { del.SerializeJobData(jdm); Assert.Fail(); } catch (SerializationException e) { Assert.IsTrue(e.Message.IndexOf("key3") >= 0); } }
public object Deserialize(global::MongoDB.Bson.IO.BsonReader bsonReader, Type nominalType, Type actualType, IBsonSerializationOptions options) { if (nominalType != typeof(JobDataMap) || actualType != typeof(JobDataMap)) { var message = string.Format("Can't deserialize a {0} with {1}.", nominalType.FullName, this.GetType().Name); throw new BsonSerializationException(message); } var bsonType = bsonReader.CurrentBsonType; if (bsonType == BsonType.Document) { JobDataMap item = new JobDataMap(); bsonReader.ReadStartDocument(); while (bsonReader.ReadBsonType() != BsonType.EndOfDocument) { string key = bsonReader.ReadName(); object value = BsonSerializer.Deserialize<object>(bsonReader); item.Add(key, value); } bsonReader.ReadEndDocument(); return item; } else if (bsonType == BsonType.Null) { bsonReader.ReadNull(); return null; } else { var message = string.Format("Can't deserialize a {0} from BsonType {1}.", nominalType.FullName, bsonType); throw new BsonSerializationException(message); } }
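For this serializer to be picked up, it has to be registered with the driver before any JobDataMap is read or written. A one-line sketch, assuming the class above is named JobDataMapSerializer (its name is not shown in the snippet):

using MongoDB.Bson.Serialization;
using Quartz;

// Hypothetical class name for the serializer implemented above.
BsonSerializer.RegisterSerializer(typeof(JobDataMap), new JobDataMapSerializer());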
public void TestSetObjectPropsFromStrings() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap.Put("intValue", "1"); jobDataMap.Put("longValue", "2"); jobDataMap.Put("floatValue", "3.0"); jobDataMap.Put("doubleValue", "4.0"); jobDataMap.Put("booleanValue", "true"); jobDataMap.Put("shortValue", "5"); jobDataMap.Put("charValue", "a"); jobDataMap.Put("byteValue", "6"); TestObject myObject = new TestObject(); factory.SetObjectProperties(myObject, jobDataMap); Assert.AreEqual(1, myObject.IntValue); Assert.AreEqual(2L, myObject.LongValue); Assert.AreEqual(3.0f, myObject.FloatValue); Assert.AreEqual(4.0, myObject.DoubleValue); Assert.AreEqual(true, myObject.BooleanValue); Assert.AreEqual(5, myObject.ShortValue); Assert.AreEqual('a', myObject.CharValue); Assert.AreEqual((byte) 6, myObject.ByteValue); }
private static async Task Start(ScheduleView view, PluginLoadContext lc, Action <Guid, DateTime?> callBack) { //throw new SchedulerException("SchedulerException"); //Create the task instance in the plugin load context and keep it in the job; this is the instance that ultimately executes the task TaskBase instance = AssemblyHelper.CreateTaskInstance(lc, view.Schedule.Id, view.Schedule.AssemblyName, view.Schedule.ClassName); if (instance == null) { throw new InvalidCastException($"Failed to create the task instance; make sure the target task derives from TaskBase. Assembly: {view.Schedule.AssemblyName}, type: {view.Schedule.ClassName}"); } // instance.logger = new LogWriter(); JobDataMap map = new JobDataMap { new KeyValuePair <string, object> ("domain", lc), new KeyValuePair <string, object> ("instance", instance), new KeyValuePair <string, object> ("name", view.Schedule.Title), new KeyValuePair <string, object> ("params", ConvertParamsJson(view.Schedule.CustomParamsJson)), new KeyValuePair <string, object> ("keepers", view.Keepers), new KeyValuePair <string, object> ("children", view.Children) }; IJobDetail job = JobBuilder.Create <RootJob>() .WithIdentity(view.Schedule.Id.ToString()) .UsingJobData(map) //.UsingJobData("assembly", task.AssemblyName) //.UsingJobData("class", task.ClassName) .Build(); //Add the trigger _scheduler.ListenerManager.AddJobListener(new JobRunListener(view.Schedule.Id.ToString(), callBack), KeyMatcher <JobKey> .KeyEquals(new JobKey(view.Schedule.Id.ToString()))); if (view.Schedule.RunLoop) { if (!CronExpression.IsValidExpression(view.Schedule.CronExpression)) { throw new Exception("Invalid cron expression"); } CronTriggerImpl trigger = new CronTriggerImpl { CronExpressionString = view.Schedule.CronExpression, Name = view.Schedule.Title, Key = new TriggerKey(view.Schedule.Id.ToString()), Description = view.Schedule.Remark }; if (view.Schedule.StartDate.HasValue) { trigger.StartTimeUtc = TimeZoneInfo.ConvertTimeToUtc(view.Schedule.StartDate.Value); } if (view.Schedule.EndDate.HasValue) { trigger.EndTimeUtc = TimeZoneInfo.ConvertTimeToUtc(view.Schedule.EndDate.Value); } await _scheduler.ScheduleJob(job, trigger); } else { DateTimeOffset start = TimeZoneInfo.ConvertTimeToUtc(DateTime.Now); if (view.Schedule.StartDate.HasValue) { start = TimeZoneInfo.ConvertTimeToUtc(view.Schedule.StartDate.Value); } DateTimeOffset end = start.AddMinutes(1); if (view.Schedule.EndDate.HasValue) { end = TimeZoneInfo.ConvertTimeToUtc(view.Schedule.EndDate.Value); } ITrigger trigger = TriggerBuilder.Create() .WithIdentity(view.Schedule.Id.ToString()) .StartAt(start) .WithSimpleSchedule(x => x .WithRepeatCount(1).WithIntervalInMinutes(1)) .EndAt(end) .Build(); await _scheduler.ScheduleJob(job, trigger); } LogHelper.Info($"Task [{view.Schedule.Title}] started successfully!", view.Schedule.Id); _ = Task.Run(() => { while (true) { var log = instance.ReadLog(); if (log != null) { LogManager.Queue.Write(new SystemLogEntity { Category = log.Category, Message = log.Message, ScheduleId = log.ScheduleId, Node = log.Node, StackTrace = log.StackTrace, TraceId = log.TraceId, CreateTime = log.CreateTime }); } else { Thread.Sleep(3000); } } }); }
/// <summary> /// Calls the equivalent method on the 'proxied' <see cref="QuartzScheduler" />. /// </summary> public virtual void TriggerJob(JobKey jobKey, JobDataMap data) { sched.TriggerJob(jobKey, data); }
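Callers typically use this pass-through to fire a job immediately with one-off parameters. A small usage sketch against the same synchronous API, assuming scheduler holds an IScheduler; the job key and values are placeholders:

using Quartz;

JobDataMap oneOff = new JobDataMap();
oneOff.Put("fileName", "report.csv");   // hypothetical parameter consumed by the job

// The map is merged into the JobDataMap seen by that single execution.
scheduler.TriggerJob(new JobKey("exportJob", "reports"), oneOff);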
private void ConvertParameters(JobDataMap data) { foreach (var item in data) { Log.Debug($"Key: [{item.Key}], Value: [{item.Value}]"); } var error = false; if (data.ContainsKey("username")) { username = data.Get("username").ToString(); } if (string.IsNullOrEmpty(username)) { error = true; Log.Debug("Parameter [username] not configured"); } if (data.ContainsKey("password")) { password = data.Get("password").ToString(); } if (string.IsNullOrEmpty(password)) { error = true; Log.Debug("Parameter [password] not configured"); } if (data.ContainsKey("urlAPI")) { urlAPI = data.Get("urlAPI").ToString(); } if (string.IsNullOrEmpty(urlAPI)) { error = true; Log.Debug("Parameter [urlAPI] not configured"); } if (data.ContainsKey("urlCLIENT")) { urlCLIENT = data.Get("urlCLIENT").ToString(); } if (string.IsNullOrEmpty(urlCLIENT)) { error = true; Log.Debug("Parameter [urlCLIENT] not configured"); } if (data.ContainsKey("CartellaStampeLink")) { CartellaStampeLink = data.Get("CartellaStampeLink").ToString(); } if (string.IsNullOrEmpty(CartellaStampeLink)) { error = true; Log.Debug("Parameter [CartellaStampeLink] not configured"); } if (data.ContainsKey("NumMaxTentativi")) { NumMaxTentativi = data.Get("NumMaxTentativi").ToString(); } if (string.IsNullOrEmpty(NumMaxTentativi)) { error = true; Log.Debug("Parameter [NumMaxTentativi] not configured"); } if (data.ContainsKey("CartellaLavoroTemporanea")) { CartellaLavoroTemporanea = data.Get("CartellaLavoroTemporanea").ToString(); } if (string.IsNullOrEmpty(CartellaLavoroTemporanea)) { error = true; Log.Debug("Parameter [CartellaLavoroTemporanea] not configured"); } if (data.ContainsKey("CartellaLavoroStampe")) { CartellaLavoroStampe = data.Get("CartellaLavoroStampe").ToString(); } if (string.IsNullOrEmpty(CartellaLavoroStampe)) { error = true; Log.Debug("Parameter [CartellaLavoroStampe] not configured"); } if (data.ContainsKey("SMTP")) { SMTP = data.Get("SMTP").ToString(); } if (string.IsNullOrEmpty(SMTP)) { error = true; Log.Debug("Parameter [SMTP] not configured"); } if (data.ContainsKey("EmailFrom")) { EmailFrom = data.Get("EmailFrom").ToString(); } if (string.IsNullOrEmpty(EmailFrom)) { error = true; Log.Debug("Parameter [EmailFrom] not configured"); } if (data.ContainsKey("RootRepository")) { RootRepository = data.Get("RootRepository").ToString(); } if (string.IsNullOrEmpty(RootRepository)) { error = true; Log.Debug("Parameter [RootRepository] not configured"); } if (data.ContainsKey("InvioNotifiche")) { InvioNotifiche = Convert.ToBoolean(Convert.ToInt16(data.Get("InvioNotifiche"))); } Log.Debug($"Parameter [InvioNotifiche] = {InvioNotifiche}"); if (error) { throw new Exception("Missing configuration parameters."); } }
public void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.JobDetail.JobDataMap; var rockContext = new RockContext(); InteractionChannelService channelService = new InteractionChannelService(rockContext); InteractionComponentService componentService = new InteractionComponentService(rockContext); InteractionService interactionService = new InteractionService(rockContext); ScheduleService scheduleService = new ScheduleService(rockContext); LocationService locationService = new LocationService(rockContext); AttendanceService attendanceService = new AttendanceService(rockContext); AttendanceOccurrenceService attendanceOccurrenceService = new AttendanceOccurrenceService(rockContext); GroupService groupService = new GroupService(rockContext); PersonAliasService personAliasService = new PersonAliasService(rockContext); // Load the channel InteractionChannelCache channel = InteractionChannelCache.Get(dataMap.GetString("InteractionChannel").AsGuid()); // Setup int campusLocationTypeId = DefinedValueCache.Get(Rock.SystemGuid.DefinedValue.LOCATION_TYPE_CAMPUS).Id; var groupType = GroupTypeCache.Get(dataMap.GetString("GroupType").AsGuid()); var groupLocations = groupService.GetByGroupTypeId(groupType.Id).Where(g => g.IsActive == true).SelectMany(g => g.GroupLocations).ToList(); string operation = !string.IsNullOrWhiteSpace(dataMap.GetString("Operation")) ? dataMap.GetString("Operation") : null; //Create a nested campus list Dictionary <int, int> nestedCampuses; var nestedCampusDT = DefinedTypeCache.Get(Constants.DEFINED_TYPE_NESTED_CAMPUSES); if (nestedCampusDT != null) { nestedCampuses = nestedCampusDT.DefinedValues .ToDictionary( v => CampusCache.Get(v.GetAttributeValue(Constants.DEFINED_VALUE_ATTRIBUTE_PARENT_CAMPUS)).Id, v => CampusCache.Get(v.GetAttributeValue(Constants.DEFINED_VALUE_ATTRIBUTE_CHILD_CAMPUS)).Id); } else { nestedCampuses = new Dictionary <int, int>(); } // Fetch the job so we can get the last run date/time int jobId = Convert.ToInt16(context.JobDetail.Description); var jobService = new ServiceJobService(rockContext); var job = jobService.Get(jobId); DateTime lastRun = job?.LastSuccessfulRunDateTime ?? DateTime.MinValue; var componentCampusMapping = dataMap.GetString("ComponentCampusMapping").AsDictionaryOrNull(); DateRange dateRange = Rock.Web.UI.Controls.SlidingDateRangePicker.CalculateDateRangeFromDelimitedValues(dataMap.GetString("DateRange") ?? 
"-1||"); // Flip the component campus mapping around and translate to ids Dictionary <int, List <int> > campusComponentIds = new Dictionary <int, List <int> >(); foreach (CampusCache campus in CampusCache.All()) { var componentNames = componentCampusMapping.Where(ccm => ccm.Value == campus.Name).Select(c => c.Key.ToLower()); campusComponentIds[campus.Id] = componentService.Queryable().Where(cs => componentNames.Contains(cs.Name.ToLower()) && cs.ChannelId == channel.Id).Select(c => c.Id).ToList(); } foreach (GroupLocation gl in groupLocations) { if (gl.Group.CampusId.HasValue) { Location location = gl.Location; List <int> componentIds = campusComponentIds[gl.Group.CampusId.Value]; foreach (Schedule schedule in gl.Schedules) { var occurrences = schedule.GetOccurrences(dateRange.Start.Value, dateRange.End.Value); foreach (var occurrence in occurrences) { DateTime startDate = occurrence.Period.StartTime.Value; DateTime endDate = occurrence.Period.EndTime.Value; var peopleAttended = interactionService.Queryable().Where( i => componentIds.Contains(i.InteractionComponentId) && i.InteractionDateTime <= endDate && i.InteractionEndDateTime >= startDate && i.PersonAliasId != null && (i.CreatedDateTime > lastRun || i.PersonalDevice.ModifiedDateTime > lastRun || i.PersonalDevice.CreatedDateTime > lastRun) && (operation == null || i.Operation == operation) ).Select(i => i.PersonAliasId).Distinct(); int newAttendance = 0; var occurrenceModel = attendanceOccurrenceService.Get(occurrence.Period.StartTime.Value.Date, gl.GroupId, location.Id, schedule.Id); // Make sure we don't already have an attendance Record var existingAttendees = attendanceOccurrenceService.Queryable().Where(ao => DbFunctions.TruncateTime(ao.OccurrenceDate) == occurrence.Period.StartTime.Value.Date && ao.ScheduleId == schedule.Id && ao.GroupId == gl.GroupId && ao.LocationId == location.Id).SelectMany(a => a.Attendees).Where(a => a.DidAttend == true).Select(a => a.PersonAliasId); foreach (int personAliasId in peopleAttended.Except(existingAttendees)) { Attendance attendance; // Check to see if an occurrence exists already if (occurrenceModel == null) { attendance = attendanceService.AddOrUpdate(personAliasId, occurrence.Period.StartTime.Value, gl.GroupId, location.Id, schedule.Id, gl.Group.CampusId); attendance.EndDateTime = occurrence.Period?.EndTime?.Value; attendance.DidAttend = true; attendance.CampusId = gl.Group.CampusId; occurrenceModel = attendance.Occurrence; } else { attendance = new Attendance(); attendance.PersonAliasId = personAliasId; attendance.OccurrenceId = occurrenceModel.Id; attendance.StartDateTime = occurrence.Period.StartTime.Value; attendance.EndDateTime = occurrence.Period?.EndTime?.Value; attendance.DidAttend = true; attendance.CampusId = gl.Group.CampusId; attendanceService.Add(attendance); } if (gl.Group.CampusId.HasValue && nestedCampuses.ContainsKey(gl.Group.CampusId.Value)) { var campusId = personAliasService.Queryable() .Where(a => a.Id == personAliasId) .Select(a => a.Person.PrimaryCampusId) .FirstOrDefault(); if (campusId == nestedCampuses[gl.Group.CampusId.Value]) { attendance.CampusId = campusId; } } newAttendance++; } if (newAttendance > 0) { rockContext.SaveChanges(); context.Result += string.Format("{0} people attended {1} on {2}.\n", newAttendance, gl.Group.Campus.Name, occurrence.Period.StartTime.Value.ToString("MM/dd/yyyy h:mm tt")); } } } } } }
/// <summary> /// Executes the specified context. /// </summary> /// <param name="context">The context.</param> public virtual void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.JobDetail.JobDataMap; var expireDays = dataMap.GetString("ExpireDate").AsIntegerOrNull() ?? 1; int remindersSent = 0; using (var rockContext = new RockContext()) { DateTime now = RockDateTime.Now; DateTime expireDate = now.AddDays(expireDays * -1); foreach (var instance in new RegistrationInstanceService(rockContext) .Queryable("RegistrationTemplate,Registrations") .Where(i => i.IsActive && i.RegistrationTemplate.IsActive && i.RegistrationTemplate.ReminderEmailTemplate != "" && !i.ReminderSent && i.SendReminderDateTime.HasValue && i.SendReminderDateTime <= now && i.SendReminderDateTime >= expireDate) .ToList()) { var template = instance.RegistrationTemplate; foreach (var registration in instance.Registrations .Where(r => !r.IsTemporary && r.ConfirmationEmail != null && r.ConfirmationEmail != "")) { var mergeFields = new Dictionary <string, object>(); mergeFields.Add("RegistrationInstance", registration.RegistrationInstance); mergeFields.Add("Registration", registration); var emailMessage = new RockEmailMessage(); emailMessage.AdditionalMergeFields = mergeFields; emailMessage.AddRecipient(new RecipientData(registration.ConfirmationEmail, mergeFields)); emailMessage.FromEmail = template.ReminderFromEmail; emailMessage.FromName = template.ReminderFromName; emailMessage.Subject = template.ReminderSubject; emailMessage.Message = template.ReminderEmailTemplate; emailMessage.Send(); } instance.SendReminderDateTime = now; instance.ReminderSent = true; remindersSent++; rockContext.SaveChanges(); } if (remindersSent == 0) { context.Result = "No reminders to send"; } else if (remindersSent == 1) { context.Result = "1 reminder was sent"; } else { context.Result = string.Format("{0} reminders were sent", remindersSent); } } }
/// <summary> /// Get the object to serialize when generating serialized file for future /// tests, and against which to validate deserialized object. /// </summary> /// <returns></returns> protected override object GetTargetObject() { JobDataMap m = new JobDataMap(); m.Put("key", 5); return m; }
protected virtual string GetOptionalParameter(JobDataMap data, string propertyName) { string value = data.GetString(propertyName); if (string.IsNullOrEmpty(value)) { return null; } return value; }
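The required-parameter counterpart called elsewhere in this class (see BuildMessageFromParameters further below) is not shown; a hedged sketch of its likely shape, where a missing value aborts the job instead of returning null:

protected virtual string GetRequiredParameter(JobDataMap data, string propertyName)
{
    string value = data.GetString(propertyName);
    if (string.IsNullOrEmpty(value))
    {
        // Assumption: throwing here is what lets callers treat the returned value as always usable.
        throw new ArgumentException($"Required parameter '{propertyName}' not found in JobDataMap");
    }
    return value;
}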
/// <summary> /// Job that executes routine Rock cleanup tasks /// /// Called by the <see cref="IScheduler" /> when a /// <see cref="ITrigger" /> fires that is associated with /// the <see cref="IJob" />. /// </summary> public virtual void Execute(IJobExecutionContext context) { // get the job map JobDataMap dataMap = context.JobDetail.JobDataMap; // delete accounts that have not been confirmed in X hours int userExpireHours = Int32.Parse(dataMap.GetString("HoursKeepUnconfirmedAccounts")); DateTime userAccountExpireDate = DateTime.Now.Add(new TimeSpan(userExpireHours * -1, 0, 0)); var userLoginService = new UserLoginService(); foreach (var user in userLoginService.Queryable().Where(u => u.IsConfirmed == false && u.CreationDateTime < userAccountExpireDate).ToList()) { userLoginService.Delete(user, null); userLoginService.Save(user, null); } // purge exception log int exceptionExpireDays = Int32.Parse(dataMap.GetString("DaysKeepExceptions")); DateTime exceptionExpireDate = DateTime.Now.Add(new TimeSpan(exceptionExpireDays * -1, 0, 0, 0)); ExceptionLogService exceptionLogService = new ExceptionLogService(); foreach (var exception in exceptionLogService.Queryable().Where(e => e.ExceptionDateTime < exceptionExpireDate).ToList()) { exceptionLogService.Delete(exception, null); exceptionLogService.Save(exception, null); } // purge audit log int auditExpireDays = Int32.Parse(dataMap.GetString("AuditLogExpirationDays")); DateTime auditExpireDate = DateTime.Now.Add(new TimeSpan(auditExpireDays * -1, 0, 0, 0)); AuditService auditService = new AuditService(); foreach (var audit in auditService.Queryable().Where(a => a.DateTime < auditExpireDate).ToList()) { auditService.Delete(audit, null); auditService.Save(audit, null); } // clean the cached file directory //get the attributes string cacheDirectoryPath = dataMap.GetString("BaseCacheDirectory"); int cacheExpirationDays = int.Parse(dataMap.GetString("DaysKeepCachedFiles")); DateTime cacheExpirationDate = DateTime.Now.Add(new TimeSpan(cacheExpirationDays * -1, 0, 0, 0)); //if job is being run by the IIS scheduler and path is not null if (context.Scheduler.SchedulerName == "RockSchedulerIIS" && !String.IsNullOrEmpty(cacheDirectoryPath)) { //get the physical path of the cache directory cacheDirectoryPath = System.Web.Hosting.HostingEnvironment.MapPath(cacheDirectoryPath); } //if directory is not blank and cache expiration date not in the future if (!String.IsNullOrEmpty(cacheDirectoryPath) && cacheExpirationDate <= DateTime.Now) { //Clean cache directory CleanCacheDirectory(cacheDirectoryPath, cacheExpirationDate); } // clean out any temporary binary files BinaryFileService binaryFileService = new BinaryFileService(); foreach (var binaryFile in binaryFileService.Queryable().Where(bf => bf.IsTemporary == true).ToList()) { if (binaryFile.LastModifiedDateTime < DateTime.Now.AddDays(-1)) { binaryFileService.Delete(binaryFile, null); binaryFileService.Save(binaryFile, null); } } }
/// <summary> /// Called by the <see cref="T:Quartz.IScheduler"/> when a <see cref="T:Quartz.ITrigger"/> /// fires that is associated with the <see cref="T:Quartz.IJob"/>. /// </summary> /// <remarks> /// The implementation may wish to set a result object on the /// JobExecutionContext before this method exits. The result itself /// is meaningless to Quartz, but may be informative to /// <see cref="T:Quartz.IJobListener"/>s or /// <see cref="T:Quartz.ITriggerListener"/>s that are watching the job's /// execution. /// </remarks> /// <param name="context">The execution context.</param> public void Execute(IJobExecutionContext context) { _logger = LogManager.GetCurrentClassLogger(); if (_broker == null) { Log(LogLevel.Error, "Data Update Job failed: broker not set."); return; } JobDataMap dataMap = context.JobDetail.JobDataMap; DataUpdateJobSettings settings; try { settings = JsonConvert.DeserializeObject <DataUpdateJobSettings>((string)dataMap["settings"]); } catch (Exception e) { _logger.Error(e, "Failed to deserialize data update job settings"); return; } Log(LogLevel.Info, string.Format("Data Update job {0} triggered.", settings.Name)); //Multiple jobs may be called simultaneously, so what we do is seed the Random based on the job name byte[] bytes = new byte[settings.Name.Length * sizeof(char)]; Buffer.BlockCopy(settings.Name.ToCharArray(), 0, bytes, 0, bytes.Length); Random r = new Random((int)DateTime.Now.TimeOfDay.TotalSeconds ^ BitConverter.ToInt32(bytes, 0)); _requesterID = "DataUpdateJob" + r.Next(); //we use this ID to identify this particular data update job List <Instrument> instruments = settings.UseTag ? _instrumentManager.FindInstruments(x => x.Tags.Any(y => y.ID == settings.TagID)).Result : _instrumentManager.FindInstruments(x => x.ID == settings.InstrumentID).Result; if (instruments.Count == 0) { Log(LogLevel.Error, string.Format("Aborting data update job {0}: no instruments found.", settings.Name)); return; } _broker.HistoricalDataArrived += _broker_HistoricalDataArrived; _broker.Error += _broker_Error; int counter = 1; //What we do here: we check what we have available locally.. 
//If there is something, we send a query to grab data between the last stored time and "now" //Otherwise we send a query to grab everything since 1900 foreach (Instrument i in instruments) { if (!i.ID.HasValue) { continue; } //don't request data on expired securities unless the expiration was recent if (i.Expiration.HasValue && (DateTime.Now - i.Expiration.Value).TotalDays > 15) { Log(LogLevel.Trace, string.Format("Data update job {0}: ignored instrument w/ ID {1} due to expiration date.", settings.Name, i.ID)); continue; } DateTime startingDT = new DateTime(1900, 1, 1); var storageInfo = _localStorage.GetStorageInfo(i.ID.Value); if (storageInfo.Any(x => x.Frequency == settings.Frequency)) { var relevantStorageInfo = storageInfo.First(x => x.Frequency == settings.Frequency); startingDT = relevantStorageInfo.LatestDate; } DateTime endDt = DateTime.Now; //try to get the current time in the instrument's exchange timezone string timeZone = i?.Exchange?.Timezone; if (!string.IsNullOrEmpty(timeZone)) { try { var tz = TimeZoneInfo.FindSystemTimeZoneById(timeZone); endDt = TimeZoneInfo.ConvertTimeFromUtc(DateTime.UtcNow, tz); } catch (Exception e) { _logger.Error(e, "Could not find timezone " + timeZone); } } var req = new HistoricalDataRequest( i, settings.Frequency, startingDT, endDt, dataLocation: DataLocation.ExternalOnly, saveToLocalStorage: true, rthOnly: true, requestID: counter) { RequesterIdentity = _requesterID }; try { _broker.RequestHistoricalData(req); lock (_reqIDLock) { _pendingRequests.Add(req); } } catch (Exception ex) { _errors.Add(ex.Message); } counter++; } Stopwatch sw = new Stopwatch(); sw.Start(); //loop until time runs out or all requests are completed while (_pendingRequests.Count > 0 && sw.ElapsedMilliseconds < _settings.Timeout * 1000) { Thread.Sleep(100); } JobComplete(); Log(LogLevel.Info, string.Format("Data Update job {0} completed.", settings.Name)); }
public async Task Start() { Log.Initialize(); _jobLogic = new JobLogic(); _triggerLogic = new TriggerLogic(); Jobs = _jobLogic.appoggio; Triggers = _triggerLogic.appoggio; // write code here that runs when the Windows Service starts up. var props = new NameValueCollection { { "quartz.serializer.type", "binary" } }; var factory = new StdSchedulerFactory(props); scheduler = await factory.GetScheduler(); try { //clear all await scheduler.Clear(); // and start it off if (!scheduler.IsStarted) { await scheduler.Start(); } foreach (var itemTrigger in Triggers) { var itemJob = Jobs.FirstOrDefault(x => x.name == itemTrigger.jobname); var assembly = Assembly.LoadFrom( ConfigurationSettings.AppSettings["PathCustomJobs"] + "/" + itemJob.entrypoint); var type = assembly.GetTypes()[0]; //load the job's parameters var dictionaryDataMap = new Dictionary <string, string>(); foreach (var entry in itemJob.parameters) { dictionaryDataMap.Add(entry.Key, entry.Value); } var jobDataMap = new JobDataMap(dictionaryDataMap); var job = JobBuilder.Create(type) .WithIdentity(itemTrigger.jobname, "group1") .SetJobData(jobDataMap) .Build(); var trigger = TriggerBuilder.Create() .WithIdentity(itemTrigger.name, "group1") .StartNow() .WithCronSchedule(itemTrigger.cronexpression) .Build(); //Tell quartz to schedule the job using our trigger await scheduler.ScheduleJob(job, trigger); } // some sleep to show what's happening await Task.Delay(TimeSpan.FromSeconds(60)); //60 } catch (SchedulerException se) { Log.Error("SchedulerService", se); } catch (System.Reflection.ReflectionTypeLoadException exRef) { Log.Error("SchedulerService", exRef); } }
public string RunJob(ref JobDataMap dataMap, string jobName, string id, string taskName) { return null; }
/// <summary>Process all transactions (payments) from Service Reef.</summary> /// <param name="message">The message that is returned depending on the result.</param> /// <param name="state">The state of the process.</param> /// <returns><see cref="WorkerResultStatus"/></returns> public void Execute(IJobExecutionContext context) { RockContext dbContext = new RockContext(); FinancialBatchService financialBatchService = new FinancialBatchService(dbContext); PersonService personService = new PersonService(dbContext); PersonAliasService personAliasService = new PersonAliasService(dbContext); FinancialAccountService financialAccountService = new FinancialAccountService(dbContext); FinancialAccountService accountService = new FinancialAccountService(dbContext); FinancialTransactionService financialTransactionService = new FinancialTransactionService(dbContext); FinancialGatewayService financialGatewayService = new FinancialGatewayService(dbContext); DefinedValueService definedValueService = new DefinedValueService(dbContext); DefinedTypeService definedTypeService = new DefinedTypeService(dbContext); TransactionService transactionService = new TransactionService(new PayPalReporting.Data.PayPalReportingContext()); // Get the datamap for loading attributes JobDataMap dataMap = context.JobDetail.JobDataMap; String warnings = string.Empty; FinancialBatch batch = null; Double totalAmount = 0; var total = 1; var processed = 0; try { DateRange dateRange = Rock.Web.UI.Controls.SlidingDateRangePicker.CalculateDateRangeFromDelimitedValues(dataMap.GetString("DateRange") ?? "-1||"); String SRApiKey = Encryption.DecryptString(dataMap.GetString("ServiceReefAPIKey")); String SRApiSecret = Encryption.DecryptString(dataMap.GetString("ServiceReefAPISecret")); String SRApiUrl = dataMap.GetString("ServiceReefAPIURL"); DefinedValueCache transactionSource = DefinedValueCache.Get(dataMap.GetString("TransactionSource").AsGuid(), dbContext); DefinedValueCache connectionStatus = DefinedValueCache.Get(dataMap.GetString("ConnectionStatus").AsGuid(), dbContext); DefinedValueCache contribution = DefinedValueCache.Get(Rock.SystemGuid.DefinedValue.TRANSACTION_TYPE_CONTRIBUTION); // Setup some lookups DefinedTypeCache creditCards = DefinedTypeCache.Get(Rock.SystemGuid.DefinedType.FINANCIAL_CREDIT_CARD_TYPE.AsGuid(), dbContext); DefinedTypeCache tenderType = DefinedTypeCache.Get(Rock.SystemGuid.DefinedType.FINANCIAL_CURRENCY_TYPE.AsGuid(), dbContext); FinancialAccount specialFund = accountService.Get(dataMap.GetString("Account").AsGuid()); FinancialGateway gateway = financialGatewayService.Get(dataMap.GetString("FinancialGateway").AsGuid()); List <FinancialAccount> trips = financialAccountService.Queryable().Where(fa => fa.ParentAccountId == specialFund.Id).OrderBy(fa => fa.Order).ToList(); // Get the trips DefinedValueCache serviceReefAccountType = DefinedValueCache.Get(dataMap.Get("ServiceReefAccountType").ToString().AsGuid()); // Setup the ServiceReef API Client var client = new RestClient(SRApiUrl); client.Authenticator = new HMACAuthenticator(SRApiKey, SRApiSecret); // Get all payments from ServiceReef var request = new RestRequest("v1/payments", Method.GET); request.AddParameter("pageSize", 100); if (dateRange.Start.HasValue) { request.AddParameter("startDate", dateRange.Start.Value.ToString("o")); } if (dateRange.End.HasValue) { request.AddParameter("endDate", dateRange.End.Value.ToString("o")); } request.AddParameter("page", 1); while (total > processed) { var response = client.Execute <Contracts.Payments>(request); if 
(response.StatusCode != System.Net.HttpStatusCode.OK) { throw new Exception("ServiceReef API Response: " + response.StatusDescription + " Content Length: " + response.ContentLength); } if (response.Data != null && response.Data.PageInfo != null) { total = response.Data.PageInfo.TotalRecords; foreach (Contracts.Payments.Result result in response.Data.Results) { // Process the transaction if (result.PaymentProcessorTransactionId != null) { if (result.FirstName == null || result.LastName == null) { warnings += "Missing Firstname/Lastname for ServiceReef transaction Id: " + result.TransactionId + Environment.NewLine; processed++; continue; } FinancialTransaction tran = financialTransactionService.Queryable().Where(tx => tx.TransactionCode == result.PaymentProcessorTransactionId).FirstOrDefault(); // We haven't processed this before so get busy! if (tran == null) { var glCode = result.EventCode; glCode = glCode.IsNotNullOrWhiteSpace() ? glCode : "[No GL Code]"; if (glCode.Length > 50) { glCode = glCode.Substring(0, 50); } FinancialAccount trip = null; // Make sure we have a sub-account to go with this transaction if (result.EventId > 0) { trip = trips.Where(t => t.GlCode == glCode && t.Url == result.EventUrl).FirstOrDefault(); } if (trip == null) { if (result.EventCode == null) { warnings += "Event Code is missing on the Service Reef Trip for ServiceReef transaction Id: " + result.TransactionId + Environment.NewLine; processed++; continue; } // Create the trip subaccount FinancialAccount tripFA = new FinancialAccount(); tripFA.Name = result.EventName; // Name is limited to 50 if (tripFA.Name.Length > 50) { tripFA.Name = tripFA.Name.Substring(0, 50); } tripFA.Description = "Service Reef Event. Name: " + result.EventName + " ID: " + result.EventId; tripFA.GlCode = glCode; tripFA.Url = result.EventUrl; tripFA.PublicName = result.EventName; // Public Name is limited to 50 if (tripFA.PublicName.Length > 50) { tripFA.PublicName = tripFA.PublicName.Substring(0, 50); } tripFA.IsTaxDeductible = true; tripFA.IsPublic = false; tripFA.ParentAccountId = specialFund.Id; tripFA.Order = specialFund.Order + 1; tripFA.AccountTypeValueId = serviceReefAccountType.Id; // Figure out what order it should be; foreach (FinancialAccount tmpTrip in trips) { if (tmpTrip.Name.CompareTo(tripFA.Name) < 0) { tripFA.Order++; } } financialAccountService.Add(tripFA); // Now save the trip dbContext.SaveChanges(); // Increment all the rest of the Orders financialAccountService.Queryable().Where(fa => fa.Order >= tripFA.Order && fa.Id != tripFA.Id).ToList().ForEach(c => c.Order++); dbContext.SaveChanges(); trips = financialAccountService.Queryable().Where(fa => fa.ParentAccountId == specialFund.Id).OrderBy(fa => fa.Order).ToList(); trip = tripFA; } tran = new FinancialTransaction(); tran.FinancialPaymentDetail = new FinancialPaymentDetail(); if (result.Type == "CreditCard") { tran.FinancialPaymentDetail.CurrencyTypeValueId = tenderType.DefinedValues.Where(t => t.Value == "Credit Card").FirstOrDefault().Id; } else { tran.TransactionTypeValueId = tenderType.DefinedValues.Where(t => t.Value == "Credit Card").FirstOrDefault().Id; } Person person = null; // Find the person this transaction belongs to // 1. 
First start by determining whether this was a person // paying their application fee or contributing to themselves // because then we can just use their member info if (result.UserId > 0 && result.DonatedToUserId == result.UserId && result.DonatedToFirstName == result.FirstName && result.DonatedToLastName == result.LastName) { var memberRequest = new RestRequest("v1/members/{userId}", Method.GET); memberRequest.AddUrlSegment("userId", result.UserId.ToString()); var memberResult = client.Execute <Contracts.Member>(memberRequest); if (memberResult.Data != null && memberResult.Data.ArenaId > 0) { try { Person personMatch = personAliasService.Queryable().Where(pa => pa.AliasPersonId == memberResult.Data.ArenaId).Select(pa => pa.Person).FirstOrDefault(); if (personMatch == null) { throw new Exception("Person not found: " + memberResult.Data.ArenaId); } person = personMatch; } catch (Exception e) { warnings += "Loading the person failed transaction id " + result.TransactionId + " for " + result.FirstName + " " + result.LastName + " with the following error: " + e.Message + Environment.NewLine; processed++; continue; } } } // 2. If we didn't get a person match via their Alias Id // then just use the standard person match logic if (person == null) { String street1 = null; String postalCode = null; if (result.Address != null) { street1 = result.Address.Address1; postalCode = result.Address.Zip; } List <Person> matches = personService.GetByMatch(result.FirstName.Trim(), result.LastName.Trim(), null, result.Email, null, street1, postalCode).ToList(); if (matches.Count > 1) { // Find the oldest member record in the list person = matches.Where(p => p.ConnectionStatusValue.Value == "Member").OrderBy(p => p.Id).FirstOrDefault(); if (person == null) { // Find the oldest attendee record in the list person = matches.Where(p => p.ConnectionStatusValue.Value == "Attendee").OrderBy(p => p.Id).FirstOrDefault(); if (person == null) { person = matches.OrderBy(p => p.Id).First(); } } } else if (matches.Count == 1) { person = matches.First(); } else { // Create the person person = new Person(); person.FirstName = result.FirstName.Trim(); person.LastName = result.LastName.Trim(); if (result.Email.IsValidEmail()) { person.Email = result.Email.Trim(); } person.RecordTypeValueId = DefinedValueCache.Get(Rock.SystemGuid.DefinedValue.PERSON_RECORD_TYPE_PERSON.AsGuid()).Id; person.ConnectionStatusValueId = connectionStatus.Id; person.RecordStatusValueId = DefinedValueCache.Get(Rock.SystemGuid.DefinedValue.PERSON_RECORD_STATUS_ACTIVE.AsGuid()).Id; Group family = PersonService.SaveNewPerson(person, dbContext); GroupLocation location = new GroupLocation(); location.GroupLocationTypeValueId = DefinedValueCache.Get(Rock.SystemGuid.DefinedValue.GROUP_LOCATION_TYPE_HOME).Id; location.Location = new Location() { Street1 = result.Address.Address1, Street2 = result.Address.Address2, City = result.Address.City, State = result.Address.State, PostalCode = result.Address.Zip, Country = result.Address.Country }; family.CampusId = CampusCache.All().FirstOrDefault().Id; family.GroupLocations.Add(location); dbContext.SaveChanges(); } } // Get details about the transaction from our PayPal report table Transaction tx = transactionService.Get(result.PaymentProcessorTransactionId); if (tx != null) { if (tx.TenderType.Contains("ACH")) { result.Type = "ACH"; result.Method = null; } else { result.Type = "Credit Card"; result.Method = tx.TenderType; } } else { // Defaults result.Type = "Credit Card"; result.Method = "Visa"; warnings += "Unable to 
find transaction in _org_secc_PaypalReporting_Transaction table: " + result.TransactionId + Environment.NewLine; } // If we don't have a batch, create one if (batch == null) { batch = new FinancialBatch(); batch.BatchStartDateTime = result.Date; batch.BatchEndDateTime = DateTime.Now; batch.Name = "Service Reef Payments"; batch.Status = BatchStatus.Open; financialBatchService.Add(batch); dbContext.SaveChanges(); } // Complete the FinancialTransaction tran.AuthorizedPersonAliasId = person.PrimaryAliasId; tran.BatchId = batch.Id; tran.Summary = "F" + specialFund.Id + ":$" + result.Amount.ToString(); tran.TransactionDateTime = result.Date; tran.FinancialGatewayId = gateway.Id; FinancialTransactionDetail financialTransactionDetail = new FinancialTransactionDetail(); financialTransactionDetail.AccountId = trip.Id; financialTransactionDetail.Amount = result.Amount.ToString().AsDecimal(); tran.TransactionDetails.Add(financialTransactionDetail); tran.TransactionTypeValueId = contribution.Id; tran.FinancialPaymentDetail = new FinancialPaymentDetail(); tran.FinancialPaymentDetail.CurrencyTypeValueId = tenderType.DefinedValues.Where(type => type.Value.ToLower() == result.Type.ToLower()).FirstOrDefault().Id; if (result.Method != null) { tran.FinancialPaymentDetail.CreditCardTypeValueId = creditCards.DefinedValues.Where(card => card.Value.ToLower() == result.Method.ToLower()).FirstOrDefault().Id; } tran.TransactionCode = result.PaymentProcessorTransactionId; tran.SourceTypeValueId = transactionSource.Id; financialTransactionService.Add(tran); dbContext.SaveChanges(); totalAmount += result.Amount; } } processed++; } } else { total = 0; } // Update the page number for the next request var pageParam = request.Parameters.Where(p => p.Name == "page").FirstOrDefault(); pageParam.Value = ( int )pageParam.Value + 1; } } catch (Exception ex) { throw new Exception("ServiceReef Job Failed", ex); } finally { if (batch != null && totalAmount > 0) { batch.ControlAmount = ( Decimal )totalAmount; } dbContext.SaveChanges(); } if (warnings.Length > 0) { throw new Exception(warnings); } context.Result = "Successfully imported " + processed + " transactions."; }
/// <summary> /// Called by the <see cref="IScheduler" /> when a <see cref="ITrigger" /> /// fires that is associated with the <see cref="IJob" />. /// </summary> /// <param name="context">The execution context.</param> /// <remarks> /// The implementation may wish to set a result object on the /// JobExecutionContext before this method exits. The result itself /// is meaningless to Quartz, but may be informative to /// <see cref="IJobListener" />s or /// <see cref="ITriggerListener" />s that are watching the job's /// execution. /// </remarks> public virtual void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.JobDetail.JobDataMap; int resendDays = dataMap.GetString("ResendInviteAfterNumberDays").AsIntegerOrNull() ?? 5; int maxInvites = dataMap.GetString("MaxInvites").AsIntegerOrNull() ?? 2; int checkDays = dataMap.GetString("CheckForSignatureDays").AsIntegerOrNull() ?? 30; string folderPath = System.Web.Hosting.HostingEnvironment.MapPath("~/App_Data/Cache/SignNow"); var errorMessages = new List <string>(); int signatureRequestsSent = 0; int documentsUpdated = 0; // Send documents using (var rockContext = new RockContext()) { var maxInviteDate = RockDateTime.Today.AddDays(0 - resendDays); var maxCheckDays = RockDateTime.Today.AddDays(0 - checkDays); var docTypeService = new SignatureDocumentTemplateService(rockContext); var docService = new SignatureDocumentService(rockContext); // Check for status updates foreach (var document in new SignatureDocumentService(rockContext).Queryable() .Where(d => ( d.Status == SignatureDocumentStatus.Sent || (d.Status == SignatureDocumentStatus.Signed && !d.BinaryFileId.HasValue) ) && d.LastInviteDate.HasValue && d.LastInviteDate.Value > maxCheckDays) .ToList()) { var updateErrorMessages = new List <string>(); var status = document.Status; int?binaryFileId = document.BinaryFileId; if (docTypeService.UpdateDocumentStatus(document, folderPath, out updateErrorMessages)) { if (status != document.Status || !binaryFileId.Equals(document.BinaryFileId)) { rockContext.SaveChanges(); documentsUpdated++; } } else { errorMessages.AddRange(updateErrorMessages); } } // Send any needed signature requests var docsSent = new Dictionary <int, List <int> >(); foreach (var gm in new GroupMemberService(rockContext).Queryable() .Where(m => m.GroupMemberStatus == GroupMemberStatus.Active && m.Group.IsActive && m.Person.Email != null && m.Person.Email != "" && m.Group.RequiredSignatureDocumentTemplate != null && !m.Group.RequiredSignatureDocumentTemplate.Documents.Any(d => d.AppliesToPersonAlias.PersonId == m.PersonId && d.Status == SignatureDocumentStatus.Signed ) ) .Select(m => new { GroupName = m.Group.Name, Person = m.Person, DocumentType = m.Group.RequiredSignatureDocumentTemplate }) .ToList()) { if (docsSent.ContainsKey(gm.Person.Id)) { if (docsSent[gm.Person.Id].Contains(gm.DocumentType.Id)) { continue; } else { docsSent[gm.Person.Id].Add(gm.DocumentType.Id); } } else { docsSent.Add(gm.Person.Id, new List <int> { gm.DocumentType.Id }); } var document = docService.Queryable() .Where(d => d.SignatureDocumentTemplateId == gm.DocumentType.Id && d.AppliesToPersonAlias.PersonId == gm.Person.Id && d.AssignedToPersonAlias.PersonId == gm.Person.Id && d.Status != SignatureDocumentStatus.Signed ) .OrderByDescending(d => d.CreatedDateTime) .FirstOrDefault(); if (document == null || (document.InviteCount < maxInvites && document.LastInviteDate < maxInviteDate)) { string documentName = string.Format("{0}_{1}", gm.GroupName.RemoveSpecialCharacters(), 
gm.Person.FullName.RemoveSpecialCharacters()); var sendErrorMessages = new List <string>(); if (document != null) { docTypeService.SendDocument(document, gm.Person.Email, out sendErrorMessages); } else { docTypeService.SendDocument(gm.DocumentType, gm.Person, gm.Person, documentName, gm.Person.Email, out sendErrorMessages); } if (!errorMessages.Any()) { rockContext.SaveChanges(); signatureRequestsSent++; } else { errorMessages.AddRange(sendErrorMessages); } } } } if (errorMessages.Any()) { throw new Exception("One or more exceptions occurred processing signature documents..." + Environment.NewLine + errorMessages.AsDelimited(Environment.NewLine)); } context.Result = string.Format("{0} signature requests sent; {1} existing document's status updated", signatureRequestsSent, documentsUpdated); }
protected virtual MailMessage BuildMessageFromParameters(JobDataMap data) { string to = GetRequiredParameter(data, PropertyRecipient); string from = GetRequiredParameter(data, PropertySender); string subject = GetRequiredParameter(data, PropertySubject); string message = GetRequiredParameter(data, PropertyMessage); string cc = GetOptionalParameter(data, PropertyCcRecipient); string replyTo = GetOptionalParameter(data, PropertyReplyTo); string encoding = GetOptionalParameter(data, PropertyEncoding); MailMessage mailMessage = new MailMessage(); mailMessage.To.Add(to); if (!string.IsNullOrEmpty(cc)) { mailMessage.CC.Add(cc); } mailMessage.From = new MailAddress(from); if (!string.IsNullOrEmpty(replyTo)) { #if NET_40 mailMessage.ReplyToList.Add(new MailAddress(replyTo)); #else mailMessage.ReplyTo = new MailAddress(replyTo); #endif } mailMessage.Subject = subject; mailMessage.Body = message; if (!string.IsNullOrEmpty(encoding)) { var encodingToUse = Encoding.GetEncoding(encoding); mailMessage.BodyEncoding = encodingToUse; mailMessage.SubjectEncoding = encodingToUse; } return mailMessage; }
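Populating the map that feeds BuildMessageFromParameters then amounts to setting the Property* keys when the job is scheduled. A sketch that assumes the surrounding class is Quartz.NET's SendMailJob with publicly visible constants; PropertySmtpHost is an extra assumption not shown above:

using Quartz;

IJobDetail mailJob = JobBuilder.Create<SendMailJob>()
    .WithIdentity("nightlyReportMail")
    .UsingJobData(SendMailJob.PropertySmtpHost, "smtp.example.com")     // assumed constant
    .UsingJobData(SendMailJob.PropertySender, "scheduler@example.com")
    .UsingJobData(SendMailJob.PropertyRecipient, "ops@example.com")
    .UsingJobData(SendMailJob.PropertySubject, "Nightly report")
    .UsingJobData(SendMailJob.PropertyMessage, "See the attached metrics.")
    .Build();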
public void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.MergedJobDataMap; Console.WriteLine(DateTime.Now.ToString()); }
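MergedJobDataMap is the union of the job detail's map and the firing trigger's map, with trigger entries overriding job entries on the same key, which is why jobs usually read the merged view rather than JobDetail.JobDataMap directly. A sketch with placeholder names (HelloJob stands in for any IJob implementation):

using Quartz;

IJobDetail job = JobBuilder.Create<HelloJob>()
    .WithIdentity("hello")
    .UsingJobData("greeting", "hello from the job")
    .Build();

ITrigger trigger = TriggerBuilder.Create()
    .WithIdentity("helloTrigger")
    .StartNow()
    .UsingJobData("greeting", "hello from the trigger")   // wins in the merged map
    .Build();

// Inside Execute: context.MergedJobDataMap.GetString("greeting") == "hello from the trigger"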
/// <summary> /// Job that sends payment reminders for registrations with an outstanding balance. /// /// Called by the <see cref="IScheduler" /> when a /// <see cref="ITrigger" /> fires that is associated with /// the <see cref="IJob" />. /// </summary> public virtual void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.JobDetail.JobDataMap; // get registrations where // + template is active // + instance is active // + template has a number of days between reminders // + template has fields needed to send a reminder email // + the registration has a cost // + the registration has been closed within the last xx days (to prevent eternal nagging) using (RockContext rockContext = new RockContext()) { int sendCount = 0; int registrationInstanceCount = 0; var publicAppRoot = GlobalAttributesCache.Get().GetValue("PublicApplicationRoot"); RegistrationService registrationService = new RegistrationService(rockContext); var currentDate = RockDateTime.Today; var cutoffDays = dataMap.GetString("CutoffDate").AsIntegerOrNull() ?? 30; // Do not filter registrations by template or instance cost, it will miss $0 registrations that have optional fees. var registrations = registrationService.Queryable("RegistrationInstance") .Where(r => r.RegistrationInstance.RegistrationTemplate.IsActive && r.RegistrationInstance.IsActive == true && (r.RegistrationInstance.RegistrationTemplate.PaymentReminderTimeSpan != null && r.RegistrationInstance.RegistrationTemplate.PaymentReminderTimeSpan != 0) && r.RegistrationInstance.RegistrationTemplate.PaymentReminderEmailTemplate != null && r.RegistrationInstance.RegistrationTemplate.PaymentReminderEmailTemplate.Length > 0 && r.RegistrationInstance.RegistrationTemplate.PaymentReminderFromEmail != null && r.RegistrationInstance.RegistrationTemplate.PaymentReminderFromEmail.Length > 0 && r.RegistrationInstance.RegistrationTemplate.PaymentReminderSubject != null && r.RegistrationInstance.RegistrationTemplate.PaymentReminderSubject.Length > 0 && (r.RegistrationInstance.EndDateTime == null || currentDate <= System.Data.Entity.SqlServer.SqlFunctions.DateAdd("day", cutoffDays, r.RegistrationInstance.EndDateTime))) .ToList(); registrationInstanceCount = registrations.Select(r => r.RegistrationInstance.Id).Distinct().Count(); var errors = new List <string>(); foreach (var registration in registrations) { if (registration.DiscountedCost > registration.TotalPaid) { var reminderDate = RockDateTime.Now.AddDays(registration.RegistrationInstance.RegistrationTemplate.PaymentReminderTimeSpan.Value * -1); if (registration.LastPaymentReminderDateTime < reminderDate) { Dictionary <string, object> mergeObjects = new Dictionary <string, object>(); mergeObjects.Add("Registration", registration); mergeObjects.Add("RegistrationInstance", registration.RegistrationInstance); var emailMessage = new RockEmailMessage(); emailMessage.AdditionalMergeFields = mergeObjects; emailMessage.AddRecipient(registration.GetConfirmationRecipient(mergeObjects)); emailMessage.FromEmail = registration.RegistrationInstance.RegistrationTemplate.PaymentReminderFromEmail; emailMessage.FromName = registration.RegistrationInstance.RegistrationTemplate.PaymentReminderFromName; emailMessage.Subject = registration.RegistrationInstance.RegistrationTemplate.PaymentReminderSubject; emailMessage.Message = registration.RegistrationInstance.RegistrationTemplate.PaymentReminderEmailTemplate; emailMessage.AppRoot = publicAppRoot; var emailErrors = new List <string>(); emailMessage.Send(out emailErrors); errors.AddRange(emailErrors); registration.LastPaymentReminderDateTime = RockDateTime.Now; rockContext.SaveChanges(); if (!emailErrors.Any()) { sendCount++; } } } } context.Result = string.Format("Sent {0} from {1}", "reminder".ToQuantity(sendCount), "registration instances".ToQuantity(registrationInstanceCount)); if (errors.Any()) { StringBuilder sb = new StringBuilder(); sb.AppendLine(); sb.Append(string.Format("{0} Errors: ", errors.Count())); errors.ForEach(e => { sb.AppendLine(); sb.Append(e); }); string errorMessage = sb.ToString(); context.Result += errorMessage; var exception = new Exception(errorMessage); HttpContext context2 = HttpContext.Current; ExceptionLogService.LogException(exception, context2); throw exception; } } }
public virtual Task Execute(IJobExecutionContext context) { JobKey jobKey = context.JobDetail.Key; // Logging lock (ThisLock) { L4Logger l4Logger = new L4Logger(jobKey.Name + ".log"); l4Logger.Add("Start Task : " + jobKey.Name); l4Logger.Close(); } JobDataMap dataMap = context.JobDetail.JobDataMap; string address = dataMap.GetString("address"); string nx = dataMap.GetString("nx"); string ny = dataMap.GetString("ny"); Console.WriteLine("IJob says: {0}, address:{1}, nx:{2}, ny:{3}, executing at {4}", jobKey, address, nx, ny, DateTime.Now.ToString("r")); WeatherCrawlerData wcd = new WeatherCrawlerData(); // Read db.ini and connect to the collection server DB DbIniManager dbIniManager = new DbIniManager(); dbIniManager.ReadIni(); DbManager dm = new DbManager(dbIniManager.IpAddress, dbIniManager.DbName, dbIniManager.CollectionName, dbIniManager.Id, dbIniManager.Pw); if (dm.Connect()) { // Check whether data for the same address already exists if (dm.IsExistAddress(address)) { Console.WriteLine("Data for the same address already exists in the DB"); // Keep only the currentData from the existing record List <CurrentData> existCD = dm.GetCurrentData(address); // Rebuild the data, carrying over the existing currentData wcd = GetAssembledWCD(address, nx, ny, true, existCD); if (null != wcd) { // Delete the existing record dm.DeleteDocumentByAddress(address); // INSERT the data into the collection server DB dm.InsertWeatherData(wcd); } } else { Console.WriteLine("No data for this address exists in the DB"); wcd = GetAssembledWCD(address, nx, ny, false, null); if (null != wcd) { // INSERT the data into the collection server DB dm.InsertWeatherData(wcd); } } } else { MessageBox.Show("Cannot connect to the collection server DB"); } // Logging lock (ThisLock) { L4Logger l4Logger = new L4Logger(jobKey.Name + ".log"); l4Logger.Add("End Task : " + jobKey.Name); l4Logger.Close(); } return Task.FromResult(0); }
/// <summary> /// Calls the equivalent method on the 'proxied' <see cref="QuartzScheduler" />. /// </summary> public virtual void TriggerJob(JobKey jobKey, JobDataMap data) { try { GetRemoteScheduler().TriggerJob(jobKey, data); } catch (RemotingException re) { throw InvalidateHandleCreateException("Error communicating with remote scheduler.", re); } }
public void TestSetObjectPropsNullNonPrimative() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap.Put("mapValue", null); TestObject testObject = new TestObject(); Dictionary<string, string> map = new Dictionary<string, string>(); map.Add("A", "B"); testObject.MapValue = map; factory.SetObjectProperties(testObject, jobDataMap); Assert.IsNull(testObject.MapValue); }
/// <summary> /// Executes the specified context. /// </summary> /// <param name="context">The context.</param> public void Execute(IJobExecutionContext context) { var rockContext = new RockContext(); JobDataMap dataMap = context.JobDetail.JobDataMap; Guid? systemEmailGuid = dataMap.GetString("NotificationEmailTemplate").AsGuidOrNull(); if (systemEmailGuid.HasValue) { var selectedGroupTypes = new List <Guid>(); if (!string.IsNullOrWhiteSpace(dataMap.GetString("GroupTypes"))) { selectedGroupTypes = dataMap.GetString("GroupTypes").Split(',').Select(Guid.Parse).ToList(); } var excludedGroupRoleIds = new List <int>(); if (!string.IsNullOrWhiteSpace(dataMap.GetString("ExcludedGroupRoleIds"))) { excludedGroupRoleIds = dataMap.GetString("ExcludedGroupRoleIds").Split(',').Select(int.Parse).ToList(); } var notificationOption = dataMap.GetString("NotifyParentLeaders").ConvertToEnum <NotificationOption>(NotificationOption.None); var accountAbilityGroupGuid = dataMap.GetString("AccountabilityGroup").AsGuid(); // get groups matching of the types provided GroupService groupService = new GroupService(rockContext); var groups = groupService.Queryable().AsNoTracking() .Where(g => selectedGroupTypes.Contains(g.GroupType.Guid) && g.IsActive == true && g.GroupRequirements.Any()); foreach (var group in groups) { // check for members that don't meet requirements var groupMembersWithIssues = groupService.GroupMembersNotMeetingRequirements(group.Id, true); if (groupMembersWithIssues.Count > 0) { // add issues to issue list GroupsMissingRequirements groupMissingRequirements = new GroupsMissingRequirements(); groupMissingRequirements.Id = group.Id; groupMissingRequirements.Name = group.Name; if (group.GroupType != null) { groupMissingRequirements.GroupTypeId = group.GroupTypeId; groupMissingRequirements.GroupTypeName = group.GroupType.Name; } groupMissingRequirements.AncestorPathName = groupService.GroupAncestorPathName(group.Id); // get list of the group leaders groupMissingRequirements.Leaders = group.Members .Where(m => m.GroupRole.IsLeader == true && !excludedGroupRoleIds.Contains(m.GroupRoleId)) .Select(m => new GroupMemberResult { Id = m.Id, PersonId = m.PersonId, FullName = m.Person.FullName }) .ToList(); List <GroupMembersMissingRequirements> groupMembers = new List <GroupMembersMissingRequirements>(); foreach (var groupMemberIssue in groupMembersWithIssues) { GroupMembersMissingRequirements groupMember = new GroupMembersMissingRequirements(); groupMember.FullName = groupMemberIssue.Key.Person.FullName; groupMember.Id = groupMemberIssue.Key.Id; groupMember.PersonId = groupMemberIssue.Key.PersonId; groupMember.GroupMemberRole = groupMemberIssue.Key.GroupRole.Name; List <MissingRequirement> missingRequirements = new List <MissingRequirement>(); foreach (var issue in groupMemberIssue.Value) { MissingRequirement missingRequirement = new MissingRequirement(); missingRequirement.Id = issue.Key.GroupRequirement.GroupRequirementType.Id; missingRequirement.Name = issue.Key.GroupRequirement.GroupRequirementType.Name; missingRequirement.Status = issue.Key.MeetsGroupRequirement; missingRequirement.OccurrenceDate = issue.Value; switch (issue.Key.MeetsGroupRequirement) { case MeetsGroupRequirement.Meets: missingRequirement.Message = issue.Key.GroupRequirement.GroupRequirementType.PositiveLabel; break; case MeetsGroupRequirement.MeetsWithWarning: missingRequirement.Message = issue.Key.GroupRequirement.GroupRequirementType.WarningLabel; break; case MeetsGroupRequirement.NotMet: missingRequirement.Message = 
issue.Key.GroupRequirement.GroupRequirementType.NegativeLabel; break; } missingRequirements.Add(missingRequirement); } groupMember.MissingRequirements = missingRequirements; groupMembers.Add(groupMember); } groupMissingRequirements.GroupMembersMissingRequirements = groupMembers; _groupsMissingRequriements.Add(groupMissingRequirements); // add leaders as people to notify foreach (var leader in group.Members.Where(m => m.GroupRole.IsLeader == true && !excludedGroupRoleIds.Contains(m.GroupRoleId))) { NotificationItem notification = new NotificationItem(); notification.GroupId = group.Id; notification.Person = leader.Person; _notificationList.Add(notification); } // notify parents if (notificationOption != NotificationOption.None) { var parentLeaders = new GroupMemberService(rockContext).Queryable("Person").AsNoTracking() .Where(m => m.GroupRole.IsLeader && !excludedGroupRoleIds.Contains(m.GroupRoleId)); if (notificationOption == NotificationOption.DirectParent) { // just the parent group parentLeaders = parentLeaders.Where(m => m.GroupId == group.ParentGroupId); } else { // all parents in the heirarchy var parentIds = groupService.GetAllAncestorIds(group.Id); parentLeaders = parentLeaders.Where(m => parentIds.Contains(m.GroupId)); } foreach (var parentLeader in parentLeaders.ToList()) { NotificationItem parentNotification = new NotificationItem(); parentNotification.Person = parentLeader.Person; parentNotification.GroupId = group.Id; _notificationList.Add(parentNotification); } } } } // send out notificatons var appRoot = Rock.Web.Cache.GlobalAttributesCache.Read(rockContext).GetValue("ExternalApplicationRoot"); var recipients = new List <RecipientData>(); var notificationRecipients = _notificationList.GroupBy(p => p.Person.Id).ToList(); foreach (var recipientId in notificationRecipients) { var recipient = _notificationList.Where(n => n.Person.Id == recipientId.Key).Select(n => n.Person).FirstOrDefault(); var mergeFields = Rock.Lava.LavaHelper.GetCommonMergeFields(null); mergeFields.Add("Person", recipient); var notificationGroupIds = _notificationList .Where(n => n.Person.Id == recipient.Id) .Select(n => n.GroupId) .ToList(); var missingRequirements = _groupsMissingRequriements.Where(g => notificationGroupIds.Contains(g.Id)).ToList(); mergeFields.Add("GroupsMissingRequirements", missingRequirements); recipients.Add(new RecipientData(recipient.Email, mergeFields)); Email.Send(systemEmailGuid.Value, recipients, appRoot); recipients.Clear(); } // add accountability group members if (!accountAbilityGroupGuid.IsEmpty()) { var accountabilityGroupMembers = new GroupMemberService(rockContext).Queryable().AsNoTracking() .Where(m => m.Group.Guid == accountAbilityGroupGuid) .Select(m => m.Person); foreach (var person in accountabilityGroupMembers) { var mergeFields = Rock.Lava.LavaHelper.GetCommonMergeFields(null); mergeFields.Add("Person", person); mergeFields.Add("GroupsMissingRequirements", _groupsMissingRequriements); recipients.Add(new RecipientData(person.Email, mergeFields)); } } Email.Send(systemEmailGuid.Value, recipients, appRoot); context.Result = string.Format("{0} requirement notification {1} sent", recipients.Count, "email".PluralizeIf(recipients.Count() != 1)); } else { context.Result = "Warning: No NotificationEmailTemplate found"; } }
public void TestSetObjectPropsWrongPrimativeType() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap.Put("intValue", (float) 7); try { factory.SetObjectProperties(new TestObject(), jobDataMap); Assert.Fail(); } catch (SchedulerException) { } }
public virtual void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.JobDetail.JobDataMap; // // Get all our configuration options. // Guid engagedDataViewGuid = dataMap.GetString("EngagedDataView").AsGuid(); Guid?disengagedDataViewGuid = dataMap.GetString("DisengagedDataView").AsGuidOrNull(); Guid?entryWorkflowType = dataMap.GetString("EntryWorkflow").AsGuidOrNull(); Guid?exitWorkflowType = dataMap.GetString("ExitWorkflow").AsGuidOrNull(); Guid?engagedAttributeGuid = dataMap.GetString("EngagedAttribute").AsGuidOrNull(); Guid?startDateAttributeGuid = dataMap.GetString("StartDateAttribute").AsGuidOrNull(); Guid?endDateAttributeGuid = dataMap.GetString("EndDateAttribute").AsGuidOrNull(); // // Find the specified attributes in the cache. // var engagedAttribute = engagedAttributeGuid.HasValue ? AttributeCache.Read(engagedAttributeGuid.Value) : null; var startDateAttribute = startDateAttributeGuid.HasValue ? AttributeCache.Read(startDateAttributeGuid.Value) : null; var endDateAttribute = endDateAttributeGuid.HasValue ? AttributeCache.Read(endDateAttributeGuid.Value) : null; using (var rockContext = new RockContext()) { var workflowTypeService = new WorkflowTypeService(rockContext); var errorMessages = new List <string>(); List <int> engagedDataViewPersonIds = new List <int>(); List <int> disengagedDataViewPersonIds = null; List <int> engagingPersonIds = null; List <int> disengagingPersonIds = null; // // Get the list of people that are in the engaged data view. // try { var dataViewService = new DataViewService(rockContext); DataView dataView; int dataTimeout = 900; dataView = dataViewService.Get(engagedDataViewGuid); var qry = dataView.GetQuery(null, rockContext, dataTimeout, out errorMessages); if (qry != null) { engagedDataViewPersonIds = qry.AsNoTracking().Select(a => a.Id).ToList(); } if (disengagedDataViewGuid.HasValue) { dataView = dataViewService.Get(disengagedDataViewGuid.Value); qry = dataView.GetQuery(null, rockContext, dataTimeout, out errorMessages); if (qry != null) { disengagedDataViewPersonIds = qry.AsNoTracking().Select(a => a.Id).ToList(); } } } catch (Exception e) { ExceptionLogService.LogException(e, System.Web.HttpContext.Current); while (e != null) { if (e is SqlException && (e as SqlException).Number == -2) { errorMessages.Add("This dataview did not complete in a timely manner."); } else { errorMessages.Add(e.Message); } e = e.InnerException; } } // // If we had any errors trying to run the data view then abort. // if (errorMessages.Any()) { throw new Exception(string.Join("\n", errorMessages)); } // // Get a list of all person Ids that exist in the database. 
// var personQry = new PersonService(rockContext).Queryable().AsNoTracking(); // // Calculate the list of people that are entering and leaving engagement based on // specific criteria: // // Have EngagedAttribute && Have DisengagedDataView // EngagingPeople = EngagedDataView - AlreadyEngaged // DisengagingPeople = DisengagedDataView - AlreadyDisengaged // // Have EngagedAttribute && No DisengagedDataView // EngagingPeople = EngagedDataView - AlreadyEngaged // DisengagingPeople = AlreadyEngaged - EngagedDataView // // No EngagedAttribute && Have DisengagedDataView // EngagingPeople = EngagedDataView // DisengagingPeople = DisengatedDataView // // No EngagedAttribute && No DisengagedDataView // EngagingPeople = EngagedDataView // DisengagingPeople = AllPeople - EngagedDataView // if (engagedAttribute != null) { var alreadyEngagedPersonIds = personQry.WhereAttributeValue(rockContext, engagedAttribute.Key, "True").Select(p => p.Id).ToList(); engagingPersonIds = engagedDataViewPersonIds.Except(alreadyEngagedPersonIds).ToList(); if (disengagedDataViewPersonIds != null) { var alreadyDisengagedPersonIds = personQry.WhereAttributeValue(rockContext, engagedAttribute.Key, "False").Select(p => p.Id).ToList(); disengagingPersonIds = disengagedDataViewPersonIds.Except(alreadyDisengagedPersonIds).ToList(); } else { disengagingPersonIds = alreadyEngagedPersonIds.Except(engagedDataViewPersonIds).ToList(); } } else { engagingPersonIds = engagedDataViewPersonIds; if (disengagedDataViewPersonIds != null) { disengagingPersonIds = disengagedDataViewPersonIds.ToList(); } else { var allPersonIds = personQry.Select(p => p.Id).ToList(); disengagingPersonIds = allPersonIds.Except(engagedDataViewPersonIds).ToList(); } } // // Add any new people to engagement status. // if (engagingPersonIds.Any()) { ProcessPeople(engagingPersonIds, true, engagedAttribute, startDateAttribute, endDateAttribute, entryWorkflowType); } // // Remove any old people from engagement status. // if (disengagingPersonIds.Any()) { ProcessPeople(disengagingPersonIds, false, engagedAttribute, startDateAttribute, endDateAttribute, exitWorkflowType); } context.Result = string.Format("Added {0} and removed {1} people from engaged status.", engagingPersonIds.Count, disengagingPersonIds.Count); } }
/// <summary> /// Job to get a National Change of Address (NCOA) report for all active people's addresses. /// /// Called by the <see cref="IScheduler" /> when a /// <see cref="ITrigger" /> fires that is associated with /// the <see cref="IJob" />. /// </summary> public virtual void Execute(IJobExecutionContext context) { Exception exception = null; // Get the job setting(s) JobDataMap dataMap = context.JobDetail.JobDataMap; SparkDataConfig sparkDataConfig = Ncoa.GetSettings(); if (!sparkDataConfig.NcoaSettings.IsEnabled || !sparkDataConfig.NcoaSettings.IsValid()) { return; } try { Guid?sparkDataApiKeyGuid = sparkDataConfig.SparkDataApiKey.AsGuidOrNull(); if (sparkDataApiKeyGuid == null) { exception = new NoRetryException($"Spark Data API Key '{sparkDataConfig.SparkDataApiKey.ToStringSafe()}' is empty or invalid. The Spark Data API Key can be configured in System Settings > Spark Data Settings."); return; } switch (sparkDataConfig.NcoaSettings.CurrentReportStatus) { case "": case null: if (sparkDataConfig.NcoaSettings.RecurringEnabled) { StatusStart(sparkDataConfig); } break; case "Start": StatusStart(sparkDataConfig); break; case "Failed": StatusFailed(sparkDataConfig); break; case "Pending: Report": StatusPendingReport(sparkDataConfig); break; case "Pending: Export": StatusPendingExport(sparkDataConfig); break; case "Complete": StatusComplete(sparkDataConfig); break; } } catch (Exception ex) { exception = ex; } finally { if (exception != null) { context.Result = $"NCOA Job failed: {exception.Message}"; if (exception is NoRetryException || exception is NoRetryAggregateException) { sparkDataConfig.NcoaSettings.CurrentReportStatus = "Complete"; sparkDataConfig.NcoaSettings.LastRunDate = RockDateTime.Now; } else { sparkDataConfig.NcoaSettings.CurrentReportStatus = "Failed"; } StringBuilder sb = new StringBuilder($"NOCA job failed: {RockDateTime.Now.ToString()} - {exception.Message}"); Exception innerException = exception; while (innerException.InnerException != null) { innerException = innerException.InnerException; sb.AppendLine(innerException.Message); } sparkDataConfig.Messages.Add(sb.ToString()); Ncoa.SaveSettings(sparkDataConfig); try { var ncoa = new Ncoa(); ncoa.SendNotification(sparkDataConfig, "failed"); } catch { } if (sparkDataConfig.SparkDataApiKey.IsNotNullOrWhiteSpace() && sparkDataConfig.NcoaSettings.FileName.IsNotNullOrWhiteSpace()) { SparkDataApi sparkDataApi = new SparkDataApi(); } Exception ex = new AggregateException("NCOA job failed.", exception); HttpContext context2 = HttpContext.Current; ExceptionLogService.LogException(ex, context2); throw ex; } else { string msg; if (sparkDataConfig.NcoaSettings.CurrentReportStatus == "Complete") { using (RockContext rockContext = new RockContext()) { NcoaHistoryService ncoaHistoryService = new NcoaHistoryService(rockContext); msg = $"NCOA request processed, {ncoaHistoryService.Count()} {(ncoaHistoryService.Count() == 1 ? "address" : "addresses")} processed, {ncoaHistoryService.MovedCount()} {(ncoaHistoryService.MovedCount() > 1 ? "were" : "was")} marked as 'moved'"; } } else { msg = $"Job complete. NCOA status: {sparkDataConfig.NcoaSettings.CurrentReportStatus}"; } context.Result = msg; sparkDataConfig.Messages.Add($"{msg}: {RockDateTime.Now.ToString()}"); Ncoa.SaveSettings(sparkDataConfig); } } }
public void RunJobNow(string jobName, string groupName, JobDataMap jdm) { IScheduler sched = quartzInstance.GetQuartzScheduler(); sched.TriggerJob(new JobKey(jobName, groupName), jdm); }
public static async Task <IScheduler> Create(bool start = true) { var scheduler = await StdSchedulerFactory.GetDefaultScheduler(); { var jobData = new JobDataMap(); jobData.Put("DateFrom", DateTime.Now); jobData.Put("QuartzAssembly", File.ReadAllBytes(typeof(IScheduler).Assembly.Location)); var job = JobBuilder.Create <DummyJob>() .WithIdentity("Sales", "REPORTS") .WithDescription("Hello Job!") .UsingJobData(jobData) .StoreDurably() .Build(); var trigger = TriggerBuilder.Create() .WithIdentity("MorningSales") .StartNow() .WithCronSchedule("0 0 8 1/1 * ? *") .Build(); await scheduler.ScheduleJob(job, trigger); trigger = TriggerBuilder.Create() .WithIdentity("MonthlySales") .ForJob(job.Key) .StartNow() .WithCronSchedule("0 0 12 1 1/1 ? *") .Build(); await scheduler.ScheduleJob(trigger); await scheduler.PauseTrigger(trigger.Key); trigger = TriggerBuilder.Create() .WithIdentity("HourlySales") .ForJob(job.Key) .StartNow() .WithSimpleSchedule(x => x.WithIntervalInHours(1).RepeatForever()) .Build(); await scheduler.ScheduleJob(trigger); } { var job = JobBuilder.Create <DummyJob>().WithIdentity("Job1").StoreDurably().Build(); await scheduler.AddJob(job, false); job = JobBuilder.Create <DummyJob>().WithIdentity("Job2").StoreDurably().Build(); await scheduler.AddJob(job, false); job = JobBuilder.Create <DummyJob>().WithIdentity("Job3").StoreDurably().Build(); await scheduler.AddJob(job, false); job = JobBuilder.Create <DummyJob>().WithIdentity("Job4").StoreDurably().Build(); await scheduler.AddJob(job, false); job = JobBuilder.Create <DummyJob>().WithIdentity("Job5").StoreDurably().Build(); await scheduler.AddJob(job, false); job = JobBuilder.Create <DummyJob>().WithIdentity("Send SMS", "CRITICAL").StoreDurably().RequestRecovery().Build(); await scheduler.AddJob(job, false); var trigger = TriggerBuilder.Create() .WithIdentity("PushAds (US)") .ForJob(job.Key) .UsingJobData("Location", "US") .StartNow() .WithCronSchedule("0 0/5 * 1/1 * ? *") .Build(); await scheduler.ScheduleJob(trigger); trigger = TriggerBuilder.Create() .WithIdentity("PushAds (EU)") .ForJob(job.Key) .UsingJobData("Location", "EU") .StartNow() .WithCronSchedule("0 0/7 * 1/1 * ? *") .Build(); await scheduler.ScheduleJob(trigger); await scheduler.PauseTriggers(GroupMatcher <TriggerKey> .GroupEquals("LONGRUNNING")); job = JobBuilder.Create <DummyJob>().WithIdentity("Send Push", "CRITICAL").StoreDurably().RequestRecovery().Build(); await scheduler.AddJob(job, false); } { var job = JobBuilder.Create <DisallowConcurrentJob>() .WithIdentity("Load CSV", "IMPORT") .StoreDurably() .Build(); var trigger = TriggerBuilder.Create() .WithIdentity("CSV_small", "FREQUENTLY") .ForJob(job) .StartNow() .WithSimpleSchedule(x => x.WithIntervalInSeconds(5).RepeatForever()) .Build(); await scheduler.ScheduleJob(job, trigger); trigger = TriggerBuilder.Create() .WithIdentity("CSV_big", "LONGRUNNING") .ForJob(job) .StartNow() .WithDailyTimeIntervalSchedule(x => x.OnMondayThroughFriday()) .Build(); await scheduler.ScheduleJob(trigger); } if (start) { await scheduler.Start(); } return(scheduler); }
/// <summary> /// Executes the specified context. /// </summary> /// <param name="context">The context.</param> public virtual void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.JobDetail.JobDataMap; var emailTemplateGuid = dataMap.GetString("SystemEmail").AsGuidOrNull(); var dataViewGuid = dataMap.GetString("DataView").AsGuidOrNull(); if (dataViewGuid != null && emailTemplateGuid.HasValue) { var rockContext = new RockContext(); var dataView = new DataViewService(rockContext).Get((Guid)dataViewGuid); List <IEntity> resultSet = null; var errorMessages = new List <string>(); var dataTimeout = dataMap.GetString("DatabaseTimeout").AsIntegerOrNull() ?? 180; try { var qry = dataView.GetQuery(null, rockContext, dataTimeout, out errorMessages); if (qry != null) { resultSet = qry.AsNoTracking().ToList(); } } catch (Exception exception) { ExceptionLogService.LogException(exception, HttpContext.Current); while (exception != null) { if (exception is SqlException && (exception as SqlException).Number == -2) { // if there was a SQL Server Timeout, have the warning be a friendly message about that. errorMessages.Add("This dataview did not complete in a timely manner. You can try again or adjust the timeout setting of this block."); exception = exception.InnerException; } else { errorMessages.Add(exception.Message); exception = exception.InnerException; } return; } } var recipients = new List <RockEmailMessageRecipient>(); if (resultSet.Any()) { foreach (Person person in resultSet) { if (!person.IsEmailActive || person.Email.IsNullOrWhiteSpace() || person.EmailPreference == EmailPreference.DoNotEmail) { continue; } var mergeFields = Lava.LavaHelper.GetCommonMergeFields(null); mergeFields.Add("Person", person); recipients.Add(new RockEmailMessageRecipient(person, mergeFields)); } } var emailMessage = new RockEmailMessage(emailTemplateGuid.Value); emailMessage.SetRecipients(recipients); var errors = new List <string>(); emailMessage.Send(out errors); context.Result = string.Format("{0} emails sent", recipients.Count()); if (errors.Any()) { StringBuilder sb = new StringBuilder(); sb.AppendLine(); sb.Append(string.Format("{0} Errors: ", errors.Count())); errors.ForEach(e => { sb.AppendLine(); sb.Append(e); }); string errorMessage = sb.ToString(); context.Result += errorMessage; var exception = new Exception(errorMessage); HttpContext context2 = HttpContext.Current; ExceptionLogService.LogException(exception, context2); throw exception; } } }
public async Task CreateOrUpdate(JobCreateOrUpdateRequest request) { request.Name = request.Name.Trim(); request.Group = request.Group.Trim(); JobKey key = new JobKey(request.Name, request.Group); if (await _scheduler.CheckExists(key)) { if (!request.IsUpdate) { throw new Exception("A job with the same name already exists"); // when creating, do not create if the same job already exists } else { await _scheduler.DeleteJob(key); // when updating, delete the old job first, then recreate it } } /******Data*****/ JobDataMap dataMap = new JobDataMap(); dataMap.Put(DataKeys.HttpMethod, request.HttpMethod); dataMap.Put(DataKeys.RequestUrl, request.RequestUrl); dataMap.Put(DataKeys.TriggerType, request.TriggerType); dataMap.Put(DataKeys.RepeatCount, request.RepeatCount); dataMap.Put(DataKeys.Interval, request.Interval); dataMap.Put(DataKeys.IntervalType, request.IntervalType); dataMap.Put(DataKeys.Cron, request.Cron); dataMap.Put(DataKeys.RequestBody, request.RequestBody); dataMap.Put(DataKeys.CreateTime, DateTime.Now.ToString()); dataMap.Put(DataKeys.StartTime, request.StartTime.ToString()); dataMap.Put(DataKeys.EndTime, request.EndTime.HasValue ? request.EndTime.Value.ToString() : string.Empty); /******Job*****/ IJobDetail job = JobBuilder.Create <HttpJob>() .StoreDurably(true) // keep the job durable; false would remove it once it has no associated triggers .RequestRecovery() // recover the job after a scheduler restart .WithDescription(request.Description ?? string.Empty) .WithIdentity(request.Name, request.Group) .UsingJobData(dataMap) .Build(); /******Trigger*****/ TriggerBuilder builder = TriggerBuilder.Create() .WithIdentity(request.Name, request.Group) .StartAt(request.StartTime.Value) .ForJob(job); if (request.EndTime.HasValue) { builder.EndAt(request.EndTime.Value); } if (request.TriggerType == (int)TriggerTypeEnum.Simple) { builder.WithSimpleSchedule(simple => { if (request.IntervalType == (int)IntervalTypeEnum.Second) { simple.WithIntervalInSeconds(request.Interval); } if (request.IntervalType == (int)IntervalTypeEnum.Minute) { simple.WithIntervalInMinutes(request.Interval); } if (request.IntervalType == (int)IntervalTypeEnum.Hour) { simple.WithIntervalInHours(request.Interval); } if (request.IntervalType == (int)IntervalTypeEnum.Day) { simple.WithIntervalInHours(request.Interval * 24); } if (request.RepeatCount > 0) { simple.WithRepeatCount(request.RepeatCount); } else { simple.RepeatForever(); } simple.WithMisfireHandlingInstructionFireNow(); // fire immediately if the trigger was delayed (misfired) }); } else { builder.WithCronSchedule(request.Cron, cron => { cron.WithMisfireHandlingInstructionFireAndProceed(); }); } ITrigger trigger = builder.Build(); await _scheduler.ScheduleJob(job, trigger); // add to the scheduler and persist FlushCache(); }
private static void Start(Task task, AppDomain domain) { //throw new SchedulerException("SchedulerException"); // create the instance in the application domain and keep a reference in the job; this is the instance that ultimately executes the task TaskBase instance = domain.CreateInstanceFromAndUnwrap(AssemblyHelper.GetTaskAssemblyPath(task.AssemblyName), task.ClassName) as TaskBase; if (instance == null) { throw new InvalidCastException($"Failed to create the task instance; make sure the target task derives from TaskBase. Assembly: {task.AssemblyName}, Type: {task.ClassName}"); } // instance.logger = new LogWriter(); JobDataMap map = new JobDataMap { new KeyValuePair <string, object> ("domain", domain), new KeyValuePair <string, object> ("instance", instance), new KeyValuePair <string, object> ("name", task.Title), new KeyValuePair <string, object> ("params", task.CustomParamsJson) }; string jobName = task.Id.ToString().ToLower(); IJobDetail job = JobBuilder.Create(typeof(RootJob)).WithIdentity(jobName) .SetJobData(map) //.UsingJobData("assembly", task.AssemblyName) //.UsingJobData("class", task.ClassName) .Build(); // register a listener for this job, then add the trigger _scheduler.ListenerManager.AddJobListener(new JobRunListener(jobName), KeyMatcher <JobKey> .KeyEquals(new JobKey(jobName))); if (task.RunMoreTimes) { if (!CronExpression.IsValidExpression(task.CronExpression)) { throw new Exception("Invalid cron expression"); } CronTriggerImpl trigger = new CronTriggerImpl { CronExpressionString = task.CronExpression, Name = task.Title, Key = new TriggerKey(task.Id.ToString()), Description = task.Remark }; if (task.StartDate.HasValue) { trigger.StartTimeUtc = TimeZoneInfo.ConvertTimeToUtc(task.StartDate.Value); } if (task.EndDate.HasValue) { trigger.EndTimeUtc = TimeZoneInfo.ConvertTimeToUtc(task.EndDate.Value); } _scheduler.ScheduleJob(job, trigger); } else { DateTimeOffset start = TimeZoneInfo.ConvertTimeToUtc(DateTime.Now); if (task.StartDate.HasValue) { start = TimeZoneInfo.ConvertTimeToUtc(task.StartDate.Value); } DateTimeOffset end = start.AddMinutes(1); if (task.EndDate.HasValue) { end = TimeZoneInfo.ConvertTimeToUtc(task.EndDate.Value); } ITrigger trigger = TriggerBuilder.Create() .WithIdentity(jobName) .StartAt(start) .WithSimpleSchedule(x => x .WithRepeatCount(1).WithIntervalInMinutes(1)) .EndAt(end) .Build(); _scheduler.ScheduleJob(job, trigger); } LogHelper.Info($"Task \"{task.Title}\" started successfully!", task.Id); System.Threading.Tasks.Task.Run(() => { while (true) { var log = instance.ReadLog(); if (log != null) { //System.Diagnostics.Debug.WriteLine("queue:" + log.Contents); LogManager.Queue.Write(log); } else { System.Threading.Thread.Sleep(3000); } } }); }
public void TestSetObjectPropsWrongNonPrimitiveType() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap.Put("mapValue", 7.2f); try { factory.SetObjectProperties(new TestObject(), jobDataMap); Assert.Fail(); } catch (SchedulerException) { } }
/// <summary> /// Sets the object properties. /// </summary> /// <param name="obj">The object to set properties to.</param> /// <param name="data">The data to set.</param> public virtual void SetObjectProperties(object obj, JobDataMap data) { Type paramType = null; foreach (string name in data.Keys) { string c = CultureInfo.InvariantCulture.TextInfo.ToUpper(name.Substring(0, 1)); string propName = c + name.Substring(1); object o = data[name]; PropertyInfo prop = obj.GetType().GetProperty(propName); try { if (prop == null) { HandleError($"No property on Job class {obj.GetType()} for property '{name}'"); continue; } paramType = prop.PropertyType; if (o == null && (paramType.GetTypeInfo().IsPrimitive || paramType.GetTypeInfo().IsEnum)) { // cannot set null to these HandleError($"Cannot set null to property on Job class {obj.GetType()} for property '{name}'"); } if (paramType == typeof(char) && o is string && ((string) o).Length != 1) { // handle special case HandleError($"Cannot set empty string to char property on Job class {obj.GetType()} for property '{name}'"); } object goodValue = paramType == typeof (TimeSpan) ? ObjectUtils.GetTimeSpanValueForProperty(prop, o) : ConvertValueIfNecessary(paramType, o); prop.GetSetMethod().Invoke(obj, new object[] {goodValue}); } catch (FormatException nfe) { HandleError( $"The setter on Job class {obj.GetType()} for property '{name}' expects a {paramType} but was given {o}", nfe); } catch (MethodAccessException) { HandleError($"The setter on Job class {obj.GetType()} for property '{name}' expects a {paramType} but was given a {o.GetType()}"); } catch (ArgumentException e) { HandleError( $"The setter on Job class {obj.GetType()} for property '{name}' expects a {paramType} but was given {o.GetType()}", e); } catch (UnauthorizedAccessException e) { HandleError( $"The setter on Job class {obj.GetType()} for property '{name}' could not be accessed.", e); } catch (TargetInvocationException e) { HandleError( $"The setter on Job class {obj.GetType()} for property '{name}' could not be accessed.", e); } catch (Exception e) { HandleError( $"The setter on Job class {obj.GetType()} for property '{name}' threw exception when processing.", e); } } }
public void CronTrigger_AfterTriggerUpdate_Retains_Cron_Type() { //Arrange var cronTriggerImpl = new CronTriggerImpl("Trigger", "Trigger.Group", "JobName", "JobGroup", "0 15 23 * * ?"); cronTriggerImpl.CalendarName = "calName"; cronTriggerImpl.MisfireInstruction = 1; cronTriggerImpl.Description = "Description"; cronTriggerImpl.SetPreviousFireTimeUtc(new DateTimeOffset(new DateTime(2010,1,1))); cronTriggerImpl.SetNextFireTimeUtc(new DateTimeOffset(new DateTime(2010, 2, 1))); cronTriggerImpl.JobKey = new JobKey("JobKey","JobKeyGroup"); cronTriggerImpl.Priority = 1; var dbProvider = MockRepository.GenerateStub<IDbProvider>(); var dbCommand = MockRepository.GenerateStub<IDbCommand>(); var dataParameterCollection = MockRepository.GenerateStub<IDataParameterCollection>(); dbProvider.Stub(d => d.CreateCommand()).Return(dbCommand).Repeat.Any(); Func<StubDataParameter> dataParam = () => new StubDataParameter(); dbProvider.Stub(d => d.CreateParameter()).Do(dataParam); dbCommand.Stub(c => c.CreateParameter()).Do(dataParam); var dataParameterCollectionOutputs = new List<object>(); Func<object, int> dataParameterFunc = x => { dataParameterCollectionOutputs.Add(x); return 1; }; dataParameterCollection.Stub(d => d.Add(Arg<object>.Is.Anything)).Do(dataParameterFunc); dbCommand.Stub(c => c.Parameters).Return(dataParameterCollection); var metaData = MockRepository.GenerateStub<DbMetadata>(); dbProvider.Stub(d => d.Metadata).Return(metaData); Func<string, string> paramFunc = x => x; metaData.Stub(m => m.GetParameterName(Arg<string>.Is.Anything)).Do(paramFunc); DelegateInitializationArgs args = new DelegateInitializationArgs(); args.Logger = LogManager.GetLogger(GetType()); args.TablePrefix = "QRTZ_"; args.InstanceName = "TESTSCHED"; args.InstanceId = "INSTANCE"; args.DbProvider = dbProvider; args.TypeLoadHelper = new SimpleTypeLoadHelper(); var adoDelegate = new StdAdoDelegate(); adoDelegate.Initialize(args); var dbConnection = new StubConnection(); var conn = new ConnectionAndTransactionHolder(dbConnection, null); var jobDetail = MockRepository.GenerateMock<IJobDetail>(); var jobDataMap = new JobDataMap(); jobDataMap.ClearDirtyFlag(); cronTriggerImpl.JobDataMap = jobDataMap; //Act adoDelegate.UpdateTrigger(conn, cronTriggerImpl, "state", jobDetail); //Assert var resultDataParameters = dataParameterCollectionOutputs.Select(x => x as IDataParameter).Where(x => x.ParameterName == "triggerType").FirstOrDefault(); Assert.AreEqual("CRON",resultDataParameters.Value); }
protected virtual string GetRequiredParameter(JobDataMap data, string propertyName) { string value = data.GetString(propertyName); if (string.IsNullOrEmpty(value)) { throw new ArgumentException(propertyName + " not specified."); } return value; }
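A short sketch of how a job in the same class might lean on GetRequiredParameter to fail fast when a mandatory entry is missing from the merged data map; the "ConnectionString" and "TargetTable" keys are illustrative only, not part of the original example.

// Hypothetical Execute method in the same job class as the helper above.
public void Execute(IJobExecutionContext context)
{
    JobDataMap data = context.MergedJobDataMap;

    // Throws ArgumentException naming the missing key if either entry is absent or empty.
    string connectionString = GetRequiredParameter(data, "ConnectionString");
    string targetTable = GetRequiredParameter(data, "TargetTable");

    // ... use the validated parameters ...
}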
public void RunBatchJobs(IWindsorContainer container) { log.Info("----------------------------------Invincible's dividing line---------------------------------------"); log.Info("BatchJobs run start."); IList<BatchTrigger> tobeFiredTriggerList = this.batchTriggerMgrE.GetTobeFiredTrigger(); if (tobeFiredTriggerList != null && tobeFiredTriggerList.Count > 0) { foreach (BatchTrigger tobeFiredTrigger in tobeFiredTriggerList) { BatchJobDetail jobDetail = tobeFiredTrigger.BatchJobDetail; BatchRunLog runLog = new BatchRunLog(); try { #region Job运行前处理 log.Info("Start run job. JobId:" + jobDetail.Id + ", JobName:" + jobDetail.Name); runLog.BatchJobDetail = jobDetail; runLog.BatchTrigger = tobeFiredTrigger; runLog.StartTime = DateTime.Now; runLog.Status = "InProcess"; this.batchRunLogMgrE.CreateBatchRunLog(runLog); #endregion #region 运行Job JobDataMap dataMap = new JobDataMap(); #region Job参数获取 IList<BatchJobParameter> batchJobParameterList = this.batchJobParameterMgrE.GetBatchJobParameter(jobDetail.Id); if (batchJobParameterList != null && batchJobParameterList.Count > 0) { foreach (BatchJobParameter batchJobParameter in batchJobParameterList) { log.Debug("Set Job Parameter Name:" + batchJobParameter.ParameterName + ", Value:" + batchJobParameter.ParameterValue); dataMap.PutData(batchJobParameter.ParameterName, batchJobParameter.ParameterValue); } } #endregion #region Trigger参数获取 IList<BatchTriggerParameter> batchTriggerParameterList = this.batchTriggerParameterMgrE.GetBatchTriggerParameter(tobeFiredTrigger.Id); if (batchTriggerParameterList != null && batchTriggerParameterList.Count > 0) { foreach (BatchTriggerParameter batchTriggerParameter in batchTriggerParameterList) { log.Debug("Set Trigger Parameter Name:" + batchTriggerParameter.ParameterName + ", Value:" + batchTriggerParameter.ParameterValue); dataMap.PutData(batchTriggerParameter.ParameterName, batchTriggerParameter.ParameterValue); } } #endregion #region 初始化JobRunContext JobRunContext jobRunContext = new JobRunContext(tobeFiredTrigger, jobDetail, dataMap, container); #endregion #region 调用Job IJob job = container.Resolve<IJob>(jobDetail.ServiceName); log.Debug("Start run job: " + jobDetail.ServiceName); job.Execute(jobRunContext); #endregion #endregion #region Job运行后处理 log.Info("Job run successful. JobId:" + jobDetail.Id + ", JobName:" + jobDetail.Name); runLog.EndTime = DateTime.Now; runLog.Status = "Successful"; this.batchRunLogMgrE.UpdateBatchRunLog(runLog); #endregion } catch (Exception ex) { try { log.Error("Job run failure. 
JobId:" + jobDetail.Id + ", JobName:" + jobDetail.Name, ex); runLog.EndTime = DateTime.Now; runLog.Status = "Failure"; if (ex.Message != null && ex.Message.Length > 255) { runLog.Message = ex.Message.Substring(0, 255); } else { runLog.Message = ex.Message; } this.batchRunLogMgrE.UpdateBatchRunLog(runLog); } catch (Exception ex1) { log.Error("", ex1); } } finally { #region 更新BatchTrigger try { BatchTrigger oldTobeFiredTrigger = this.batchTriggerMgrE.LoadBatchTrigger(tobeFiredTrigger.Id); oldTobeFiredTrigger.TimesTriggered++; oldTobeFiredTrigger.PreviousFireTime = oldTobeFiredTrigger.NextFireTime; if (oldTobeFiredTrigger.RepeatCount != 0 && oldTobeFiredTrigger.TimesTriggered >= oldTobeFiredTrigger.RepeatCount) { //关闭Trigger log.Debug("Close Trigger:" + oldTobeFiredTrigger.Name); oldTobeFiredTrigger.Status = BusinessConstants.CODE_MASTER_STATUS_VALUE_CLOSE; oldTobeFiredTrigger.NextFireTime = null; } else { //设置下次运行时间 log.Debug("Set Trigger Next Start Time, Add:" + oldTobeFiredTrigger.Interval.ToString() + " " + oldTobeFiredTrigger.IntervalType); //if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_YEAR) //{ // oldTobeFiredTrigger.NextFireTime = dateTimeNow.AddYears(oldTobeFiredTrigger.Interval); //} //else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_MONTH) //{ // oldTobeFiredTrigger.NextFireTime = dateTimeNow.AddMonths(oldTobeFiredTrigger.Interval); //} //else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_DAY) //{ // oldTobeFiredTrigger.NextFireTime = dateTimeNow.AddDays(oldTobeFiredTrigger.Interval); //} //else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_HOUR) //{ // oldTobeFiredTrigger.NextFireTime = dateTimeNow.AddHours(oldTobeFiredTrigger.Interval); //} //else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_MINUTE) //{ // oldTobeFiredTrigger.NextFireTime = dateTimeNow.AddMinutes(oldTobeFiredTrigger.Interval); //} //else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_SECOND) //{ // oldTobeFiredTrigger.NextFireTime = dateTimeNow.AddSeconds(oldTobeFiredTrigger.Interval); //} //else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_MILLISECOND) //{ // oldTobeFiredTrigger.NextFireTime = dateTimeNow.AddMilliseconds(oldTobeFiredTrigger.Interval); //} DateTime dateTimeNow = DateTime.Now; if (!oldTobeFiredTrigger.NextFireTime.HasValue) { oldTobeFiredTrigger.NextFireTime = dateTimeNow; } while (oldTobeFiredTrigger.NextFireTime.Value <= dateTimeNow) { if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_YEAR) { oldTobeFiredTrigger.NextFireTime = oldTobeFiredTrigger.NextFireTime.Value.AddYears(oldTobeFiredTrigger.Interval); } else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_MONTH) { oldTobeFiredTrigger.NextFireTime = oldTobeFiredTrigger.NextFireTime.Value.AddMonths(oldTobeFiredTrigger.Interval); } else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_DAY) { oldTobeFiredTrigger.NextFireTime = oldTobeFiredTrigger.NextFireTime.Value.AddDays(oldTobeFiredTrigger.Interval); } else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_HOUR) { oldTobeFiredTrigger.NextFireTime = oldTobeFiredTrigger.NextFireTime.Value.AddHours(oldTobeFiredTrigger.Interval); } else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_MINUTE) { oldTobeFiredTrigger.NextFireTime = 
oldTobeFiredTrigger.NextFireTime.Value.AddMinutes(oldTobeFiredTrigger.Interval); } else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_SECOND) { oldTobeFiredTrigger.NextFireTime = oldTobeFiredTrigger.NextFireTime.Value.AddSeconds(oldTobeFiredTrigger.Interval); } else if (oldTobeFiredTrigger.IntervalType == BusinessConstants.DATETIME_TYPE_MILLISECOND) { oldTobeFiredTrigger.NextFireTime = oldTobeFiredTrigger.NextFireTime.Value.AddMilliseconds(oldTobeFiredTrigger.Interval); } else { throw new ArgumentException("invalid Interval Type:" + oldTobeFiredTrigger.IntervalType); } } log.Debug("Trigger Next Start Time is set as:" + oldTobeFiredTrigger.NextFireTime.Value.ToString("yyyy-MM-dd HH:mm:ss")); } this.batchTriggerMgrE.UpdateBatchTrigger(oldTobeFiredTrigger); } catch (Exception ex) { log.Error("Error occur when update batch trigger.", ex); } #endregion } } } else { log.Info("No job found may run in this batch."); } log.Info("BatchJobs run end."); }
/// <summary> /// Called by the scheduler at the time of the trigger firing, in order to /// produce a <see cref="IJob" /> instance on which to call Execute. /// </summary> /// <remarks> /// <p> /// It should be extremely rare for this method to throw an exception - /// basically only in the case where there is no way at all to instantiate /// and prepare the Job for execution. When the exception is thrown, the /// Scheduler will move all triggers associated with the Job into the /// <see cref="TriggerState.Error" /> state, which will require human /// intervention (e.g. an application restart after fixing whatever /// configuration problem led to the issue with instantiating the Job). /// </p> /// </remarks> /// <param name="bundle">The TriggerFiredBundle from which the <see cref="IJobDetail" /> /// and other info relating to the trigger firing can be obtained.</param> /// <param name="scheduler"></param> /// <returns>the newly instantiated Job</returns> /// <throws> SchedulerException if there is a problem instantiating the Job. </throws> public override IJob NewJob(TriggerFiredBundle bundle, IScheduler scheduler) { IJob job = base.NewJob(bundle, scheduler); JobDataMap jobDataMap = new JobDataMap(); jobDataMap.PutAll(scheduler.Context); jobDataMap.PutAll(bundle.JobDetail.JobDataMap); jobDataMap.PutAll(bundle.Trigger.JobDataMap); SetObjectProperties(job, jobDataMap); return job; }
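A minimal sketch of how a factory with the NewJob override above might be used: the scheduler builds jobs through the factory, and any public property on the job class whose name matches a key in the scheduler context, job data map, or trigger data map is populated before Execute runs. The ReportJob class, the "recipient" key, and the registration line are illustrative assumptions, not part of the example above; the Task-returning Execute follows the newer Quartz job signature.

using System;
using System.Threading.Tasks;
using Quartz;

// Hypothetical job whose property is filled from the merged JobDataMap by the factory.
public class ReportJob : IJob
{
    // Populated from a "recipient" entry; the factory uppercases the first letter of the key
    // when matching it to a property name.
    public string Recipient { get; set; }

    public Task Execute(IJobExecutionContext context)
    {
        Console.WriteLine($"Sending report to {Recipient}");
        return Task.CompletedTask;
    }
}

// Registration (sketch, assuming the factory above is exposed as PropertySettingJobFactory):
// scheduler.JobFactory = new PropertySettingJobFactory();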
private static async Task Start(IHosSchedule schedule) { JobDataMap map = new JobDataMap { new KeyValuePair <string, object> ("instance", schedule), }; string jobKey = schedule.Main.Id.ToString(); try { IJobDetail job = JobBuilder.Create().OfType(schedule.GetQuartzJobType()).WithIdentity(jobKey).UsingJobData(map).Build(); // add the job listener var listener = new JobRunListener(jobKey); listener.OnSuccess += StartedEvent; _scheduler.ListenerManager.AddJobListener(listener, KeyMatcher <JobKey> .KeyEquals(new JobKey(jobKey))); ITrigger trigger = GetTrigger(schedule.Main); await _scheduler.ScheduleJob(job, trigger, schedule.CancellationTokenSource.Token); using (var scope = new Core.ScopeDbContext()) { var db = scope.GetDbContext(); var task = db.Schedules.FirstOrDefault(x => x.Id == schedule.Main.Id); if (task != null) { task.NextRunTime = TimeZoneInfo.ConvertTimeFromUtc(trigger.GetNextFireTimeUtc().Value.UtcDateTime, TimeZoneInfo.Local); await db.SaveChangesAsync(); } } } catch (Exception ex) { throw new SchedulerException(ex); } LogHelper.Info($"Task [{schedule.Main.Title}] started successfully!", schedule.Main.Id); _ = Task.Run(() => { while (true) { if (schedule.RunnableInstance == null) { break; } var log = schedule.RunnableInstance.ReadLog(); if (log != null) { LogManager.Queue.Write(new SystemLogEntity { Category = log.Category, Message = log.Message, ScheduleId = log.ScheduleId, Node = log.Node, StackTrace = log.StackTrace, TraceId = log.TraceId, CreateTime = log.CreateTime }); } else { Thread.Sleep(3000); } } }); }
public void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.JobDetail.JobDataMap; var rockContext = new RockContext(); var jobService = new ServiceJobService(rockContext); GroupService groupService = new GroupService(rockContext); CategoryService categoryService = new CategoryService(rockContext); MetricService metricService = new MetricService(rockContext); var metricCategories = MetricCategoriesFieldAttribute.GetValueAsGuidPairs(dataMap.GetString("Metrics")); DateRange dateRange = SlidingDateRangePicker.CalculateDateRangeFromDelimitedValues(dataMap.GetString("DateRange")); var systemEmail = dataMap.GetString("Email").AsGuidOrNull(); if (!systemEmail.HasValue) { throw new Exception("System Email is required!"); } // get job type id int jobId = Convert.ToInt16(context.JobDetail.Description); var job = jobService.Get(jobId); DateTime _midnightToday = RockDateTime.Today.AddDays(1); var currentDateTime = RockDateTime.Now; int recipients = 0; Group notificationGroup = groupService.Get(dataMap.GetString("NotificationGroup").AsGuid()); List <MetricCount> metricCounts = CampusCache.All().Select(c => new MetricCount() { Campus = c, TotalEntered = 0, TotalMetrics = 0 }).ToList(); List <Metric> metrics = new List <Metric>(); // If we have some reasonable data, go ahead and run the job if (notificationGroup != null && metricCategories.Count > 0 && dateRange.Start.HasValue && dateRange.End.HasValue) { foreach (MetricCategoryPair metricCategoryPair in metricCategories) { Metric metric = metricService.Get(metricCategoryPair.MetricGuid); metrics.Add(metric); // Split this by campus partition if (metric.MetricPartitions.Any(mp => mp.EntityType.Name.Contains("Campus"))) { foreach (CampusCache campus in CampusCache.All()) { // Check to see if we also have a schedule partition if (metric.MetricPartitions.Any(mp => mp.EntityType.Name.Contains("Schedule"))) { var services = GetServices(campus, dataMap, dateRange); metricCounts.Where(mc => mc.Campus == campus).FirstOrDefault().TotalMetrics += services.Count; foreach (var service in services) { var hasValues = metric.MetricValues.Where(mv => mv.MetricValuePartitions.Any(mvp => mvp.MetricPartition.EntityType.Name.Contains("Campus") && mvp.EntityId == campus.Id) && mv.MetricValuePartitions.Any(mvp => mvp.MetricPartition.EntityType.Name.Contains("Schedule") && mvp.EntityId == service.Id) && mv.MetricValueDateTime >= dateRange.Start.Value && mv.MetricValueDateTime <= dateRange.End.Value).Any(); if (hasValues) { metricCounts.Where(mc => mc.Campus == campus).FirstOrDefault().TotalEntered++; } } } else { // Add totals for metrics and, if values are entered, metrics entered. metricCounts.Where(mc => mc.Campus == campus).FirstOrDefault().TotalMetrics++; var hasValues = metric.MetricValues.Where(mv => mv.MetricValuePartitions.Any(mvp => mvp.MetricPartition.EntityType.Name.Contains("Campus") && mvp.EntityId == campus.Id) && mv.MetricValueDateTime >= dateRange.Start.Value && mv.MetricValueDateTime <= dateRange.End.Value).Any(); if (hasValues) { metricCounts.Where(mc => mc.Campus == campus).FirstOrDefault().TotalEntered++; } } } } } // Create the merge fields var mergeFields = Rock.Lava.LavaHelper.GetCommonMergeFields(null); mergeFields.Add("MetricCounts", metricCounts); mergeFields.Add("Metrics", metrics); mergeFields.Add("DateRange", dateRange.ToString()); mergeFields.Add("LastRunDate", job.LastSuccessfulRunDateTime); // Setup the email and send it out! 
RockEmailMessage message = new RockEmailMessage(systemEmail.Value); message.AdditionalMergeFields = mergeFields; foreach (GroupMember member in notificationGroup.Members) { message.AddRecipient(member.Person.Email); recipients++; } message.SendSeperatelyToEachRecipient = true; message.Send(); } context.Result = string.Format("Sent {0} metric entry digest emails.", recipients); }
public void CanScheduleJobWithData() { JobDataMap dataMap = new JobDataMap(); dataMap.Add("itemId", Guid.NewGuid()); dataMap.Add("date", DateTime.UtcNow); dataMap.Add("int", 6); dataMap.Add("string", "this is a string"); IJobDetail job = JobBuilder.Create<HelloWorldJob>() .WithIdentity("HelloWorld", "HelloWorldGroup") .RequestRecovery(true) .SetJobData(dataMap) .WithDescription("Job that says 'Hello World!'") .Build(); ITrigger trigger = TriggerBuilder.Create() .WithIdentity("HelloWorldTrigger", "HelloWorldGroup") .WithDescription("A test trigger") .WithSimpleSchedule(x => x.WithIntervalInMinutes(5).WithRepeatCount(10)) .StartNow() .Build(); scheduler.ScheduleJob(job, trigger); }
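For completeness, a hedged sketch of what the HelloWorldJob scheduled above might look like when it reads the stored values back out of the merged data map. The Guid, DateTime, and int entries are read with casts rather than typed getters to avoid assuming specific JobDataMap accessors; the Task-returning Execute follows the newer Quartz API, while older versions use a void Execute.

using System;
using System.Threading.Tasks;
using Quartz;

// Hypothetical implementation of the HelloWorldJob referenced by the test above.
public class HelloWorldJob : IJob
{
    public Task Execute(IJobExecutionContext context)
    {
        JobDataMap data = context.MergedJobDataMap;

        Guid itemId = (Guid)data["itemId"];      // stored as a Guid by the test
        DateTime date = (DateTime)data["date"];  // stored as DateTime.UtcNow
        int count = (int)data["int"];
        string text = data.GetString("string");

        Console.WriteLine($"Hello World! {itemId} {date:u} {count} {text}");
        return Task.CompletedTask;
    }
}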
/// <summary> /// Sets the object properties. /// </summary> /// <param name="obj">The object to set properties to.</param> /// <param name="data">The data to set.</param> public virtual void SetObjectProperties(object obj, JobDataMap data) { Type paramType = null; foreach (string name in data.Keys) { string c = name.Substring(0, 1).ToUpper(CultureInfo.InvariantCulture); string propName = c + name.Substring(1); object o = data[name]; PropertyInfo prop = obj.GetType().GetProperty(propName); try { if (prop == null) { HandleError(string.Format(CultureInfo.InvariantCulture, "No property on Job class {0} for property '{1}'", obj.GetType(), name)); continue; } paramType = prop.PropertyType; if (o == null && (paramType.IsPrimitive || paramType.IsEnum)) { // cannot set null to these HandleError(string.Format(CultureInfo.InvariantCulture, "Cannot set null to property on Job class {0} for property '{1}'", obj.GetType(), name)); } if (paramType == typeof(char) && o!= null && o is string && ((string) o).Length != 1) { // handle special case HandleError(string.Format(CultureInfo.InvariantCulture, "Cannot set empty string to char property on Job class {0} for property '{1}'", obj.GetType(), name)); } object goodValue = ObjectUtils.ConvertValueIfNecessary(paramType, o); prop.GetSetMethod().Invoke(obj, new object[] {goodValue}); } catch (FormatException nfe) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' expects a {2} but was given {3}", obj.GetType(), name, paramType, o), nfe); continue; } catch (MethodAccessException) { HandleError(string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' expects a {2} but was given a {3}", obj.GetType(), name, paramType, o.GetType())); continue; } catch (ArgumentException e) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' expects a {2} but was given {3}", obj.GetType(), name, paramType, o.GetType()), e); continue; } catch (UnauthorizedAccessException e) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' could not be accessed.", obj.GetType(), name), e); continue; } catch (TargetInvocationException e) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' could not be accessed.", obj.GetType(), name), e); continue; } } }
/// <summary> /// Calls the equivalent method on the 'proxied' <see cref="QuartzScheduler" />. /// </summary> public virtual void TriggerJob(JobKey jobKey, JobDataMap data) { CallInGuard(x => x.TriggerJob(jobKey, data)); }
/// <summary> /// Create a JobExecutionContext with the given context data. /// </summary> public JobExecutionContextImpl(IScheduler scheduler, TriggerFiredBundle firedBundle, IJob job) { this.scheduler = scheduler; trigger = firedBundle.Trigger; calendar = firedBundle.Calendar; jobDetail = firedBundle.JobDetail; this.job = job; recovering = firedBundle.Recovering; fireTimeUtc = firedBundle.FireTimeUtc; scheduledFireTimeUtc = firedBundle.ScheduledFireTimeUtc; prevFireTimeUtc = firedBundle.PrevFireTimeUtc; nextFireTimeUtc = firedBundle.NextFireTimeUtc; jobDataMap = new JobDataMap(); jobDataMap.PutAll(jobDetail.JobDataMap); jobDataMap.PutAll(trigger.JobDataMap); }
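The constructor above merges the job detail's data map first and the trigger's second, so for a duplicated key the trigger's value is the one the job sees. A small hedged sketch of that precedence using plain JobDataMap calls; the "batchSize" key and its values are made up, and it assumes PutAll overwrites existing entries, which is what the merge order above relies on.

using System;
using Quartz;

class MergePrecedenceSketch
{
    static void Main()
    {
        var jobData = new JobDataMap();
        jobData.Put("batchSize", "100");     // value configured on the job detail

        var triggerData = new JobDataMap();
        triggerData.Put("batchSize", "25");  // override configured on a specific trigger

        // Same merge order as the constructor above: job detail first, trigger second.
        var merged = new JobDataMap();
        merged.PutAll(jobData);
        merged.PutAll(triggerData);

        Console.WriteLine(merged.GetString("batchSize"));  // expected to print "25"
    }
}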
/// <summary> /// Job that will run quick SQL queries on a schedule. /// /// Called by the <see cref="IScheduler" /> when a /// <see cref="ITrigger" /> fires that is associated with /// the <see cref="IJob" />. /// </summary> public virtual void Execute(IJobExecutionContext context) { JobDataMap dataMap = context.JobDetail.JobDataMap; // get job parms bool runIntegrityCheck = dataMap.GetBoolean("RunIntegrityCheck"); bool runIndexRebuild = dataMap.GetBoolean("RunIndexRebuild"); bool runStatisticsUpdate = dataMap.GetBoolean("RunStatisticsUpdate"); int commandTimeout = dataMap.GetString("CommandTimeout").AsInteger(); int minimumIndexPageCount = dataMap.GetString("MinimumIndexPageCount").AsInteger(); int minimunFragmentationPercentage = dataMap.GetString("MinimumFragmentationPercentage").AsInteger(); int rebuildThresholdPercentage = dataMap.GetString("RebuildThresholdPercentage").AsInteger(); bool useONLINEIndexRebuild = dataMap.GetString("UseONLINEIndexRebuild").AsBoolean(); string alertEmail = dataMap.GetString("AlertEmail"); StringBuilder resultsMessage = new StringBuilder(); Stopwatch stopwatch; bool errorsFound = false; // run integrity check if (runIntegrityCheck) { string databaseName = new RockContext().Database.Connection.Database; string integrityQuery = $"DBCC CHECKDB('{ databaseName }',NOINDEX) WITH PHYSICAL_ONLY, NO_INFOMSGS"; stopwatch = Stopwatch.StartNew(); int errors = DbService.ExecuteCommand(integrityQuery, System.Data.CommandType.Text, null, commandTimeout); stopwatch.Stop(); resultsMessage.Append($"Integrity Check took {(stopwatch.ElapsedMilliseconds / 1000)}s"); if (errors > 0) { // oh no... errorsFound = true; string errorMessage = $"Some errors were reported when running a database integrity check on your Rock database. We'd recommend running the command below under 'Admin Tools > Power Tools > SQL Command' to get further details. 
<p>DBCC CHECKDB ('{ databaseName }') WITH NO_INFOMSGS, ALL_ERRORMSGS</p>"; resultsMessage.Append(errorMessage); if (alertEmail.IsNotNullOrWhiteSpace()) { var globalAttributes = GlobalAttributesCache.Get(); string emailHeader = globalAttributes.GetValue("EmailHeader"); string emailFooter = globalAttributes.GetValue("EmailFooter"); string messageBody = $"{emailHeader} {errorMessage} <p><small>This message was generated from the Rock Database Maintenance Job</small></p>{emailFooter}"; var emailMessage = new RockEmailMessage(); emailMessage.SetRecipients(alertEmail.Split(',').ToList()); emailMessage.Subject = "Rock: Database Integrity Check Error"; emailMessage.Message = messageBody; emailMessage.Send(); } } } if (!errorsFound) { // rebuild fragmented indexes if (runIndexRebuild) { Dictionary <string, object> parms = new Dictionary <string, object>(); parms.Add("@PageCountLimit", minimumIndexPageCount); parms.Add("@MinFragmentation", minimunFragmentationPercentage); parms.Add("@MinFragmentationRebuild", rebuildThresholdPercentage); parms.Add("@UseONLINEIndexRebuild", useONLINEIndexRebuild); stopwatch = Stopwatch.StartNew(); DbService.ExecuteCommand("spDbaRebuildIndexes", System.Data.CommandType.StoredProcedure, parms, commandTimeout); stopwatch.Stop(); resultsMessage.Append($", Index Rebuild took {(stopwatch.ElapsedMilliseconds / 1000)}s"); } // update statistics if (runStatisticsUpdate) { // derived from http://www.sqlservercentral.com/scripts/Indexing/31823/ // NOTE: Can't use sp_MSForEachtable because it isn't supported on AZURE (and it is undocumented) // NOTE: Can't use sp_updatestats because it requires membership in the sysadmin fixed server role, or ownership of the database (dbo) string statisticsQuery = @" DECLARE updatestats CURSOR FOR SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE' ORDER BY TABLE_NAME OPEN updatestats DECLARE @tablename NVARCHAR(max) DECLARE @Statement NVARCHAR(max) FETCH NEXT FROM updatestats INTO @tablename WHILE (@@FETCH_STATUS = 0) BEGIN PRINT N'UPDATING STATISTICS [' + @tablename + ']' SET @Statement = 'UPDATE STATISTICS [' + @tablename + ']' EXEC sp_executesql @Statement FETCH NEXT FROM updatestats INTO @tablename END CLOSE updatestats DEALLOCATE updatestats "; stopwatch = Stopwatch.StartNew(); DbService.ExecuteCommand(statisticsQuery, System.Data.CommandType.Text, null, commandTimeout); stopwatch.Stop(); resultsMessage.Append($", Statistics Update took {(stopwatch.ElapsedMilliseconds / 1000)}s"); } } context.Result = resultsMessage.ToString().TrimStart(','); }
protected override object GetTargetObject() { JobDataMap jobDataMap = new JobDataMap(); jobDataMap["A"] = "B"; CalendarIntervalTriggerImpl t = new CalendarIntervalTriggerImpl(); t.Name = "test"; t.Group = "testGroup"; t.CalendarName = "MyCalendar"; t.Description = "CronTriggerDesc"; t.JobDataMap = jobDataMap; t.RepeatInterval = 5; t.RepeatIntervalUnit = IntervalUnit.Day; return t; }
public void Execute(IJobExecutionContext context) { int storyCount = 0; int newStoryCount = 0; JobDataMap dataMap = context.JobDetail.JobDataMap; List <Story> stories = new List <Story>(); RockContext rockContext = new RockContext(); ContentChannelService contentChannelService = new ContentChannelService(rockContext); ContentChannelItemService contentChannelItemService = new ContentChannelItemService(rockContext); BinaryFileService binaryFileService = new BinaryFileService(rockContext); BinaryFileType binaryFileType = new BinaryFileTypeService(rockContext).Get(Rock.SystemGuid.BinaryFiletype.MEDIA_FILE.AsGuid()); var storiesSeriesChannel = contentChannelService.Get(dataMap.GetString("StoriesContentChannel").AsGuid()); var dbCon = DBConnection.Instance(); dbCon.DatabaseName = "secccp_main"; if (dbCon.IsConnect()) { stories = GetStories(dbCon); foreach (var story in stories) { storyCount++; var item = contentChannelItemService.Queryable().Where(i => i.ForeignId == story.id && i.ContentChannelId == storiesSeriesChannel.Id).FirstOrDefault(); if (item == null) { newStoryCount++; item = new ContentChannelItem() { ContentChannelId = storiesSeriesChannel.Id, ForeignId = story.id, ContentChannelTypeId = storiesSeriesChannel.ContentChannelTypeId }; contentChannelItemService.Add(item); } item.Title = story.title; item.Content = story.description; item.StartDateTime = Helpers.FromUnixTime(story.datecreated); rockContext.SaveChanges(); item.LoadAttributes(); item.SetAttributeValue("Slug", story.slug); item.SetAttributeValue("VimeoId", story.vimeo_id); item.SetAttributeValue("VimeoStreamingUrl", story.vimeo_live_url); item.SetAttributeValue("VimeoDownloadUrl", story.vimeo_sd_url); item.SetAttributeValue("Tags", story.tags); item.SetAttributeValue("Duration", story.duration); if (string.IsNullOrWhiteSpace(item.GetAttributeValue("Image"))) { WebClient client = new WebClient(); try { using (MemoryStream stream = new MemoryStream(client.DownloadData(string.Format("http://panel.secc.org/upload/stories/cover-images/story-{0}.jpg", story.id)))) { BinaryFile binaryFile = new BinaryFile(); binaryFileService.Add(binaryFile); binaryFile.IsTemporary = false; binaryFile.BinaryFileTypeId = binaryFileType.Id; binaryFile.MimeType = "image/jpg"; binaryFile.FileName = string.Format("Story-{0}.jpg", story.id); binaryFile.ContentStream = stream; rockContext.SaveChanges(); item.SetAttributeValue("Image", binaryFile.Guid.ToString()); } } catch (Exception ex) { var a = ex; } } item.SaveAttributeValues(); } } context.Result = string.Format("Synced {0} sermons ({1} New Sermon)", storyCount, newStoryCount); }
/// <summary> /// Calls the equivalent method on the 'proxied' <see cref="QuartzScheduler" />, /// passing the <see cref="SchedulingContext" /> associated with this /// instance. /// </summary> public virtual void TriggerJobWithVolatileTrigger(string jobName, string groupName, JobDataMap data) { try { GetRemoteScheduler().TriggerJobWithVolatileTrigger(schedCtxt, jobName, groupName, data); } catch (RemotingException re) { throw InvalidateHandleCreateException("Error communicating with remote scheduler.", re); } }
/// <summary> /// Schedule specified trigger /// </summary> /// <param name="myTrigger"></param> public Guid ScheduleTrigger(BaseTrigger myTrigger) { Guid triggerId = Guid.Empty; // Set default values DateTimeOffset startAt = (DateTime.MinValue != myTrigger.StartDateTime) ? myTrigger.StartDateTime : DateTime.Now; // Set default trigger group myTrigger.Group = (!string.IsNullOrEmpty(myTrigger.Group)) ? myTrigger.Group : TriggerKey.DefaultGroup; // Check if jobDetail already exists var jobKey = new JobKey(myTrigger.JobName, myTrigger.JobGroup); // If jobDetail does not exist, throw if (!_scheduler.CheckExists(jobKey)) { throw new ArgumentException(string.Format("Job does not exist. Name = {0}, Group = {1}", myTrigger.JobName, myTrigger.JobGroup)); } IJobDetail jobDetail = _scheduler.GetJobDetail(jobKey); var jobDataMap = new JobDataMap(); if (myTrigger.JobDataMap != null && myTrigger.JobDataMap.Count > 0) { foreach (var jobData in myTrigger.JobDataMap) { jobDataMap.Add(jobData.Key, jobData.Value); } } var cronTrigger = myTrigger as CronTrigger; if (cronTrigger != null) { //SmartPolicy Action <CronScheduleBuilder> misFireAction = x => {}; switch (cronTrigger.MisfireInstruction) { case MisfireInstructionCron.DoNothing: misFireAction = builder => builder.WithMisfireHandlingInstructionDoNothing(); break; case MisfireInstructionCron.FireOnceNow: misFireAction = builder => builder.WithMisfireHandlingInstructionFireAndProceed(); break; case MisfireInstructionCron.Ignore: misFireAction = builder => builder.WithMisfireHandlingInstructionIgnoreMisfires(); break; } var trigger = (ICronTrigger)TriggerBuilder.Create() .WithIdentity(myTrigger.Name, myTrigger.Group) .ForJob(jobDetail) .UsingJobData(jobDataMap) .ModifiedByCalendar(!string.IsNullOrEmpty(cronTrigger.CalendarName) ? 
cronTrigger.CalendarName : null) .WithCronSchedule(cronTrigger.CronExpression, misFireAction) .StartAt(startAt) .Build(); trigger.TimeZone = TimeZoneInfo.Local; using (var tran = new TransactionScope()) { triggerId = _persistenceStore.UpsertTriggerKeyIdMap(myTrigger.Name, myTrigger.Group); _scheduler.ScheduleJob(trigger); tran.Complete(); } } var simpleTrigger = myTrigger as SimpleTrigger; if (simpleTrigger != null) { //SmartPolicy Action <SimpleScheduleBuilder> misFireAction = x => x.WithInterval(simpleTrigger.RepeatInterval) .WithRepeatCount(simpleTrigger.RepeatCount); switch (simpleTrigger.MisfireInstruction) { case MisfireInstructionSimple.FireNow: misFireAction = builder => builder.WithInterval(simpleTrigger.RepeatInterval) .WithRepeatCount(simpleTrigger.RepeatCount).WithMisfireHandlingInstructionFireNow(); break; case MisfireInstructionSimple.Ignore: misFireAction = builder => builder.WithInterval(simpleTrigger.RepeatInterval) .WithRepeatCount(simpleTrigger.RepeatCount).WithMisfireHandlingInstructionIgnoreMisfires(); break; case MisfireInstructionSimple.RescheduleNextWithExistingCount: misFireAction = builder => builder.WithInterval(simpleTrigger.RepeatInterval) .WithRepeatCount(simpleTrigger.RepeatCount).WithMisfireHandlingInstructionNextWithExistingCount(); break; case MisfireInstructionSimple.RescheduleNextWithRemainingCount: misFireAction = builder => builder.WithInterval(simpleTrigger.RepeatInterval) .WithRepeatCount(simpleTrigger.RepeatCount).WithMisfireHandlingInstructionNextWithRemainingCount(); break; case MisfireInstructionSimple.RescheduleNowWithExistingRepeatCount: misFireAction = builder => builder.WithInterval(simpleTrigger.RepeatInterval) .WithRepeatCount(simpleTrigger.RepeatCount).WithMisfireHandlingInstructionNowWithExistingCount(); break; case MisfireInstructionSimple.RescheduleNowWithRemainingRepeatCount: misFireAction = builder => builder.WithInterval(simpleTrigger.RepeatInterval) .WithRepeatCount(simpleTrigger.RepeatCount).WithMisfireHandlingInstructionNowWithRemainingCount(); break; } var trigger = (ISimpleTrigger)TriggerBuilder.Create() .WithIdentity(myTrigger.Name, myTrigger.Group) .ForJob(jobDetail) .UsingJobData(jobDataMap) .ModifiedByCalendar(!string.IsNullOrEmpty(simpleTrigger.CalendarName) ? simpleTrigger.CalendarName : null) .StartAt(startAt) .WithSimpleSchedule(misFireAction) .Build(); using (var tran = new TransactionScope()) { triggerId = _persistenceStore.UpsertTriggerKeyIdMap(myTrigger.Name, myTrigger.Group); _scheduler.ScheduleJob(trigger); tran.Complete(); } } return(triggerId); }
protected override object GetTargetObject() { var jobDataMap = new JobDataMap(); jobDataMap["A"] = "B"; var t = new CalendarIntervalTriggerImpl { Name = "test", Group = "testGroup", CalendarName = "MyCalendar", Description = "CronTriggerDesc", JobDataMap = jobDataMap, RepeatInterval = 5, RepeatIntervalUnit = IntervalUnit.Day }; return t; }
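For reference, roughly the same trigger can be expressed through the fluent Quartz.NET builder API instead of instantiating CalendarIntervalTriggerImpl directly; a sketch only, and the calendar must already be registered under the name used here.

ITrigger trigger = TriggerBuilder.Create()
    .WithIdentity("test", "testGroup")
    .ModifiedByCalendar("MyCalendar")          // must match a calendar added to the scheduler
    .UsingJobData("A", "B")
    .WithCalendarIntervalSchedule(x => x.WithInterval(5, IntervalUnit.Day))
    .Build();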
public async Task Execute(IJobExecutionContext context) { Debug.WriteLine(string.Format("'{0}' tick: {1}", context.JobDetail.Key.Name.Substring(0, 5), DateTime.Now)); // Notify the UI that the job is now running. BackupInfoViewModel.SetBackupItemStatus(context.JobDetail.Key.Name, (int)StatusCodes.RUNNING); JobDataMap dataMap = context.JobDetail.JobDataMap; string originPath = dataMap.GetString("originPath"); string backupPath = dataMap.GetString("backupPath"); if (originPath.EndsWith(@"\")) { // Copy directory try { Debug.WriteLine(string.Format("{0} Copying directory: '{1}' to '{2}'", DateTime.Now, originPath, backupPath)); RoboCommand roboCopy = new RoboCommand(); // Copy options roboCopy.CopyOptions.Source = originPath; roboCopy.CopyOptions.Destination = Path.Combine(backupPath, Path.GetFileName(Path.GetDirectoryName(originPath))); roboCopy.CopyOptions.CopySubdirectories = true; roboCopy.CopyOptions.UseUnbufferedIo = true; roboCopy.CopyOptions.Mirror = true; // Selection options roboCopy.SelectionOptions.IncludeSame = false; roboCopy.SelectionOptions.IncludeTweaked = true; roboCopy.SelectionOptions.ExcludeOlder = true; // Retry options roboCopy.RetryOptions.RetryCount = 3; roboCopy.RetryOptions.RetryWaitTime = 5; // Start and wait for the robocopy to finish. await roboCopy.Start(); } catch (Exception e) { Debug.WriteLine("Error in BackupJob.Execute() folder " + e.Message); // Notify the UI that the job has errored. BackupInfoViewModel.SetBackupItemStatus(context.JobDetail.Key.Name, (int)StatusCodes.ERROR); } } else { // Copy file try { Debug.WriteLine(string.Format("{0} Copying file: '{1}' to '{2}'", DateTime.Now, originPath, backupPath)); RoboCommand roboCopy = new RoboCommand(); // Copy options roboCopy.CopyOptions.Source = Path.GetDirectoryName(originPath); roboCopy.CopyOptions.Destination = backupPath; roboCopy.CopyOptions.FileFilter = new string[] { Path.GetFileName(originPath) }; // Selection options roboCopy.SelectionOptions.IncludeSame = false; roboCopy.SelectionOptions.IncludeTweaked = true; roboCopy.SelectionOptions.ExcludeOlder = true; // Retry options roboCopy.RetryOptions.RetryCount = 3; roboCopy.RetryOptions.RetryWaitTime = 5; // Start and wait for the robocopy to finish. await roboCopy.Start(); } catch (Exception e) { Debug.WriteLine("Error in BackupJob.Execute() file " + e.Message); // Notify the UI that the job has errored. BackupInfoViewModel.SetBackupItemStatus(context.JobDetail.Key.Name, (int)StatusCodes.ERROR); } } Debug.WriteLine(string.Format("Copy job '{0}' completed at: {1}", context.JobDetail.Key.Name.Substring(0, 5), DateTime.Now)); // Update the backup information for the item and save the BackupItem collection config to file. BackupInfoViewModel.NotifyItemHasBeenBackedUp(context.JobDetail.Key.Name); BackupInfoViewModel.UpdateNextBackupDate(context.JobDetail.Key.Name); BackupInfoViewModel.SaveConfig(); }
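The job above reads originPath and backupPath from its JobDataMap, so wiring it up could look like the sketch below; the identities, paths, and the 24-hour interval are assumptions, and scheduler is taken to be an already started IScheduler (Quartz 3.x style, matching the async Execute).

// Hypothetical registration of the BackupJob defined above.
IJobDetail job = JobBuilder.Create<BackupJob>()
    .WithIdentity("DocumentsBackup", "backups")
    .UsingJobData("originPath", @"C:\Users\me\Documents\")   // trailing '\' selects the directory branch
    .UsingJobData("backupPath", @"D:\Backups")
    .Build();

ITrigger trigger = TriggerBuilder.Create()
    .WithIdentity("DocumentsBackupTrigger", "backups")
    .StartNow()
    .WithSimpleSchedule(x => x.WithIntervalInHours(24).RepeatForever())
    .Build();

await scheduler.ScheduleJob(job, trigger);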
/// <summary> /// Trigger the identified <see cref="IJob" /> (Execute it now) - with a non-volatile trigger. /// </summary> public virtual void TriggerJob(JobKey jobKey, JobDataMap data) { ValidateState(); // TODO: use builder IOperableTrigger trig = new SimpleTriggerImpl( NewTriggerId(), SchedulerConstants.DefaultGroup, jobKey.Name, jobKey.Group, SystemTime.UtcNow(), null, 0, TimeSpan.Zero); trig.ComputeFirstFireTimeUtc(null); if (data != null) { trig.JobDataMap = data; } bool collision = true; while (collision) { try { resources.JobStore.StoreTrigger(trig, false); collision = false; } catch (ObjectAlreadyExistsException) { trig.Key = new TriggerKey(NewTriggerId(), SchedulerConstants.DefaultGroup); } } NotifySchedulerThread(trig.GetNextFireTimeUtc()); NotifySchedulerListenersScheduled(trig); }
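Application code normally reaches this through the IScheduler facade rather than calling the scheduler core directly; a minimal sketch, assuming scheduler is a started scheduler and the job key and data are illustrative.

var data = new JobDataMap();
data.Put("requestedBy", "ops-console");

// Fires the already stored job once, immediately, merging the extra data into its JobDataMap.
scheduler.TriggerJob(new JobKey("reportJob", "reports"), data);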
/// <summary> /// Sets the object properties. /// </summary> /// <param name="obj">The object to set properties to.</param> /// <param name="data">The data to set.</param> public virtual void SetObjectProperties(object obj, JobDataMap data) { Type paramType = null; foreach (string name in data.Keys) { string c = name.Substring(0, 1).ToUpper(CultureInfo.InvariantCulture); string propName = c + name.Substring(1); object o = data[name]; PropertyInfo prop = obj.GetType().GetProperty(propName); try { if (prop == null) { HandleError(string.Format(CultureInfo.InvariantCulture, "No property on Job class {0} for property '{1}'", obj.GetType(), name)); continue; } paramType = prop.PropertyType; if (o == null && (paramType.IsPrimitive || paramType.IsEnum)) { // cannot set null to these HandleError(string.Format(CultureInfo.InvariantCulture, "Cannot set null to property on Job class {0} for property '{1}'", obj.GetType(), name)); } if (paramType == typeof(char) && o is string && ((string)o).Length != 1) { // handle special case HandleError(string.Format(CultureInfo.InvariantCulture, "Cannot set empty string to char property on Job class {0} for property '{1}'", obj.GetType(), name)); } object goodValue = paramType == typeof(TimeSpan) ? ObjectUtils.GetTimeSpanValueForProperty(prop, o) : ObjectUtils.ConvertValueIfNecessary(paramType, o); prop.GetSetMethod().Invoke(obj, new object[] { goodValue }); } catch (FormatException nfe) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' expects a {2} but was given {3}", obj.GetType(), name, paramType, o), nfe); } catch (MethodAccessException) { HandleError(string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' expects a {2} but was given a {3}", obj.GetType(), name, paramType, o.GetType())); } catch (ArgumentException e) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' expects a {2} but was given {3}", obj.GetType(), name, paramType, o.GetType()), e); } catch (UnauthorizedAccessException e) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' could not be accessed.", obj.GetType(), name), e); } catch (TargetInvocationException e) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' could not be accessed.", obj.GetType(), name), e); } catch (Exception e) { HandleError( string.Format(CultureInfo.InvariantCulture, "The setter on Job class {0} for property '{1}' threw exception when processing.", obj.GetType(), name), e); } } }
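SetObjectProperties upper-cases the first character of each JobDataMap key, looks for a public property of that name on the target object, and converts the value before invoking the setter. A small sketch of a class that relies on that convention, with hypothetical names; factory stands for an instance of the type that defines SetObjectProperties above.

public class HttpPollJob
{
    public string Url { get; set; }        // filled from the "url" entry
    public int MaxRetries { get; set; }    // filled from the "maxRetries" entry
}

var data = new JobDataMap();
data.Put("url", "https://example.org/health");
data.Put("maxRetries", 3);

var job = new HttpPollJob();
factory.SetObjectProperties(job, data);    // key "url" -> property "Url", "maxRetries" -> "MaxRetries"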