public void Start() { // file system watcher will fail if directory isn't there, ensure it is if (!Directory.Exists(_baseDirectory)) Directory.CreateDirectory(_baseDirectory); _scheduler = new TimerScheduler(new PoolFiber()); _channel = new ChannelAdapter(); _producer = new PollingFileSystemEventProducer(_baseDirectory, _channel, _scheduler, new PoolFiber(), 2.Minutes()); _connection = _channel.Connect(config => { config .AddConsumerOf<FileSystemEvent>() .BufferFor(3.Seconds()) .UseScheduler(_scheduler) .Distinct(fsEvent => GetChangedDirectory(fsEvent.Path)) .UsingConsumer(fsEvents => { fsEvents.Keys.Distinct().Each(key => { if (key == _baseDirectory) return; _serviceChannel.Send(new ServiceFolderChanged(key)); }); }) .HandleOnFiber(_fiber); }); }
public static Scheduler MakeInstance () { if (mInstance == null) { mInstance = Camera.main.gameObject.AddComponent<Scheduler>(); } return mInstance; }
public void PostShouldThrowExceptionAfterStop() { var scheduler = new Scheduler(new FakeLoggerFactory()); scheduler.Start(); scheduler.Stop(); Assert.Throws<InvalidOperationException>(() => scheduler.Post("testing", () => { })); }
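The test above pins down a lifecycle rule: once Stop() has been called, Post must refuse further work. A minimal sketch of that guard, assuming a simple stopped flag is all the rule requires (StoppableScheduler is a hypothetical name, not the library's actual implementation):

// Hypothetical sketch of the stop-guard behaviour asserted by the test above.
public sealed class StoppableScheduler
{
    private volatile bool _stopped;

    public void Start() => _stopped = false;

    public void Stop() => _stopped = true;

    public void Post(string name, Action work)
    {
        if (_stopped)
            throw new InvalidOperationException($"Cannot post '{name}': the scheduler has been stopped.");
        work();
    }
}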
public EndpointSubscriptionCache(Fiber fiber, Scheduler scheduler, SubscriptionObserver observer) { _fiber = fiber; _scheduler = scheduler; _observer = observer; _messageSubscriptions = new Dictionary<string, EndpointSubscription>(); }
public Shutter(string openPin, string closePin, int delay, IHardwareAbstractionLayer hal) { if (string.IsNullOrEmpty(openPin)) throw new ArgumentOutOfRangeException("openPin", "missing open pin"); if (string.IsNullOrEmpty(closePin)) throw new ArgumentOutOfRangeException("closePin", "missing close pin"); if (delay <= 0) throw new ArgumentOutOfRangeException("delay", "delay must be greater than 0"); if (hal == null) throw new ArgumentNullException("hal"); _openPin = openPin; _closePin = closePin; _delay = delay; _hal = hal; _scheduler = Scheduler.GetScheduler(); //Stop motors on startup _hal.Write(_openPin, PinTypes.Output, 0); _hal.Write(_closePin, PinTypes.Output, 0); _status = StatusStop; Class = "Window"; Hardware = _hal.Info.Name; Version = CLASS_VERSION; }
static void Main(string[] args) { System.Console.WriteLine("Application started - Esc to quit"); var sessionFactory = NHibernateFactory.AssembleSessionFactory(DATABASE_FILE); ILog consoleLogger = new ConsoleLogger {VerbosityLevel = 2}; ILog databaseLogger = new Logger(new LogEntryDatabaseRepository(sessionFactory)) {VerbosityLevel = 1}; ILog log = new CompositeLogger(consoleLogger, databaseLogger); var iocContainer = new IocContainerForScheduler(); iocContainer.BindToConstant(sessionFactory); var taskDirectory = new DirectoryInfo(Assembly.GetExecutingAssembly().Location).Parent.FullName; var taskScheduler = new Scheduler(iocContainer, taskDirectory, new FileUtilities(), new TimerWithTimestamp(0, 10000), new TimerWithTimestamp(0, 2000), log); taskScheduler.Start(); try { while (System.Console.ReadKey().Key != ConsoleKey.Escape) { } } catch (Exception e) { log.WriteEntry(new LogEntry(e.Source, e.Message, DateTime.Now)); } }
public void Given_scheduler_with_daily_item() { var context = new TestSchedulerContext(); _scheduler = new Scheduler(context); var runsoon = new TimeSpan(DateTime.Now.Hour, DateTime.Now.Minute, DateTime.Now.Second); var foo1 = JobFactory.Daily("Foo1", () => { Console.WriteLine("Foo1"); foo1_calls += 1; }, runsoon, description: "Description for Foo1"); var foo2 = JobFactory.Daily("Foo2", () => Console.WriteLine("Foo2"), runsoon); _scheduler.Schedule(foo1); _scheduler.Schedule(foo2); _scheduler.Start(); Wait.Until(() => foo1_calls > 0, 30); context.MoveToTommorrow(); Wait.Until(() => foo1_calls > 1, 30); context.MoveForwardNDays(2); Wait.Until(() => foo1_calls > 2, 50); }
/// <summary> /// Create some necessary parts and some dummy Jobs. /// </summary> public BenchmarkSystemTest() { scheduler = system.GetScheduler(); Job1 = new Job( p => { return 0; }, 1, "Michael", 4, 4 ); Job2 = new Job( p => { return 0; }, 2, "Michael", 4, 0.5 ); Job3 = new Job( p => { return 0; }, 3, "Michael", 1, 0.5 ); Job4 = new Job( p => { return 0; }, 4, "Michael", 6, 1 ); }
public void Given_scheduler_with_some_work() { _scheduler = new Scheduler(); _scheduler.Schedule("Foo1", () => Console.WriteLine("Foo1"), TimeSpan.FromMinutes(2)); _scheduler.Schedule("Foo2", () => Console.WriteLine("Foo2"), TimeSpan.FromMinutes(2)); }
static void Main(string[] args) { //Future enhancement: load the input file from the executable's local directory, e.g. \\myfile.txt Scheduler scheduler = new Scheduler(@"c:\users\s\documents\visual studio 2013\Projects\ConferenceTrackMgmt\ConferenceTrackMgmt\SupportFiles\input.txt"); string output = scheduler.Print(); Console.Write(output); }
/// <summary> /// Create some necessary parts and some dummy jobs. /// </summary> public BenchmarkSystemTest() { scheduler = system.GetScheduler(); Logger logger = new Logger(system); job1 = new Job( p => { Console.WriteLine("Job1 is running"); return 0; }, 4, 40, new Owner { name = "Michael" } ); job2 = new Job( p => { Console.WriteLine("Job2 is running"); return 0; }, 4, 5, new Owner { name = "Michael" } ); job3 = new Job( p => { Console.WriteLine("Job3 is running"); return 0; }, 1, 5, new Owner { name = "Michael" } ); job4 = new Job( p => { Console.WriteLine("Job4 is running"); return 0; }, 6, 10, new Owner { name = "Michael" } ); }
public void ItShouldStartAndStopWithoutExceptions() { var scheduler = new Scheduler(new FakeLoggerFactory()); scheduler.Start(); var stopped = scheduler.Stop().Wait(4000); Assert.True(stopped); }
static void Main(string[] args) { var scheduler = new Scheduler(); var random = new Random(); for (var i = 0; i < 1000; i++) { var id = "Foo" + i; scheduler.Schedule(id, () => { return Task.Factory.StartNew(() => { Console.WriteLine(id + " Started"); for (var a = 0; a < 10000 + random.Next(); a++) { Thread.Sleep(1); } Console.WriteLine(id + " Completed"); }); }, TimeSpan.FromMinutes(3)); } Console.WriteLine("S - to get starts, anything else to exit."); while (true) { var key = Console.ReadKey(); if (key.Key == ConsoleKey.S) { Console.WriteLine("tats>> " + scheduler.GatherOveralStats()); } else { break; } } }
public SchedulerWindow() { _warningsToDeploy = new List<DisplayEvent>(); _deployedWarnings = new List<DisplayEvent>(); _buttonToEventLookup = new Dictionary<int, Tuple<string, DateTime>>(); _scheduler = new Scheduler(); InitializeComponent(); this.MaximizeBox = false; this.FormBorderStyle = FormBorderStyle.FixedSingle; UpdateSchedulerTable(fieldEdited: true); UpdateCalendar(); ClearEventDetails(); _dailyReminderTask = new Task(DailyReminder); _dailyReminderTask.Start(); _urgentStyle = new DataGridViewCellStyle(); _urgentStyle.BackColor = Color.LightPink; _urgentStyle.SelectionBackColor = Color.LightPink; _warningStyle = new DataGridViewCellStyle(); _warningStyle.BackColor = Color.LightYellow; _warningStyle.SelectionBackColor = Color.LightYellow; _defaultStyle = new DataGridViewCellStyle(); _defaultStyle.BackColor = Color.LightGreen; _defaultStyle.SelectionBackColor = Color.LightGreen; EventTable.ClearSelection(); }
public void Start() { // file system watcher will fail if directory isn't there, ensure it is if (!System.IO.Directory.Exists(_baseDir)) System.IO.Directory.CreateDirectory(_baseDir); _channel = new ChannelAdapter(); FiberFactory fiberFactory = () => new SynchronousFiber(); _scheduler = new TimerScheduler(fiberFactory()); _producer = new PollingFileSystemEventProducer(_baseDir, _channel, _scheduler, fiberFactory(), 2.Minutes()); _channel.Connect(config => config .AddConsumerOf<FileSystemEvent>() .BufferFor(3.Seconds()) .Distinct(fsEvent => GetChangedDirectory(fsEvent.Path)) .UsingConsumer(fsEvents => fsEvents.Keys.ToList().ForEach(key => { if (key == _baseDir) return; _hostChannel.Send(new FileSystemChange { ShelfName = key }); }))); }
public override void SetUp() { base.SetUp(); world = new World("Test", DateTime.Now, 2012); scheduler = new Scheduler(); }
public void A_file_is_created() { _baseDirectory = AppDomain.CurrentDomain.BaseDirectory; _filename = "test2.dat"; _path = Path.Combine(_baseDirectory, _filename); System.IO.File.Delete(_path); _listener = new Future<FileCreated>(); _channel = new ChannelAdapter(); FiberFactory fiberFactory = () => new SynchronousFiber(); _scheduler = new TimerScheduler(fiberFactory()); _producer = new PollingFileSystemEventProducer(_baseDirectory, _channel, _scheduler, fiberFactory(), 20.Seconds()); Thread.Sleep(5.Seconds()); using (_channel.Connect(x => x.AddConsumerOf<FileCreated>().UsingConsumer(m => _listener.Complete(m)))) { System.IO.File.Create(_path); _listener.WaitUntilCompleted(25.Seconds()); } _producer.Dispose(); }
/// <summary> /// Initializes a new instance of the <see cref="GameSystemBase" /> class. /// </summary> /// <param name="registry">The registry.</param> /// <remarks>The GameSystem is expecting the following services to be registered: <see cref="IGame" /> and <see cref="AssetManager" />.</remarks> public ScriptSystem(IServiceRegistry registry) : base(registry) { Enabled = true; Scheduler = new Scheduler(); Services.AddService(typeof(ScriptSystem), this); }
/// <summary> /// Create some necessary parts and some dummy Jobs. /// </summary> public BenchmarkSystemTest() { scheduler = system.GetScheduler(); Job1 = new Job( p => { Console.WriteLine("Job1 is running"); return 0; }, "job1", "Michael", 4, 40 ); Job2 = new Job( p => { Console.WriteLine("Job2 is running"); return 0; }, "job2", "Michael", 4, 5 ); Job3 = new Job( p => { Console.WriteLine("Job3 is running"); return 0; }, "job3", "Michael", 1, 5 ); Job4 = new Job( p => { Console.WriteLine("Job4 is running"); return 0; }, "job4", "Michael", 6, 10 ); }
/// <summary> /// Creates a PollingFileSystemEventProducer /// </summary> /// <param name="directory">The directory to watch</param> /// <param name="channel">The channel where events should be sent</param> /// <param name="scheduler">Event scheduler</param> /// <param name="fiber">Fiber to schedule on</param> /// <param name="checkInterval">The maximal time between events or polls on a given file</param> /// <param name="checkSubDirectory">Indicates if subdirectorys will be checked or ignored</param> public PollingFileSystemEventProducer(string directory, UntypedChannel channel, [NotNull] Scheduler scheduler, Fiber fiber, TimeSpan checkInterval, bool checkSubDirectory) { if (scheduler == null) throw new ArgumentNullException("scheduler"); _directory = directory; _channel = channel; _fiber = fiber; _hashes = new Dictionary<string, Guid>(); _scheduler = scheduler; _checkInterval = checkInterval; _scheduledAction = scheduler.Schedule(3.Seconds(), _fiber, HashFileSystem); var myChannel = new ChannelAdapter(); _connection = myChannel.Connect(connectionConfigurator => { connectionConfigurator.AddConsumerOf<FileSystemChanged>().UsingConsumer(HandleFileSystemChangedAndCreated); connectionConfigurator.AddConsumerOf<FileSystemCreated>().UsingConsumer(HandleFileSystemChangedAndCreated); connectionConfigurator.AddConsumerOf<FileSystemRenamed>().UsingConsumer(HandleFileSystemRenamed); connectionConfigurator.AddConsumerOf<FileSystemDeleted>().UsingConsumer(HandleFileSystemDeleted); }); _fileSystemEventProducer = new FileSystemEventProducer(directory, myChannel, checkSubDirectory); }
public PeerSubscriptionCache(Fiber fiber, Scheduler scheduler, SubscriptionObserver observer) { _observer = observer; _fiber = fiber; _scheduler = scheduler; _endpoints = new Dictionary<Uri, EndpointSubscriptionCache>(); }
public ChooseNodegroup(string cluster) { InitializeComponent(); listBox.Items.Clear(); listBox.Items.Add("<Any>"); if (cluster != "") { try { Scheduler scheduler = new Scheduler(); scheduler.Connect(cluster); IStringCollection ngs = scheduler.GetNodeGroupList(); foreach (String ng in ngs) { listBox.Items.Add(ng /*+ " (" + scheduler.GetNodesInNodeGroup(ng).Count().ToString() + ")" */ ); } } catch { } } Mouse.OverrideCursor = null; }
public MainViewModel() { _loadTheme(); _errors = new ConcurrentDictionary<AbstractViewModel, ErrorViewModel>(); Scheduler = new Scheduler<AbstractViewModel>(); Scheduler.AktuellChanged += OnSchedulerAktuellChanged; }
public ScheduledReporter(string name, Func<Reporter> reporter, MetricsRegistry registry, Func<HealthStatus> healthStatus, TimeSpan interval, Scheduler scheduler) { this.reporter = reporter; this.registry = registry; this.healthStatus = healthStatus; this.interval = interval; this.scheduler = scheduler; }
public ScheduledReporter(MetricsReport report, MetricsDataProvider metricsDataProvider, Func<HealthStatus> healthStatus, TimeSpan interval, Scheduler scheduler) { this.report = report; this.metricsDataProvider = metricsDataProvider; this.healthStatus = healthStatus; this.scheduler = scheduler; this.scheduler.Start(interval, t => RunReport(t)); }
protected override void SetUp() { timer = NewMock<ITimer>(); clock = NewMock<IClock>(); eventFactory = NewMock<IScheduledEventFactory>(); scheduler = new Scheduler(clock, timer, eventFactory); }
public EventSchedule(int bufLen) { _scheduler = new Scheduler(this); _schedule = new List<IScheduleEvent>[bufLen]; for (int i = 0; i < bufLen; i++) _schedule[i] = new List<IScheduleEvent>(); }
public static Scheduler GetInstance() { if (instance == null) { instance = new Scheduler(); } return instance; }
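Both lazy singletons above (MakeInstance and GetInstance) use an unsynchronized null check, so two threads racing on first access could each construct an instance. If concurrent first access is possible, a Lazy&lt;T&gt;-based variant avoids the race; this is a generic C# alternative, not code from the original project:

// Hypothetical thread-safe alternative to the unsynchronized null check above.
private static readonly Lazy<Scheduler> lazyInstance = new Lazy<Scheduler>(() => new Scheduler());

public static Scheduler GetInstance()
{
    return lazyInstance.Value;
}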
public Cleaner(Scheduler scheduler) { scheduler.In(TimeSpan.FromDays(1), time => { Clean(Renamer.Tv, 30); Clean(Renamer.Movies, 60); }, true); }
public EndpointSubscriptionCache(Fiber fiber, Scheduler scheduler, SubscriptionObserver observer) { _fiber = fiber; _scheduler = scheduler; _observer = observer; _messageSubscriptions = new DictionaryCache<SubscriptionKey, EndpointSubscription>( key => new EndpointSubscription(_fiber, _scheduler, key.MessageName, key.CorrelationId, _observer)); }
//FEEDBACK RB //It would have been neater (and more logical) to create the scheduler only once, as was also done for User. public string ConflictsClassroomEmpty() { Models.BU.Scheduler s = new Scheduler(); return s.ConflictCheckClassroomEmpty(); }
public string ConflictsStudentgroupEmpty() { Models.BU.Scheduler s = new Scheduler(); return s.ConflictCheckStudentgroupEmpty(); }
public string ConflictsTeacherEmpty() { Models.BU.Scheduler s = new Scheduler(); return s.ConflictCheckTeacherEmpty(); }
/// <summary> /// Selection has been changed as the result of a user interaction. /// </summary> private void performUpdateSelected() { var beatmap = beatmapNoDebounce; var ruleset = rulesetNoDebounce; selectionChangedDebounce?.Cancel(); if (beatmapNoDebounce == null) { run(); } else { selectionChangedDebounce = Scheduler.AddDelayed(run, 200); } void run() { Logger.Log($"updating selection with beatmap:{beatmap?.ID.ToString() ?? "null"} ruleset:{ruleset?.ID.ToString() ?? "null"}"); if (transferRulesetValue()) { Mods.Value = Array.Empty <Mod>(); // transferRulesetValue() may trigger a refilter. If the current selection does not match the new ruleset, we want to switch away from it. // The default logic on WorkingBeatmap change is to switch to a matching ruleset (see workingBeatmapChanged()), but we don't want that here. // We perform an early selection attempt and clear out the beatmap selection to avoid a second ruleset change (revert). if (beatmap != null && !Carousel.SelectBeatmap(beatmap, false)) { beatmap = null; } } // We may be arriving here due to another component changing the bindable Beatmap. // In these cases, the other component has already loaded the beatmap, so we don't need to do so again. if (!EqualityComparer <BeatmapInfo> .Default.Equals(beatmap, Beatmap.Value.BeatmapInfo)) { Logger.Log($"beatmap changed from \"{Beatmap.Value.BeatmapInfo}\" to \"{beatmap}\""); WorkingBeatmap previous = Beatmap.Value; Beatmap.Value = beatmaps.GetWorkingBeatmap(beatmap, previous); if (beatmap != null) { if (beatmap.BeatmapSetInfoID == beatmapNoDebounce?.BeatmapSetInfoID) { sampleChangeDifficulty.Play(); } else { sampleChangeBeatmap.Play(); } } } if (this.IsCurrentScreen()) { ensurePlayingSelected(); } updateComponentFromBeatmap(Beatmap.Value); } }
public override IEnumerable <object> Run() { ArraySegment <byte> response = new ArraySegment <byte>(base.Operation.Buffer); string errorType = ""; string errorValue = ""; bool toClose = false; if (response.Array.Length >= 4 && response.Array[0] == 204 && response.Array[1] == 20 && response.Array[2] == 234 && response.Array[3] == 30) { errorType = "ClientCheat"; errorValue = Encoding.ASCII.GetString(response.Array, 4, response.Array.Length - 4); if (Log <RespondProcessor> .Logger.IsWarnEnabled) { string text = ""; for (int i = 0; i < response.Array.Length; i++) { text += string.Format("{0:X2} ", response.Array[i]); } Log <RespondProcessor> .Logger.WarnFormat("Client Hack/Cheat Detected {{ Entity.ID = {0:o} }} Msg = {1}", base.Entity.ID, text); } } else { AntiCpXSvr.Recommend recommend; AntiCpXSvr.Error error = AntiCpXSvr.VerifyResponse(base.Entity.Handle, response, out recommend); if (recommend == AntiCpXSvr.Recommend.KeepSession) { if (Log <RespondProcessor> .Logger.IsInfoEnabled) { Log <RespondProcessor> .Logger.InfoFormat("AntiCpXSvr.VerifyResponse(clientHandle, responseBuffer, out recommend = {0}) = {1} {{ Entity.ID = {2:o} }}", recommend, error, base.Entity.ID); } } else { if (Log <RespondProcessor> .Logger.IsWarnEnabled) { Log <RespondProcessor> .Logger.WarnFormat("AntiCpXSvr.VerifyResponse(clientHandle, responseBuffer, out recommend = {0}) = {1} {{ Entity.ID = {2:o} }}", recommend, error, base.Entity.ID); } errorType = error.ToString(); toClose = true; } } if (errorType.Length > 0) { using (HackShieldErrorLogDataContext hackShieldErrorLogDataContext = new HackShieldErrorLogDataContext()) { try { hackShieldErrorLogDataContext.AddHackShieldError(new long?(base.Operation.CharacterID), errorType, errorValue); } catch (Exception ex) { Log <RespondProcessor> .Logger.Error(string.Format("Error while making log : [CharacterID = {0}]", base.Operation.CharacterID), ex); } } if (toClose) { SendPacket packet; if (!base.Operation.IsCheat) { packet = SendPacket.Create <SystemMessage>(new SystemMessage(SystemMessageCategory.Dialog, "GameUI_HackSheild_Error")); } else { packet = SendPacket.Create <SystemMessage>(new SystemMessage(SystemMessageCategory.Notice, "GameUI_HackSheild_Contact")); } OperationSync sync = new OperationSync { Connection = base.Entity.FrontendConn, Operation = packet }; yield return(sync); Scheduler.Schedule(this.Service.Thread, Job.Create(new Action(base.Entity.Entity.Close)), 3000); } } base.Finished = true; yield break; }
private static DateTime SetPrayerTime(string _date, string _time, string prayerName, Scheduler a, string player) { DateTime dateOnly = DateTime.Parse(_date); TimeSpan timeOnly = DateTime.Parse(_time).TimeOfDay; DateTime setTime = dateOnly + timeOnly; a.CreateJob<T>(prayerName, setTime, player); return setTime; }
public void Dispose() { Scheduler.Dispose(); }
internal SlotList(Scheduler lServers) { zServers = lServers; }
public void SchedulerRefreshSchedulesLimitedByQueue() { DateTime now = DateTime.UtcNow.FloorWithSeconds(); ScheduleRecord schedule = new ScheduleRecord() { ApplicationName = BlueCollarSection.Section.ApplicationName, Enabled = true, Id = 1, Name = "Test", QueueName = "first", RepeatType = ScheduleRepeatType.Days, RepeatValue = 1, StartOn = now }; ScheduledJobRecord scheduledJob = new ScheduledJobRecord() { Data = @"{""SleepDuration"":1000}", Id = 1, JobType = JobSerializer.GetTypeName(typeof(TestJob)), Schedule = schedule, ScheduleId = 1 }; schedule.ScheduledJobs.Add(scheduledJob); ScheduleRecord schedule2 = new ScheduleRecord() { ApplicationName = BlueCollarSection.Section.ApplicationName, Enabled = true, Id = 2, Name = "Test 2", QueueName = "second", RepeatType = ScheduleRepeatType.Days, RepeatValue = 1, StartOn = now }; ScheduledJobRecord scheduledJob2 = new ScheduledJobRecord() { Data = @"{""SleepDuration"":1000}", Id = 2, JobType = JobSerializer.GetTypeName(typeof(TestJob)), Schedule = schedule2, ScheduleId = 2 }; var transaction = new Mock <IDbTransaction>(); var repository = new Mock <IRepository>(); repository.Setup(r => r.BeginTransaction()).Returns(transaction.Object); repository.Setup(r => r.GetSchedules(BlueCollarSection.Section.ApplicationName, It.IsAny <IDbTransaction>())).Returns(new ScheduleRecord[] { schedule, schedule2 }); var factory = new Mock <IRepositoryFactory>(); factory.Setup(f => f.Create()).Returns(repository.Object); var logger = new Mock <ILogger>(); Scheduler scheduler = new Scheduler(1, BlueCollarSection.Section.ApplicationName, QueueNameFilters.Parse("second"), 1, factory.Object, logger.Object); Assert.AreEqual(0, scheduler.Schedules.Count()); scheduler.RefreshSchedules(); Assert.AreEqual(1, scheduler.Schedules.Count()); Assert.AreEqual("Test 2", scheduler.Schedules.First().Name); }
// should run inline where possible. this is to fix RunAllSteps potentially finding no steps if the steps are added in LoadComplete (else they get forcefully scheduled too late) private void schedule(Action action) => Scheduler.Add(action, false);
private void runNextStep(Action onCompletion, Action <Exception> onError, Func <StepButton, bool> stopCondition) { try { if (loadableStep != null) { if (loadableStep.IsMaskedAway) { scroll.ScrollTo(loadableStep); } loadableStep.PerformStep(); } } catch (Exception e) { onError?.Invoke(e); return; } string text = "."; if (actionRepetition == 0) { text = $"{(int)Time.Current}: ".PadLeft(7); if (actionIndex < 0) { text += $"{GetType().ReadableName()}"; } else { text += $"step {actionIndex + 1} {loadableStep?.ToString() ?? string.Empty}"; } } Console.Write(text); actionRepetition++; if (actionRepetition > (loadableStep?.RequiredRepetitions ?? 1) - 1) { actionIndex++; actionRepetition = 0; Console.WriteLine(); if (loadableStep != null && stopCondition?.Invoke(loadableStep) == true) { return; } } if (actionIndex > StepsContainer.Children.Count - 1) { onCompletion?.Invoke(); return; } if (Parent != null) { stepRunner = Scheduler.AddDelayed(() => runNextStep(onCompletion, onError, stopCondition), TimePerAction); } }
internal static void SetUpdateState(UpdateStates state) { Scheduler.Add(delegate { if (state == UpdateState) { return; } UpdateState = state; VoidDelegate d = UpdateStateChanged; if (d != null) { d(); } switch (UpdateState) { case UpdateStates.EmergencyFallback: //let's hope we never need this. OsuMain.Repair(true); break; case UpdateStates.Completed: ConfigManager.sUpdateFailCount.Value = 0; //Update has completed and doesn't need a restart. ConfigManager.sUpdatePending.Value = false; ConfigManager.SaveConfig(); string lastVersion = ConfigManager.sLastVersion; NotificationManager.ShowMessage( string.Format(LocalisationManager.GetString(OsuString.Update_Complete), General.BUILD_NAME) + '\n' + LocalisationManager.GetString(OsuString.GameBase_Updater_Changelog), Color.Pink, 10000, delegate { if (string.IsNullOrEmpty(General.SUBVERSION)) { //public releases GameBase.ProcessStart(string.Format(@"https://osu.ppy.sh/p/changelog?v={0}&s={1}&l={2}", General.BUILD_NAME, General.TargetedPublicStream.ToString().ToLower(), lastVersion)); } else { //beta or cutting edge GameBase.ProcessStart(@"https://osu.ppy.sh/p/changelog?v=next"); } }); break; case UpdateStates.Error: ConfigManager.sUpdatePending.Value = false; if (CommonUpdater.LastError != null) { if (CommonUpdater.LastError is MissingFrameworkVersionException) { CommonUpdater.ResetError(); NotificationManager.ShowMessage(LocalisationManager.GetString(OsuString.GameBase_UpdateFailedFrameworkVersion), Color.Red, 300000, delegate { OsuMain.ForceUpdate(true); }); } else { RunBackgroundThread(delegate { ErrorSubmission.Submit(new OsuError(CommonUpdater.LastError) { Feedback = @"update error", ILTrace = CommonUpdater.LastErrorExtraInformation ?? string.Empty }); CommonUpdater.ResetError(); }); } } ConfigManager.ResetHashes(); ConfigManager.sUpdateFailCount.Value++; break; case UpdateStates.NeedsRestart: //the update could have already moved the files into their new place, so we want to make sure we have reloaded the master config file. ConfigManager.ReloadHashCache(); ConfigManager.sUpdateFailCount.Value = 0; bool isNewUpdate = !ConfigManager.sUpdatePending.Value; ConfigManager.sUpdatePending.Value = true; //Update completed but needs a restart. We either want to force a restart or just wait for the next user-triggered restart. if (UpdateForceRestart) { CompleteUpdate(); } else if (Mode != OsuModes.Menu && isNewUpdate) { NotificationManager.ShowMessage(LocalisationManager.GetString(OsuString.General_NewVersion), Color.Pink, 10000); } UpdatePendingRestart = true; break; case UpdateStates.NoUpdate: ConfigManager.sUpdateFailCount.Value = 0; break; } }); }
/// <summary> /// Pauses a job. /// </summary> /// <param name="jobName">Job name</param> /// <param name="groupName">Job group name</param> public static void Pause(string jobName, string groupName = "") { Scheduler.PauseJob(GetJobKey(jobName, groupName)); }
private void onCommit(string value) { CommittedText = value; Scheduler.AddDelayed(() => IsLoading = false, 1000); }
public void MyTestInitialize() { Module.Initialize(); Scheduler.Run(); PrimaryScheduler.Run(); }
void data_DataImage(NikonObject sender, NkMAIDImageInfo imageInfo, IntPtr data) { NikonThumbnail thumbnail = new NikonThumbnail(imageInfo, data); Scheduler.Callback(new ThumbnailReadyDelegate(OnThumbnailReady), this, thumbnail); }
static HeroesContentsLoader() { string path = Path.Combine(Environment.CurrentDirectory, (IntPtr.Size == 4) ? "x86\\System.Data.SQLite.DLL" : "x64\\System.Data.SQLite.DLL"); HeroesContentsLoader.DataFileName = ServiceCoreSettings.Default.heroesContentsDB3; HeroesContentsLoader.LocalizedTextFileName = ServiceCoreSettings.Default.localizedTextDB3; Log <HeroesContentsLoader> .Logger.InfoFormat("HeroesContentsLoader in {0}bit", (IntPtr.Size == 4)? 32 : 64); HeroesContentsLoader.Assembly_Sqlite = Assembly.LoadFile(path); HeroesContentsLoader.Type_SQLiteConnection = HeroesContentsLoader.Assembly_Sqlite.GetType("System.Data.SQLite.SQLiteConnection"); HeroesContentsLoader.Type_SQLiteCommand = HeroesContentsLoader.Assembly_Sqlite.GetType("System.Data.SQLite.SQLiteCommand"); HeroesContentsLoader.Type_SQLiteDataReader = HeroesContentsLoader.Assembly_Sqlite.GetType("System.Data.SQLite.SQLiteDataReader"); HeroesContentsLoader.Method_Conn_Open = HeroesContentsLoader.Type_SQLiteConnection.GetMethod("Open"); HeroesContentsLoader.Method_Conn_Close = HeroesContentsLoader.Type_SQLiteConnection.GetMethod("Close"); HeroesContentsLoader.Method_Conn_CreateCommand = HeroesContentsLoader.Type_SQLiteConnection.GetMethod("CreateCommand"); HeroesContentsLoader.Method_Cmd_ExecuteReader = HeroesContentsLoader.Type_SQLiteCommand.GetMethod("ExecuteReader", new Type[0]); HeroesContentsLoader.Method_Reader_Read = HeroesContentsLoader.Type_SQLiteDataReader.GetMethod("Read"); HeroesContentsLoader.Method_Reader_GetValue = HeroesContentsLoader.Type_SQLiteDataReader.GetMethod("GetValue", new Type[] { typeof(int) }); HeroesContentsLoader.Method_Reader_GetOrdinal = HeroesContentsLoader.Type_SQLiteDataReader.GetMethod("GetOrdinal", new Type[] { typeof(string) }); HeroesContentsLoader.Method_Reader_GetString = HeroesContentsLoader.Type_SQLiteDataReader.GetMethod("GetString", new Type[] { typeof(int) }); HeroesContentsLoader.Method_Reader_GetName = HeroesContentsLoader.Type_SQLiteDataReader.GetMethod("GetName", new Type[] { typeof(int) }); HeroesContentsLoader.Method_Reader_Close = HeroesContentsLoader.Type_SQLiteDataReader.GetMethod("Close"); HeroesContentsLoader.Property_Cmd_CommandText = HeroesContentsLoader.Type_SQLiteCommand.GetProperty("CommandText"); HeroesContentsLoader.Property_Cmd_CommandType = HeroesContentsLoader.Type_SQLiteCommand.GetProperty("CommandType"); HeroesContentsLoader.Property_Reader_Item = HeroesContentsLoader.Type_SQLiteDataReader.GetProperty("Item", new Type[] { typeof(int) }); HeroesContentsLoader.Property_Reader_FieldCount = HeroesContentsLoader.Type_SQLiteDataReader.GetProperty("FieldCount"); string fullPath = Path.GetFullPath(ServiceCoreSettings.Default.heroesContentsDB3); string directoryName = Path.GetDirectoryName(fullPath); HeroesContentsLoader.FileSystemWatcher = new FileSystemWatcher { Path = directoryName, NotifyFilter = (NotifyFilters.Size | NotifyFilters.LastWrite), Filter = "*.db3", IncludeSubdirectories = false, EnableRaisingEvents = true }; HeroesContentsLoader.FileSystemWatcher.Changed += delegate(object _, FileSystemEventArgs __) { Scheduler.Schedule(JobProcessor.Current, Job.Create(delegate { if (HeroesContentsLoader.DB3Changed != null) { HeroesContentsLoader.DB3Changed(); } }), 3000); }; }
private void HandleNextEvent(T next) { var handlerAndEvent = new HandlerAndEvent<T>(Awaitable.SignalCompletion, next); Scheduler.Schedule(handlerAndEvent.Deliver, _persistable); }
private async void Scheduler_OnSchedule(object sender, SchedulerEventArgs e) { DateTime currentTime = Tracker.GetCurrentHourDateTime(); if (currentTime != _lastTrackerUpdate) { _lastTrackerUpdate = currentTime; Tracker.CalculatedTrackingHours(); } Scheduler scheduler = e.Scheduler; //Allowing event to be called to update tracked hours when not running if (!scheduler.Enabled) { return; } //Don't auto start when max level is hit if (UserSettings.MaxLevel != 0 && Level >= UserSettings.MaxLevel) { return; } //Should not start with these states if (AccountState == AccountState.PermAccountBan || AccountState == AccountState.NotVerified) { return; } int delay = 0; lock (_rand) { delay = _rand.Next(0, 45000); } if (e.Scheduler.WithinTime()) { if (State == Enums.BotState.Stopped) { //Only auto start when both are below min values //Otherwise we'll get constant start/stops if ((PokemonCaught <= scheduler.PokemonLimiter.Min || scheduler.PokemonLimiter.Option == SchedulerOption.Nothing) && (PokestopsFarmed <= scheduler.PokeStoplimiter.Min || scheduler.PokeStoplimiter.Option == SchedulerOption.Nothing)) { LogCaller(new LoggerEventArgs(String.Format("Auto starting (schedule) in {0} seconds...", delay / 1000), LoggerTypes.Debug)); await Task.Delay(delay); Start(); } } } else { if (State != Enums.BotState.Stopping && State != Enums.BotState.Stopped) { LogCaller(new LoggerEventArgs("Auto stopping (schedule) ...", LoggerTypes.Debug)); Stop(); } } if (!IsRunning) { return; } //Master stop if (scheduler.MasterOption == SchedulerOption.StartStop) { if (State != BotState.Stopping && State != BotState.Stopped) { if (PokemonCaught >= scheduler.PokemonLimiter.Max && PokestopsFarmed >= scheduler.PokeStoplimiter.Max) { LogCaller(new LoggerEventArgs("Max pokemon and pokestop limit reached. Stopping", LoggerTypes.Debug)); Stop(); return; } } } //Pokemon if (scheduler.PokemonLimiter.Option != SchedulerOption.Nothing) { if (PokemonCaught >= scheduler.PokemonLimiter.Max) { switch (scheduler.PokemonLimiter.Option) { case SchedulerOption.DisableEnable: //No extra checks if (UserSettings.CatchPokemon) { LogCaller(new LoggerEventArgs("Max pokemon limit reached. Disabling setting...", LoggerTypes.Debug)); UserSettings.CatchPokemon = false; } break; case SchedulerOption.StartStop: //Just stop it LogCaller(new LoggerEventArgs("Max pokemon limit reached. Stopping bot...", LoggerTypes.Debug)); Stop(); break; } } else if (PokemonCaught <= scheduler.PokemonLimiter.Min) { switch (scheduler.PokemonLimiter.Option) { case SchedulerOption.DisableEnable: //No extra checks if (!UserSettings.CatchPokemon) { LogCaller(new LoggerEventArgs("Min pokemon limit reached. Enabling catching...", LoggerTypes.Debug)); UserSettings.CatchPokemon = true; } break; case SchedulerOption.StartStop: //Start only if pokestop is disabled/nothing or pokestops below threshold if (scheduler.PokeStoplimiter.Option != SchedulerOption.StartStop || PokestopsFarmed <= scheduler.PokeStoplimiter.Min) { if (State == BotState.Stopped) { LogCaller(new LoggerEventArgs(String.Format("Min pokemon limit reached. Starting in {0} seconds", delay / 1000), LoggerTypes.Debug)); await Task.Delay(delay); Start(); } } break; } } } //Pokestops if (scheduler.PokeStoplimiter.Option != SchedulerOption.Nothing) { if (PokestopsFarmed >= scheduler.PokeStoplimiter.Max) { switch (scheduler.PokeStoplimiter.Option) { case SchedulerOption.DisableEnable: //No extra checks if (UserSettings.SearchFortBelowPercent != 0) { LogCaller(new LoggerEventArgs("Max pokestop limit reached. Disabling...", LoggerTypes.Debug)); UserSettings.SearchFortBelowPercent = 0; } break; case SchedulerOption.StartStop: //Just stop it LogCaller(new LoggerEventArgs("Max pokestop limit reached. Stopping ...", LoggerTypes.Debug)); Stop(); break; } } else if (PokestopsFarmed <= scheduler.PokeStoplimiter.Min) { switch (scheduler.PokeStoplimiter.Option) { case SchedulerOption.DisableEnable: //No extra checks if (UserSettings.SearchFortBelowPercent != 1000) { LogCaller(new LoggerEventArgs("Min pokestop limit reached. Enabling ...", LoggerTypes.Debug)); UserSettings.SearchFortBelowPercent = 1000; } break; case SchedulerOption.StartStop: //Start only if pokemon is disabled/nothing or pokemon caught below threshold if (scheduler.PokemonLimiter.Option != SchedulerOption.StartStop || PokemonCaught <= scheduler.PokemonLimiter.Min) { if (State == BotState.Stopped) { LogCaller(new LoggerEventArgs(String.Format("Min pokestop limit reached. Starting in {0} seconds", delay / 1000), LoggerTypes.Debug)); await Task.Delay(delay); Start(); } } break; } } } }
public void Test_Run_MasterTemplate() { MasterTemplate m = new Scheduler(); m.Run(); }
/// <summary> /// selection has been changed as the result of a user interaction. /// </summary> private void performUpdateSelected() { var beatmap = beatmapNoDebounce; var ruleset = rulesetNoDebounce; void run() { Logger.Log($"updating selection with beatmap:{beatmap?.ID.ToString() ?? "null"} ruleset:{ruleset?.ID.ToString() ?? "null"}"); bool preview = false; if (ruleset?.Equals(Ruleset.Value) == false) { Logger.Log($"ruleset changed from \"{Ruleset.Value}\" to \"{ruleset}\""); Beatmap.Value.Mods.Value = Enumerable.Empty <Mod>(); Ruleset.Value = ruleset; // force a filter before attempting to change the beatmap. // we may still be in the wrong ruleset as there is a debounce delay on ruleset changes. Carousel.Filter(null, false); // Filtering only completes after the carousel runs Update. // If we also have a pending beatmap change we should delay it one frame. selectionChangedDebounce = Schedule(run); return; } // We may be arriving here due to another component changing the bindable Beatmap. // In these cases, the other component has already loaded the beatmap, so we don't need to do so again. if (!Equals(beatmap, Beatmap.Value.BeatmapInfo)) { Logger.Log($"beatmap changed from \"{Beatmap.Value.BeatmapInfo}\" to \"{beatmap}\""); preview = beatmap?.BeatmapSetInfoID != Beatmap.Value?.BeatmapInfo.BeatmapSetInfoID; Beatmap.Value = beatmaps.GetWorkingBeatmap(beatmap, Beatmap.Value); if (beatmap != null) { if (beatmap.BeatmapSetInfoID == beatmapNoDebounce?.BeatmapSetInfoID) { sampleChangeDifficulty.Play(); } else { sampleChangeBeatmap.Play(); } } } if (this.IsCurrentScreen()) { ensurePlayingSelected(preview); } UpdateBeatmap(Beatmap.Value); } selectionChangedDebounce?.Cancel(); if (beatmap == null) { run(); } else { selectionChangedDebounce = Scheduler.AddDelayed(run, 200); } }
private void ContextAddItemClicked(object sender, ToolItemEventArgs e) { var item = e.Item; DBItem row = null; object tag = tree.SelectedDBItem; if (item.Name == "Work") { row = new Work(); } else if (item.Name == "Work Stage") { row = new Stage(); if (tag is Work) { ((Stage)row).Work = (Work)tag; } } else if (item.Name == "Stage Parameter") { row = new StageParam(); if (tag is Stage) { ((StageParam)row).Stage = (Stage)tag; } } else if (item.Name == "Stage Procedure") { row = new StageProcedure(); if (tag is Stage) { ((StageParam)row).Stage = (Stage)tag; } } else if (item.Name == "Stage Reference") { row = new StageReference(); if (tag is Stage) { ((StageParam)row).Stage = (Stage)tag; } } else if (item.Name == "Group") { row = new UserGroup(); } else if (item.Name == "Template") { row = new Template(); if (tag is Template) { ((Template)row).Parent = (Template)tag; } } else if (item.Name == "Template Data") { row = new TemplateData(); if (tag is Template) { ((TemplateData)row).Template = (Template)tag; } } else if (item.Name == "User") { row = new User(); if (tag is Department) { ((User)row).Department = (Department)tag; } else if (tag is Position) { ((User)row).Position = (Position)tag; } //row.Access.Create foreach (var access in row.Access.Items.ToList()) { row.Access.Add(access.Identity, access.Access & ~AccessType.Create); } } else if (item.Name == "Scheduler") { row = new Scheduler(); if (tag is DBProcedure) { ((Scheduler)row).Procedure = (DBProcedure)tag; } } ShowItem(row); }
public static void InitMultitasking() { Scheduler.Init(); }
/// <summary> /// Resumes a job. /// </summary> /// <param name="jobName">Job name</param> /// <param name="groupName">Job group name</param> public static void Resume(string jobName, string groupName = "") { Scheduler.ResumeJob(GetJobKey(jobName, groupName)); }
private BuildResultCode BuildSlave() { // Mount build path ((FileSystemProvider)VirtualFileSystem.ApplicationData).ChangeBasePath(builderOptions.BuildDirectory); PrepareDatabases(); VirtualFileSystem.CreateDirectory(VirtualFileSystem.ApplicationDatabasePath); // Open WCF channel with master builder var namedPipeBinding = new NetNamedPipeBinding(NetNamedPipeSecurityMode.None) { SendTimeout = TimeSpan.FromSeconds(300.0) }; var processBuilderRemote = ChannelFactory <IProcessBuilderRemote> .CreateChannel(namedPipeBinding, new EndpointAddress(builderOptions.SlavePipe)); try { RegisterRemoteLogger(processBuilderRemote); // Create scheduler var scheduler = new Scheduler(); var status = ResultStatus.NotProcessed; // Schedule command string buildPath = builderOptions.BuildDirectory; string buildProfile = builderOptions.BuildProfile; Builder.OpenObjectDatabase(buildPath, VirtualFileSystem.ApplicationDatabaseIndexName); var logger = builderOptions.Logger; MicroThread microthread = scheduler.Add(async() => { // Deserialize command and parameters Command command = processBuilderRemote.GetCommandToExecute(); BuildParameterCollection parameters = processBuilderRemote.GetBuildParameters(); // Run command var inputHashes = FileVersionTracker.GetDefault(); var builderContext = new BuilderContext(buildPath, buildProfile, inputHashes, parameters, 0, null); var commandContext = new RemoteCommandContext(processBuilderRemote, command, builderContext, logger); MicrothreadLocalDatabases.MountDatabase(commandContext.GetOutputObjectsGroups()); command.PreCommand(commandContext); status = await command.DoCommand(commandContext); command.PostCommand(commandContext, status); // Returns result to master builder processBuilderRemote.RegisterResult(commandContext.ResultEntry); }); while (true) { scheduler.Run(); // Exit loop if no more micro threads lock (scheduler.MicroThreads) { if (!scheduler.MicroThreads.Any()) { break; } } Thread.Sleep(0); } // Rethrow any exception that happened in microthread if (microthread.Exception != null) { builderOptions.Logger.Fatal(microthread.Exception.ToString()); return(BuildResultCode.BuildError); } if (status == ResultStatus.Successful || status == ResultStatus.NotTriggeredWasSuccessful) { return(BuildResultCode.Successful); } return(BuildResultCode.BuildError); } finally { // Close WCF channel // ReSharper disable SuspiciousTypeConversion.Global ((IClientChannel)processBuilderRemote).Close(); // ReSharper restore SuspiciousTypeConversion.Global } }
public void TestFlowContainer() { FillFlowContainer flowContainer; AddStep("Create new flow container with facade", () => { Add(trackingContainer = new TestLogoTrackingContainer { AutoSizeAxes = Axes.Both, Origin = Anchor.TopCentre, Anchor = Anchor.TopCentre, Child = flowContainer = new FillFlowContainer { AutoSizeAxes = Axes.Both, Origin = Anchor.TopCentre, Anchor = Anchor.TopCentre, Direction = FillDirection.Vertical, } }); flowContainer.Children = new Drawable[] { new Box { Origin = Anchor.TopCentre, Anchor = Anchor.TopCentre, Colour = Colour4.Azure, Size = new Vector2(visual_box_size) }, new Container { Alpha = 0.35f, RelativeSizeAxes = Axes.None, Size = new Vector2(visual_box_size), Origin = Anchor.TopCentre, Anchor = Anchor.TopCentre, Children = new Drawable[] { visualBox = new Box { Colour = Colour4.White, RelativeSizeAxes = Axes.Both, }, trackingContainer.LogoFacade, } }, new Box { Origin = Anchor.TopCentre, Anchor = Anchor.TopCentre, Colour = Colour4.Azure, Size = new Vector2(visual_box_size) }, }; }); AddStep("Perform logo movements", () => { trackingContainer.StopTracking(); logo.MoveTo(new Vector2(0.5f), 500, Easing.InOutExpo); visualBox.Colour = Colour4.White; Scheduler.AddDelayed(() => { trackingContainer.StartTracking(logo, 1000, Easing.InOutExpo); visualBox.Colour = Colour4.Tomato; }, 700); }); waitForMove(8); AddAssert("Logo is tracking", () => trackingContainer.IsLogoTracking); }
public virtual void GenerateProc(Schedule schedule, short times, DateTime runDate) { IEnumerable <ScheduleDet> occurrences = new Scheduler(this).MakeSchedule(schedule, times, runDate); ARInvoiceEntry invoiceEntry = CreateGraph(); using (PXTransactionScope transactionScope = new PXTransactionScope()) { foreach (ScheduleDet occurrence in occurrences) { foreach (PXResult <ARInvoice, Customer, CurrencyInfo> scheduledInvoiceResult in PXSelectJoin < ARInvoice, InnerJoin <Customer, On <Customer.bAccountID, Equal <ARInvoice.customerID> >, InnerJoin <CurrencyInfo, On <CurrencyInfo.curyInfoID, Equal <ARInvoice.curyInfoID> > > >, Where < ARInvoice.scheduleID, Equal <Required <ARInvoice.scheduleID> >, And <ARInvoice.scheduled, Equal <True> > > > .Select(this, schedule.ScheduleID)) { invoiceEntry.Clear(); invoiceEntry.customer.Current = scheduledInvoiceResult; ARInvoice scheduledInvoice = scheduledInvoiceResult; CurrencyInfo scheduledInvoiceCurrencyInfo = scheduledInvoiceResult; ARInvoice newInvoice = InsertDocument( invoiceEntry, occurrence, scheduledInvoiceResult, scheduledInvoiceResult, scheduledInvoiceCurrencyInfo); InsertDetails(invoiceEntry, scheduledInvoice, newInvoice); BalanceCalculation.ForceDocumentControlTotals(invoiceEntry, newInvoice); try { invoiceEntry.Save.Press(); } catch { if (invoiceEntry.Document.Cache.IsInsertedUpdatedDeleted) { throw; } } } schedule.LastRunDate = occurrence.ScheduledDate; Running_Schedule.Cache.Update(schedule); } transactionScope.Complete(this); } using (PXTransactionScope ts = new PXTransactionScope()) { Running_Schedule.Cache.Persist(PXDBOperation.Update); ts.Complete(this); } Running_Schedule.Cache.Persisted(false); }
/// <summary> /// Stops all scheduled jobs (shuts the scheduler down). /// </summary> public static void Stop() { Scheduler.Shutdown(); }
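Taken together, the Pause, Resume, and Stop wrappers above form a small facade over a Quartz.NET-style scheduler (PauseJob, ResumeJob, Shutdown, job keys). A short usage sketch; the job and group names are illustrative, not values from the original code:

// Illustrative use of the wrapper methods shown above.
Pause("nightly-report", "reports");   // suspend triggering of the job
Resume("nightly-report", "reports");  // allow it to fire again
Stop();                               // shut the underlying scheduler down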
/// <summary> /// Update the controls. /// </summary> private void UpdateContent() { if (!Visible) { return; } // Update character's 'Adorned Name' and 'Portrait' in case they have changed lblCharName.Text = Character.LabelPrefix + Character.AdornedName; pbCharacterPortrait.Character = Character; lblTotalSkillPoints.Text = string.Format("{0:N0} SP", Character.SkillPoints); FormatBalance(); CCPCharacter ccpCharacter = Character as CCPCharacter; QueuedSkill trainingSkill = Character.CurrentlyTrainingSkill; // Character in training ? We have labels to fill if (Character.IsTraining || (ccpCharacter != null && trainingSkill != null && ccpCharacter.SkillQueue.IsPaused)) { // Update the skill in training label lblSkillInTraining.Text = trainingSkill.ToString(); DateTime endTime = trainingSkill.EndTime.ToLocalTime(); // Updates the time remaining label lblRemainingTime.Text = (ccpCharacter != null && ccpCharacter.SkillQueue. IsPaused) ? "Paused" : trainingSkill.RemainingTime.ToDescriptiveText( DescriptiveTextOptions.IncludeCommas); // Update the completion time lblCompletionTime.Text = (ccpCharacter != null && ccpCharacter.SkillQueue. IsPaused) ? string.Empty : $"{endTime:ddd} {endTime:G}"; // Changes the completion time color on scheduling block string blockingEntry; bool isAutoBlocking; bool isBlocking = Scheduler.SkillIsBlockedAt(endTime, out blockingEntry, out isAutoBlocking); lblCompletionTime.ForeColor = (m_showConflicts && isBlocking && (ccpCharacter == null || ccpCharacter.SkillQueue.Count == 1 || !isAutoBlocking)) ? Color.Red : m_settingsForeColor; // Update the skill queue training time label UpdateSkillQueueTrainingTime(); // Show the training labels m_hasSkillInTraining = true; m_hasCompletionTime = true; m_hasRemainingTime = true; m_hasSkillQueueTrainingTime = true; } else { // Hide the training labels m_hasSkillInTraining = false; m_hasCompletionTime = false; m_hasRemainingTime = false; m_hasSkillQueueTrainingTime = false; } // Adjusts all the controls layout PerformCustomLayout(m_isTooltip); }
protected override void LoadComplete() { base.LoadComplete(); Scheduler.Add(updateTimeWithReschedule); }