/// <summary>
/// Builds the hashcat command-line argument string for the given job.
/// </summary>
/// <param name="job">Job to translate into hashcat arguments.</param>
/// <param name="paths">Temp file locations (hash input / outfile) for this run.</param>
/// <returns>The full argument string to pass to the hashcat executable.</returns>
/// <exception cref="InvalidOperationException">Thrown for job types with no argument mapping.</exception>
public string Build(AbstractJob job, TempFilePaths paths)
{
    var inventory = _inventoryManager.GetCurrent();
    return job switch
    {
        // Benchmark run: machine-readable speed stats for one hash type.
        SpeedStatJob ssj => $"{_force}-b -m {ssj.HashTypeId} --machine-readable",
        // Keyspace probes (-a 3 = mask attack) used to size brute-force templates.
        TemplateBruteforceJob tmj => $"{_force}{_options} --keyspace -a 3 " + BuildAttackConfiguration(tmj),
        TemplateWordListJob twl => $"{_force}{_options} --keyspace {BuildRule(twl.RuleId, inventory)}"
                                   + $" \"{Path.Combine(_workedFolders.WordlistPath, inventory.Map[twl.WordlistId].Name)}\"",
        // --left: report hashes NOT cracked yet.
        HashListJob hlj => $"{_force}--left -m {hlj.HashTypeId} {BuildFilePaths(paths)}",
        BruteforceJob bfj => $"{_force}{_options} --skip={bfj.Skip} --limit={bfj.Limit} -m {bfj.HashTypeId} "
                             + $" --outfile=\"{paths.OutputFile}\" "
                             + $"{BuildFilePaths(paths)} -a 3 "
                             + BuildAttackConfiguration(bfj),
        WordListJob wlj => $"{_force}{_options} --skip={wlj.Skip} --limit={wlj.Limit} -m {wlj.HashTypeId} "
                           + BuildRule(wlj.RuleId, inventory)
                           + $" --outfile=\"{paths.OutputFile}\" "
                           + $"{BuildFilePaths(paths)} \"{Path.Combine(_workedFolders.WordlistPath, inventory.Map[wlj.WordlistId].Name)}\"",
        // Fixed typo in the message: "hascat" -> "hashcat".
        _ => throw new InvalidOperationException($"Can't build hashcat arguments for {job}")
    };
}
/// <summary>
/// Verifies that a failure in one job (JobB, constructed to fail) does not
/// prevent the remaining jobs in the batch from running to completion.
/// </summary>
public void FaultyMiddleJobExecution()
{
    // JobB(true) is the faulty job; all others should finish normally.
    var jobList = new AbstractJob[]
    {
        new JobsHelper.JobA(),
        new JobsHelper.JobB(true),
        new JobsHelper.JobC(),
        new JobsHelper.JobD(),
        new JobsHelper.JobE(),
        new JobsHelper.JobF(),
    };

    var jobs = RunJobsCommand.BuildJobsDict(jobList);
    var tasks = RunJobsCommand.ScheduleJobs(jobs);
    RunJobsCommand.WaitAll(tasks);

    // Expected statuses, positionally matching jobList.
    var expected = new[]
    {
        TaskStatus.RanToCompletion,
        TaskStatus.Faulted,
        TaskStatus.RanToCompletion,
        TaskStatus.RanToCompletion,
        TaskStatus.RanToCompletion,
        TaskStatus.RanToCompletion,
    };

    // Use the array's Length property instead of the LINQ Count() extension,
    // which would allocate an enumerator call per loop setup.
    for (var i = 0; i < expected.Length; i++)
    {
        Assert.Equal(expected[i], tasks[jobList[i].Id()].Status);
    }
}
/// <summary>
/// Queues the job on the Outlook processor at this dispatcher's priority.
/// Silently does nothing when no processor is available.
/// </summary>
public virtual void QueueJob(AbstractJob job)
{
    // Guard clause: bail out early when there is no processor to queue on.
    if (OutlookSession.OutlookProcessor == null)
    {
        return;
    }

    OutlookSession.OutlookProcessor.QueueJob(_priority, job);
}
/// <summary>
/// Creates the handler for hash-list jobs.
/// </summary>
/// <remarks>
/// NOTE(review): <c>job as HashListJob</c> yields null when the runtime type
/// differs — presumably the type-dispatch upstream guarantees a match; confirm.
/// </remarks>
public IJobHandler BuildHashList(AbstractJob job)
{
    var hashListJob = job as HashListJob;
    return new HashListJobHandler(
        _krakerApi,
        _agentIdManager.GetCurrent().Id,
        _workedFolders.TempFolderPath,
        _tempFileManager,
        hashListJob,
        _executorBuilder);
}
/// <summary>
/// Creates the handler for brute-force jobs.
/// </summary>
/// <remarks>
/// NOTE(review): <c>job as BruteforceJob</c> yields null on a type mismatch —
/// presumably the dispatcher only routes matching jobs here; confirm.
/// </remarks>
public IJobHandler BuildBruteforce(AbstractJob job)
{
    var bruteforceJob = job as BruteforceJob;
    return new BruteforceJobHandler(
        _krakerApi,
        _workedFolders.TempFolderPath,
        _tempFileManager,
        _agentIdManager.GetCurrent().Id,
        _speedCalculator,
        _logger,
        bruteforceJob,
        _executorBuilder);
}
/// <summary>
/// Initializes an actor with empty state map, default stats, bare-handed
/// weapon, blank armor in every equipment slot, and the base job.
/// </summary>
public AbstractActor()
{
    RemoveStates = new Dictionary<string, Dictionary<string, AbstractState>>();
    Stats = new Attribute();
    MainHand = new AbstractWeapon();
    Helmet = new AbstractArmor();
    Body = new AbstractArmor();
    Feet = new AbstractArmor();
    Ring = new AbstractArmor();
    Necklace = new AbstractArmor();
    Job = new AbstractJob();
    // Removed dead code: `AbstractAbility aux = new AbstractAbility();` —
    // the local was never read or stored anywhere.
}
/// <summary>
/// With no faulty jobs in the batch, every scheduled task must finish
/// with status RanToCompletion.
/// </summary>
public void SuccessfulJobExecution()
{
    var batch = new AbstractJob[]
    {
        new JobsHelper.JobA(),
        new JobsHelper.JobB(),
        new JobsHelper.JobC(),
        new JobsHelper.JobD(),
        new JobsHelper.JobE(),
    };

    // Build the dictionary, schedule, and block until all tasks settle.
    var scheduled = RunJobsCommand.ScheduleJobs(RunJobsCommand.BuildJobsDict(batch));
    RunJobsCommand.WaitAll(scheduled);

    foreach (var task in scheduled.Values)
    {
        Assert.Equal(TaskStatus.RanToCompletion, task.Status);
    }
}
/// <summary>
/// Imports a Miranda IM database: sets up a conversation manager, feeds the
/// database to an import job, then drains and executes the job queue.
/// </summary>
private void DoImportDB(IMirandaDB db)
{
    IMConversationsManager convManager = new IMConversationsManager(
        ResourceTypes.MirandaConversation,
        "Miranda Conversation",
        "Subject",
        IniSettings.ConversationPeriodTimeSpan,
        Props.MirandaAcct,
        Props.FromAccount,
        Props.ToAccount,
        null);

    MirandaImportJob importJob = new MirandaImportJob("", convManager, null);
    importJob.ImportDB(db);

    // Pump every queued job until the import job reports no more work.
    AbstractJob job;
    while ((job = importJob.GetNextJob()) != null)
    {
        job.NextMethod.Invoke();
    }

    importJob.EnumerationFinished();
}
// Constructs the text-index manager and wires up the full-text indexing
// pipeline: the indexer itself, its async-processor thread settings, text
// providers, search providers, defragmentation, the idle-mode job, and the
// UI indicator light. Initialization order is significant — left unchanged.
internal TextIndexManager() : base(false)
{
    _processPendingDocsDelegate = new DelegateJob("Indexing documents",
        new MethodInvoker(ProcessPendingDocs), new object[] {});

    // Drop any stale "QueuedForIndexing" property type left by a previous run.
    if (Core.ResourceStore.PropTypes.Exist("QueuedForIndexing"))
    {
        Core.ResourceStore.PropTypes.Delete(Core.ResourceStore.PropTypes["QueuedForIndexing"].Id);
    }

    _statusWriter = Core.UIManager.GetStatusWriter(typeof(FullTextIndexer), StatusPane.UI);
    _isJobTraceSuppressed = Core.SettingStore.ReadBool("TextIndexing", "SuppressJobTraces", false);

    _textIndexer = new FullTextIndexer();
    _textIndexer.IndexLoaded += IndexLoadedNotification;

    // Async-processor thread configuration: non-reentrant, low priority.
    Reenterable = false;
    ThreadName = "TextIndex AsyncProcessor";
    ThreadPriority = System.Threading.ThreadPriority.BelowNormal;
    ThreadStarted += TextIndexProcessor_ThreadStarted;

    Core.PluginLoader.RegisterResourceTextProvider(null, new AnnotationTextIndexProvider());
    Core.PluginLoader.RegisterResourceTextProvider(null, new TitleTextIndexProvider());

    // Register predefined search providers
    CurrentSearchProvider = new OmeaGlobalSearchProvider();
    RegisterSearchProvider(CurrentSearchProvider, "Omea Search", _cStandardProvidersGroupName);
    RegisterSearchProvider(new OmeaQuickSearchProvider(), "Local Search", _cStandardProvidersGroupName);

    // Give the job classes a back-reference to this manager before queue setup.
    DefragmentIndexJob._textIndexManager = IndexingJob._textIndexManager = this;
    SetupDefragmentationQueue();

    _switchToIdleJob = new SwitchToIdleModeJob(this);
    QueueSwitchToIdleModeJob();

    // Indicator light cycles idle/busy/stuck icons in the main frame UI.
    Core.UIManager.RegisterIndicatorLight("Text Index Manager", this, 30,
        MainFrame.LoadIconFromAssembly("textindex_idle.ico"),
        MainFrame.LoadIconFromAssembly("textindex_busy.ico"),
        MainFrame.LoadIconFromAssembly("textindex_stuck.ico"));
}
// Mock stub: accepts and discards the idle job. No queuing is performed.
public void QueueIdleJob(JobPriority priority, AbstractJob uow)
{
    // TODO: Add MockAsyncProcessor.QueueIdleUnitOfWork implementation
}
// Mock stub (explicit interface implementation): discards the idle job.
void IAsyncProcessor.QueueIdleJob(AbstractJob uow)
{
    // TODO: Add MockAsyncProcessor.OmniaMea.OpenAPI.IAsyncProcessor.QueueIdleUnitOfWork implementation
}
// Mock stub: never queues the job; always reports false (not accepted).
bool IAsyncProcessor.QueueJob(AbstractJob uow)
{
    // TODO: Add MockAsyncProcessor.OmniaMea.OpenAPI.IAsyncProcessor.QueueJob implementation
    return(false);
}
// Mock stub: ignores the scheduled time and discards the job.
void IAsyncProcessor.QueueJobAt(DateTime when, AbstractJob uow)
{
    // TODO: Add MockAsyncProcessor.OmniaMea.OpenAPI.IAsyncProcessor.QueueJobAt implementation
}
/// <summary>
/// Cancellation filter: returns true (cancel) only for enclosure-download
/// jobs; every other job type is left queued.
/// </summary>
public static bool CancelJob(AbstractJob job) => job is DownloadEnclosure;
// Mock stub: does not run the job.
public void RunJob(AbstractJob uow)
{
    // TODO: Add MockAsyncProcessor.RunUnitOfWork implementation
}
/// <summary>
/// Creates the handler for wordlist jobs.
/// </summary>
/// <remarks>
/// NOTE(review): <c>job as WordListJob</c> yields null on a type mismatch —
/// presumably the dispatcher only routes matching jobs here; confirm.
/// </remarks>
public IJobHandler BuildWordlist(AbstractJob job)
{
    var wordListJob = job as WordListJob;
    return new WordListJobHandler(
        wordListJob,
        _krakerApi,
        _tempFileManager,
        _workedFolders.TempFolderPath,
        _executorBuilder,
        _agentIdManager.GetCurrent().Id,
        _speedCalculator,
        _logger);
}
/// <summary>
/// Resolves the handler builder registered for the job's type and invokes it.
/// </summary>
/// <exception cref="InvalidOperationException">No builder registered for the job's type.</exception>
public IJobHandler Get(AbstractJob job)
{
    if (_map.TryGetValue(job.Type, out var handlerBuilder))
    {
        return handlerBuilder(job);
    }

    throw new InvalidOperationException($"Try to work with job {job}");
}
/// <summary>
/// Captures the job and the processor that will execute it.
/// </summary>
public RunJobInProcessor(AsyncProcessor proc, AbstractJob job)
{
    _job = job;
    _proc = proc;
}
// Queues the job for execution after a fixed delay of _seconds.
// NOTE(review): uses DateTime.Now (local time); assumes QueueJobAt compares
// against local time internally — confirm before switching to UtcNow.
// NOTE(review): unlike the base QueueJob, no null check on OutlookProcessor —
// presumably guaranteed non-null by the time this override runs; verify.
public override void QueueJob(AbstractJob job)
{
    OutlookSession.OutlookProcessor.QueueJobAt(DateTime.Now.AddSeconds(_seconds), job);
}
/// <summary>
/// Filters for DelegateJob instances whose wrapped method equals the
/// tracked method (_method).
/// </summary>
public bool DoFilter(AbstractJob job)
{
    // Declaration pattern replaces the redundant cast after the `is` check.
    return job is DelegateJob delegateJob && delegateJob.Method.Equals(_method);
}
// Stores the job to operate on, then runs the internal initialization.
// NOTE(review): the parameter is never reassigned inside this method, so
// `ref` looks unnecessary — kept because callers bind to this exact signature.
public void Initialize(ref AbstractJob job)
{
    SelectedJob = job;
    Init();
}
/// <summary>
/// Round-trips a job through serialization and asserts the result is
/// value-equal to the original.
/// </summary>
public void CorrectObjects(AbstractJob job)
{
    var roundTripped = job.SerializeAndDeserialize();

    Assert.That(roundTripped, Is.EqualTo(job));
}
// Mock stub: does not cancel anything.
public void CancelTimedJobs(AbstractJob uow)
{
    // TODO: Add MockAsyncProcessor.CancelTimedUnitsOfWork implementation
}
/// <summary>
/// Creates the handler for speed-statistics (benchmark) jobs.
/// </summary>
/// <remarks>
/// NOTE(review): <c>job as SpeedStatJob</c> yields null on a type mismatch —
/// presumably the dispatcher only routes matching jobs here; confirm.
/// </remarks>
public IJobHandler BuildSpeedStat(AbstractJob job)
{
    var speedStatJob = job as SpeedStatJob;
    return new SpeedstatsJobHandler(
        _krakerApi,
        _speedCalculator,
        _agentIdManager.GetCurrent().Id,
        _executorBuilder,
        speedStatJob);
}
// Intentionally empty: this implementation does not run unique jobs.
public void RunUniqueJob(AbstractJob uow)
{
}
/// <summary>
/// Creates the handler for incorrect/unrecognized jobs.
/// </summary>
public IJobHandler BuildIncorrect(AbstractJob job)
{
    var incorrectJob = job as IncorrectJob;
    return new IncorrectJobHandler(incorrectJob);
}
/// <summary>
/// Handler invoked by the processor when a job is queued; decrements the
/// outstanding-job counter.
/// </summary>
private void proc_JobQueued(object sender, AbstractJob job)
{
    --_jobCount;
}
// Runs a group of jobs with bounded parallelism: keeps at most
// _numberOfSimultaneousJobs jobs in flight on the processor, refilling empty
// slots from GetNextJob() until the source is exhausted or the group is
// interrupted. Statement order and slot bookkeeping are intricate — code
// left byte-identical, comments only.
protected override void Execute()
{
    GroupStarting();
    try
    {
        // Fixed-size slot array: one entry per concurrently running job.
        AbstractJob[] currentJobs = new AbstractJob[_numberOfSimultaneousJobs];
        AbstractJob nextJob = GetNextJob();
        if (nextJob == null)
        {
            return;
        }
        while (!Interrupted)
        {
            /**
             * get missing number of jobs and check there are jobs to continue
             */
            bool finished = true;
            for (int i = 0; i < currentJobs.Length; ++i)
            {
                AbstractJob job = currentJobs[i];
                if (job != null)
                {
                    // Slot still occupied by a running job.
                    finished = false;
                }
                else if ((job = nextJob) != null)
                {
                    // Fill the empty slot with the pre-fetched job, then
                    // pre-fetch the next one for the following empty slot.
                    Processor.QueueJob(ReenteringPriority, currentJobs[i] = job);
                    nextJob = GetNextJob();
                    finished = false;
                }
            }
            if (finished)
            {
                break;
            }
            bool doJobs = true;
            while (!Interrupted && doJobs)
            {
                DoJobs();
                /**
                 * search for finished jobs
                 * if at least one job finished, exit DoJobs loop
                 */
                for (int i = 0; i < currentJobs.Length; ++i)
                {
                    AbstractJob job = currentJobs[i];
                    // A null NextWaitHandle marks a completed job.
                    if (job != null && job.NextWaitHandle == null)
                    {
                        /**
                         * job is finished
                         */
                        currentJobs[i] = null;
                        doJobs = false;
                    }
                }
            }
        }
    }
    finally
    {
        // Only signal group completion when this is the terminal pass.
        if (NextWaitHandle == AsyncProcessor._nullHandle)
        {
            GroupFinished();
        }
    }
}
/// <summary>
/// Captures the job to run and the processor that will run it.
/// </summary>
public JobRunner(AbstractJob job, AsyncProcessor proc)
{
    _proc = proc;
    _job = job;
}
/// <summary>
/// Creates the handler for template jobs.
/// </summary>
/// <remarks>
/// NOTE(review): <c>job as TemplateJob</c> yields null on a type mismatch —
/// presumably the dispatcher only routes matching jobs here; confirm.
/// </remarks>
public IJobHandler BuildTemplate(AbstractJob job)
{
    var templateJob = job as TemplateJob;
    return new TemplateJobHandler(
        templateJob,
        _krakerApi,
        _executorBuilder,
        _agentIdManager.GetCurrent().Id);
}