static void Main(string[] args)
{
    ts_priority1 = queueScheduler.ActivateNewQueue(1);
    ts_priority2 = queueScheduler.ActivateNewQueue(2);

    QueueValue(1, ts_priority2);
    QueueValue(2, ts_priority2);
    QueueValue(3, ts_priority2);
    QueueValue(4, ts_priority1);
    QueueValue(5, ts_priority1);
    QueueValue(6, ts_priority1);

    Console.ReadLine();
}
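This demo relies on a queueScheduler field and a QueueValue helper that the snippet does not show. A minimal sketch of what they could look like, assuming a single-threaded QueuedTaskScheduler so the priority ordering is observable (all names and bodies below are assumptions, not taken from the original):

// Hypothetical supporting members for the demo above (assumed, not from the original).
static readonly QueuedTaskScheduler queueScheduler = new QueuedTaskScheduler(TaskScheduler.Default, 1);
static TaskScheduler ts_priority1;
static TaskScheduler ts_priority2;

// Start a task on the given priority queue and report which value ran.
static void QueueValue(int value, TaskScheduler scheduler)
{
    Task.Factory.StartNew(
        () => Console.WriteLine("Processed {0}", value),
        CancellationToken.None, TaskCreationOptions.None, scheduler);
}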
private static void CreateTaskScheduler()
{
    if (_monoConcurrencyTaskSheduler == null)
    {
        _monoConcurrencyTaskSheduler = new QueuedTaskScheduler(TaskScheduler.Default, 1);
    }

    //We will never start more threads than the number of virtual processors on the computer!
    int nbrThreads = ThreadsManager._isBoostMode ? _totThread + 4 : _totThread;
    _mainTaskSheduler = new QueuedTaskScheduler(TaskScheduler.Default, nbrThreads);
    _mainTaskSheduler.QueueIsEmpty += _mainTaskSheduler_QueueIsEmpty;

    //Create the priority queues
    _highPrioritySchedduler = _mainTaskSheduler.ActivateNewQueue(0);
    _normalPrioritySchedduler = _mainTaskSheduler.ActivateNewQueue(1);
    _lowPrioritySchedduler = _mainTaskSheduler.ActivateNewQueue(2);
}
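The fields this method touches are declared elsewhere in the class. A hedged sketch of plausible declarations, with _totThread assumed to be the logical processor count as the comment implies:

// Assumed supporting declarations (inferred from usage, not shown in the original).
private static readonly int _totThread = Environment.ProcessorCount;
private static QueuedTaskScheduler _mainTaskSheduler;
private static QueuedTaskScheduler _monoConcurrencyTaskSheduler;
private static TaskScheduler _highPrioritySchedduler;
private static TaskScheduler _normalPrioritySchedduler;
private static TaskScheduler _lowPrioritySchedduler;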
public TaskScheduler CreateForLevel(int level)
{
    TaskScheduler queue;
    if (!_queues.TryGetValue(level, out queue))
    {
        queue = _scheduler.ActivateNewQueue(level);
        _queues.Add(level, queue);
    }

    return queue;
}
private TaskScheduler AcquireScheduler(ScheduledTask task)
{
    TaskScheduler scheduler;
    if (!_schedulers.TryGetValue(task.Priority, out scheduler))
    {
        scheduler = _scheduler.ActivateNewQueue(task.Priority);
        TaskFactory factory = new TaskFactory(_cancel.Token, TaskCreationOptions.LongRunning, TaskContinuationOptions.LongRunning, scheduler);
        _schedulers.TryAdd(task.Priority, scheduler);
        _factories.TryAdd(scheduler, factory);
    }

    return scheduler;
}
public CommandDispatcher(string boundedContext, int threadCount = 1, long failedCommandRetryDelay = 60000)
{
    m_FailedCommandRetryDelay = failedCommandRetryDelay;
    m_QueuedTaskScheduler = new QueuedTaskScheduler(threadCount);
    foreach (var value in Enum.GetValues(typeof(CommandPriority)))
    {
        m_TaskFactories[(CommandPriority)value] = new TaskFactory(
            ((CommandPriority)value) == CommandPriority.Normal
                ? new CurrentThreadTaskScheduler()
                : m_QueuedTaskScheduler.ActivateNewQueue((int)value));
    }

    m_BoundedContext = boundedContext;
}
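Normal-priority commands get a CurrentThreadTaskScheduler, so they run inline on the caller, while every other priority maps to its own queue. A hedged sketch of how a dispatch method might then pick a factory (the Dispatch method below is an illustration, not part of the original class):

// Hypothetical dispatch helper (assumed): route a command through its priority's factory.
public Task Dispatch(Action command, CommandPriority priority)
{
    return m_TaskFactories[priority].StartNew(command);
}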
private TaskScheduler AcquireScheduler(BackgroundTask task)
{
    if (_schedulers.TryGetValue(task.Priority, out var scheduler))
    {
        return scheduler;
    }

    scheduler = _scheduler.ActivateNewQueue(task.Priority);
    var factory = new TaskFactory(_cancel.Token, TaskCreationOptions.LongRunning, TaskContinuationOptions.LongRunning, scheduler);
    _schedulers.TryAdd(task.Priority, scheduler);
    _factories.TryAdd(scheduler, factory);
    return scheduler;
}
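Both AcquireScheduler variants cache a TaskFactory per priority queue alongside the scheduler. A minimal sketch of a caller that uses the cached factory to actually start the work (the Run method and task.Execute member are assumptions for illustration):

// Hypothetical caller (assumed): run the task body on its priority queue.
private Task Run(BackgroundTask task)
{
    TaskScheduler scheduler = AcquireScheduler(task);
    return _factories[scheduler].StartNew(task.Execute);
}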
private TaskFactory getTaskFactory(int priority)
{
    if (priority < 0)
    {
        throw new ArgumentException("priority should be >= 0", "priority");
    }

    lock (m_TaskFactories)
    {
        TaskFactory factory;
        if (!m_TaskFactories.TryGetValue(priority, out factory))
        {
            var scheduler = m_TaskScheduler.ActivateNewQueue(priority);
            factory = new TaskFactory(scheduler);
            m_TaskFactories.Add(priority, factory);
        }

        return factory;
    }
}
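The lock makes the lazy queue creation thread-safe. A hedged sketch of the backing fields this method assumes (declarations inferred from the usage above; the concurrency level of 4 is an arbitrary assumption):

// Assumed backing fields (inferred, not shown in the original).
private readonly QueuedTaskScheduler m_TaskScheduler = new QueuedTaskScheduler(TaskScheduler.Default, 4);
private readonly Dictionary<int, TaskFactory> m_TaskFactories = new Dictionary<int, TaskFactory>();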
private void CreateEmails()
{
    int threadCount = 0;
    bool isOperationCancelled = false;
    try
    {
        TaskScheduler taskScheduler = TaskScheduler.Current;
        List<Task> taskList = new List<Task>(dtRecords.Rows.Count);
        if (!int.TryParse(txtThreads.Text, out threadCount))
        {
            MessageBox.Show("Please enter a valid thread count.");
            return;
        }

        cancellationTokenSource = new CancellationTokenSource();
        QueuedTaskScheduler qts = new QueuedTaskScheduler(TaskScheduler.Default, threadCount);
        TaskScheduler pri0 = qts.ActivateNewQueue(priority: 0);

        List<Task<ThreadResult>> lst = new List<Task<ThreadResult>>();
        for (int i = 0; i < dtRecords.Rows.Count; i = i + 9)
        {
            CreateParallelRequests(pri0, i);
        }
    }
    catch (OperationCanceledException)
    {
        isOperationCancelled = true;
    }
    catch (Exception ex)
    {
        MessageBox.Show(string.Format("Exception occurred: {0}\nPlease try again", ex.Message));
    }
    finally
    {
        if (isOperationCancelled)
        {
            //MessageBox.Show("Operation Cancelled.");
        }
    }
}
protected void parseSources(ParserFactory factory, IEnumerable<InputDescriptor> sources)
{
    Stopwatch startTime = Stopwatch.StartNew();
    Thread.VolatileWrite(ref tokenCount, 0);
    int sourceCount = 0;
    int inputSize = 0;

#if NET40PLUS
    BlockingCollection<int> threadIdentifiers = new BlockingCollection<int>();
    for (int i = 0; i < NUMBER_OF_THREADS; i++)
        threadIdentifiers.Add(i);

    ICollection<Task<int>> results = new List<Task<int>>();
    QueuedTaskScheduler executorServiceHost = new QueuedTaskScheduler(NUMBER_OF_THREADS);
    TaskScheduler executorService = executorServiceHost.ActivateNewQueue();
#else
    ICollection<Func<int>> results = new List<Func<int>>();
#endif

    foreach (InputDescriptor inputDescriptor in sources)
    {
        ICharStream input = inputDescriptor.GetInputStream();
        sourceCount++;
        input.Seek(0);
        inputSize += input.Size;
#if NET40PLUS
        Task<int> futureChecksum = Task.Factory.StartNew<int>(
            new Callable_1(input, factory, threadIdentifiers).call,
            CancellationToken.None, TaskCreationOptions.None, executorService);
#else
        Func<int> futureChecksum = new Callable_1(input, factory).call;
#endif
        results.Add(futureChecksum);
    }

    Checksum checksum = new CRC32();
    foreach (var future in results)
    {
#if NET40PLUS
        int value = future.Result;
#else
        int value = future();
#endif
        if (COMPUTE_CHECKSUM)
        {
            updateChecksum(checksum, value);
        }
    }

#if NET40PLUS
    executorServiceHost.Dispose();
#endif

    Console.Out.WriteLine("Total parse time for {0} files ({1} KB, {2} tokens, checksum 0x{3:X8}): {4}ms",
        sourceCount, inputSize / 1024, Thread.VolatileRead(ref tokenCount),
        COMPUTE_CHECKSUM ? checksum.Value : 0, startTime.ElapsedMilliseconds);

    if (sharedLexers.Length > 0)
    {
        Lexer lexer = sharedLexers[0];
        LexerATNSimulator lexerInterpreter = lexer.Interpreter;
        DFA[] modeToDFA = lexerInterpreter.atn.modeToDFA;
        if (SHOW_DFA_STATE_STATS)
        {
            int states = 0;
            int configs = 0;
            HashSet<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
            for (int i = 0; i < modeToDFA.Length; i++)
            {
                DFA dfa = modeToDFA[i];
                if (dfa == null || dfa.states == null)
                    continue;

                states += dfa.states.Count;
                foreach (DFAState state in dfa.states.Values)
                {
                    configs += state.configs.Count;
                    uniqueConfigs.UnionWith(state.configs);
                }
            }

            Console.Out.WriteLine("There are {0} lexer DFAState instances, {1} configs ({2} unique), {3} prediction contexts.",
                states, configs, uniqueConfigs.Count, lexerInterpreter.atn.ContextCacheSize);
        }
    }

    if (RUN_PARSER && sharedParsers.Length > 0)
    {
        Parser parser = sharedParsers[0];
        // make sure the individual DFAState objects actually have unique ATNConfig arrays
        ParserATNSimulator interpreter = parser.Interpreter;
        DFA[] decisionToDFA = interpreter.atn.decisionToDFA;
        if (SHOW_DFA_STATE_STATS)
        {
            int states = 0;
            int configs = 0;
            HashSet<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
            for (int i = 0; i < decisionToDFA.Length; i++)
            {
                DFA dfa = decisionToDFA[i];
                if (dfa == null || dfa.states == null)
                    continue;

                states += dfa.states.Count;
                foreach (DFAState state in dfa.states.Values)
                {
                    configs += state.configs.Count;
                    uniqueConfigs.UnionWith(state.configs);
                }
            }

            Console.Out.WriteLine("There are {0} parser DFAState instances, {1} configs ({2} unique), {3} prediction contexts.",
                states, configs, uniqueConfigs.Count, interpreter.atn.ContextCacheSize);
        }

        int localDfaCount = 0;
        int globalDfaCount = 0;
        int localConfigCount = 0;
        int globalConfigCount = 0;
        int[] contextsInDFAState = new int[0];
        for (int i = 0; i < decisionToDFA.Length; i++)
        {
            DFA dfa = decisionToDFA[i];
            if (dfa == null || dfa.states == null)
                continue;

            if (SHOW_CONFIG_STATS)
            {
                foreach (DFAState state in dfa.states.Keys)
                {
                    if (state.configs.Count >= contextsInDFAState.Length)
                        Array.Resize(ref contextsInDFAState, state.configs.Count + 1);

                    if (state.IsAcceptState)
                    {
                        bool hasGlobal = false;
                        foreach (ATNConfig config in state.configs)
                        {
                            if (config.ReachesIntoOuterContext)
                            {
                                globalConfigCount++;
                                hasGlobal = true;
                            }
                            else
                            {
                                localConfigCount++;
                            }
                        }

                        if (hasGlobal)
                            globalDfaCount++;
                        else
                            localDfaCount++;
                    }

                    contextsInDFAState[state.configs.Count]++;
                }
            }

            if (EXPORT_LARGEST_CONFIG_CONTEXTS)
            {
                foreach (DFAState state in dfa.states.Keys)
                {
                    foreach (ATNConfig config in state.configs)
                    {
                        string configOutput = config.ToDotString();
                        if (configOutput.Length <= configOutputSize)
                            continue;

                        configOutputSize = configOutput.Length;
                        writeFile(tmpdir, "d" + dfa.decision + ".s" + state.stateNumber + ".a" + config.Alt + ".config.dot", configOutput);
                    }
                }
            }
        }

        if (SHOW_CONFIG_STATS && currentPass == 0)
        {
            Console.Out.WriteLine(" DFA accept states: {0} total, {1} with only local context, {2} with a global context",
                localDfaCount + globalDfaCount, localDfaCount, globalDfaCount);
            Console.Out.WriteLine(" Config stats: {0} total, {1} local, {2} global",
                localConfigCount + globalConfigCount, localConfigCount, globalConfigCount);
            if (SHOW_DFA_STATE_STATS)
            {
                for (int i = 0; i < contextsInDFAState.Length; i++)
                {
                    if (contextsInDFAState[i] != 0)
                        Console.Out.WriteLine(" {0} configs = {1}", i, contextsInDFAState[i]);
                }
            }
        }
    }
}
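In this harness the QueuedTaskScheduler serves purely as a concurrency throttle: one anonymous queue caps parsing at NUMBER_OF_THREADS workers, and no priorities are involved. A minimal standalone sketch of that pattern (the thread count of 4 and the Compute work item are assumptions):

// Minimal sketch (assumed values): a QueuedTaskScheduler as a plain concurrency limiter.
var host = new QueuedTaskScheduler(4);               // at most 4 worker threads
TaskScheduler throttled = host.ActivateNewQueue();   // one queue at the default priority
Task[] work = Enumerable.Range(0, 16)
    .Select(i => Task.Factory.StartNew(() => Compute(i), CancellationToken.None, TaskCreationOptions.None, throttled))
    .ToArray();
Task.WaitAll(work);
host.Dispose();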
static TaskScheduler _ts_pri2;   // default priority

static BblTask()
{
    _ts_pri0 = _queuedScheduler.ActivateNewQueue(0);
    _ts_pri1 = _queuedScheduler.ActivateNewQueue(1);
    _ts_pri2 = _queuedScheduler.ActivateNewQueue(2);
}
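The static constructor references fields the excerpt leaves out. A hedged sketch of plausible declarations (assumed, not from the original):

// Assumed companion declarations (not shown in the excerpt).
static readonly QueuedTaskScheduler _queuedScheduler = new QueuedTaskScheduler();
static TaskScheduler _ts_pri0;   // highest priority
static TaskScheduler _ts_pri1;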
void InitDataFlow()
{
    //Create schedulers
    scheduler = new QueuedTaskScheduler(System.Threading.Tasks.TaskScheduler.Default, PARALLEL_READS);
    highPriorityScheduler = scheduler.ActivateNewQueue(0);
    lowPriorityScheduler = scheduler.ActivateNewQueue(1);

    //create options
    optionsReadHighP = new ExecutionDataflowBlockOptions
    {
        TaskScheduler = highPriorityScheduler,
        MaxDegreeOfParallelism = PARALLEL_READS,
        CancellationToken = cancelTokenSrc.Token
    };
    optionsReadLowP = new ExecutionDataflowBlockOptions
    {
        TaskScheduler = lowPriorityScheduler,
        MaxDegreeOfParallelism = PARALLEL_READS,
        CancellationToken = cancelTokenSrc.Token
    };
    optionsWriteBlock = new ExecutionDataflowBlockOptions
    {
        CancellationToken = cancelTokenSrc.Token
    };
    optionsBatchBlock = new GroupingDataflowBlockOptions
    {
        Greedy = true,
        CancellationToken = cancelTokenSrc.Token,
    };
    optionsLink = new DataflowLinkOptions
    {
        PropagateCompletion = true,
    };

    //create blocks
    bufferBlockHighP = new BufferBlock<SchedulerJob>();
    bufferBlockLowP = new BufferBlock<SchedulerJob>();
    highPriorityReadInfoBlock = new TransformBlock<SchedulerJob, CollectionResult>(async (sqlJob) =>
    {
        if (sqlJob != null && sqlJob.JobUpdater != null)
        {
            return await sqlJob.JobUpdater.UpdateJob(sqlJob);
        }

        return null;
    }, optionsReadHighP);
    lowPriorityReadInfoBlock = new TransformBlock<SchedulerJob, CollectionResult>(async (sqlJob) =>
    {
        if (sqlJob != null && sqlJob.JobUpdater != null)
        {
            return await sqlJob.JobUpdater.UpdateJob(sqlJob);
        }

        return null;
    }, optionsReadLowP);
    batchBlock = new BatchBlock<CollectionResult>(1, optionsBatchBlock);
    writeInfoBlock = new ActionBlock<CollectionResult[]>(sqlInfoArray => ResultSaver.SaveResults(sqlInfoArray), optionsWriteBlock);

    //link blocks
    bufferBlockHighP.LinkTo(highPriorityReadInfoBlock, optionsLink);
    bufferBlockLowP.LinkTo(lowPriorityReadInfoBlock, optionsLink);
    highPriorityReadInfoBlock.LinkTo(batchBlock, optionsLink);
    lowPriorityReadInfoBlock.LinkTo(batchBlock, optionsLink);
    batchBlock.LinkTo(writeInfoBlock, optionsLink);
}
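With both transform blocks feeding one batch block, a job's priority is decided by which buffer it enters: reads queued through highPriorityScheduler preempt those on lowPriorityScheduler whenever the shared thread pool is contended. A hedged sketch of a producer (the Enqueue method and its urgency rule are assumptions, not part of the original):

// Hypothetical producer (assumed): urgent jobs take the high-priority path.
public void Enqueue(SchedulerJob job, bool urgent)
{
    var target = urgent ? bufferBlockHighP : bufferBlockLowP;
    target.Post(job);
}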
protected void parseSources(ParserFactory factory, IEnumerable<ICharStream> sources)
{
    Stopwatch startTime = Stopwatch.StartNew();
    Thread.VolatileWrite(ref tokenCount, 0);
    int sourceCount = 0;
    int inputSize = 0;

#if NET_4_0
    BlockingCollection<int> threadIdentifiers = new BlockingCollection<int>();
    for (int i = 0; i < NUMBER_OF_THREADS; i++)
        threadIdentifiers.Add(i);

    ICollection<Task<int>> results = new List<Task<int>>();
    QueuedTaskScheduler executorServiceHost = new QueuedTaskScheduler(NUMBER_OF_THREADS);
    TaskScheduler executorService = executorServiceHost.ActivateNewQueue();
#else
    ICollection<Func<int>> results = new List<Func<int>>();
#endif

    foreach (ICharStream input in sources)
    {
        sourceCount++;
        input.Seek(0);
        inputSize += input.Size;
#if NET_4_0
        Task<int> futureChecksum = Task.Factory.StartNew<int>(
            new Callable_1(input, factory, threadIdentifiers).call,
            CancellationToken.None, TaskCreationOptions.None, executorService);
#else
        Func<int> futureChecksum = new Callable_1(input, factory).call;
#endif
        results.Add(futureChecksum);
    }

    Checksum checksum = new CRC32();
    foreach (var future in results)
    {
#if NET_4_0
        int value = future.Result;
#else
        int value = future();
#endif
        if (COMPUTE_CHECKSUM)
        {
            updateChecksum(checksum, value);
        }
    }

#if NET_4_0
    executorServiceHost.Dispose();
#endif

    Console.Out.WriteLine("Total parse time for {0} files ({1} KB, {2} tokens, checksum 0x{3:X8}): {4}ms",
        sourceCount, inputSize / 1024, Thread.VolatileRead(ref tokenCount),
        COMPUTE_CHECKSUM ? checksum.Value : 0, startTime.ElapsedMilliseconds);

    if (sharedLexers.Length > 0)
    {
        Lexer lexer = sharedLexers[0];
        LexerATNSimulator lexerInterpreter = lexer.Interpreter;
        DFA[] modeToDFA = lexerInterpreter.atn.modeToDFA;
        if (SHOW_DFA_STATE_STATS)
        {
            int states = 0;
            int configs = 0;
            HashSet<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
            for (int i = 0; i < modeToDFA.Length; i++)
            {
                DFA dfa = modeToDFA[i];
                if (dfa == null || dfa.states == null)
                    continue;

                states += dfa.states.Count;
                foreach (DFAState state in dfa.states.Values)
                {
                    configs += state.configs.Count;
                    uniqueConfigs.UnionWith(state.configs);
                }
            }

            Console.Out.WriteLine("There are {0} lexer DFAState instances, {1} configs ({2} unique), {3} prediction contexts.",
                states, configs, uniqueConfigs.Count, lexerInterpreter.atn.GetContextCacheSize());
        }
    }

    if (RUN_PARSER && sharedParsers.Length > 0)
    {
        Parser parser = sharedParsers[0];
        // make sure the individual DFAState objects actually have unique ATNConfig arrays
        ParserATNSimulator interpreter = parser.Interpreter;
        DFA[] decisionToDFA = interpreter.atn.decisionToDFA;
        if (SHOW_DFA_STATE_STATS)
        {
            int states = 0;
            int configs = 0;
            HashSet<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
            for (int i = 0; i < decisionToDFA.Length; i++)
            {
                DFA dfa = decisionToDFA[i];
                if (dfa == null || dfa.states == null)
                    continue;

                states += dfa.states.Count;
                foreach (DFAState state in dfa.states.Values)
                {
                    configs += state.configs.Count;
                    uniqueConfigs.UnionWith(state.configs);
                }
            }

            Console.Out.WriteLine("There are {0} parser DFAState instances, {1} configs ({2} unique), {3} prediction contexts.",
                states, configs, uniqueConfigs.Count, interpreter.atn.GetContextCacheSize());
        }

        int localDfaCount = 0;
        int globalDfaCount = 0;
        int localConfigCount = 0;
        int globalConfigCount = 0;
        int[] contextsInDFAState = new int[0];
        for (int i = 0; i < decisionToDFA.Length; i++)
        {
            DFA dfa = decisionToDFA[i];
            if (dfa == null || dfa.states == null)
                continue;

            if (SHOW_CONFIG_STATS)
            {
                foreach (DFAState state in dfa.states.Keys)
                {
                    if (state.configs.Count >= contextsInDFAState.Length)
                        Array.Resize(ref contextsInDFAState, state.configs.Count + 1);

                    if (state.isAcceptState)
                    {
                        bool hasGlobal = false;
                        foreach (ATNConfig config in state.configs)
                        {
                            if (config.ReachesIntoOuterContext)
                            {
                                globalConfigCount++;
                                hasGlobal = true;
                            }
                            else
                            {
                                localConfigCount++;
                            }
                        }

                        if (hasGlobal)
                            globalDfaCount++;
                        else
                            localDfaCount++;
                    }

                    contextsInDFAState[state.configs.Count]++;
                }
            }

            if (EXPORT_LARGEST_CONFIG_CONTEXTS)
            {
                foreach (DFAState state in dfa.states.Keys)
                {
                    foreach (ATNConfig config in state.configs)
                    {
                        string configOutput = config.ToDotString();
                        if (configOutput.Length <= configOutputSize)
                            continue;

                        configOutputSize = configOutput.Length;
                        writeFile(tmpdir, "d" + dfa.decision + ".s" + state.stateNumber + ".a" + config.Alt + ".config.dot", configOutput);
                    }
                }
            }
        }

        if (SHOW_CONFIG_STATS && currentPass == 0)
        {
            Console.Out.WriteLine(" DFA accept states: {0} total, {1} with only local context, {2} with a global context",
                localDfaCount + globalDfaCount, localDfaCount, globalDfaCount);
            Console.Out.WriteLine(" Config stats: {0} total, {1} local, {2} global",
                localConfigCount + globalConfigCount, localConfigCount, globalConfigCount);
            if (SHOW_DFA_STATE_STATS)
            {
                for (int i = 0; i < contextsInDFAState.Length; i++)
                {
                    if (contextsInDFAState[i] != 0)
                        Console.Out.WriteLine(" {0} configs = {1}", i, contextsInDFAState[i]);
                }
            }
        }
    }
}
private void CheckForAvailability()
{
    int threadCount = 0;
    bool isOperationCancelled = false;
    try
    {
        TaskScheduler taskScheduler = TaskScheduler.Current;
        List<Task> taskList = new List<Task>(dtRecords.Rows.Count);
        if (!int.TryParse(txtThreads.Text, out threadCount))
        {
            MessageBox.Show("Please enter a valid thread count.");
            return;
        }

        cancellationTokenSource = new CancellationTokenSource();
        QueuedTaskScheduler qts = new QueuedTaskScheduler(TaskScheduler.Default, threadCount);
        TaskScheduler pri0 = qts.ActivateNewQueue(priority: 0);

        List<Task<ThreadResult>> lst = new List<Task<ThreadResult>>();
        for (int i = 0; i < dtRecords.Rows.Count; i++)
        {
            string domain = !string.IsNullOrWhiteSpace(Convert.ToString(dtRecords.Rows[i]["domain"])) ? Convert.ToString(dtRecords.Rows[i]["domain"]) : Convert.ToString(dtRecords.Rows[i][1]);
            string dns1 = !string.IsNullOrWhiteSpace(Convert.ToString(dtRecords.Rows[i]["dns1"])) ? Convert.ToString(dtRecords.Rows[i]["dns1"]) : Convert.ToString(dtRecords.Rows[i][2]);
            string dns2 = !string.IsNullOrWhiteSpace(Convert.ToString(dtRecords.Rows[i]["dns2"])) ? Convert.ToString(dtRecords.Rows[i]["dns2"]) : Convert.ToString(dtRecords.Rows[i][3]);
            string proxy = !string.IsNullOrWhiteSpace(Convert.ToString(dtRecords.Rows[i]["proxy"])) ? Convert.ToString(dtRecords.Rows[i]["proxy"]) : Convert.ToString(dtRecords.Rows[i][4]);

            int index = i;
            if (i % 9 == 0)
            {
                // Every ninth request runs synchronously on the calling thread;
                // the rest are queued on the priority-0 queue.
                CreateWebRequest(dtRecords, lstEmails, index, domain, dns1, dns2, proxy, cancellationTokenSource.Token);
            }
            else
            {
                Task.Factory.StartNew<ThreadResult>(
                    () => CreateWebRequest(dtRecords, lstEmails, index, domain, dns1, dns2, proxy, cancellationTokenSource.Token),
                    CancellationToken.None, TaskCreationOptions.None, pri0);
            }
        }
    }
    catch (OperationCanceledException)
    {
        isOperationCancelled = true;
    }
    catch (Exception ex)
    {
        MessageBox.Show(string.Format("Exception occurred: {0}\nPlease try again", ex.Message));
    }
    finally
    {
        if (isOperationCancelled)
        {
            //MessageBox.Show("Operation Cancelled.");
        }
    }
}