/// <summary>
/// Simulates one request: exercises the multi-context metrics, then records
/// gauge/counter/meter/histogram samples while timing the work with
/// <c>_timer.NewContext</c>.
/// </summary>
/// <param name="i">Request index; used as the timer context name and histogram user value.</param>
public void Request(int i)
{
    var multiContextMetrics = new MultiContextMetrics(_metrics);
    multiContextMetrics.Run();

    for (var j = 0; j < 5; j++)
    {
        var multiContextInstanceMetrics = new MultiContextInstanceMetrics(_metrics);
        multiContextInstanceMetrics.Run();
    }

    // Reuse one Random instance instead of allocating three per call;
    // back-to-back `new Random()` instances can share a time-based seed on
    // older runtimes and then produce identical sequences.
    var random = new Random();

    using (_timer.NewContext(i.ToString())) // measure until disposed
    {
        _someValue *= i + 1; // will be reflected in the gauge
        _concurrentRequestsCounter.Increment(); // increment concurrent requests counter
        _totalRequestsCounter.Increment(); // increment total requests counter
        _meter.Mark(); // signal a new request to the meter
        _histogramOfData.Update(random.Next(5000), "user-value-" + i); // update the histogram with the input data

        var item = "Item " + random.Next(5);
        _setCounter.Increment(item);
        _setMeter.Mark(item);

        // simulate doing some work; Random.Next(3000) is already non-negative,
        // so the original Math.Abs wrapper was redundant
        var ms = random.Next(3000);
        Thread.Sleep(ms);

        _concurrentRequestsCounter.Decrement(); // decrement number of concurrent requests
    }
}
/// <inheritdoc />
public override void UnSubscribe(int id)
{
    // Only adjust the gauge when the client was actually registered;
    // TryRemove is atomic, so concurrent unsubscribes decrement at most once.
    if (_clients.TryRemove(id, out _))
    {
        _clientCounter.Decrement(1);
    }
}
/// <summary>
/// Verifies that decrementing a freshly prepared counter yields a count of -1.
/// </summary>
public void Decrement()
{
    Precondition();

    mCounter.Decrement();

    Assert.AreEqual(-1, mCounter.Count);
}
/// <summary>
/// Fires after each task is complete.
/// </summary>
/// <param name="t">The completed task.</param>
/// <param name="wir">The work item results (state object passed at task start).</param>
private void PostExecuteWorkItemCallback(Task t, object wir)
{
    var possibleState = wir;
    if (possibleState is StateInformation information && information.Group != null) // if not null, this is a work group
    {
        var state = information;
        DecrementCounter();
        DecrementGroup(state.Group);
        _groups[state.Group].MetricCounter.Decrement(1);
        _taskCounter.Decrement(_groups[state.Group].GroupInfo.Name, 1);
        SetWaitHandle(state.Group);
    }
    // NOTE(review): the source snippet appeared truncated after the if-block;
    // no else branch (non-group work item) was visible — confirm against the original file.
}
/// <summary>
/// Removes and returns the next queued write operation, or <c>Data.Empty</c>
/// when the write queue is drained. Decrements the write-queue counter on success.
/// </summary>
protected virtual Data GetNextOp()
{
    if (!writeQueue.TryDequeue(out var data))
    {
        return Data.Empty;
    }

    if (trace.IsEnabled())
    {
        trace.DequeueWriteOp(name);
    }

    counterWriteQueue.Decrement();

    return data;
}
/// <summary>
/// Records that a read operation left the queue: bumps the per-second
/// dequeue meter and shrinks the read-queue length gauge.
/// </summary>
public void DequeueReadOp()
{
    opDequeueReadPerSec.Increment();
    readQueueLength.Decrement();
}
/// <summary>
/// Records that a write operation left the queue: bumps the per-second
/// dequeue meter and shrinks the write-queue length gauge.
/// </summary>
public void DequeueWriteOp()
{
    opDequeueWritePerSec.Increment();
    writeQueueLength.Decrement();
}
/// <summary>
/// Handles the specified message.
/// </summary>
/// <typeparam name="T">the type of the message.</typeparam>
/// <param name="workGroup">The work group.</param>
/// <param name="message">The message.</param>
/// <param name="notifications">The notifications.</param>
/// <param name="functionToRun">The function to run.</param>
/// <param name="taskFactory">The task factory.</param>
/// <returns>The started task, or null when the queue is canceling/stopping.</returns>
public Task HandleAsync<T>(IWorkGroup workGroup, IReceivedMessage<T> message, IWorkerNotification notifications,
    Action<IReceivedMessage<T>, IWorkerNotification> functionToRun, ITaskFactory taskFactory)
    where T : class
{
    Guard.NotNull(() => message, message);
    Guard.NotNull(() => notifications, notifications);
    Guard.NotNull(() => functionToRun, functionToRun);
    Guard.NotNull(() => taskFactory, taskFactory);

    while (true)
    {
        // verify that we are not canceling or stopping before trying to queue the item
        // however, the transport must support rollbacks
        if (!ShouldHandle(notifications))
        {
            return null;
        }

        if (taskFactory.TryStartNew(
            state => { WrappedFunction(message, notifications, functionToRun); },
            new StateInformation(workGroup),
            task =>
            {
                if (task.IsFaulted && task.Exception?.InnerException is OperationCanceledException)
                {
                    // bubble the cancel exception; the queue will rollback the message if possible
                    throw new OperationCanceledException("user canceled", task.Exception.InnerException); // explicitly throw this
                }
                if (task.IsFaulted && task.Exception != null)
                {
                    // need to throw it
                    throw new DotNetWorkQueueException("Message processing exception", task.Exception.InnerException);
                }
            },
            out var start).Success())
        {
            try
            {
                return start;
            }
            finally
            {
                // block here if the scheduler is full
                try
                {
                    _waitingOnFreeThreadCounter.Increment();
                    taskFactory.Scheduler.WaitForFreeThread.Wait(workGroup);
                }
                finally
                {
                    _waitingOnFreeThreadCounter.Decrement();
                }
            }
        }

        // block if the scheduler is full, then retry the loop
        try
        {
            _waitingOnFreeThreadCounter.Increment();
            taskFactory.Scheduler.WaitForFreeThread.Wait(workGroup);
        }
        finally
        {
            _waitingOnFreeThreadCounter.Decrement();
        }
    }
}
/// <summary>
/// Convenience overload: decrements the counter by one.
/// </summary>
/// <param name="counter">The counter to decrement.</param>
/// <returns>The value returned by the underlying <c>Decrement(1)</c> call.</returns>
public static long Decrement(this ICounter counter) => counter.Decrement(1);
/// <summary>
/// Executing this method will perform all the operations of the sample:
/// set, get, increment, decrement and remove a distributed counter.
/// </summary>
public static void Run()
{
    // Initialize cache
    InitializeCache();

    // Create or get counter
    _counter = GetOrCreateCounter();

    Console.WriteLine("\n--- Set Counter value ---");
    // set counter value
    long counterValue = _counter.SetValue(10);
    // output counter value
    Console.WriteLine("Counter value set to {0}", counterValue);
    Console.WriteLine();

    Console.WriteLine("\n--- Get Counter value ---");
    // get counter value
    counterValue = _counter.Value;
    // output counter value
    Console.WriteLine("Counter Value: {0}", counterValue);
    Console.WriteLine();

    Console.WriteLine("\n--- Increment Counter ---");
    // increment counter value
    counterValue = _counter.Increment();
    // output counter value
    Console.WriteLine("Counter value incremented by 1");
    Console.WriteLine("New Counter value is {0}", counterValue);
    Console.WriteLine();

    Console.WriteLine("\n--- Increment Counter by value ---");
    // increment counter by 5
    counterValue = _counter.IncrementBy(5);
    // output counter value
    Console.WriteLine("Counter value incremented by {0}", 5);
    Console.WriteLine("New Counter value is {0}", counterValue);
    Console.WriteLine();

    Console.WriteLine("\n--- Decrement Counter ---");
    // decrement counter
    counterValue = _counter.Decrement();
    // output counter value
    Console.WriteLine("Counter value decremented by 1");
    Console.WriteLine("New Counter value is {0}", counterValue);
    Console.WriteLine();

    // decrement counter by value
    Console.WriteLine("\n--- Decrement Counter by value ---");
    // store decremented counter value in local variable
    counterValue = _counter.DecrementBy(2);
    // output counter value
    Console.WriteLine("Counter value decremented by {0}", 2);
    Console.WriteLine("New Counter value is {0}", counterValue);
    Console.WriteLine();

    // output counter value
    Console.WriteLine("\n--- Display Counter Value ---");
    Console.WriteLine("Counter Value: {0}", _counter.Value);
    Console.WriteLine();

    // Remove the distributed counter from cache
    Console.WriteLine("\n--- Remove Counter from Cache --- ");
    _cache.DataTypeManager.Remove(_counterName);
    Console.WriteLine();

    // Dispose the cache once done
    _cache.Dispose();
}
/// <summary>
/// Drains responses for queued read operations from the socket's read buffer.
/// When the buffer is empty it schedules an async receive and returns; when all
/// queued reads are processed it flips the node back into send mode and
/// requeues it for IO.
/// </summary>
private void PerformReceive()
{
fill:
    // no data to process => read the socket
    if (socket.ReadBuffer.IsEmpty)
    {
        if (LogTraceEnabled)
        {
            log.Trace("Read buffer is empty, ask for more.");
        }

        socket.ScheduleReceive(success =>
        {
            if (success)
            {
                MarkAsReady();
                owner.NeedsIO(this);
            }
            else
            {
                // this is a soft fail (cannot throw from other thread),
                // so we requeue for IO and exception will be thrown by Receive()
                FailMe(new IOException("Failed receiving from " + endpoint));
            }
        });

        return;
    }

    // process the commands in the readQueue
    while (readQueue.Count > 0)
    {
        // continue filling the previously unfinished response,
        // or create a new one
        var response = inprogressResponse ?? CreateResponse();

        // continue filling the Response object from the buffer
        // Read() returns true if further data (IO) is required
        // (usually when the current response data is larger than the receive buffer size)
        if (response.Read(socket.ReadBuffer))
        {
            inprogressResponse = response;
            if (LogTraceEnabled)
            {
                log.Trace("Response is not read fully, continue reading from the socket.");
            }

            // refill the buffer
            // TODO if Receive returns synchrously several times, a node with a huge inprogress response can monopolize the IO thread
            goto fill;
        }

        // successfully read a response from the read buffer
        inprogressResponse = null;
        var matching = false;

        while (!matching && readQueue.Count > 0)
        {
            var data = readQueue.Peek();
            Debug.Assert(!data.IsEmpty);

            // if the response does not matches the current op, it means it's a
            // response to later command in the queue, so all commands before it are silent commands
            // successful silent ops will receive null as response (since we have no real response)
            // (or we've ran into a bug)
            matching = data.Op.Handles(response);
            if (LogTraceEnabled)
            {
                // fixed typo in trace message: "reponse" -> "response"
                log.Trace("Command {0} handles response: {1}", data.Op, matching);
            }

            // returns false when no more IO is required => command is processed
            // otherwise continue filling the buffer
            if (!data.Op.ProcessResponse(matching ? response : null))
            {
                readQueue.Dequeue();
                counterReadQueue.Decrement();
                counterOpReadPerSec.Increment();

                if (trace.IsEnabled())
                {
                    trace.DequeueReadOp(name);
                }

                if (data.Task != null)
                {
                    data.Task.TrySetResult(data.Op);
                }
            }
        }

        response.Dispose();
    }

    // set the node into send mode and requeue for IO
    Volatile.Write(ref runMode, MODE_SEND);
    MarkAsReady();
    owner.NeedsIO(this);
}
/// <summary>
/// UI event handler (wired to a button, per the name): decrements the counter by one.
/// </summary>
private void button_decrement(object sender, RoutedEventArgs e) => counter.Decrement();