/// <summary>
/// Callback invoked when the persistent subscription drops. Marks the consumer
/// as not live, drains events that were delivered but never processed, and
/// returns early (no reconnect) when the drop was user initiated.
/// </summary>
/// <param name="sub">The subscription that dropped (unused beyond the callback signature).</param>
/// <param name="reason">Why EventStore dropped the subscription.</param>
/// <param name="ex">Exception associated with the drop, if any.</param>
private void SubscriptionDropped(EventStorePersistentSubscriptionBase sub, SubscriptionDropReason reason, Exception ex)
{
    Live = false;
    Logger.Write(LogLevel.Info, () => $"Disconnected from subscription. Reason: {reason} Exception: {ex}");

    // Todo: is it possible to ACK an event from a reconnection?
    //if (_toAck.Any())
    //    throw new InvalidOperationException(
    //        $"Eventstore subscription dropped and we need to ACK {_toAck.Count} more events");

    // Need to clear ReadyEvents of events delivered but not processed before disconnect.
    // Fix: decrement the gauges only when an event was actually dequeued. The original
    // decremented unconditionally inside a `while (!IsEmpty)` loop, so a concurrent
    // consumer winning the TryDequeue race would leave the counters drifted.
    ResolvedEvent e;
    while (_waitingEvents.TryDequeue(out e))
    {
        Queued.Decrement(Id);
        QueuedEvents.Decrement(Id);
    }

    if (reason == SubscriptionDropReason.UserInitiated)
    {
        return;
    }

    // Task.Run(Connect, _token);
}
public void DecrementTest()
{
    // A fresh counter starts at zero; each Decrement should step the value down by one.
    var counter = new Counter();
    Assert.AreEqual(0, counter.Value);

    for (var expected = -1; expected >= -3; expected--)
    {
        counter.Decrement();
        Assert.AreEqual(expected, counter.Value);
    }
}
/// <summary>
/// Ships a batch of telemetry events to Elasticsearch: a bulk request when more
/// than one event is queued, a single index request otherwise. On failure the
/// error meter is marked and the queue gauge is still decremented.
/// </summary>
/// <param name="events">Events to send; an empty list is a no-op.</param>
/// <param name="throwExceptions">When true, rethrows send failures after logging.</param>
protected void Send(IList<TelemetryEvent> events, bool throwExceptions = false)
{
    if (events.Count == 0)
    {
        return;
    }

    try
    {
        if (events.Count > 1)
        {
            // Build an Elasticsearch bulk body: one action line + one source line per event.
            var sb = new StringBuilder();
            foreach (var telemetryEvent in events)
            {
                sb.AppendFormat("{{ \"index\" : {{ \"_index\":\"{0}\", \"_type\":\"{1}\" }} }}\n",
                    BuildIndex(telemetryEvent.PublishDateTime), telemetryEvent.Type);
                sb.Append(JsonConvert.SerializeObject(telemetryEvent.Data, _jsonSerializerSettings) + "\n");
            }

            var bulkBody = sb.ToString();
            using (_requestTimer.NewContext())
            {
                _elasticClient.LowLevel.Bulk<VoidResponse>(bulkBody);
            }
        }
        else
        {
            var doc = events[0];
            var index = BuildIndex(doc.PublishDateTime);
            // Fix: use the shared serializer settings here too. The single-document
            // path previously serialized without _jsonSerializerSettings, producing
            // payloads inconsistent with the bulk path.
            var jsonPayload = JsonConvert.SerializeObject(doc.Data, _jsonSerializerSettings);
            using (_requestTimer.NewContext())
            {
                _elasticClient.LowLevel.Index<VoidResponse>(index, doc.Type, jsonPayload);
            }
        }

        _eventsInQueueCounter.Decrement(events.Count);
        _eventsSentMeter.Mark(events.Count);
    }
    catch (Exception ex)
    {
        // Failed events still leave the queue: account for them, then surface the error.
        _errorsMeter.Mark(events.Count);
        _eventsInQueueCounter.Decrement(events.Count);
        _logger.ErrorException("Failed to send events to elastic search", ex);
        if (throwExceptions)
        {
            throw;
        }
    }
}
// Evicts the oldest (last) message from the cache, updates the bucket histogram,
// and returns the evicted batch. Statement ORDER here is load-bearing:
// LastItem is read once before RemoveLast (the victim) and once after (the new tail).
private IBatchContainer RemoveLastMessage()
{
    // Capture the batch we are about to evict so it can be returned afterwards.
    var removedBatchContainer = LastItem.Batch;

    // Removing the last message
    _cachedMessages.RemoveLast();

    // Some bucket updating
    var bucket = _cacheCursorHistogram[0]; // same as: var bucket = last.Value.CacheBucket;
    bucket.UpdateNumItems(-1);
    if (bucket.NumCurrentItems == 0)
    {
        // Bucket drained completely — drop it from the histogram.
        Log(_logger, "TimedQueueCache for QueueId:{0}, RemoveLastMessage: Last bucket is empty, removing it", Id.ToString());
        _cacheCursorHistogram.RemoveAt(0);
    }
    else
    {
        // Bucket still has items: refresh its oldest-member timestamp from the
        // NEW last item (LastItem re-read after RemoveLast — do not hoist).
        _cacheCursorHistogram[0].OldestMemberTimestamp = LastItem.Timestamp;
    }

    // Metrics: one eviction event, one fewer message in cache.
    _meterCacheEvacuationsPerSecond.Mark(Id.ToString(), 1);
    _counterMessagesInCache.Decrement(Id.ToString(), 1);
    return (removedBatchContainer);
}
// Steps down the counter at `position`; when it reaches its final tick the
// position is considered paid, and when no counters remain the level is won.
void DecrementCounter(Vector3Int position)
{
    // Nothing registered at this position — ignore.
    Counter counter;
    if (!posToCounter.TryGetValue(position, out counter))
    {
        return;
    }

    // Counter on its last tick: announce payment and drop it from the map.
    if (counter.Value() == 1)
    {
        payedUpEvent.Invoke();
        posToCounter.Remove(position);
    }

    if (counter.Value() > 0)
    {
        counter.Decrement();
    }

    // All counters paid off — record progress and trigger the win sequence.
    if (posToCounter.Count == 0)
    {
        int levelIndex = SceneManager.GetActiveScene().buildIndex;
        if (levelIndex < lastLevel)
        {
            PlayerPrefs.SetInt("Progress", levelIndex + 1);
        }

        // Level Complete
        winEvent.Invoke();
        screenFader.SetBool("Fading", true);
    }
}
// Enqueues a job for asynchronous processing, tracking queue depth both in an
// Interlocked field and in a metrics counter; logs every tenth enqueue at Info
// (or every enqueue at Debug when enabled).
private void QueueTask(Job x)
{
    Interlocked.Increment(ref _processingQueueSize);
    _queueSize.Increment();

    var logAtInfo = _processingQueueSize % 10 == 0;
    if (logAtInfo || Logger.IsDebugEnabled)
    {
        var eventType = _mapper.GetMappedTypeFor(x.Event.GetType());
        var msg = String.Format("Queueing event {0} at position {1}. Size of queue: {2}/{3}",
            eventType.FullName, x.Position, _processingQueueSize, _maxQueueSize);
        if (logAtInfo)
        {
            Logger.Info(msg);
        }
        else
        {
            Logger.Debug(msg);
        }
    }

    _processor.Queue(async () =>
    {
        // Counters are released once the job has actually been processed.
        await Process(x.Event, x.Descriptor, x.Position);
        _queueSize.Decrement();
        Interlocked.Decrement(ref _processingQueueSize);
    });
}
/// <summary>
/// Demo request handler exercising the metrics API: gauge, counters, meter,
/// histogram, and a timed context around simulated work.
/// </summary>
/// <param name="i">Request index used to label timer/histogram contexts.</param>
public void Request(int i)
{
    new MultiContextMetrics().Run();
    MultiContextInstanceMetrics.RunSample();

    using (_timer.NewContext(i.ToString())) // measure until disposed
    {
        // Fix: share a single Random instance. The original constructed three
        // time-seeded `new Random()` instances back-to-back, which can yield
        // identical sequences when created within the same clock tick.
        var random = new Random();

        _someValue *= (i + 1); // will be reflected in the gauge

        _concurrentRequestsCounter.Increment(); // increment concurrent requests counter
        _totalRequestsCounter.Increment(); // increment total requests counter
        _meter.Mark(); // signal a new request to the meter
        _histogramOfData.Update(random.Next(5000), i.ToString()); // update the histogram with the input data

        var item = "Item " + random.Next(5);
        _setCounter.Increment(item);
        _setMeter.Mark(item);

        // simulate doing some work
        int ms = Math.Abs(random.Next(3000));
        Thread.Sleep(ms);

        _concurrentRequestsCounter.Decrement(); // decrement number of concurrent requests
    }
}
// Uses the ability: damage is computed from the user BEFORE a charge is spent,
// then the hit is applied to the target.
public override void Use(Creature user, Creature target)
{
    var damage = Power(user);
    counter.Decrement();
    target.Hit(user, damage);
}
/// <summary>
/// OWIN metrics middleware: tracks active requests, records request body size
/// for POST/PUT, times the downstream pipeline, and marks 500 responses.
/// </summary>
/// <param name="ctx">The OWIN context for the current request.</param>
/// <param name="next">Continuation invoking the rest of the pipeline.</param>
public async Task Handle(IOwinContext ctx, Func<Task> next)
{
    _activeRequests.Increment();
    try
    {
        var httpMethod = ctx.Environment["owin.RequestMethod"].ToString().ToUpper();
        if (httpMethod == "POST" || httpMethod == "PUT")
        {
            var headers = (IDictionary<string, string[]>)ctx.Environment["owin.RequestHeaders"];
            if (headers != null && headers.ContainsKey("Content-Length"))
            {
                _payloadSizeHistogram.Update(long.Parse(headers["Content-Length"].First()));
            }
        }

        using (_timer.NewContext())
        {
            await next.Invoke();
        }

        var responseCode = int.Parse(ctx.Environment["owin.ResponseStatusCode"].ToString());
        if (responseCode == (int)HttpStatusCode.InternalServerError)
        {
            _errorMeter.Mark();
        }
    }
    finally
    {
        // Fix: decrement in finally so the active-request gauge does not leak
        // when downstream middleware throws.
        _activeRequests.Decrement();
    }
}
/// <summary>
/// Reports whether the cache is under backpressure. Pressure exists only when
/// the cache is full AND either the oldest bucket still has cursors or evicting
/// it would break the cache's timespan guarantee. All "no pressure" exits zero
/// the pressure gauge via a shared helper (the original repeated that block four times).
/// </summary>
/// <returns>True when the cache cannot currently accept new messages.</returns>
public bool IsUnderPressure()
{
    // empty cache
    if (_cachedMessages.Count == 0)
    {
        ReleasePressure();
        return false;
    }

    // no cursors yet - zero consumers basically yet.
    if (_cacheCursorHistogram.Count == 0)
    {
        ReleasePressure();
        return false;
    }

    // If the cache still has room, no problem of adding
    if (Size < _maxCacheSize)
    {
        ReleasePressure();
        CalculateMessagesToAdd();
        return false;
    }

    // cache is full. Need Check how many cursors we have in the oldest bucket
    // AND that we don't break our timespan guarantee.
    var numCursorsInLastBucket = _cacheCursorHistogram[0].NumCurrentCursors;
    var currentCacheTimespan = DateTime.UtcNow - _cacheCursorHistogram[0].NewestMemberTimestamp;
    if (numCursorsInLastBucket > 0 || currentCacheTimespan <= _cacheTimeSpan)
    {
        // Still under pressure: adjust the gauge by the delta (may be negative).
        _counterNumberOfCursorsCausingPressure.Increment(Id.ToString(), numCursorsInLastBucket - _numOfCursorsCausingPressure);
        _numOfCursorsCausingPressure = numCursorsInLastBucket;
        return true;
    }

    // Cache is full yet we can add messages, calculating how many messages we can put
    ReleasePressure();
    CalculateMessagesToAdd();
    return false;
}

// Zeroes the pressure gauge and the cached cursor count (deduplicates the
// reset block that appeared at every "no pressure" exit).
private void ReleasePressure()
{
    _counterNumberOfCursorsCausingPressure.Decrement(Id.ToString(), _numOfCursorsCausingPressure);
    _numOfCursorsCausingPressure = 0;
}
public void ShouldExplicitlyDecrementCounter()
{
    // Arrange
    var counter = new Counter(new AtomicCounter(), new CounterMetricName("foo"));

    // Act: two single increments netted out by one decrement-by-two.
    counter.Increment();
    counter.Increment();
    counter.Decrement(2);

    // Assert
    counter.Current.Should().Be(0);
}
public void should_decrement_counter_by_one()
{
    // Arrange: a counter seeded at 10.
    var counter = new Counter(new MetricConfig("counter1"), 10);

    // Act: one single-step decrement.
    counter.Decrement();

    // Assert: the synced measurement dropped by exactly one.
    double measured = Testing.Sync(counter, counter.GetMeasure, counter.context_);
    Assert.That(measured, Is.EqualTo(10 - 1));
}
public void should_decrement_counter_by_given_delta()
{
    // Arrange: a counter seeded at 15.
    var counter = new Counter(new MetricConfig("counter1"), 15);

    // Act: decrement by an explicit delta of 12.
    counter.Decrement(12);

    // Assert: the synced measurement reflects the full delta.
    double measured = Testing.Sync(counter, counter.GetMeasure, counter.context_);
    Assert.That(measured, Is.EqualTo(15 - 12));
}
// Entry point: exercises the Counter API — start at 1, step up, step down,
// then adjust by an explicit delta of 2.
private static void Main(string[] args)
{
    var counter = new Counter(1);
    counter.Increment();
    counter.Decrement();
    counter.Adjust(2);
}
// Parametrized test: applying n single-step decrements to a fresh counter
// should yield expectedCount.
public void Decrement(int n, int expectedCount)
{
    var counter = new Counter();

    int remaining = n;
    while (remaining-- > 0)
    {
        counter.Decrement();
    }

    Assert.Equal(expectedCount, counter.Count.Value);
}
// Uses the ability when available: spends a charge, hits the target for a
// fixed power of 1, and applies a Stunned buff to the user.
public override void Use(Creature user, Creature target)
{
    // Guard: no-op when the ability cannot currently be used.
    if (!Available)
    {
        return;
    }

    const int power = 1;
    counter.Decrement();
    target.Hit(user, power);
    user.ApplyBuff<Stunned>(-1);
}
// After-action filter hook: decrements the in-flight TPS counter, but only
// when the metrics backend is configured (both address and db present).
public override void OnActionExecuted(HttpActionExecutedContext actionExecutedContext)
{
    bool metricsConfigured =
        !string.IsNullOrWhiteSpace(_address) &&
        !string.IsNullOrWhiteSpace(_db);

    if (metricsConfigured)
    {
        _tpsRequestsCounter.Decrement();
    }

    base.OnActionExecuted(actionExecutedContext);
}
/// <summary>
/// Test helper that holds the shared counter elevated for ~200ms of simulated work.
/// </summary>
/// <param name="name">Caller member name (captured automatically; currently unused here).</param>
protected static void TestMethod([CallerMemberName] string?name = null)
{
    // Fix: increment BEFORE entering the try block. Originally the first sleep
    // and the Increment both sat inside the try, so an exception during that
    // first sleep (e.g. a thread interrupt) ran the finally's Decrement with no
    // matching Increment, corrupting the count.
    Thread.Sleep(200);
    Counter.Increment();
    try
    {
        Thread.Sleep(200);
    }
    finally
    {
        // Always release the count once it has been taken.
        Counter.Decrement();
    }
}
// Shuts down the receiver: gives any in-flight commit up to `timeout` to
// complete, then decrements the active-receivers gauge.
public async Task Shutdown(TimeSpan timeout)
{
    var pendingCommit = _currentCommitTask;
    if (pendingCommit != null)
    {
        // Wait for the commit or the timeout, whichever finishes first.
        await Task.WhenAny(pendingCommit, Task.Delay(timeout));
        _currentCommitTask = null;
        _logger.Info("KafkaQueueAdapterReceiver - The receiver had finished a commit and was shutted down");
    }

    CounterActiveReceivers.Decrement();
}
public void CounterTest01()
{
    // Seeded at 1000: Increment/Decrement return the updated value directly.
    var c = new Counter(1000);

    Assert.AreEqual(1001, c.Increment());
    Assert.AreEqual(1002, c.Increment());
    Assert.AreEqual(1001, c.Decrement());
}
/// <summary>
/// Ends the unit of work: commits on success (null exception) or marks the
/// error meter on failure; always releases the concurrency gauge and timer.
/// </summary>
/// <param name="ex">Null when the unit of work succeeded; otherwise the failure.</param>
async Task IEventUnitOfWork.End(Exception ex)
{
    try
    {
        if (ex == null)
        {
            await Commit();
        }
        else
        {
            _errorsMeter.Mark();
        }
    }
    finally
    {
        // Fix: release the concurrency gauge and dispose the timer context even
        // when Commit() throws — the original leaked both on a failed commit.
        _eventsConcurrent.Decrement();
        _timerContext.Dispose();
    }
}
// Constructs the snapshot reader. The FIRST instance to win the CompareExchange
// also owns two background timers: one that pushes truncateBefore metadata to
// EventStore, one that expires cached snapshots. Later instances return early
// so the timers are created exactly once.
public SnapshotReader(IStoreEvents store, IEventStoreConsumer consumer)
{
    _consumer = consumer;

    // Only one instance may own the timers; everyone else just keeps the consumer.
    if (Interlocked.CompareExchange(ref _truncating, 1, 0) == 1)
    {
        return;
    }

    // Writes truncateBefore metadata to snapshot streams to let ES know it can delete old snapshots
    // its done here so that we actually get the snapshot before its deleted
    _truncate = Timer.Repeat(async(state) =>
    {
        var eventstore = state as IStoreEvents;

        // Snapshot the pending stream keys, then race to remove each one so a
        // key is only processed by whoever wins TryRemove.
        var truncates = TruncateBefore.Keys.ToList();

        await truncates.WhenAllAsync(async x =>
        {
            long tb;
            if (!TruncateBefore.TryRemove(x, out tb))
            {
                return;
            }

            try
            {
                await eventstore.WriteMetadata(x, truncateBefore: tb).ConfigureAwait(false);
            }
            catch
            {
                // NOTE(review): metadata write failures are deliberately swallowed
                // (best-effort truncation) — consider at least logging here.
            }
        });
    }, store, TimeSpan.FromMinutes(5), "snapshot truncate before");

    // Every 5 minutes, evict cached snapshots older than 5 minutes and keep the
    // StoredSnapshots gauge in sync with successful removals.
    _snapshotExpiration = Timer.Repeat(() =>
    {
        var expired = Snapshots.Where(x => (DateTime.UtcNow - x.Value.Item1) > TimeSpan.FromMinutes(5)).Select(x => x.Key)
            .ToList();

        Tuple <DateTime, ISnapshot> temp;
        foreach (var key in expired)
        {
            if (Snapshots.TryRemove(key, out temp))
            {
                StoredSnapshots.Decrement();
            }
        }

        return (Task.CompletedTask);
    }, TimeSpan.FromMinutes(5), "expires snapshots from the cache");
}
// Benchmark setup: creates the actor system and a test actor whose only
// behavior is to bump the MsgReceived counter for every message.
public void Setup(BenchmarkContext context)
{
    MsgReceived = context.GetCounter("MsgReceived");
    System = ActorSystem.Create("PerfSys", Config);

    Action<IActorDsl> behavior = d => d.ReceiveAny((o, c) => { MsgReceived.Increment(); });
    TestActor = System.ActorOf(Props.Create(() => new Act(behavior)).WithDispatcher("calling-thread-dispatcher"), "testactor");

    // force initialization of the actor
    TestActor.Tell("warmup");
    // Undo the count caused by the warmup message so measurement starts from zero.
    MsgReceived.Decrement();
}
/// <summary>
/// OWIN middleware: wraps the downstream pipeline with an active-requests
/// gauge when the request qualifies for metrics; otherwise passes straight through.
/// </summary>
/// <param name="environment">The OWIN environment dictionary for this request.</param>
public async Task Invoke(IDictionary<string, object> environment)
{
    if (PerformMetric(environment))
    {
        _activeRequests.Increment();
        try
        {
            await _next(environment);
        }
        finally
        {
            // Fix: decrement in finally so an exception downstream does not
            // permanently inflate the active-request gauge.
            _activeRequests.Decrement();
        }
    }
    else
    {
        await _next(environment);
    }
}
/// <summary>
/// ASP.NET Core middleware: wraps the downstream pipeline with an
/// active-requests gauge when the request qualifies for metrics.
/// </summary>
/// <param name="context">The HTTP context for the current request.</param>
public async Task Invoke(HttpContext context)
{
    if (PerformMetric(context))
    {
        _activeRequests.Increment();
        try
        {
            await _next(context);
        }
        finally
        {
            // Fix: decrement in finally so an exception downstream does not
            // permanently inflate the active-request gauge.
            _activeRequests.Decrement();
        }
    }
    else
    {
        await _next(context);
    }
}
// Benchmark harness for NEventStore: purges the store, then hammers 10 streams
// from 4 parallel workers, retrying each commit on optimistic-concurrency
// conflicts and recording conflicts/latency in the metrics.
private static void ConfigureNES()
{
    Wireup wireup = Wireup.Init();
    //PersistenceWireup persistenceWireup = ConfigureSql(wireup);
    PersistenceWireup persistenceWireup = ConfigureMongo(wireup);

    counter.Increment(Iterations);

    using (
        var storeEvents = persistenceWireup.InitializeStorageEngine()
            .UsingBinarySerialization()
            .Build())
    {
        storeEvents.Advanced.Purge();

        Parallel.For(1, Iterations, new ParallelOptions { MaxDegreeOfParallelism = 4 }, x =>
        {
            using (timer.NewContext())
            {
                do
                {
                    try
                    {
                        // Spread the load across 10 streams.
                        var streamId = x % 10;
                        var stream = storeEvents.OpenStream("default", streamId.ToString(), 0, int.MaxValue);
                        stream.Add(new EventMessage() { Body = "abc" });
                        stream.CommitChanges(Guid.NewGuid());
                        break;
                    }
                    catch (ConcurrencyException) // Fix: drop the unused `ex` variable (compiler warning CS0168)
                    {
                        // Optimistic-concurrency conflict: count it and retry the commit.
                        concurrency.Increment();
                    }
                } while (true);
            }

            counter.Decrement();
        });

        Console.WriteLine("Press any key to exit...");
        Console.ReadLine();
    }
}
// Marks a failed-log entry as fully processed: removes it from the in-progress
// list (throwing if it was never registered), notifies the entry, and persists
// it when it captured any content. All mutation happens under the list's lock.
private void ProcessingComplete(FailedLogEntry failedLog)
{
    lock (logsBeingProcessed)
    {
        var index = logsBeingProcessed.FindIndex(candidate => candidate.ID == failedLog.ID);
        if (index < 0)
        {
            // Argument name comes from NotifyFailedLogParsed
            throw new ArgumentException("Failed log does not exist in this registry", "entry");
        }

        logsBeingProcessed.RemoveAt(index);
        logsBeingProcessedCounter.Decrement();

        failedLog.LogRegistryNotified();
        // Only persist entries that actually captured something.
        if (!failedLog.IsEmpty)
        {
            storage.AddLogSorted(failedLog);
        }
    }
}
// Handles a peer disconnect: updates metrics, removes the channel from the
// registry, sanity-checks identity, and notifies subscribers.
void OnPeerDisconnected(INetChannel channel)
{
    _netChannelsCounter.Decrement();
    _netChannelsDisconnectedMeter.Mark();

    // Guard: the channel may already have been removed by another path.
    TNetChannel savedPeer;
    if (!ChannelsById.TryRemove(channel.TransportChannel.LocalId, out savedPeer))
    {
        return;
    }

    if (!ReferenceEquals(savedPeer, channel))
    {
        throw new Exception("WTF! channel in dictionary is not the same as in disconnect event");
    }

    _onNodeDisconnectedSubject.OnNext(savedPeer);
    _logger.NetChannelDisconnected(this, channel);
}
// Finishes a fire action: spends ammo, spawns the bullet ahead of the shooter,
// and wires up either on-screen bullet counting or a lifetime-based despawn.
private void CompleteFire()
{
    // Spend a round if an ammo counter is wired up.
    if (m_ammoCounter != null)
    {
        m_ammoCounter.Decrement();
    }

    // Spawn the bullet slightly ahead of the shooter along its velocity direction.
    var bullet = Instantiate(m_bulletPrefab, transform.position + (Vector3)Velocity.normalized * m_fireOffset, Quaternion.identity);
    bullet.name = name + " bullet";
    var body = bullet.GetComponent <Rigidbody2D>();
    body.velocity = Velocity;

    if (m_destroyOffscreenBullets)
    {
        // Self-destruct as soon as the bullet leaves the screen.
        var offscreen = bullet.gameObject.AddComponent <OffScreenTrigger>();
        offscreen.OnExitScreen.AddListener(bullet.gameObject.DestroySelf);
    }

    if (m_bulletsOnScreenMax > 0)
    {
        // Cap is enabled: track the on-screen bullet count.
        // NOTE(review): BOTH the exit-screen and destroyed listeners decrement the
        // count — a bullet that exits the screen and is later destroyed would be
        // counted down twice; confirm whether that is intended.
        ++m_bulletsOnScreenCount;
        var offscreen = bullet.gameObject.GetComponent <OffScreenTrigger>();
        if (offscreen == null)
        {
            // Reuse the trigger added above when m_destroyOffscreenBullets is set.
            offscreen = bullet.gameObject.AddComponent <OffScreenTrigger>();
        }
        offscreen.OnExitScreen.AddListener(() => { --m_bulletsOnScreenCount; });

        var onDestroyed = bullet.gameObject.AddComponent <DestroyedTrigger>();
        onDestroyed.OnDestroyed.AddListener(() => { --m_bulletsOnScreenCount; });
    }
    else
    {
        // No cap: just let the bullet time out.
        Destroy(bullet.gameObject, m_lifeTime);
    }

    // Reset the fire-rate timer.
    m_secSinceLastShot = 0.0f;
}
// Entry point: wires a redux-style store around the Counter reducer, logs
// every state change, and dispatches a scripted sequence of actions.
static void Main(string[] args)
{
    counterStore = Ops<int>.CreateStore(Counter.Reduce);
    counterStore.StateChanged += CounterStoreStateChanged;
    Console.WriteLine("Initial state: " + counterStore.State); // 0

    SendAction(Counter.Increment()); // 1
    SendAction(Counter.Decrement()); // 0
    SendAction(Counter.Increment()); // 1
    SendAction(Counter.Increment()); // 2

    // Shouldn't output anything.
    SendAction(new ReduxAction("DUMMY_ACTION"));

    SendAction(Counter.ChangeBy(-3)); // -1
    SendAction(Counter.ChangeBy(5)); // 4

    Console.ReadKey();
}