public IActionResult Post([FromBody] SequenceId value)
{
    var sequenceId = Guid.NewGuid().ToString();
    Console.WriteLine(value);

    try
    {
        // Persist the incoming sequence under a fresh key and publish an event for the worker.
        string textKey = "SEQUENCE_" + sequenceId;
        this.SaveDataToRedis(value, textKey);
        this.makeEvent(ConnectionMultiplexer.Connect(properties["REDIS_SERVER"]), textKey);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }

    // Poll the result database for up to five attempts (~1 second) before giving up.
    ConnectionMultiplexer redis = ConnectionMultiplexer.Connect(properties["REDIS_SERVER"]);
    IDatabase sequenceDb = redis.GetDatabase(Convert.ToInt32(properties["SEQUENCE_DB"]));
    string sequenceResult = null;

    for (short i = 0; i < 5; ++i)
    {
        sequenceResult = sequenceDb.StringGet("SEQUENCE_RESULT_" + value.id);
        if (sequenceResult == null)
        {
            Thread.Sleep(200);
        }
        else
        {
            return Ok(sequenceResult);
        }
    }

    // No result arrived in time; report failure to the caller.
    return new StatusCodeResult(402);
}
public bool Contains(SequenceId value)
{
    int difference = (this.Latest - value);
    if (difference == 0)
        return true;
    return this.historyArray.Contains(difference - 1);
}
private async void btnXacNhan_Click(object sender, EventArgs e)
{
    // Add newly selected officers to the weekly duty roster and remove deselected ones.
    for (int i = 0; i < grvCanBo.RowCount; i++)
    {
        CAN_BO canBo = grvCanBo.GetRow(i) as CAN_BO;
        if (grvCanBo.IsRowSelected(i))
        {
            if (!_lstIdCanBo.Contains(canBo.IdCanBo))
            {
                _db.TRUC_TUAN_CAN_BO.Add(new TRUC_TUAN_CAN_BO()
                {
                    IdTrucTuan_CanBo = SequenceId.TRUC_TUAN_CAN_BO(),
                    IdTrucTuan = _idTrucTuan,
                    IdCanBo = canBo.IdCanBo
                });
            }
        }
        else
        {
            var delete = _db.TRUC_TUAN_CAN_BO.Local.FirstOrDefault(p => p.IdCanBo == canBo.IdCanBo);
            if (delete != null)
            {
                _db.TRUC_TUAN_CAN_BO.Remove(delete);
            }
        }
    }

    await _db.SaveChangesAsync();
    this.DialogResult = DialogResult.OK;
}
public bool WriteUpdatePayload(IOutOctetStream octetStream)
{
    var canSendUpdatePacket = tendOut.CanIncrementOutgoingSequence;

    pendingOutSequenceNumberUsed = false;
    if (canSendUpdatePacket)
    {
        outgoingSequenceNumber = outgoingSequenceNumber.Next();
        pendingOutSequenceNumber = outgoingSequenceNumber;
        pendingOutSequenceNumberUsed = true;
        var tendSequenceId = tendOut.IncreaseOutgoingSequenceId();

        WriteHeader(octetStream, NormalMode, outgoingSequenceNumber.Value, connectionId);
        TendSerializer.Serialize(octetStream, tendIn, tendOut);

        var now = monotonicClock.NowMilliseconds();
        timestampedHistory.SentPacket(outgoingSequenceNumber, now, log);

        if (useDebugLogging)
        {
            log.Trace($"SendOneUpdatePacket: tendSeq{tendSequenceId}");
        }

        return true;
    }

    WritePing(octetStream);
    return false;
}
/// <summary>
/// Removes any acked or expired outgoing events.
/// </summary>
private void CleanOutgoingEvents(SequenceId ackedEventId)
{
    if (ackedEventId.IsValid == false)
    {
        return;
    }

    // Stop attempting to send acked events
    foreach (RailEvent evnt in outgoingEvents)
    {
        if (evnt.EventId <= ackedEventId)
        {
            evnt.Attempts = 0;
        }
    }

    // Clean out any events with zero attempts left
    while (outgoingEvents.Count > 0)
    {
        if (outgoingEvents.Peek().Attempts > 0)
        {
            break;
        }

        RailPool.Free(outgoingEvents.Dequeue());
    }
}
internal void CompareTo(SequenceId sequenceId1, object obj, int expected)
{
    if (obj is SequenceId sequenceId2)
    {
        Math.Sign(sequenceId1.CompareTo(sequenceId2)).Should().Be(expected);
    }
}
private void grvXuatCanh_InitNewRow(object sender, DevExpress.XtraGrid.Views.Grid.InitNewRowEventArgs e)
{
    XUAT_CANH_TRAI_PHEP xctp = grvXuatCanh.GetRow(e.RowHandle) as XUAT_CANH_TRAI_PHEP;
    xctp.ID = SequenceId.XUAT_CANH_TRAI_PHEP();
    xctp.ID_NGUOI = NguoiXuatCanh.ID;
}
public bool Equals(Asp330SequenceTest that)
{
    if (that is null) return false;
    if (ReferenceEquals(this, that)) return true;

    if (!SequenceId.Equals(that.SequenceId)) return false;
    if (!TestId.Equals(that.TestId)) return false;
    if (!SequenceName.Equals(that.SequenceName)) return false;
    if (!TestName.Equals(that.TestName)) return false;
    if (!TestTableName.Equals(that.TestTableName)) return false;

    return true;
}
public bool IsNewId(SequenceId id)
{
    if (this.ValueTooOld(id))
        return false;
    if (this.Contains(id))
        return false;
    return true;
}
/// <summary>
/// Calculates a hash code for the sequence.
/// </summary>
/// <returns>The combined hash code.</returns>
public override int GetHashCode()
{
    var hash = 17;
    hash = hash * 23 + SequenceId.GetHashCode();
    hash = hash * 23 + MsnFeatureId.GetHashCode();
    hash = hash * 23 + DatasetId.GetHashCode();
    return hash;
}
private void btnThemTuanMoi_Click(object sender, EventArgs e)
{
    tRUC_TUANBindingSource.Position = tRUC_TUANBindingSource.Add(new TRUC_TUAN()
    {
        IdTrucTuan = SequenceId.TRUC_TUAN(),
        TuNgay = DateTime.Now,
        DenNgay = DateTime.Now.AddDays(7)
    });
}
public void SentPacket(SequenceId sequenceId, long time, ILog log)
{
    //log.Info($"sent packet at time {sequenceId} time {time}");
    var p = new PacketHistoryItem { Time = time, SequenceId = sequenceId };
    items.Enqueue(p);
}
private void btnThemNgayBaoCaoMoi_Click(object sender, EventArgs e)
{
    var ngayBaoCao = new BAO_CAO_DINH_KY_NGAY_BAO_CAO();
    ngayBaoCao.Id = SequenceId.BAO_CAO_DINH_KY_NGAY_BAO_CAO();
    ngayBaoCao.IdBaoCaoDinhKy = this.Current.IdBaoCaoDinhKy;
    nGAY_BAO_CAOBindingSource.Position = nGAY_BAO_CAOBindingSource.Add(ngayBaoCao);
    nGAY_BAO_CAOBindingSource.EndEdit();
}
private void SaveDataToRedis(SequenceId value, String id)
{
    var redisDb = ConnectionMultiplexer.Connect(properties["REDIS_SERVER"])
        .GetDatabase(Convert.ToInt32(properties["SEQUENCE_DB"]));
    string json = JsonConvert.SerializeObject(value);
    redisDb.StringSet(id, json);
    Console.WriteLine(id + ": " + value + " - saved to redis SEQUENCE_DB");
}
public void Increment()
{
    var sequenceId1 = new SequenceId(byte.MaxValue);
    (++sequenceId1).Should().Be(SequenceId.Empty);

    var sequenceId2 = SequenceId.Empty;
    (++sequenceId2).Should().Be(new SequenceId(1));
}
public void AddPacket(SequenceId sequenceId, byte[] octets)
{
    var now = monotonicClock.NowMilliseconds();
    var snapshot = new PacketPayload(octets, sequenceId, now);
    lock (packets)
    {
        packets.Add(snapshot);
    }
}
private void grvCongVan_InitNewRow(object sender, DevExpress.XtraGrid.Views.Grid.InitNewRowEventArgs e)
{
    CONG_VAN cv = grvCongVan.GetRow(e.RowHandle) as CONG_VAN;
    if (cv != null)
    {
        cv.IdCongVan = SequenceId.CONG_VAN();
        cv.NgayNhan = DateTime.Now;
    }
}
public bool Contains(SequenceId value)
{
    int difference = (this.Latest - value);
    if (difference == 0)
    {
        return true;
    }

    return this.historyArray.Contains(difference - 1);
}
internal void EqualsTest(SequenceId sequenceId1, object obj, bool expected)
{
    if (obj is SequenceId sequenceId2)
    {
        sequenceId1.Equals(sequenceId2).Should().Be(expected);
        (sequenceId1 == sequenceId2).Should().Be(expected);
        (sequenceId1 != sequenceId2).Should().Be(!expected);
        sequenceId1.GetHashCode().Equals(sequenceId2.GetHashCode()).Should().Be(expected);
    }

    sequenceId1.Equals(obj).Should().Be(expected);
}
private void btnThemBaoCaoMoi_Click(object sender, EventArgs e)
{
    var baoCao = new BAO_CAO_DINH_KY();
    baoCao.IdBaoCaoDinhKy = SequenceId.BAO_CAO_DINH_KY();
    int pos = bAO_CAO_DINH_KYBindingSource.Add(baoCao);
    _db.SaveChanges();
    bAO_CAO_DINH_KYBindingSource.Position = pos;
}
public bool IsNewId(SequenceId id)
{
    if (ValueTooOld(id))
    {
        return false;
    }

    if (Contains(id))
    {
        return false;
    }

    return true;
}
private void btnThemCanBo_Click(object sender, EventArgs e)
{
    CAN_BO canBo = new CAN_BO()
    {
        IdCanBo = SequenceId.CAN_BO(),
        TrinhDoLyLuanChinhTri = "Chưa có", // "Chưa có" = "none yet" (default qualification values)
        TrinhDoNgoaiNgu = "Chưa có",
        TrinhDoTinHoc = "Chưa có"
    };
    cAN_BOBindingSource.Position = cAN_BOBindingSource.Add(canBo);
}
public void Initialize(
    Tick senderTick,
    Tick lastAckTick,
    SequenceId lastAckEventId,
    IEnumerable<RailEvent> events)
{
    SenderTick = senderTick;
    LastAckTick = lastAckTick;
    LastAckEventId = lastAckEventId;
    pendingEvents.AddRange(events);
    EventsWritten = 0;
}
/// <summary>
/// Selects outgoing events to send.
/// </summary>
private IEnumerable<RailEvent> FilterOutgoingEvents()
{
    // The receiving client can only store a limited size sequence history
    // of events in its received buffer, and will skip any events older than
    // its latest received minus that history length, including reliable
    // events. In order to make sure we don't force the client to skip an
    // event with attempts remaining, we will throttle the outgoing events
    // if we've been sending them too fast. For example, if we have a live
    // event with ID 3 pending, and a maximum history length of 64 (see
    // RailConfig.HISTORY_CHUNKS) then the highest ID we can send would be
    // ID 67. Were we to send an event with ID 68, then the client may ignore
    // ID 3 when it comes in for being too old, even though it's still live.
    //
    // In practice this shouldn't be a problem unless we're sending way
    // more events than is reasonable(/possible) in a single packet, or
    // something is wrong with reliable event acking. You can always increase
    // the number of history chunks if this becomes an issue.
    SequenceId firstId = SequenceId.Invalid;
    foreach (RailEvent evnt in outgoingEvents)
    {
        // Ignore dead events, they'll be cleaned up eventually
        if (evnt.Attempts <= 0)
        {
            continue;
        }

        // Don't send an event if it's out of scope for this peer
        if (Scope != null && Scope.Includes(evnt) == false)
        {
            // Skipping due to out of scope counts as an attempt
            evnt.RegisterSkip();
            continue;
        }

        if (firstId.IsValid == false)
        {
            firstId = evnt.EventId;
        }

        RailDebug.Assert(firstId <= evnt.EventId);

        if (eventHistory.AreInRange(firstId, evnt.EventId) == false)
        {
            RailDebug.LogWarning("Throttling events due to lack of ack");
            break;
        }

        yield return evnt;
    }
}
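// Illustrative sketch of the throttling rule described in the comments above, using plain
// ints in place of SequenceId and a history length of 64 as in the example; the helper name
// and the constant are assumptions for illustration only, not part of the original source.
private const int AssumedHistoryLength = 64;

private static bool CanSendWithoutSkipRisk(int firstLiveEventId, int candidateEventId)
{
    // With first live event ID 3, the highest sendable ID is 3 + 64 = 67; sending 68 would
    // let the receiver discard ID 3 as "too old" before it could be acked.
    return (candidateEventId - firstLiveEventId) <= AssumedHistoryLength;
}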
private bool Contains(SequenceId value)
{
    int difference = Latest - value;
    if (difference < 0)
    {
        return false;
    }

    if (difference == 0)
    {
        return true;
    }

    return history.Get(difference - 1);
}
/// <summary>
/// Queues an event to send directly to this peer (used internally).
/// </summary>
internal void SendEvent([NotNull] RailEvent evnt, ushort attempts, bool bMakeCopy)
{
    // TODO: Event scoping
    RailEvent toSend = evnt;

    if (bMakeCopy)
    {
        toSend = evnt.Clone(Resource);
    }

    toSend.EventId = lastQueuedEventId;
    toSend.Attempts = attempts;

    outgoingEvents.Enqueue(toSend);
    lastQueuedEventId = lastQueuedEventId.Next;
}
private static int ValidateSequenceIds(SequenceId expectedSequenceId, IList<IElement> elements)
{
    int validationCount = 0;
    foreach (IElement element in elements)
    {
        NUnit.Framework.Assert.IsTrue(element is AbstractIdentifiableElement);
        NUnit.Framework.Assert.IsTrue(element is IAbstractElement);
        NUnit.Framework.Assert.AreEqual(expectedSequenceId,
            SequenceIdManager.GetSequenceId((AbstractIdentifiableElement)element));
        validationCount += 1;
        validationCount += ValidateSequenceIds(expectedSequenceId, ((IAbstractElement)element).GetChildren());
    }

    return validationCount;
}
public async Task GetAsync_ExistingSequence_SequenceReturned()
{
    var store = new DefaultSequenceStore(new IntegrationLoggerSubstitute<DefaultSequenceStore>());
    var context = ConsumerPipelineContextHelper.CreateSubstitute(sequenceStore: store);

    await store.AddAsync(new ChunkSequence("aaa", 10, context));
    await store.AddAsync(new ChunkSequence("bbb", 10, context));
    await store.AddAsync(new ChunkSequence("ccc", 10, context));

    var result = await store.GetAsync<ChunkSequence>("bbb");

    result.Should().NotBeNull();
    result!.SequenceId.Should().Be("bbb");
}
public void Enqueue(SequenceId sequenceId, bool wasReceived)
{
    if (sequenceId.Value != expectedSequenceId.Value)
    {
        throw new Exception($"wrong packet notification. Expected {expectedSequenceId} but received {sequenceId}");
    }

    var info = new PacketReceivedNotification { SequenceId = sequenceId, WasReceived = wasReceived };
    queue.Enqueue(info);
    expectedSequenceId = expectedSequenceId.Next();
}
private void Reset()
{
    state = ConnectionState.Idle;
    lastIncomingSequence = SequenceId.Max;
    outgoingSequenceNumber = SequenceId.Max;
    pendingOutSequenceNumber = outgoingSequenceNumber;
    outSequenceNumber = 0;
    challengeNonce = 0;
    connectInfo = new ConnectInfo();
    tendIn.Clear();
    tendOut.Clear();
    incomingPacketBuffer.Clear();
    receivedNotifications.Clear();
    lastStateChange = monotonicClock.NowMilliseconds();
    lastValidHeader = monotonicClock.NowMilliseconds();
    lastSentPackets = monotonicClock.NowMilliseconds();
}
private static void HandleSequence(string value, ConnectionMultiplexer redisConnection)
{
    try
    {
        SequenceId sId = JsonConvert.DeserializeObject<SequenceId>(value);

        // Load the grammar, the transformed grammar, and the parsing table stored for this id.
        int grammarDbNumber = Convert.ToInt32(properties["GRAMMAR_DB"]);
        IDatabase grammarDb = redisConnection.GetDatabase(grammarDbNumber);
        string grammarStr = grammarDb.StringGet("GRAMMAR_" + sId.id);
        Grammar grammar = JsonConvert.DeserializeObject<Grammar>(grammarStr);

        int newGrammarDbNumber = Convert.ToInt32(properties["NEW_GRAMMAR_DB"]);
        IDatabase newGrammarDb = redisConnection.GetDatabase(newGrammarDbNumber);
        string newGrammarStr = newGrammarDb.StringGet("NEW_GRAMMAR_" + sId.id);
        NewGrammar newGrammar = JsonConvert.DeserializeObject<NewGrammar>(newGrammarStr);

        int mTableDbNumber = Convert.ToInt32(properties["TABLE_M_DB"]);
        IDatabase mTableDb = redisConnection.GetDatabase(mTableDbNumber);
        string mTableStr = mTableDb.StringGet("TABLE_M_" + sId.id);
        MTable mTable = JsonConvert.DeserializeObject<MTable>(mTableStr);

        // Process the sequence and store the result where the API controller polls for it.
        SequenceHandler sequenceHandler = new SequenceHandler(grammar.startSymbol, newGrammar.terminals, newGrammar.noTerminals, mTable.mTable);
        sequenceHandler.Process(sId.sequence);

        IDatabase redisDb = ConnectionMultiplexer.Connect(properties["REDIS_SERVER"])
            .GetDatabase(Convert.ToInt32(properties["SEQUENCE_DB"]));
        Sequence sequence = sequenceHandler.GetSequence();
        sequence.grammarId = "NEW_GRAMMAR_" + sId.id;

        string json = JsonConvert.SerializeObject(sequence);
        String newId = "SEQUENCE_RESULT_" + sId.id;
        redisDb.StringSet(newId, json);
        Console.WriteLine(newId + ": " + json + " - saved to redis SEQUENCE_DB");

        MakeStatisticEvent(newId);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}
public async Task GetAsync_PartialId_SequenceReturnedIfMatchPrefixIsTrue(bool matchPrefix)
{
    var store = new DefaultSequenceStore(new IntegrationLoggerSubstitute<DefaultSequenceStore>());
    var context = ConsumerPipelineContextHelper.CreateSubstitute(sequenceStore: store);

    await store.AddAsync(new ChunkSequence("aaa-123", 10, context));

    var result = await store.GetAsync<ChunkSequence>("aaa", matchPrefix);

    if (matchPrefix)
    {
        result.Should().NotBeNull();
        result!.SequenceId.Should().Be("aaa-123");
    }
    else
    {
        result.Should().BeNull();
    }
}
public SequenceWindow Store(SequenceId value)
{
    SequenceId latest = this.latest;
    BitArray64 historyArray = this.historyArray;

    int difference = this.latest - value;
    if (difference > 0)
    {
        historyArray = this.historyArray.Store(difference - 1);
    }
    else
    {
        int offset = -difference;
        historyArray = (this.historyArray << offset).Store(offset - 1);
        latest = value;
    }

    return new SequenceWindow(latest, historyArray);
}
private SequenceWindow(SequenceId latest, BitArray64 history)
{
    RailDebug.Assert(latest.IsValid);
    this.latest = latest;
    this.historyArray = history;
}
public SequenceWindow(SequenceId latest)
{
    RailDebug.Assert(latest.IsValid);
    this.latest = latest;
    this.historyArray = new BitArray64();
}
public static bool AreInRange(SequenceId lowest, SequenceId highest)
{
    return (highest - lowest) <= SequenceWindow.HISTORY_LENGTH;
}
public bool ValueTooOld(SequenceId value)
{
    return (this.Latest - value) > SequenceWindow.HISTORY_LENGTH;
}
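// Minimal usage sketch showing how the SequenceWindow members above (IsNewId, Store,
// ValueTooOld, Contains) would typically combine to filter duplicate or stale packet ids.
// The helper name TryAccept is an assumption for illustration; it assumes IsNewId and Store
// live on the same SequenceWindow type, as the snippets above suggest.
public static bool TryAccept(SequenceId incoming, ref SequenceWindow window)
{
    // Reject ids that fell outside the tracked history or were already recorded.
    if (!window.IsNewId(incoming))
    {
        return false;
    }

    // Record the id so later duplicates are rejected; Store returns a new window
    // because SequenceWindow is constructed immutably.
    window = window.Store(incoming);
    return true;
}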