public override GameState GetMessage(MjsonMessageAll msgobj)
{
    if (msgobj.IsNONE())
    {
        getMsgList.Add(msgobj);
        if (getMsgList.Count < Constants.PLAYER_NUM)
        {
            return this;
        }

        // None of the four players took an action in the inherited messages.
        if (prevMsgList.Count(e => e.IsNONE()) == Constants.PLAYER_NUM)
        {
            // Check whether the hand (kyoku) can end.
            if (serverController.CanFinishKyoku())
            {
                serverController.Ryukyoku();
                getMsgList.Dispose();
                return new AfterRyukyokuState(this);
            }
            else
            {
                // Move to the tsumo (self-draw) state.
                serverController.Tsumo();
                return new AfterTsumoState(this);
            }
        }
        // hora
        else if (prevMsgList.Count(e => e.IsDAIMINKAN()) == 1)
        {
            var daiminkanObj = prevMsgList.First(e => e.IsDAIMINKAN());
            serverController.Daiminkan(daiminkanObj.actor, daiminkanObj.target, daiminkanObj.pai, daiminkanObj.consumed);
            prevMsgList.Dispose();
            return new AfterKanState(this);
        }
        else if (prevMsgList.Count(e => e.IsPON()) == 1)
        {
            var ponObj = prevMsgList.First(e => e.IsPON());
            serverController.Pon(ponObj.actor, ponObj.target, ponObj.pai, ponObj.consumed);
            prevMsgList.Dispose();
            return new AfterTsumoState(this);
        }
        else if (prevMsgList.Count(e => e.IsCHI()) == 1)
        {
            var chiObj = prevMsgList.First(e => e.IsCHI());
            serverController.Chi(chiObj.actor, chiObj.target, chiObj.pai, chiObj.consumed);
            prevMsgList.Dispose();
            return new AfterTsumoState(this);
        }
    }
    else
    {
        // Error handling: report the unexpected message to the room members.
        serverController.SendErrorToRoomMember(msgobj);
    }
    return this;
}
public string SolvePart1(IEnumerable<string> inputs)
{
    var machine = new IntCodeMachine(inputs.First());
    var maxThrusterSignal = 0;

    foreach (var phaseSettings in GetPhaseSettings(0, 1, 2, 3, 4))
    {
        var outputSignal = 0;
        foreach (var phaseSetting in phaseSettings)
        {
            var input = new BlockingCollection<int>(new ConcurrentQueue<int>(new[] { phaseSetting, outputSignal }));
            var output = new BlockingCollection<int>();
            machine.Execute(input, output);
            outputSignal = output.First();
        }

        if (outputSignal > maxThrusterSignal)
        {
            maxThrusterSignal = outputSignal;
        }
    }

    return maxThrusterSignal.ToString();
}
public void Publish_Where_Channel_Publication_Fails_Results_In_A_Non_Completed_Publication_Task_Which_Is_Placed_Back_On_The_Publication_Queue()
{
    var _connection = Substitute.For<IConnection>();
    var _channel = Substitute.For<IModel>();
    var _publicationQueue = new BlockingCollection<Publication>();

    _connection.CreateModel().Returns(_channel);
    _channel
        .When(channel => channel.BasicPublish(Arg.Any<string>(), Arg.Any<string>(), Arg.Any<IBasicProperties>(), Arg.Any<byte[]>()))
        .Do(callInfo => { throw new ApplicationException("Bang !"); });

    var _messageDelivery = new MessageDelivery("EXCHANGE", typeof(MyEvent).Name, MessageDeliveryMode.Persistent, message => "ARoutingKey");
    var _myEvent = new MyEvent(Guid.NewGuid(), "CorrlationId_1", "Detail", 1);
    var _taskCompletionSource = new TaskCompletionSource<PublicationResult>();
    var _publication = new Publication(_messageDelivery, _myEvent, _taskCompletionSource);

    var _SUT = new Publisher(_connection, _publicationQueue, CancellationToken.None);
    var _publisherTask = _SUT.Start();

    _publicationQueue.Add(_publication);

    try { _publisherTask.Wait(); } catch { }

    Assert.IsFalse(_publication.ResultTask.IsCompleted);
    Assert.AreSame(_publication, _publicationQueue.First());
}
public void Connected_UnsubscribeRequestAdded()
{
    BlockingCollection<MyRequest> requestCollection = null;
    this.SetupRequestSenderMocks((_0, pendingRequests, _1) => { requestCollection = pendingRequests; });
    this.SetupReceiverMocks();
    this.SetupDispatcherMocks();

    this._manager.OnConnected(null, EventArgs.Empty);

    var subscription = new MySubscription(null);
    this._subscriptions.Add(subscription.QueryID, subscription);
    this._manager.RemoveSubscription(subscription.QueryID);

    var request = requestCollection.First();
    Assert.Equal(MyRequestType.Unsubscribe, request.Type);
    Assert.Same(subscription, request.Subscription);
}
public void SendException()
{
    // Setup: Create a blocking collection to collect the output
    var bc = new BlockingCollection<Message>(new ConcurrentQueue<Message>());

    // If: I write an error as an exception with the request context
    const string errorMessage = "error";
    var e = new Exception(errorMessage);
    var rc = new RequestContext<CommonObjects.TestMessageContents>(CommonObjects.RequestMessage, bc);
    rc.SendError(e);

    // Then:
    // ... The message writer should have sent an error
    Assert.Single(bc);
    var firstMessage = bc.First();
    Assert.Equal(MessageType.ResponseError, firstMessage.MessageType);

    // ... The error object it built should have the required fields set
    var contents = firstMessage.GetTypedContents<Error>();
    Assert.Equal(e.HResult, contents.Code);
    Assert.Equal(errorMessage, contents.Message);
}
protected void RunBatching()
{
    int maxBatchingSize = config.MaxMessageBatchingSize;

    while (true)
    {
        if (Cts.IsCancellationRequested)
        {
            return;
        }

        var mlist = new List<T>();
        try
        {
            T firstRequest = requestQueue.Take();
            mlist.Add(firstRequest);
            while (requestQueue.Count != 0 &&
                   mlist.Count < maxBatchingSize &&
                   requestQueue.First().IsSameDestination(firstRequest))
            {
                mlist.Add(requestQueue.Take());
            }
        }
        catch (InvalidOperationException)
        {
            Log.Info(ErrorCode.Runtime_Error_100312, "Stop request processed");
            break;
        }

#if TRACK_DETAILED_STATS
        if (StatisticsCollector.CollectQueueStats)
        {
            foreach (var request in mlist)
            {
                queueTracking.OnDeQueueRequest(request);
            }
        }
        if (StatisticsCollector.CollectThreadTimeTrackingStats)
        {
            threadTracking.OnStartProcessing();
        }
#endif

        ProcessBatch(mlist);

#if TRACK_DETAILED_STATS
        if (StatisticsCollector.CollectThreadTimeTrackingStats)
        {
            threadTracking.OnStopProcessing();
            threadTracking.IncrementNumberOfProcessed(mlist.Count);
        }
#endif
    }
}
public void SendEvent()
{
    // Setup: Create a blocking collection to collect the output
    var bc = new BlockingCollection<Message>(new ConcurrentQueue<Message>());

    // If: I write an event with the request context
    var rc = new RequestContext<CommonObjects.TestMessageContents>(CommonObjects.RequestMessage, bc);
    rc.SendEvent(CommonObjects.EventType, CommonObjects.TestMessageContents.DefaultInstance);

    // Then: The message writer should have sent an event
    Assert.Single(bc);
    Assert.Equal(MessageType.Event, bc.First().MessageType);
}
private T MapFromCache<T>(DbDataReader reader, T entity)
{
    EntityColumnSchema entitySchema = cachedSchemas.First(schema => schema.EntityType == typeof(T));

    string[] columnNames = Enumerable.Range(0, reader.FieldCount)
        .Select(reader.GetName)
        .ToArray();

    foreach (ColumnSchema column in entitySchema.Columns)
    {
        if (columnNames.Contains(column.Name))
        {
            column.Property.SetValue(entity, reader[column.Name]);
        }
    }

    return entity;
}
public int drainTo<T>(BlockingCollection<T> blockingCollection, IList<T> toDrain)
{
    // Move every item currently in the collection into toDrain and
    // return how many items were transferred.
    int countItems = 0;
    T item;
    while (blockingCollection.TryTake(out item))
    {
        toDrain.Add(item);
        countItems++;
    }
    return countItems;
}
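A minimal usage sketch for a drain helper like the one above, using nothing beyond the BCL (the variable names are illustrative only, not from the original project): TryTake removes items without blocking, so the loop ends as soon as the collection is empty.

using System.Collections.Concurrent;
using System.Collections.Generic;

var queue = new BlockingCollection<int>(new ConcurrentQueue<int>());
queue.Add(1);
queue.Add(2);
queue.Add(3);

var drained = new List<int>();
int moved = 0;
// Non-blocking drain: TryTake returns false once the collection is empty.
while (queue.TryTake(out var item))
{
    drained.Add(item);
    moved++;
}
// drained now holds 1, 2, 3; moved == 3; queue.Count == 0.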
public void Feeder_AddToDownloadQueue()
{
    BlockingCollection<ParentLink> _newLinks = new BlockingCollection<ParentLink>(new ConcurrentQueue<ParentLink>(), 1000);
    BlockingCollection<ParentLink> _downloadQueue = new BlockingCollection<ParentLink>(new ConcurrentQueue<ParentLink>(), 1000);
    BlockingCollection<DownloadResult> _downloadResults = new BlockingCollection<DownloadResult>(new ConcurrentQueue<DownloadResult>(), 10);

    var seedParentLink = new ParentLink(Const.SEED, null);
    var feeder = new Feeder(_newLinks, _downloadQueue, 1, 1, new MockProgess());
    feeder.Start();

    _newLinks.Add(seedParentLink);
    Thread.Sleep(40);

    Assert.IsTrue(_newLinks.Count == 0);
    Assert.AreEqual(_downloadQueue.First(), seedParentLink);
    _downloadQueue.Take();

    feeder.Stop();
    Thread.Sleep(1000);
}
private DbParameter[] BuildFromCache(object parametersObject)
{
    EntityParameterSchema entitySchema = cachedSchemas
        .First(schema => schema.EntityType == parametersObject.GetType());

    DbParameter[] parameters = new DbParameter[entitySchema.Parameters.Count];
    for (int i = 0; i < entitySchema.Parameters.Count; i++)
    {
        DbParameter parameter = parameterFactory();
        parameter.ParameterName = entitySchema.Parameters[i].Name;
        parameter.DbType = entitySchema.Parameters[i].Type;
        parameter.Value = entitySchema.Parameters[i].Property.GetValue(parametersObject);
        parameters[i] = parameter;
    }

    return parameters;
}
public async Task SaveAsync_ShouldPlaceMimeMessageInQueue_WhenCorrectlyParsed()
{
    // Arrange
    var logger = new Mock<ILogger<SmtpServerBackgroundService>>();
    var messageQueue = new BlockingCollection<IMimeMessage>();
    var messageStore = new SimpleMessageStore(logger.Object, messageQueue);

    var message = new MimeMessage(
        new[] { new MailboxAddress(Encoding.UTF8, "Some sender", "*****@*****.**") },
        new[] { new MailboxAddress(Encoding.UTF8, "Some recipient", "*****@*****.**") },
        "subject",
        new TextPart(new TextFormat()));

    await using var stream = new MemoryStream();
    await message.WriteToAsync(FormatOptions.Default, stream, CancellationToken.None);

    // Act
    await messageStore.SaveAsync(
        new Mock<ISessionContext>().Object,
        new Mock<IMessageTransaction>().Object,
        new ReadOnlySequence<byte>(stream.GetBuffer()),
        CancellationToken.None);

    // Assert
    messageQueue.Should().HaveCount(1);
    messageQueue.First().Address.Should().Be("*****@*****.**");
}
public T First()
{
    if (Constants.USE_BLOCKING_COLLECTION)
    {
        return queue.First();
    }

    while (true)
    {
        bool lockTaken = false;
        try
        {
            Monitor.Enter(lockable, ref lockTaken);
            {
                if (list.Count > 0)
                {
                    return list[0];
                }
                if (IsAddingCompleted)
                {
                    throw new InvalidOperationException("IsAddingCompleted and the queue is empty.");
                }
                Monitor.Wait(lockable);
                continue; // loop and try again.
            }
        }
        finally
        {
            if (lockTaken)
            {
                Monitor.Exit(lockable);
            }
        }
    }
}
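A small sketch (an assumption on my part, not taken from the project above) of the peek semantics the BlockingCollection branch relies on: with a ConcurrentQueue-backed collection, LINQ's First() enumerates a snapshot in FIFO order and returns the head element without removing it, while Take() consumes it. Note that, unlike the lock-based path, First() on an empty BlockingCollection throws immediately rather than waiting for an item to arrive.

using System.Collections.Concurrent;
using System.Linq;

var queue = new BlockingCollection<int>(new ConcurrentQueue<int>());
queue.Add(10);
queue.Add(20);

int head = queue.First();   // peek: returns 10, nothing is removed
int count = queue.Count;    // still 2
int taken = queue.Take();   // consume: removes and returns 10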
static Block Mine(List<Block> tempBlocks, Hash.Hash hashuok, int triesAllowed, int timeAllowed)
{
    Object lockMe = new Object();
    Console.WriteLine("Limitations - Tries: " + triesAllowed + " Time: " + timeAllowed + " ms");

    //Block minedblock = new Block();
    BlockingCollection<Block> minedblock = new BlockingCollection<Block>();
    CancellationTokenSource cts = new CancellationTokenSource();
    string hashVal = "";

    ParallelOptions po = new ParallelOptions();
    po.MaxDegreeOfParallelism = numberOfCores;
    po.CancellationToken = cts.Token;

    var watch = System.Diagnostics.Stopwatch.StartNew();

    // guessing Hash
    Parallel.ForEach(tempBlocks, (Block temp, ParallelLoopState state) =>
    {
        for (int i = 0; i < triesAllowed; i++)
        {
            if (state.ShouldExitCurrentIteration)
            {
                //state.Break();
            }
            if (watch.ElapsedMilliseconds > timeAllowed)
            {
                //state.Break();
            }

            temp.Nonce += 1;
            string baseStr = temp.PrevBlockHash + temp.Date + temp.Version + temp.MerkelRootHash + temp.Nonce + temp.DifficultyTarget + temp.Nonce;
            hashuok.Value = baseStr;
            temp.Hash = hashuok.FingerPrint;

            if (temp.Hash.StartsWith(temp.DifficultyTarget))
            {
                //lock (lockMe)
                //{
                //minedblock = temp;
                minedblock.Add(temp);
                //}
                //Console.WriteLine("Temp block that has been mined hash: " + temp.Hash);
                hashVal = temp.Hash;
                state.Break();
            }
        }
    });

    if (minedblock.Count != 0)
    {
        Console.WriteLine("YOU HAVE MINED A BLOCK");
        //Console.WriteLine("Blocks hash was: " + minedblock.First().Hash);
        Console.WriteLine("Blocks hash was: " + hashVal);
        return minedblock.First();
    }
    else
    {
        return new Block();
    }
    //return minedblock;
}
public SlotMaster(AmazonDrive Drive, FileMetadata_Info downitem, CancellationToken ct = default(CancellationToken))
{
    cts = CancellationTokenSource.CreateLinkedTokenSource(cts_internal.Token, ct);
    this.Drive = Drive;
    targetItem = downitem;
    lastslot = ((downitem.OrignalLength ?? 0) - 1) / AmazonDriveStreamConfig.slotsize;
    lockslot1 = AmazonDriveStreamConfig.lockslotfirstnum;
    lockslot2 = lastslot - AmazonDriveStreamConfig.lockslotlastnum;
    if (lockslot2 < lockslot1)
    {
        lockslot2 = lockslot1;
    }
    int extraslot = 0;

    Task.Run(() =>
    {
        foreach (var newitem in SlotBuffer.GetConsumingEnumerable(cts.Token))
        {
            try
            {
                slot.GetOrAdd(newitem.Key, newitem.Value);
                while (slot.Count > AmazonDriveStreamConfig.slotbacklog + extraslot)
                {
                    Task.Delay(100, cts.Token).Wait(cts.Token);
                }
                if (cts.Token.IsCancellationRequested)
                {
                    return;
                }
            }
            catch { }
        }
    }, cts.Token);

    Task.Run(() =>
    {
        while (!cts.Token.IsCancellationRequested)
        {
            try
            {
                const int slotnumc = AmazonDriveStreamConfig.slotbacklog;

                // Too many slots, so remove the ones that are no longer needed.
                if (slot.Count > slotnumc)
                {
                    var pos = slot
                        .OrderByDescending(x => x.Value.Age)
                        .OrderBy(x => x.Value.ReadAge)
                        .First().Key;
                    var s = StartLock;
                    if (s != null)
                    {
                        pos = s.Value;
                    }
                    //Config.Log.LogOut(string.Format("AmazonDriveStream : Removing slots current pos {0}", pos));
                    var deleteitem = slot
                        .Where(x => !(x.Key >= StartLock && x.Key <= EndLock))
                        .Where(x => x.Key > lockslot1 && x.Key < lockslot2);
                    deleteitem = deleteitem
                        .Where(x => x.Key < pos - AmazonDriveStreamConfig.slotkeepold || x.Key > pos + AmazonDriveStreamConfig.slotbacklog * 2)
                        .OrderByDescending(x => x.Value.ReadAge)
                        .Take(slot.Count - slotnumc).ToArray();
                    foreach (var item in deleteitem)
                    {
                        MemoryStreamSlot o;
                        if (slot.TryRemove(item.Key, out o))
                        {
                            //Config.Log.LogOut(string.Format("AmazonDriveStream : Remove slot {0} pos {1:#,0} len {2:#,0}", item.Key, o.Offset, o.Length));
                            if (!(item.Key >= StartLock && item.Key <= EndLock))
                            {
                                o.Dispose();
                            }
                            else
                            {
                                slot.GetOrAdd(item.Key, o);
                            }
                        }
                    }
                    extraslot = slot.Count - slotnumc;
                }

                // Remove tasks that have finished.
                if (Tasks.Any(x => x.Done))
                {
                    var deleteitem = Tasks.Where(x => x.Done).ToArray();
                    foreach (var item in deleteitem)
                    {
                        SlotTask o;
                        if (Tasks.TryTake(out o))
                        {
                            //Config.Log.LogOut(string.Format("AmazonDriveStream : Remove end Task slot {0}", o.ReadingSlotno));
                            o.Dispose();
                        }
                    }
                }

                // Kill reader threads that have run too far ahead.
                if (accesslog.Count() > 0)
                {
                    var min_point = accesslog.OrderByDescending(x => x.Value).Take(1).Min(x => x.Key);
                    var max_point = min_point + AmazonDriveStreamConfig.slotbacklog;
                    min_point = Math.Max(min_point - AmazonDriveStreamConfig.slotnearby * 2, 0);
                    max_point = Math.Min(max_point + AmazonDriveStreamConfig.slotnearby * 2, lastslot);
                    //Config.Log.LogOut(string.Format("AmazonDriveStream : min_point {0}", min_point));
                    //Config.Log.LogOut(string.Format("AmazonDriveStream : max_point {0}", max_point));

                    if (min_point < lockslot2 && Tasks.Any(x => x.ReadingSlotno < min_point && x.ReadingSlotno > lockslot1))
                    {
                        var deleteitem = Tasks.Where(x => x.ReadingSlotno < min_point && x.ReadingSlotno > lockslot1).ToList();
                        SlotTask o;
                        while (deleteitem.Count > 0 && Tasks.TryTake(out o))
                        {
                            if (deleteitem.Contains(o))
                            {
                                //Config.Log.LogOut(string.Format("AmazonDriveStream : Remove1 Task slot {0} too far({1})", o.ReadingSlotno, min_point));
                                deleteitem.Remove(o);
                                o.Dispose();
                            }
                            else
                            {
                                Tasks.Add(o);
                            }
                        }
                    }

                    if (Tasks.Count == 1)
                    {
                        Tasks.First().leadThread = true;
                    }
                    else
                    {
                        foreach (var item in Tasks)
                        {
                            item.leadThread = false;
                        }
                    }

                    if (max_point > lockslot1 && Tasks.Any(x => x.ReadingSlotno > max_point && x.ReadingSlotno < lockslot2))
                    {
                        var deleteitem = Tasks.Where(x => x.ReadingSlotno > max_point && x.ReadingSlotno < lockslot2).ToList();
                        SlotTask o;
                        while (deleteitem.Count > 0 && Tasks.TryTake(out o))
                        {
                            if (deleteitem.Contains(o))
                            {
                                if (o.leadThread)
                                {
                                    //Config.Log.LogOut(string.Format("AmazonDriveStream : LeadThread Task slot {0} too far({1})", o.ReadingSlotno, min_point));
                                    deleteitem.Remove(o);
                                    Tasks.Add(o);
                                }
                                else
                                {
                                    //Config.Log.LogOut(string.Format("AmazonDriveStream : Remove2 Task slot {0} too far({1})", o.ReadingSlotno, min_point));
                                    deleteitem.Remove(o);
                                    o.Dispose();
                                }
                            }
                            else
                            {
                                Tasks.Add(o);
                            }
                        }
                    }
                }
            }
            catch { }
            //Config.Log.LogOut(string.Format("AmazonDriveStream : Tasks {0} slots {1}", Tasks.Count, slot.Count));
            //Config.Log.LogOut(string.Format("AmazonDriveStream : slot {0}", string.Join(",", Tasks.Select(x => x.ReadingSlotno.ToString()))));
            Task.Delay(500, cts.Token).Wait(cts.Token);
        }
    }, cts.Token);
}
internal Packet PeekProcessQueue()
{
    return _processQueue.First();
}
public void Employees3()
{
    var threadCnt = 20;
    var iterations = 100;
    var cnt = 100;

    var data = new BlockingCollection<Employee>();
    var deleted = new BlockingCollection<Employee>();

    for (var i = 0; i < threadCnt; i++)
    {
        for (var j = 0; j < cnt; j++)
        {
            var emp = makeEmployee(new GDID((uint)(i + 1), 0, (ulong)j));
            emp.Department = "DPT" + (i % 2).ToString();
            data.Add(emp);
        }
    }

    // initial population
    using (var ds = makeDataStore())
    {
        data.ForEach(e => ds.Insert(e));
    }

    Parallel.For(0, threadCnt, (i) =>
    {
        using (var ds = makeDataStore())
        {
            var department = "DPT" + (i % 2).ToString();
            for (var ii = 0; ii < iterations; ii++)
            {
                var fetchQry = new Query<Employee>("CRUD.Queries.Employee.FetchByDepartment")
                {
                    new Query.Param("pDepartment", department)
                };
                var stored = ds.LoadEnumerable(fetchQry);
                if (stored.Any())
                {
                    var idx = Ambient.Random.NextScaledRandomInteger(0, stored.Count());
                    var toLoad = stored.ElementAt(idx);
                    var emp = data.First(e => e.GDID == toLoad.GDID);

                    var fetchOne = new Query<Employee>("CRUD.Queries.Employee.FetchByGDID")
                    {
                        new Query.Param("pGDID", toLoad.GDID)
                    };
                    var row = ds.LoadDoc(fetchOne);
                    if (row == null)
                    {
                        continue; // element was deleted from database by another thread
                    }
                    Aver.IsTrue(emp.Equals(row));

                    // random deletion
                    if (Ambient.Random.NextScaledRandomInteger(0, 2) == 1)
                    {
                        ds.Delete(row);
                        deleted.Add(row);
                    }
                }

                // random insertion
                if (Ambient.Random.NextScaledRandomInteger(0, 2) == 1)
                {
                    var newRow = makeEmployee(new GDID((uint)(i + 1), 0, (ulong)(cnt + ii + 1)));
                    newRow.Department = department;
                    data.Add(newRow);
                    ds.Insert(newRow);
                }
            }
        }
    });

    var resultData = new List<Employee>();
    foreach (var emp in data)
    {
        if (!deleted.Any(e => e.GDID == emp.GDID))
        {
            resultData.Add(emp);
        }
    }

    using (var ds = makeDataStore())
    {
        var fetchQry = new Query<Employee>("CRUD.Queries.Employee.FetchAll");
        var employees = ds.LoadEnumerable(fetchQry);

        Aver.AreEqual(resultData.Count(), employees.Count());
        foreach (var emp in resultData)
        {
            var row = employees.FirstOrDefault(e => e.GDID == emp.GDID);
            Aver.IsNotNull(row);
            Aver.IsTrue(emp.Equals(row));
        }
    }
}