/// <summary>
/// Drains _queue in strict sequence order: a package is handled only when its key is
/// exactly _lastSequenceNumber + 1. Duplicates (key &lt;= last) are discarded back to the
/// pool; gaps make the loop signal the producer and wait. Runs until _cancellationToken
/// is cancelled, then returns Task.CompletedTask (synchronous worker wrapped as a Task).
/// </summary>
private Task ProcessQueue()
{
    try
    {
        while (!_cancellationToken.IsCancellationRequested)
        {
            KeyValuePair<int, Package> pair;
            if (_queue.TryPeek(out pair))
            {
                if (pair.Key == _lastSequenceNumber + 1)
                {
                    // Next expected package: dequeue, advance the sequence, and dispatch.
                    if (_queue.TryDequeue(out pair))
                    {
                        _lastSequenceNumber = pair.Key;
                        HandlePackage(pair.Value, this);
                        if (_queue.Count == 0)
                        {
                            // Queue drained: signal the main event and wait up to 50ms for more work.
                            WaitHandle.SignalAndWait(_mainWaitEvent, _waitEvent, TimeSpan.FromMilliseconds(50), true);
                        }
                    }
                }
                else if (pair.Key <= _lastSequenceNumber)
                {
                    // Already-handled sequence number (resend): drop and return to the pool.
                    // NOTE(review): logged at Warn level but guarded by IsDebugEnabled — the
                    // sibling ProcessOrderedQueue uses Log.Debug here; confirm intended level.
                    if (Log.IsDebugEnabled)
                    {
                        Log.Warn($"{Username} - Resent. Expected {_lastSequenceNumber + 1}, but was {pair.Key}.");
                    }
                    if (_queue.TryDequeue(out pair))
                    {
                        pair.Value.PutPool();
                    }
                }
                else
                {
                    // Gap in the sequence: leave the package queued and wait for the missing one.
                    if (Log.IsDebugEnabled)
                    {
                        Log.Warn($"{Username} - Wrong sequence. Expected {_lastSequenceNumber + 1}, but was {pair.Key}.");
                    }
                    WaitHandle.SignalAndWait(_mainWaitEvent, _waitEvent, TimeSpan.FromMilliseconds(50), true);
                }
            }
            else
            {
                if (_queue.Count == 0)
                {
                    // Nothing queued at all: idle until signalled or timeout.
                    WaitHandle.SignalAndWait(_mainWaitEvent, _waitEvent, TimeSpan.FromMilliseconds(50), true);
                }
            }
        }
    }
    catch (Exception e)
    {
        Log.Error($"Exit receive handler task for player", e);
    }
    return(Task.CompletedTask);
}
/// <summary>
/// Blocks until an event can be dequeued from pQueue and returns it.
/// </summary>
/// <returns>The value of the dequeued (priority, event) pair.</returns>
public BaseEvent Dequeue()
{
    KeyValuePair<long, BaseEvent> pair;
    // FIX: the original `while (!TryDequeue) { ; }` was a hot busy-wait that pins
    // a core while the queue is empty. SpinWait backs off progressively
    // (spin -> Yield -> Sleep) while preserving the same blocking semantics.
    var spinner = new SpinWait();
    while (!pQueue.TryDequeue(out pair))
    {
        spinner.SpinOnce();
    }
    return(pair.Value);
}
///// SUB FUNCTIONS

/// <summary>
/// Drain the queue: dispatch items that are ready to run, drop cancelled or
/// invalid items, and requeue (with a freshly computed priority) anything
/// that is not yet ready.
/// </summary>
protected override void jobFunction()
{
    while (queue.Count > 0)
    {
        if (!queue.TryDequeue(out KeyValuePair<PriorityType, QueueItemType> dequeuedEntry))
        {
            continue;
        }

        var item = dequeuedEntry.Value;

        // Cancelled or invalid entries are discarded entirely.
        if (itemIsCanceled(item) || !isAValidQueueItem(item))
        {
            continue;
        }

        if (itemIsReady(item))
        {
            // Ready: offer it up to the running jobs to pick up.
            queueJobFor(item);
        }
        else
        {
            // Not ready yet: recompute its priority and put it back in the queue.
            queue.Enqueue(getPriorityAndPackageItem(item));
        }
    }
}
/// <summary>
/// Unity update loop. Pulls work produced by the level-management thread and
/// applies it on the main thread: chunk activation, controller assignment,
/// meshing, de-meshing, and deactivation (one item per queue per frame).
/// </summary>
void Update()
{
    if (!isLoaded)
    {
        return;
    }

    // Schedule jobs that became ready on the levelManager thread,
    // then collect any jobs that have finished running.
    runReadyJobs();
    checkForFinishedJobs();

    // NOTE: newly activated chunks go first so we don't mesh and then
    // activate within the same frame.

    /// Activation queue: a chunk may only become visible once it has an
    /// assigned controller that is active, meshed, and has baked colliders;
    /// otherwise it is requeued to be retried on a later frame.
    if (chunksToActivate.TryDequeue(out KeyValuePair<float, Coordinate> activationEntry))
    {
        bool readyToShow =
            tryToGetAssignedChunkController(activationEntry.Value, out ChunkController activationController)
            && (activationController.isActive && activationController.isMeshed)
            && activationController.checkColliderIsBaked();

        if (readyToShow)
        {
            activationController.enableObjectVisible();
        }
        else
        {
            chunksToActivate.Enqueue(activationEntry);
        }
    }

    /// Newly meshed chunks still waiting on a free controller: retry the
    /// assignment, requeueing on failure.
    if (chunkMeshesWaitingForAFreeController.TryDequeue(out KeyValuePair<float, VoxelMeshData> pendingMeshEntry))
    {
        if (!tryToAssignNewlyMeshedChunkToController(pendingMeshEntry.Value))
        {
            chunkMeshesWaitingForAFreeController.Enqueue(pendingMeshEntry);
        }
    }

    /// Push freshly generated mesh data into its chunk controller.
    if (chunksToMesh.TryDequeue(out KeyValuePair<float, ChunkController> meshEntry))
    {
        meshEntry.Value.updateMeshWithChunkData();
    }

    /// Remove the mesh for a chunk and reset its mesh data.
    if (chunksToDeMesh.TryDequeue(out KeyValuePair<float, ChunkController> demeshEntry))
    {
        demeshEntry.Value.deactivateAndClear();
    }

    /// Deactivation queue: hide the chunk if it still has a controller.
    if (chunksToDeactivate.TryDequeue(out KeyValuePair<float, Coordinate> deactivationEntry))
    {
        if (tryToGetAssignedChunkController(deactivationEntry.Value, out ChunkController deactivationController))
        {
            deactivationController.disableObjectVisible();
        }
    }
}
/// <summary>
/// Enqueues 10,000 random non-negative values concurrently (one task each),
/// then dequeues sequentially and verifies the dequeued multiset matches
/// the enqueued one.
/// </summary>
public async Task ConcurrentEnqueueSequentialDequeue()
{
    var q = new ConcurrentPriorityQueue<int>();
    var r = new Random(0);
    var values = new int[10000];
    for (int i = 0; i < values.Length; i++)
    {
        // Mask the sign bit so every priority is non-negative.
        values[i] = 0x7FFFFFFF & r.Next();
    }

    // Race all enqueues against each other.
    await Task.WhenAll(values.Select(value => Task.Run(() => q.Enqueue(value, value))));

    // Drain the queue on this thread.
    var dequeuedValues = new List<int>();
    int dequeuedPriority, dequeuedValue;
    while (q.TryDequeue(out dequeuedPriority, out dequeuedValue))
    {
        dequeuedValues.Add(dequeuedValue);
    }

    // Order-insensitive comparison: sort both sides before comparing.
    Array.Sort(values);
    dequeuedValues.Sort();
    XAssert.IsTrue(values.SequenceEqual(dequeuedValues));
}
/// <summary>
/// Tile-loader loop; runs on a dedicated thread until stopLoader is set.
/// Pulls tile addresses from requestQueue, reads each tile from storage, and
/// pushes the result onto loadedTiles. On an I/O failure a solid-magenta
/// placeholder tile is enqueued instead so the consumer never stalls.
/// </summary>
void LoaderTask()
{
    while (!stopLoader)
    {
        VTAddress address;
#if USE_PRIORITY_QUEUE
        // Priority-queue build: requests arrive as (priority, address) pairs.
        address = default(VTAddress);
        KeyValuePair<int, VTAddress> result;
        if (!requestQueue.TryDequeue(out result))
        {
            //Thread.Sleep(1);
            continue;
        }
        else
        {
            address = result.Value;
        }
#else
        // Plain-queue build: requests are bare addresses.
        if (!requestQueue.TryDequeue(out address))
        {
            //Thread.Sleep(1);
            continue;
        }
#endif
        var fileName = address.GetFileNameWithoutExtension(".tile");
        //Log.Message("...vt tile load : {0}", fileName );
        try
        {
            var tile = new VTTile(address);
            tile.Read(storage.OpenFile(fileName, FileMode.Open, FileAccess.Read));
            loadedTiles.Enqueue(tile);
        }
        catch (IOException ioex)
        {
            // Read failed: produce a visible error tile (magenta) and log the cause.
            var tile = new VTTile(address);
            tile.Clear(Color.Magenta);
            loadedTiles.Enqueue(tile);
            Log.Warning("{0}", ioex);
        }
    }
}
/// <summary>
/// Worker loop for a DB query thread. While Status is Running, dequeues read
/// requests from m_QueryWorkItems, executes the SQL against the query connection
/// (under performance monitoring), and enqueues the result into m_CallBackItems
/// for later callback dispatch. m_CallingItemsCount tracks in-flight iterations
/// via Interlocked increment/decrement pairs.
/// </summary>
private void QueryProcess()
{
    //if (m_dblog)
    //{
    //    m_LogWriterExt =
    //        new StreamWriter(AppDomain.CurrentDomain.BaseDirectory + "DbLogs" + Path.DirectorySeparatorChar
    //            + m_Name + "_" + DateTime.Now.ToString("yyyy-MM-dd") + "_ext.log", true);
    //}
    // No-op callback substituted when a request was queued without one.
    Action<DataSet> null_action = (ds) => { };
    m_Logger.Info("{0} thread start", Thread.CurrentThread.Name);
    while (Status == DbStatus.Running)
    {
        // Count this iteration as "calling" until we know whether work was found.
        Interlocked.Increment(ref m_CallingItemsCount);
        KeyValuePair<DateTime, DBReadItem> item;
        if (m_QueryWorkItems.TryDequeue(out item))
        {
            Action<DataSet> callback = item.Value.CallBack != null ? item.Value.CallBack : null_action;
            DataSet ds = null;
            // Execute the SQL under performance monitoring; query errors are
            // logged here and leave ds null for the callback to handle.
            Performance.Record(
                item.Value.SQL,
                item.Value.GroupName,
                () =>
                {
                    try
                    {
                        ds = m_Provider.Query(m_QueryConnString, item.Value.SQL);
                    }
                    catch (Exception ex)
                    {
                        m_Logger.Error(item.Value.SQL + Environment.NewLine, ex);
                    }
                },
                MonitoringType.DBExcute);
            // Hand the result off, keyed by the request's original enqueue timestamp.
            m_CallBackItems.Enqueue(item.Key, new DBCallBackItem()
            {
                GroupName = item.Value.GroupName,
                CallBack = callback,
                Data = ds,
                Name = item.Value.SQL
            });
            Interlocked.Increment(ref m_QueriedItemsCount);
            Interlocked.Decrement(ref m_CallingItemsCount);
        }
        else
        {
            // Nothing queued: undo the in-flight count and idle briefly.
            Interlocked.Decrement(ref m_CallingItemsCount);
            Thread.Sleep(IDLE_SLEEP_MILISECONDS);
        }
    }
    // Flush and release the extended log writer if one was opened.
    if (m_LogWriterExt != null)
    {
        m_LogWriterExt.Close();
        m_LogWriterExt = null;
    }
    Log.InfoFormat("{0} query thread stop", Thread.CurrentThread.Name);
}
/// <summary>
/// Enqueues 10,000 values (mix of 0, int.MaxValue, and random non-negative),
/// then verifies they dequeue in descending priority order and that every
/// TryPeek agrees with the TryDequeue that immediately follows it.
/// </summary>
public void Priorities()
{
    var q = new ConcurrentPriorityQueue<int>();
    var values = new int[10000];
    var r = new Random(0);
    for (int i = 0; i < values.Length; i++)
    {
        int value;
        switch (r.Next(3))
        {
            case 0:
                value = 0;
                break;
            case 1:
                value = int.MaxValue;
                break;
            default:
                value = 0x7FFFFFFF & r.Next();
                break;
        }
        values[i] = value;
        q.Enqueue(value, value);
    }
    // Expected dequeue order: highest priority first.
    Array.Sort(values);
    Array.Reverse(values);
    int dequeuedPriority0, dequeuedValue0, dequeuedPriority1, dequeuedValue1;
    foreach (int t in values)
    {
        XAssert.IsTrue(q.TryPeek(out dequeuedPriority0, out dequeuedValue0));
        XAssert.IsTrue(q.TryDequeue(out dequeuedPriority1, out dequeuedValue1));
        // FIX: the original compared each variable with itself
        // (AreEqual(dequeuedPriority0, dequeuedPriority0)), a vacuous assertion.
        // The intent is that peek matches the subsequent dequeue.
        XAssert.AreEqual(dequeuedPriority0, dequeuedPriority1);
        XAssert.AreEqual(dequeuedValue0, dequeuedValue1);
        XAssert.AreEqual(t, dequeuedValue1);
    }
    // Once drained, both peek and dequeue must report empty.
    XAssert.IsFalse(q.TryPeek(out dequeuedPriority0, out dequeuedValue0));
    XAssert.IsFalse(q.TryDequeue(out dequeuedPriority1, out dequeuedValue1));
}
/// <summary>
/// Dispatches a single pending DB callback, if one is queued.
/// </summary>
/// <returns><c>true</c> if a callback was executed; otherwise <c>false</c>.</returns>
public static bool CallBack()
{
    KeyValuePair<DateTime, DBCallBackItem> item;
    if (!m_CallBackItems.TryDequeue(out item))
    {
        return(false);
    }
    // Invoke the stored callback with the query result and count the dispatch.
    item.Value.CallBack(item.Value.Data);
    Interlocked.Increment(ref m_CallbackedItemsCount);
    return(true);
}
/// <summary>
/// Manage which chunks are queued for loading with what level apertures.
/// Loop: (1) detect focus movement, (2) enqueue the aperture adjustments that
/// movement produced, (3) pop one queued adjustment and either hand it to the
/// running-jobs set or requeue it with a recomputed priority.
/// Runs until runLevelManagerQueue is cleared.
/// </summary>
void ManageQueue()
{
    while (runLevelManagerQueue)
    {
        /// Step 1: listen for focus changes.
        level.forEachFocus(focus =>
        {
            if (focus.currentChunk != focus.previousChunk)
            {
                /// Step 2: get all the aperture jobs for the focus changes into the queue.
                level.forEachAperture(aperture =>
                {
                    foreach (ChunkResolutionAperture.ApetureChunkAdjustment adjustment in aperture.getAdjustmentsForFocusLocationChange(focus))
                    {
                        apertureJobQueue.Enqueue(getCurrentPriorityForChunk(adjustment.chunkID, aperture, adjustment.type), adjustment);
                    }
                });
                // Callback to record the focus' new chunk location (becomes previousChunk next pass).
                focus.onFocusUpdatedForLevel(focus.currentChunk);
            }
        });
        /// Step 3: Iterate over the next item in the queue and try to schedule a job for it.
        if (apertureJobQueue.TryDequeue(out KeyValuePair<ApertureWorkQueuePriority, ChunkResolutionAperture.ApetureChunkAdjustment> queueItemWithPriority))
        {
            // If the item was cancelled or is invalid, skip it entirely.
            ChunkResolutionAperture apertureForQueueItem = level.getApetureByPriority(queueItemWithPriority.Key.aperturePriority);
            if (!isAValidQueueItem(queueItemWithPriority.Value, apertureForQueueItem))
            {
                continue;
            }
            // If the item is ready and no job is already running on this chunk,
            // offer it up to the running jobs to pick up.
            if (itemIsReady(queueItemWithPriority.Value.chunkID, apertureForQueueItem)
                && !runningJobs.ContainsKey(queueItemWithPriority.Value)
            )
            {
                adjustmentJobsReadyToRun.TryAdd(queueItemWithPriority.Value, apertureForQueueItem);
                World.Debugger.log($"Apeture Job type {apertureForQueueItem.GetType()} ready for {queueItemWithPriority.Value.chunkID}");
                // If it's not ready — or there's a conflict (a job is already running
                // on this chunk) — requeue with a freshly computed priority and wait
                // for the running job to finish.
            }
            else
            {
                apertureJobQueue.Enqueue(
                    getCurrentPriorityForChunk(queueItemWithPriority.Value.chunkID, apertureForQueueItem, queueItemWithPriority.Key.adjustmentType),
                    queueItemWithPriority.Value
                );
            }
        }
    }
}
/// <summary>
/// Get the next message that should be sent.
/// </summary>
/// <param name="msg">Message to send, or <c>null</c> if the queue was empty.</param>
/// <returns><c>true</c> if there was a message to send.</returns>
public bool TryDequeue(out object msg)
{
    KeyValuePair<int, object> kvp;
    if (_queue.TryDequeue(out kvp))
    {
        // Callers only need the payload; the priority key is discarded here.
        msg = kvp.Value;
        return(true);
    }
    msg = null;
    return(false);
}
/// <summary>
/// Drains pending chunk task requests onto the thread pool until the number of
/// active requests reaches concurrencyLevel or the request queue runs dry.
/// </summary>
public void Update()
{
    while (activeRequests.Count < concurrencyLevel)
    {
        ChunkTaskRequest request;
        if (!requests.TryDequeue(out request))
        {
            break;
        }
        // Look up the worker registered for this request's task type and dispatch.
        var task = tasks[request.TaskType];
        ThreadPool.QueueUserWorkItem(task.WaitCallbackMethod, request);
        // NOTE(review): the Count read above is unsynchronized while this write is
        // locked — presumably concurrencyLevel is a soft limit; confirm.
        lock (activeRequests)
        {
            activeRequests[request] = request;
        }
    }
}
/// <summary>
/// Applies up to updateBufferCountPerFrame queued vertex-buffer updates per frame.
/// Each request's vertices are pushed into the mesh segment buffer, marked as
/// consumed, and the builder is returned to the pool once all of its buffers
/// have been applied.
/// </summary>
void ProcessUpdateBufferRequests()
{
    int updateBufferCount = 0;
    UpdateBufferRequest request;
    while (updateBufferCount < updateBufferCountPerFrame && updateBufferRequests.TryDequeue(out request))
    {
        var verticesBuilder = request.VertexBuilder;
        var chunk = verticesBuilder.Chunk;
        // Chunk deactivation is processed on the same thread as this method,
        // so the value the Active property reports is reliable here.
        if (!chunk.Active)
        {
            continue;
        }
        var segment = request.Segment;
        var translucence = request.Translucece;
        // Skip the vertex-buffer update while the chunk manager is closing.
        if (!chunkManager.Closing)
        {
            if (UpdateMeshSegmentBuffer(verticesBuilder, segment.X, segment.Y, segment.Z, translucence))
            {
                // The buffer changed, so update the chunk's node.
                chunk.Node.Update(false);
                updateBufferCount++;
            }
        }
        // Mark this buffer as applied (consumed) even when the update was skipped.
        var vertices = verticesBuilder.GetVertices(segment.X, segment.Y, segment.Z, translucence);
        vertices.Consumed = true;
        // Once every associated buffer has been applied, release the vertex builder.
        if (verticesBuilder.ConsumedAll())
        {
            verticesBuilder.Clear();
            vertexBuilderPool.Return(verticesBuilder);
        }
    }
}
/// <summary>
/// Pairs queued vertex-build requests with pooled vertex builders and
/// dispatches each pair to the thread pool. Stops when either the builder
/// pool or the request queue runs dry; an unused borrowed builder is
/// returned to the pool before exiting.
/// </summary>
void ProcessBuildVertexRequests()
{
    for (;;)
    {
        ChunkVertexBuilder vertexBuilder;
        if (!vertexBuilderPool.TryBorrow(out vertexBuilder))
        {
            break;
        }
        BuildVertexRequest request;
        if (!buildVertexRequests.TryDequeue(out request))
        {
            // No work for this builder after all: give it back.
            vertexBuilderPool.Return(vertexBuilder);
            break;
        }
        request.AttachVertexBuilder(vertexBuilder);
        ThreadPool.QueueUserWorkItem(buildVertexTask.WaitCallbackMethod, request);
    }
}
/// <summary>
/// ConcurrentPriorityQueue sanity check. Pattern: priority queue,
/// implemented as a min-heap — the smallest key dequeues first.
/// </summary>
public void Test_CoordinationDataStructures_ConcurrentPriorityQueue()
{
    var pQ = new ConcurrentPriorityQueue<int, string>();
    int[] keys = { 0, 1, 2, 3, 4, -1 };
    string[] labels = { "a0", "a1", "a2", "a3", "a4", "-a1" };
    for (int i = 0; i < keys.Length; i++)
    {
        pQ.Enqueue(keys[i], labels[i]);
    }
    // The entry with the smallest key (-1) must come out first.
    KeyValuePair<int, string> kv;
    pQ.TryDequeue(out kv);
    Assert.AreEqual("-a1", kv.Value);
}
/// <summary>
/// Main state-machine thread loop. Each pass advances this machine and every
/// station's machine, then drains and executes all queued user events, then
/// sleeps 16 ms. Keeps looping until _isrunning is cleared AND the event
/// queue is empty.
/// </summary>
private void runStateMachineLoop()
{
    while (_isrunning || priorityQueueEvents.Count > 0)
    {
        // Step this controller's state machine, then each station's.
        runRunStateMachine();
        foreach (var station in Stations)
        {
            station.Value.runRunStateMachine();
        }

        // Drain every queued user event.
        while (priorityQueueEvents.Count > 0)
        {
            KeyValuePair<int, UserEvent> queuedEvent;
            if (priorityQueueEvents.TryDequeue(out queuedEvent))
            {
                queuedEvent.Value.Execute();
            }
        }

        // Fixed tick interval between passes.
        Thread.Sleep(16);
    }
}
/// <summary>
/// Solves the spatial mHG problem in 3D. Normalizes the input coordinates,
/// builds one perpendicular-bisector plane per differently-labelled point pair,
/// then runs the configured search strategy:
/// - Search_EmpricalSampling: samples pivots from an empirical density grid in
///   parallel and keeps the top-K results.
/// - Search_CellSkipping: projects each plane's sub-problem to 2D (locally or
///   via Azure Batch), solves it, and combines the significant 3D results.
/// </summary>
/// <param name="input">Points as (x, y, z, label) tuples.</param>
/// <param name="runViaAzure">When true, the cell-skipping branch offloads the 2D sub-problems to Azure Batch.</param>
/// <returns>Top results for the executed strategy, or null if no strategy is enabled in Config.ActionList.</returns>
public List <ISpatialmHGResult> SpatialmHGWrapper3D(List <Tuple <double, double, double, bool> > input, bool runViaAzure = false)
{
    var coordinates = input.Select(c => (ICoordinate) new Coordinate3D(c.Item1, c.Item2, c.Item3)).ToList();
    Normalizer nrm = new Normalizer(coordinates);
    var normcoords = nrm.Normalize(coordinates).Select(c => (Coordinate3D)c).ToList();
    var labels = input.Select(c => c.Item4).ToList();
    InitializeMHG(labels);
    int idx = -1;
    var solutions = new ConcurrentPriorityQueue <double, SpatialmHGResult3D>();
    var planeList = new ConcurrentPriorityQueue <double, Plane>(); //minheap based, smaller is better
    //Foreach perpendicular bisector plane between differently-labelled points
    for (var i = 0; i < coordinates.Count; i++)
    {
        for (var j = 0; j < coordinates.Count; j++)
        {
            if (labels[i] != labels[j])
            {
                //Reduce to 2D problem
                var plane = Plane.Bisector(normcoords[i], normcoords[j]);
                planeList.Enqueue(1.0, plane);
            }
        }
    }
    var numPlanes = planeList.Count();
    if ((Config.ActionList & Actions.Search_EmpricalSampling) != 0)
    {
        var problem = normcoords.Zip(labels, (a, b) => new Tuple <ICoordinate, bool>(a, b)).ToList();
        var gr = new Gridding();
        // Upper bound on the number of distinct cells the planes induce.
        var problemSize = MathExtensions.Binomial(numPlanes, 3) + MathExtensions.Binomial(numPlanes, 2) + numPlanes + 1;
        gr.GenerateEmpricialDensityGrid((long)Math.Min(problemSize, 100000), problem);
        var results = new ConcurrentPriorityQueue <double, ISpatialmHGResult>();
        Parallel.ForEach(gr.GetPivots(), pivot =>
        {
            // Rank points by distance from the pivot and score the induced binary vector.
            var binvec = problem.OrderBy(c => c.Item1.EuclideanDistance(pivot)).Select(c => c.Item2).ToArray();
            var res = mHGJumper.minimumHypergeometric(binvec);
            results.Enqueue(res.Item1, new SpatialmHGResult3D(res.Item1, res.Item2, (Coordinate3D)pivot));
            // Keep only the top-K results.
            while (results.Count > Config.GetTopKResults)
            {
                results.TryDequeue(out var junk);
            }
        });
        return(results.Select(v => v.Value).ToList());
    }
    if ((Config.ActionList & Actions.Search_CellSkipping) != 0)
    {
        if (runViaAzure)
        {
            var fileList = new List <string>();
            // Clear stale artifacts from previous runs.
            foreach (var file in Directory.EnumerateFiles(@"3D\Planes\"))
            {
                File.Delete(file);
            }
            foreach (var file in Directory.EnumerateFiles(@"3D\2dProblems\"))
            {
                File.Delete(file);
            }
            var asList = planeList.ToList();
            Parallel.ForEach(asList, currPlane =>
            {
                var currIdx = Interlocked.Increment(ref idx);
                Console.Write($"\r\r\r\r\r\rGenerating 2D projection {currIdx}/{numPlanes}.");
                var plane = currPlane.Value;
                if (StaticConfigParams.WriteToCSV)
                {
                    Config.Log.WriteLine("Selected plane {0}/{1} at distance {2}", currIdx, numPlanes, currPlane.Key);
                }
                var subProblemIn2D = plane.ProjectOntoAndRotate(normcoords, out PrincipalComponentAnalysis pca);
                pca.NumberOfOutputs = 3; //project back to 3D
                pca.Save($@"3D\PCA\pca{currIdx}.bin");
                Generics.SaveToCSV(plane, $@"3D\Planes\plane{currIdx}.csv", true);
                Generics.SaveToCSV(subProblemIn2D.Zip(labels, (c, l) => c.ToString() + "," + l), $@"3D\2dProblems\coords{currIdx}.csv", true);
                // FIX: List<T> is not thread-safe; the original called fileList.Add
                // concurrently from Parallel.ForEach, which can corrupt the list or
                // drop entries. Serialize the mutation.
                lock (fileList)
                {
                    fileList.Add($@"3D\2dProblems\coords{currIdx}.csv");
                }
            });
            Console.WriteLine(@"Done. Initializing Batch pool.");
            AzureBatchExecution.MainAsync(fileList).Wait();
        }
        else
        {
            while (planeList.TryDequeue(out var currPlane))
            {
                var plane = currPlane.Value;
                idx++;
                if (StaticConfigParams.WriteToCSV)
                {
                    Generics.SaveToCSV(plane, $@"Planes\plane{idx}.csv", true);
                }
                Config.Log.WriteLine("Selected plane {0}/{1} at distance {2}", idx, numPlanes, currPlane.Key);
                var subProblemIn2D = plane.ProjectOntoAndRotate(normcoords, out PrincipalComponentAnalysis pca);
                pca.NumberOfOutputs = 3; //project back to 3D
                //Solve 2D problem
                StaticConfigParams.filenamesuffix = idx.ToString();
                var res = Solve2DProblem(subProblemIn2D, labels, normcoords, pca);
                foreach (var mHGresult2D in res)
                {
                    var projectedResult = new SpatialmHGResult3D(mHGresult2D, pca, idx);
                    solutions.Enqueue(projectedResult.pvalue, projectedResult);
                }
                // Re-prioritize the remaining planes by distance from the best cell so far.
                solutions.TryPeek(out var bestCell);
                var bestCellCenter = bestCell.Value.GetCenter();
                var remainingPlanes = planeList.Select(t => t.Value).ToList();
                planeList.Clear();
                foreach (var p in remainingPlanes)
                {
                    planeList.Enqueue(bestCellCenter.DistanceToPlane(p), p);
                }
            }
        }
        //Combine 2D solutions: keep up to top-K significant results, denormalized
        //back into the original coordinate space.
        var combinedResultsNaive = new List <SpatialmHGResult3D>();
        for (var i = 0; i < Config.GetTopKResults; i++)
        {
            KeyValuePair <double, SpatialmHGResult3D> bestCell;
            solutions.TryDequeue(out bestCell);
            if (bestCell.Key <= Config.SIGNIFICANCE_THRESHOLD)
            {
                bestCell.Value.Denormalize(nrm);
                combinedResultsNaive.Add(bestCell.Value);
            }
            else
            {
                // Results come out in ascending p-value order, so we can stop early.
                break;
            }
        }
        Config.Log.updater?.Wait();
        return(combinedResultsNaive.Cast <ISpatialmHGResult>().ToList());
    }
    return(null);
}
/// <summary>
/// Drains _orderingBufferQueue in strict ordering-index order: a packet is handled
/// only when its key is exactly _lastOrderingIndex + 1. Duplicates are returned to
/// the pool; gaps make the loop signal the handled-event and wait for more packets.
/// Runs until cancellation, or indefinitely skipped for emulator connections.
/// </summary>
private void ProcessOrderedQueue()
{
    try
    {
        while (!_cancellationToken.IsCancellationRequested && !ConnectionInfo.IsEmulator)
        {
            if (_orderingBufferQueue.TryPeek(out KeyValuePair<int, Packet> pair))
            {
                if (pair.Key == _lastOrderingIndex + 1)
                {
                    // Next expected packet: dequeue, advance the index, and dispatch.
                    if (_orderingBufferQueue.TryDequeue(out pair))
                    {
                        //Log.Debug($"Handling packet ordering index={pair.Value.ReliabilityHeader.OrderingIndex}. Current index={_lastOrderingIndex}");
                        _lastOrderingIndex = pair.Key;
                        HandlePacket(pair.Value);
                        if (_orderingBufferQueue.Count == 0)
                        {
                            // Queue drained: signal "handled" and wait up to 500ms for the next packet.
                            WaitHandle.SignalAndWait(_packetHandledWaitEvent, _packetQueuedWaitEvent, 500, true);
                        }
                    }
                }
                else if (pair.Key <= _lastOrderingIndex)
                {
                    // Already-handled ordering index (resend): drop and return to the pool.
                    if (Log.IsDebugEnabled)
                    {
                        Log.Debug($"{Username} - Resent. Expected {_lastOrderingIndex + 1}, but was {pair.Key}.");
                    }
                    if (_orderingBufferQueue.TryDequeue(out pair))
                    {
                        pair.Value.PutPool();
                    }
                }
                else
                {
                    // Gap in the ordering: leave the packet queued and wait for the missing one.
                    if (Log.IsDebugEnabled)
                    {
                        Log.Debug($"{Username} - Wrong sequence. Expected {_lastOrderingIndex + 1}, but was {pair.Key}.");
                    }
                    WaitHandle.SignalAndWait(_packetHandledWaitEvent, _packetQueuedWaitEvent, 500, true);
                }
            }
            else
            {
                if (_orderingBufferQueue.Count == 0)
                {
                    // Nothing queued at all: idle until signalled or timeout.
                    WaitHandle.SignalAndWait(_packetHandledWaitEvent, _packetQueuedWaitEvent, 500, true);
                }
            }
        }
    }
    catch (ObjectDisposedException)
    {
        // Ignore. Comes from the reset events being waited on while being disposed. Not a problem.
    }
    catch (Exception e)
    {
        Log.Error($"Exit receive handler task for player", e);
    }
}