/// <summary>
/// Wraps <paramref name="action"/> in a new QueueEntry and enqueues it.
/// LastStart is seeded with a fixed sentinel date so the entry reads as
/// "not recently started" (NOTE(review): confirm the sentinel's meaning
/// against the scheduler that consumes LastStart).
/// </summary>
public static void Add(Action action)
{
    var entry = new QueueEntry
    {
        Action = action,
        LastStart = new DateTime(1983, 11, 15),
    };
    queue.Enqueue(entry);
}
/// <summary>
/// Returns true when <paramref name="entry"/> is the first available entry at
/// or past this subscription's cursor, i.e. the one this subscription would
/// deliver next. As a side effect, advances the persistent <c>lastEntry</c>
/// cursor past entries that are no longer available.
/// </summary>
public bool WillDeliver(QueueEntry entry)
{
    lock (syncRoot)
    {
        // TODO is the queue still active?
        var lastEntrySeen = lastEntry;
        while (lastEntrySeen != null && !lastEntrySeen.IsAvailable) // TODO filter out entries that this subscription doesn't care about
        {
            var nextEntry = lastEntrySeen.GetNextValidEntry(); // loop until we get to an available entry to deliver
            if (nextEntry != null)
            {
                // Advance both the loop cursor and the persistent cursor so the
                // skip-over of unavailable entries is remembered across calls.
                lastEntrySeen = lastEntry = nextEntry;
            }
            else
            {
                // Ran off the end of the queue: nothing deliverable.
                lastEntrySeen = null;
            }
        }
        if (lastEntrySeen == entry)
        {
            // If the first entry that subscription can process is the one we are trying to deliver to it, then we are
            // good
            return true;
        }
        else
        {
            return false;
        }
    }
}
/// <summary>
/// Lock-free enqueue in the style of the Michael–Scott queue: link the new
/// node after the current tail with a CAS, then swing <c>tail</c> forward.
/// Returns the newly inserted entry.
/// </summary>
public QueueEntry Enqueue(object item)
{
    // New entries start AVAILABLE; SeqNum is assigned just before linking.
    var n = new QueueEntry(item, DateTime.UtcNow, uint.MaxValue, 0, QueueEntryStateEnum.AVAILABLE);
    while (true)
    {
        // p = previous node
        var p = tail;
        var p_next = p.Next;
        if (ReferenceEquals(p, tail)) // Are tail and next consistent?
        {
            if (p_next == null) // Was Tail pointing to the last node?
            {
                n.SeqNum = p.SeqNum + 1;
                if (p.CompareAndSwapNext(n, p_next)) // compare and swap, returns if successful
                {
                    // Enqueue is done. Try to swing Tail to the inserted node
                    // NOTE(review): in the classic Michael–Scott algorithm this
                    // CAS may legitimately fail when another thread helps swing
                    // the tail; treating failure as fatal assumes no helping
                    // path exists elsewhere — verify against the dequeue side.
                    if (Interlocked.CompareExchange(ref tail, n, p) != p)
                        System.Diagnostics.Debug.Fail("CAS(tail, p, n) failed!");
                    return n; // exit loop
                }
            }
            else
            {
                // Tail was not pointing to the last node
                // Try to swing Tail to the next node
                Interlocked.CompareExchange(ref tail, p_next, p);
            }
        }
    }
}
/// <summary>
/// Initializes the list with a single ARCHIVED stub node serving as both head
/// and tail — the classic sentinel for a linked concurrent queue. SeqNum 0 is
/// assigned so real entries (which take predecessor SeqNum + 1) start at 1.
/// </summary>
public ConcurrentQueueEntryList()
{
    head = tail = new QueueEntry(null, DateTime.MinValue, uint.MinValue, 0, QueueEntryStateEnum.ARCHIVED)
    {
        SeqNum = 0,
    };
}
/// <summary>
/// Attempts to deliver the next available entry after <paramref name="head"/>:
/// walks forward until an available entry can be acquired, then hands it to
/// <see cref="OnMessageAquired"/>. No-op when there is no link credit or
/// nothing acquirable.
/// </summary>
public void TryDelivery(QueueEntry head)
{
    // TODO: check link credit to determine if we can deliver
    if (!HasCreditToDeliver())
        return;

    lock (syncRoot)
    {
        var next = head.GetNextValidEntry();
        // Loop until we get an available entry that we can acquire, or null.
        while (next != null)
        {
            if (next.IsAvailable)
            {
                // TODO: FILTER out entries we don't care about
                if (queue.TryAcquire(next, this))
                    break; // acquired — deliver below
                // if not acquired, we'll try again with the next one
            }
            next = next.GetNextValidEntry();
        }

        // Fix: the original had "if (next == null) return;" immediately
        // followed by a redundant "if (next != null)" guard; one check suffices.
        if (next == null)
            return; // nothing to deliver

        // message acquired and ready to be delivered
        OnMessageAquired(next);
    }
}
/// <summary>
/// Encodes the acquired entry's annotated message into a byte buffer and
/// sends it on the link with a freshly generated delivery tag.
/// </summary>
protected override void OnMessageAquired(QueueEntry next)
{
    var annotated = (AnnotatedMessage)next.Item;
    var buffer = new ByteBuffer(AnnotatedMessage.GetEstimatedMessageSize(annotated), false);
    AnnotatedMessage.Encode(annotated, buffer);
    byte[] tag = Guid.NewGuid().ToByteArray();
    link.SendTransfer(tag, buffer);
}
/// <summary>
/// Dumps a single work-unit queue entry to the console in a multi-line,
/// human-readable report (index/status, server, project, timestamps, core,
/// CPU/OS, user info, file sizes, etc.).
/// </summary>
/// <param name="e">The queue entry to print.</param>
/// <param name="arguments">Parsed command-line arguments controlling verbosity.</param>
private static void WriteEntry(QueueEntry e, IEnumerable<Argument> arguments)
{
    // Fix: "x != null ? true : false" simplified to "x != null".
    bool printProjectString = arguments.FirstOrDefault(a => a.Type == ArgumentType.PrintProjectString) != null;
    bool showAll = arguments.FirstOrDefault(a => a.Type == ArgumentType.ShowAll) != null;

    Console.Write(" Index {0}: {1}", e.Index, e.EntryStatusLiteral);
    if (e.SpeedFactor == 0)
    {
        Console.WriteLine();
    }
    else
    {
        Console.WriteLine(" {0} X min speed", e.SpeedFactor);
    }

    Console.WriteLine("    server: {0}:{1}; project: {2}", e.ServerIP, e.ServerPort, e.ProjectID);
    string misc4aEndian = e.Misc4aBigEndian ? "be" : "le";
    Console.WriteLine("    Folding: run {0}, clone {1}, generation {2}; benchmark {3}; misc: {4}, {5}, {6} ({7})",
        e.ProjectRun, e.ProjectClone, e.ProjectGen, e.Benchmark, e.Misc1a, e.Misc1b, e.Misc4a, misc4aEndian);
    if (printProjectString)
    {
        Console.WriteLine("    Project: {0} (Run {1}, Clone {2}, Gen {3})", e.ProjectID, e.ProjectRun, e.ProjectClone, e.ProjectGen);
    }
    Console.WriteLine("    issue: {0:ddd MMM dd HH:mm:ss yyyy}; begin: {1:ddd MMM dd HH:mm:ss yyyy}",
        e.ProjectIssuedLocal, e.BeginTimeLocal);

    Console.Write("    ");
    // NOTE(review): 3 and 7 are entry-status codes whose meaning ("finished"?)
    // should be confirmed against EntryStatusLiteral's mapping.
    if (e.EntryStatus == 3 || e.EntryStatus == 7)
    {
        Console.Write("end: {0:ddd MMM dd HH:mm:ss yyyy}; ", e.EndTimeLocal);
    }
    TimeSpan preferred = e.DueTimeLocal.Subtract(e.BeginTimeLocal);
    Console.WriteLine("due: {0:ddd MMM dd HH:mm:ss yyyy} ({1} days)", e.DueTimeLocal, Math.Ceiling(preferred.TotalDays));

    Console.WriteLine("    core URL: {0}", e.CoreDownloadUrl);
    Console.Write("    core number: 0x{0}", e.CoreNumber);
    Console.WriteLine("; core name: {0}", e.CoreName);
    Console.WriteLine("    CPU: {0},{1} {2}; OS: {3},{4} {5}", e.CpuType, e.CpuSpecies, e.CpuString, e.OsType, e.OsSpecies, e.OsString);
    Console.WriteLine("    smp cores: {0}; cores to use: {1}", e.NumberOfSmpCores, e.UseCores);
    Console.WriteLine("    tag: {0}", e.WorkUnitTag);
    if (e.Passkey.Length != 0 && showAll)
    {
        Console.WriteLine("    passkey: {0}", e.Passkey);
    }
    // Fix: this string literal was split across a physical newline in the
    // original source, which is invalid C#; rejoined onto one line.
    Console.WriteLine("    flops: {0} ({1:0.000000} megaflops)", e.Flops, e.MegaFlops);
    Console.WriteLine("    memory: {0} MB", e.Memory);
    Console.WriteLine("    client type: {0} {1}", e.RequiredClientType, e.RequiredClientTypeLiteral);

    string assignmentInfoEndian = e.AssignmentInfoBigEndian ? "be" : "le";
    if (e.AssignmentInfoPresent)
    {
        Console.WriteLine("    assignment info ({0}): {1:ddd MMM dd HH:mm:ss yyyy}; {2}",
            assignmentInfoEndian, e.AssignmentTimeStampLocal, e.AssignmentInfoChecksum);
    }
    Console.Write("    CS: {0}; ", e.CollectionServerIP);
    if (e.NumberOfUploadFailures != 0)
    {
        Console.Write("upload failures: {0}; ", e.NumberOfUploadFailures);
    }
    Console.WriteLine("P limit: {0}", e.PacketSizeLimit);
    Console.WriteLine("    user: {0}; team: {1}; ID: {2}; mach ID: {3}", e.FoldingID, e.Team, e.ID, e.MachineID);
    Console.WriteLine("    work/wudata_{0:00}.dat file size: {1}; WU type: {2}", e.Index, e.WuDataFileSize, e.WorkUnitType);
}
/// <summary>
/// Moves the binding source's cursor to the row whose QueueEntryID matches
/// the given customer, so the bound view's selection tracks the customer
/// being serviced. Leaves the position unchanged when no row matches.
/// </summary>
public void SelectServicedCustomer(QueueEntry customer)
{
    for (int i = 0; i < _queueBindingSource.List.Count; i++)
    {
        // Fix: the original used an unchecked "as" cast and would throw a
        // NullReferenceException if the list ever held a non-QueueEntry item;
        // the pattern match skips such items safely.
        if (_queueBindingSource.List[i] is QueueEntry entry
            && entry.QueueEntryID == customer.QueueEntryID)
        {
            _queueBindingSource.Position = i;
            break;
        }
    }
}
/// <summary>
/// Round-trip test: serialize a full test entry to JSON bytes, rebuild a
/// QueueEntry from those bytes, and verify both the tag and the deserialized
/// payload survive intact.
/// </summary>
public void CreateJsonFromBytes()
{
    TestEntryFull expected = TestEntryFull.Create();
    var payload = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(expected));

    QueueEntry rebuilt = QueueEntry.FromBytes(payload);
    var actual = JsonConvert.DeserializeObject<TestEntryFull>(rebuilt.Data);

    Assert.AreEqual(expected.Tag, rebuilt.Tag);
    Assert.IsTrue(expected.Equals(actual));
}
/// <summary>
/// Queues column generation for <paramref name="location"/>. If a column is
/// already tracked there, the requester is merely subscribed to it; otherwise
/// a new entry is registered and the location is pushed onto the generation
/// queue.
/// </summary>
public void QueueGeneration(Vector3Int location, Region region, LOD_Mode mode, User requester, Action<QueueEntry, Column> callback, object Meta)
{
    // Already generating/generated here — just attach this requester.
    if (Columns.ContainsKey(location))
    {
        Subscribe(location, requester, mode);
        return;
    }

    var pending = new QueueEntry(location, region, mode, requester, callback, Meta);
    Columns[location] = pending;
    genQueue.Enqueue(location);
}
/// <summary>
/// Constructs the presenter with its injected collaborators: the queue entry
/// being presented plus the account, alert, and queue services. The
/// attributes indicate constructor injection by the hosting container
/// (NOTE(review): presumably CAB/ObjectBuilder — confirm).
/// </summary>
public CustomerSummaryViewPresenter (
    [ComponentDependency("QueueEntry")] QueueEntry queueEntry,
    [ServiceDependency] ICustomerAccountService accountService,
    [ServiceDependency] ICustomerAlertService alertService,
    [ServiceDependency] ICustomerQueueService queueService )
{
    _queueEntry = queueEntry;
    _accountService = accountService;
    _alertService = alertService;
    _queueService = queueService;
}
/// <summary>
/// Constructs the presenter with its injected collaborators: the queue entry
/// being presented plus the quote, customer-account, and account services.
/// The attributes indicate constructor injection by the hosting container
/// (NOTE(review): presumably CAB/ObjectBuilder — confirm).
/// </summary>
public PurchaseCDViewPresenter (
    [ComponentDependency("QueueEntry")] QueueEntry queueEntry,
    [ServiceDependency] IQuoteService quoteService,
    [ServiceDependency] ICustomerAccountService customerAccountsService,
    [ServiceDependency] IAccountService accountService )
{
    _queueEntry = queueEntry;
    _quoteService = quoteService;
    _customerAccountsService = customerAccountsService;
    _accountService = accountService;
}
/// <summary>
/// Seeds the scheduler with <paramref name="targets"/> (BFS pulls their
/// ancestors into the priority queue) and then drains the queue in cost
/// order, scheduling each minimum-cost node and propagating cost updates to
/// its successors. Stops early when the cost threshold is hit or the
/// StopScheduling callback fires.
/// </summary>
public void AddRange(IEnumerable <Node> targets)
{
    // bfs will add all ancestors to the queue.
    bfs.SearchFrom(targets);
    done = false;
    while (queue.Count > 0)
    {
        QueueEntry entry = queue[0];
        // make sure the cost of this entry is up-to-date (inefficient, but safe)
        UpdateEntry(entry);
        // Only act when the refreshed entry is still the queue minimum.
        if (entry.QueuePosition == 0)
        {
            if (ApplyThreshold && (entry.Cost.CompareTo(Threshold) >= 0))
            {
                return;
            }
            queue.ExtractMinimum();
            Node node = entry.Node;
            //Console.WriteLine("scheduling " + node);
            if (addToSchedule != null)
            {
                if (!addToSchedule(node))
                {
                    // put back on the queue
                    queue.Add(entry);
                    continue;
                }
            }
            // Track the largest cost that made it into the schedule.
            if (entry.Cost.CompareTo(MaxScheduledCost) > 0)
            {
                MaxScheduledCost = entry.Cost;
            }
            IsScheduled[node] = true;
            if (StopScheduling != null && StopScheduling(node))
            {
                done = true;
                return;
            }
            //Console.WriteLine("updating targets:");
            // Scheduling this node may change the cost of its unscheduled successors.
            foreach (Node target in successors(node))
            {
                if (!IsScheduled[target])
                {
                    UpdateCost(target);
                }
            }
            //Console.WriteLine("done with targets");
        }
    }
    done = true;
}
/// <summary>
/// Builds the input side of the dataflow network. When batching is enabled
/// (InputBatchSize &gt; 1) the public Target becomes a batch block feeding an
/// action block that transforms each batch under the queue-entry lock;
/// completion is chained batch block → action block → Source. Without
/// batching, the plain target created by CreateTarget completes the Source
/// directly.
/// </summary>
private void InitializeTarget()
{
    CreateTarget();
    if (InputBatchSize > 1)
    {
        //Setup batched target
        var batchBlock = Network.CreateBatchBlock <TInput, TInputMsg>(InputBatchSize);
        var batchedMessagesToSingleMsg = new ActionBlock <TInputMsg[]>(msgs =>
        {
            if (msgs == null)
            {
                return;
            }
            var msgsEntry = new QueueEntry <TOutput, TOutputMsg>();
            // Populate and transform under the entry's lock so output ordering
            // stays consistent with ProcessOutputQueueEntries below.
            lock (msgsEntry.SyncObj)
            {
                AddOutputMessages(msgsEntry);
                HandleMessages(msgsEntry, msgs);
            }
            ProcessOutputQueueEntries();
        });
        batchBlock.Completion.ContinueWith(task =>
        {
            //Now complete the single transformation
            batchedMessagesToSingleMsg.Complete();
        });
        batchedMessagesToSingleMsg.Completion.ContinueWith(task =>
        {
            //Now complete the output source
            Source.Complete();
        });
        //Link the batch block - to the output generating action
        Network.Link(batchBlock, batchedMessagesToSingleMsg);
        //The new target is the batch block
        Target = batchBlock;
    }
    else
    {
        Target.Completion.ContinueWith(task =>
        {
            OnTargetCompletion();
            //Now complete the output source
            Source.Complete();
        });
    }
}
/// <summary>
/// Queues an icon download for the given game unless an identical entry
/// already exists (overridable via <paramref name="ignoreThatSameEntryExists"/>).
/// Returns the shared download engine either way.
/// </summary>
public DownloadEngine DownloadIcon(long gameID, string token, string outputPath, Action<DownloadEngine.DownloadStatus, long, DownloadTypes> completeFunction = null, bool ignoreThatSameEntryExists = false, bool highRes = false)
{
    bool duplicate = FindEntry(gameID, outputPath) != null;
    if (duplicate && !ignoreThatSameEntryExists)
    {
        Debug.LogWarning("Download entry already exist, returning");
        return downloadEngine;
    }

    queue.Add(new QueueEntry(gameID, outputPath, token, completeFunction, DownloadTypes.icon, highRes));
    return downloadEngine;
}
/// <summary>
/// Adds a new item to the queue by sending an AddItem command over the
/// duplex channel.
/// </summary>
/// <param name="item">Entry to enqueue; rejected when null or empty.</param>
/// <returns>The resulting <see cref="QueueState"/> reported by the server.</returns>
public QueueState AddItem(QueueEntry item)
{
    // Guard: nothing to send.
    if (item == null || item.IsEmpty)
        return QueueState.ArgumentsError;

    using (var message = new QueueMessage()
    {
        Command = QueueCmd.AddItem,
        Key = item.Key,
        BodyStream = item.BodyStream
    })
    {
        var state = SendDuplex<int>(message);
        return (QueueState)state;
    }
}
/// <summary>
/// Pops the minimum (highest-priority) request after pruning the queue.
/// Returns false with a default-constructed entry when the queue is empty.
/// </summary>
bool Scheduler.GetNextRequest(out QueueEntry queueEntry)
{
    // Empty queue: hand back a placeholder.
    if (requestQueue.Count == 0)
    {
        queueEntry = new QueueEntry();
        return false;
    }

    SortAndPrune();
    queueEntry = requestQueue.Min;
    requestQueue.Remove(queueEntry);
    return true;
}
/// <summary>
/// Loads a persisted queue entry identified by queue name and correlation id
/// from the on-disk queue directory.
/// </summary>
/// <exception cref="KeyNotFoundException">The entry file does not exist.</exception>
public DispatcherQueueEntry GetQueueEntry(string queueName, string correlationId)
{
    var filePath = Path.Combine(this.m_configuration.QueuePath, queueName, correlationId);
    if (!File.Exists(filePath))
    {
        throw new KeyNotFoundException($"{queueName}\\{correlationId} doesn't exist");
    }

    using (var fs = File.OpenRead(filePath))
    {
        var entry = QueueEntry.Load(fs);
        return new DispatcherQueueEntry(correlationId, queueName, entry.CreationTime, entry.Type, entry.XmlData);
    }
}
/// <summary>
/// Returns true when the oldest queued item has aged at least
/// <c>ticksUntilEligible</c> ticks since insertion; false when the queue is
/// empty. Thread-safe via the items lock.
/// NOTE(review): uses DateTime.Now — correct only if insertionTimeInTicks was
/// also captured with local time; a DST/clock change can skew eligibility.
/// Confirm before switching to UtcNow.
/// </summary>
public bool HasReadyItems()
{
    lock (items)
    {
        if (items.Count == 0)
        {
            return(false);
        }
        // Only the head needs checking: items are assumed FIFO by insertion time.
        QueueEntry item = items.Peek();
        return((DateTime.Now.Ticks - item.insertionTimeInTicks) >= ticksUntilEligible);
    }
}
/// <summary>
/// Removes and returns the minimum-key value from the backing red-black tree,
/// recycling the freed node for reuse by AllocEntry. Returns null when empty.
/// </summary>
public object Pop()
{
    var minNode = (QueueEntry)_tree.GetMinimumNode();
    if (minNode == null)
        return null;

    var value = minNode.Value;
    _tree.RB_Delete(minNode);
    _freeNode = minNode; // cache for reuse to avoid an allocation
    return value;
}
/// <summary>
/// Returns the cached free node if one exists (clearing the cache),
/// otherwise allocates a fresh QueueEntry.
/// </summary>
private QueueEntry AllocEntry()
{
    var entry = _freeNode ?? new QueueEntry();
    _freeNode = null; // cache is single-slot; always consumed here
    return entry;
}
/// <summary>
/// Assembles a UnitInfo from the various data sources of a client log parse:
/// queue data, client run, unit run, and unit-info log data. Returns null
/// when the unit run is missing (unless <paramref name="matchOverride"/>) or
/// when project identifiers disagree between sources.
/// </summary>
private static UnitInfo BuildUnitInfo(QueueEntry queueEntry, ClientRun clientRun, UnitRun unitRun, UnitInfoLogData unitInfoLogData, bool matchOverride = false)
{
    // queueEntry can be null
    Debug.Assert(clientRun != null);
    // unitInfoLogData can be null

    var unit = new UnitInfo();
    UnitRunData unitRunData;
    if (unitRun == null)
    {
        // No unit run: only proceed with empty run data when explicitly overridden.
        if (matchOverride)
        {
            unitRunData = new UnitRunData();
        }
        else
        {
            return(null);
        }
    }
    else
    {
        unit.LogLines = unitRun.ToList();
        unitRunData = unitRun.Data;
    }
    unit.UnitStartTimeStamp = unitRunData.UnitStartTimeStamp ?? TimeSpan.MinValue;
    unit.FramesObserved = unitRunData.FramesObserved;
    unit.CoreVersion = unitRunData.CoreVersion;
    unit.UnitResult = unitRunData.WorkUnitResult;
    if (queueEntry != null)
    {
        // Queue data is the richer source; fold it in first, then cross-check
        // the project identity against the log-derived data.
        UpdateUnitInfoFromQueueData(unit, queueEntry);
        SearchFahLogUnitDataProjects(unit, unitRunData);
        UpdateUnitInfoFromLogData(unit, clientRun.Data, unitRunData, unitInfoLogData);
        // Mismatched projects between queue and log invalidate the unit unless overridden.
        if (!ProjectsMatch(unit, unitRunData) && !ProjectsMatch(unit, unitInfoLogData) && !matchOverride)
        {
            return(null);
        }
    }
    else
    {
        UpdateUnitInfoFromLogData(unit, clientRun.Data, unitRunData, unitInfoLogData);
    }
    return(unit);
}
/// <summary>
/// Job loop: dequeues EventUserDescription work items up to the context's
/// work-item limit (-1 = unbounded) and processes each, abandoning the queue
/// entry on failure. Returns a failure result on unexpected dequeue or
/// processing errors; missing-document errors are logged and skipped.
/// </summary>
protected async override Task <JobResult> RunInternalAsync()
{
    Log.Info().Message("Process user description job starting").Write();
    int totalUserDescriptionsProcessed = 0;
    int totalUserDescriptionsToProcess = Context.GetWorkItemLimit();
    while (!CancelPending && (totalUserDescriptionsToProcess == -1 || totalUserDescriptionsProcessed < totalUserDescriptionsToProcess))
    {
        QueueEntry <EventUserDescription> queueEntry = null;
        try
        {
            queueEntry = await _queue.DequeueAsync();
        }
        catch (Exception ex)
        {
            // Timeouts just mean "nothing available yet"; anything else is fatal.
            if (!(ex is TimeoutException))
            {
                Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventUserDescription: {0}", ex.Message).Write();
                return(JobResult.FromException(ex));
            }
        }
        if (queueEntry == null)
        {
            continue;
        }
        _statsClient.Counter(StatNames.EventsUserDescriptionDequeued);
        Log.Info().Message("Processing EventUserDescription '{0}'.", queueEntry.Id).Write();
        try
        {
            ProcessUserDescription(queueEntry.Value);
            totalUserDescriptionsProcessed++;
            _statsClient.Counter(StatNames.EventsUserDescriptionProcessed);
        }
        catch (DocumentNotFoundException ex)
        {
            _statsClient.Counter(StatNames.EventsUserDescriptionErrors);
            // Fix: was AbandonAsync().Wait() — blocking on a task inside an
            // async method risks thread-pool starvation/deadlock; await instead.
            await queueEntry.AbandonAsync();
            Log.Error().Exception(ex).Message("An event with this reference id \"{0}\" has not been processed yet or was deleted. Queue Id: {1}", ex.Id, queueEntry.Id).Write();
            continue;
        }
        catch (Exception ex)
        {
            _statsClient.Counter(StatNames.EventsUserDescriptionErrors);
            // Fix: was AbandonAsync().Wait() — see above.
            await queueEntry.AbandonAsync();
            // TODO: Add the EventUserDescription to the logged exception.
            Log.Error().Exception(ex).Message("An error occurred while processing the EventUserDescription '{0}': {1}", queueEntry.Id, ex.Message).Write();
            return(JobResult.FromException(ex));
        }
        await queueEntry.CompleteAsync();
    }
    return(JobResult.Success);
}
/// <summary>
/// Acknowledges a processed event-post entry, then either archives its backing
/// file (when the post requests archiving) or deletes it and marks it inactive.
/// </summary>
private async Task CompleteEntryAsync(QueueEntry <EventPost> queueEntry, EventPostInfo eventPostInfo, DateTime created)
{
    await queueEntry.CompleteAsync().AnyContext();

    var post = queueEntry.Value;
    if (post.ShouldArchive)
    {
        await _storage.CompleteEventPostAsync(post.FilePath, eventPostInfo.ProjectId, created, post.ShouldArchive).AnyContext();
    }
    else
    {
        await _storage.DeleteFileAsync(post.FilePath).AnyContext();
        await _storage.SetNotActiveAsync(post.FilePath).AnyContext();
    }
}
/// <summary>
/// Projects a bare queue entry into a CompleteQueueEntry, resolving album art
/// and full track metadata when a track is attached.
/// </summary>
private static CompleteQueueEntry GetCompleteEntry(QueueEntry entry)
{
    var complete = new CompleteQueueEntry
    {
        Id = entry.Id,
        User = entry.User
    };

    var track = entry.Track;
    if (track != null)
    {
        complete.ImageArtRef = ImageCache.GetImage(track.Uri);
        complete.Track = SpotifyLookup.Track.Lookup(track.Uri);
    }

    return complete;
}
/** Return the log base 2 of the maximum number of children of a candidate. */
// NOTE(review): the doc line above appears orphaned from a neighboring member
// (likely MaxChildrenShift); the comment block below documents this method.
/**
 * Process a candidate by either adding it to the result list or expanding its
 * children and inserting it into the priority queue. Passing an argument of
 * NULL does nothing.
 */
private void AddCandidate(Candidate candidate)
{
    if (candidate == null)
    {
        return;
    }
    if (candidate.IsTerminal)
    {
        // Terminal candidates go straight into the covering.
        _result.Add(candidate.Cell.Id);
        return;
    }
    // assert (candidate.numChildren == 0);
    // Expand one level at a time until we hit min_level_ to ensure that
    // we don't skip over it.
    var numLevels = (candidate.Cell.Level < _minLevel) ? 1 : _levelMod;
    var numTerminals = ExpandChildren(candidate, candidate.Cell, numLevels);
    if (candidate.NumChildren == 0)
    {
        // Do nothing
    }
    else if (!_interiorCovering && numTerminals == 1 << MaxChildrenShift && candidate.Cell.Level >= _minLevel)
    {
        // Optimization: add the parent cell rather than all of its children.
        // We can't do this for interior coverings, since the children just
        // intersect the region, but may not be contained by it - we need to
        // subdivide them further.
        candidate.IsTerminal = true;
        AddCandidate(candidate);
    }
    else
    {
        // We negate the priority so that smaller absolute priorities are returned
        // first. The heuristic is designed to refine the largest cells first,
        // since those are where we have the largest potential gain. Among cells
        // at the same level, we prefer the cells with the smallest number of
        // intersecting children. Finally, we prefer cells that have the smallest
        // number of children that cannot be refined any further.
        var priority = -((((candidate.Cell.Level << MaxChildrenShift) + candidate.NumChildren) << MaxChildrenShift) + numTerminals);
        var entry = new QueueEntry(priority, candidate);
        _candidateQueue.Enqueue(entry);
        // logger.info("Push: " + candidate.cell.id() + " (" + priority + ") ");
    }
}
/// <summary>
/// Drains the outgoing message queue over UDP, simulating network conditions:
/// messages are delayed by a jittered send latency, and with probability
/// <c>oopPercent</c> a message is diverted into a single "out-of-order packet"
/// slot that is transmitted later (after the lock) once its delay elapses.
/// </summary>
protected void PumpSendQueue()
{
    lock (queueLock)
    {
        while (!sendQueue.IsEmpty)
        {
            QueueEntry entry = sendQueue.Front();
            if (sendLatency > 0)
            {
                // Should really come up with a gaussian distribution based on the configured
                // value, but this will do for now.
                int jitter = (sendLatency * 2 / 3) + ((random.Next() % sendLatency) / 3);
                // Head of queue hasn't "aged" enough yet — stop draining for now.
                if (Utility.GetCurrentTime() < sendQueue.Front().queueTime + jitter)
                {
                    break;
                }
            }
            // Divert into the out-of-order slot only when it's free (one rogue at a time).
            if (oopPercent > 0 && ooPacket.message != null && random.Next() % 100 < oopPercent)
            {
                uint delay = (uint)(random.Next() % (sendLatency * 10 + 1000));
                Log($"creating rogue oop (seq: {entry.message.SequenceNumber} delay: {delay})");
                ooPacket.queueTime = Utility.GetCurrentTime() + delay;
                ooPacket.message = entry.message;
                ooPacket.destAddress = entry.destAddress;
            }
            else
            {
                var byteMsg = Utility.GetByteArray(entry.message);
                udpClient.SendTo(byteMsg, udpEndpoint);
                entry.message = null;
            }
            sendQueue.Pop();
        }
    }
    // Flush the delayed out-of-order packet once its scheduled time has passed.
    if (ooPacket.message != null && ooPacket.queueTime < Utility.GetCurrentTime())
    {
        Log("sending rogue oop!");
        var ooMsg = Utility.GetByteArray(ooPacket.message);
        udpClient.SendTo(ooMsg, udpEndpoint);
        ooPacket.message = null;
    }
}
/// <summary>
/// Verifies GetUserID produces the same user id for the same underlying value
/// presented three ways: hex bytes reversed by the caller, the byte-swapped
/// hex form with bigEndian=false, and the original hex form with
/// bigEndian=true. All three should normalize to one id.
/// </summary>
public void UserIDCalculationTest()
{
    // Case 1: caller pre-reverses the little-endian byte order.
    byte[] b = QueueEntry.HexToData("99D3CF222E1FA00");
    Array.Reverse(b);
    string userID = QueueEntry.GetUserID(b, 16, false);
    Assert.AreEqual("99D3CF222E1F9F0", userID);
    // Case 2: same value written byte-swapped, interpreted as little-endian.
    b = QueueEntry.HexToData("00FAE122F23C9D09");
    userID = QueueEntry.GetUserID(b, 16, false);
    Assert.AreEqual("99D3CF222E1F9F0", userID);
    // Case 3: original form interpreted as big-endian.
    b = QueueEntry.HexToData("99D3CF222E1FA00");
    userID = QueueEntry.GetUserID(b, 16, true);
    Assert.AreEqual("99D3CF222E1F9F0", userID);
}
public IActionResult CreateMessage()
{
    // Create a message and add it to the queue.
    CloudQueueMessage message = new CloudQueueMessage("naveed-queue-poc_" + Guid.NewGuid());
    // NOTE(review): AddMessageAsync is fire-and-forget here — the response may
    // be returned before the enqueue completes, and failures are swallowed.
    // Consider making this action async and awaiting the call.
    _queue.AddMessageAsync(message);
    var response = new QueueEntry()
    {
        QueueName = _queue.Name,
        QueueMessage = message.AsString,
        Action = "Enqueue"
    };
    return(Ok(response));
}
/// <summary>
/// Verifies that dequeued entries are serialized as a single well-formed JSON
/// array: the result string is bracketed and the entry identifiers round-trip
/// in order.
/// </summary>
public void EntriesJoinedInJsonArray(int entriesCount)
{
    var identifiers = Enumerable.Repeat(0, entriesCount).Select(p => Guid.NewGuid()).ToArray();
    var rawEntries = identifiers.Select(p => $"{{\"Tag\":\"tag\", \"D\":\"{p}\"}}").ToArray();
    var parsed = rawEntries
        .Select(p => QueueEntry.FromRequestString(new QueueContext("WHATEVER"), p, 1000))
        .ToArray();

    DequeueResult result = new DequeueResult(parsed);

    Assert.IsTrue(result.IsOk);
    string joined = result.DataToString();
    Assert.AreEqual('[', joined.First());
    Assert.AreEqual(']', joined.Last());
    var roundTripped = JArray.Parse(joined).Select(p => Guid.Parse(p["D"].ToString()));
    Assert.IsTrue(identifiers.SequenceEqual(roundTripped));
}
public IActionResult GetMessage()
{
    // Peek at the next message
    // NOTE(review): blocking on .Result inside a sync action risks thread-pool
    // starvation, and DeleteMessageAsync is fire-and-forget — the message may
    // be returned to the caller before (or without) being deleted. Consider an
    // async action that awaits both calls.
    Task <CloudQueueMessage> message = _queue.GetMessageAsync();
    _queue.DeleteMessageAsync(message.Result);
    var response = new QueueEntry()
    {
        QueueName = _queue.Name,
        QueueMessage = message.Result.AsString,
        Action = "Dequeue"
    };
    return(Ok(response));
}
/// <summary>
/// Single-producer enqueue into a fixed-size ring buffer. Returns false when
/// the target slot is still occupied (queue full). The store ordering is
/// driven by the configuration (optional release fence plus a configurable
/// memory order on the data store) — do not reorder these operations.
/// </summary>
public bool Enqueue(QueueEntry x)
{
    var w = _write.Load();
    // Slot not yet consumed by the reader: the ring is full at this index.
    if (_data[w].Load(MemoryOrder.Relaxed) != null)
    {
        return(false);
    }
    // Optional release fence so prior writes are visible before the slot publish.
    if (_config.UseEnqueueReleaseFence)
    {
        Fence.Insert(MemoryOrder.Release);
    }
    _data[w].Store(x, _config.EnqueueMemoryOrder);
    // Advance the write cursor with wrap-around.
    _write.Store((w + 1) % _size);
    return(true);
}
/// <summary>
/// Looks up the queue entry with the given id and, if present, removes it via
/// the sp_RemoveQueueEntry stored procedure. Returns the entry as it existed
/// before removal, or null when not found.
/// </summary>
internal QueueEntry RemoveQueueEntry(string queueEntryId)
{
    using (SqlConnection conn = CreateConnection())
    {
        conn.Open();
        QueueEntry entry = LookupQueueEntry(queueEntryId, conn);
        if (entry != null)
        {
            // Fix: SqlCommand is IDisposable and was previously leaked.
            using (SqlCommand removeQueueEntry = new SqlCommand("sp_RemoveQueueEntry", conn))
            {
                removeQueueEntry.Parameters.AddWithValue("@QueueId", queueEntryId);
                removeQueueEntry.CommandType = CommandType.StoredProcedure;
                removeQueueEntry.ExecuteNonQuery();
            }
        }
        return(entry);
    }
}
/// <summary>
/// Advances the animation clocks by <paramref name="delta"/> and, once the
/// head of the queue's delay has elapsed, starts that queued animation and
/// removes it from the queue.
/// </summary>
public void Update(float delta)
{
    Time += delta;
    previousTime += delta;
    mixTime += delta;

    if (queue.Count == 0)
        return;

    var pending = queue[0];
    if (Time < pending.delay)
        return; // not yet due

    SetAnimationInternal(pending.animation, pending.loop);
    queue.RemoveAt(0);
}
/// <summary>
/// GET handler: loads the queue entry (with its member) for the given id.
/// Returns 404 when the id is missing or no matching entry exists.
/// </summary>
public async Task <IActionResult> OnGetAsync(int?id)
{
    if (id is null)
        return NotFound();

    QueueEntry = await _context.QueueEntries
        .Include(q => q.Member)
        .FirstOrDefaultAsync(m => m.Id == id);

    if (QueueEntry is null)
        return NotFound();

    return Page();
}
/// <summary>
/// Job loop iteration: dequeues one WebHookNotification and POSTs its payload
/// to the configured URL. A 410 Gone response removes the web hook
/// registration. The entry is completed on success and abandoned on error.
/// </summary>
protected async override Task <JobResult> RunInternalAsync(CancellationToken token)
{
    Log.Trace().Message("Web hook job starting").Write();
    QueueEntry <WebHookNotification> queueEntry = null;
    try
    {
        queueEntry = _queue.Dequeue();
    }
    catch (Exception ex)
    {
        // A timeout only means the queue was empty; anything else is a failure.
        if (!(ex is TimeoutException))
        {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next WebHookNotification: {0}", ex.Message).Write();
            return(JobResult.FromException(ex));
        }
    }
    if (queueEntry == null)
    {
        return(JobResult.Success);
    }
    Log.Trace().Message("Processing WebHookNotification '{0}'.", queueEntry.Id).Write();
    WebHookNotification body = queueEntry.Value;
    Log.Trace().Project(body.ProjectId).Message("Process web hook call: project={0} url={1}", body.ProjectId, body.Url).Write();
    // Fix: HttpClient is IDisposable and was previously leaked on every call.
    // (Longer term, a shared/static client would also avoid socket exhaustion.)
    using (var client = new HttpClient())
    {
        try
        {
            var result = client.PostAsJson(body.Url, body.Data.ToJson(Formatting.Indented));
            if (result.StatusCode == HttpStatusCode.Gone)
            {
                // Endpoint says it's permanently gone — unregister the hook.
                _webHookRepository.RemoveByUrl(body.Url);
                Log.Warn().Project(body.ProjectId).Message("Deleting web hook: org={0} project={1} url={2}", body.OrganizationId, body.ProjectId, body.Url).Write();
            }
            queueEntry.Complete();
            Log.Info().Project(body.ProjectId).Message("Web hook POST complete: status={0} org={1} project={2} url={3}", result.StatusCode, body.OrganizationId, body.ProjectId, body.Url).Write();
        }
        catch (Exception ex)
        {
            queueEntry.Abandon();
            return(JobResult.FromException(ex));
        }
    }
    return(JobResult.Success);
}
/// <summary>
/// Marks <paramref name="node"/> as unscheduled and makes it eligible again:
/// if the node was never given a queue entry, re-run BFS discovery from it;
/// if it has an entry that is currently off the queue (negative position),
/// re-insert the entry and refresh its cost. An entry already on the queue
/// needs no action.
/// </summary>
public void Reschedule(Node node)
{
    //Console.WriteLine("rescheduling "+node);
    IsScheduled[node] = false;
    QueueEntry entry = EntryOfNode[node];
    if (entry == null)
    {
        // Never discovered: reset visit state so SearchFrom will pick it up.
        bfs.IsVisited[node] = VisitState.Unvisited;
        bfs.SearchFrom(node);
    }
    else if (entry.QueuePosition < 0)
    {
        // Entry exists but was extracted: put it back and recompute its cost.
        queue.Add(entry);
        UpdateEntry(entry);
    }
}
/// <summary>
/// Sends a DVD-order status email for the given queue entry, falling back to
/// the configured address when the entry has none. Exceptions are swallowed:
/// the message is written to <paramref name="log"/> (when provided) and false
/// is returned, so callers must check the result rather than expect a throw.
/// </summary>
public static async Task <bool> SendMessage(EmailType type, QueueEntry entry, EmailRecipients recipients, TextBox log = null)
{
    try
    {
        // Delegates to the fuller SendMessage overload with a subject derived
        // from the entry's source filename and a body summarizing its status.
        return(await SendMessage(entry.EmailAddress ?? Settings.EmailAddress, Settings.EmailServerAddress, $"[{type}] {DateTime.Now} - DVD Order Queue - {new FileInfo(entry.Source).Name}", $"See DVD Progress Page for current status.\r\nThis message may be delayed due to processing time, subject lists current time sent.\r\nSource:\t{entry.Source}\r\nStatus:\t{entry.Status}\r\nCurrent Time Elapsed:\t{entry.TimeSpan}\r\nRobot:\t{entry.Robot}\r\nLines:\r\n{entry.Line1}\r\n{entry.Line2}\r\n{entry.Line3}", recipients));
    }
    catch (Exception e)
    {
        // Best-effort error surface: show the failure in the UI log if one was given.
        if (log != null)
        {
            log.Text = e.Message;
        }
        return(false);
    }
}
/// <summary>
/// Creates a message queue with the specified name.
/// </summary>
/// <param name="queueName">Name of the message queue to create.</param>
/// <returns>True when the queue was created; false when the name already exists.</returns>
public bool CreateQueue(string queueName)
{
    return queues.TryAdd(queueName, new QueueEntry());
}
/// <summary>
/// Records an enqueue event in the log buffer and signals the flusher so the
/// line is written out.
/// </summary>
public void WriteEnqueue(ConcurrentQueue queue, QueueEntry entry)
{
    var line = string.Format(
        "Queue {0} Enqueued {1} DateTime {2} TTL {3} DeliveryCount {4}",
        queue.QueueID, entry.SeqNum, entry.EnqueueDateTime, entry.TTL, entry.DeliveryCount);
    buffer.Enqueue(line);
    flushSignal.Set(); // wake the flush worker
}
/// <summary>
/// In-memory SQS emulation: wraps the message in a QueueEntry, appends it to
/// the named queue under its lock, bumps the message counter, and resolves
/// the result with a send response. Failures are routed through
/// <c>result.Throw</c> rather than propagated.
/// </summary>
public Result<AwsSqsSendResponse> Send(string queue, AwsSqsMessage message, Result<AwsSqsSendResponse> result)
{
    try
    {
        var msgQueue = GetQueue(queue);
        // NOTE(review): ThrowIfQueueIsNull presumably already throws on null,
        // which would make the explicit null check below dead code (or vice
        // versa) — confirm and remove the redundant guard.
        ThrowIfQueueIsNull(msgQueue);
        if(msgQueue == null)
        {
            throw new AwsSqsRequestException("AWS.SimpleQueueService.NonExistentQueue", DreamMessage.InternalError());
        }
        var enqueued = new QueueEntry(message, queue);
        lock(msgQueue)
        {
            msgQueue.Add(enqueued);
        }
        _messageCounter++;
        result.Return(new SendResponse(enqueued.Message));
        return result;
    }
    catch(Exception e)
    {
        result.Throw(e);
        return result;
    }
}
/// <summary>
/// Invoked after a queue entry has been successfully acquired for this
/// consumer; implementations deliver the entry's payload. (The "Aquired"
/// spelling is pre-existing and kept so existing overrides keep compiling.)
/// </summary>
protected abstract void OnMessageAquired(QueueEntry next);
/// <summary>
/// Test worker: verifies the dequeued payload, signals the latch, then
/// randomly completes, abandons, or throws to exercise every queue outcome,
/// updating the corresponding counters.
/// </summary>
private void DoWork(QueueEntry<SimpleWorkItem> w, CountDownLatch latch, ref int abandonCount, ref int errorCount)
{
    Assert.Equal("Hello", w.Value.Data);
    latch.Signal();

    // randomly complete, abandon or blowup.
    if (RandomHelper.GetBool())
    {
        Console.WriteLine("Completing: {0}", w.Value.Id);
        w.CompleteAsync().Wait();
    }
    else if (RandomHelper.GetBool())
    {
        Console.WriteLine("Abandoning: {0}", w.Value.Id);
        // Fix: AbandonAsync() was fire-and-forget while abandonCount was
        // incremented immediately, so the counter could run ahead of the queue
        // state; wait for the abandon to finish like the Complete branch does.
        w.AbandonAsync().Wait();
        Interlocked.Increment(ref abandonCount);
    }
    else
    {
        Console.WriteLine("Erroring: {0}", w.Value.Id);
        Interlocked.Increment(ref errorCount);
        throw new ApplicationException();
    }
}
/// <summary>
/// Send message on named queue with a visibility delay.
/// </summary>
/// <param name="queueName">Queue name.</param>
/// <param name="messageBody">Message body.</param>
/// <param name="delay">Time to wait until the message becomes visible.</param>
public void SendMessage(SqsQueueName queueName, string messageBody, TimeSpan delay)
{
    var msgQueue = GetQueue(queueName);
    AssertQueueIsNotNull(queueName, msgQueue);

    // NOTE(review): the delay parameter is not used here — the entry is added
    // with DateTime.MinValue (immediately visible). Confirm whether that is
    // intentional for this in-memory implementation.
    var message = new SqsMessage(
        new SqsMessageId(Guid.NewGuid().ToString()),
        new SqsMessageReceipt(Guid.NewGuid().ToString()),
        messageBody);

    lock(msgQueue)
    {
        msgQueue.Add(new QueueEntry(message, DateTime.MinValue));
    }
}
/// <summary>
/// Queues an editor action and marks the UI busy; null entries are ignored.
/// </summary>
private void EnqueueAction(QueueEntry entry)
{
    if (entry is null)
        return;

    busy = true;
    GUI.enabled = false; // disable control on everything until next cycle
    actionQueue.Enqueue(entry);
}
/// <summary>
/// Test worker: asserts the dequeued payload, then randomly completes,
/// abandons, or throws to exercise every queue outcome; the latch is always
/// signaled on the way out.
/// </summary>
protected void DoWork(QueueEntry<SimpleWorkItem> w, CountdownEvent latch, WorkInfo info)
{
    var id = w.Value.Id;
    Trace.WriteLine($"Starting: {id}");
    Assert.Equal("Hello", w.Value.Data);
    try
    {
        // randomly complete, abandon or blowup.
        if (RandomData.GetBool())
        {
            Trace.WriteLine($"Completing: {id}");
            w.Complete();
            info.IncrementCompletedCount();
        }
        else if (RandomData.GetBool())
        {
            Trace.WriteLine($"Abandoning: {id}");
            w.Abandon();
            info.IncrementAbandonCount();
        }
        else
        {
            Trace.WriteLine($"Erroring: {id}");
            info.IncrementErrorCount();
            throw new ApplicationException();
        }
    }
    finally
    {
        Trace.WriteLine($"Signal {latch.CurrentCount}");
        latch.Signal();
    }
}
/// <summary>
/// Test worker: asserts the dequeued payload, then randomly completes,
/// abandons, or throws to exercise every queue outcome; the latch is always
/// signaled on the way out.
/// </summary>
private void DoWork(QueueEntry<SimpleWorkItem> w, CountdownEvent latch, WorkInfo info)
{
    // Fix: Debug.WriteLine("msg {0}", x) can bind to the
    // WriteLine(string message, string category) overload when x is a string,
    // printing "x: msg {0}" instead of formatting. Interpolation avoids the
    // ambiguity and matches the sibling Trace-based helpers.
    Debug.WriteLine($"Starting: {w.Value.Id}");
    Assert.Equal("Hello", w.Value.Data);
    try
    {
        // randomly complete, abandon or blowup.
        if (RandomHelper.GetBool())
        {
            Debug.WriteLine($"Completing: {w.Value.Id}");
            w.Complete();
            info.IncrementCompletedCount();
        }
        else if (RandomHelper.GetBool())
        {
            Debug.WriteLine($"Abandoning: {w.Value.Id}");
            w.Abandon();
            info.IncrementAbandonCount();
        }
        else
        {
            Debug.WriteLine($"Erroring: {w.Value.Id}");
            info.IncrementErrorCount();
            throw new ApplicationException();
        }
    }
    finally
    {
        latch.Signal();
    }
}
/// <summary>
/// Async test worker: asserts the dequeued payload, then randomly completes,
/// abandons, or throws to exercise every queue outcome; the countdown is
/// always signaled on the way out.
/// </summary>
protected async Task DoWorkAsync(QueueEntry<SimpleWorkItem> w, AsyncCountdownEvent countdown, WorkInfo info)
{
    var id = w.Value.Id;
    Trace.WriteLine($"Starting: {id}");
    Assert.Equal("Hello", w.Value.Data);
    try
    {
        // randomly complete, abandon or blowup.
        if (RandomData.GetBool())
        {
            Trace.WriteLine($"Completing: {id}");
            await w.CompleteAsync();
            info.IncrementCompletedCount();
        }
        else if (RandomData.GetBool())
        {
            Trace.WriteLine($"Abandoning: {id}");
            await w.AbandonAsync();
            info.IncrementAbandonCount();
        }
        else
        {
            Trace.WriteLine($"Erroring: {id}");
            info.IncrementErrorCount();
            throw new ApplicationException();
        }
    }
    finally
    {
        Trace.WriteLine($"Signal {countdown.CurrentCount}");
        countdown.Signal();
    }
}
/// <summary>
/// Selects the response.
/// </summary>
/// <param name="item">The item to parse.</param>
/// <returns>A <see cref="CloudQueue"/> object representing the item.</returns>
private CloudQueue SelectResponse(QueueEntry item)
{
    return new CloudQueue(item.Attributes, this);
}
/// <summary>
/// Builds the queue entry recorded the first time a vertex is encountered
/// during the shortest-path search.
/// </summary>
/// <param name="vertex">The vertex which has just been encountered.</param>
/// <param name="edge">The edge via which the vertex was encountered; null for the start vertex.</param>
/// <returns>The new queue entry.</returns>
private QueueEntry createSeenData(System.Object vertex, Edge edge)
{
    // Start vertex has distance 0; otherwise extend the path through the edge.
    double pathLength = (edge == null) ? 0 : calculatePathLength(vertex, edge);

    var entry = new QueueEntry(pathLength);
    entry.m_vertex = vertex;
    entry.m_spanningTreeEdge = edge;
    return entry;
}
/// <summary>
/// When an entry is archived, we need to attempt scavenging to cleanup the list.
/// Tracks the furthest-back archived entry (<c>unscavengedEntry</c>) via CAS
/// and triggers a full Scavenge() every ~10 archivals; when the head itself
/// was scavenged, the tracker is cleared if it now points before the new head.
/// </summary>
public void OnEntryArchived(QueueEntry entry)
{
    var next = head.Next;
    var newNext = head.GetNextValidEntry();
    if (next == newNext)
    {
        // the head of the queue has not been archived, hence the archival must have been mid queue.
        // so update unscavengedEntry if entry is further back in the queue than the current unscavengedEntry value
        var currentUnscavengedEntry = unscavengedEntry;
        // CAS loop: retry until unscavengedEntry is at or past this entry.
        while (currentUnscavengedEntry == null || currentUnscavengedEntry.CompareTo(entry) < 0)
        {
            Interlocked.CompareExchange(ref unscavengedEntry, entry, currentUnscavengedEntry);
            currentUnscavengedEntry = unscavengedEntry;
        }
        // only going to scavenge() after N entries have been scavenged
        if (Interlocked.Increment(ref scavenges) > 10)
        {
            Interlocked.Exchange(ref scavenges, 0);
            Scavenge();
        }
    }
    else
    {
        // the head has been scavenged
        var currentUnscavengedEntry = unscavengedEntry;
        // Clear the tracker when it now points at (or before) the scavenged region.
        if (currentUnscavengedEntry != null && (next == null || currentUnscavengedEntry.CompareTo(next) < 0))
        {
            Interlocked.CompareExchange(ref unscavengedEntry, null, currentUnscavengedEntry);
            currentUnscavengedEntry = unscavengedEntry;
        }
    }
}
/// <summary>
/// Converts the DCC command to its signal timings and enqueues it (with its
/// repeat count) for transmission; ignored when the controller is inactive.
/// </summary>
public void AddCommand(DCCCommand cmd)
{
    if (!IsActive)
        return;

    var entry = new QueueEntry(cmd.ToTimings(), cmd.Repeats);
    lock (commands.SyncRoot)
    {
        commands.Enqueue(entry);
    }
}