public void PausingTheQueueShouldHoldItemsUntilUnpaused()
{
    // Items enqueued while the queue is paused must be held until *every*
    // outstanding pause handle has been disposed.
    var item = Observable.Return(42);
    var fixture = new OperationQueue(2);

    var prePauseOutput = new[]
    {
        fixture.EnqueueObservableOperation(4, () => item),
        fixture.EnqueueObservableOperation(4, () => item),
    }.Merge().CreateCollection();

    // Use the Count property (not the LINQ Count() extension) — consistent with
    // every other assertion in this test and avoids an enumeration.
    Assert.Equal(2, prePauseOutput.Count);

    var unpause1 = fixture.PauseQueue();

    // The queue is halted, but we should still eventually process these
    // once it's no longer halted
    var pauseOutput = new[]
    {
        fixture.EnqueueObservableOperation(4, () => item),
        fixture.EnqueueObservableOperation(4, () => item),
    }.Merge().CreateCollection();

    Assert.Equal(0, pauseOutput.Count);

    // Pausing twice: both handles must be disposed before work resumes.
    var unpause2 = fixture.PauseQueue();
    Assert.Equal(0, pauseOutput.Count);

    unpause1.Dispose();
    Assert.Equal(0, pauseOutput.Count);

    unpause2.Dispose();
    Assert.Equal(2, pauseOutput.Count);
}
// ShutdownQueue() must only signal after every enqueued operation — including
// ones still waiting for a slot — has completed.
public void ShutdownShouldSignalOnceEverythingCompletes()
{
    var subjects = Enumerable.Range(0, 5).Select(x => new AsyncSubject<int>()).ToArray();
    var priorities = new[] { 5, 5, 5, 10, 1, };
    var fixture = new OperationQueue(2);

    // The two at the front are solely to stop up the queue, they get subscribed
    // to immediately.
    var outputs = subjects.Zip(priorities, (inp, pri) =>
            fixture.EnqueueObservableOperation(pri, () => inp).CreateCollection())
        .ToArray();

    var shutdown = fixture.ShutdownQueue().CreateCollection();

    // Nothing has produced a value yet; shutdown must not have signaled.
    Assert.True(outputs.All(x => x.Count == 0));
    Assert.Equal(0, shutdown.Count);

    // Complete four of the five; one operation is still outstanding.
    for (int i = 0; i < 4; i++)
    {
        subjects[i].OnNext(42);
        subjects[i].OnCompleted();
    }

    Assert.Equal(0, shutdown.Count);

    // Complete the last one, that should signal that we're shut down
    subjects[4].OnNext(42);
    subjects[4].OnCompleted();

    Assert.True(outputs.All(x => x.Count == 1));
    Assert.Equal(1, shutdown.Count);
}
// After SetMaximumConcurrent(2) on a 3-wide queue, no new operation may start
// until the live count drops below the new, lower maximum.
public void ShouldBeAbleToDecreaseTheMaximunConcurrentValueOfAnExistingQueue()
{
    var subjects = Enumerable.Range(0, 6).Select(x => new AsyncSubject<int>()).ToArray();
    var fixture = new OperationQueue(3);

    // The three at the front are solely to stop up the queue, they get subscribed
    // to immediately.
    var outputs = subjects
        .Select(inp => fixture.EnqueueObservableOperation(5, () => inp).CreateCollection())
        .ToArray();

    // Initially the first three are live (have observers); the rest are queued.
    Assert.True(
        new[] { true, true, true, false, false, false, }
            .Zip(subjects, (expected, subj) => new { expected, actual = subj.HasObservers, })
            .All(x => x.expected == x.actual));

    fixture.SetMaximumConcurrent(2);

    // Complete the first one, the last three subjects should still have
    // no observers because we reduced maximum concurrent
    subjects[0].OnNext(42);
    subjects[0].OnCompleted();

    Assert.True(
        new[] { false, true, true, false, false, false, }
            .Zip(subjects, (expected, subj) => new { expected, actual = subj.HasObservers, })
            .All(x => x.expected == x.actual));

    // Complete subj[1], now 2,3 are live
    subjects[1].OnNext(42);
    subjects[1].OnCompleted();

    Assert.True(
        new[] { false, false, true, true, false, false, }
            .Zip(subjects, (expected, subj) => new { expected, actual = subj.HasObservers, })
            .All(x => x.expected == x.actual));
}
// Operations that share the same key ("key") must execute one after another,
// even when free concurrency slots are available.
public void KeyedItemsShouldBeSerialized()
{
    var subj1 = new AsyncSubject<int>();
    var subj2 = new AsyncSubject<int>();

    // Deferred inputs count subscriptions so dispatch order is observable.
    var subscribeCount1 = 0;
    var input1Subj = new AsyncSubject<int>();
    var input1 = Observable.Defer(() =>
    {
        subscribeCount1++;
        return(input1Subj);
    });

    var subscribeCount2 = 0;
    var input2Subj = new AsyncSubject<int>();
    var input2 = Observable.Defer(() =>
    {
        subscribeCount2++;
        return(input2Subj);
    });

    var fixture = new OperationQueue(2);

    // Block up the queue
    foreach (var v in new[] { subj1, subj2, })
    {
        fixture.EnqueueObservableOperation(5, () => v);
    }

    // subj1,2 are live, input1,2 are in queue
    var out1 = fixture.EnqueueObservableOperation(5, "key", Observable.Never<Unit>(), () => input1).CreateCollection();
    var out2 = fixture.EnqueueObservableOperation(5, "key", Observable.Never<Unit>(), () => input2).CreateCollection();
    Assert.Equal(0, subscribeCount1);
    Assert.Equal(0, subscribeCount2);

    // Dispatch both subj1 and subj2, we should end up with input1 live,
    // but input2 in queue because of the key
    subj1.OnNext(42);
    subj1.OnCompleted();
    subj2.OnNext(42);
    subj2.OnCompleted();
    Assert.Equal(1, subscribeCount1);
    Assert.Equal(0, subscribeCount2);
    Assert.Equal(0, out1.Count);
    Assert.Equal(0, out2.Count);

    // Dispatch input1, input2 can now execute
    input1Subj.OnNext(42);
    input1Subj.OnCompleted();
    Assert.Equal(1, subscribeCount1);
    Assert.Equal(1, subscribeCount2);
    Assert.Equal(1, out1.Count);
    Assert.Equal(0, out2.Count);

    // Dispatch input2, everything is finished
    input2Subj.OnNext(42);
    input2Subj.OnCompleted();
    Assert.Equal(1, subscribeCount1);
    Assert.Equal(1, subscribeCount2);
    Assert.Equal(1, out1.Count);
    Assert.Equal(1, out2.Count);
}
public void Enqueue(Operation operation)
{
    // `operation is Operation` on a parameter already typed as Operation is only
    // false when the reference is null — test for null directly.
    if (operation is null || operation.Callback is null)
    {
        var result = new OperationResult() { Operation = operation };
        OnOperationTriggered(result, new ArgumentException($"{nameof(operation)} is not valid or callback is null"));
        return;
    }

    try
    {
        // Queue the work item and wake the consumer loop (see Start()).
        OperationQueue?.Enqueue(operation);
        _resetEvent.Set();
    }
    catch (Exception ex)
    {
        // Surface enqueue failures through the same callback as execution failures.
        OnOperationTriggered(new OperationResult() { Operation = operation }, ex);
    }
}
public void NonkeyedItemsShouldRunInParallel()
{
    // Two un-keyed operations on a 2-wide queue should both be subscribed
    // to immediately — neither waits for the other.
    var firstSubject = new AsyncSubject<int>();
    var firstSubscriptions = 0;
    var firstSource = Observable.Defer(() =>
    {
        firstSubscriptions++;
        return firstSubject;
    });

    var secondSubject = new AsyncSubject<int>();
    var secondSubscriptions = 0;
    var secondSource = Observable.Defer(() =>
    {
        secondSubscriptions++;
        return secondSubject;
    });

    var fixture = new OperationQueue(2);

    Assert.Equal(0, firstSubscriptions);
    Assert.Equal(0, secondSubscriptions);

    fixture.EnqueueObservableOperation(5, () => firstSource);
    fixture.EnqueueObservableOperation(5, () => secondSource);

    Assert.Equal(1, firstSubscriptions);
    Assert.Equal(1, secondSubscriptions);
}
public Tapstream(OperationQueue queue, string accountName, string developerSecret, string hardware)
{
    // Build the collaborators first, then hand everything to Core.
    listener = new CoreListenerImpl(queue);
    platform = new PlatformImpl();
    del = new DelegateImpl();

    core = new Core(del, platform, listener, accountName, developerSecret, hardware);
}
public bool IsOperationInQueue(string operationId)
{
    // Any() checks membership without the ToList() copy the old code made
    // just to be able to call List<T>.Find().
    lock (queueLock)
    {
        return OperationQueue.Any(oq => oq.Id == operationId);
    }
}
// Spins up a dedicated thread that moves packets from PacketQueue into
// OperationQueue until this instance is disposed.
void PacketHandler()
{
    new Thread(() =>
    {
        while (!Disposed)
        {
            try
            {
                if (PacketQueue.TryDequeue(out Packet packet))
                {
                    // Pair the packet with its peer and hand it to the operation queue.
                    OperationQueue.Enqueue(new Operation(GetPeer(packet.EndPoint), packet));
                }
                else
                {
                    // Queue drained: block until the producer signals new packets.
                    // NOTE(review): a Set() issued between TryDequeue and Reset()
                    // could be lost here — confirm the producer re-signals or that
                    // this race is acceptable.
                    PacketQueueBegin.Reset();
                    PacketQueueBegin.WaitOne();
                }
            }
            catch (Exception e)
            {
                // Keep the pump alive; report the failure to the owner.
                OnException(e);
            }
        }
        Console.WriteLine("Packet manager disposed");
    }).Start();
}
private void Start()
{
    // Consumer loop: drains OperationQueue until the instance is disposed.
    // (The original's `true && !_disposedValue` condition was redundant.)
    while (!_disposedValue)
    {
        if (!OperationQueue.IsEmpty)
        {
            Operation operation = null;
            try
            {
                if (OperationQueue.TryDequeue(out operation))
                {
                    // Run the operation's callback and publish its result.
                    var result = operation.Callback?.Invoke(operation.Parameters);
                    OnOperationTriggered(new OperationResult() { Operation = operation, Result = result });
                }
            }
            catch (Exception ex)
            {
                // Report the failing operation along with the exception.
                OnOperationTriggered(new OperationResult() { Operation = operation }, ex);
            }
        }
        else
        {
            // Block until Enqueue() signals new work.
            _resetEvent.WaitOne();
        }

        // NOTE(review): this second wait throttles every iteration by up to
        // Parameter.Sleep and, combined with the wait above, can block twice
        // per empty cycle — confirm this is intentional.
        _resetEvent.WaitOne(Parameter.Sleep);
    }
}
public RateLimitedHttpMessageHandler(HttpMessageHandler handler, Priority basePriority, int priority = 0, long? maxBytesToRead = null, OperationQueue opQueue = null, Func<HttpRequestMessage, HttpResponseMessage, string, CancellationToken, Task> cacheResultFunc = null)
    : base(handler)
{
    // Effective priority = the coarse priority band plus the caller's offset.
    this.priority = (int)basePriority + priority;

    this.cacheResult = cacheResultFunc;
    this.opQueue = opQueue;
    this.maxBytesToRead = maxBytesToRead;
}
public PeripheralConnection(Peripheral device, int num)
{
    _device = device;
    _context = SynchronizationContext.Current;

    // Compute the logger id once and reuse it for both the field and the logger.
    var loggerId = $"BLEConnection:{device.Uuid.RightHalf:X12}:{num}";
    _loggerId = loggerId;
    _logger = device.Adapter._loggerFactory.CreateLogger(loggerId);

    _q = new OperationQueue(_logger);
}
public Task<bool> EnqueueOperationAsync(ShardWriteOperation data)
{
    lock (queueLock)
    {
        // Store a defensive copy so later mutation of the caller's object
        // cannot affect the queued entry.
        var snapshot = SystemExtension.Clone(data);
        OperationQueue.Add(snapshot);
    }

    // Enqueueing always succeeds; the Task shape exists for interface parity.
    return Task.FromResult(true);
}
public Task<bool> DeleteOperationFromQueueAsync(ShardWriteOperation operation)
{
    // Remove under the queue lock. The return value of Remove is ignored,
    // so the result is always true — matching the original behavior.
    lock (queueLock)
    {
        OperationQueue.Remove(operation);
    }

    return Task.FromResult(true);
}
public bool IsOperationComplete(string transactionId)
{
    lock (queueLock)
    {
        // Exists() states the membership test directly; the original's
        // `Find(...) != null` was an indirect way to ask the same question.
        var isOperationInQueue = OperationQueue.Exists(oq => oq.Id == transactionId);

        // Complete = neither in transit nor still queued.
        return !TransitQueue.ContainsKey(transactionId) && !isOperationInQueue;
    }
}
/// <summary>
/// Agent that runs a scheduled task
/// </summary>
/// <param name="task">
/// The invoked task
/// </param>
/// <remarks>
/// This method is called when a periodic or resource intensive task is invoked
/// </remarks>
protected override async void OnInvoke(ScheduledTask task)
{
    // (Modifier order fixed: `async protected override` -> `protected override async`.
    //  `async void` is required here because OnInvoke is a framework override.)
    Logger.Log("Agent", "- - - - - - - - - - - - -");
    Logger.Log("Agent", "Agent invoked -> " + task.Name);

    this.contactBindingManager = await ContactBindings.GetAppContactBindingManagerAsync();

    // Use the name of the task to differentiate between the ExtensilityTaskAgent
    // and the ScheduledTaskAgent
    if (task.Name == "ExtensibilityTaskAgent")
    {
        // (The unused `inprogressOperations` list from the original was removed.)
        OperationQueue operationQueue = await SocialManager.GetOperationQueueAsync();
        ISocialOperation socialOperation = await operationQueue.GetNextOperationAsync();

        while (null != socialOperation)
        {
            Logger.Log("Agent", "Dequeued an operation of type " + socialOperation.Type.ToString());

            try
            {
                switch (socialOperation.Type)
                {
                    case SocialOperationType.DownloadRichConnectData:
                        await ProcessOperationAsync(socialOperation as DownloadRichConnectDataOperation);
                        break;

                    default:
                        // This should never happen
                        HandleUnknownOperation(socialOperation);
                        break;
                }

                // The agent can only use up to 20 MB
                // Logging the memory usage information for debugging purposes
                Logger.Log("Agent", string.Format("Completed operation {0}, memusage: {1}kb/{2}kb",
                    socialOperation.ToString(),
                    (int)((long)DeviceExtendedProperties.GetValue("ApplicationCurrentMemoryUsage")) / 1024,
                    (int)((long)DeviceExtendedProperties.GetValue("ApplicationPeakMemoryUsage")) / 1024));

                // This can block for up to 1 minute. Don't expect to run instantly every time.
                socialOperation = await operationQueue.GetNextOperationAsync();
            }
            catch (Exception e)
            {
                // NOTE(review): on an exception, socialOperation is not advanced,
                // so the same operation is retried on the next pass — confirm this
                // retry-forever behavior is intended.
                Helpers.HandleException(e);
            }
        }

        Logger.Log("Agent", "No more operations in the queue");
    }

    NotifyComplete();
}
public Tapstream(OperationQueue queue, string accountName, string developerSecret, Config config)
{
    this.config = config;

    // Collaborators first, then Core. Note this overload passes an extra
    // leading null argument to Core and starts it explicitly.
    listener = new CoreListenerImpl(queue);
    platform = new PlatformImpl();
    del = new DelegateImpl();

    core = new Core(del, platform, listener, null, accountName, developerSecret, config);
    core.Start();
}
public ArtworkCache(IBlobCache cache = null, IArtworkFetcher artworkFetcher = null)
{
    // Injected dependencies fall back to sensible defaults.
    this.cache = cache ?? BlobCache.LocalMachine;
    this.artworkFetcher = artworkFetcher ?? new MusicBrainzArtworkFetcher();

    // Disk operations should be serialized
    this.queue = new OperationQueue(1);

    this.storageSemaphore = new KeyedMemoizingSemaphore();
    this.keyedMemoizingSemaphore = new KeyedMemoizingSemaphore();
}
public ArtworkCache(IBlobCache cache = null, IArtworkFetcher artworkFetcher = null)
{
    // Disk operations should be serialized
    this.queue = new OperationQueue(1);

    this.storageSemaphore = new KeyedMemoizingSemaphore();
    this.keyedMemoizingSemaphore = new KeyedMemoizingSemaphore();

    // Use the injected dependencies when provided, otherwise the defaults.
    this.cache = cache ?? BlobCache.LocalMachine;
    this.artworkFetcher = artworkFetcher ?? new MusicBrainzArtworkFetcher();
}
public bool Enqueue(BuildingOperation operation)
{
    // >= guards against the count ever exceeding the cap; the original's ==
    // check would let the queue grow without bound in that state.
    if (OperationQueue.Count >= QUEUE_SIZE)
    {
        return false;
    }

    OperationQueue.Add(operation);
    return true;
}
public ActionError CancelOperation(int queueIndex)
{
    // Reject negative indices too — the original only checked the upper bound,
    // so a negative index would throw from RemoveAt instead of returning an error.
    if (queueIndex < 0 || queueIndex >= OperationQueue.Count)
    {
        return ActionError.InvalidQueueIndex;
    }

    OperationQueue.RemoveAt(queueIndex);
    return ActionError.Succeed;
}
public PeripheralConnection(Peripheral device, int num)
{
    _device = device;
    _adapter = device.Adapter;
    _peripheral = device.CBPeripheral;
    _central = _adapter.CentralManager;

    // Build the logger id once, then use it for both the field and the logger.
    var loggerId = $"BLEConnection:{device.Uuid}:{num}";
    _loggerId = loggerId;
    _logger = _adapter._loggerFactory.CreateLogger(loggerId);

    _q = new OperationQueue(_logger);
}
public BuildingOperation DequeueAt(int i)
{
    // Guard both bounds: the original accepted negative i and then threw an
    // ArgumentOutOfRangeException from the indexer rather than this exception.
    if (i < 0 || OperationQueue.Count <= i)
    {
        throw new InvalidOperationException();
    }

    var r = OperationQueue[i];
    OperationQueue.RemoveAt(i);
    return r;
}
// Raising maximumConcurrent from 2 to 3 must immediately dispatch exactly one
// more queued operation — no fewer, no more.
public void ShouldBeAbleToIncreaseTheMaximunConcurrentValueOfAnExistingQueue()
{
    // Four deferred sources that count their subscriptions, so dispatch is observable.
    var unkeyed1Subj = new AsyncSubject<int>();
    var unkeyed1SubCount = 0;
    var unkeyed1 = Observable.Defer(() =>
    {
        unkeyed1SubCount++;
        return(unkeyed1Subj);
    });

    var unkeyed2Subj = new AsyncSubject<int>();
    var unkeyed2SubCount = 0;
    var unkeyed2 = Observable.Defer(() =>
    {
        unkeyed2SubCount++;
        return(unkeyed2Subj);
    });

    var unkeyed3Subj = new AsyncSubject<int>();
    var unkeyed3SubCount = 0;
    var unkeyed3 = Observable.Defer(() =>
    {
        unkeyed3SubCount++;
        return(unkeyed3Subj);
    });

    var unkeyed4Subj = new AsyncSubject<int>();
    var unkeyed4SubCount = 0;
    var unkeyed4 = Observable.Defer(() =>
    {
        unkeyed4SubCount++;
        return(unkeyed4Subj);
    });

    var fixture = new OperationQueue(2);
    Assert.Equal(0, unkeyed1SubCount);
    Assert.Equal(0, unkeyed2SubCount);
    Assert.Equal(0, unkeyed3SubCount);
    Assert.Equal(0, unkeyed4SubCount);

    fixture.EnqueueObservableOperation(5, () => unkeyed1);
    fixture.EnqueueObservableOperation(5, () => unkeyed2);
    fixture.EnqueueObservableOperation(5, () => unkeyed3);
    fixture.EnqueueObservableOperation(5, () => unkeyed4);

    // Only the first two fit into the original concurrency of 2.
    Assert.Equal(1, unkeyed1SubCount);
    Assert.Equal(1, unkeyed2SubCount);
    Assert.Equal(0, unkeyed3SubCount);
    Assert.Equal(0, unkeyed4SubCount);

    fixture.SetMaximumConcurrent(3);

    // One extra slot: the third starts, the fourth remains queued.
    Assert.Equal(1, unkeyed1SubCount);
    Assert.Equal(1, unkeyed2SubCount);
    Assert.Equal(1, unkeyed3SubCount);
    Assert.Equal(0, unkeyed4SubCount);
}
/// <summary>
/// Starts the download. If the content length cannot be obtained, an exception
/// is thrown. (Original doc was in Chinese; translated.)
/// </summary>
public async ValueTask DownloadAsync(CancellationToken token)
{
    StatusSubject.OnNext(@"正在获取下载文件大小...");
    FileSize = await GetContentLengthAsync(token); // total size

    TempDir = EnsureDirectory(TempDir);
    var list = GetFileRangeList();

    // Single-slot queue: range requests are issued one at a time.
    var opQueue = new OperationQueue(1);
    Current = 0;
    Last = 0;
    try
    {
        using var speedMonitor = CreateSpeedMonitor();

        StatusSubject.OnNext(@"正在下载...");

        // Each range is fetched through the queue, then streamed to its temp file.
        await list.Select(info =>
            // ReSharper disable once AccessToDisposedClosure
            opQueue.Enqueue(1, () => GetStreamAsync(info, token))
                .ToObservable()
                .SelectMany(res => WriteToFileAsync(res.Item1, res.Item2, token))
        ).Merge();

        StatusSubject.OnNext(@"下载完成,正在合并文件...");
        Current = 0;
        await MergeFilesAsync(list, token);
    }
    catch (OperationCanceledException)
    {
        // Cancellation is reported, then propagated to the caller.
        StatusSubject.OnNext(@"下载已取消");
        throw;
    }
    catch (Exception ex)
    {
        // Other failures are logged and reported but deliberately swallowed.
        _logger.LogError(ex, @"下载出错");
        StatusSubject.OnNext(@"下载出错");
    }
    finally
    {
        await opQueue.ShutdownQueue();
        opQueue.Dispose();

        // Fire-and-forget cleanup of the per-range temp files.
        Task.Run(async() =>
        {
            foreach (var range in list)
            {
                await DeleteFileWithRetryAsync(range.FileName);
            }
        }, CancellationToken.None).NoWarning();
    }
}
public async Task<bool> EnqueueOperationAsync(ShardWriteOperation transaction)
{
    // Always record in memory first, under the queue lock.
    lock (queueLock)
    {
        OperationQueue.Add(transaction);
    }

    // Without disk persistence, an in-memory enqueue always succeeds.
    if (!_persistToDisk)
    {
        return true;
    }

    return await _operationCacheRepository.EnqueueOperationAsync(transaction);
}
public Task<ShardWriteOperation> GetNextOperationAsync()
{
    lock (queueLock)
    {
        // FirstOrDefault() replaces the original Take(1)/Count()/First() dance;
        // it yields null when the queue is empty, matching the old fall-through.
        var next = OperationQueue.FirstOrDefault();
        return Task.FromResult(next);
    }
}
public override void Update()
{
    // Nothing queued — nothing to advance this tick.
    if (OperationQueue.Count == 0)
    {
        return;
    }

    // Advance the head operation by one unit of work.
    var current = OperationQueue.First();
    current.Done++;

    // Finished: pop it from the queue and run its completion step.
    if (current.Done == current.NeedDone)
    {
        Dequeue();
        FinalizeOperation(current);
    }
}
/// <summary>
/// Handles messages received by the service
/// whenever the service encounters an error.
/// </summary>
/// <param name="ex">The exception received from the service.</param>
public void HandleServiceError(Exception ex)
{
    // Forward the error to each monitor on the operation queue; a failure in
    // the monitor's own handler is wrapped in ErrorHandlerException.
    Action<IServiceMonitor> forwardError = serviceMonitor =>
    {
        try
        {
            serviceMonitor.HandleServiceError(ex);
        }
        catch (Exception errorHandlerException)
        {
            throw new ErrorHandlerException(errorHandlerException.Message, errorHandlerException);
        }
    };

    OperationQueue.Enqueue(forwardError);
}
public async Task ProcessSocialOperationsQueue()
{
    OperationQueue queue = await SocialManager.GetOperationQueueAsync();

    // Drain the queue until GetNextOperationAsync reports no more work (null).
    ISocialOperation operation;
    while ((operation = await queue.GetNextOperationAsync()) != null)
    {
        try
        {
            // Dispatch by operation type; unknown types fall through untouched.
            switch (operation.Type)
            {
                case SocialOperationType.DownloadHomeFeed:
                    await this.ProcessHomeFeed(operation as DownloadHomeFeedOperation);
                    break;

                case SocialOperationType.DownloadContactFeed:
                    await this.ProcessContactFeed(operation as DownloadFeedOperation);
                    break;

                case SocialOperationType.DownloadDashboard:
                    this.ProcessDashboard(operation as DownloadDashboardFeedOperation);
                    break;

                case SocialOperationType.DownloadRichConnectData:
                    await this.ProcessConnectData(operation as DownloadRichConnectDataOperation);
                    break;
            }

            operation.NotifyCompletion();
        }
        catch
        {
            // Any failure marks the operation as unsuccessfully completed;
            // the swallow matches the original behavior.
            operation.NotifyCompletion(false);
        }
    }
}
// NOTE(review): this block appears truncated in this view — the method body
// continues beyond the visible source, so only the setup portion is shown.
public void ItemsShouldBeDispatchedByPriority()
{
    var subjects = Enumerable.Range(0, 5).Select(x => new AsyncSubject<int>()).ToArray();
    var priorities = new[] { 5, 5, 5, 10, 1, };
    var fixture = new OperationQueue(2);

    // The two at the front are solely to stop up the queue, they get subscribed
    // to immediately.
    var outputs = subjects.Zip(priorities, (inp, pri) =>
    {
        // Bind each operation's output into an observable collection on the
        // immediate scheduler so counts can be asserted synchronously.
        fixture
            .EnqueueObservableOperation(pri, () => inp)
            .ToObservableChangeSet(scheduler: ImmediateScheduler.Instance)
            .Bind(out var y).Subscribe();
        return(y);
    }).ToArray();
// A keyed operation cancelled while still queued must never have its
// evaluation function invoked, even after the queue frees up.
public void CancellingItemsShouldntEvenBeEvaluated()
{
    var subj1 = new AsyncSubject<int>();
    var subj2 = new AsyncSubject<int>();
    var fixture = new OperationQueue(2);

    // Block up the queue
    foreach (var v in new[] { subj1, subj2, })
    {
        fixture.EnqueueObservableOperation(5, () => v);
    }

    var cancel1 = new Subject<Unit>();
    bool wasCalled = false;
    var item1 = new AsyncSubject<int>();
    var output = fixture.EnqueueObservableOperation(5, "foo", cancel1, () =>
    {
        wasCalled = true;
        return(item1);
    }).CreateCollection();

    // Still blocked by subj1,2
    Assert.Equal(0, output.Count);
    Assert.False(wasCalled);

    // Still blocked by subj1,2 - however, we've cancelled foo before
    // it even had a chance to run - if that's the case, we shouldn't
    // even call the evaluation func
    cancel1.OnNext(Unit.Default);
    cancel1.OnCompleted();
    Assert.Equal(0, output.Count);
    Assert.False(wasCalled);

    // Unblock subj1,2, we still shouldn't see wasCalled = true
    subj1.OnNext(42);
    subj1.OnCompleted();
    Assert.Equal(0, output.Count);
    Assert.False(wasCalled);

    subj2.OnNext(42);
    subj2.OnCompleted();
    Assert.Equal(0, output.Count);
    Assert.False(wasCalled);
}
// A cancelled operation must be dropped from the merged output even if its
// underlying observable later produces a value.
public void CancellingItemsShouldNotResultInThemBeingReturned()
{
    var subj1 = new AsyncSubject<int>();
    var subj2 = new AsyncSubject<int>();
    var fixture = new OperationQueue(2);

    // Block up the queue
    foreach (var v in new[] { subj1, subj2, })
    {
        fixture.EnqueueObservableOperation(5, () => v);
    }

    var cancel1 = new Subject<Unit>();
    var item1 = new AsyncSubject<int>();
    var output = new[]
    {
        fixture.EnqueueObservableOperation(5, "foo", cancel1, () => item1),
        fixture.EnqueueObservableOperation(5, "baz", () => Observable.Return(42)),
    }.Merge().CreateCollection();

    // Still blocked by subj1,2
    Assert.Equal(0, output.Count);

    // Still blocked by subj1,2, only baz is in queue
    cancel1.OnNext(Unit.Default);
    cancel1.OnCompleted();
    Assert.Equal(0, output.Count);

    // foo was cancelled, baz is still good
    subj1.OnNext(42);
    subj1.OnCompleted();
    Assert.Equal(1, output.Count);

    // don't care that cancelled item finished
    item1.OnNext(42);
    item1.OnCompleted();
    Assert.Equal(1, output.Count);

    // still shouldn't see anything
    subj2.OnNext(42);
    subj2.OnCompleted();
    Assert.Equal(1, output.Count);
}
// Cancelling a queued (not-yet-started) keyed operation must prevent its
// evaluation function from ever running, even once slots free up.
public void CancellingItemsShouldntEvenBeEvaluated()
{
    var subj1 = new AsyncSubject<int>();
    var subj2 = new AsyncSubject<int>();
    var fixture = new OperationQueue(2);

    // Block up the queue
    foreach (var v in new[] { subj1, subj2, })
    {
        fixture.EnqueueObservableOperation(5, () => v);
    }

    var cancel1 = new Subject<Unit>();
    bool wasCalled = false;
    var item1 = new AsyncSubject<int>();
    var output = fixture.EnqueueObservableOperation(5, "foo", cancel1, () =>
    {
        wasCalled = true;
        return item1;
    }).CreateCollection();

    // Still blocked by subj1,2
    Assert.Equal(0, output.Count);
    Assert.False(wasCalled);

    // Still blocked by subj1,2 - however, we've cancelled foo before
    // it even had a chance to run - if that's the case, we shouldn't
    // even call the evaluation func
    cancel1.OnNext(Unit.Default);
    cancel1.OnCompleted();
    Assert.Equal(0, output.Count);
    Assert.False(wasCalled);

    // Unblock subj1,2, we still shouldn't see wasCalled = true
    subj1.OnNext(42);
    subj1.OnCompleted();
    Assert.Equal(0, output.Count);
    Assert.False(wasCalled);

    subj2.OnNext(42);
    subj2.OnCompleted();
    Assert.Equal(0, output.Count);
    Assert.False(wasCalled);
}
// Once cancelled, an operation's result must never reach the merged output —
// even if its source observable completes afterwards.
public void CancellingItemsShouldNotResultInThemBeingReturned()
{
    var subj1 = new AsyncSubject<int>();
    var subj2 = new AsyncSubject<int>();
    var fixture = new OperationQueue(2);

    // Block up the queue
    foreach (var v in new[] { subj1, subj2, })
    {
        fixture.EnqueueObservableOperation(5, () => v);
    }

    var cancel1 = new Subject<Unit>();
    var item1 = new AsyncSubject<int>();
    var output = new[]
    {
        fixture.EnqueueObservableOperation(5, "foo", cancel1, () => item1),
        fixture.EnqueueObservableOperation(5, "baz", () => Observable.Return(42)),
    }.Merge().CreateCollection();

    // Still blocked by subj1,2
    Assert.Equal(0, output.Count);

    // Still blocked by subj1,2, only baz is in queue
    cancel1.OnNext(Unit.Default);
    cancel1.OnCompleted();
    Assert.Equal(0, output.Count);

    // foo was cancelled, baz is still good
    subj1.OnNext(42);
    subj1.OnCompleted();
    Assert.Equal(1, output.Count);

    // don't care that cancelled item finished
    item1.OnNext(42);
    item1.OnCompleted();
    Assert.Equal(1, output.Count);

    // still shouldn't see anything
    subj2.OnNext(42);
    subj2.OnCompleted();
    Assert.Equal(1, output.Count);
}
// Verifies weighted progress aggregation: weights 0.5 and 2.0 (total 2.5)
// mean half-progress on operation1 reports 0.5 * 0.5 / 2.5 = 0.1 overall,
// and its completion reports 0.5 / 2.5 = 0.2.
public void TestWeightedSequentialExecution()
{
    TestOperation operation1 = new TestOperation();
    TestOperation operation2 = new TestOperation();
    OperationQueue<TestOperation> testQueueOperation = new OperationQueue<TestOperation>(
        new WeightedTransaction<TestOperation>[] {
            new WeightedTransaction<TestOperation>(operation1, 0.5f),
            new WeightedTransaction<TestOperation>(operation2, 2.0f)
        }
    );

    IOperationQueueSubscriber mockedSubscriber = mockSubscriber(testQueueOperation);
    testQueueOperation.Start();

    // Expect the weighted 0.1 report before triggering the progress change.
    Expect.Once.On(mockedSubscriber).
        Method("ProgressChanged").
        With(
            new Matcher[] {
                new NMock2.Matchers.TypeMatcher(typeof(OperationQueue<TestOperation>)),
                new ProgressUpdateEventArgsMatcher(new ProgressReportEventArgs(0.1f))
            }
        );

    operation1.ChangeProgress(0.5f);

    // Completing operation1 should report its full weighted share, 0.2.
    Expect.Once.On(mockedSubscriber).
        Method("ProgressChanged").
        With(
            new Matcher[] {
                new NMock2.Matchers.TypeMatcher(typeof(OperationQueue<TestOperation>)),
                new ProgressUpdateEventArgsMatcher(new ProgressReportEventArgs(0.2f))
            }
        );

    operation1.SetEnded();

    this.mockery.VerifyAllExpectationsHaveBeenMet();
}
public void NonkeyedItemsShouldRunInParallel()
{
    // With two free slots, two un-keyed operations start at once.
    var subjectA = new AsyncSubject<int>();
    var subscriptionsA = 0;
    var sourceA = Observable.Defer(() =>
    {
        subscriptionsA++;
        return subjectA;
    });

    var subjectB = new AsyncSubject<int>();
    var subscriptionsB = 0;
    var sourceB = Observable.Defer(() =>
    {
        subscriptionsB++;
        return subjectB;
    });

    var fixture = new OperationQueue(2);

    Assert.Equal(0, subscriptionsA);
    Assert.Equal(0, subscriptionsB);

    fixture.EnqueueObservableOperation(5, () => sourceA);
    fixture.EnqueueObservableOperation(5, () => sourceB);

    Assert.Equal(1, subscriptionsA);
    Assert.Equal(1, subscriptionsB);
}
public void TestEndPropagation()
{
    // The queue only reports Ended once every child operation has ended.
    var first = new TestOperation();
    var second = new TestOperation();
    var queue = new OperationQueue<TestOperation>(new TestOperation[] { first, second });

    queue.Start();
    Assert.IsFalse(queue.Ended);

    first.SetEnded();
    Assert.IsFalse(queue.Ended);

    second.SetEnded();
    Assert.IsTrue(queue.Ended);

    queue.Join();
}
// Increasing maximumConcurrent from 2 to 3 must start exactly one additional
// queued operation, leaving the fourth still waiting.
public void ShouldBeAbleToIncreaseTheMaximunConcurrentValueOfAnExistingQueue()
{
    // Four deferred sources whose subscription counts reveal dispatch.
    var unkeyed1Subj = new AsyncSubject<int>();
    var unkeyed1SubCount = 0;
    var unkeyed1 = Observable.Defer(() =>
    {
        unkeyed1SubCount++;
        return unkeyed1Subj;
    });

    var unkeyed2Subj = new AsyncSubject<int>();
    var unkeyed2SubCount = 0;
    var unkeyed2 = Observable.Defer(() =>
    {
        unkeyed2SubCount++;
        return unkeyed2Subj;
    });

    var unkeyed3Subj = new AsyncSubject<int>();
    var unkeyed3SubCount = 0;
    var unkeyed3 = Observable.Defer(() =>
    {
        unkeyed3SubCount++;
        return unkeyed3Subj;
    });

    var unkeyed4Subj = new AsyncSubject<int>();
    var unkeyed4SubCount = 0;
    var unkeyed4 = Observable.Defer(() =>
    {
        unkeyed4SubCount++;
        return unkeyed4Subj;
    });

    var fixture = new OperationQueue(2);
    Assert.Equal(0, unkeyed1SubCount);
    Assert.Equal(0, unkeyed2SubCount);
    Assert.Equal(0, unkeyed3SubCount);
    Assert.Equal(0, unkeyed4SubCount);

    fixture.EnqueueObservableOperation(5, () => unkeyed1);
    fixture.EnqueueObservableOperation(5, () => unkeyed2);
    fixture.EnqueueObservableOperation(5, () => unkeyed3);
    fixture.EnqueueObservableOperation(5, () => unkeyed4);

    // Only the first two fit within the initial concurrency of 2.
    Assert.Equal(1, unkeyed1SubCount);
    Assert.Equal(1, unkeyed2SubCount);
    Assert.Equal(0, unkeyed3SubCount);
    Assert.Equal(0, unkeyed4SubCount);

    fixture.SetMaximumConcurrent(3);

    // The freed slot dispatches the third; the fourth stays queued.
    Assert.Equal(1, unkeyed1SubCount);
    Assert.Equal(1, unkeyed2SubCount);
    Assert.Equal(1, unkeyed3SubCount);
    Assert.Equal(0, unkeyed4SubCount);
}
public Tapstream newTapstream(OperationQueue queue, string accountName, string secret, Config config)
{
    // Thin factory wrapper around the Config-based Tapstream constructor.
    var instance = new Tapstream(queue, accountName, secret, config);
    return instance;
}
// Lowering maximumConcurrent on a 3-wide queue must prevent new dispatches
// until the number of live operations falls below the new limit.
public void ShouldBeAbleToDecreaseTheMaximunConcurrentValueOfAnExistingQueue()
{
    var subjects = Enumerable.Range(0, 6).Select(x => new AsyncSubject<int>()).ToArray();
    var fixture = new OperationQueue(3);

    // The three at the front are solely to stop up the queue, they get subscribed
    // to immediately.
    var outputs = subjects
        .Select(inp => fixture.EnqueueObservableOperation(5, () => inp).CreateCollection())
        .ToArray();

    // Initially: first three live, last three queued.
    Assert.True(
        new[] { true, true, true, false, false, false, }
            .Zip(subjects, (expected, subj) => new { expected, actual = subj.HasObservers, })
            .All(x => x.expected == x.actual));

    fixture.SetMaximumConcurrent(2);

    // Complete the first one, the last three subjects should still have
    // no observers because we reduced maximum concurrent
    subjects[0].OnNext(42);
    subjects[0].OnCompleted();

    Assert.True(
        new[] { false, true, true, false, false, false, }
            .Zip(subjects, (expected, subj) => new { expected, actual = subj.HasObservers, })
            .All(x => x.expected == x.actual));

    // Complete subj[1], now 2,3 are live
    subjects[1].OnNext(42);
    subjects[1].OnCompleted();

    Assert.True(
        new[] { false, false, true, true, false, false, }
            .Zip(subjects, (expected, subj) => new { expected, actual = subj.HasObservers, })
            .All(x => x.expected == x.actual));
}
public Tapstream newTapstream(OperationQueue queue, string accountName, string secret, string hardware)
{
    // Thin factory wrapper around the hardware-string Tapstream constructor.
    var instance = new Tapstream(queue, accountName, secret, hardware);
    return instance;
}
// Same-keyed operations ("key") must run strictly one at a time even when
// spare concurrency slots exist.
public void KeyedItemsShouldBeSerialized()
{
    var subj1 = new AsyncSubject<int>();
    var subj2 = new AsyncSubject<int>();

    // Deferred inputs count subscriptions so dispatch order is observable.
    var subscribeCount1 = 0;
    var input1Subj = new AsyncSubject<int>();
    var input1 = Observable.Defer(() =>
    {
        subscribeCount1++;
        return input1Subj;
    });

    var subscribeCount2 = 0;
    var input2Subj = new AsyncSubject<int>();
    var input2 = Observable.Defer(() =>
    {
        subscribeCount2++;
        return input2Subj;
    });

    var fixture = new OperationQueue(2);

    // Block up the queue
    foreach (var v in new[] { subj1, subj2, })
    {
        fixture.EnqueueObservableOperation(5, () => v);
    }

    // subj1,2 are live, input1,2 are in queue
    var out1 = fixture.EnqueueObservableOperation(5, "key", Observable.Never<Unit>(), () => input1).CreateCollection();
    var out2 = fixture.EnqueueObservableOperation(5, "key", Observable.Never<Unit>(), () => input2).CreateCollection();
    Assert.Equal(0, subscribeCount1);
    Assert.Equal(0, subscribeCount2);

    // Dispatch both subj1 and subj2, we should end up with input1 live,
    // but input2 in queue because of the key
    subj1.OnNext(42);
    subj1.OnCompleted();
    subj2.OnNext(42);
    subj2.OnCompleted();
    Assert.Equal(1, subscribeCount1);
    Assert.Equal(0, subscribeCount2);
    Assert.Equal(0, out1.Count);
    Assert.Equal(0, out2.Count);

    // Dispatch input1, input2 can now execute
    input1Subj.OnNext(42);
    input1Subj.OnCompleted();
    Assert.Equal(1, subscribeCount1);
    Assert.Equal(1, subscribeCount2);
    Assert.Equal(1, out1.Count);
    Assert.Equal(0, out2.Count);

    // Dispatch input2, everything is finished
    input2Subj.OnNext(42);
    input2Subj.OnCompleted();
    Assert.Equal(1, subscribeCount1);
    Assert.Equal(1, subscribeCount2);
    Assert.Equal(1, out1.Count);
    Assert.Equal(1, out2.Count);
}
public void TestTransparentWrapping()
{
    // Pre-wrapped WeightedTransaction instances must be exposed unchanged,
    // and in their original order, via Children.
    var wrappedFirst = new WeightedTransaction<TestOperation>(
        new TestOperation()
    );
    var wrappedSecond = new WeightedTransaction<TestOperation>(
        new TestOperation()
    );

    var queue = new OperationQueue<TestOperation>(
        new WeightedTransaction<TestOperation>[] { wrappedFirst, wrappedSecond }
    );

    // Order is important due to sequential execution!
    Assert.AreSame(wrappedFirst, queue.Children[0]);
    Assert.AreSame(wrappedSecond, queue.Children[1]);
}
// Queued operations must be dispatched highest-priority-first: with
// priorities {5,5,5,10,1}, the order of completion for the queued tail
// is index 3 (pri 10), then 2 (pri 5), then 4 (pri 1).
public void ItemsShouldBeDispatchedByPriority()
{
    var subjects = Enumerable.Range(0, 5).Select(x => new AsyncSubject<int>()).ToArray();
    var priorities = new[] {5,5,5,10,1,};
    var fixture = new OperationQueue(2);

    // The two at the front are solely to stop up the queue, they get subscribed
    // to immediately.
    var outputs = subjects.Zip(priorities, (inp, pri) =>
            fixture.EnqueueObservableOperation(pri, () => inp).CreateCollection())
        .ToArray();

    // Alright, we've got the first two subjects taking up our two live
    // slots, and 3,4,5 queued up. However, the order of completion should
    // be "4,3,5" because of the priority.
    Assert.True(outputs.All(x => x.Count == 0));

    subjects[0].OnNext(42);
    subjects[0].OnCompleted();
    Assert.Equal(new[] { 1, 0, 0, 0, 0, }, outputs.Select(x => x.Count));

    // 0 => completed, 1,3 => live, 2,4 => queued. Make sure 4 *doesn't* fire because
    // the priority should invert it.
    subjects[4].OnNext(42);
    subjects[4].OnCompleted();
    Assert.Equal(new[] { 1, 0, 0, 0, 0, }, outputs.Select(x => x.Count));

    // At the end, 0,1 => completed, 3,2 => live, 4 is queued
    subjects[1].OnNext(42);
    subjects[1].OnCompleted();
    Assert.Equal(new[] { 1, 1, 0, 0, 0, }, outputs.Select(x => x.Count));

    // At the end, 0,1,2,4 => completed, 3 is live (remember, we completed
    // 4 early)
    subjects[2].OnNext(42);
    subjects[2].OnCompleted();
    Assert.Equal(new[] { 1, 1, 1, 0, 1, }, outputs.Select(x => x.Count));

    subjects[3].OnNext(42);
    subjects[3].OnCompleted();
    Assert.Equal(new[] { 1, 1, 1, 1, 1, }, outputs.Select(x => x.Count));
}
public void TestExceptionPropagation()
{
    // An exception set on the final child operation must surface from Join().
    var first = new TestOperation();
    var second = new TestOperation();
    var queue = new OperationQueue<TestOperation>(new TestOperation[] { first, second });

    queue.Start();
    Assert.IsFalse(queue.Ended);

    first.SetEnded();
    Assert.IsFalse(queue.Ended);

    second.SetEnded(new AbortedException("Hello World"));
    Assert.Throws<AbortedException>(
        delegate() { queue.Join(); }
    );
}
// Items enqueued while the queue is paused stay held until *every* pause
// handle returned by PauseQueue() has been disposed.
public void PausingTheQueueShouldHoldItemsUntilUnpaused()
{
    var item = Observable.Return(42);
    var fixture = new OperationQueue(2);

    var prePauseOutput = new[]
    {
        fixture.EnqueueObservableOperation(4, () => item),
        fixture.EnqueueObservableOperation(4, () => item),
    }.Merge().CreateCollection();

    Assert.Equal(2, prePauseOutput.Count);

    var unpause1 = fixture.PauseQueue();

    // The queue is halted, but we should still eventually process these
    // once it's no longer halted
    var pauseOutput = new[]
    {
        fixture.EnqueueObservableOperation(4, () => item),
        fixture.EnqueueObservableOperation(4, () => item),
    }.Merge().CreateCollection();

    Assert.Equal(0, pauseOutput.Count);

    // A second pause handle: both must be disposed before work resumes.
    var unpause2 = fixture.PauseQueue();
    Assert.Equal(0, pauseOutput.Count);

    unpause1.Dispose();
    Assert.Equal(0, pauseOutput.Count);

    unpause2.Dispose();
    Assert.Equal(2, pauseOutput.Count);
}
public CoreListenerImpl(OperationQueue q)
{
    // Hold on to the queue that listener callbacks will be dispatched onto.
    this.queue = q;
}
// The shutdown observable must fire only after all five operations —
// running and queued alike — have completed.
public void ShutdownShouldSignalOnceEverythingCompletes()
{
    var subjects = Enumerable.Range(0, 5).Select(x => new AsyncSubject<int>()).ToArray();
    var priorities = new[] {5,5,5,10,1,};
    var fixture = new OperationQueue(2);

    // The two at the front are solely to stop up the queue, they get subscribed
    // to immediately.
    var outputs = subjects.Zip(priorities, (inp, pri) =>
            fixture.EnqueueObservableOperation(pri, () => inp).CreateCollection())
        .ToArray();

    var shutdown = fixture.ShutdownQueue().CreateCollection();

    // No values yet, and shutdown has not signaled.
    Assert.True(outputs.All(x => x.Count == 0));
    Assert.Equal(0, shutdown.Count);

    // Complete four of five; one is still pending, so still no shutdown.
    for (int i = 0; i < 4; i++)
    {
        subjects[i].OnNext(42);
        subjects[i].OnCompleted();
    }

    Assert.Equal(0, shutdown.Count);

    // Complete the last one, that should signal that we're shut down
    subjects[4].OnNext(42);
    subjects[4].OnCompleted();

    Assert.True(outputs.All(x => x.Count == 1));
    Assert.Equal(1, shutdown.Count);
}
// With maximumConcurrent = 2, only the first two enqueued operations are
// subscribed; the third stays queued.
public void QueueShouldRespectMaximumConcurrent()
{
    // Deferred sources count subscriptions so dispatch can be observed.
    var unkeyed1Subj = new AsyncSubject<int>();
    var unkeyed1SubCount = 0;
    var unkeyed1 = Observable.Defer(() =>
    {
        unkeyed1SubCount++;
        return unkeyed1Subj;
    });

    var unkeyed2Subj = new AsyncSubject<int>();
    var unkeyed2SubCount = 0;
    var unkeyed2 = Observable.Defer(() =>
    {
        unkeyed2SubCount++;
        return unkeyed2Subj;
    });

    var unkeyed3Subj = new AsyncSubject<int>();
    var unkeyed3SubCount = 0;
    var unkeyed3 = Observable.Defer(() =>
    {
        unkeyed3SubCount++;
        return unkeyed3Subj;
    });

    var fixture = new OperationQueue(2);
    Assert.Equal(0, unkeyed1SubCount);
    Assert.Equal(0, unkeyed2SubCount);
    Assert.Equal(0, unkeyed3SubCount);

    fixture.EnqueueObservableOperation(5, () => unkeyed1);
    fixture.EnqueueObservableOperation(5, () => unkeyed2);
    fixture.EnqueueObservableOperation(5, () => unkeyed3);

    // Two slots filled; the third operation must not have been subscribed.
    Assert.Equal(1, unkeyed1SubCount);
    Assert.Equal(1, unkeyed2SubCount);
    Assert.Equal(0, unkeyed3SubCount);
}