// Verifies that the SaveQueue and UploadQueue process tasks concurrently:
// the save task blocks mid-way and can only finish after the upload task
// has run, so the observed order 1, 3, 2 is impossible without parallelism.
public void ThreadingTasks_SaveQueue_RunsInParallelWith_UploadQueue()
{
    var saveMayStart = new SemaphoreSlim(0);
    var uploadMayRun = new SemaphoreSlim(0);
    var saveMayFinish = new SemaphoreSlim(0);
    var observedOrder = new ConcurrentQueue<int>();

    ThreadingManager.QueueTask(ThreadingManager.ThreadQueues.SaveQueue, (_) =>
    {
        saveMayStart.Wait();
        observedOrder.Enqueue(1);
        uploadMayRun.Release();
        saveMayFinish.Wait();
        observedOrder.Enqueue(2);
    }, null);

    ThreadingManager.QueueTask(ThreadingManager.ThreadQueues.UploadQueue, (_) =>
    {
        uploadMayRun.Wait();
        observedOrder.Enqueue(3);
        saveMayFinish.Release();
    }, null);

    // Give both queues a moment to pick up their tasks before unblocking.
    Thread.Sleep(100);
    saveMayStart.Release();
    ThreadingManager.WaitForOutstandingTasks();

    Assert.AreEqual(3, observedOrder.Count);
    // The interleaving 1, 3, 2 proves the two queues ran in parallel.
    CollectionAssert.AreEqual(new[] { 1, 3, 2 }, observedOrder.ToArray());
}
// Verifies that a pending PruneQueue task gates the SaveQueue and the
// UploadQueue: neither save nor upload work may run until the prune
// task has completed.
public void WhenPruneTaskActive_SaveAndUploadTasksWaitForPruneCompletion()
{
    var pruneMayRun = new SemaphoreSlim(0);
    var observedOrder = new ConcurrentQueue<int>();

    ThreadingManager.QueueTask(ThreadingManager.ThreadQueues.PruneQueue, (_) =>
    {
        pruneMayRun.Wait();
        observedOrder.Enqueue(1);
    }, null);
    ThreadingManager.QueueTask(ThreadingManager.ThreadQueues.SaveQueue, (_) => { observedOrder.Enqueue(2); }, null);
    ThreadingManager.QueueTask(ThreadingManager.ThreadQueues.UploadQueue, (_) => { observedOrder.Enqueue(3); }, null);

    // Give all three queues time to pick up their tasks before the prune is unblocked.
    Thread.Sleep(100);
    pruneMayRun.Release();
    ThreadingManager.WaitForOutstandingTasks();

    Assert.AreEqual(3, observedOrder.Count);
    // The prune task must have completed first...
    Assert.IsTrue(observedOrder.TryDequeue(out int first));
    Assert.AreEqual(1, first);
    // ...while the save and upload tasks may finish in either order.
    CollectionAssert.AreEquivalent(new[] { 2, 3 }, observedOrder.ToArray());
}
// Verifies that two tasks queued on the same queue execute sequentially:
// the second task must not start until the first has fully completed.
// `queue` is boxed so the test can be parameterized over every queue kind.
// Fix: removed the local `SemaphoreSlim s2`, which was created but never
// waited on or released (dead local, and SemaphoreSlim is IDisposable).
public void ThreadingTasks_SameQueues_ExecuteSequentially(object queue)
{
    var targetQueue = (ThreadingManager.ThreadQueues)queue;
    SemaphoreSlim firstMayRun = new SemaphoreSlim(0);
    ConcurrentQueue<int> callOrder = new ConcurrentQueue<int>();

    ThreadingManager.QueueTask(targetQueue, (_) =>
    {
        firstMayRun.Wait();
        callOrder.Enqueue(1);
        // Stay busy long enough that an overlapping second task would
        // interleave its enqueue between 1 and 2.
        Thread.Sleep(50);
        callOrder.Enqueue(2);
    }, null);
    ThreadingManager.QueueTask(targetQueue, (_) => { callOrder.Enqueue(3); }, null);

    // Give the queue time to pick up both tasks before unblocking the first.
    Thread.Sleep(100);
    firstMayRun.Release();
    ThreadingManager.WaitForOutstandingTasks();

    Assert.AreEqual(3, callOrder.Count);
    // Strict order 1, 2, 3 proves the second task waited for the first.
    CollectionAssert.AreEqual(new[] { 1, 2, 3 }, callOrder.ToArray());
}
/// <inheritdoc />
// Serializes each CachedInfo and hands the batch to a background save task
// (and, when a cache-server uploader is configured, queues an upload per
// entry). Producer/consumer: the Write task is queued first, then this
// thread serializes entry `index` and releases ops.waitLock once per entry
// so the writer can drain them as they become ready.
public void SaveCachedData(IList<CachedInfo> infos)
{
    // Nothing to do for a null or empty batch.
    if (infos == null || infos.Count == 0)
    {
        return;
    }
    using (m_Logger.ScopedStep(LogLevel.Info, "SaveCachedData"))
    {
        m_Logger.AddEntrySafe(LogLevel.Info, $"Saving {infos.Count} infos");
        // Setup Operations
        var ops = new FileOperations(infos.Count);
        using (m_Logger.ScopedStep(LogLevel.Info, "SetupOperations"))
        {
            // Precompute the target file path for each entry; the payload
            // (op.bytes) is filled in below as each info is serialized.
            for (int i = 0; i < infos.Count; i++)
            {
                var op = ops.data[i];
                op.file = GetCachedInfoFile(infos[i].Asset);
                ops.data[i] = op;
            }
        }
        // Start writing thread
        ThreadingManager.QueueTask(ThreadingManager.ThreadQueues.SaveQueue, Write, ops);
        using (m_Logger.ScopedStep(LogLevel.Info, "SerializingCacheInfos"))
        {
            // Serialize data as previous data is being written out
            var formatter = new BinaryFormatter();
            // NOTE(review): waitLock.Release() sits in the for-increment, so it
            // fires once per entry even when serialization throws — presumably
            // the Write task skips entries whose bytes were never assigned;
            // confirm against FileOperations/Write. Also note BinaryFormatter
            // is deprecated and removed in .NET 9 — flagging, not changing.
            for (int index = 0; index < infos.Count; index++, ops.waitLock.Release())
            {
                try
                {
                    var op = ops.data[index];
                    var stream = new MemoryStream();
                    formatter.Serialize(stream, infos[index]);
                    if (stream.Length > 0)
                    {
                        op.bytes = stream;
                        ops.data[index] = op;
                        // If we have a cache server connection, upload the cached data.
                        // The read-only MemoryStream wraps the same buffer, avoiding a copy.
                        if (m_Uploader != null)
                        {
                            m_Uploader.QueueUpload(infos[index].Asset, GetCachedArtifactsDirectory(infos[index].Asset), new MemoryStream(stream.GetBuffer(), false));
                        }
                    }
                }
                catch (Exception e)
                {
                    // Best-effort: a failed entry is logged and skipped so the
                    // remaining infos are still saved (waitLock is still released
                    // for it via the for-increment above).
                    BuildLogger.LogException(e);
                }
            }
        }
    }
}
// We return from this function before all uploads are complete, so the caller
// must not dispose shared state until all queued uploads have finished.
public void QueueUpload(CacheEntry entry, string artifactsPath, MemoryStream stream)
{
    // Key the upload by the entry hash combined with the global hash, then
    // hand the work item to the background upload queue.
    string combinedHash = HashingMethods.Calculate(entry.Hash, m_GlobalHash).ToString();
    var item = new WorkItem
    {
        fileId = FileId.From(entry.Guid.ToString(), combinedHash),
        artifactsPath = artifactsPath,
        stream = stream,
    };
    ThreadingManager.QueueTask(ThreadingManager.ThreadQueues.UploadQueue, UploadItem, item);
}