/// <summary>
/// Enqueues a synchronization work item for the given destination, deduplicating against
/// both the pending queue and the currently active synchronizations.
/// </summary>
/// <param name="destination">Destination server the work targets; also the key for the per-destination queue and lock.</param>
/// <param name="workItem">The synchronization work to enqueue.</param>
public void EnqueueSynchronization(string destination, SynchronizationWorkItem workItem)
{
    // Resolve the per-destination lock exactly once and keep it in a local so every
    // Enter/Exit below is guaranteed to operate on the same instance. The factory
    // overload avoids eagerly allocating a ReaderWriterLockSlim (a disposable object
    // owning native wait handles) on every call when one already exists.
    var destinationLock = pendingRemoveLocks.GetOrAdd(destination, _ => new ReaderWriterLockSlim());

    destinationLock.EnterUpgradeableReadLock();
    try
    {
        var pendingForDestination = pendingSynchronizations.GetOrAdd(destination, _ => new ConcurrentQueue<SynchronizationWorkItem>());

        // If delete work is enqueued and there are other synchronization works for the
        // given file, remove them from the queue — the delete supersedes them.
        if (workItem.SynchronizationType == SynchronizationType.Delete &&
            pendingForDestination.Any(x => x.FileName == workItem.FileName && x.SynchronizationType != SynchronizationType.Delete))
        {
            destinationLock.EnterWriteLock();
            try
            {
                // ConcurrentQueue has no removal operation, so rebuild the queue without
                // the superseded items and swap it into the dictionary.
                var modifiedQueue = new ConcurrentQueue<SynchronizationWorkItem>();

                foreach (var pendingWork in pendingForDestination)
                {
                    if (pendingWork.FileName != workItem.FileName)
                        modifiedQueue.Enqueue(pendingWork);
                }

                modifiedQueue.Enqueue(workItem);

                pendingForDestination = pendingSynchronizations.AddOrUpdate(destination, modifiedQueue, (key, value) => modifiedQueue);
            }
            finally
            {
                destinationLock.ExitWriteLock();
            }
        }

        foreach (var pendingWork in pendingForDestination)
        {
            // If this exact work item is already pending, do not add it again.
            if (pendingWork.Equals(workItem))
            {
                Log.Debug("{0} for a file {1} and a destination {2} was already existed in a pending queue",
                          workItem.GetType().Name, workItem.FileName, destination);
                return;
            }

            // If there is pending work of the same type for this file but with a lower file
            // ETag, just refresh the existing work's metadata instead of enqueueing again.
            if (pendingWork.FileName == workItem.FileName &&
                pendingWork.SynchronizationType == workItem.SynchronizationType &&
                Buffers.Compare(workItem.FileETag.ToByteArray(), pendingWork.FileETag.ToByteArray()) > 0)
            {
                pendingWork.RefreshMetadata();
                Log.Debug("{0} for a file {1} and a destination {2} was already existed in a pending queue but with older ETag, it's metadata has been refreshed",
                          workItem.GetType().Name, workItem.FileName, destination);
                return;
            }
        }

        var activeForDestination = activeSynchronizations.GetOrAdd(destination, _ => new ConcurrentDictionary<string, SynchronizationWorkItem>());

        // If the same work is already being performed, do not add it again.
        // TryGetValue avoids the ContainsKey + indexer double lookup of the original.
        SynchronizationWorkItem activeWork;
        if (activeForDestination.TryGetValue(workItem.FileName, out activeWork) && activeWork.Equals(workItem))
        {
            Log.Debug("{0} for a file {1} and a destination {2} was already existed in an active queue",
                      workItem.GetType().Name, workItem.FileName, destination);
            return;
        }

        pendingForDestination.Enqueue(workItem);

        Log.Debug("{0} for a file {1} and a destination {2} was enqueued",
                  workItem.GetType().Name, workItem.FileName, destination);
    }
    finally
    {
        destinationLock.ExitUpgradeableReadLock();
    }
}
/// <summary>
/// Marks the given work as no longer active for the destination by removing it
/// from that destination's active-synchronization map.
/// </summary>
public void SynchronizationFinished(SynchronizationWorkItem work, string destination)
{
    // Nothing to clean up if no active queue was ever created for this destination.
    ConcurrentDictionary<string, SynchronizationWorkItem> activeForDestination;
    if (!activeSynchronizations.TryGetValue(destination, out activeForDestination))
    {
        Log.Warn("Could not get an active synchronization queue for {0}", destination);
        return;
    }

    SynchronizationWorkItem removed;
    var wasRemoved = activeForDestination.TryRemove(work.FileName, out removed);
    if (wasRemoved)
    {
        Log.Debug("File '{0}' with ETag {1} was removed from an active synchronization queue for a destination {2}",
                  work.FileName, work.FileETag, destination);
    }
}
/// <summary>
/// Records the given work as active for the destination so duplicate work can be
/// detected while the synchronization runs.
/// </summary>
public void SynchronizationStarted(SynchronizationWorkItem work, string destination)
{
    var activeForDestination = activeSynchronizations.GetOrAdd(destination, new ConcurrentDictionary<string, SynchronizationWorkItem>());

    // TryAdd is a no-op (and we stay silent) when work for this file is already tracked.
    var added = activeForDestination.TryAdd(work.FileName, work);
    if (added)
    {
        Log.Debug("File '{0}' with ETag {1} was added to an active synchronization queue for a destination {2}",
                  work.FileName, work.FileETag, destination);
    }
}
/// <summary>
/// Returns true when a synchronization for the same file is currently active against
/// the destination but it is not the same work item as <paramref name="work"/>.
/// </summary>
public bool IsDifferentWorkForTheSameFileBeingPerformed(SynchronizationWorkItem work, string destination)
{
    ConcurrentDictionary<string, SynchronizationWorkItem> activeForDestination;
    if (!activeSynchronizations.TryGetValue(destination, out activeForDestination))
        return false;

    SynchronizationWorkItem activeWork;
    if (!activeForDestination.TryGetValue(work.FileName, out activeWork))
        return false;

    // Same file, but a different work item → different work is in flight.
    return !activeWork.Equals(work);
}
/// <summary>
/// Attempts to dequeue the next pending synchronization work item for the destination.
/// </summary>
/// <param name="destination">Destination server whose pending queue is consulted.</param>
/// <param name="workItem">The dequeued work item, or null when none was available.</param>
/// <returns>True when an item was dequeued; false when the queue is missing or empty.</returns>
public bool TryDequePendingSynchronization(string destination, out SynchronizationWorkItem workItem)
{
    // Factory overload: avoid eagerly allocating a ReaderWriterLockSlim (a disposable
    // object owning native wait handles) on every call when the lock already exists.
    var readerWriterLockSlim = pendingRemoveLocks.GetOrAdd(destination, _ => new ReaderWriterLockSlim());

    // Read lock: dequeuing must not interleave with the queue-rebuild done under the
    // write lock in EnqueueSynchronization.
    readerWriterLockSlim.EnterReadLock();
    try
    {
        ConcurrentQueue<SynchronizationWorkItem> pendingForDestination;
        if (pendingSynchronizations.TryGetValue(destination, out pendingForDestination) == false)
        {
            workItem = null;
            return false;
        }

        return pendingForDestination.TryDequeue(out workItem);
    }
    finally
    {
        readerWriterLockSlim.ExitReadLock();
    }
}
/// <summary>
/// Performs a single synchronization work item against a destination server, publishing
/// Start/Finish notifications and always producing a report (failures are captured into
/// the report's Exception rather than thrown).
/// </summary>
/// <param name="destinationUrl">URL of the destination server.</param>
/// <param name="work">The synchronization work to perform.</param>
/// <returns>The synchronization report; on failure its Exception property is set.</returns>
private async Task<SynchronizationReport> PerformSynchronizationAsync(string destinationUrl, SynchronizationWorkItem work)
{
    Log.Debug("Starting to perform {0} for a file '{1}' and a destination server {2}",
              work.GetType().Name, work.FileName, destinationUrl);

    // Destination is at its concurrency limit: bounce the work back onto the pending
    // queue and report it as a (retryable) synchronization failure.
    if (!CanSynchronizeTo(destinationUrl))
    {
        Log.Debug("The limit of active synchronizations to {0} server has been achieved. Cannot process a file '{1}'.",
                  destinationUrl, work.FileName);

        synchronizationQueue.EnqueueSynchronization(destinationUrl, work);

        return new SynchronizationReport(work.FileName, work.FileETag, work.SynchronizationType)
        {
            Exception = new SynchronizationException(string.Format(
                "The limit of active synchronizations to {0} server has been achieved. Cannot process a file '{1}'.",
                destinationUrl, work.FileName))
        };
    }

    string fileName = work.FileName;

    synchronizationQueue.SynchronizationStarted(work, destinationUrl);

    // Notify subscribers that an outgoing synchronization has started.
    publisher.Publish(new SynchronizationUpdate
    {
        FileName = work.FileName,
        DestinationServer = destinationUrl,
        SourceServerId = storage.Id,
        SourceServerUrl = ServerUrl,
        Type = work.SynchronizationType,
        Action = SynchronizationAction.Start,
        SynchronizationDirection = SynchronizationDirection.Outgoing
    });

    SynchronizationReport report;

    try
    {
        report = await work.PerformAsync(destinationUrl);
    }
    catch (Exception ex)
    {
        // Convert any failure into a report so the finish/publish bookkeeping below always runs.
        report = new SynchronizationReport(work.FileName, work.FileETag, work.SynchronizationType)
        {
            Exception = ex,
        };
    }

    var synchronizationCancelled = false;

    if (report.Exception == null)
    {
        var moreDetails = string.Empty;

        if (work.SynchronizationType == SynchronizationType.ContentUpdate)
        {
            // Rejoined: this format string was broken across a physical line in the source.
            moreDetails = string.Format(". {0} bytes were transfered and {1} bytes copied. Need list length was {2}",
                                        report.BytesTransfered, report.BytesCopied, report.NeedListLength);
        }

        Log.Debug("{0} to {1} has finished successfully{2}", work, destinationUrl, moreDetails);
    }
    else
    {
        if (work.IsCancelled || report.Exception is TaskCanceledException)
        {
            synchronizationCancelled = true;
            Log.DebugException(string.Format("{0} to {1} was cancelled", work, destinationUrl), report.Exception);
        }
        else
        {
            Log.WarnException(string.Format("{0} to {1} has finished with the exception", work, destinationUrl), report.Exception);
        }
    }

    // NOTE(review): the enqueue/start paths above use the `synchronizationQueue` field while
    // this line uses `Queue` — presumably a property exposing the same instance; confirm
    // against the declaration and unify if so.
    Queue.SynchronizationFinished(work, destinationUrl);

    // A cancelled synchronization must not record a syncing configuration.
    if (!synchronizationCancelled)
        CreateSyncingConfiguration(fileName, work.FileETag, destinationUrl, work.SynchronizationType);

    publisher.Publish(new SynchronizationUpdate
    {
        FileName = work.FileName,
        DestinationServer = destinationUrl,
        SourceServerId = storage.Id,
        SourceServerUrl = ServerUrl,
        Type = work.SynchronizationType,
        Action = SynchronizationAction.Finish,
        SynchronizationDirection = SynchronizationDirection.Outgoing
    });

    return report;
}