/// <summary>
/// Performs an atomic clear and enqueue of the specified item
/// </summary>
/// <param name="item">The work item that becomes the sole entry in the queue.</param>
public void ReplaceWith(CleanupWorkItem item)
{
    lock (_sync) {
        queue.Clear();
        queue.AddLast(item);
    }
}
/// <summary>
/// Ensures the cache index has an up-to-date listing for the folder referenced by
/// <paramref name="item"/>, querying the file system when the index entry is stale.
/// When <paramref name="recursive"/> is true, queues a recursive populate task for
/// each subfolder (the children run after whatever is already at the queue front).
/// </summary>
/// <param name="item">Folder to populate; RelativePath/PhysicalPath identify it.</param>
/// <param name="recursive">True to also queue PopulateFolderRecursive tasks for subfolders.</param>
private void PopulateFolder(CleanupWorkItem item, bool recursive)
{
    //Do the local work.
    if (!cache.Index.GetIsValid(item.RelativePath)) {
        //Previously the populate call was duplicated in both branches; populate once
        //and only time it when a logger is available.
        Stopwatch sw = logger != null ? Stopwatch.StartNew() : null;
        cache.Index.populate(item.RelativePath, item.PhysicalPath);
        if (sw != null) {
            sw.Stop();
            logger.LogTrace("{0}ms: Querying file system about {1}",
                sw.ElapsedMilliseconds.ToString(NumberFormatInfo.InvariantInfo).PadLeft(4),
                item.RelativePath);
        }
    }
    if (recursive) {
        //Queue the recursive work.
        IList<string> names = cache.Index.getSubfolders(item.RelativePath);
        List<CleanupWorkItem> childWorkItems = new List<CleanupWorkItem>(names.Count);
        foreach (string n in names) {
            childWorkItems.Add(new CleanupWorkItem(CleanupWorkItem.Kind.PopulateFolderRecursive,
                addSlash(item.RelativePath, false) + n,
                addSlash(item.PhysicalPath, true) + n));
        }
        queue.InsertRange(childWorkItems);
    }
}
/// <summary>
/// Dispatches a single work item to the handler matching its Kind, logging the
/// execution time and remaining queue depth when a logger is configured.
/// </summary>
/// <param name="item">The work item to execute.</param>
private void DoTask(CleanupWorkItem item)
{
    Stopwatch sw = null;
    if (logger != null) {
        sw = new Stopwatch();
        sw.Start();
    }

    switch (item.Task) {
        case CleanupWorkItem.Kind.RemoveFile:
            RemoveFile(item);
            break;
        case CleanupWorkItem.Kind.CleanFolderRecursive:
        case CleanupWorkItem.Kind.CleanFolder:
            //BUGFIX: this previously compared against Kind.PopulateFolderRecursive,
            //so the recursive flag was always false for clean tasks.
            CleanFolder(item, item.Task == CleanupWorkItem.Kind.CleanFolderRecursive);
            break;
        case CleanupWorkItem.Kind.PopulateFolderRecursive:
        case CleanupWorkItem.Kind.PopulateFolder:
            PopulateFolder(item, item.Task == CleanupWorkItem.Kind.PopulateFolderRecursive);
            break;
        case CleanupWorkItem.Kind.FlushAccessedDate:
            FlushAccessedDate(item);
            break;
    }

    //sw is non-null whenever logger is non-null (both guarded by the same check).
    if (logger != null) {
        sw.Stop();
        logger.LogTrace("{2}ms: Executing task {0} {1} ({3} tasks remaining)",
            item.Task.ToString(), item.RelativePath,
            sw.ElapsedMilliseconds.ToString(NumberFormatInfo.InvariantInfo).PadLeft(4),
            queue.Count.ToString(NumberFormatInfo.InvariantInfo));
    }
}
/// <summary>
/// Value equality: two work items are equal when Task, RelativePath,
/// PhysicalPath, and LazyProvider all match.
/// NOTE(review): verify GetHashCode is overridden consistently elsewhere in this
/// class — required since items are compared via LinkedList.Contains.
/// </summary>
/// <param name="obj">The object to compare against this work item.</param>
/// <returns>True when <paramref name="obj"/> is an equivalent CleanupWorkItem.</returns>
public override bool Equals(object obj)
{
    //Pattern match replaces the 'as' cast + null check.
    return obj is CleanupWorkItem other
        && other.Task == Task
        && other.RelativePath == RelativePath
        && other.PhysicalPath == PhysicalPath
        && other.LazyProvider == LazyProvider;
}
/// <summary>
/// Removes and returns the first queued item, or null when the queue is empty.
/// (A null head — should one ever be enqueued — is returned without being removed,
/// matching the original behavior.)
/// </summary>
/// <returns>The dequeued item, or null.</returns>
public CleanupWorkItem Pop()
{
    lock (_sync) {
        if (queue.Count == 0) return null;
        CleanupWorkItem head = queue.First.Value;
        if (head != null) queue.RemoveFirst();
        return head;
    }
}
/// <summary>
/// Queues the item if no other identical items exist in the queue.
/// </summary>
/// <param name="item">The work item to append.</param>
/// <returns>True if the item was added; false if an equal item was already queued.</returns>
public bool QueueIfUnique(CleanupWorkItem item)
{
    lock (_sync) {
        //Relies on CleanupWorkItem.Equals for the uniqueness check.
        if (queue.Contains(item)) return false;
        queue.AddLast(item);
        return true;
    }
}
/// <summary>
/// Writes the cached last-accessed timestamp back to the physical file,
/// under a per-path lock. A no-op if the file is no longer indexed.
/// </summary>
/// <param name="item">Identifies the file by RelativePath/PhysicalPath.</param>
public void FlushAccessedDate(CleanupWorkItem item)
{
    CachedFileInfo info = cache.Index.getCachedFileInfo(item.RelativePath);
    if (info == null) return; //File was already deleted, nothing to do.

    try {
        cache.Locks.TryExecute(item.RelativePath.ToUpperInvariant(), 1,
            () => File.SetLastAccessTimeUtc(item.PhysicalPath, info.AccessedUtc));
    } catch (FileNotFoundException) {
        //File vanished between the index lookup and the write — we don't care.
    } catch (UnauthorizedAccessException) {
        //No permission to touch the timestamp — we don't care.
    }
}
/// <summary>
/// Obtains the next deletion candidate from the item's LazyProvider and attempts to
/// delete it under a per-path lock. If deletion fails (file in use, permissions),
/// re-queues the item at the queue front so the next run asks the provider for the
/// next candidate.
/// </summary>
/// <param name="item">Carrier for the LazyProvider; its own paths are unused.</param>
private void RemoveFile(CleanupWorkItem item)
{
    //File names are never embedded into the first item, they are provided on-demand by a LazyProvider
    LazyTaskProvider provider = item.LazyProvider;
    item = provider();
    if (item == null) { return; } //The provider is out of possible items
    item.LazyProvider = provider; //So if this item fails, we can queue 'item' again and the next task run will get the next alternative.

    bool removedFile = false;
    //TryExecute with a 10(ms?) timeout — TODO confirm the unit against cache.Locks.
    cache.Locks.TryExecute(item.RelativePath.ToUpperInvariant(), 10, delegate() {
        //If the file is already gone, consider the mission a success.
        if (!System.IO.File.Exists(item.PhysicalPath)) {
            cache.Index.setCachedFileInfo(item.RelativePath, null);
            removedFile = true;
            return;
        }
        //Cool, we got a lock on the file.
        //Remove it from the cache. Better a miss than an invalidation.
        //(Index entry is cleared BEFORE the delete so a concurrent reader never
        //sees an index hit for a file mid-deletion.)
        cache.Index.setCachedFileInfo(item.RelativePath, null);
        try {
            System.IO.File.Delete(item.PhysicalPath);
        } catch (IOException) {
            return; //The file is in use, or has an open handle. - try the next file.
        } catch (UnauthorizedAccessException) {
            return; //Invalid NTFS permissions or readonly file. - try the next file
        }
        cache.Index.setCachedFileInfo(item.RelativePath, null); //In case it crossed paths.
        removedFile = true;
    });
    //If we didn't remove a file, insert the task back in the queue for the next iteration.
    //(Covers both delete failure and failure to acquire the lock.)
    if (!removedFile) { queue.Insert(item); }
}
/// <summary>
/// Pushes the item onto the front of the queue so it executes before existing work.
/// </summary>
/// <param name="item">The work item to prioritize.</param>
public void Insert(CleanupWorkItem item)
{
    lock (_sync) {
        queue.AddFirst(item);
    }
}
/// <summary>
/// Returns true when an item equal to <paramref name="item"/> is already queued.
/// </summary>
/// <param name="item">The work item to look for (compared via Equals).</param>
/// <returns>True if an equivalent item is present.</returns>
public bool Exists(CleanupWorkItem item)
{
    lock (_sync) {
        return queue.Contains(item);
    }
}
/// <summary>
/// Appends the item to the end of the queue (lowest priority).
/// </summary>
/// <param name="item">The work item to enqueue.</param>
public void Queue(CleanupWorkItem item)
{
    lock (_sync) {
        queue.AddLast(item);
    }
}
/// <summary>
/// Trims the given folder down to the configured item limits. Requires a valid index
/// entry for the folder — otherwise re-queues itself behind a Populate task. For
/// recursive cleans, queues child CleanFolderRecursive tasks first, then queues one
/// RemoveFile task per excess file: 'obsessive' tasks (strict removal criteria) cover
/// the hard MaximumItemsPerFolder overage, 'relaxed' tasks cover the soft
/// TargetItemsPerFolder overage.
/// </summary>
/// <param name="item">Folder to clean; RelativePath/PhysicalPath identify it.</param>
/// <param name="recursive">True to queue a recursive (vs. flat) Populate when the index is stale.</param>
private void CleanFolder(CleanupWorkItem item, bool recursive)
{
    //If we don't have an up-to-date folder level, we can't work..
    if (!cache.Index.GetIsValid(item.RelativePath)) {
        //Put this task back where it was, but with a 'populate/populaterecursive' right before it.
        //We could actually make this Populate non-recursive, since the recursive Clean would just insert Populates beforehand anyway.
        queue.InsertRange(new CleanupWorkItem[] {
            new CleanupWorkItem(recursive ? CleanupWorkItem.Kind.PopulateFolderRecursive : CleanupWorkItem.Kind.PopulateFolder, item.RelativePath, item.PhysicalPath),
            item
        });
        return;
    }
    string baseRelative = addSlash(item.RelativePath, false);
    string basePhysical = addSlash(item.PhysicalPath, true);
    //Ok, it's valid.
    //Queue the recursive work.
    if (item.Task == CleanupWorkItem.Kind.CleanFolderRecursive) {
        IList<string> names = cache.Index.getSubfolders(item.RelativePath);
        List<CleanupWorkItem> childWorkItems = new List<CleanupWorkItem>(names.Count);
        foreach (string n in names) {
            childWorkItems.Add(new CleanupWorkItem(CleanupWorkItem.Kind.CleanFolderRecursive, baseRelative + n, basePhysical + n));
        }
        queue.InsertRange(childWorkItems);
    }
    //Now do the local work
    int files = cache.Index.getFileCount(item.RelativePath);
    //How much are we over?
    //overMax = files beyond the hard cap; overOptimal = remaining files beyond the soft target.
    int overMax = Math.Max(0, files - cs.MaximumItemsPerFolder);
    int overOptimal = Math.Max(0, (files - overMax) - cs.TargetItemsPerFolder);
    if (overMax + overOptimal < 1) {
        return; //nothing to do
    }
    if (overMax > 0) {
        //Record when we last exceeded the hard cap (read elsewhere — presumably to throttle/schedule cleanup; confirm).
        lock (_timesLock) lastFoundItemsOverMax = DateTime.UtcNow.Ticks;
    }
    //Make a linked list, like a queue of files.
    //NOTE: this snapshot is shared (captured) by both lazy providers below, so each
    //RemoveFile task consumes candidates from the same list exactly once.
    LinkedList<KeyValuePair<string, CachedFileInfo>> sortedList = new LinkedList<KeyValuePair<string, CachedFileInfo>>(
        cache.Index.getSortedSubfiles(item.RelativePath));
    //This callback will execute (overMax) number of times
    //'true' = strict mode for ShouldRemove: enforcing the hard cap.
    CleanupWorkItem obsessive = new CleanupWorkItem(CleanupWorkItem.Kind.RemoveFile, delegate() {
        //Pop the next item
        KeyValuePair<string, CachedFileInfo> file;
        while (sortedList.Count > 0) {
            file = sortedList.First.Value;
            sortedList.RemoveFirst();
            if (cs.ShouldRemove(baseRelative + file.Key, file.Value, true)) {
                return(new CleanupWorkItem(CleanupWorkItem.Kind.RemoveFile, baseRelative + file.Key, basePhysical + file.Key));
            }
        }
        return(null); //No matching items left.
    });
    //'false' = lenient mode for ShouldRemove: trimming toward the soft target.
    CleanupWorkItem relaxed = new CleanupWorkItem(CleanupWorkItem.Kind.RemoveFile, delegate() {
        //Pop the next item
        KeyValuePair<string, CachedFileInfo> file;
        while (sortedList.Count > 0) {
            file = sortedList.First.Value;
            sortedList.RemoveFirst();
            if (cs.ShouldRemove(baseRelative + file.Key, file.Value, false)) {
                return(new CleanupWorkItem(CleanupWorkItem.Kind.RemoveFile, baseRelative + file.Key, basePhysical + file.Key));
            }
        }
        return(null); //No matching items left.
    });
    //The 'obsessive' ones must be processed first, thus added last.
    //(queue.Insert adds at the front, so later inserts run earlier.)
    for (int i = 0; i < overOptimal; i++) {
        queue.Insert(relaxed);
    }
    for (int i = 0; i < overMax; i++) {
        queue.Insert(obsessive);
    }
}