// Verifies that Remove succeeds for every word known to be in the set, and that
// the word can immediately be re-added (i.e. Remove really took it out).
public void ConcurrentHashSetRemoveRemovesIfSetContainsItem()
{
    foreach (var word in _validWords)
    {
        // Remove must report success, and TryAdd must then succeed again.
        Assert.IsTrue(_hashSet.Remove(word) && _hashSet.TryAdd(word));
    }
}
/// <summary>
/// Evicts the cached entry for the given entity and drops its key from the tracker.
/// Uses the injected memory cache when available, otherwise the ASP.NET runtime cache.
/// </summary>
public void Delete(Type type, IEntity entity)
{
    var cacheKey = GetCompositeId(type, entity.Id);

    if (_memoryCache == null)
    {
        HttpRuntime.Cache.Remove(cacheKey);
    }
    else
    {
        _memoryCache.Remove(cacheKey);
    }

    _keyTracker.Remove(cacheKey);
}
/// <summary>
/// Seeks all shortest paths from <paramref name="start"/> to <paramref name="target"/>
/// within the given word <paramref name="source"/> set.
/// </summary>
/// <param name="start">Word the search begins from; removed from the source set up front.</param>
/// <param name="target">Word the search tries to reach.</param>
/// <param name="source">Candidate words; always disposed when the search finishes.</param>
/// <param name="cancellationToken">Not consumed by this method body; kept for interface compatibility.</param>
/// <returns>A stack of word groups describing the found paths; empty when the target is unreachable.</returns>
public virtual async Task<Stack<ICollection<string>>> Seek(string start, string target, ConcurrentHashSet<string> source, CancellationToken cancellationToken = default)
{
    try
    {
        source.Remove(start);
        _log.LogDebug("Started seeking with start {start} and target {target}.", start, target);

        var similarityGroups = new ConcurrentDictionary<string, ICollection<string>>();
        bool foundTarget = await SeekTarget(start, target, source, similarityGroups);

        var researchResult = new Stack<ICollection<string>>();
        if (!foundTarget)
        {
            _log.LogDebug("Target was not found. No results to be shown.");
            return researchResult;
        }

        _log.LogDebug("Target found. Getting all paths next.");
        var paths = new List<string> { start };
        SeekAllShortestPaths(start, target, similarityGroups.ToDictionary(kv => kv.Key, kv => kv.Value.ToList()), researchResult, paths);
        _log.LogDebug("Finalized seek.");
        return researchResult;
    }
    finally
    {
        // BUGFIX: Dispose() was previously duplicated on both return paths and skipped
        // entirely when SeekTarget/SeekAllShortestPaths threw; the finally block
        // guarantees the set is released on every exit.
        source.Dispose();
    }
}
// Concurrency smoke test: add 50,000 items in parallel, remove the even half in
// parallel, then confirm exactly 25,000 remain by enumerating the set.
public void Remove_Success()
{
    var hashSet = new ConcurrentHashSet<string>();

    Parallel.For(0, 50000, i => hashSet.Add(i.ToString()));
    Parallel.For(0, 50000, i =>
    {
        if (i % 2 == 0)
        {
            hashSet.Remove(i.ToString());
        }
    });

    // Count via enumeration (not the Count property) to also exercise the enumerator.
    var remaining = 0;
    foreach (var _ in hashSet)
    {
        remaining++;
    }

    Assert.AreEqual(25000, remaining);
}
/// <summary>
/// Looks up a cached entity by type and id. Returns null on a cache miss (or when the
/// cached value is not an IEntity), in which case the key is also purged from the tracker.
/// </summary>
public IEntity GetById(Type type, Guid id)
{
    var cacheKey = GetCompositeId(type, id);

    object cached;
    if (_memoryCache != null)
    {
        cached = _memoryCache.Get(cacheKey);
    }
    else
    {
        cached = HttpRuntime.Cache.Get(cacheKey);
    }

    if (cached is IEntity entity)
    {
        return entity;
    }

    // Miss: ensure the key doesn't linger in the tracker.
    _keyTracker.Remove(cacheKey);
    return null;
}
/// <summary>
/// Drives every public API of the ConcurrentHashSet once so instrumentation tests can
/// observe each call; all return values are discarded.
/// </summary>
/// <param name="hashSet">Set under test; cleared before returning.</param>
/// <param name="numbersToAdd">Values to add; must be non-empty (First() is used).</param>
private static void ExerciseFullApi(ConcurrentHashSet<int> hashSet, int[] numbersToAdd)
{
    foreach (var number in numbersToAdd)
    {
        hashSet.Add(number);
    }

    // Generic enumerator. The original tracked an 'index' counter that was never
    // incremented, so the loop bound was effectively MoveNext() alone; the dead
    // counter and the 'dynamic _' discard variable have been removed.
    var genericEnumerator = hashSet.GetEnumerator();
    while (genericEnumerator.MoveNext())
    {
        _ = genericEnumerator.Current;
    }

    // Non-generic enumerator.
    var nongenericEnumerator = ((IEnumerable)hashSet).GetEnumerator();
    while (nongenericEnumerator.MoveNext())
    {
        _ = nongenericEnumerator.Current;
    }

    _ = hashSet.Count;

    var destinationArray = new int[500];
    hashSet.CopyTo(destinationArray, 0);

    _ = hashSet.Contains(numbersToAdd.First());
    hashSet.Remove(numbersToAdd.First());
    hashSet.Clear();
}
/// <summary>
/// Removes the manager (wrapped in the local Manager type) from the account list,
/// raises the removal event, then delegates to the base implementation.
/// </summary>
public override void RemoveManager(IManager manager)
{
    var wrapped = new Manager(manager);
    Accounts.Remove(wrapped);
    OnManagerRemoved(this, wrapped);
    base.RemoveManager(manager);
}
// Handles a closed FTP connection: unregisters it and decrements the
// active-connection statistic. No-op once the server has been stopped.
private void ConnectionOnClosed(object sender, EventArgs eventArgs)
{
    if (Stopped)
    {
        return;
    }

    _connections.Remove((FtpConnection)sender);
    Statistics.ActiveConnections -= 1;
}
// Removing an element that was never added must leave the set untouched.
public void ConcurrentHashSetDoesNotRemoveItemIfUnderlyingSetDoesNotContainItem()
{
    var hobbits = new List<string> { "Frodo", "Sam", "Merry", "Pippin" };
    var set = new ConcurrentHashSet<string>(hobbits);

    set.Remove("Gandalf");

    Assert.AreEqual(4, set.Count);
}
// Remove must report failure when the element is absent from the set.
public void ConcurrentHashSetRemoveReturnsFalseIfUnderlyingSetDidNotContainItem()
{
    var hobbits = new List<string> { "Frodo", "Sam", "Merry", "Pippin" };
    var set = new ConcurrentHashSet<string>(hobbits);

    bool removed = set.Remove("Gandalf");

    Assert.IsFalse(removed);
}
// Single-threaded smoke test covering the basic ConcurrentHashSet API in one pass:
// Add, generic and non-generic enumeration, Count, CopyTo, Contains, Remove, and
// Clear, comparing each result against the distinct input numbers.
// NOTE(review): the enumeration asserts rely on iteration order matching the
// insertion order of the distinct inputs - presumably guaranteed by this set
// implementation; confirm before porting to a different set type.
// Code left byte-identical; see section comments inline.
public void ConcurrentHashSet_FunctionsAsNormalHashSet_ForSingleThreadedAccess(params int[] numbersToAdd) { // Because we're not doing anything interesting with the hashset itself, it seems reasonable to just wrap all of the basic hashset API tests into one test var distinctNumbers = numbersToAdd.Distinct().ToList(); // Add foreach (var number in numbersToAdd) { _concurrentHashSet.Add(number); } // GetEnumerator<T> var index = 0; var genericEnumerator = _concurrentHashSet.GetEnumerator(); while (index < numbersToAdd.Length && genericEnumerator.MoveNext()) { Assert.AreEqual(distinctNumbers[index++], genericEnumerator.Current); } Assert.AreEqual(distinctNumbers.Count, index); // GetEnumerator index = 0; var nongenericEnumerator = ((IEnumerable)_concurrentHashSet).GetEnumerator(); while (index < numbersToAdd.Length && nongenericEnumerator.MoveNext()) { Assert.AreEqual(distinctNumbers[index++], nongenericEnumerator.Current); } Assert.AreEqual(distinctNumbers.Count, index); // Count Assert.AreEqual(_concurrentHashSet.Count, distinctNumbers.Count); // CopyTo var destinationArray = new int[distinctNumbers.Count]; _concurrentHashSet.CopyTo(destinationArray, 0); Assert.True(distinctNumbers.SequenceEqual(destinationArray)); // Contains Assert.True(distinctNumbers.All(_concurrentHashSet.Contains)); // Remove _concurrentHashSet.Remove(distinctNumbers.First()); Assert.False(_concurrentHashSet.Contains(distinctNumbers.First())); // Clear _concurrentHashSet.Clear(); Assert.AreEqual(0, _concurrentHashSet.Count); Assert.False(distinctNumbers.Any(_concurrentHashSet.Contains)); }
/// <summary>
/// Seeds <c>DownlaodedSet</c> with the info-hashes of already-downloaded torrents:
/// first from the torrent files on disk (breadth-first directory walk), then from the
/// download-info log file. A linked list caps tracked hashes at 50,000, evicting the
/// oldest entries first.
/// </summary>
private static void LoadDownInfoHash()
{
    var limitList = new LinkedList<string>();

    // Remember a hash, evicting the oldest tracked hash once the cap is reached.
    // BUGFIX: the original second loop used AddFirst() and then evicted First -
    // i.e. it immediately removed the hash it had just added, so log entries never
    // survived once the cap was hit. Both paths now share this oldest-out eviction.
    void Track(string hash)
    {
        DownlaodedSet.Add(hash);
        limitList.AddLast(hash);
        if (limitList.Count >= 50000)
        {
            DownlaodedSet.Remove(limitList.First.Value);
            limitList.RemoveFirst();
        }
    }

    // Breadth-first walk of the torrent directory tree.
    var queue = new Queue<string>(new[] { TorrentPath });
    while (queue.Count > 0)
    {
        var dir = queue.Dequeue();
        foreach (var directory in Directory.GetDirectories(dir))
        {
            queue.Enqueue(directory);
        }
        foreach (var file in Directory.GetFiles(dir))
        {
            // The file name (without extension) is the torrent's info-hash.
            Track(Path.GetFileNameWithoutExtension(file));
        }
    }

    if (!File.Exists(DownloadInfoPath))
    {
        return;
    }

    foreach (var line in File.ReadAllLines(DownloadInfoPath))
    {
        Track(line);
    }
}
// Completion callback for a scheduled job: fires the success or error handler on the
// thread pool, unregisters the finished task, and schedules the job's next run.
private void jobEnd(Task jobTask, ScheduledJob job)
{
    if (jobTask.Exception == null)
    {
        Task.Run(() => _onSuccess?.Invoke(new ScheduledJobEventArgs(job)));
    }
    else
    {
        // Unwrap AggregateException so handlers see the underlying failure.
        var error = jobTask.Exception is AggregateException
            ? jobTask.Exception.InnerException
            : jobTask.Exception;
        Task.Run(() => _onJobError?.Invoke(new ScheduledJobEventArgs(job), error));
    }

    _tasks.Remove(jobTask);
    setNextExecution(job);
}
// Stress test: 10,000 concurrent add/remove pairs over three distinct keys must leave
// the set empty, because every task's Remove happens after its own Add - so the
// chronologically last operation on each key is always a Remove.
public async Task ConcurrentHashSetAddRemove()
{
    var set = new ConcurrentHashSet<int>();

    async Task AddRemove(int value)
    {
        set.Add(value);
        await Task.Delay(10);
        set.Remove(value);
    }

    var tasks = Enumerable.Range(0, 10000).Select(i => AddRemove(i % 3)).ToArray();
    await Task.WhenAll(tasks);

    Assert.AreEqual(0, set.Count);
}
// Drops a plugin from the compilation queue and marks it failed. Dependencies that were
// only queued to satisfy this plugin's requirements are removed recursively.
private void RemovePlugin(CompilablePlugin plugin)
{
    // Plugins that were never compiled are not tracked here.
    if (plugin.LastCompiledAt == default(DateTime))
    {
        return;
    }

    queuedPlugins.Remove(plugin);
    plugins.Remove(plugin);
    plugin.OnCompilationFailed();

    // Snapshot (ToArray) because the recursive RemovePlugin mutates 'plugins' while we iterate.
    var passiveDependencies = plugins
        .Where(pl => !pl.IsCompilationNeeded && plugin.Requires.Contains(pl.Name))
        .ToArray();

    foreach (var dependency in passiveDependencies)
    {
        // Keep the dependency if any remaining plugin still requires it.
        bool stillRequired = plugins.Any(pl => pl.Requires.Contains(dependency.Name));
        if (!stillRequired)
        {
            RemovePlugin(dependency);
        }
    }
}
/// <summary>
/// Returns true the first time a key is seen within its throttle window, false while the
/// key is still throttled. When <paramref name="milliseconds"/> is positive the key is
/// released after that delay; a non-positive window never registers the key.
/// (Method name typo "Thottle" kept - callers depend on it.)
/// </summary>
private static bool ThottleInternal(object key, int milliseconds = 1000)
{
    if (milliseconds <= 0)
    {
        // Non-positive window: nothing is registered, only report the current state
        // (matches the original behavior for this branch).
        return !ThrottleKeys.Contains(key);
    }

    // BUGFIX: the original Contains()-then-Add() sequence let two concurrent callers
    // both pass the throttle. Add() is a single atomic claim instead.
    // NOTE(review): assumes ThrottleKeys.Add returns false when the key is already
    // present, as the ConcurrentHashSet used elsewhere in this file does (see TestAll).
    if (!ThrottleKeys.Add(key))
    {
        return false;
    }

    Task.Delay(milliseconds).ContinueWith(_ => { ThrottleKeys.Remove(key); });
    return true;
}
// Spawns one long-running torrent-metadata download worker, unless DownTaskList is
// already at MaxDownTaskNum. The worker loop, while 'running':
//   1. takes an info-hash from DownLoadQueue, refilling the queue from InfoStore
//      (double-checked under the InfoStore lock) when it runs dry;
//   2. if still empty, waits up to DownTimeOutSeconds and retires this worker when
//      more than MinDownTaskNum workers remain;
//   3. claims the hash via DownlaodingSet.Add so concurrent workers skip it; on
//      contention it re-queues the item and sleeps 10-100ms (see the Chinese comment:
//      the wait avoids immediately re-competing for the same item). Hashes already in
//      DownlaodedSet are skipped;
//   4. asks each public peer for the torrent metadata over WireClient; peers that fail
//      with a network error are remembered in BadAddress for one day; on success the
//      torrent JSON is written and the download log appended under writeLock, then the
//      peer loop stops;
//   5. always releases the hash from DownlaodingSet in 'finally' when it claimed it.
// The task removes itself from DownTaskList on completion.
// NOTE(review): the intricate claim/retry/locking flow is left byte-identical below.
private static void StartDownTorrent() { if (DownTaskList.Count >= MaxDownTaskNum) { return; } Task downTask = Task.Factory.StartNew(() => { var random = new Random(); var localSize = Math.Min(MaxDownTaskNum, DownLoadQueue.BoundedCapacity - DownLoadQueue.Count); while (running) { try { if (!DownLoadQueue.TryTake(out var info)) { lock (InfoStore) { if (!DownLoadQueue.TryTake(out info)) { var infos = InfoStore.ReadLast(localSize); if (infos.Count > 0) { for (int i = 0; i < infos.Count - 1; i++) { var item = infos[i]; if (!DownLoadQueue.TryAdd(item)) { InfoStore.Add(item); } } info = infos.Last(); } } } } if (info == null) { if (!DownLoadQueue.TryTake(out info, DownTimeOutSeconds)) { if (DownTaskList.Count > MinDownTaskNum) { return; } continue; } } var isFirst = false; try { isFirst = DownlaodingSet.Add(info.Value); if (!isFirst) { DownInfoEnqueue(info); Thread.Sleep(random.Next(10, 100));//添加等待时间,防止入队以后依旧竞争同一个 continue; } if (DownlaodedSet.Contains(info.Value)) { continue; } watchLog.Info($"thread {Task.CurrentId:x2} downloading {info.Value}"); foreach (var peer in info.Peers.Where(p => p.Address.IsPublic())) { if (DownlaodedSet.Contains(info.Value)) { break; } var longPeer = peer.ToInt64(); try { if (BadAddress.TryGetValue(longPeer, out var expireTime)) { if (expireTime > DateTime.Now) { continue; } BadAddress.TryRemove(longPeer, out expireTime); } using (var client = new WireClient(peer)) { var meta = client.GetMetaData(new global::BitTorrent.InfoHash(info.Bytes), out var netError); if (meta == null) { if (netError) { BadAddress.AddOrUpdate(longPeer, DateTime.Now.AddDays(1), (ip, before) => DateTime.Now.AddDays(1)); } continue; } DownlaodedSet.Add(info.Value); var torrent = ParseBitTorrent(meta); torrent.InfoHash = info.Value; var subdirectory = TorrentDirectory.CreateSubdirectory(DateTime.Now.ToString("yyyy-MM-dd")); var path = Path.Combine(subdirectory.FullName, torrent.InfoHash + ".json"); var hasLock = false; writeLock.Enter(ref hasLock); try { 
File.WriteAllText(Path.Combine(TorrentPath, path), torrent.ToJson()); File.AppendAllText(DownloadInfoPath, torrent.InfoHash + Environment.NewLine); } finally { if (hasLock) { writeLock.Exit(false); } } watchLog.Info($"download {torrent.InfoHash} success"); } break; } catch (SocketException) { BadAddress.AddOrUpdate(longPeer, DateTime.Now.AddDays(1), (ip, before) => DateTime.Now.AddDays(1)); } catch (IOException) { BadAddress.AddOrUpdate(longPeer, DateTime.Now.AddDays(1), (ip, before) => DateTime.Now.AddDays(1)); } catch (Exception ex) { log.Error("下载失败", ex); } } } finally { if (isFirst) { DownlaodingSet.Remove(info.Value); } } } catch (Exception ex) { log.Error("并行下载时错误", ex); } } }, TaskCreationOptions.LongRunning); downTask.ContinueWith(t => DownTaskList.Remove(t)); DownTaskList.Add(downTask); }
// Worker that drains 'itemsToInstall' and installs each item until completion or
// cancellation. In-flight ids are tracked in CurrentlyProcessing, and Finalize is
// called (guarded by _locker and the Completed flag) when the queue is exhausted and
// nothing is mid-flight. BulkUpdateContext / EventDisabler are created per the args
// flags and disposed in 'finally'; OperationCanceledException marks the run cancelled
// and still finalizes. Runs under SecurityDisabler for the whole loop.
// NOTE(review): the _locker / Completed / CurrentlyProcessing interplay is delicate
// and is left byte-identical below.
private async Task ItemInstaller(PullItemModel args, BlockingCollection <IItemData> itemsToInstall, CancellationToken cancellationToken) { Thread.CurrentThread.Priority = ThreadPriority.Lowest; BulkUpdateContext bu = null; EventDisabler ed = null; try { if (args.BulkUpdate) { bu = new BulkUpdateContext(); } if (args.EventDisabler) { ed = new EventDisabler(); } using (new SecurityDisabler()) { while (!Completed) { IItemData remoteData; if (!itemsToInstall.TryTake(out remoteData, int.MaxValue, cancellationToken)) { lock (_locker) { if (!Completed && !CurrentlyProcessing.Any()) { Finalize(ItemsInstalled, args); } } break; } CurrentlyProcessing.Add(remoteData.Id); IItemData localData = _sitecore.GetItemData(remoteData.Id); await ProcessItem(args, localData, remoteData).ConfigureAwait(false); lock (_locker) { ItemsInstalled++; CurrentlyProcessing.Remove(remoteData.Id); if (CurrentlyProcessing.Any() || !itemsToInstall.IsAddingCompleted || itemsToInstall.Count != 0) { continue; } if (!Completed) { Finalize(ItemsInstalled, args); } } } } } catch (OperationCanceledException e) { Log.Warn("Content migration operation was cancelled", e, this); Status.Cancelled = true; lock (_locker) { if (!Completed) { Finalize(ItemsInstalled, args); } } } catch (Exception e) { Log.Error("Catastrophic error when installing items", e, this); } finally { if (args.BulkUpdate) { bu?.Dispose(); } if (args.EventDisabler) { ed?.Dispose(); } } }
// Installs (or, in preview mode, only logs) a single pulled item.
// Preview: logs the comparer verdict (Skipped/Moved/Renamed/TemplateChange/Changed/
// Created) without saving. Install: may skip (no overwrite, or contents equal); for
// new items not using the item blaster it busy-waits while the parent id is still in
// CurrentlyProcessing, aborting if the parent errored; then either saves through
// _scDatastore or stages the item for the Data Blaster (args.UseItemBlaster).
// Failures are recorded in 'Errors' and logged as ErrorItemData. Afterwards, events
// with supporting lines are completed; others get a Skipped event.
// NOTE(review): the wait uses Task.Delay(...).Wait() - a synchronous block on this
// thread; left as-is since this variant is not async.
internal void ProcessItem(PullItemModel args, IItemData localData, IItemData remoteData) { AllowedItems.Remove(remoteData.Id); if (args.Preview) { if (localData != null) { var results = _comparer.Compare(remoteData, localData); if (results.AreEqual) { _logger.BeginEvent(remoteData, LogStatus.Skipped, GetSrc(_sitecore.GetIconSrc(localData)), false); } else if (results.IsMoved) { _logger.BeginEvent(remoteData, LogStatus.Moved, GetSrc(_sitecore.GetIconSrc(localData)), false); } else if (results.IsRenamed) { _logger.BeginEvent(remoteData, LogStatus.Renamed, GetSrc(_sitecore.GetIconSrc(localData)), false); } else if (results.IsTemplateChanged) { _logger.BeginEvent(remoteData, LogStatus.TemplateChange, GetSrc(_sitecore.GetIconSrc(localData)), false); } else if (args.Overwrite) { _logger.BeginEvent(remoteData, LogStatus.Changed, GetSrc(_sitecore.GetIconSrc(localData)), false); } else { _logger.BeginEvent(remoteData, LogStatus.Skipped, GetSrc(_sitecore.GetIconSrc(localData)), false); } } else { _logger.BeginEvent(remoteData, LogStatus.Created, "", false); } } else { bool skip = false; if (!args.Overwrite && localData != null) { _logger.BeginEvent(remoteData, LogStatus.Skipped, GetSrc(_sitecore.GetIconSrc(localData)), false); skip = true; } if (!skip && localData != null) { var results = _comparer.Compare(remoteData, localData); if (results.AreEqual) { _logger.BeginEvent(remoteData, LogStatus.Skipped, GetSrc(_sitecore.GetIconSrc(localData)), false); skip = true; } } else if (!skip && !args.UseItemBlaster) { while (CurrentlyProcessing.Contains(remoteData.ParentId)) { if (Errors.Contains(remoteData.ParentId)) { Errors.Add(remoteData.Id); skip = true; break; } Task.Delay(WaitForParentDelay).Wait(); } } if (!skip) { try { if (localData != null || !args.UseItemBlaster) { _logger.BeginEvent(remoteData, LogStatus.Changed, GetSrc(_sitecore.GetIconSrc(localData)), true); _scDatastore.Save(remoteData); } else if (args.UseItemBlaster) { string icon = 
remoteData.SharedFields.FirstOrDefault(x => x.NameHint == "__Icon")?.Value; if (string.IsNullOrWhiteSpace(icon)) { icon = _sitecore.GetIcon(remoteData.TemplateId); } _logger.BeginEvent(remoteData, LogStatus.Created, $"/scs/platform/scsicon.scsvc?icon={icon}", false); _logger.AddToLog($"{DateTime.Now:h:mm:ss tt} [Created] Staging creation of item using Data Blaster {remoteData.Name} - {remoteData.Id}"); _itemsToCreate.Add(remoteData); } else { _scDatastore.Save(remoteData); } } catch (TemplateMissingFieldException tm) { _logger.BeginEvent(new ErrorItemData() { Name = remoteData.Name, Path = tm.ToString() }, LogStatus.Warning, "", false); } catch (ParentItemNotFoundException) { _logger.BeginEvent(remoteData, LogStatus.SkippedParentError, "", false); Errors.Add(remoteData.Id); } catch (Exception e) { Errors.Add(remoteData.Id); _logger.BeginEvent(new ErrorItemData() { Name = remoteData?.Name ?? "Unknown item", Path = e.ToString() }, LogStatus.Error, "", false); } if (localData != null) { if (_logger.HasLinesSupportEvents(localData.Id.ToString())) { _logger.CompleteEvent(localData.Id.ToString()); } else { _logger.BeginEvent(localData, LogStatus.Skipped, _logger.GetSrc(GetSrc(_sitecore.GetIconSrc(localData))), false); } } } } }
// Synchronous installer worker variant: drains 'itemsToInstall' and delegates per-item
// work to ProcessItem. In-flight ids are tracked in CurrentlyProcessing only when the
// item blaster is NOT used. Runs under SecurityDisabler + SyncOperationContext;
// BulkUpdateContext / EventDisabler are created per the args flags and disposed in
// 'finally'. Cancellation marks the status cancelled and finalizes under _locker.
// NOTE(review): unlike the async ItemInstaller variant above, nothing here calls
// Finalize on normal queue exhaustion (the loop just breaks) - presumably handled
// elsewhere; confirm before relying on completion callbacks.
private void ItemInstaller(PullItemModel args, BlockingCollection <IItemData> itemsToInstall, CancellationToken cancellationToken) { Thread.CurrentThread.Priority = ThreadPriority.Lowest; BulkUpdateContext bu = null; EventDisabler ed = null; try { if (args.BulkUpdate) { bu = new BulkUpdateContext(); } if (args.EventDisabler) { ed = new EventDisabler(); } using (new SecurityDisabler()) using (new SyncOperationContext()) { while (!Completed) { if (!itemsToInstall.TryTake(out var remoteData, int.MaxValue, cancellationToken)) { break; } if (!args.UseItemBlaster) { CurrentlyProcessing.Add(remoteData.Id); } IItemData localData = _sitecore.GetItemData(remoteData.Id); ProcessItem(args, localData, remoteData); lock (_locker) { ItemsInstalled++; if (!args.UseItemBlaster) { CurrentlyProcessing.Remove(remoteData.Id); } } } } } catch (OperationCanceledException e) { Log.Warn("Content migration operation was cancelled", e, this); Status.Cancelled = true; lock (_locker) { if (!Completed) { Finalize(ItemsInstalled, args); } } } catch (Exception e) { Log.Error("Catastrophic error when installing items", e, this); } finally { if (args.BulkUpdate) { bu?.Dispose(); } if (args.EventDisabler) { ed?.Dispose(); } } }
/// <summary>
/// Installs (or, in preview mode, only logs) a single pulled item.
/// Preview: logs the comparer verdict (Skipped / Moved / Renamed / TemplateChange /
/// Changed / Created) without saving. Install: may skip (no overwrite, or contents
/// equal); new items wait while the parent id is still processing, aborting if that
/// parent errored; then saves through _scDatastore, recording failures in Errors.
/// </summary>
internal async Task ProcessItem(PullItemModel args, IItemData localData, IItemData remoteData)
{
    AllowedItems.Remove(remoteData.Id);
    if (args.Preview)
    {
        if (localData != null)
        {
            var results = _comparer.Compare(remoteData, localData);
            if (results.AreEqual)
            {
                _logger.BeginEvent(remoteData, LogStatus.Skipped, GetSrc(_sitecore.GetIconSrc(localData)), false);
            }
            else if (results.IsMoved)
            {
                _logger.BeginEvent(remoteData, LogStatus.Moved, GetSrc(_sitecore.GetIconSrc(localData)), false);
            }
            else if (results.IsRenamed)
            {
                _logger.BeginEvent(remoteData, LogStatus.Renamed, GetSrc(_sitecore.GetIconSrc(localData)), false);
            }
            else if (results.IsTemplateChanged)
            {
                _logger.BeginEvent(remoteData, LogStatus.TemplateChange, GetSrc(_sitecore.GetIconSrc(localData)), false);
            }
            else if (args.Overwrite)
            {
                _logger.BeginEvent(remoteData, LogStatus.Changed, GetSrc(_sitecore.GetIconSrc(localData)), false);
            }
            else
            {
                _logger.BeginEvent(remoteData, LogStatus.Skipped, GetSrc(_sitecore.GetIconSrc(localData)), false);
            }
        }
        else
        {
            _logger.BeginEvent(remoteData, LogStatus.Created, "", false);
        }
    }
    else
    {
        bool skip = false;
        if (!args.Overwrite && localData != null)
        {
            _logger.BeginEvent(remoteData, LogStatus.Skipped, GetSrc(_sitecore.GetIconSrc(localData)), false);
            skip = true;
        }
        if (!skip && localData != null)
        {
            var results = _comparer.Compare(remoteData, localData);
            if (results.AreEqual)
            {
                _logger.BeginEvent(remoteData, LogStatus.Skipped, GetSrc(_sitecore.GetIconSrc(localData)), false);
                skip = true;
            }
        }
        else if (!skip)
        {
            // New item: wait until the parent finishes processing; give up if it errored.
            while (CurrentlyProcessing.Contains(remoteData.ParentId))
            {
                if (Errors.Contains(remoteData.ParentId))
                {
                    Errors.Add(remoteData.Id);
                    skip = true;
                    break;
                }
                await Task.Delay(WaitForParentDelay).ConfigureAwait(false);
            }
        }
        if (!skip)
        {
            try
            {
                if (localData != null)
                {
                    _logger.BeginEvent(remoteData, LogStatus.Changed, _logger.GetSrc(GetSrc(_sitecore.GetIconSrc(localData))), true);
                }
                _scDatastore.Save(remoteData);
            }
            catch (TemplateMissingFieldException tm)
            {
                _logger.BeginEvent(new ErrorItemData() { Name = remoteData.Name, Path = tm.ToString() }, LogStatus.Warning, "", false);
            }
            catch (ParentItemNotFoundException)
            {
                _logger.BeginEvent(remoteData, LogStatus.SkippedParentError, "", false);
                Errors.Add(remoteData.Id);
            }
            catch (Exception e)
            {
                Errors.Add(remoteData.Id);
                _logger.BeginEvent(new ErrorItemData() { Name = remoteData?.Name ?? "Unknown item", Path = e.ToString() }, LogStatus.Error, "", false);
            }
            if (localData != null)
            {
                // BUGFIX: the condition was inverted ('!HasLinesSupportEvents'), which
                // completed events that had NO supporting lines and never completed the
                // ones that did - the opposite of the sibling ProcessItem variants in
                // this file.
                if (_logger.HasLinesSupportEvents(localData.Id.ToString()))
                {
                    _logger.CompleteEvent(localData.Id.ToString());
                }
                else
                {
                    _logger.BeginEvent(localData, LogStatus.Skipped, _logger.GetSrc(GetSrc(_sitecore.GetIconSrc(localData))), false);
                }
            }
        }
    }
}
// Exhaustive single-threaded test of the set-algebra surface of ConcurrentHashSet:
// constructors, Comparer, ToString, subset/superset (and proper variants, including
// null and disjoint arguments), SetEquals, CreateSetComparer equality + hash codes,
// CopyTo, ExceptWith, IntersectWith, RemoveWhere, SymmetricExceptWith, UnionWith,
// IsEmpty, Clear, and non-generic enumeration. Null arguments are exercised
// deliberately - the asserts show this implementation treats null as an empty set
// (e.g. IntersectWith(null) empties the set) rather than throwing.
// Code left byte-identical; the long fixed assert sequence is order-sensitive.
public void ConcurrentHashSet() { ConcurrentHashSet <int> playSet = new ConcurrentHashSet <int>(); ConcurrentHashSet <int> smallSet = new ConcurrentHashSet <int>(new int[] { 0, 1 }); ConcurrentHashSet <int> bigSet = new ConcurrentHashSet <int>(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, EqualityComparer <int> .Default); ConcurrentHashSet <int> nullSet = null; Assert.AreEqual(EqualityComparer <int> .Default, bigSet.Comparer); Assert.IsFalse(string.IsNullOrEmpty(smallSet.ToString())); Assert.IsTrue(smallSet.IsProperSubsetOf(bigSet)); Assert.IsTrue(smallSet.IsSubsetOf(bigSet)); Assert.IsTrue(bigSet.IsProperSupersetOf(smallSet)); Assert.IsTrue(bigSet.IsSupersetOf(smallSet)); playSet.TryAdd(0); ((ICollection <int>)playSet).Add(1); Assert.IsTrue(playSet.Contains(0)); Assert.IsTrue(smallSet.SetEquals(playSet)); Assert.IsFalse(smallSet.SetEquals(bigSet)); Assert.IsFalse(smallSet.SetEquals(null)); Assert.IsTrue(smallSet.IsSubsetOf(playSet)); Assert.IsFalse(bigSet.IsSubsetOf(playSet)); Assert.IsFalse(bigSet.IsSubsetOf(null)); Assert.IsFalse(playSet.IsSupersetOf(bigSet)); Assert.IsTrue(playSet.IsSupersetOf(null)); Assert.IsFalse(bigSet.IsSubsetOf(playSet)); Assert.IsFalse(bigSet.IsSubsetOf(null)); Assert.IsFalse(smallSet.IsProperSubsetOf(playSet)); Assert.IsFalse(playSet.IsProperSupersetOf(smallSet)); Assert.IsTrue(playSet.IsProperSupersetOf(null)); Assert.IsFalse(smallSet.IsProperSubsetOf(null)); Assert.IsFalse(smallSet.IsProperSubsetOf(new ConcurrentHashSet <int>(new int[] { 3, 4, 5 }))); Assert.IsFalse(smallSet.IsSupersetOf(new ConcurrentHashSet <int>(new int[] { 3, 4, 5 }))); Assert.IsFalse(smallSet.IsSubsetOf(new ConcurrentHashSet <int>(new int[] { 3, 4, 5 }))); Assert.IsFalse(new ConcurrentHashSet <int>(new int[] { 3, 4, 5 }).IsProperSupersetOf(smallSet)); Assert.IsFalse(new ConcurrentHashSet <int>(new int[] { 3, 4, 5 }).IsSupersetOf(smallSet)); Assert.IsFalse(new ConcurrentHashSet <int>(new int[] { 3, 4, 5 }).Overlaps(smallSet)); IEqualityComparer <ConcurrentHashSet 
<int> > setComparer = ConcurrentHashSet <int> .CreateSetComparer(); Assert.IsTrue(setComparer.Equals(smallSet, playSet)); Assert.IsFalse(setComparer.Equals(bigSet, playSet)); Assert.AreEqual(setComparer.GetHashCode(smallSet), setComparer.GetHashCode(playSet)); Assert.IsTrue(setComparer.Equals(smallSet, smallSet)); Assert.IsFalse(setComparer.Equals(smallSet, nullSet)); Assert.IsFalse(setComparer.Equals(nullSet, smallSet)); Assert.IsFalse(smallSet.IsReadOnly); Assert.IsTrue(smallSet.Overlaps(bigSet)); Assert.IsFalse(smallSet.Overlaps(null)); int[] test = new int[2]; smallSet.CopyTo(test); smallSet.CopyTo(test, 0); smallSet.Add(2); playSet.Remove(1); bigSet.ExceptWith(playSet); bigSet.ExceptWith(null); Assert.IsFalse(bigSet.Contains(0)); Assert.IsTrue(bigSet.Contains(1)); smallSet.IntersectWith(playSet); Assert.IsTrue(smallSet.Contains(0)); Assert.IsFalse(smallSet.Contains(1)); ConcurrentHashSet <int> niSet = new ConcurrentHashSet <int>(); niSet.Add(0); Assert.IsFalse(niSet.IsEmpty); niSet.IntersectWith(null); Assert.IsTrue(niSet.IsEmpty); niSet.Add(0); Assert.AreEqual(1, niSet.Count); niSet.RemoveWhere(null); Assert.AreEqual(1, niSet.Count); smallSet.IntersectWith(playSet); playSet.Add(0); playSet.Add(2); playSet.RemoveWhere(n => n != 0); Assert.IsTrue(playSet.Contains(0)); Assert.IsFalse(playSet.Contains(1)); playSet.Add(0); playSet.Add(2); smallSet.Add(1); playSet.SymmetricExceptWith(smallSet); playSet.SymmetricExceptWith(null); Assert.IsFalse(playSet.Contains(0)); Assert.IsTrue(playSet.Contains(1)); Assert.IsTrue(playSet.Contains(2)); playSet.UnionWith(smallSet); playSet.UnionWith(null); Assert.IsTrue(playSet.Contains(0)); Assert.IsTrue(playSet.Contains(1)); Assert.IsTrue(playSet.Contains(2)); Assert.IsFalse(playSet.Contains(3)); playSet.Clear(); Assert.IsFalse(smallSet.IsSubsetOf(playSet)); foreach (object o in ((System.Collections.IEnumerable)smallSet)) { Assert.IsTrue(o is int); Assert.IsTrue((int)o < 10); } }
// Backup-file lookup protocol. On a new task: multicasts a LookupMessage for every
// FileId that has locally backed-up chunks, and subscribes to the channel so a
// matching GotMessage removes that id from the pending set. Retries up to MaxRetries
// rounds, doubling waitPeriod (starting at 1s) each time, stopping early once every
// id has been acknowledged. Ids that never received a Got are treated as unused:
// their chunk files are deleted and their chunk-peer records removed. The set is
// cleared and disposed at the end (the ReSharper pragma acknowledges the
// subscribe-lambda touching the later-disposed set; subscriptions are disposed
// before the set is).
public Task Run() { var backupDir = Core.Instance.Config.BackupDirectory; Core.Instance.Log.InfoFormat("Starting LookUpProtocol: '{0}'", backupDir); return(Task.Factory.StartNew(() => { // get array with all the fileId's associated with the backed up chunks var fileIds = Util.GetLocalFileChunks() .Select(chunk => chunk.FileId) .Distinct().ToList(); if (fileIds.Count == 0) { Core.Instance.Log.Info("LookUpProtocol: got no files, no lookup required"); return; } // remove duplicates, transform the collection into a ConcurrentHashSet var backedUpFilesId = new ConcurrentHashSet <FileId>(fileIds); var waitPeriod = 1000; for (int retry = 0; retry < MaxRetries; ++retry) { // perform a lookup for each fileId var subscriptions = backedUpFilesId.Select(id => { Core.Instance.MCChannel.Send(new LookupMessage(id)); return Core.Instance.MCChannel.Received .Where(message => message.MessageType == MessageType.Got) .Cast <GotMessage>() .Where(message => message.FileId == id) // ReSharper disable once AccessToDisposedClosure .Subscribe(msg => backedUpFilesId.Remove(msg.FileId)); }).ToList(); // wait Task.Delay(waitPeriod).Wait(); foreach (var subscription in subscriptions) { subscription.Dispose(); } // if we got a Got for all the files we don't need to wait longer if (backedUpFilesId.Count == 0) { break; } waitPeriod *= 2; } // delete all the unused chunks foreach (var unusedFileId in backedUpFilesId) { var fileList = Directory.GetFiles(backupDir, unusedFileId + "_*"); foreach (var backedUpChunk in fileList) { File.Delete(backedUpChunk); } Core.Instance.ChunkPeers.RemoveAllChunkPeer(unusedFileId); } backedUpFilesId.Clear(); backedUpFilesId.Dispose(); })); }
// Job-thread main loop (xxl-job executor port). Polls triggerQueue with a 3-second
// timeout so the toStop1 flag can be observed between takes (see the original
// comment). For each trigger it: clears the log id from triggerLogIdSet, splits
// executorParams on ',', sets up the per-job log file (yyyy-MM-dd/<logId>.log) and
// the sharding context, runs handler.execute, and pushes the result - or the kill
// reason, when stopping - to TriggerCallbackThread. After 30+ consecutive idle polls
// the thread retires itself via XxlJobExecutor.removeJobThread. On shutdown, any
// triggers still queued are called back as killed-before-execution.
// NOTE(review): runtime log strings (including the Chinese messages) are program
// output and are intentionally left untouched; code below is byte-identical.
void innerStart() { while (!toStop1) { running = false; idleTimes++; try { // to check toStop signal, we need cycle, so wo cannot use queue.take(), instand of poll(timeout) var result = triggerQueue.TryTake(out TriggerParam triggerParam, 3 * 1000); if (triggerParam != null) { running = true; idleTimes = 0; triggerLogIdSet.Remove(triggerParam.logId); // parse param string[] handlerParams = (triggerParam.executorParams != null && triggerParam.executorParams.Trim().Length > 0) ? triggerParam.executorParams.Split(',') : null; // handle job ReturnT <String> executeResult = null; try { // log filename: yyyy-MM-dd/9999.log String logFileName = XxlJobFileAppender.makeLogFileName(TimeUtil.ToTime(triggerParam.logDateTim), triggerParam.logId); XxlJobFileAppender.contextHolder.Value = (logFileName); ShardingUtil.setShardingVo(new ShardingUtil.ShardingVO(triggerParam.broadcastIndex, triggerParam.broadcastTotal)); XxlJobLogger.log("<br>----------- xxl-job job execute start -----------<br>----------- Params:" + string.Join(",", handlerParams ?? 
new[] { "" })); executeResult = handler.execute(handlerParams); if (executeResult == null) { executeResult = ReturnT <string> .FAIL; } XxlJobLogger.log("<br>----------- xxl-job job execute end(finish) -----------<br>----------- ReturnT:" + executeResult); } catch (Exception e) { if (toStop1) { XxlJobLogger.log("<br>----------- JobThread toStop, stopReason:" + stopReason); } //StringWriter stringWriter = new StringWriter(); //e.printStackTrace(new PrintWriter(stringWriter)); String errorMsg = e.ToString(); executeResult = new ReturnT <String>(ReturnT <string> .FAIL_CODE, errorMsg); XxlJobLogger.log("<br>----------- JobThread Exception:" + errorMsg + "<br>----------- xxl-job job execute end(error) -----------"); } // callback handler info if (!toStop1) { // commonm TriggerCallbackThread.pushCallBack(new HandleCallbackParam(triggerParam.logId, executeResult)); } else { // is killed ReturnT <String> stopResult = new ReturnT <String>(ReturnT <string> .FAIL_CODE, stopReason + " [业务运行中,被强制终止]"); TriggerCallbackThread.pushCallBack(new HandleCallbackParam(triggerParam.logId, stopResult)); } } else { if (idleTimes > 30) { XxlJobExecutor.removeJobThread(jobId, "excutor idel times over limit."); } } } catch (Exception e) { if (toStop1) { XxlJobLogger.log("<br>----------- xxl-job toStop, stopReason:" + stopReason); } String errorMsg = e.ToString(); XxlJobLogger.log("----------- xxl-job JobThread Exception:" + errorMsg); } } // callback trigger request in queue while (triggerQueue != null && triggerQueue.Count > 0) { triggerQueue.TryTake(out TriggerParam triggerParam); if (triggerParam != null) { // is killed ReturnT <String> stopResult = new ReturnT <String>(ReturnT <string> .FAIL_CODE, stopReason + " [任务尚未执行,在调度队列中被终止]"); TriggerCallbackThread.pushCallBack(new HandleCallbackParam(triggerParam.logId, stopResult)); } } logger.Info(string.Format(">>>>>>>>>>>> xxl-job JobThread stoped, hashCode:{0}", Thread.CurrentThread)); }
/// <summary>
/// Detaches a previously registered state-change listener. Removal on the listener
/// collection is presumably thread-safe on its own - the commented-out Monitor-based
/// locking in the original has been dropped as dead code.
/// </summary>
public void UnSubscribeStateChanges(IZKStateListener stateListener) =>
    _stateListener.Remove(stateListener);
// Older string-status variant of ProcessItem: previews or installs one pulled item,
// logging "Skipped"/"Moved"/"Renamed"/"Template Change"/"Changed"/"Created" events
// based on the comparer result, waiting in 50ms polls while the parent id is still in
// _currentlyProcessing (aborting if that parent errored), and recording failures in
// _errors as ErrorItemData events.
// NOTE(review): _logger.LinesSupport[localData.Id.ToString()] indexes the dictionary
// directly - unlike the HasLinesSupportEvents guard used by the sibling variants -
// and would throw if the key is missing; confirm the logger always pre-creates
// entries before relying on this path. Code left byte-identical.
private async Task ProcessItem(PullItemModel args, IItemData localData, IItemData remoteData, Item localItem) { _allowedItems.Remove(remoteData.Id); if (args.Preview) { if (localData != null) { var results = _comparer.Compare(remoteData, localData); if (results.AreEqual) { _logger.BeginEvent(remoteData, "Skipped", _sitecore.GetItemIconSrc(localData), false); } else if (results.IsMoved) { _logger.BeginEvent(remoteData, "Moved", _sitecore.GetItemIconSrc(localData), false); } else if (results.IsRenamed) { _logger.BeginEvent(remoteData, "Renamed", _sitecore.GetItemIconSrc(localData), false); } else if (results.IsTemplateChanged) { _logger.BeginEvent(remoteData, "Template Change", _sitecore.GetItemIconSrc(localData), false); } else if (args.Overwrite) { _logger.BeginEvent(remoteData, "Changed", _sitecore.GetItemIconSrc(localData), false); } else { _logger.BeginEvent(remoteData, "Skipped", _sitecore.GetItemIconSrc(localData), false); } } else { _logger.BeginEvent(remoteData, "Created", "", false); } } else { bool skip = false; if (!args.Overwrite && localData != null) { _logger.BeginEvent(remoteData, "Skipped", _sitecore.GetItemIconSrc(localData), false); skip = true; } if (!skip && localData != null) { var results = _comparer.Compare(remoteData, localData); if (results.AreEqual) { _logger.BeginEvent(remoteData, "Skipped", _sitecore.GetItemIconSrc(localData), false); skip = true; } } else if (!skip) { while (_currentlyProcessing.Contains(remoteData.ParentId)) { if (_errors.Contains(remoteData.ParentId)) { _errors.Add(remoteData.Id); skip = true; break; } await Task.Delay(50); } } if (!skip) { try { if (localData != null) { _logger.BeginEvent(remoteData, "Changed", _logger.GetSrc(ThemeManager.GetIconImage(localItem, 32, 32, "", "")), true); } _scDatastore.Save(remoteData); } catch (TemplateMissingFieldException tm) { _logger.BeginEvent(new ErrorItemData() { Name = remoteData.Name, Path = tm.ToString() }, "Warning", "", false); } catch (ParentItemNotFoundException) { 
_logger.BeginEvent(remoteData, "Skipped parent error", "", false); _errors.Add(remoteData.Id); } catch (Exception e) { _errors.Add(remoteData.Id); _logger.BeginEvent(new ErrorItemData() { Name = remoteData?.Name ?? "Unknown item", Path = e.ToString() }, "Error", "", false); } if (localData != null) { if (_logger.LinesSupport[localData.Id.ToString()].Events.Count != 0) { _logger.CompleteEvent(localData.Id.ToString()); } else { _logger.BeginEvent(localData, "Skipped", _logger.GetSrc(ThemeManager.GetIconImage(localItem, 32, 32, "", "")), false); } } } } }
/// <summary>
/// Tears down a room: recalls its RPC handle back to the lobby, then unregisters it
/// from the room collection.
/// </summary>
public void ClearRoom(TMRoom room)
{
    room.RecallRpc(Lobby);
    Rooms.Remove(room);
}
// Exercises ConcurrentHashSet<T> side by side with ConcurrentDictionary<T, T>,
// throwing InvalidOperationException whenever the two disagree. `create` produces
// items (MAX attempts, de-duplicated through a HashSet); `copy` produces a value
// equal to its argument but — for reference types — a distinct instance.
// NOTE(review): this block appears truncated — it ends mid-statement at
// `if (!set.Add(default))` with no body; confirm the remainder against the
// original file before editing.
private void TestAll <T>(Func <T> create, Func <T, T> copy)
{
    ConcurrentDictionary <T, T> map = new ConcurrentDictionary <T, T>();
    ConcurrentHashSet <T> set = new ConcurrentHashSet <T>();
    HashSet <T> items = new HashSet <T>();
    // HashSet de-duplicates, so `items` holds at most MAX distinct values.
    for (int i = 0; i < MAX; i++)
    {
        T obj = create();
        items.Add(obj);
    }
    foreach (T item in items)
    {
        // IsEmpty
        bool v1 = set.IsEmpty;
        bool v2 = map.IsEmpty;
        if (v1 != v2) { throw new InvalidOperationException(); }
        // Remove
        v1 = set.TryRemove(item, out T item1);
        v2 = map.TryRemove(item, out T item2);
        if (v1 != v2) { throw new InvalidOperationException(); }
        // NOTE(review): compares item2 with ITSELF, so this can never throw —
        // likely meant Equals(item1, item2).
        if (!Equals(item2, item2)) { throw new InvalidOperationException(); }
        if (set.Count != map.Count) { throw new InvalidOperationException(); }
        // A second remove of the same (absent) item must fail.
        if (set.Remove(item)) { throw new InvalidOperationException(); }
        if (set.Count != map.Count) { throw new InvalidOperationException(); }
        // Add
        v1 = set.Add(item);
        v2 = map.TryAdd(item, item);
        if (v1 != v2) { throw new InvalidOperationException(); }
        if (set.Count != map.Count) { throw new InvalidOperationException(); }
        // A duplicate add must fail.
        if (set.Add(item)) { throw new InvalidOperationException(); }
        if (set.Count != map.Count) { throw new InvalidOperationException(); }
        // Contains: lookup via an equal clone must behave like the original key.
        T clone = copy(item);
        v1 = set.Contains(clone);
        v2 = map.ContainsKey(clone);
        if (v1 != v2) { throw new InvalidOperationException(); }
        // TryGetValue: for reference types, both containers must hand back the
        // same stored instance (checked via ReferenceEquals below).
        v1 = set.TryGetValue(clone, out item1);
        v2 = map.TryGetValue(clone, out item2);
        if (v1 != v2) { throw new InvalidOperationException(); }
        if (!item1.Equals(item2)) { throw new InvalidOperationException(); }
        if (!clone.Equals(item1)) { throw new InvalidOperationException(); }
        if (typeof(T).IsClass && !ReferenceEquals(item1, item2)) { throw new InvalidOperationException(); }
        // TryUpdate
        v1 = set.TryUpdate(clone);
        v2 = map.TryUpdate(clone, clone, clone);
        if (v1 != v2) { throw new InvalidOperationException(); }
        set.TryGetValue(clone, out item1);
        map.TryGetValue(clone, out item2);
        if (!item1.Equals(item2)) { throw new InvalidOperationException(); }
        if (!clone.Equals(item1)) { throw new InvalidOperationException(); }
        if (typeof(T).IsClass && !ReferenceEquals(item1, item2)) { throw new InvalidOperationException(); }
        v1 = set.TryUpdate(item);
        v2 = map.TryUpdate(item, item, item);
        if (v1 != v2) { throw new InvalidOperationException(); }
        // Updating a present item must succeed.
        if (v1 != true) { throw new InvalidOperationException(); }
    }
    // Remove
    foreach (T item in items)
    {
        bool v1 = set.TryRemove(item, out T item1);
        bool v2 = map.TryRemove(item, out T item2);
        if (v1 != v2) { throw new InvalidOperationException(); }
        // NOTE(review): exact duplicate of the previous check — possibly intended
        // to compare item1/item2 instead.
        if (v1 != v2) { throw new InvalidOperationException(); }
        if (set.Count != map.Count) { throw new InvalidOperationException(); }
        // Contains
        v1 = set.Contains(item);
        v2 = map.ContainsKey(item);
        if (v1 != v2) { throw new InvalidOperationException(); }
        // AddOrUpdate
        T clone = copy(item);
        set.Add(item);
        if (!set.TryGetValue(item, out T v3)) { throw new InvalidOperationException(); }
        set.AddOrUpdate(clone);
        if (!set.TryGetValue(item, out v3)) { throw new InvalidOperationException(); }
        var v4 = map.AddOrUpdate(item, item, (x, y) => x);
        T v5 = map.AddOrUpdate(clone, clone, (x, y) => x);
        if (!item.Equals(clone)) { throw new InvalidOperationException(); }
        if (!item.Equals(v3)) { throw new InvalidOperationException(); }
        if (!item.Equals(v4)) { throw new InvalidOperationException(); }
        // NOTE(review): repeats the v4 check — likely meant to test v5.
        if (!item.Equals(v4)) { throw new InvalidOperationException(); }
        if (typeof(T).IsClass)
        {
            // v3 came from the set after AddOrUpdate(clone); v5 from the map —
            // both must be the same instance, and distinct from the originals.
            if (!ReferenceEquals(v3, v5)) { throw new InvalidOperationException(); }
            if (ReferenceEquals(item, clone)) { throw new InvalidOperationException(); }
            if (ReferenceEquals(v3, v4)) { throw new InvalidOperationException(); }
        }
    }
    // Clear
    map.Clear();
    set.Clear();
    if (map.Count != set.Count) { throw new InvalidOperationException(); }
    // Set specific
    if (!set.Add(default))
public void UnSubscribeStateChanges(IZKStateListener stateListener) { _stateListeners.Remove(stateListener); }
public void StartInstallingItems(PullItemModel args, BlockingCollection <IItemData> itemsToInstall, int threads, CancellationTokenSource cancellation) { Status.StartedTime = DateTime.Now; Status.RootNodes = args.Ids.Select(x => new ContentTreeNode(x)); Status.IsPreview = args.Preview; Status.Server = args.Server; int items = 0; for (int i = 0; i < threads; i++) { Task.Run(async() => { Thread.CurrentThread.Priority = ThreadPriority.Lowest; BulkUpdateContext bu = null; EventDisabler ed = null; try { if (args.BulkUpdate) { bu = new BulkUpdateContext(); } if (args.EventDisabler) { ed = new EventDisabler(); } using (new SecurityDisabler()) { while (!Completed) { IItemData remoteData; if (!itemsToInstall.TryTake(out remoteData, int.MaxValue, cancellation.Token)) { lock (_locker) { if (!Completed && !_currentlyProcessing.Any()) { Finalize(items, args); } } break; } _currentlyProcessing.Add(remoteData.Id); Item localItem = _sitecore.GetItem(remoteData.Id); IItemData localData = localItem == null ? null : new Rainbow.Storage.Sc.ItemData(localItem); await ProcessItem(args, localData, remoteData, localItem); lock (_locker) { items++; _currentlyProcessing.Remove(remoteData.Id); if (_currentlyProcessing.Any() || !itemsToInstall.IsAddingCompleted || itemsToInstall.Count != 0) { continue; } if (!Completed) { Finalize(items, args); } } } } } catch (OperationCanceledException e) { Log.Warn("Content migration operation was cancelled", e, this); Status.Cancelled = true; lock (_locker) { if (!Completed) { Finalize(items, args); } } } catch (Exception e) { Log.Error("Catastrophic error when installing items", e, this); } finally { if (args.BulkUpdate) { bu?.Dispose(); } if (args.EventDisabler) { ed?.Dispose(); } } }); } }
public void TestLockFreeHashSet_Long() { ConcurrentHashSet<long> listTest = new ConcurrentHashSet<long>(); listTest.Add(42); listTest.Add(22); listTest.Add(22); listTest.Add(64); listTest.Add(55); Assert.IsTrue(listTest.Count == 4); Assert.IsTrue(listTest.Contains(42)); Assert.IsFalse(listTest.Contains(142)); Assert.IsFalse(listTest.Contains(2)); Assert.IsTrue(listTest.Contains(64)); listTest.Remove(42); Assert.IsFalse(listTest.Contains(42)); Assert.IsFalse(listTest.Remove(42)); Assert.IsTrue(listTest.Count == 3); listTest.Add(42); listTest.Add(41); listTest.Add(0); Assert.IsTrue(listTest.Count == 6); listTest.Add(0); Assert.IsTrue(listTest.Count == 6); Assert.IsTrue(listTest.Contains(41)); Assert.IsTrue(listTest.Contains(0)); Assert.IsTrue(listTest.Remove(0)); Assert.IsFalse(listTest.Contains(0)); Assert.IsTrue(listTest.Remove(22)); Assert.IsTrue(listTest.Remove(55)); listTest.Add(1212); listTest.Add(323); listTest.Add(7567); listTest.Add(567); Assert.IsTrue(listTest.Count == 7); }