/// <summary>
/// Releases every asset reference held by this messenger, returns the pooled
/// collections, and unregisters this messenger from the global
/// <c>Messengers</c> registry.
/// </summary>
public void ReleaseRef() {
    if (null != _assets) {
        foreach (var asset in _assets) {
            Logger.LogVerbose("BundlerMessenger::ReleaseRef: {0}, loader: {1}", this, asset.Loader);
            asset.Release();
            Logger.LogVerbose("BundlerMessenger::ReleaseRef: {0}, loader: {1}, finished!", this, asset.Loader);
        }
        HashSetPool<AssetBase>.Return(_assets);
        // BUG FIX: clear the field after handing the set back to the pool so a
        // repeated ReleaseRef call cannot double-release the assets or iterate
        // a collection that is already recycled (and possibly reused elsewhere).
        _assets = null;
    }

    if (null != _typedAssets) {
        foreach (var kv in _typedAssets) {
            Logger.LogVerbose("BundlerMessenger::ReleaseRef: {0}, loader: {1}", this, kv.Value.Loader);
            kv.Value.Release();
            Logger.LogVerbose("BundlerMessenger::ReleaseRef: {0}, loader: {1}, finished!", this, kv.Value.Loader);
        }
        DictionaryPool<Type, AssetBase>.Return(_typedAssets);
        // Same reasoning as _assets above: make release idempotent.
        _typedAssets = null;
    }

    Messengers.Remove(this);
}
/// <summary>
/// Callback invoked when a background mesh-generation task delivers its
/// result. Builds the renderable mesh from the task's <c>MeshData</c>,
/// recycles the data, and notifies the owner via <c>updateCallback</c>.
/// </summary>
/// <param name="info">Completed task info carrying the generated mesh data.</param>
void OnMeshDataReceived(MeshTaskInfo info) {
    MeshData meshData = info.meshData; // main-thread callback (original comment: 主线程回调)
    isWaitForData = false;
    mesh = meshData.CreateMesh();// TODO pool mesh
    // meshData goes back to the pool immediately after the mesh is built;
    // it must not be touched past this point.
    // NOTE(review): DictionaryPool with a single type argument looks unusual —
    // presumably a project-specific pool keyed by MeshData; verify.
    DictionaryPool <MeshData> .Return(meshData);
    hasMesh = true;
    updateCallback();
}
/// <summary>
/// The last step for loading data.
/// In this step, prototype references are being resolved for this serialized
/// data and their sub data objects.
/// </summary>
/// <param name="prototypes">Prototypes to use for remapping</param>
/// <param name="errors"></param>
public void ResolveReferenceFields(List<IPrototype> prototypes, List<ParsingError> errors, PrototypeParserState state)
{
    var resolvedFields = DictionaryPool<string, object>.Get();
    try
    {
        foreach (var field in fields)
        {
            // Collect resolved prototype references; applied after the loop so
            // the fields dictionary is not mutated while being enumerated.
            if (field.Value is SerializedPrototypeReference prototypeRef)
            {
                resolvedFields.Add(field.Key, prototypeRef.Resolve(prototypes));
            }
            // Recurse into nested serialized data.
            if (field.Value is SerializedData subData)
            {
                subData.ResolveReferenceFields(prototypes, errors, state);
            }
            // Recurse into serialized collections.
            if (field.Value is SerializedCollectionData collectionData)
            {
                collectionData.ResolveReferenceFieldsAndSubData(prototypes, errors, state);
            }
        }

        foreach (var resolved in resolvedFields)
        {
            this.fields[resolved.Key] = resolved.Value;
        }
    }
    finally
    {
        // The scratch dictionary always goes back to the pool, even on error.
        DictionaryPool<string, object>.Return(resolvedFields);
    }
}
/// <summary>
/// The last step for loading data.
/// In this step, root references are being resolved for this serialized data
/// and their sub data objects.
/// </summary>
/// <param name="referenceables">Root objects to use for remapping</param>
/// <param name="filename">Only used for error reporting</param>
public void ResolveReferenceFields(string filename, List<ISerializableRoot> referenceables, List<SerializerError> errors, XMLSerializerParams parameters)
{
    var pendingUpdates = DictionaryPool<string, object>.Get();
    try
    {
        foreach (var field in fields)
        {
            // Root references are resolved into a scratch map first; writing
            // them back mid-iteration would invalidate the enumerator.
            if (field.Value is SerializedRootObjectReference rootRef)
            {
                pendingUpdates.Add(field.Key, rootRef.Resolve(referenceables));
            }
            // Nested serialized data resolves its own references recursively.
            if (field.Value is SerializedData nested)
            {
                nested.ResolveReferenceFields(filename, referenceables, errors, parameters);
            }
            // Collections resolve both their references and their sub data.
            if (field.Value is SerializedCollectionData nestedCollection)
            {
                nestedCollection.ResolveReferenceFieldsAndSubData(filename, referenceables, errors, parameters);
            }
        }

        foreach (var pending in pendingUpdates)
        {
            this.fields[pending.Key] = pending.Value;
        }
    }
    finally
    {
        // Always recycle the scratch dictionary.
        DictionaryPool<string, object>.Return(pendingUpdates);
    }
}
/// <summary>
/// Runs distribution algorithm on input: repeatedly splits the remaining
/// amount evenly across all objects that still request more, until everything
/// is distributed, a fixpoint is reached, or no requests remain.
/// </summary>
/// <param name="input">Requests to distribute to; entries requesting ~0 are skipped.</param>
/// <param name="output">Receives one result per distributed object.</param>
/// <param name="available">The amount you want to distribute</param>
/// <returns>The overflow</returns>
public static float ReqDistribute(List<DistributionInput> input, List<DistributionResult> output, float available)
{
    List<TObject> tmp = ListPool<TObject>.Get();
    List<TObject> objs = ListPool<TObject>.Get();
    Dictionary<TObject, float> distributed = DictionaryPool<TObject, float>.Get();
    Dictionary<TObject, float> requested = DictionaryPool<TObject, float>.Get();

    try
    {
        // Seeded negative so the first loop iteration can never look converged.
        float availablePrev = -available;

        // Parse: index the non-trivial requests.
        foreach (var inp in input)
        {
            if (Mathf.Approximately(inp.requestedAmount, 0))
            {
                continue;
            }
            distributed.Add(inp.obj, 0);
            requested.Add(inp.obj, inp.requestedAmount);
            objs.Add(inp.obj);
        }

        int panic = 0;
        while (available > 0 && !Mathf.Approximately(available, availablePrev) && requested.Count > 0)
        {
            availablePrev = available;
            float perInput = available / (float)requested.Count;

            foreach (var obj in objs)
            {
                // Give each object its even share, capped by what it still wants.
                float distributing = Mathf.Min(requested[obj], perInput);
                distributed[obj] += distributing;
                requested[obj] -= distributing;
                available -= distributing;

                if (Mathf.Approximately(requested[obj], 0))
                {
                    tmp.Add(obj);
                }
            }

            // Remove satisfied objects outside the enumeration above.
            foreach (var obj in tmp)
            {
                objs.Remove(obj);
                requested.Remove(obj);
            }
            tmp.Clear();

            panic++;
            if (panic > 1000)
            {
                Debug.LogError("Distribution algorithm triggered panic exit!");
                return available;
            }
        }

        // Write back
        foreach (var kvp in distributed)
        {
            output.Add(new DistributionResult() { amount = kvp.Value, obj = kvp.Key });
        }

        return available;
    }
    finally
    {
        // BUG FIX: the panic early-return previously skipped these Return
        // calls and leaked all four pooled collections; the finally block
        // guarantees they are recycled on every exit path.
        DictionaryPool<TObject, float>.Return(distributed);
        DictionaryPool<TObject, float>.Return(requested);
        ListPool<TObject>.Return(tmp);
        ListPool<TObject>.Return(objs);
    }
}
/// <summary>
/// Suggests up to <paramref name="limit"/> accounts for <paramref name="id"/>:
/// scores other likers of the target's likes by similarity, then collects, in
/// descending-similarity order, the likes of those accounts that the target
/// has not liked yet.
/// </summary>
/// <param name="id">Account to suggest for.</param>
/// <param name="limit">Maximum number of suggestions returned.</param>
/// <param name="key">Optional filter key; null disables filtering.</param>
/// <param name="value">Filter value for <paramref name="key"/>.</param>
/// <returns>Converted suggestions; empty when the filter is unknown or no candidates exist.</returns>
public SuggestResponseDto[] SuggestForV2(int id, int limit, string key, string value)
{
    var accounts = _repo.Accounts;
    var uniqueLikesOfTarget = accounts[id].GetLikesFrom().ToHashSet();

    ISuggestFilter filter = null;
    if (key != null)
    {
        filter = _factory.CreateFilter(key, value);
        if (filter == null)
        {
            return new SuggestResponseDto[0];
        }
    }

    var likersData = SimilarityDictionaryPool.Rent();
    foreach (var likeeId in uniqueLikesOfTarget)
    {
        long targetTsSum = 0;
        long targetTsCount = 0;
        var likersOfTargetLike = accounts[likeeId].GetLikesToWithTs();
        var l = likersOfTargetLike.GetLength(0);
        for (int i = 0; i < l; i++)
        {
            var likerId = likersOfTargetLike[i, 0];
            var ts = likersOfTargetLike[i, 1];
            if (likerId == id)
            {
                // Accumulate the target's own like timestamps for this likee.
                targetTsCount++;
                targetTsSum += ts;
            }
            else if (filter == null || filter.IsOk(likerId))
            {
                if (!likersData.ContainsKey(likerId))
                {
                    likersData[likerId] = CounterPool.Rent();
                }
                likersData[likerId].AddTs(ts);
            }
        }

        // BUG FIX: "(double)(targetTsSum / targetTsCount)" truncated via
        // integer division before the cast; cast first for a real average.
        // NOTE(review): assumes the target appears among the likers of each of
        // its own likes (targetTsCount > 0) — otherwise this is NaN where the
        // original threw DivideByZeroException; confirm with the data model.
        var targetTs = targetTsSum / (double)targetTsCount;
        foreach (var counter in likersData.Values)
        {
            counter.Calculate(targetTs);
        }
    }

    if (likersData.Count == 0)
    {
        // BUG FIX: the rented dictionary was previously leaked on this path.
        SimilarityDictionaryPool.Return(likersData);
        return new SuggestResponseDto[0];
    }

    var count = likersData.Count;
    var sortedIdsBySimilarity = Pool.Rent(count);
    var index = 0;
    foreach (var pair in likersData)
    {
        sortedIdsBySimilarity[index] = pair;
        index++;
    }
    // Pool.Rent may hand back an oversized array, so sort only [0, count).
    Array.Sort(sortedIdsBySimilarity, 0, count, DescSimilarityComparer);

    var allExceptLikes = new List<int>();
    for (int i = 0; i < count; i++)
    {
        var likesFromCurrent = accounts[sortedIdsBySimilarity[i].Key].GetLikesFrom();
        Array.Sort(likesFromCurrent, _descComparer);
        foreach (var like in likesFromCurrent)
        {
            if (!uniqueLikesOfTarget.Contains(like))
            {
                allExceptLikes.Add(like);
                // Also marks the like as seen so later candidates cannot re-add it.
                uniqueLikesOfTarget.Add(like);
            }
        }
        if (allExceptLikes.Count >= limit)
        {
            break;
        }
    }

    // BUG FIX: the original returned sortedIdsBySimilarity to the pool and
    // THEN enumerated it to recycle the counters — a use-after-return. Recycle
    // the counters first, then the backing array, then the dictionary.
    CounterPool.Return(sortedIdsBySimilarity.Take(count).Select(x => x.Value));
    Pool.Return(sortedIdsBySimilarity);
    SimilarityDictionaryPool.Return(likersData);

    return allExceptLikes.Take(limit).Select(x => _converter.Convert(x)).ToArray();
}