/// <summary> /// Creates a new instance of a <see cref="StreamObjectReader"/>. /// </summary> /// <param name="stream">The stream to read objects from.</param> /// <param name="knownObjects">An optional list of objects assumed known by the corresponding <see cref="StreamObjectWriter"/>.</param> /// <param name="binder">A binder that provides object and type decoding.</param> /// <param name="cancellationToken"></param> private StreamObjectReader( Stream stream, ObjectData knownObjects, ObjectBinder binder, CancellationToken cancellationToken) { // String serialization assumes both reader and writer to be of the same endianness. // It can be adjusted for BigEndian if needed. Debug.Assert(BitConverter.IsLittleEndian); _recursive = IsRecursive(stream); _reader = new BinaryReader(stream, Encoding.UTF8); _referenceMap = new ReferenceMap(knownObjects); _binder = binder ?? FixedObjectBinder.Empty; _cancellationToken = cancellationToken; if (!_recursive) { _valueStack = SOW.s_variantStackPool.Allocate(); _constructionStack = s_constructionStackPool.Allocate(); _memberList = SOW.s_variantListPool.Allocate(); _memberReader = new VariantListReader(_memberList); } }
/// <inheritdoc />
public Hash128 GetHash128(IBuildLogger log)
{
    HashSet<CacheEntry> hashObjects = new HashSet<CacheEntry>();
    using (log.ScopedStep(LogLevel.Verbose, $"Gather Objects {GetType().Name}", Command.fileName))
    {
        if (Command.serializeObjects != null)
        {
            foreach (var serializeObject in Command.serializeObjects)
                hashObjects.Add(BuildCacheUtility.GetCacheEntry(serializeObject.serializationObject));
        }
    }

    List<Hash128> hashes = new List<Hash128>();
    using (log.ScopedStep(LogLevel.Verbose, "Hashing Command", Command.fileName))
        hashes.Add(Command.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing UsageSet", Command.fileName))
        hashes.Add(UsageSet.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing ReferenceMap", Command.fileName))
        hashes.Add(ReferenceMap.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing Objects", Command.fileName))
        hashes.Add(HashingMethods.Calculate(hashObjects).ToHash128());
    hashes.Add(DependencyHash);

    return HashingMethods.Calculate(hashes).ToHash128();
}
/// <summary>
/// Constructor taking the initialSize and maxSize. An <see cref="ArgumentException"/> is thrown
/// when <paramref name="initialSize"/> is larger than <paramref name="maxSize"/>.
/// <see cref="IPooledObjectFactory{T}.Create"/> is called <paramref name="initialSize"/> times to
/// pre-populate the pool; if it returns null, the object is not added to the pool.
/// </summary>
/// <param name="initialSize">The initial number of objects in the pool.</param>
/// <param name="maxSize">The maximum number of objects in the pool.</param>
/// <param name="factory">The factory that creates and destroys the pooled objects.</param>
/// <param name="validator">Validator instance. Can be <see langword="null"/>.</param>
/// <param name="acquiredInvalidLimit">The number of invalid objects tolerated when acquiring.</param>
public GenericObjectPool(int initialSize, int maxSize, IPooledObjectFactory<T> factory, IPooledObjectValidator<T> validator, int acquiredInvalidLimit)
{
    if (initialSize < 0)
    {
        throw new ArgumentException(Constants.InvalidInitialSize);
    }
    if (maxSize != -1 && maxSize < initialSize)
    {
        throw new ArgumentException(Constants.InvalidMaxSize);
    }
    if (factory == null)
    {
        throw new ArgumentNullException(nameof(factory));
    }

    this.initialSize = initialSize;
    this.maxSize = maxSize;
    this.factory = factory;
    this.validator = validator ?? new NeverValidateValidator();
    this.acquiredInvalidLimit = acquiredInvalidLimit;
    createdCount = 0;
    objectReturned = new AutoResetEvent(false);
    locker = new ReaderWriterLockSlim();
    objQueue = new ConcurrentQueue<T>();
    idleObjects = new ReferenceMap<T, bool>();

    for (var i = 0; i < initialSize; i++)
    {
        // Honor the documented contract: objects the factory fails to create
        // (returns null) are not added to the pool.
        var created = factory.Create();
        if (created != null)
        {
            objQueue.Enqueue(created);
            createdCount++;
        }
    }
}
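A minimal usage sketch for the pool above. StringBuilderFactory and the argument values are illustrative assumptions; only the constructor shape is taken from the snippet, and IPooledObjectFactory&lt;T&gt; is assumed to expose Create/Destroy as the XML docs suggest.

internal sealed class StringBuilderFactory : IPooledObjectFactory<StringBuilder>
{
    public StringBuilder Create() => new StringBuilder();
    public void Destroy(StringBuilder obj) { } // nothing unmanaged to release
}

// Pre-populate with 4 instances, cap the pool at 16, use the default
// (never-validating) validator, and tolerate up to 3 invalid acquisitions.
var pool = new GenericObjectPool<StringBuilder>(
    initialSize: 4,
    maxSize: 16,
    factory: new StringBuilderFactory(),
    validator: null,
    acquiredInvalidLimit: 3);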
/// <summary> /// Creates a new instance of a <see cref="StreamObjectWriter"/>. /// </summary> /// <param name="stream">The stream to write to.</param> /// <param name="knownObjects">An optional list of objects assumed known by the corresponding <see cref="StreamObjectReader"/>.</param> /// <param name="binder">A binder that provides object and type encoding.</param> /// <param name="recursive">True if the writer encodes objects recursively.</param> /// <param name="cancellationToken"></param> public StreamObjectWriter( Stream stream, ObjectData knownObjects = null, ObjectBinder binder = null, bool recursive = true, CancellationToken cancellationToken = default(CancellationToken)) { // String serialization assumes both reader and writer to be of the same endianness. // It can be adjusted for BigEndian if needed. Debug.Assert(BitConverter.IsLittleEndian); _writer = new BinaryWriter(stream, Encoding.UTF8); _referenceMap = new ReferenceMap(knownObjects); _binder = binder ?? FixedObjectBinder.Empty; _recursive = recursive; _cancellationToken = cancellationToken; WriteVersion(); if (_recursive) { _writer.Write((byte)EncodingKind.Recursive); } else { _writer.Write((byte)EncodingKind.NonRecursive); _valueStack = s_variantStackPool.Allocate(); _memberList = s_variantListPool.Allocate(); _memberWriter = new VariantListWriter(_memberList); } }
/// <inheritdoc />
public Hash128 GetHash128(IBuildLogger log)
{
#if UNITY_2019_3_OR_NEWER
    CacheEntry entry = BuildCacheUtility.GetCacheEntry(Scene);
#else
    CacheEntry entry = BuildCacheUtility.GetCacheEntry(ProcessedScene);
#endif
    HashSet<CacheEntry> hashObjects = new HashSet<CacheEntry>();
    using (log.ScopedStep(LogLevel.Verbose, "Gather Objects", Command.fileName))
        Command.GatherSerializedObjectCacheEntries(hashObjects);

    List<Hash128> hashes = new List<Hash128>();
    using (log.ScopedStep(LogLevel.Verbose, "Hashing Command", Command.fileName))
        hashes.Add(Command.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing UsageSet", Command.fileName))
        hashes.Add(UsageSet.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing ReferenceMap", Command.fileName))
        hashes.Add(ReferenceMap.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing PreloadInfo", Command.fileName))
        hashes.Add(PreloadInfo.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing Info", Command.fileName))
        hashes.Add(Info.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing Objects", Command.fileName))
        hashes.Add(HashingMethods.Calculate(hashObjects).ToHash128());
    hashes.Add(new Hash128(0, 0, 0, (uint)QualitySettingsApi.GetNumberOfLODsStripped()));
    hashes.Add(DependencyHash);

    return HashingMethods.Calculate(hashes, Scene, entry).ToHash128();
}
/// <inheritdoc />
public Hash128 GetHash128()
{
#if UNITY_2019_3_OR_NEWER
    return HashingMethods.Calculate(Command, UsageSet.GetHash128(), ReferenceMap.GetHash128(), Scene, PreloadInfo).ToHash128();
#else
    var processedSceneHash = HashingMethods.CalculateFile(ProcessedScene).ToHash128();
    return HashingMethods.Calculate(Command, UsageSet.GetHash128(), ReferenceMap.GetHash128(), Scene, processedSceneHash, PreloadInfo).ToHash128();
#endif
}
internal ChangeProcessor(CommonDataServices services, DataContext context)
{
    this.services = services;
    this.context = context;
    this.tracker = services.ChangeTracker;
    this.changeDirector = services.ChangeDirector;
    this.currentParentEdges = new EdgeMap();
    this.originalChildEdges = new EdgeMap();
    this.originalChildReferences = new ReferenceMap();
}
internal ChangeProcessor(CommonDataServices services, DataContext context)
{
    this._services = services;
    this._context = context;
    this._tracker = services.ChangeTracker;
    this._changeDirector = services.ChangeDirector;
    this._currentParentEdges = new EdgeMap();
    this._originalChildEdges = new EdgeMap();
    this._originalChildReferences = new ReferenceMap();
}
public void TestReferenceMapConstructor()
{
    var map1 = new ReferenceMap<string, Order>();
    map1.HashAbility();

    var map2 = new ReferenceMap<string, Order>(10000);
    map2.HashAbility();
}
public void CreateReferenceMapping()
{
    if (IsReferenceMappingCreated)
    {
        return;
    }

    m_referenceMap = new ReferenceMap(AllObjects);
    IsReferenceMappingCreated = true;
}
/// <inheritdoc />
public Hash128 GetHash128()
{
#if UNITY_2019_3_OR_NEWER
    var prefabHashes = AssetDatabase.GetDependencies(Scene).Where(path => path.EndsWith(".prefab")).Select(AssetDatabase.GetAssetDependencyHash);
    return HashingMethods.Calculate(Command, UsageSet.GetHash128(), ReferenceMap.GetHash128(), Scene, PreloadInfo, Info, prefabHashes).ToHash128();
#else
    var processedSceneHash = HashingMethods.CalculateFile(ProcessedScene).ToHash128();
    var prefabHashes = AssetDatabase.GetDependencies(Scene).Where(path => path.EndsWith(".prefab")).Select(AssetDatabase.GetAssetDependencyHash);
    return HashingMethods.Calculate(Command, UsageSet.GetHash128(), ReferenceMap.GetHash128(), Scene, processedSceneHash, PreloadInfo, Info, prefabHashes).ToHash128();
#endif
}
public void TestReferenceMapCollectionOperations()
{
    var map = new ReferenceMap<Order, Bill>();
    map.Fill(x => new KeyValuePair<Order, Bill>(new Order { Id = x }, new Bill { Id = x }));
    map.CollectionOperations<KeyValuePair<Order, Bill>>(1000);
}
protected AddOrRemoveMusicChoiceAction(GameObject gameObject, ZEventListener comp)
{
    ReferenceMap referenceMap_ = G.Sys.LevelEditor_.ReferenceMap_;
    originalHandle = referenceMap_.GetHandleOrNull(gameObject);
    newHandle = referenceMap_.GetHandleOrNull<GameObject>(null);
    addedComponentHandle = referenceMap_.GetHandleOrNull(comp);
    isAdd = (comp == null);
    if (!isAdd)
    {
        componentBytes = Serializers.BinarySerializer.SaveComponentToBytes(comp, null);
    }
}
internal CodeFrame()
{
    Layouts = new FastList<MemoryLayout>();
    Ops = new FastList<Op>();
    OpData = new FastList<Int32>();
    Strings = new FastList<String>();
    _references = new ReferenceMap();
    HandleMap = new FastList<Int32>();
    InternalTypes = new Dictionary<String, Int32>();
    _internalClasses = new ClassMap();
    InternalInstances = new FastList<InstanceData>();
    LateBounds = new FastList<LateBoundSymbol>();
    InternalConstructors = new FastList<ConstructorData>();
}
/// <inheritdoc />
public Hash128 GetHash128()
{
    HashSet<CacheEntry> hashObjects = new HashSet<CacheEntry>();
    if (Command.serializeObjects != null)
    {
        foreach (var serializeObject in Command.serializeObjects)
            hashObjects.Add(BuildCacheUtility.GetCacheEntry(serializeObject.serializationObject));
    }

    return HashingMethods.Calculate(Command, UsageSet.GetHash128(), ReferenceMap.GetHash128(), Info, hashObjects).ToHash128();
}
public DocumentData(IEnumerable<Reference> references)
    : this()
{
    if (references is null)
    {
        throw new System.ArgumentNullException(nameof(references));
    }

    foreach (var reference in references)
    {
        if (!ReferenceMap.ContainsKey(reference.Label))
        {
            ReferenceMap.Add(reference.Label, reference);
        }
    }
}
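A usage sketch of the dedup behavior above: duplicate labels are ignored, so the first occurrence wins. The Reference initializer shape is an assumption; only the Label property is confirmed by the snippet.

var data = new DocumentData(new[]
{
    new Reference { Label = "fig:intro" },
    new Reference { Label = "fig:intro" }, // ignored: the label is already mapped
    new Reference { Label = "sec:setup" },
});
// data.ReferenceMap now holds two entries: "fig:intro" and "sec:setup".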
public void TestReferenceMap()
{
    var orderList = new List<Order>();
    for (var i = 0; i < 10000; i++)
    {
        orderList.Add(new Order { Id = i, Name = i.ToString() });
    }

    var map = new ReferenceMap<Order, Bill>();
    foreach (var order in orderList)
    {
        map.Add(order, new Bill { Id = order.Id, Count = order.Id });
    }

    foreach (var order in orderList)
    {
        Assert.True(map.ContainsKey(order));
    }

    foreach (var order in orderList)
    {
        Assert.Equal(order.Id, map[order].Id);
        Assert.Equal(order.Id, map[order].Count);
    }

    // Fresh instances with the same values must not match: the map compares
    // keys by reference, not by value equality.
    var newOrderList = new List<Order>();
    for (var i = 0; i < 10000; i++)
    {
        newOrderList.Add(new Order { Id = i, Name = i.ToString() });
    }

    foreach (var order in newOrderList)
    {
        Assert.False(map.ContainsKey(order));
    }
}
/// <inheritdoc />
public Hash128 GetHash128()
{
    var prefabHashes = AssetDatabase.GetDependencies(Scene).Where(path => path.EndsWith(".prefab")).Select(AssetDatabase.GetAssetDependencyHash);
#if UNITY_2019_3_OR_NEWER
    CacheEntry entry = BuildCacheUtility.GetCacheEntry(Scene);
#else
    CacheEntry entry = BuildCacheUtility.GetCacheEntry(ProcessedScene);
#endif
    HashSet<CacheEntry> hashObjects = new HashSet<CacheEntry>();
    if (Command.serializeObjects != null)
    {
        foreach (var serializeObject in Command.serializeObjects)
            hashObjects.Add(BuildCacheUtility.GetCacheEntry(serializeObject.serializationObject));
    }

    return HashingMethods.Calculate(Command, UsageSet.GetHash128(), ReferenceMap.GetHash128(), Scene, PreloadInfo, prefabHashes, entry, hashObjects).ToHash128();
}
/// <inheritdoc />
public Hash128 GetHash128(IBuildLogger log)
{
    HashSet<CacheEntry> hashObjects = new HashSet<CacheEntry>();
    using (log.ScopedStep(LogLevel.Verbose, $"Gather Objects {GetType().Name}", Command.fileName))
        Command.GatherSerializedObjectCacheEntries(hashObjects);

    List<Hash128> hashes = new List<Hash128>();
    using (log.ScopedStep(LogLevel.Verbose, "Hashing Command", Command.fileName))
        hashes.Add(Command.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing UsageSet", Command.fileName))
        hashes.Add(UsageSet.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing ReferenceMap", Command.fileName))
        hashes.Add(ReferenceMap.GetHash128());
    using (log.ScopedStep(LogLevel.Verbose, "Hashing Objects", Command.fileName))
        hashes.Add(HashingMethods.Calculate(hashObjects).ToHash128());
    hashes.Add(DependencyHash);

    return HashingMethods.Calculate(hashes).ToHash128();
}
public DocBuilder(BuilderSettings settings, ReferenceMap referenceMap, Outline outline, ApiDocuments apiDocuments,
    ArticleRenderer articleRenderer, ApiDocumentRenderer apiDocumentRenderer, ApiIndexRenderer apiIndexRenderer,
    OutputPath outputPath, MarkdownFormatter markdownFormatter, ReportGenerator reportGenerator, ILogger<DocBuilder> logger)
{
    _settings = settings;
    _referenceMap = referenceMap;
    _outline = outline;
    _apiDocuments = apiDocuments;
    _articleRenderer = articleRenderer;
    _apiDocumentRenderer = apiDocumentRenderer;
    _apiIndexRenderer = apiIndexRenderer;
    _outputPath = outputPath;
    _markdownFormatter = markdownFormatter;
    _reportGenerator = reportGenerator;
    _logger = logger;
}
void Analyze()
{
    // Build the ocean:
    // - an ocean (set) of islands (each island is a set of topics)
    // - also a hash for TopicAnalysis (topic -> {island set, refcount}) for a quick check if already present
    ocean = new Set();
    topicToTopicAnalysis = new Hashtable();
    referenceMap = _namespaceManager.GetReferenceMap(ExistencePolicy.ExistingOnly);

    foreach (string outerTopic in referenceMap.Keys)
    {
        // Response.Write("Consider: " + outerTopic + "<br>");
        Set islands = new Set();
        QualifiedTopicRevisionCollection linkedTopics = referenceMap[outerTopic];
        // Response.Write("Linked topics count: " + linkedTopics.Count + "<br>");

        TopicAnalysis outerTopicAnalysis = (TopicAnalysis)(topicToTopicAnalysis[outerTopic]);
        if (outerTopicAnalysis == null)
        {
            outerTopicAnalysis = new TopicAnalysis();
            topicToTopicAnalysis[outerTopic] = outerTopicAnalysis;
            // Response.Write("Creating info for " + outerTopic.Name + "<br>");
        }
        else
        {
            // Response.Write("Found existing info for " + outerTopic.Name + "<br>");
            // Response.Write("[island = " + outerTopicAnalysis.Island + "<br>");
        }

        if (outerTopicAnalysis.Island != null)
            islands.Add(outerTopicAnalysis.Island);

        // For each outer topic:
        //   islands = new set
        //   foreach linked topic:
        //     increment the refcount for the linked topic
        //     if the linked topic is already on an island, add that island to islands
        Set inNamespaceLinks = new Set();
        foreach (QualifiedTopicRevision linkedTopic in linkedTopics)
        {
            // Only analyze in this namespace
            if (linkedTopic.Namespace != _namespaceManager.Namespace)
            {
                // Response.Write("Skipping linked topic (" + linkedTopic.Name + ") because namespace doesn't match<br>");
                continue;
            }
            // Only do each topic once; have we seen this one?
            if (inNamespaceLinks.Contains(linkedTopic))
            {
                // Response.Write("Skipping linked topic (" + linkedTopic.Name + ") because seen before<br>");
                continue;
            }
            // Skip self-references
            if (linkedTopic.Equals(outerTopic))
            {
                continue;
            }
            inNamespaceLinks.Add(linkedTopic);

            TopicAnalysis linkedTopicAnalysis = (TopicAnalysis)(topicToTopicAnalysis[linkedTopic]);
            if (linkedTopicAnalysis == null)
            {
                linkedTopicAnalysis = new TopicAnalysis();
                topicToTopicAnalysis[linkedTopic] = linkedTopicAnalysis;
                // Response.Write("Creating info for " + linkedTopic.Name + "<br>");
            }
            else
            {
                // Response.Write("Found existing info for " + linkedTopic.Name + "<br>");
            }
            linkedTopicAnalysis.RefCount++;
            if (linkedTopicAnalysis.Island != null)
                islands.Add(linkedTopicAnalysis.Island);
        }

        // If islands is empty: create a new island holding the outer topic and all linked topics.
        // If islands has exactly one member: add all links and the outer topic to that island.
        // Otherwise we need to merge: build a new set from all the islands, update the
        // TopicAnalysis entries, and replace each of the old islands with the new island.
        Set newIsland;
        if (islands.Count == 1)
            newIsland = (Set)(islands.First); // if there's only one, we can just use that one
        else
        {
            newIsland = new Set();
            ocean.Add(newIsland);
        }

        // Add the island and the linkedTopics
        newIsland.Add(outerTopic);
        outerTopicAnalysis.Island = newIsland;
        foreach (QualifiedTopicRevision linkedTopic in inNamespaceLinks)
        {
            newIsland.Add(linkedTopic);
            ((TopicAnalysis)(topicToTopicAnalysis[linkedTopic])).Island = newIsland;
            // Response.Write("Placing " + linkedTopic.Name + "<br>");
        }

        // Now merge if there was originally more than one
        if (islands.Count > 1)
        {
            foreach (Set eachIsland in islands)
            {
                foreach (object o in eachIsland)
                    newIsland.Add(o);
                ocean.Remove(eachIsland);

                // Now update all the pointers from the TopicAnalysis entries
                foreach (QualifiedTopicRevision eachTopic in eachIsland)
                    ((TopicAnalysis)(topicToTopicAnalysis[eachTopic])).Island = newIsland;
            }
        }
    }
}
public ReferenceSet()
{
    map = new ReferenceMap<T, object>();
}
public void ClearReferenceMapping()
{
    IsReferenceMappingCreated = false;
    m_referenceMap = null;
}
public ReferenceSet(int capacity)
{
    map = new ReferenceMap<T, object>(capacity);
}
/// <inheritdoc />
public Hash128 GetHash128()
{
    Hash128 processedSceneHash = HashingMethods.CalculateFile(ProcessedScene).ToHash128();
    return HashingMethods.Calculate(Command, UsageSet.GetHash128(), ReferenceMap.GetHash128(), Scene, processedSceneHash, PreloadInfo).ToHash128();
}
internal ReferenceMap(ReferenceMap copy) : base(copy) { }
/// <inheritdoc />
public Hash128 GetHash128()
{
    return HashingMethods.Calculate(Command, UsageSet.GetHash128(), ReferenceMap.GetHash128(), Info).ToHash128();
}
/// <summary>
/// Returns a map of all references from all topics in this namespace.
/// </summary>
/// <param name="existencePolicy">Indicates whether the references should be filtered
/// only to those topics that actually exist.</param>
/// <returns>A map of topic names to the list of topics they reference.</returns>
public ReferenceMap GetReferenceMap(ExistencePolicy existencePolicy)
{
    ReferenceMap map = new ReferenceMap();
    foreach (TopicName topicName in AllTopics(ImportPolicy.DoNotIncludeImports))
    {
        map[topicName.LocalName] = AllReferencesByTopic(topicName.LocalName, existencePolicy);
    }
    return map;
}
/// <summary>
/// Returns a map of all references from all topics in this namespace.
/// </summary>
/// <param name="existencePolicy">Indicates whether the references should be filtered
/// only to those topics that actually exist.</param>
/// <returns>A map of topic names to the list of topics they reference.</returns>
public ReferenceMap GetReferenceMap(ExistencePolicy existencePolicy)
{
    ReferenceMap map = new ReferenceMap();
    foreach (QualifiedTopicName topicName in AllTopics(ImportPolicy.DoNotIncludeImports))
    {
        // We don't try to map topics that don't grant read permission - we're not
        // going to be able to parse them anyway.
        if (HasPermission(new UnqualifiedTopicName(topicName.LocalName), TopicPermission.Read))
        {
            map[topicName.LocalName] = AllReferencesByTopic(topicName.LocalName, existencePolicy);
        }
    }
    return map;
}
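A consumption sketch matching how the map is read in the Analyze() methods below: keys are local topic names and indexing yields the referenced revisions. The manager variable is a hypothetical NamespaceManager instance.

ReferenceMap map = manager.GetReferenceMap(ExistencePolicy.ExistingOnly);
foreach (string topic in map.Keys)
{
    QualifiedTopicRevisionCollection targets = map[topic];
    Console.WriteLine("{0} -> {1} reference(s)", topic, targets.Count);
}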
public void TestReferenceMapDictionaryOperations()
{
    var map = new ReferenceMap<Order, Bill>();
    ReferenceMapDictionaryOperations(map);
}
private void Analyze()
{
    // Build the ocean:
    // - an ocean (set) of islands (each island is a set of topics)
    // - also a hash for TopicAnalysis (topic -> {island set, refcount}) for a quick check if already present
    _referenceMap = _namespaceManager.GetReferenceMap(ExistencePolicy.ExistingOnly);
    foreach (string outerTopic in _referenceMap.Keys)
    {
        Ocean islands = new Ocean();
        QualifiedTopicRevisionCollection linkedTopics = _referenceMap[outerTopic];
        QualifiedTopicRevision outerRevision = new QualifiedTopicRevision(outerTopic, _namespaceManager.Namespace);

        TopicAnalysis outerTopicAnalysis = null;
        if (!_topicToTopicAnalysis.ContainsKey(outerRevision))
        {
            outerTopicAnalysis = new TopicAnalysis();
            _topicToTopicAnalysis[outerRevision] = outerTopicAnalysis;
        }
        else
        {
            outerTopicAnalysis = _topicToTopicAnalysis[outerRevision];
        }

        if (outerTopicAnalysis.Island != null)
        {
            islands.Add(outerTopicAnalysis.Island);
        }

        // For each outer topic:
        //   islands = new set
        //   foreach linked topic:
        //     increment the refcount for the linked topic
        //     if the linked topic is already on an island, add that island to islands
        Island inNamespaceLinks = new Island();
        foreach (QualifiedTopicRevision linkedTopic in linkedTopics)
        {
            // Only analyze in this namespace
            if (linkedTopic.Namespace != _namespaceManager.Namespace)
            {
                // Response.Write("Skipping linked topic (" + linkedTopic.Name + ") because namespace doesn't match<br>");
                continue;
            }
            // Only do each topic once; have we seen this one?
            if (inNamespaceLinks.Contains(linkedTopic))
            {
                // Response.Write("Skipping linked topic (" + linkedTopic.Name + ") because seen before<br>");
                continue;
            }
            // Skip self-references
            if (linkedTopic.Equals(outerTopic))
            {
                continue;
            }
            inNamespaceLinks.Add(linkedTopic);

            TopicAnalysis linkedTopicAnalysis = null;
            if (!_topicToTopicAnalysis.ContainsKey(linkedTopic))
            {
                linkedTopicAnalysis = new TopicAnalysis();
                _topicToTopicAnalysis[linkedTopic] = linkedTopicAnalysis;
            }
            else
            {
                linkedTopicAnalysis = _topicToTopicAnalysis[linkedTopic];
            }
            linkedTopicAnalysis.RefCount++;
            if (linkedTopicAnalysis.Island != null)
            {
                islands.Add(linkedTopicAnalysis.Island);
            }
        }

        // If islands is empty: create a new island holding the outer topic and all linked topics.
        // If islands has exactly one member: add all links and the outer topic to that island.
        // Otherwise we need to merge: build a new set from all the islands, update the
        // TopicAnalysis entries, and replace each of the old islands with the new island.
        Island newIsland;
        if (islands.Count == 1)
        {
            newIsland = islands.First; // if there's only one, we can just use that one
        }
        else
        {
            newIsland = new Island();
            _ocean.Add(newIsland);
        }

        // Add the island and the linkedTopics
        newIsland.Add(new QualifiedTopicRevision(outerTopic, _namespaceManager.Namespace));
        outerTopicAnalysis.Island = newIsland;
        foreach (QualifiedTopicRevision linkedTopic in inNamespaceLinks)
        {
            newIsland.Add(linkedTopic);
            _topicToTopicAnalysis[linkedTopic].Island = newIsland;
            // Response.Write("Placing " + linkedTopic.Name + "<br>");
        }

        // Now merge if there was originally more than one
        if (islands.Count > 1)
        {
            foreach (Island eachIsland in islands)
            {
                foreach (QualifiedTopicRevision revision in eachIsland)
                {
                    newIsland.Add(revision);
                }
                _ocean.Remove(eachIsland);

                // Now update all the pointers from the TopicAnalysis entries
                foreach (QualifiedTopicRevision eachTopic in eachIsland)
                {
                    _topicToTopicAnalysis[eachTopic].Island = newIsland;
                }
            }
        }
    }
}
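The merge step is the crux of the algorithm above, so here is a distilled, self-contained sketch of it using plain HashSet&lt;string&gt; islands (the topic names are illustrative): when a topic links into several existing islands, they all collapse into a single island.

var islandA = new HashSet<string> { "TopicA" };
var islandB = new HashSet<string> { "TopicB" };
var ocean = new List<HashSet<string>> { islandA, islandB };

// "TopicC" links to members of both islands, so the islands merge into one.
var merged = new HashSet<string> { "TopicC" };
foreach (var island in new[] { islandA, islandB })
{
    merged.UnionWith(island);
    ocean.Remove(island);
}
ocean.Add(merged); // the ocean now holds a single island: { TopicA, TopicB, TopicC }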