public void Add(T result) { if (!_results.Contains(result)) { _results.Add(result); } }
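A note on the guard-then-add pattern above: it is needed for list-like collections, but if the backing field is itself a C5 set, the Contains check is redundant, because C5's Add already reports whether the item was inserted. A minimal sketch, assuming a hypothetical _results field declared as C5.HashSet<T> (the original may well be a list, where the guard is required):

class ResultCollector<T>
{
    // Hypothetical backing field mirroring the snippet above.
    private readonly C5.HashSet<T> _results = new C5.HashSet<T>();

    // C5's Add returns false and leaves the set unchanged when the
    // item is already present, so no Contains guard is required.
    public bool Add(T result)
    {
        return _results.Add(result);
    }
}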
private bool ExportIsAllowed(UUID creatorId) { return m_allowedCreatorIds.IsEmpty || m_allowedCreatorIds.Contains(creatorId); }
public string GetNext() { int index = 0; string name; for (; ;) { name = NameMaker.UniqueName(index++); if (!names.Contains(name)) { return(name); } } }
protected void CheckEnums( string name, int expectedTypes, string[] expected, string[] notExpected ) { C5.HashSet<string> valsToFind = new C5.HashSet<string>( ); valsToFind.AddAll( expected ); C5.HashSet<string> valsNotToFind = new C5.HashSet<string>( ); valsNotToFind.AddAll( notExpected ); AssemblyHelper.CheckAssembly( name, expectedTypes, delegate( TypeDefinition typeDef ) { return typeDef.BaseType.FullName == "System.Enum"; }, delegate( TypeDefinition typeDef ) { // num expected + num unexpected + field storage int totalValues = expected.Length + notExpected.Length + 1; Assert.AreEqual( totalValues, typeDef.Fields.Count, String.Format( "Type should have {0} values.", totalValues ) ); foreach ( FieldDefinition field in typeDef.Fields ) { Assert.IsFalse( valsNotToFind.Contains( field.Name ), String.Format( "Did not expect to find value '{0}'.", field.Name ) ); valsToFind.Remove( field.Name ); } Assert.IsFalse( valsToFind.Count > 0, "Failed to find all expected values." ); } ); }
public void CheckNestedTypes() { string xml = String.Format( @"<?xml version='1.0'?>" + @"<Obfuscator>" + @"<Var name='InPath' value='{0}' />" + @"<Var name='OutPath' value='{1}' />" + @"<Module file='$(InPath)\AssemblyWithNestedTypes.dll'>" + @"<SkipType name='TestClasses.ClassA/NestedClassA' />" + @"</Module>" + @"</Obfuscator>", TestHelper.InputPath, TestHelper.OutputPath); TestHelper.BuildAndObfuscate("AssemblyWithNestedTypes", string.Empty, xml); C5.HashSet <string> typesToFind = new C5.HashSet <string> (); typesToFind.Add("A.A"); typesToFind.Add("A.A/a"); typesToFind.Add("A.A/NestedClassA"); AssemblyHelper.CheckAssembly("AssemblyWithNestedTypes", 1, delegate(TypeDefinition typeDef) { return(true); }, delegate(TypeDefinition typeDef) { Assert.IsTrue(typesToFind.Contains(typeDef.ToString()), "Type {0} not expected.", typeDef.ToString()); typesToFind.Remove(typeDef.ToString()); }); Assert.IsTrue(typesToFind.Count == 0, "Not all types found."); }
// sets connection two ways, node <-> node public void setAccessibility(Nav2dNode[] nodes, int layerMask) { foreach (var neighbor in nodes) { // skip if the input neighbor is null or the connection is already set if (neighbor == null || neighbors.Contains(neighbor)) { continue; } var hit = Physics2D.Raycast(worldPos, (neighbor.worldPos - worldPos), (neighbor.worldPos - worldPos).magnitude, layerMask); if (hit.collider == null) { walkableConnectionNum++; neighbor.walkableConnectionNum++; neighbor.zone = this.zone; // add the neighbor to the list neighbors.Add(neighbor); // add this node to the neighbor neighbor.neighbors.Add(this); } } this.accessible = walkableConnectionNum >= 1; }
public static void CheckAssembly(string name, int expectedTypes, string[] expectedMethods, string[] notExpectedMethods, Predicate <TypeDefinition> isType, Action <TypeDefinition> checkType) { C5.HashSet <string> methodsToFind = new C5.HashSet <string>( ); methodsToFind.AddAll(expectedMethods); C5.HashSet <string> methodsNotToFind = new C5.HashSet <string>( ); methodsNotToFind.AddAll(notExpectedMethods); CheckAssembly(name, expectedTypes, isType, delegate(TypeDefinition typeDef) { // make sure we have enough methods... Assert.AreEqual(expectedMethods.Length + notExpectedMethods.Length, typeDef.Methods.Count, "Some of the methods for the type are missing."); foreach (MethodDefinition method in typeDef.Methods) { Assert.IsFalse(methodsNotToFind.Contains(method.Name), String.Format( "Did not expect to find method '{0}'.", method.Name)); methodsToFind.Remove(method.Name); } if (checkType != null) { checkType(typeDef); } }); Assert.IsFalse(methodsToFind.Count > 0, "Failed to find all expected methods."); }
public void CheckNestedTypes( ) { string xml = String.Format( @"<?xml version='1.0'?>" + @"<Obfuscator>" + @"<Var name='InPath' value='{0}' />" + @"<Var name='OutPath' value='{1}' />" + @"<Module file='$(InPath)\AssemblyWithNestedTypes.dll'>" + @"<SkipType name='TestClasses.ClassA/NestedClassA' />" + @"</Module>" + @"</Obfuscator>", TestHelper.InputPath, TestHelper.OutputPath ); TestHelper.BuildAndObfuscate( "AssemblyWithNestedTypes", string.Empty, xml ); C5.HashSet<string> typesToFind = new C5.HashSet<string>( ); typesToFind.Add( "A.A" ); typesToFind.Add( "A.A/a" ); typesToFind.Add( "A.A/NestedClassA" ); AssemblyHelper.CheckAssembly( "AssemblyWithNestedTypes", 3, delegate( TypeDefinition typeDef ) { return true; }, delegate( TypeDefinition typeDef ) { Assert.IsTrue( typesToFind.Contains( typeDef.ToString( ) ), "Type {0} not expected.", typeDef.ToString( ) ); typesToFind.Remove( typeDef.ToString( ) ); } ); Assert.IsTrue( typesToFind.Count == 0, "Not all types found." ); }
public override void RemoveAvatar(PhysicsActor actor) { BasicActor act = (BasicActor)actor; if (_actors.Contains(act)) { _actors.Remove(act); } }
private bool AllowOverflow(PostedEvent postedEvent) { return OVERFLOWABLE_EVENTS.Contains(postedEvent.EventType); }
public static IEnumerable <IPathNode <N> > GetReachableNodes <N>(N start, float maxCost) where N : INode <N> { C5.IDictionary <N, PathNode <N> > nodeDictionary = new C5.HashDictionary <N, PathNode <N> >(); C5.IPriorityQueue <PathNode <N> > openSet = new C5.IntervalHeap <PathNode <N> >(new PathNodeComparer <N>(), C5.MemoryType.Normal); C5.ICollection <N> closedSet = new C5.HashSet <N>(); C5.ArrayList <IPathNode <N> > res = new C5.ArrayList <IPathNode <N> >(C5.MemoryType.Normal); PathNode <N> curNode = new PathNode <N>(start); curNode.g = 0; nodeDictionary.Add(start, curNode); while (true) { res.Add(curNode); foreach (IEdge <N> edge in curNode.node) { N other = edge.GetEnd(); if (!closedSet.Contains(other)) { PathNode <N> otherNode = null; if (!nodeDictionary.Find(ref other, out otherNode)) { otherNode = new PathNode <N>(other); nodeDictionary.Add(other, otherNode); } float newG = edge.GetCost() + curNode.g; if (otherNode.g > newG) { otherNode.g = newG; if (otherNode.queueHandle != null) { openSet.Replace(otherNode.queueHandle, otherNode); } otherNode.prev = curNode; } if (otherNode.queueHandle == null) { C5.IPriorityQueueHandle <PathNode <N> > handle = null; openSet.Add(ref handle, otherNode); otherNode.queueHandle = handle; } } } if (openSet.IsEmpty) { return(res); } closedSet.Add(curNode.node); curNode = openSet.DeleteMin(); if (curNode.g > maxCost) { return(res); } } }
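The graph searches in this section (this one and GetShortestPath below) lean on C5's handle-based priority queue: Add(ref handle, item) hands back a handle that Replace can later use to re-prioritize that entry in O(log n) instead of rescanning the heap. A minimal self-contained sketch of that pattern, independent of the PathNode types above:

class IntervalHeapHandleDemo
{
    static void Main()
    {
        C5.IPriorityQueue<int> open = new C5.IntervalHeap<int>();

        // Adding with a handle lets us update this entry in place later.
        C5.IPriorityQueueHandle<int> handle = null;
        open.Add(ref handle, 42);
        open.Add(7);

        // Re-prioritize the handled entry (42 -> 3), just as the search
        // does when it finds a cheaper g-cost for an already-queued node.
        open.Replace(handle, 3);

        while (!open.IsEmpty)
            System.Console.WriteLine(open.DeleteMin()); // prints 3, then 7
    }
}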
public void Enqueue(SceneObjectPart part) { lock (m_syncObject) { if (!m_ids.Contains(part.LocalId)) { m_ids.Add(part.LocalId); m_queue.Enqueue(part); } } }
public void AddEntry() { if (_functionNames.Contains(id)) { return; } functions.Add(this.MakeSig()); _functionNames.Add(id); type = null; id = null; parmTypes.Clear(); parmNames.Clear(); }
/// <summary> /// Subscribe to receive notification when the transaction ends. /// </summary> /// <param name="subscriber"> /// The subscriber to be notified when the transaction ends. /// </param> /// <remarks> /// The subscription mechanism can handle a large number of subscribers. /// </remarks> public void Subscribe(ITransactionNotification subscriber) { Check.DoRequireArgumentNotNull(subscriber, "subscriber"); Check.DoCheckOperationValid(IsActive, "Transaction is not active"); EnsureSubscribedToMaster(); lock (_watchers) { if (!_watchers.Contains(subscriber)) { _watchers.Add(subscriber); } } }
public IInventoryStorage GetProvider(UUID userId) { if (!_migrationActive) { return(_cassandraStorage); } lock (_migratedUsers) { if (_migratedUsers.Contains(userId)) { return(_cassandraStorage); } } //nothing in our cache, we need to consult the database MigrationStatus status = _migrationStatusChecker.GetUserMigrationStatus(userId); if (status == MigrationStatus.Migrated) { lock (_migratedUsers) { _migratedUsers.Add(userId); } return(_cassandraStorage); } else if (status == MigrationStatus.InProgress) { throw new InventoryStorageException("Inventory can not be used while a migration is in progress"); } else { return(_legacyStorage); } }
protected void CheckProperties(string name, int expectedTypes, string[] expected, string[] notExpected) { C5.HashSet <string> propsToFind = new C5.HashSet <string> (); propsToFind.AddAll(expected); C5.HashSet <string> propsNotToFind = new C5.HashSet <string> (); propsNotToFind.AddAll(notExpected); string[] expectedMethods = new string[expected.Length * 2]; for (int i = 0; i < expected.Length; i++) { expectedMethods [i * 2 + 0] = "get_" + expected [i]; expectedMethods [i * 2 + 1] = "set_" + expected [i]; } string[] notExpectedMethods = new string[notExpected.Length * 2]; for (int i = 0; i < notExpected.Length; i++) { notExpectedMethods [i * 2 + 0] = "get_" + notExpected [i]; notExpectedMethods [i * 2 + 1] = "set_" + notExpected [i]; } AssemblyHelper.CheckAssembly(name, expectedTypes, expectedMethods, notExpectedMethods, delegate(TypeDefinition typeDef) { return(true); }, delegate(TypeDefinition typeDef) { Assert.AreEqual(expected.Length, typeDef.Properties.Count, expected.Length == 1 ? "Type should have 1 property (others dropped by default)." : String.Format("Type should have {0} properties (others dropped by default).", expected.Length)); foreach (PropertyDefinition prop in typeDef.Properties) { Assert.IsFalse(propsNotToFind.Contains(prop.Name), String.Format( "Did not expect to find property '{0}'.", prop.Name)); propsToFind.Remove(prop.Name); } Assert.IsFalse(propsToFind.Count > 0, "Failed to find all expected properties."); }); }
private void PurgeFolderInternal(InventoryFolderBase folder, long timeStamp) { //block all deletion requests for a folder with a 0 id if (folder.ID == UUID.Zero) { throw new UnrecoverableInventoryStorageException("Refusing to allow the deletion of the inventory ZERO root folder"); } Dictionary<byte[], Dictionary<string, List<Mutation>>> muts = new Dictionary<byte[], Dictionary<string, List<Mutation>>>(); byte[] folderIdBytes = ByteEncoderHelper.GuidEncoder.ToByteArray(folder.ID.Guid); byte[] parentFolderIdBytes = ByteEncoderHelper.GuidEncoder.ToByteArray(folder.ParentID.Guid); //to purge a folder, we have to find all subfolders and items inside it //for each subfolder we find, we recurse into all sub-subfolders and //collect their items and folders. Once we have all of them down to the //last leaf level we do simple removes on all the items and folders List<UUID> allFolders = new List<UUID>(); List<UUID> allItems = new List<UUID>(); C5.HashSet<UUID> rootItems = new C5.HashSet<UUID>(); C5.HashSet<UUID> rootFolders = new C5.HashSet<UUID>(); StringBuilder debugFolderList = new StringBuilder(); this.RecursiveCollectSubfoldersAndItems(folder.ID, folder.Owner, allFolders, allItems, rootItems, rootFolders, true, null, debugFolderList); this.DebugFolderPurge("PurgeFolderInternal", folder, debugFolderList); List<byte[]> allFolderIdBytes = new List<byte[]>(); foreach (UUID fid in allFolders) { allFolderIdBytes.Add(ByteEncoderHelper.GuidEncoder.ToByteArray(fid.Guid)); } List<byte[]> allItemIdBytes = new List<byte[]>(); List<byte[]> rootItemIdBytes = new List<byte[]>(); foreach (UUID iid in allItems) { byte[] itemIdBytes = ByteEncoderHelper.GuidEncoder.ToByteArray(iid.Guid); allItemIdBytes.Add(itemIdBytes); if (rootItems.Contains(iid)) { rootItemIdBytes.Add(itemIdBytes); } } //we have all the contents, so delete the actual folders and their versions... //this will wipe out the folders and in turn all items in subfolders byte[] ownerIdBytes = ByteEncoderHelper.GuidEncoder.ToByteArray(folder.Owner.Guid); this.GetFolderDeletionMutations(ownerIdBytes, allFolderIdBytes, timeStamp, muts); //then we delete this actual folder this.GetSingleFolderDeletionMutations(ownerIdBytes, folderIdBytes, timeStamp, muts); //and remove the subfolder reference from this folder's parent this.GetSubfolderEntryDeletionMutations(folderIdBytes, parentFolderIdBytes, timeStamp, muts); //delete the ItemParents folder references for the removed items... foreach (byte[] itemId in allItemIdBytes) { this.GetItemParentDeletionMutations(itemId, timeStamp, muts); } //increment the version of the parent of the purged folder if (folder.ParentID != UUID.Zero) { this.GetFolderVersionIncrementMutations(muts, parentFolderIdBytes); } ICluster cluster = AquilesHelper.RetrieveCluster(_clusterName); cluster.Execute(new ExecutionBlock(delegate(Apache.Cassandra.Cassandra.Client client) { client.batch_mutate(muts, DEFAULT_CONSISTENCY_LEVEL); return null; }), KEYSPACE); }
public void C5_HashSet() => c5_hashset.Contains(Search);
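The expression-bodied method above is a benchmark body and only makes sense inside a harness. A hypothetical BenchmarkDotNet host for it (the class name, field initialization, and parameter values below are assumptions for illustration, not the original source):

using BenchmarkDotNet.Attributes;

public class ContainsBenchmarks
{
    private readonly C5.HashSet<int> c5_hashset = new C5.HashSet<int>();

    // Value probed on each invocation; assumed here to be a parameter.
    [Params(500, 1_000_000)]
    public int Search;

    [GlobalSetup]
    public void Setup()
    {
        // Populate the set once, outside the measured region.
        for (int i = 0; i < 100_000; i++) { c5_hashset.Add(i); }
    }

    [Benchmark]
    public void C5_HashSet() => c5_hashset.Contains(Search);
}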
/// <summary> /// Archive the region requested. /// </summary> /// <exception cref="System.IO.IOException">if there was an io problem with creating the file</exception> public void ArchiveRegion() { Dictionary <UUID, int> assetUuids = new Dictionary <UUID, int>(); List <EntityBase> entities = m_scene.GetEntities(); List <SceneObjectGroup> sceneObjects = new List <SceneObjectGroup>(); // Filter entities so that we only have scene objects. // FIXME: Would be nicer to have this as a proper list in SceneGraph, since lots of methods // end up having to do this foreach (EntityBase entity in entities) { if (entity is SceneObjectGroup) { SceneObjectGroup sceneObject = (SceneObjectGroup)entity; if (MustCheckCreatorIds) { bool failedCreatorCheck = false; foreach (SceneObjectPart part in sceneObject.GetParts()) { if (!ExportIsAllowed(part.CreatorID)) { failedCreatorCheck = true; break; } } if (failedCreatorCheck) { continue; } } if (!sceneObject.IsDeleted && !sceneObject.IsAttachment) { sceneObjects.Add(sceneObject); } } } if (m_storeAssets) { UuidGatherer assetGatherer = new UuidGatherer(m_scene.CommsManager.AssetCache); foreach (SceneObjectGroup sceneObject in sceneObjects) { assetGatherer.GatherAssetUuids(sceneObject, assetUuids); } } // Make sure that we also request terrain texture assets RegionSettings regionSettings = m_scene.RegionInfo.RegionSettings; if (m_storeAssets) { if (regionSettings.TerrainTexture1 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_1) { assetUuids[regionSettings.TerrainTexture1] = 1; } if (regionSettings.TerrainTexture2 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_2) { assetUuids[regionSettings.TerrainTexture2] = 1; } if (regionSettings.TerrainTexture3 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_3) { assetUuids[regionSettings.TerrainTexture3] = 1; } if (regionSettings.TerrainTexture4 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_4) { assetUuids[regionSettings.TerrainTexture4] = 1; } } if (MustCheckCreatorIds) { int originalCount = assetUuids.Count; m_log.DebugFormat( "[ARCHIVER]: Filtering {0} asset IDs for {1} allowed creators", originalCount, m_allowedCreatorIds.Count); C5.HashSet <UUID> assetsCreatedByAllowedUsers = this.CollectCreatedAssetIdsFromUserInventories(); IEnumerable <UUID> uuids = new List <UUID>(assetUuids.Keys); assetUuids.Clear(); foreach (UUID assetId in uuids) { if (assetsCreatedByAllowedUsers.Contains(assetId)) { assetUuids.Add(assetId, 1); } } m_log.DebugFormat( "[ARCHIVER]: Allowing export of {0} of {1} assets", assetUuids.Count, originalCount); } m_log.DebugFormat( "[ARCHIVER]: {0} scene objects to serialize requiring save of {1} assets", sceneObjects.Count, assetUuids.Count); TarArchiveWriter archiveWriter = new TarArchiveWriter(m_saveStream); // Asynchronously request all the assets required to perform this archive operation ArchiveWriteRequestExecution awre = new ArchiveWriteRequestExecution( sceneObjects, m_scene.RequestModuleInterface <ITerrainModule>(), m_scene.RequestModuleInterface <IRegionSerializerModule>(), m_scene, archiveWriter, m_requestId); new AssetsRequest( new AssetsArchiver(archiveWriter), assetUuids.Keys, m_scene.CommsManager.AssetCache, awre.ReceivedAllAssets).Execute(); }
protected void CheckEvents( string name, int expectedTypes, string[] expected, string[] notExpected ) { C5.HashSet<string> eventsToFind = new C5.HashSet<string>( ); eventsToFind.AddAll( expected ); C5.HashSet<string> eventsNotToFind = new C5.HashSet<string>( ); eventsNotToFind.AddAll( notExpected ); C5.HashSet<string> methodsToFind = new C5.HashSet<string>( ); for ( int i = 0; i < expected.Length; i++ ) { methodsToFind.Add( "add_" + expected[i] ); methodsToFind.Add( "remove_" + expected[i] ); } C5.HashSet<string> methodsNotToFind = new C5.HashSet<string>( ); for ( int i = 0; i < notExpected.Length; i++ ) { methodsNotToFind.Add( "add_" + notExpected[i] ); methodsNotToFind.Add( "remove_" + notExpected[i] ); } bool foundDelType = false; AssemblyHelper.CheckAssembly( name, expectedTypes, delegate( TypeDefinition typeDef ) { if ( typeDef.BaseType.FullName == "System.MulticastDelegate" ) { foundDelType = true; return false; } else return true; }, delegate( TypeDefinition typeDef ) { // make sure we have enough methods... // 2 methods / event + a method to fire them Assert.AreEqual( methodsToFind.Count + methodsNotToFind.Count + 1, typeDef.Methods.Count, "Some of the methods for the type are missing." ); foreach ( MethodDefinition method in typeDef.Methods ) { Assert.IsFalse( methodsNotToFind.Contains( method.Name ), String.Format( "Did not expect to find method '{0}'.", method.Name ) ); methodsToFind.Remove( method.Name ); } Assert.AreEqual( expected.Length, typeDef.Events.Count, expected.Length == 1 ? "Type should have 1 event (others dropped by default)." : String.Format( "Type should have {0} events (others dropped by default).", expected.Length ) ); foreach ( EventDefinition evt in typeDef.Events ) { Assert.IsFalse( eventsNotToFind.Contains( evt.Name ), String.Format( "Did not expect to find event '{0}'.", evt.Name ) ); eventsToFind.Remove( evt.Name ); } Assert.IsFalse( methodsToFind.Count > 0, "Failed to find all expected methods." ); Assert.IsFalse( eventsToFind.Count > 0, "Failed to find all expected events." ); } ); Assert.IsTrue( foundDelType, "Should have found the delegate type." ); }
static void Main(string[] args) { // cmd line params variables string dbConnStr = null; int runID = 0; int benchmarkMethodID = 0; int n; int maxN; try { DateTime startTime = DateTime.Now; //Console.WriteLine($"Args Count:" + args.Length.ToString()); //foreach (string s in args) //{ // Console.WriteLine(s); //} //Console.ReadKey(); string errMsg = PerfUtil.GetCmdLineParams_DbNAndMaxN(args, out dbConnStr, out runID, out benchmarkMethodID, out n, out maxN); //if (errMsg != null) //{ // Console.WriteLine(errMsg); //} //Console.WriteLine($"Args: {dbConnStr}; {runID.ToString()}; {benchmarkMethodID.ToString()}; {n.ToString()}; {maxN.ToString()}"); //Console.ReadKey(); int[] a; int[] c; C5.HashSet<int> set = new C5.HashSet<int>(); BenchUtil.PopulateArrays25_25_50PctUnique(maxN, out a, out c); // in a real-world scenario, we will probably have recently added the items into the set, so no need to try to clear the cache or anything for (int i = 0; i < maxN; i++) { set.Add(a[i]); } double overheadNanoSecs = PerfUtil.GetTimestampOverheadInNanoSeconds(); PerfUtil.DoGCCollect(); int iterations = 1; long startTicks; long endTicks; double ticks; // this is enough to JIT things and not put everything in the cache bool isContained = set.Contains(0); if (maxN <= 1000) { iterations = 1; // the amount of time taken for these is too small to measure with just one iteration - so we measure multiple iterations in a loop and get the total time // the mean time is this total time / iterations startTicks = Stopwatch.GetTimestamp(); for (int i = 0; i < maxN; i++) { set.Contains(c[i]); } endTicks = Stopwatch.GetTimestamp(); ticks = ((endTicks - startTicks) * n) / (double)maxN; } else { iterations = 1; startTicks = Stopwatch.GetTimestamp(); for (int i = 0; i < n; i++) // loop overhead is ok because we assume there will be some loop in a real-world scenario { set.Contains(c[i]); } endTicks = Stopwatch.GetTimestamp(); ticks = (double)(endTicks - startTicks); } double nanoSecs = PerfUtil.GetNanoSecondsFromTicks(ticks, Stopwatch.Frequency) - overheadNanoSecs; PerfDb.InsertMeasurement(dbConnStr, runID, benchmarkMethodID, n, iterations, nanoSecs, startTime, DateTime.Now); } catch (Exception ex) { Console.Write(ex.ToString()); if (!string.IsNullOrEmpty(dbConnStr)) { // write error to db PerfDb.InsertRunError(dbConnStr, runID, benchmarkMethodID, ex); } else { // log error to file } } }
/// <summary> /// Archive the region requested. /// </summary> /// <exception cref="System.IO.IOException">if there was an io problem with creating the file</exception> public void ArchiveRegion() { Dictionary <UUID, int> assetUuids = new Dictionary <UUID, int>(); List <EntityBase> entities = m_scene.GetEntities(); List <SceneObjectGroup> sceneObjects = new List <SceneObjectGroup>(); List <UUID> userlist = new List <UUID>(); // Filter entities so that we only have scene objects. // FIXME: Would be nicer to have this as a proper list in SceneGraph, since lots of methods // end up having to do this foreach (EntityBase entity in entities) { if (entity is SceneObjectGroup) { SceneObjectGroup sceneObject = (SceneObjectGroup)entity; // If storing assets, assume cross-grid and include the user list file if (m_storeAssets) { AddObjectUsersToList(userlist, sceneObject); } if (MustCheckCreatorIds) { bool failedCreatorCheck = false; foreach (SceneObjectPart part in sceneObject.GetParts()) { if (!ExportIsAllowed(part.CreatorID)) { failedCreatorCheck = true; break; } } if (failedCreatorCheck) { continue; } } if (!sceneObject.IsDeleted && !sceneObject.IsAttachment) { sceneObjects.Add(sceneObject); } } } if (m_storeAssets) { UuidGatherer assetGatherer = new UuidGatherer(m_scene.CommsManager.AssetCache); foreach (SceneObjectGroup sceneObject in sceneObjects) { assetGatherer.GatherAssetUuids(sceneObject, assetUuids); } } // Make sure that we also request terrain texture assets RegionSettings regionSettings = m_scene.RegionInfo.RegionSettings; if (m_storeAssets) { if (regionSettings.TerrainTexture1 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_1) { assetUuids[regionSettings.TerrainTexture1] = 1; } if (regionSettings.TerrainTexture2 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_2) { assetUuids[regionSettings.TerrainTexture2] = 1; } if (regionSettings.TerrainTexture3 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_3) { assetUuids[regionSettings.TerrainTexture3] = 1; } if (regionSettings.TerrainTexture4 != RegionSettings.DEFAULT_TERRAIN_TEXTURE_4) { assetUuids[regionSettings.TerrainTexture4] = 1; } } if (MustCheckCreatorIds) { int originalCount = assetUuids.Count; m_log.DebugFormat( "[ARCHIVER]: Filtering {0} asset IDs for {1} allowed creators", originalCount, m_allowedCreatorIds.Count); C5.HashSet <UUID> assetsCreatedByAllowedUsers = this.CollectCreatedAssetIdsFromUserInventories(); IEnumerable <UUID> uuids = new List <UUID>(assetUuids.Keys); assetUuids.Clear(); foreach (UUID assetId in uuids) { if (assetsCreatedByAllowedUsers.Contains(assetId)) { assetUuids.Add(assetId, 1); } } m_log.DebugFormat( "[ARCHIVER]: Allowing export of {0} of {1} assets", assetUuids.Count, originalCount); } m_log.DebugFormat( "[ARCHIVER]: {0} scene objects to serialize requiring save of {1} assets", sceneObjects.Count, assetUuids.Count); TarArchiveWriter archiveWriter = new TarArchiveWriter(m_saveStream); // Asynchronously request all the assets required to perform this archive operation ArchiveWriteRequestExecution awre = new ArchiveWriteRequestExecution( sceneObjects, m_scene.RequestModuleInterface <ITerrainModule>(), m_scene.RequestModuleInterface <IRegionSerializerModule>(), m_scene, archiveWriter, m_requestId); // Write out archive.xml control file first archiveWriter.WriteFile(ArchiveConstants.CONTROL_FILE_PATH, awre.CreateControlFile(assetUuids.Count > 0)); m_log.InfoFormat("[ARCHIVER]: Added {0} control file to archive.", ArchiveConstants.CONTROL_FILE_PATH); // Now include the user list file (only if assets are being saved and it produced a list). if (userlist.Count > 0) { StringBuilder sb = new StringBuilder(); foreach (UUID id in userlist) { String name = m_scene.CommsManager.UserService.Key2Name(id, false); if (!String.IsNullOrWhiteSpace(name)) { sb.AppendFormat("{0} {1}{2}", id, name, Environment.NewLine); } } String userlistContents = sb.ToString(); if (!String.IsNullOrWhiteSpace(userlistContents)) { archiveWriter.WriteFile(ArchiveConstants.USERLIST_FILE_PATH, userlistContents); m_log.InfoFormat("[ARCHIVER]: Added {0} file to archive.", ArchiveConstants.USERLIST_FILE_PATH); } } new AssetsRequest( new AssetsArchiver(archiveWriter, m_scene), assetUuids.Keys, m_scene.CommsManager.AssetCache, awre.ReceivedAllAssets).Execute(); }
public static IEnumerable <IPathNode <N> > GetShortestPath <N>(N start, N goal, IHeuristic <N> heuristic) where N : INode <N> { C5.IDictionary <N, PathNode <N> > nodeDictionary = new C5.HashDictionary <N, PathNode <N> >(); C5.IPriorityQueue <PathNode <N> > openSet = new C5.IntervalHeap <PathNode <N> >(new PathNodeComparer <N>()); C5.ICollection <N> closedSet = new C5.HashSet <N>(); PathNode <N> curNode = new PathNode <N>(start); curNode.g = 0; nodeDictionary.Add(start, curNode); while (true) { foreach (IEdge <N> edge in curNode.Node) { N other = edge.GetEnd(); if (!closedSet.Contains(other)) { PathNode <N> otherNode = null; if (!nodeDictionary.Find(ref other, out otherNode)) { otherNode = new PathNode <N>(other); nodeDictionary.Add(other, otherNode); } float newG = edge.GetCost() + curNode.g; if (otherNode.g > newG) { otherNode.g = newG; if (otherNode.queueHandle != null) { openSet.Replace(otherNode.queueHandle, otherNode); } otherNode.prev = curNode; } if (otherNode.queueHandle == null) { otherNode.h = heuristic.MinDist(other, goal); C5.IPriorityQueueHandle <PathNode <N> > handle = null; openSet.Add(ref handle, otherNode); otherNode.queueHandle = handle; } } } if (openSet.IsEmpty) { return(null); } closedSet.Add(curNode.Node); curNode = openSet.DeleteMin(); if (curNode.Node.Equals(goal)) { C5.ArrayList <IPathNode <N> > res = new C5.ArrayList <IPathNode <N> >(); do { res.Add(curNode); curNode = curNode.prev; } while (curNode != null); res.Reverse(); return(res); } } }
static void Main(string[] args) { //string outputFileName = @"e:\\proj\\summary.tsv"; //int minN = 100_000; //int maxN = 1_000_000; //int incrementNBy = 10_000; string errMsg = PerfUtil.GetCmdLineParams_OutputFileAndMinMaxIncN(args, out int minN, out int maxN, out int incrementNBy, out string outputFileName); int nCount = ((maxN - minN) / incrementNBy) + 1; int[] nArray = new int[nCount]; int idx = 0; for (int n = minN; n <= maxN; n += incrementNBy, idx++) { nArray[idx] = n; } const int LoopUnrollCount = 1; const int IterationCount = 512; const int IterationWarmupCount = 16; long[] ticksH = new long[nArray.Length * IterationCount * LoopUnrollCount]; int ticksIdxForH = 0; long[] ticksF = new long[nArray.Length * IterationCount * LoopUnrollCount]; int ticksIdxForF = 0; long[] ticksC = new long[nArray.Length * IterationCount * LoopUnrollCount]; int ticksIdxForC = 0; long startTicks; double overheadTicks = PerfUtil.GetTimestampOverheadInNanoSeconds(); int[] a; int[] c; SCG.HashSet <int> h = new HashSet <int>(); FastHashSet <int> f = new FastHashSet <int>(); C5.HashSet <int> c5 = new C5.HashSet <int>(); HashSetBench.BenchUtil.PopulateCollections25_25_50PctUnique(maxN, out a, out c, h, f, c5); // not sure if we should run benchmark 1 and then benchmark 2 separately so that the presence of the one doesn't affect the other??? // in practice they will probably not be run together one after the other PerfUtil.DoGCCollect(); int N; for (int j = 0; j < nArray.Length; j++) { N = nArray[j]; // not really sure what running the warmup really does - it can put things in the cache that maybe shouldn't be there because they won't be in a real application??? // still, if we execute the same code with the same data in a loop a lot of times, this will populate the cache unrealistically // also if we do a warmup, the JIT times will be removed, but does this represent reality - JIT times do happen in real running code??? for (int iterationIdx = 0; iterationIdx < IterationWarmupCount; iterationIdx++) { // SCG_Contains for (int i = 0; i < N; i++) { h.Contains(c[i]); } // Fast_Contains for (int i = 0; i < N; i++) { f.Contains(c[i]); } for (int i = 0; i < N; i++) { c5.Contains(c[i]); } } for (int iterationIdx = 0; iterationIdx < IterationCount; iterationIdx++) { // to minimize the effects of the loop code on the count, unroll each benchmark 2 times // also alternate randomly between the order of these to minimize any effects of order // not sure what effects loop unrolling has since that part isn't contained in the stopwatch time // still there might be some residual effects on CPU registers? - not really sure // 1 // there is some overhead that should be removed - it is returning from GetTimestamp and setting startTicks and afterwards calling GetTimestamp until the point where the return value is obtained // we should determine this overhead by calling startTicks = Stopwatch.GetTimestamp(); for (int i = 0; i < N; i++) { h.Contains(c[i]); } ticksH[ticksIdxForH++] = Stopwatch.GetTimestamp() - startTicks; startTicks = Stopwatch.GetTimestamp(); for (int i = 0; i < N; i++) { f.Contains(c[i]); } ticksF[ticksIdxForF++] = Stopwatch.GetTimestamp() - startTicks; startTicks = Stopwatch.GetTimestamp(); for (int i = 0; i < N; i++) { c5.Contains(c[i]); } ticksC[ticksIdxForC++] = Stopwatch.GetTimestamp() - startTicks; } } // summarize and output the data BenchmarkSummaries summaries = new BenchmarkSummaries(); summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadTicks, nArray, IterationCount * LoopUnrollCount, ticksH), "SCG_Contains"); summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadTicks, nArray, IterationCount * LoopUnrollCount, ticksF), "Fast_Contains"); summaries.AddNSummaryList(NSummary.CreateNSummaryListForBenchmark(overheadTicks, nArray, IterationCount * LoopUnrollCount, ticksC), "C5_Contains"); summaries.OutputSummariesToFile(outputFileName, "SCG_Contains"); }