/// <summary>
/// Verifies that repeatedly applying the add-acyclic-connection mutation produces valid genomes
/// as mutations accumulate: each child has exactly one new non-duplicate connection, keeps the
/// parent's node set, remains acyclic, and both depth-analysis algorithms agree on node depths.
/// </summary>
public void TestAddAcyclicConnection_CumulativeAdditions()
{
    var pop = CreateNeatPopulation();
    var generationSeq = new Int32Sequence();
    var genomeBuilder = NeatGenomeBuilderFactory<double>.Create(pop.MetaNeatGenome);
    var rootGenome = pop.GenomeList[0];
    var strategy = new AddAcyclicConnectionStrategy<double>(
        pop.MetaNeatGenome, genomeBuilder,
        pop.GenomeIdSeq, pop.InnovationIdSeq, generationSeq);

    IRandomSource rng = RandomDefaults.CreateRandomSource();

    // The mutation does not add/remove nodes, so every descendant shares the root's node ID set.
    var nodeIdSet = GetNodeIdSet(rootGenome);

    CyclicGraphAnalysis cyclicGraphAnalysis = new CyclicGraphAnalysis();
    AcyclicGraphDepthAnalysis graphDepthAnalysis = new AcyclicGraphDepthAnalysis();

    // Run the inner loop test multiple times.
    // Note. The add-connection mutations are random, thus each loop accumulates a different set of mutations.
    for (int i = 0; i < 50; i++)
    {
        var parentGenome = rootGenome;

        // Accumulate random mutations for some number of loops.
        for (int j = 0; j < 20;)
        {
            // Bug fix: mutate (and assert against) the accumulated parent genome rather than
            // the root genome; the previous code passed rootGenome, so mutations never accumulated.
            var childGenome = strategy.CreateChildGenome(parentGenome, rng);

            // Note. the strategy will return a null if it cannot find an acyclic connection to add;
            // test for this and try again. The test will be for N successful mutations rather than N attempts.
            if (null == childGenome)
            {
                continue;
            }

            // The child genome should have one more connection than parent.
            Assert.AreEqual(parentGenome.ConnectionGenes.Length + 1, childGenome.ConnectionGenes.Length);

            // The child genome's new connection should not be a duplicate of any of the existing/parent connections.
            var connSet = GetDirectedConnectionSet(parentGenome);
            var childConnSet = GetDirectedConnectionSet(childGenome);
            var newConnList = new List<DirectedConnection>(childConnSet.Except(connSet));
            Assert.AreEqual(1, newConnList.Count);

            // The connection genes should be sorted.
            Assert.IsTrue(SortUtils.IsSortedAscending(childGenome.ConnectionGenes._connArr));

            // The child genome should have the same set of node IDs as the parent.
            var childNodeIdSet = GetNodeIdSet(childGenome);
            Assert.IsTrue(nodeIdSet.SetEquals(childNodeIdSet));

            // The child genome should describe an acyclic graph, i.e. the new connection should not have
            // formed a cycle in the graph.
            var digraph = childGenome.DirectedGraph;
            Assert.IsFalse(cyclicGraphAnalysis.IsCyclic(digraph));

            // Run the acyclic graph depth analysis algorithm.
            GraphDepthInfo depthInfo = graphDepthAnalysis.CalculateNodeDepths(childGenome.DirectedGraph);

            // Run again with the alternative algorithm (that uses function recursion).
            GraphDepthInfo depthInfo2 = AcyclicGraphDepthAnalysisByRecursion.CalculateNodeDepths(childGenome.DirectedGraph);

            // Both algorithms must report a depth for every node and agree on every depth.
            Assert.AreEqual(nodeIdSet.Count, depthInfo._nodeDepthArr.Length);
            Assert.AreEqual(nodeIdSet.Count, depthInfo2._nodeDepthArr.Length);
            ArrayTestUtils.Compare(depthInfo2._nodeDepthArr, depthInfo._nodeDepthArr);

            // Set the child genome to be the new parent, thus we accumulate random new connections over time.
            parentGenome = childGenome;

            // Increment for successful tests only.
            j++;
        }
    }
}
/// <summary>
/// A null associated array at index 0 must be reported by argument validation.
/// </summary>
public void QuickSortArrayAndAssociatedArray_AssociatedArray0ArrayNull_ValidateIsNull()
{
    // First associated array is null; validation should name "AssociatedLists" at index 0.
    Element[] nullAssociated = null;

    AssertValidation.NotNullAll(
        () => SortUtils.QuickSort(true, elementComparer, new Element[6], nullAssociated, new Element[6]),
        "AssociatedLists",
        0);
}
/// <summary>
/// A primary list (3 elements) whose size differs from the associated arrays (6 elements)
/// must fail size validation, reporting both names and both sizes.
/// </summary>
public void QuickSortArrayAndAssociatedArray_ListDifferentSizes_ValidateIsSizesEquals() =>
    AssertValidation.SizesEquals(
        () => SortUtils.QuickSort(true, elementComparer, new Element[3], new Element[6], new Element[6]),
        "List",
        "AssociatedLists",
        0,
        3,
        6);
/// <summary>
/// Speciates the offspring genomes in genomeList into the provided species. In contrast to
/// SpeciateGenomes(), genomeList is taken to be a list of new genomes (e.g. offspring) that should be
/// added to existing species. That is, the species contain genomes that are not in genomeList
/// that we wish to keep; typically these would be elite genomes that are the parents of the
/// offspring.
/// </summary>
/// <param name="genomeList">New (offspring) genomes to allocate into the existing species.</param>
/// <param name="specieList">Existing, non-empty species; each genome's SpecieIdx is updated in place.</param>
public void SpeciateOffspring(IList <TGenome> genomeList, IList <Specie <TGenome> > specieList)
{
    // Each specie should contain at least one genome. We need at least one existing genome per specie to act
    // as a specie centroid in order to define where the specie is within the encoding space.
    Debug.Assert(SpeciationUtils.TestPopulatedSpecies(specieList), "SpeciateOffspring(IList<TGenome>,IList<Species<TGenome>>) called with an empty specie.");

    // Make a copy of genomeList and shuffle the items, so that allocation order carries no bias
    // from the caller's ordering.
    List <TGenome> gList = new List <TGenome>(genomeList);
    SortUtils.Shuffle(gList, _rng);

    // Count how many genomes we have in total (offspring plus genomes already in species).
    int genomeCount = gList.Count;
    int totalGenomeCount = genomeCount;
    foreach (Specie <TGenome> specie in specieList)
    {
        totalGenomeCount += specie.GenomeList.Count;
    }

    // We attempt to evenly distribute genomes between species.
    // Calc how many genomes per specie. Baseline number given by integer division rounding down (by truncating fractional part).
    // This is guaranteed to be at least 1 because genomeCount >= specieCount.
    int specieCount = specieList.Count;
    int genomesPerSpecie = totalGenomeCount / specieCount;

    // Sort species, smallest first. We must make a copy of specieList to do this; Species must remain at
    // the correct index in the main specieList. The principle here is that we wish to ensure that genomes are
    // allocated to smaller species in preference to larger species, this is motivated by the desire to create
    // evenly sized species.
    List <Specie <TGenome> > sList = new List <Specie <TGenome> >(specieList);
    sList.Sort(delegate(Specie <TGenome> x, Specie <TGenome> y)
    {
        // We use the difference in size where we aren't expecting that diff value to overflow the range of an int.
        return(x.GenomeList.Count - y.GenomeList.Count);
    });

    // Phase 1: add genomes into each specie in turn until they each reach genomesPerSpecie in size.
    int genomeIdx = 0;
    for (int i = 0; i < specieCount && genomeIdx < genomeCount; i++)
    {
        Specie <TGenome> specie = sList[i];
        int fillcount = genomesPerSpecie - specie.GenomeList.Count;
        if (fillcount <= 0)
        {
            // We may encounter species with more than genomesPerSpecie genomes. Since we have
            // ordered the species by size we break out of this loop and allocate the remaining
            // genomes randomly.
            break;
        }

        // Don't allocate more genomes than there are remaining in genomeList.
        fillcount = Math.Min(fillcount, genomeCount - genomeIdx);

        // Allocate memory for the genomes we are about to allocate;
        // This eliminates potentially having to dynamically resize the list one or more times.
        if (specie.GenomeList.Capacity < specie.GenomeList.Count + fillcount)
        {
            specie.GenomeList.Capacity = specie.GenomeList.Count + fillcount;
        }

        // genomeIdx test not required. Already taken into account by fillcount.
        for (int j = 0; j < fillcount; j++)
        {
            gList[genomeIdx].SpecieIdx = specie.Idx;
            specie.GenomeList.Add(gList[genomeIdx++]);
        }
    }

    // Phase 2: evenly allocate any remaining genomes, one per randomly chosen specie.
    // NOTE(review): this loop assigns at most one leftover genome per specie (it assumes the
    // remainder after phase 1 is <= specieCount) — confirm against the caller's invariants.
    int[] specieIdxArr = new int[specieCount];
    for (int i = 0; i < specieCount; i++)
    {
        specieIdxArr[i] = i;
    }
    SortUtils.Shuffle(specieIdxArr, _rng);

    for (int i = 0; i < specieCount && genomeIdx < genomeCount; i++, genomeIdx++)
    {
        int specieIdx = specieIdxArr[i];
        gList[genomeIdx].SpecieIdx = specieIdx;
        specieList[specieIdx].GenomeList.Add(gList[genomeIdx]);
    }

    Debug.Assert(SpeciationUtils.PerformIntegrityCheck(specieList));
}
/// <summary>
/// Passing a null list to the comparer-based QuickSort overload must fail null validation
/// with the parameter name "List".
/// </summary>
public void QuickSortOnlyArray_ArrayNull_ValidateIsNull() =>
    AssertValidation.NotNull(
        () => SortUtils.QuickSort(true, elementComparer, null),
        "List");
/// <summary>
/// An unsorted int array must be rejected by the comparer-based IsSortedAscending overload.
/// </summary>
/// <param name="arr">Theory data: an int array that is NOT in ascending order.</param>
public void IsSortedAscending_Comparer_Int_NotSorted(int[] arr) =>
    Assert.False(SortUtils.IsSortedAscending(arr, Comparer <int> .Default));
/// <summary>
/// An unsorted string array must be rejected by the comparer-based IsSortedAscending overload.
/// </summary>
/// <param name="arr">Theory data: strings that are NOT in ascending order.</param>
public void IsSortedAscending_Comparer_String_NotSorted(params string[] arr) =>
    Assert.False(SortUtils.IsSortedAscending(arr, Comparer <string> .Default));
/// <summary>
/// An unsorted int array must be rejected by the default-comparison IsSortedAscending overload.
/// </summary>
/// <param name="arr">Theory data: an int array that is NOT in ascending order.</param>
public void IsSortedAscending_Int_NotSorted(int[] arr) =>
    Assert.False(SortUtils.IsSortedAscending <int>(arr));
/// <summary>
/// An unsorted string array must be rejected by the default-comparison IsSortedAscending overload.
/// </summary>
/// <param name="arr">Theory data: strings that are NOT in ascending order.</param>
public void IsSortedAscending_String_NotSorted(params string[] arr) =>
    Assert.False(SortUtils.IsSortedAscending <string>(arr));
// Renders the "extrainfo" and "moneysum" sections of the given JSON object as HTML tables.
// Returns true on success; returns false (with html possibly partially built) if any cast,
// lookup or conversion throws. The blanket catch appears to be a deliberate best-effort
// guard against malformed JSON — NOTE(review): confirm callers rely on the bool rather
// than an exception.
private bool ConvertToDomTable(JObject json, out string html)
{
    html = "";
    try
    {
        // cnt is shared across all "extrainfo" bots: each distinct inner property name
        // gets a unique column index, with column 0 reserved for the bot key.
        int cnt = 0;
        SortUtils.Sort(json);
        foreach (var table in json)
        {
            // Only these two top-level sections are rendered; everything else is skipped.
            if (table.Key != "extrainfo" && table.Key != "moneysum")
            {
                continue;
            }
            string header = "";
            string body = "";
            // Maps inner property name -> column index (1-based; 0 is the bot key column).
            Dictionary <string, int> mapping = new Dictionary <string, int>();
            if (table.Key == "extrainfo")
            {
                header = "<table style=\"width:100%\">";
                // First pass: discover the union of property names across all bots,
                // assigning each a stable column index.
                foreach (var bot in (JObject)table.Value)
                {
                    foreach (JProperty innerkey in ((JObject)bot.Value).Properties())
                    {
                        if (!mapping.ContainsKey(innerkey.Name))
                        {
                            mapping[innerkey.Name] = ++cnt;
                        }
                    }
                }
                // Header row: column names placed at their assigned indices (index 0 left null).
                string[] thing = new string[cnt + 1];
                foreach (string x in mapping.Keys)
                {
                    thing[mapping[x]] = x;
                }
                body += Row(thing, "th");
                // Second pass: one row per bot; missing properties leave null cells.
                foreach (var bot in (JObject)table.Value)
                {
                    thing = new string[cnt + 1];
                    thing[0] = bot.Key;
                    foreach (JProperty innerkey in ((JObject)bot.Value).Properties())
                    {
                        thing[mapping[innerkey.Name]] = (string)innerkey.Value;
                    }
                    body += Row(thing, "td");
                }
            }
            else if (table.Key == "moneysum")
            {
                header = "<table style=\"width:50%\">";
                // Two-column key/value table; every row uses "th" cells.
                string[] thing = new string[2];
                foreach (var field in (JObject)table.Value)
                {
                    thing[0] = field.Key;
                    thing[1] = (string)field.Value;
                    body += Row(thing, "th");
                }
            }
            string footer = "</table>";
            html += header + body + footer;
        }
        return(true);
    }
    catch
    {
        // Best-effort: any malformed structure yields false rather than propagating.
        return(false);
    }
}
/// <summary>
/// A sorted int array must be accepted by the default-comparison IsSortedAscending overload.
/// </summary>
/// <param name="arr">Theory data: an int array already in ascending order.</param>
public void IsSortedAscending_Int_Sorted(int[] arr) =>
    Assert.True(SortUtils.IsSortedAscending <int>(arr));
// Lazily enumerates files under 'root' (filtered by 'extensions') in descending path order,
// starting just before 'beginFile' and wrapping around: first the remainder of beginFile's
// directory, then ancestor directories up to root, then the whole tree from root until
// beginFile is reached again. I/O errors on a directory silently end or skip that portion
// of the walk (by design — the try blocks only wrap the Directory calls, since C# forbids
// 'yield return' inside a try that has a catch clause).
public static IEnumerable <string> Get(string root, string beginFile, string[] extensions)
{
    // Treat a null start point as "no position": the wrap-around phase then yields everything.
    if (beginFile == null)
    {
        beginFile = string.Empty;
    }
    IEnumerable <string> dirFiles;
    string directory = Helper.GetParent(beginFile);
    if (directory.Length > 0)
    {
        // Phase 1: files in beginFile's own directory that sort before beginFile.
        try
        {
            dirFiles = Directory.GetFiles(directory).Where(p => Helper.CompareFilePath(beginFile, p) > 0);
            Helper.FilterFiles(ref dirFiles, extensions);
        }
        catch
        {
            // Directory unreadable: abandon the enumeration entirely.
            yield break;
        }
        foreach (string path in SortUtils.HeapSortDesc(dirFiles, Helper.CompareFilePath))
        {
            yield return(path);
        }
        Helper.NormalizeDirectoryPath(ref root);
        // Phase 2: climb toward root; at each level emit earlier sibling directories
        // (recursively) and then the parent's own files.
        while (true)
        {
            IEnumerable <string> parentDirs;
            string parent = Helper.GetParent(directory);
            if (parent.Length < root.Length)
            {
                // Climbed above root: stop ascending.
                break;
            }
            try
            {
                parentDirs = Directory.GetDirectories(parent).Select(Helper.NormalizeDirectoryPath)
                             .Where(p => Helper.CompareDirectoryPath(directory, p) > 0);
            }
            catch
            {
                // Parent unreadable: skip this level but keep climbing.
                directory = parent;
                continue;
            }
            foreach (string brother in SortUtils.HeapSortDesc(parentDirs, Helper.CompareDirectoryPath))
            {
                foreach (string file in EnumerateFilesReverseRecursive(brother, extensions))
                {
                    yield return(file);
                }
            }
            try
            {
                dirFiles = Directory.GetFiles(parent);
                Helper.FilterFiles(ref dirFiles, extensions);
            }
            catch
            {
                yield break;
            }
            foreach (string file in SortUtils.HeapSortDesc(dirFiles, Helper.CompareFilePath))
            {
                yield return(file);
            }
            directory = parent;
        }
    }
    // Phase 3 (wrap-around): walk the whole tree from root, stopping once beginFile is reached.
    foreach (string file in EnumerateFilesReverseRecursive(root, extensions))
    {
        if (Helper.CompareFilePath(beginFile, file) >= 0)
        {
            yield break;
        }
        yield return(file);
    }
}
/// <summary>
/// Builds a target type based on the declared type that is associated with the current type,
/// then caches its factory method and its [Key]-annotated field/property accessors
/// (sorted by key order) for later formatting use. Does nothing if the declared type
/// has already been built.
/// </summary>
/// <param name="declaredType"> Declared type. </param>
private void BuildType(Type declaredType)
{
    Type createdType = null;
    // createdTypes doubles as the "already built" cache.
    if (!this.createdTypes.TryGetValue(declaredType, out createdType))
    {
        // Creates a new type. For a generic type definition, close the host type over
        // the declared type's generic arguments; otherwise use the host type as-is.
        createdType = (this.isGenericTypeDefinition) ?
                      this.hostType.MakeGenericType(declaredType.GetGenericArguments()) :
                      this.hostType;
        // Creates an activator for the created type
        IFunctionCallback <object> factoryMethod = ReflectionUtils.CreateInstance(createdType);
        // Creates the callbacks for the get/set accessors of the fields
        KeyAttribute keyAttribute;
        FormattableValue value;
        LinkedList <FormattableValue> loadedValues = new LinkedList <FormattableValue>();
        IFunctionCallback <object, object> getAccessor = null;
        IActionCallback <object, object> setAccessor = null;
        FieldInfo[] fields = createdType.GetFields(BindingFlags.NonPublic | BindingFlags.Public |
                                                   BindingFlags.Instance | BindingFlags.FlattenHierarchy);
        int fieldsLength = fields.Length;
        FieldInfo field = null;
        for (int i = 0; i < fieldsLength; i++)
        {
            field = fields[i];
            keyAttribute = field.GetAttribute <KeyAttribute>(true);
            // Only [Key]-annotated fields participate in formatting.
            if (keyAttribute != null)
            {
                getAccessor = ReflectionUtils.CreateGetAccessor(createdType, field);
                setAccessor = ReflectionUtils.CreateSetAccessor(createdType, field);
                value = new FormattableValue(
                    keyAttribute.Name,
                    keyAttribute.Optional,
                    keyAttribute.Order,
                    field.FieldType,
                    getAccessor,
                    setAccessor
                    );
                loadedValues.AddLast(value);
            }
        }
        // Recursive action used for getting the key attribute (and property info) of overridden
        // properties from base types. NOTE(review): it communicates its result by mutating the
        // shared locals keyAttribute/property captured from the enclosing scope.
        PropertyInfo property = null;
        MethodInfo propertyGetMethod = null;
        Type baseType = null;
        Action <Type, string> getPropertyKeyAttributeFromBase = null;
        getPropertyKeyAttributeFromBase = (t, n) =>
        {
            baseType = t.BaseType;
            if (baseType == null)
            {
                return;
            }
            property = baseType.GetProperty(n, BindingFlags.NonPublic | BindingFlags.Public |
                                            BindingFlags.Instance | BindingFlags.DeclaredOnly);
            if (property != null)
            {
                keyAttribute = property.GetAttribute <KeyAttribute>(true);
                if (keyAttribute == null)
                {
                    propertyGetMethod = property.GetGetMethod(true);
                    if (propertyGetMethod == null || propertyGetMethod.GetBaseDefinition() != propertyGetMethod)
                    {
                        // Property is overridden - tests the properties of the base type
                        getPropertyKeyAttributeFromBase(baseType, n);
                    }
                }
            }
            else
            {
                // Required property doesn't exist in this type - tests the properties of the base type
                getPropertyKeyAttributeFromBase(baseType, n);
            }
        };
        // Creates the callbacks for the get/set accessors of the properties
        PropertyInfo[] properties = createdType.GetProperties(BindingFlags.NonPublic | BindingFlags.Public |
                                                              BindingFlags.Instance | BindingFlags.FlattenHierarchy);
        int propertiesLength = properties.Length;
        for (int i = 0; i < propertiesLength; i++)
        {
            property = properties[i];
            keyAttribute = property.GetAttribute <KeyAttribute>(true);
            if (keyAttribute == null)
            {
                propertyGetMethod = property.GetGetMethod(true);
                if (propertyGetMethod == null || propertyGetMethod.GetBaseDefinition() != propertyGetMethod)
                {
                    // Property is overridden - search the base types for the [Key] attribute
                    // (may update keyAttribute/property via the captured locals above).
                    getPropertyKeyAttributeFromBase(createdType, property.Name);
                }
            }
            // Skip properties with no [Key] attribute anywhere in the hierarchy, and indexers.
            if (keyAttribute == null || property.GetIndexParameters().Length != 0)
            {
                continue;
            }
            getAccessor = ReflectionUtils.CreateGetAccessor(createdType, property);
            setAccessor = ReflectionUtils.CreateSetAccessor(createdType, property);
            value = new FormattableValue(
                keyAttribute.Name,
                keyAttribute.Optional,
                keyAttribute.Order,
                property.PropertyType,
                getAccessor,
                setAccessor
                );
            loadedValues.AddLast(value);
        }
        // Sorts the values by order
        FormattableValue[] sortedValues = loadedValues.ToArray();
        SortUtils.QuickSort(true, sortedValues);
        // Saves the factory method, sorted values and created type
        this.factoryMethods.Add(declaredType, factoryMethod);
        this.values.Add(declaredType, sortedValues);
        this.createdTypes.Add(declaredType, createdType);
    }
}
/// <summary>
/// Creates a single randomly initialised genome.
/// A random set of connections are made from the input to the output neurons, the number of
/// connections made is based on the NeatGenomeParameters.InitialInterconnectionsProportion
/// which specifies the proportion of all possible input-output connections to be made in
/// initial genomes.
///
/// The connections that are made are allocated innovation IDs in a consistent manner across
/// the initial population of genomes. To do this we allocate IDs sequentially to all possible
/// interconnections and then randomly select some proportion of connections for inclusion in the
/// genome. In addition, for this scheme to work the innovation ID generator must be reset to zero
/// prior to each call to CreateGenome(), and a test is made to ensure this is the case.
///
/// The consistent allocation of innovation IDs ensure that equivalent connections in different
/// genomes have the same innovation ID, and although this isn't strictly necessary it is
/// required for sexual reproduction to work effectively - like structures are detected by comparing
/// innovation IDs only.
/// </summary>
/// <param name="birthGeneration">The current evolution algorithm generation.
/// Assigned to the new genome as its birth generation.</param>
public NeatGenome CreateGenome(uint birthGeneration)
{
    NeuronGeneList neuronGeneList = new NeuronGeneList(_inputNeuronCount + _outputNeuronCount);
    NeuronGeneList inputNeuronGeneList = new NeuronGeneList(_inputNeuronCount); // includes single bias neuron.
    NeuronGeneList outputNeuronGeneList = new NeuronGeneList(_outputNeuronCount);

    // Create a single bias neuron.
    uint biasNeuronId = _innovationIdGenerator.NextId;
    if (0 != biasNeuronId)
    {
        // The ID generator must be reset before calling this method so that all generated genomes use the
        // same innovation ID for matching neurons and structures.
        throw new SharpNeatException("IdGenerator must be reset before calling CreateGenome(uint)");
    }

    // Note. Genes within nGeneList must always be arranged according to the following layout plan.
    //   Bias - single neuron. Innovation ID = 0
    //   Input neurons.
    //   Output neurons.
    //   Hidden neurons.
    NeuronGene neuronGene = CreateNeuronGene(biasNeuronId, NodeType.Bias);
    inputNeuronGeneList.Add(neuronGene);
    neuronGeneList.Add(neuronGene);

    // Create input neuron genes.
    for (int i = 0; i < _inputNeuronCount; i++)
    {
        neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Input);
        inputNeuronGeneList.Add(neuronGene);
        neuronGeneList.Add(neuronGene);
    }

    // Create output neuron genes.
    for (int i = 0; i < _outputNeuronCount; i++)
    {
        neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Output);
        outputNeuronGeneList.Add(neuronGene);
        neuronGeneList.Add(neuronGene);
    }

    // Define all possible connections between the input and output neurons (fully interconnected).
    // IDs are allocated in a fixed src-major order, so the same connection gets the same ID in
    // every genome created against a freshly reset generator.
    int srcCount = inputNeuronGeneList.Count;
    int tgtCount = outputNeuronGeneList.Count;
    ConnectionDefinition[] connectionDefArr = new ConnectionDefinition[srcCount * tgtCount];

    for (int srcIdx = 0, i = 0; srcIdx < srcCount; srcIdx++)
    {
        for (int tgtIdx = 0; tgtIdx < tgtCount; tgtIdx++)
        {
            connectionDefArr[i++] = new ConnectionDefinition(_innovationIdGenerator.NextId, srcIdx, tgtIdx);
        }
    }

    // Shuffle the array of possible connections.
    SortUtils.Shuffle(connectionDefArr, _rng);

    // Select connection definitions from the head of the list and convert them to real connections.
    // We want some proportion of all possible connections but at least one (Connectionless genomes are not allowed).
    int connectionCount = (int)NumericsUtils.ProbabilisticRound(
        (double)connectionDefArr.Length * _neatGenomeParamsComplexifying.InitialInterconnectionsProportion,
        _rng);
    connectionCount = Math.Max(1, connectionCount);

    // Create the connection gene list and populate it.
    ConnectionGeneList connectionGeneList = new ConnectionGeneList(connectionCount);
    for (int i = 0; i < connectionCount; i++)
    {
        ConnectionDefinition def = connectionDefArr[i];
        NeuronGene srcNeuronGene = inputNeuronGeneList[def._sourceNeuronIdx];
        NeuronGene tgtNeuronGene = outputNeuronGeneList[def._targetNeuronIdx];

        ConnectionGene cGene = new ConnectionGene(def._innovationId,
                                                  srcNeuronGene.InnovationId,
                                                  tgtNeuronGene.InnovationId,
                                                  GenerateRandomConnectionWeight());
        connectionGeneList.Add(cGene);

        // Register connection with endpoint neurons.
        srcNeuronGene.TargetNeurons.Add(cGene.TargetNodeId);
        tgtNeuronGene.SourceNeurons.Add(cGene.SourceNodeId);
    }

    // Ensure connections are sorted (the shuffle above destroyed innovation ID order).
    connectionGeneList.SortByInnovationId();

    // Create and return the completed genome object.
    return(CreateGenome(_genomeIdGenerator.NextId, birthGeneration,
                        neuronGeneList, connectionGeneList,
                        _inputNeuronCount, _outputNeuronCount, false));
}
/// <summary>
/// A null associated-arrays argument must fail null validation with the
/// parameter name "AssociatedLists".
/// </summary>
public void QuickSortArrayAndAssociatedArray_AssociatedArrayNull_ValidateIsNull()
{
    // The whole associated-arrays params argument is null (not merely one of its elements).
    IList <int>[] nullAssociated = null;

    AssertValidation.NotNull(
        () => SortUtils.QuickSort <int, int>(true, new int[6], nullAssociated),
        "AssociatedLists");
}
/// <summary>
/// Passing a null list to the generic QuickSort overload must fail null validation
/// with the parameter name "List".
/// </summary>
public void QuickSortOnlyArray_ArrayNull_ValidateIsNull() =>
    AssertValidation.NotNull(
        () => SortUtils.QuickSort <int>(true, null),
        "List");